text stringlengths 8 4.13M |
|---|
use specs::*;
use component::flag::{IsDead, IsSpectating};
#[derive(SystemData)]
/// Read-only view over the flag components that determine whether an
/// entity counts as "alive" (i.e. neither spectating nor dead).
pub struct IsAlive<'a> {
    /// Set on entities that are currently spectating.
    pub is_spec: ReadStorage<'a, IsSpectating>,
    /// Set on entities that are currently dead.
    pub is_dead: ReadStorage<'a, IsDead>,
}
impl<'a> IsAlive<'a> {
    /// Returns `true` when `ent` is neither spectating nor dead.
    pub fn get(&self, ent: Entity) -> bool {
        let spectating = self.is_spec.get(ent).is_some();
        let dead = self.is_dead.get(ent).is_some();
        !(spectating || dead)
    }
}
|
use datum::Datum;
use spell::Instruction;
use spell::Local;
/// A call stack is a sequence of stack frames.
#[derive(Debug)]
pub struct CallStack<'a> {
    /// Innermost (most recently pushed) frame is the last element.
    pub stack_frames: Vec<StackFrame<'a>>,
}
/// A stack frame consists of a program counter and local variables.
///
/// A stack frame represents an active spell invocation.
#[derive(Debug)]
pub struct StackFrame<'a> {
    /// Where in the spell's instruction list execution resumes.
    pub program_counter: ProgramCounter<'a>,
    /// Fixed-size slot array for this invocation's locals.
    pub local_variables: Box<[Datum<'a>]>,
    /// The local variable to store the result into when the callee returns. If
    /// this is the active stack frame, the value of this field is irrelevant.
    pub return_into: Local,
}
/// A program counter points into the instructions of a spell, and tells the
/// interpreter which instruction comes next.
#[derive(Clone, Copy, Debug)]
pub struct ProgramCounter<'a> {
    /// The full instruction list of the spell being executed.
    pub instructions: &'a [Instruction],
    /// Index into `instructions` of the next instruction to execute.
    pub next_instruction: usize,
}
impl ProgramCounter<'_> {
/// Get the next instruction or panic.
#[inline(always)]
pub fn get(&self) -> &Instruction {
self.instructions.get(self.next_instruction)
.expect("Program counter out of bounds")
}
/// Jump to the next instruction.
#[inline(always)]
pub fn advance(&self) -> Self {
self.jump(self.next_instruction + 1)
}
/// Jump to an arbitrary instruction.
#[inline(always)]
pub fn jump(&self, target: usize) -> Self {
ProgramCounter{
instructions: self.instructions,
next_instruction: target,
}
}
}
|
use std::net::SocketAddr;
use std::sync::Arc;
use mio::{Events, Interest, Poll, Token};
use mio::net::{TcpListener, TcpStream};
use utils::contexts::Message::{Threads, NewConnection};
use utils::contexts::PaxyThread;
use packet_transformation::handling::HandlingContext;
use packets::{c2s, s2c};
use std::{sync, thread};
use packet_transformation::TransformationResult::{Unchanged, Modified, Canceled};
use utils::buffers::{Strings, StringsMut};
mod networking;
/// Registers the packet transformers that track per-connection protocol state
/// and rewrite the server brand string.
fn register_packets(handler_context: &mut HandlingContext) {
    // Handshake: record the requested next protocol state on both sides of the proxy.
    handler_context.register_transformer(|_thread_ctx, connection_ctx, other_ctx, packet: &mut c2s::handshake::HandshakePacket| {
        connection_ctx.state = packet.next_state.val as u8;
        other_ctx.state = packet.next_state.val as u8;
        Unchanged
    });
    // Login success: both halves of the connection move to the PLAY state.
    handler_context.register_transformer(|_thread_ctx, connection_ctx, other_ctx, _packet: &mut s2c::login::LoginSuccess| {
        connection_ctx.state = packets::PLAY_STATE;
        other_ctx.state = packets::PLAY_STATE;
        Unchanged
    });
    // SetCompression: mirror the server's compression threshold on both contexts.
    handler_context.register_transformer(|_thread_ctx, connection_ctx, other_ctx, packet: &mut s2c::login::SetCompression| {
        connection_ctx.compression_threshold = packet.threshold.val;
        other_ctx.compression_threshold = packet.threshold.val;
        Unchanged
    });
    // Brand plugin message: prefix the server brand so clients see the proxy in F3.
    handler_context.register_transformer(|_thread_ctx, _connection_ctx, _other_ctx, packet: &mut s2c::play::PluginMessage| {
        if packet.channel == "minecraft:brand" {
            let string = packet.data.get_string();
            packet.data.reset();
            packet.data.put_string(&format!("Paxy <-> {}", string));
            Modified
        } else {
            Unchanged
        }
    });
}
/// Placeholder for gameplay transformers; all examples below are intentionally
/// disabled. Kept as reference for the transformer API shape.
fn register_transformers(handler_context: &mut HandlingContext) {
    /*handler_context.register_transformer(|_thread_ctx, _connection_ctx, _other_ctx, packet: &mut s2c::play::EntityPositionPacket| {
        packet.delta_x = 0;
        packet.delta_y = 100;
        Modified
    });*/
    /*handler_context.register_transformer(|_thread_ctx, _connection_ctx, _other_ctx, _packet: &mut c2s::status::Ping| {
        Canceled
    });*/
}
/// Spawns one network worker thread with a bounded message channel and returns
/// its handle plus the sender used to notify it of new connections.
fn spawn_thread(handler: Arc<HandlingContext>, id: usize) -> PaxyThread {
    // todo adjust? this prob isnt enough
    // NOTE(review): channel capacity 1000 — senders block when the worker lags.
    let (tx, rx) = sync::mpsc::sync_channel(1000);
    let thread = thread::spawn(move || {
        networking::thread_loop(rx, handler, id);
    });
    PaxyThread { thread, channel: tx }
}
//todo use generics over dynamic dispatch
pub fn start(proxy_address: SocketAddr, server_address: SocketAddr) -> Result<(), Box<dyn std::error::Error>> {
println!("Starting Paxy");
// Create TCP server
let mut listener = TcpListener::bind(proxy_address)?;
// Registering
let mut handler_context = HandlingContext::new();
register_packets(&mut handler_context);
register_transformers(&mut handler_context);
let handler_context = Arc::new(handler_context);
// Setup network threads
let thread_count = num_cpus::get() * 2;
let mut threads = Vec::with_capacity(thread_count);
for thread in 0..thread_count {
let paxy_thread = spawn_thread(handler_context.clone(), thread);
threads.push(Arc::new(paxy_thread));
}
// Finalize the thread list
let threads = Arc::new(threads);
for thread in threads.iter() {
thread.notify(Threads(threads.clone()))?
}
let mut next_thread = 0usize;
let mut events = Events::with_capacity(128);
let mut poll = Poll::new().expect("could not unwrap poll");
let listener_token = Token(0);
poll.registry().register(&mut listener, listener_token, Interest::READABLE).unwrap();
println!("Paxy Started");
// handles accepting connections and messages a thread about it
loop {
poll.poll(&mut events, None).expect("couldn't poll");
for event in events.iter() {
if event.token() == listener_token {
loop {
if let Ok((client_socket, _)) = listener.accept() {
// New client, bind it to a thread
threads[next_thread].notify(NewConnection(client_socket, TcpStream::connect(server_address)?))?;
next_thread += 1;
next_thread %= thread_count;
} else {
break;
}
}
}
}
}
} |
use nom::IResult;
use crate::kraken::Indent;
/// Consumes leading ASCII space characters from `input`, collecting one
/// one-byte slice per space consumed.
///
/// Never fails: zero leading spaces yields an empty `Vec` and the input
/// unchanged. Callers use `.len()` of the result as an indentation depth.
pub fn spaces_and_rest(input: &[u8]) -> IResult<&[u8], Vec<&[u8]>> {
    nom::multi::fold_many0(
        nom::bytes::complete::tag(" "),
        Vec::new(),
        |mut acc: Vec<_>, item| {
            acc.push(item);
            acc
        },
    )(input)
}
/// Parses the indentation depth (count of leading spaces) and returns it
/// together with the remainder of the line.
///
/// NOTE(review): the remaining-input slice returned to nom is always `&[]` —
/// the tail is only surfaced inside the output tuple as `name`. Confirm that
/// downstream combinators expect the input to be fully consumed here.
pub fn parse_ident_organism_name(input: &[u8]) -> IResult<&[u8], (Indent, &[u8])> {
    let (name, spaces) = spaces_and_rest(input)?;
    Ok((&[], (spaces.len(), name)))
}
|
use std::collections::HashSet;
use std::fs::File;
use std::io::prelude::*;
/// One group's customs-declaration answers (Advent of Code 2020, day 6).
pub struct Group {
    // One entry per person; each set holds the bytes of the questions that
    // person answered "yes" to.
    answers: Vec<HashSet<u8>>,
}
impl Group {
    /// Parses one group's block of text, one line per person.
    ///
    /// Avoids the original per-line `String::from` allocation — `bytes()`
    /// works directly on the borrowed `&str` slices.
    pub fn new(answers: &str) -> Group {
        let all_answers: Vec<HashSet<u8>> = answers
            .split('\n')
            .map(|line| line.bytes().collect())
            .collect();
        Group { answers: all_answers }
    }
    /// Number of questions answered "yes" by at least one person (set union).
    pub fn anyone_positive(&self) -> u32 {
        let common: HashSet<u8> = self.answers.iter()
            .fold(HashSet::new(), |acc, x| acc.union(x).copied().collect());
        common.len() as u32
    }
    /// Number of questions answered "yes" by every person (set intersection).
    pub fn everyone_positive(&self) -> u32 {
        let mut sets = self.answers.iter();
        // Guard the empty case instead of indexing `self.answers[0]`, which
        // would panic. (Unreachable via `new`, since `split` yields at least
        // one line, but the guard costs nothing.)
        let first = match sets.next() {
            Some(set) => set.clone(),
            None => return 0,
        };
        let common = sets.fold(first, |acc, x| acc.intersection(x).copied().collect());
        common.len() as u32
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Example cases from the day 6 puzzle statement for `anyone_positive`.
    #[test]
    fn common_answers() {
        let group = Group::new("abc");
        assert_eq!(group.anyone_positive(), 3);
        let group = Group::new("a\nb\nc");
        assert_eq!(group.anyone_positive(), 3);
        let group = Group::new("ab\nac");
        assert_eq!(group.anyone_positive(), 3);
        let group = Group::new("a\na\na\na");
        assert_eq!(group.anyone_positive(), 1);
        let group = Group::new("b");
        assert_eq!(group.anyone_positive(), 1);
    }
}
/// The full day-6 puzzle input: one `Group` per blank-line-separated block.
pub struct Problem {
    data: Vec<Group>,
}
impl Problem {
    /// Loads and parses the puzzle input from `input/day6`.
    ///
    /// # Panics
    /// Panics if the file is missing or unreadable.
    pub fn new() -> Problem {
        // `fs::read_to_string` replaces the manual File::open + read_to_string
        // dance with a single idiomatic call.
        let buffer = std::fs::read_to_string("input/day6")
            .expect("could not read input/day6");
        let res = buffer
            .trim_end()
            .split("\n\n")
            .map(Group::new)
            .collect();
        Problem { data: res }
    }
    /// Solves both parts and prints the answers.
    pub fn solve() {
        let problem = Problem::new();
        println!("Part a: {}", problem.part_a());
        println!("Part b: {}", problem.part_b());
    }
    /// Sum over groups of "anyone answered yes" counts.
    fn part_a(&self) -> u32 {
        self.data.iter().map(|x| x.anyone_positive()).sum()
    }
    /// Sum over groups of "everyone answered yes" counts.
    fn part_b(&self) -> u32 {
        self.data.iter().map(|x| x.everyone_positive()).sum()
    }
}
|
use anyhow::{format_err, Error};
use lazy_static::lazy_static;
use log::{debug, error, info};
use smallvec::{smallvec, SmallVec};
use stack_string::{format_sstr, StackString};
use std::{collections::HashMap, process::Stdio};
use tokio::{
io::{stdout, AsyncBufReadExt, AsyncWriteExt, BufReader},
process::Command,
sync::{Mutex, RwLock},
};
use url::Url;
lazy_static! {
    // Per-host mutexes serializing ssh invocations so at most one command
    // runs against a given host at a time.
    static ref LOCK_CACHE: RwLock<HashMap<StackString, Mutex<()>>> = RwLock::new(HashMap::new());
}
/// Connection coordinates (user, host, port) for running commands over ssh.
#[derive(Debug, Clone)]
pub struct SSHInstance {
    /// Remote login user.
    pub user: StackString,
    /// Remote host name or address.
    pub host: StackString,
    /// Remote ssh port; 22 is treated as the default (no `-p` flag emitted).
    pub port: u16,
}
impl SSHInstance {
    /// Creates a new instance and registers a per-host lock in `LOCK_CACHE`.
    ///
    /// NOTE(review): this unconditionally overwrites any existing lock entry
    /// for `host`; tasks still holding the old mutex are not serialized with
    /// holders of the new one — confirm this is intended.
    pub async fn new(user: &str, host: &str, port: u16) -> Self {
        LOCK_CACHE.write().await.insert(host.into(), Mutex::new(()));
        Self {
            user: user.into(),
            host: host.into(),
            port,
        }
    }
    /// Builds an instance from an `ssh://user@host:port` style URL.
    ///
    /// # Errors
    /// Returns an error if the URL has no host component.
    pub async fn from_url(url: &Url) -> Result<Self, Error> {
        let host = url.host_str().ok_or_else(|| format_err!("Parse error"))?;
        let port = url.port().unwrap_or(22);
        let user = url.username();
        Ok(Self::new(user, host, port).await)
    }
    /// Formats `user@host:path` (prefixed with `-p <port>` for non-default
    /// ports) for scp-style usage.
    #[must_use]
    pub fn get_ssh_str(&self, path: &str) -> StackString {
        if self.port == 22 {
            format_sstr!("{}@{}:{}", self.user, self.host, path)
        } else {
            format_sstr!("-p {} {}@{}:{}", self.port, self.user, self.host, path)
        }
    }
    /// Builds the argument list (`-C`, optional `-p <port>`, `user@host`) for
    /// invoking the `ssh` binary.
    #[must_use]
    pub fn get_ssh_username_host(&self) -> SmallVec<[StackString; 4]> {
        let user_str = format_sstr!("{}@{}", self.user, self.host);
        let port_str = format_sstr!("{}", self.port);
        if self.port == 22 {
            smallvec!["-C".into(), user_str,]
        } else {
            smallvec!["-C".into(), "-p".into(), port_str, user_str,]
        }
    }
    /// Runs `cmd` on the remote host and captures its stdout.
    ///
    /// # Errors
    /// Returns an error if no lock is registered for this host, the ssh
    /// process exits unsuccessfully, or its stdout is not valid UTF-8.
    pub async fn run_command_stream_stdout(&self, cmd: &str) -> Result<StackString, Error> {
        if let Some(host_lock) = LOCK_CACHE.read().await.get(&self.host) {
            let _guard = host_lock.lock().await;
            info!("cmd {}", cmd);
            let user_host = self.get_ssh_username_host();
            let mut args: SmallVec<[&str; 5]> = user_host.iter().map(StackString::as_str).collect();
            args.push(cmd);
            let process = Command::new("ssh").args(&args).output().await?;
            if process.status.success() {
                StackString::from_utf8_vec(process.stdout).map_err(Into::into)
            } else {
                error!("{}", StackString::from_utf8_lossy(&process.stderr));
                Err(format_err!("Process failed"))
            }
        } else {
            Err(format_err!("Failed to acquire lock"))
        }
    }
    /// Runs `cmd` on the remote host, streaming each stdout line to our stdout
    /// prefixed with `ssh://user@host`.
    ///
    /// # Errors
    /// Returns an error if spawning ssh, reading its stdout, or writing to
    /// our stdout fails.
    pub async fn run_command_print_stdout(&self, cmd: &str) -> Result<(), Error> {
        if let Some(host_lock) = LOCK_CACHE.read().await.get(&self.host) {
            // BUGFIX: `Mutex::lock()` returns a future; without `.await` the
            // guard was a dropped future and the per-host lock was never
            // actually held (compare run_command_stream_stdout above).
            let _guard = host_lock.lock().await;
            debug!("run_command_print_stdout cmd {}", cmd);
            let user_host = self.get_ssh_username_host();
            let mut args: SmallVec<[&str; 4]> = user_host.iter().map(StackString::as_str).collect();
            args.push(cmd);
            let mut command = Command::new("ssh")
                .args(&args)
                .stdout(Stdio::piped())
                .spawn()?;
            let stdout_handle = command
                .stdout
                .take()
                .ok_or_else(|| format_err!("No stdout"))?;
            let mut reader = BufReader::new(stdout_handle);
            let mut line = String::new();
            let mut stdout = stdout();
            while let Ok(bytes) = reader.read_line(&mut line).await {
                if bytes > 0 {
                    // Last element of the ssh arg list is `user@host`.
                    let user_host = &user_host[user_host.len() - 1];
                    let buf = format_sstr!("ssh://{user_host}{line}");
                    stdout.write_all(buf.as_bytes()).await?;
                } else {
                    break;
                }
                line.clear();
            }
            command.wait().await?;
        }
        // NOTE(review): unlike the sibling methods, a missing lock entry is
        // not an error here — the command is silently skipped. Confirm intended.
        Ok(())
    }
    /// Runs `cmd` on the remote host, inheriting stdio.
    ///
    /// # Errors
    /// Returns an error if no lock is registered for this host or the ssh
    /// process exits unsuccessfully.
    pub async fn run_command_ssh(&self, cmd: &str) -> Result<(), Error> {
        let user_host = self.get_ssh_username_host();
        let mut args: SmallVec<[&str; 4]> = user_host.iter().map(StackString::as_str).collect();
        args.push(cmd);
        if let Some(host_lock) = LOCK_CACHE.read().await.get(&self.host) {
            let _guard = host_lock.lock().await;
            debug!("run_command_ssh cmd {}", cmd);
            if Command::new("ssh").args(&args).status().await?.success() {
                Ok(())
            } else {
                Err(format_err!("{cmd} failed"))
            }
        } else {
            Err(format_err!("Failed to acquire lock"))
        }
    }
    /// Runs an arbitrary local command under this host's lock.
    ///
    /// # Errors
    /// Returns an error if no lock is registered for this host or the command
    /// exits unsuccessfully.
    pub async fn run_command(&self, cmd: &str, args: &[&str]) -> Result<(), Error> {
        if let Some(host_lock) = LOCK_CACHE.read().await.get(&self.host) {
            // BUGFIX: missing `.await` — the lock guard future was dropped
            // immediately, so the lock was never acquired.
            let _guard = host_lock.lock().await;
            debug!("cmd {} {}", cmd, args.join(" "));
            if Command::new(cmd).args(args).status().await?.success() {
                Ok(())
            } else {
                Err(format_err!("{} {} failed", cmd, args.join(" ")))
            }
        } else {
            Err(format_err!("Failed to acquire lock"))
        }
    }
    /// Copies `arg0` to `arg1` via `scp -B -q`.
    ///
    /// # Errors
    /// Returns an error if the scp process fails or the host lock is missing.
    pub async fn run_scp(&self, arg0: &str, arg1: &str) -> Result<(), Error> {
        self.run_command("scp", &["-B", "-q", arg0, arg1]).await
    }
}
|
#[cfg(feature = "logs")]
extern crate rayon_logs as rayon;
use ndarray::Array;
use rayon::subgraph;
use rayon::ThreadPoolBuilder;
use rayon_adaptive::prelude::*;
use rayon_adaptive::Policy;
use matrix_mult::matrix;
use matrix_mult::matrix_adaptive;
use ndarray::{linalg,ArrayView,ArrayViewMut};
use rand::Rng;
use matrix_mult::faster_vec;
use matrix_mult::naive_sequential;
use matrix_mult::vectorisation_packed_simd;
/// Benchmark harness: compares two parallel matrix-multiplication strategies
/// (adaptive Join policy vs sequential blocking) on 500x500 matrices and
/// writes a rayon-logs comparison report to `vectorisation_par.html`.
fn main() {
    let mut rng = rand::thread_rng();
    // One shared random perturbation so both algorithms multiply identical matrices.
    let random = rng.gen_range(0.0, 1.0);
    let height = 500;
    let width = 500;
    let pool = ThreadPoolBuilder::new()
        .build()
        .expect("Pool creation failed");
    pool.compare()
        .attach_algorithm_with_setup("vectorisation Join", || {
            // Deterministic-ish inputs: entries cycle through {0,1,2} +/- random.
            let an = Array::from_shape_fn((height, width), |(i, j)| {
                ((((j + i * width) % 3) as f32 + random) as f32)
            });
            let bn = Array::from_shape_fn((width, height), |(i, j)| {
                ((((j + 7 + i * height) % 3) as f32 - random) as f32)
            });
            let mut dest = Array::zeros((height, height));
            (an,bn,dest)
        } , |(a,b,mut c)| {
            let (ddim1,ddim2)= c.dim();
            let mat = matrix_adaptive::Matrix {
                a: a.view(),
                b: b.view(),
                d: c.view_mut(),
                asize: a.dim(),
                bsize: b.dim(),
                dsize: (ddim1,ddim2),
            };
            // Adaptive split with Join policy; block size height*height/64.
            mat.cut().with_policy(Policy::Join(height * height / 64)).for_each(|e| {
                let (a, b, mut d) = (e.a, e.b, e.d);
                let (row,col) = d.dim();
                // Log each leaf multiply; row^3 approximates its work.
                subgraph("work_op", (row * row * row), || {
                    vectorisation_packed_simd::mult_faster_from_ndarray(a,b,&mut d);
                })
            })
        })
        .attach_algorithm_with_setup("vectorisation seq blocks ", || {
            let an = Array::from_shape_fn((height, width), |(i, j)| {
                ((((j + i * width) % 3) as f32 + random) as f32)
            });
            let bn = Array::from_shape_fn((width, height), |(i, j)| {
                ((((j + 7 + i * height) % 3) as f32 - random) as f32)
            });
            let mut dest = Array::zeros((height, height));
            (an,bn,dest)
        } , |(a,b,mut c)|{
            let (row,col) = c.dim();
            // Sequential baseline: 600x600 blocks (i.e. one block at this size).
            let (avec, bvec, rvec) = naive_sequential::cut_in_blocks(a.view(), b.view(), c.view_mut(), 600, 600);
            naive_sequential::mult_blocks(avec, bvec, rvec, |a,b,mut c| {
                let (row,col) = c.dim();
                subgraph("work_op", row * row * row, || vectorisation_packed_simd::mult_faster_from_ndarray(a,b,&mut c))
            });
        })
        .generate_logs("vectorisation_par.html")
        .expect("writing logs failed");
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use async;
use failure::ResultExt;
use futures::channel::mpsc;
use futures::{FutureExt, StreamExt};
use parking_lot::RwLock;
use std::sync::Arc;
use common::bluetooth_commands::method_to_fidl;
use common::bluetooth_facade::BluetoothFacade;
use common::sl4f_types::{AsyncRequest, AsyncResponse};
/// Drives the SL4F request loop: receives `AsyncRequest`s from `receiver`,
/// translates each into a FIDL call via `method_to_fidl`, and sends the
/// response back on the request's own channel. Blocks the calling thread on a
/// single-threaded executor until the receiver stream ends.
pub fn run_fidl_loop(
    bt_facade: Arc<RwLock<BluetoothFacade>>, receiver: mpsc::UnboundedReceiver<AsyncRequest>,
) {
    let mut executor = async::Executor::new()
        .context("Error creating event loop")
        .expect("Failed to create an executor!");
    // Requests are handled concurrently; each destructured request carries its
    // own response channel `tx`.
    let receiver_fut = receiver.for_each_concurrent(move |request| match request {
        AsyncRequest {
            tx,
            id,
            name,
            params,
        } => {
            let bt_facade = bt_facade.clone();
            fx_log_info!(tag: "sl4f_asyc_execute",
                "Received sync request: {:?}, {:?}, {:?}, {:?}",
                tx, id, name, params
            );
            let fidl_fut = method_to_fidl(name.clone(), params.clone(), bt_facade.clone());
            fidl_fut.and_then(move |resp| {
                let response = AsyncResponse::new(resp);
                // Ignore any tx sending errors, other requests can still be outstanding
                let _ = tx.send(response);
                Ok(())
            })
        }
    });
    executor
        .run_singlethreaded(receiver_fut)
        .expect("Failed to execute requests from Rouille.");
}
|
//=============================================================================
// vector3.rs
//
// Created by Victor on 2019/10/27
//=============================================================================
use std::ops::{Add, Div};
/// A three-dimensional vector of `f32` components.
///
/// Derives added: a small public value type of three `f32`s should be
/// `Copy`/`Clone` (cheap bitwise copy), `Debug` (diagnostics), and
/// `PartialEq` (tests) — all backward-compatible additions.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Vector3 {
    /// X component.
    pub x: f32,
    /// Y component.
    pub y: f32,
    /// Z component.
    pub z: f32,
}
impl Vector3 {
    /// Computes the cross product of two vectors.
    pub fn cross(lhs: Vector3, rhs: Vector3) -> Vector3 {
        Vector3::new(
            lhs.y * rhs.z - lhs.z * rhs.y,
            lhs.z * rhs.x - lhs.x * rhs.z,
            lhs.x * rhs.y - lhs.y * rhs.x,
        )
    }
    /// Computes the dot product of two vectors.
    pub fn dot(lhs: Vector3, rhs: Vector3) -> f32 {
        lhs.x * rhs.x + lhs.y * rhs.y + lhs.z * rhs.z
    }
    /// Creates a new three-dimensional vector.
    pub fn new(x_val: f32, y_val: f32, z_val: f32) -> Vector3 {
        Vector3 { x: x_val, y: y_val, z: z_val }
    }
}
impl Add<Vector3> for Vector3 {
    type Output = Vector3;
    /// Component-wise vector addition.
    fn add(self, rhs: Vector3) -> Self::Output {
        Vector3::new(self.x + rhs.x, self.y + rhs.y, self.z + rhs.z)
    }
}
impl Div<f32> for Vector3 {
    type Output = Vector3;
    /// Component-wise division by a scalar. No zero check: dividing by 0.0
    /// yields IEEE infinities/NaN, as with plain `f32` division.
    fn div(self, rhs: f32) -> Self::Output {
        Vector3::new(self.x / rhs, self.y / rhs, self.z / rhs)
    }
}
|
#[doc = "Register `GICD_ISPENDR0` reader"]
pub type R = crate::R<GICD_ISPENDR0_SPEC>;
#[doc = "Register `GICD_ISPENDR0` writer"]
pub type W = crate::W<GICD_ISPENDR0_SPEC>;
#[doc = "Field `ISPENDR0` reader - ISPENDR0"]
pub type ISPENDR0_R = crate::FieldReader<u32>;
#[doc = "Field `ISPENDR0` writer - ISPENDR0"]
pub type ISPENDR0_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
// svd2rust-generated reader accessor for the GICD_ISPENDR0 register.
impl R {
    #[doc = "Bits 0:31 - ISPENDR0"]
    #[inline(always)]
    pub fn ispendr0(&self) -> ISPENDR0_R {
        ISPENDR0_R::new(self.bits)
    }
}
// svd2rust-generated writer accessors for the GICD_ISPENDR0 register.
impl W {
    #[doc = "Bits 0:31 - ISPENDR0"]
    #[inline(always)]
    #[must_use]
    pub fn ispendr0(&mut self) -> ISPENDR0_W<GICD_ISPENDR0_SPEC, 0> {
        ISPENDR0_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // unsafe: callers must ensure the raw value is valid for this register;
    // no field-level checking is performed.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "For interrupts ID\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicd_ispendr0::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gicd_ispendr0::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICD_ISPENDR0_SPEC;
impl crate::RegisterSpec for GICD_ISPENDR0_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`gicd_ispendr0::R`](R) reader structure"]
impl crate::Readable for GICD_ISPENDR0_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gicd_ispendr0::W`](W) writer structure"]
impl crate::Writable for GICD_ISPENDR0_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GICD_ISPENDR0 to value 0"]
impl crate::Resettable for GICD_ISPENDR0_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use crate::ast;
use crate::{Parse, Spanned, ToTokens};
/// An else branch of an if expression.
#[derive(Debug, Clone, ToTokens, Parse, Spanned)]
pub struct ExprElse {
/// The `else` token.
pub else_: ast::Else,
/// The body of the else statement.
pub block: Box<ast::ExprBlock>,
}
|
/// Demonstrates struct mutability: fields of a binding are only assignable
/// when the whole binding is declared `mut`.
fn main() {
    let user1 = User {
        name: String::from("hdl"),
        age: 18,
    };
    // user1.age = 19; // not declared `mut`, so the field cannot be changed
    println!("{:?}", user1);
    let mut user2 = User {
        name: String::from("hdl"),
        age: 18,
    };
    user2.age = 19; // with `mut`, fields can be modified
    println!("{:?}", user2);
}
/// Minimal demo struct for the mutability example above.
#[derive(Debug)]
struct User {
    name: String,
    age: u32,
}
|
//! ODBC types those representation is compatible with the ODBC C API.
//!
//! This layer has not been created using automatic code generation. It is incomplete, i.e. it does
//! not contain every symbol or constant defined in the ODBC C headers. Symbols which are
//! deprecated since ODBC 3 have been left out intentionally. While some extra type safety has been
//! added by grouping some of C's `#define` constants into `enum`-types it mostly offers the same
//! power (all) and safety guarantess(none) as the wrapped C-API.
//! ODBC 4.0 is still under development by Microsoft, so these symbols are deactivated by default
//! in the cargo.toml
pub use self::sqlreturn::*;
pub use self::info_type::*;
pub use self::fetch_orientation::*;
pub use self::attributes::*;
pub use self::c_data_type::*;
pub use self::input_output::*;
pub use self::nullable::*;
use std::os::raw::c_void;
mod sqlreturn;
mod info_type;
mod fetch_orientation;
mod attributes;
mod c_data_type;
mod input_output;
mod nullable;
//These types can never be instantiated in Rust code.
pub enum Obj {}
pub enum Env {}
pub enum Dbc {}
pub enum Stmt {}
pub type SQLHANDLE = *mut Obj;
pub type SQLHENV = *mut Env;
/// The connection handle references storage of all information about the connection to the data
/// source, including status, transaction state, and error information.
pub type SQLHDBC = *mut Dbc;
pub type SQLHSTMT = *mut Stmt;
pub type SQLSMALLINT = i16;
pub type SQLUSMALLINT = u16;
pub type SQLINTEGER = i32;
pub type SQLUINTEGER = u32;
pub type SQLPOINTER = *mut c_void;
pub type SQLCHAR = u8;
#[cfg(target_pointer_width = "64")]
pub type SQLLEN = i64;
#[cfg(target_pointer_width = "32")]
pub type SQLLEN = SQLINTEGER;
#[cfg(target_pointer_width = "64")]
pub type SQLULEN = u64;
#[cfg(target_pointer_width = "32")]
pub type SQLULEN = SQLUINTEGER;
pub type SQLHWND = SQLPOINTER;
// flags for null-terminated string
pub const SQL_NTS: SQLSMALLINT = -3;
pub const SQL_NTSL: SQLINTEGER = -3;
/// Maximum message length
pub const SQL_MAX_MESSAGE_LENGTH: SQLSMALLINT = 512;
pub const SQL_SQLSTATE_SIZE: usize = 5;
// Special SQLGetData indicator values
pub const SQL_NULL_DATA: SQLLEN = -1;
pub const SQL_NO_TOTAL: SQLLEN = -4;
/// SQL Free Statement options
#[repr(u16)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum FreeStmtOption {
/// Closes the cursor associated with StatementHandle (if one was defined) and discards all
/// pending results. The application can reopen this cursor later by executing a SELECT
/// statement again with the same or different parameter values. If no cursor is open, this
/// option has no effect for the application. `SQLCloseCursor` can also be called to close a
/// cursor.
SQL_CLOSE = 0,
// SQL_DROP = 1, is deprecated in favour of SQLFreeHandle
/// Sets the `SQL_DESC_COUNT` field of the ARD to 0, releasing all column buffers bound by
/// `SQLBindCol` for the given StatementHandle. This does not unbind the bookmark column; to do
/// that, the `SQL_DESC_DATA_PTR` field of the ARD for the bookmark column is set to NULL.
/// Notice that if this operation is performed on an explicitly allocated descriptor that is
/// shared by more than one statement, the operation will affect the bindings of all statements
/// that share the descriptor.
SQL_UNBIND = 2,
/// Sets the `SQL_DESC_COUNT` field of the APD to 0, releasing all parameter buffers set by
/// `SQLBindParameter` for the given StatementHandle. If this operation is performed on an
/// explicitly allocated descriptor that is shared by more than one statement, this operation
/// will affect the bindings of all the statements that share the descriptor.
SQL_RESET_PARAMS = 3,
}
pub use FreeStmtOption::*;
/// SQL Data Types
///
/// Discriminants mirror the `SQL_*` type constants from the ODBC C headers
/// (`sql.h`/`sqlext.h`); negative values are the extended (`SQL_EXT_*`) types.
#[repr(i16)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum SqlDataType {
    SQL_UNKNOWN_TYPE = 0, // also called SQL_VARIANT_TYPE since odbc 4.0
    SQL_CHAR = 1,
    SQL_NUMERIC = 2,
    SQL_DECIMAL = 3,
    SQL_INTEGER = 4,
    SQL_SMALLINT = 5,
    SQL_FLOAT = 6,
    SQL_REAL = 7,
    SQL_DOUBLE = 8,
    SQL_DATETIME = 9,
    SQL_VARCHAR = 12,
    #[cfg(feature = "odbc_version_4")] SQL_UDT = 17,
    #[cfg(feature = "odbc_version_4")] SQL_ROW = 19,
    #[cfg(feature = "odbc_version_4")] SQL_ARRAY = 50,
    #[cfg(feature = "odbc_version_4")] SQL_MULTISET = 55,
    // one-parameter shortcuts for date/time data types
    SQL_DATE = 91,
    SQL_TIME = 92,
    SQL_TIMESTAMP = 93,
    #[cfg(feature = "odbc_version_4")] SQL_TIME_WITH_TIMEZONE = 94,
    #[cfg(feature = "odbc_version_4")] SQL_TIMESTAMP_WITH_TIMEZONE = 95,
    //SQL extended datatypes:
    SQL_EXT_LONGVARCHAR = -1,
    SQL_EXT_BINARY = -2,
    SQL_EXT_VARBINARY = -3,
    SQL_EXT_LONGVARBINARY = -4,
    SQL_EXT_BIGINT = -5,
    SQL_EXT_TINYINT = -6,
    SQL_EXT_BIT = -7,
    SQL_EXT_WCHAR = -8,
    SQL_EXT_WVARCHAR = -9,
    SQL_EXT_WLONGVARCHAR = -10,
    SQL_EXT_GUID = -11,
}
pub use self::SqlDataType::*;
/// Represented in C headers as SQLSMALLINT
///
/// Selects which kind of handle `SQLAllocHandle`/`SQLFreeHandle` operate on.
#[repr(i16)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum HandleType {
    SQL_HANDLE_ENV = 1,
    SQL_HANDLE_DBC = 2,
    SQL_HANDLE_STMT = 3,
    SQL_HANDLE_DESC = 4,
}
pub use self::HandleType::*;
/// Options for `SQLDriverConnect`
///
/// Controls whether the driver may prompt the user for missing connection info.
#[repr(u16)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum SqlDriverConnectOption {
    SQL_DRIVER_NOPROMPT = 0,
    SQL_DRIVER_COMPLETE = 1,
    SQL_DRIVER_PROMPT = 2,
    SQL_DRIVER_COMPLETE_REQUIRED = 3,
}
pub use self::SqlDriverConnectOption::*;
/// Statement attributes for `SQLSetStmtAttr`
///
/// Discriminants are the `SQL_ATTR_*` statement-attribute constants from the
/// ODBC C headers; this subset covers array binding and rowset bookkeeping.
#[repr(i32)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum SqlStatementAttribute {
    SQL_ATTR_PARAM_BIND_TYPE = 18,
    SQL_ATTR_PARAMSET_SIZE = 22,
    SQL_ATTR_ROW_BIND_TYPE = 5,
    SQL_ATTR_ROW_ARRAY_SIZE = 27,
    SQL_ATTR_ROWS_FETCHED_PTR = 26,
}
pub use self::SqlStatementAttribute::*;
/// Connection attributes for `SQLSetConnectAttr`
///
/// Discriminants are the `SQL_ATTR_*` connection-attribute constants from the
/// ODBC C headers.
#[repr(i32)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum SqlConnectionAttribute {
    SQL_ATTR_ACCESS_MODE = 101,
    SQL_ATTR_AUTOCOMMIT = 102,
    SQL_ATTR_LOGIN_TIMEOUT = 103,
    SQL_ATTR_TRACE = 104,
    SQL_ATTR_TRACEFILE = 105,
    SQL_ATTR_TRANSLATE_LIB = 106,
    SQL_ATTR_TRANSLATE_OPTION = 107,
    SQL_ATTR_TXN_ISOLATION = 108,
    SQL_ATTR_CURRENT_CATALOG = 109,
    SQL_ATTR_ODBC_CURSORS = 110,
    SQL_ATTR_QUIET_MODE = 111,
    SQL_ATTR_PACKET_SIZE = 112,
    SQL_ATTR_CONNECTION_TIMEOUT = 113,
    SQL_ATTR_DISCONNECT_BEHAVIOR = 114,
    SQL_ATTR_ENLIST_IN_DTC = 1207,
    SQL_ATTR_ENLIST_IN_XA = 1208,
}
pub use self::SqlConnectionAttribute::*;
/// Completion types for `SQLEndTrans`
///
/// Chooses whether the transaction is committed or rolled back.
#[repr(i16)]
#[allow(non_camel_case_types)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum SqlCompletionType {
    SQL_COMMIT = 0,
    SQL_ROLLBACK = 1,
}
pub use self::SqlCompletionType::*;
#[cfg_attr(windows, link(name = "odbc32"))]
#[cfg_attr(not(windows), link(name = "odbc"))]
extern "C" {
/// Allocates an environment, connection, statement, or descriptor handle.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`
pub fn SQLAllocHandle(
handle_type: HandleType,
input_handle: SQLHANDLE,
output_Handle: *mut SQLHANDLE,
) -> SQLRETURN;
/// Frees resources associated with a specific environment, connection, statement, or
/// descriptor handle.
///
/// If `SQL_ERROR` is returned the handle is still valid.
/// # Returns
/// `SQL_SUCCESS`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`
pub fn SQLFreeHandle(handle_type: HandleType, handle: SQLHANDLE) -> SQLRETURN;
/// Sets attributes that govern aspects of environments
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`
pub fn SQLSetEnvAttr(
environment_handle: SQLHENV,
attribute: EnvironmentAttribute,
value: SQLPOINTER,
string_length: SQLINTEGER,
) -> SQLRETURN;
/// Closes the connection associated with a specific connection handle.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`
pub fn SQLDisconnect(connection_handle: SQLHDBC) -> SQLRETURN;
/// Return the current values of multiple fields of a diagnostic record that contains error,
/// warning, and status information.
///
/// # Returns
///
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`
pub fn SQLGetDiagRec(
handle_type: HandleType,
handle: SQLHANDLE,
RecNumber: SQLSMALLINT,
state: *mut SQLCHAR,
native_error_ptr: *mut SQLINTEGER,
message_text: *mut SQLCHAR,
buffer_length: SQLSMALLINT,
text_length_ptr: *mut SQLSMALLINT,
) -> SQLRETURN;
/// Executes a preparable statement, using the current values of the parameter marker variables
/// if any parameters exist in the statement. This is the fastest way to submit an SQL
/// statement for one-time execution
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_NEED_DATA`, `SQL_STILL_EXECUTING`, `SQL_ERROR`
/// , `SQL_NO_DATA`, `SQL_INVALID_HANDLE`, or `SQL_PARAM_DATA_AVAILABLE`.
pub fn SQLExecDirect(
statement_handle: SQLHSTMT,
statement_text: *const SQLCHAR,
text_length: SQLINTEGER,
) -> SQLRETURN;
/// Returns the number of columns in a result set
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE` or
/// `SQL_STILL_EXECUTING`
pub fn SQLNumResultCols(
statement_handle: SQLHSTMT,
column_count_ptr: *mut SQLSMALLINT,
) -> SQLRETURN;
// Can be used since odbc version 3.8 to stream results
pub fn SQLGetData(
statement_handle: SQLHSTMT,
col_or_param_num: SQLUSMALLINT,
target_type: SqlCDataType,
target_value_ptr: SQLPOINTER,
buffer_length: SQLLEN,
str_len_or_ind_ptr: *mut SQLLEN,
) -> SQLRETURN;
/// SQLFetch fetches the next rowset of data from the result set and returns data for all bound
/// columns.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, `SQL_NO_DATA` or
/// `SQL_STILL_EXECUTING`
pub fn SQLFetch(statement_handle: SQLHSTMT) -> SQLRETURN;
/// Returns general information about the driver and data source associated with a connection
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`
pub fn SQLGetInfo(
connection_handle: SQLHDBC,
info_type: InfoType,
info_value_ptr: SQLPOINTER,
buffer_length: SQLSMALLINT,
string_length_ptr: *mut SQLSMALLINT,
) -> SQLRETURN;
/// SQLConnect establishes connections to a driver and a data source. The connection handle
/// references storage of all information about the connection to the data source, including
/// status, transaction state, and error information.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or
/// `SQL_STILL_EXECUTING`
pub fn SQLConnect(
connection_handle: SQLHDBC,
server_name: *const SQLCHAR,
name_length_1: SQLSMALLINT,
user_name: *const SQLCHAR,
name_length_2: SQLSMALLINT,
authentication: *const SQLCHAR,
name_length_3: SQLSMALLINT,
) -> SQLRETURN;
/// Returns the list of table, catalog, or schema names, and table types, stored in a specific
/// data source. The driver returns the information as a result set
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or
/// `SQL_STILL_EXECUTING`
pub fn SQLTables(
statement_handle: SQLHSTMT,
catalog_name: *const SQLCHAR,
name_length_1: SQLSMALLINT,
schema_name: *const SQLCHAR,
name_length_2: SQLSMALLINT,
table_name: *const SQLCHAR,
name_length_3: SQLSMALLINT,
TableType: *const SQLCHAR,
name_length_4: SQLSMALLINT,
) -> SQLRETURN;
/// Returns information about a data source. This function is implemented only by the Driver
/// Manager.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or `SQL_NO_DATA`
pub fn SQLDataSources(
environment_handle: SQLHENV,
direction: FetchOrientation,
server_name: *mut SQLCHAR,
buffer_length_1: SQLSMALLINT,
name_length_1: *mut SQLSMALLINT,
description: *mut SQLCHAR,
buffer_length_2: SQLSMALLINT,
name_length_2: *mut SQLSMALLINT,
) -> SQLRETURN;
/// An alternative to `SQLConnect`. It supports data sources that require more connection
/// information than the three arguments in `SQLConnect`, dialog boxes to prompt the user for
/// all connection information, and data sources that are not defined in the system information
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, `SQL_NO_DATA`,
/// or `SQL_STILL_EXECUTING`
pub fn SQLDriverConnect(
connection_handle: SQLHDBC,
window_handle: SQLHWND,
in_connection_string: *const SQLCHAR,
string_length_1: SQLSMALLINT,
out_connection_string: *mut SQLCHAR,
buffer_length: SQLSMALLINT,
string_length_2: *mut SQLSMALLINT,
DriverCompletion: SqlDriverConnectOption,
) -> SQLRETURN;
/// Lists driver descriptions and driver attribute keywords. This function is implemented only
/// by the Driver Manager.
///
/// Call repeatedly, with `direction` selecting how the enumeration advances;
/// `SQL_NO_DATA` in the return value signals that all drivers have been listed.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or `SQL_NO_DATA`
pub fn SQLDrivers(
    henv: SQLHENV,
    direction: FetchOrientation,
    driver_desc: *mut SQLCHAR,
    driver_desc_max: SQLSMALLINT,
    out_driver_desc: *mut SQLSMALLINT,
    driver_attributes: *mut SQLCHAR,
    drvr_attr_max: SQLSMALLINT,
    out_drvr_attr: *mut SQLSMALLINT,
) -> SQLRETURN;
/// Closes a cursor that has been opened on a statement and discards pending results.
///
/// See also `SQLFreeStmt`, which can additionally free resources associated
/// with the statement handle.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR` or `SQL_INVALID_HANDLE`
pub fn SQLCloseCursor(hstmt: SQLHSTMT) -> SQLRETURN;
/// Binds a buffer to a parameter marker in an SQL statement
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR` or `SQL_INVALID_HANDLE`
pub fn SQLBindParameter(
    hstmt: SQLHSTMT,
    parameter_number: SQLUSMALLINT,
    input_output_type: InputOutput,
    value_type: SqlCDataType,
    // Fixed typo: was `parmeter_type`. Parameter names in an `extern`
    // declaration are documentation only (arguments are positional), so this
    // cannot affect callers or the ABI.
    parameter_type: SqlDataType,
    column_size: SQLULEN,
    decimal_digits: SQLSMALLINT,
    parameter_value_ptr: SQLPOINTER,
    buffer_length: SQLLEN,
    str_len_or_ind_ptr: *mut SQLLEN,
) -> SQLRETURN;
/// Compiles the statement and generates an access plan.
///
/// The prepared statement is subsequently run with `SQLExecute`.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or
/// `SQL_STILL_EXECUTING`
pub fn SQLPrepare(
    hstmt: SQLHSTMT,
    statement_text: *const SQLCHAR,
    text_length: SQLINTEGER,
) -> SQLRETURN;
/// Executes a prepared statement, using the current values of the parameter marker variables
/// if any parameter markers exist in the statement.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_NEED_DATA`, `SQL_STILL_EXECUTING`, `SQL_ERROR`
/// , `SQL_NO_DATA`, `SQL_INVALID_HANDLE`, or `SQL_PARAM_DATA_AVAILABLE`.
pub fn SQLExecute(hstmt: SQLHSTMT) -> SQLRETURN;
/// Stops processing associated with a specific statement, closes any open cursors associated
/// with the statement, discards pending results, or, optionally, frees all resources
/// associated with the statement handle.
///
/// `option` selects which of the above actions is performed.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`.
pub fn SQLFreeStmt(hstmt: SQLHSTMT, option: FreeStmtOption) -> SQLRETURN;
/// Binds application data buffers to columns in the result set.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`.
pub fn SQLBindCol(
    hstmt: SQLHSTMT,
    col_number: SQLUSMALLINT,
    target_type: SqlCDataType,
    target_value: SQLPOINTER,
    buffer_length: SQLLEN,
    // Fixed typo: was `length_or_indicatior`. Parameter names in an `extern`
    // declaration are documentation only (arguments are positional), so this
    // cannot affect callers or the ABI.
    length_or_indicator: *mut SQLLEN,
) -> SQLRETURN;
/// Returns the result descriptor for one column in the result set — column name, type, column
/// size, decimal digits, and nullability.
///
/// This information also is available in the fields of the IRD.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_STILL_EXECUTING`, `SQL_ERROR`, or
/// `SQL_INVALID_HANDLE`.
pub fn SQLDescribeCol(
    hstmt: SQLHSTMT,
    col_number: SQLUSMALLINT,
    col_name: *mut SQLCHAR,
    buffer_length: SQLSMALLINT,
    name_length: *mut SQLSMALLINT,
    data_type: *mut SqlDataType,
    col_size: *mut SQLULEN,
    decimal_digits: *mut SQLSMALLINT,
    nullable: *mut Nullable,
) -> SQLRETURN;
/// Sets attributes related to a statement.
///
/// The interpretation of `value` (and whether `str_length` is used) depends
/// on the attribute being set via `attr`.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, or `SQL_INVALID_HANDLE`.
pub fn SQLSetStmtAttr(
    hstmt: SQLHSTMT,
    attr: SqlStatementAttribute,
    value: SQLPOINTER,
    str_length: SQLINTEGER,
) -> SQLRETURN;
/// Sets attributes that govern aspects of connections.
///
/// The interpretation of `value` (and whether `str_length` is used) depends
/// on the attribute being set via `attr`.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or `SQL_STILL_EXECUTING`.
pub fn SQLSetConnectAttr(
    hdbc: SQLHDBC,
    attr: SqlConnectionAttribute,
    value: SQLPOINTER,
    str_length: SQLINTEGER,
) -> SQLRETURN;
/// Requests a commit or rollback operation for all active operations on all statements associated with a handle.
///
/// `completion_type` selects commit vs. rollback; `handle_type` describes
/// what kind of handle `handle` is.
///
/// # Returns
/// `SQL_SUCCESS`, `SQL_SUCCESS_WITH_INFO`, `SQL_ERROR`, `SQL_INVALID_HANDLE`, or `SQL_STILL_EXECUTING`.
pub fn SQLEndTran(
    handle_type: HandleType,
    handle: SQLHANDLE,
    completion_type: SqlCompletionType,
) -> SQLRETURN;
}
|
use super::{check_proposer_block_exists, check_voter_block_exists};
use crate::block::voter::Content;
use crate::blockchain::BlockChain;
use crate::blockdb::BlockDatabase;
use crate::crypto::hash::H256;
/// Collects the hashes of blocks referenced by `content` that are not yet
/// present in `blockchain`: the voter parent plus every voted proposer block.
pub fn get_missing_references(
    content: &Content,
    blockchain: &BlockChain,
    _blockdb: &BlockDatabase,
) -> Vec<H256> {
    let mut missing = Vec::new();
    // The voter parent must already be known to the blockchain.
    if !check_voter_block_exists(content.voter_parent, blockchain) {
        missing.push(content.voter_parent);
    }
    // Every proposer block this voter block votes on must be present too.
    for vote in &content.votes {
        if !check_proposer_block_exists(*vote, blockchain) {
            missing.push(*vote);
        }
    }
    missing
}
/// The chain number recorded in `content` must match the voter chain that
/// its voter parent actually belongs to.
pub fn check_chain_number(content: &Content, blockchain: &BlockChain) -> bool {
    blockchain
        .voter_chain_number(&content.voter_parent)
        .unwrap()
        == content.chain_number
}
/// Check that `content.votes` covers exactly one proposer block per level,
/// starting just past the deepest level the voter parent has already voted
/// on, ending at the level of `parent`, in ascending level order with no
/// gaps.
pub fn check_levels_voted(content: &Content, blockchain: &BlockChain, parent: &H256) -> bool {
    // Deepest proposer level this voter chain has already voted on; the
    // first new vote is expected at `start + 1` (the `start += 1` in the
    // loop below performs that offset incrementally).
    let mut start = blockchain
        .deepest_voted_level(&content.voter_parent)
        .unwrap(); //need to be +1
    let end = blockchain.proposer_level(parent).unwrap();
    if start > end {
        return false;
    } //end < start means incorrect parent level
    // Exactly one vote is required for each level in (start, end].
    if content.votes.len() != (end - start) as usize {
        return false;
    }
    // Each successive vote must sit exactly one proposer level deeper.
    for vote in content.votes.iter() {
        start += 1;
        if start != blockchain.proposer_level(vote).unwrap() {
            return false;
        }
    }
    true
}
|
use euler::utils::sieve;
/// Project Euler 10-style computation: sum every prime below two million.
fn main() {
    let prime_flags = sieve(2_000_000);
    let ans: i64 = (2..2_000_000)
        .filter(|&n| prime_flags[n])
        .map(|n| n as i64)
        .sum();
    println!("{}", ans);
}
|
/// An enum to represent all characters in the Kharoshthi block.
///
/// Each variant's doc comment gives its Unicode code point. The raw glyphs
/// have been dropped from these comments: they are astral-plane characters
/// that render poorly in most tooling (and had already been garbled here).
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Kharoshthi {
    /// \u{10a00}
    LetterA,
    /// \u{10a01}
    VowelSignI,
    /// \u{10a02}
    VowelSignU,
    /// \u{10a03}
    VowelSignVocalicR,
    /// \u{10a05}
    VowelSignE,
    /// \u{10a06}
    VowelSignO,
    /// \u{10a0c}
    VowelLengthMark,
    /// \u{10a0d}
    SignDoubleRingBelow,
    /// \u{10a0e}
    SignAnusvara,
    /// \u{10a0f}
    SignVisarga,
    /// \u{10a10}
    LetterKa,
    /// \u{10a11}
    LetterKha,
    /// \u{10a12}
    LetterGa,
    /// \u{10a13}
    LetterGha,
    /// \u{10a15}
    LetterCa,
    /// \u{10a16}
    LetterCha,
    /// \u{10a17}
    LetterJa,
    /// \u{10a19}
    LetterNya,
    /// \u{10a1a}
    LetterTta,
    /// \u{10a1b}
    LetterTtha,
    /// \u{10a1c}
    LetterDda,
    /// \u{10a1d}
    LetterDdha,
    /// \u{10a1e}
    LetterNna,
    /// \u{10a1f}
    LetterTa,
    /// \u{10a20}
    LetterTha,
    /// \u{10a21}
    LetterDa,
    /// \u{10a22}
    LetterDha,
    /// \u{10a23}
    LetterNa,
    /// \u{10a24}
    LetterPa,
    /// \u{10a25}
    LetterPha,
    /// \u{10a26}
    LetterBa,
    /// \u{10a27}
    LetterBha,
    /// \u{10a28}
    LetterMa,
    /// \u{10a29}
    LetterYa,
    /// \u{10a2a}
    LetterRa,
    /// \u{10a2b}
    LetterLa,
    /// \u{10a2c}
    LetterVa,
    /// \u{10a2d}
    LetterSha,
    /// \u{10a2e}
    LetterSsa,
    /// \u{10a2f}
    LetterSa,
    /// \u{10a30}
    LetterZa,
    /// \u{10a31}
    LetterHa,
    /// \u{10a32}
    LetterKka,
    /// \u{10a33}
    LetterTttha,
    /// \u{10a34}
    LetterTtta,
    /// \u{10a35}
    LetterVha,
    /// \u{10a38}
    SignBarAbove,
    /// \u{10a39}
    SignCauda,
    /// \u{10a3a}
    SignDotBelow,
    /// \u{10a3f}
    Virama,
    /// \u{10a40}
    DigitOne,
    /// \u{10a41}
    DigitTwo,
    /// \u{10a42}
    DigitThree,
    /// \u{10a43}
    DigitFour,
    /// \u{10a44}
    NumberTen,
    /// \u{10a45}
    NumberTwenty,
    /// \u{10a46}
    NumberOneHundred,
    /// \u{10a47}
    NumberOneThousand,
    /// \u{10a48}
    FractionOneHalf,
    /// \u{10a50}
    PunctuationDot,
    /// \u{10a51}
    PunctuationSmallCircle,
    /// \u{10a52}
    PunctuationCircle,
    /// \u{10a53}
    PunctuationCrescentBar,
    /// \u{10a54}
    PunctuationMangalam,
    /// \u{10a55}
    PunctuationLotus,
    /// \u{10a56}
    PunctuationDanda,
    /// \u{10a57}
    PunctuationDoubleDanda,
    /// \u{10a58}
    PunctuationLines,
}
impl Into<char> for Kharoshthi {
fn into(self) -> char {
match self {
Kharoshthi::LetterA => '๐จ',
Kharoshthi::VowelSignI => '๐จ',
Kharoshthi::VowelSignU => '๐จ',
Kharoshthi::VowelSignVocalicR => '๐จ',
Kharoshthi::VowelSignE => '๐จ
',
Kharoshthi::VowelSignO => '๐จ',
Kharoshthi::VowelLengthMark => '๐จ',
Kharoshthi::SignDoubleRingBelow => '๐จ',
Kharoshthi::SignAnusvara => '๐จ',
Kharoshthi::SignVisarga => '๐จ',
Kharoshthi::LetterKa => '๐จ',
Kharoshthi::LetterKha => '๐จ',
Kharoshthi::LetterGa => '๐จ',
Kharoshthi::LetterGha => '๐จ',
Kharoshthi::LetterCa => '๐จ',
Kharoshthi::LetterCha => '๐จ',
Kharoshthi::LetterJa => '๐จ',
Kharoshthi::LetterNya => '๐จ',
Kharoshthi::LetterTta => '๐จ',
Kharoshthi::LetterTtha => '๐จ',
Kharoshthi::LetterDda => '๐จ',
Kharoshthi::LetterDdha => '๐จ',
Kharoshthi::LetterNna => '๐จ',
Kharoshthi::LetterTa => '๐จ',
Kharoshthi::LetterTha => '๐จ ',
Kharoshthi::LetterDa => '๐จก',
Kharoshthi::LetterDha => '๐จข',
Kharoshthi::LetterNa => '๐จฃ',
Kharoshthi::LetterPa => '๐จค',
Kharoshthi::LetterPha => '๐จฅ',
Kharoshthi::LetterBa => '๐จฆ',
Kharoshthi::LetterBha => '๐จง',
Kharoshthi::LetterMa => '๐จจ',
Kharoshthi::LetterYa => '๐จฉ',
Kharoshthi::LetterRa => '๐จช',
Kharoshthi::LetterLa => '๐จซ',
Kharoshthi::LetterVa => '๐จฌ',
Kharoshthi::LetterSha => '๐จญ',
Kharoshthi::LetterSsa => '๐จฎ',
Kharoshthi::LetterSa => '๐จฏ',
Kharoshthi::LetterZa => '๐จฐ',
Kharoshthi::LetterHa => '๐จฑ',
Kharoshthi::LetterKka => '๐จฒ',
Kharoshthi::LetterTttha => '๐จณ',
Kharoshthi::LetterTtta => '๐จด',
Kharoshthi::LetterVha => '๐จต',
Kharoshthi::SignBarAbove => '๐จธ',
Kharoshthi::SignCauda => '๐จน',
Kharoshthi::SignDotBelow => '๐จบ',
Kharoshthi::Virama => '๐จฟ',
Kharoshthi::DigitOne => '๐ฉ',
Kharoshthi::DigitTwo => '๐ฉ',
Kharoshthi::DigitThree => '๐ฉ',
Kharoshthi::DigitFour => '๐ฉ',
Kharoshthi::NumberTen => '๐ฉ',
Kharoshthi::NumberTwenty => '๐ฉ
',
Kharoshthi::NumberOneHundred => '๐ฉ',
Kharoshthi::NumberOneThousand => '๐ฉ',
Kharoshthi::FractionOneHalf => '๐ฉ',
Kharoshthi::PunctuationDot => '๐ฉ',
Kharoshthi::PunctuationSmallCircle => '๐ฉ',
Kharoshthi::PunctuationCircle => '๐ฉ',
Kharoshthi::PunctuationCrescentBar => '๐ฉ',
Kharoshthi::PunctuationMangalam => '๐ฉ',
Kharoshthi::PunctuationLotus => '๐ฉ',
Kharoshthi::PunctuationDanda => '๐ฉ',
Kharoshthi::PunctuationDoubleDanda => '๐ฉ',
Kharoshthi::PunctuationLines => '๐ฉ',
}
}
}
impl std::convert::TryFrom<char> for Kharoshthi {
type Error = ();
fn try_from(c: char) -> Result<Self, Self::Error> {
match c {
'๐จ' => Ok(Kharoshthi::LetterA),
'๐จ' => Ok(Kharoshthi::VowelSignI),
'๐จ' => Ok(Kharoshthi::VowelSignU),
'๐จ' => Ok(Kharoshthi::VowelSignVocalicR),
'๐จ
' => Ok(Kharoshthi::VowelSignE),
'๐จ' => Ok(Kharoshthi::VowelSignO),
'๐จ' => Ok(Kharoshthi::VowelLengthMark),
'๐จ' => Ok(Kharoshthi::SignDoubleRingBelow),
'๐จ' => Ok(Kharoshthi::SignAnusvara),
'๐จ' => Ok(Kharoshthi::SignVisarga),
'๐จ' => Ok(Kharoshthi::LetterKa),
'๐จ' => Ok(Kharoshthi::LetterKha),
'๐จ' => Ok(Kharoshthi::LetterGa),
'๐จ' => Ok(Kharoshthi::LetterGha),
'๐จ' => Ok(Kharoshthi::LetterCa),
'๐จ' => Ok(Kharoshthi::LetterCha),
'๐จ' => Ok(Kharoshthi::LetterJa),
'๐จ' => Ok(Kharoshthi::LetterNya),
'๐จ' => Ok(Kharoshthi::LetterTta),
'๐จ' => Ok(Kharoshthi::LetterTtha),
'๐จ' => Ok(Kharoshthi::LetterDda),
'๐จ' => Ok(Kharoshthi::LetterDdha),
'๐จ' => Ok(Kharoshthi::LetterNna),
'๐จ' => Ok(Kharoshthi::LetterTa),
'๐จ ' => Ok(Kharoshthi::LetterTha),
'๐จก' => Ok(Kharoshthi::LetterDa),
'๐จข' => Ok(Kharoshthi::LetterDha),
'๐จฃ' => Ok(Kharoshthi::LetterNa),
'๐จค' => Ok(Kharoshthi::LetterPa),
'๐จฅ' => Ok(Kharoshthi::LetterPha),
'๐จฆ' => Ok(Kharoshthi::LetterBa),
'๐จง' => Ok(Kharoshthi::LetterBha),
'๐จจ' => Ok(Kharoshthi::LetterMa),
'๐จฉ' => Ok(Kharoshthi::LetterYa),
'๐จช' => Ok(Kharoshthi::LetterRa),
'๐จซ' => Ok(Kharoshthi::LetterLa),
'๐จฌ' => Ok(Kharoshthi::LetterVa),
'๐จญ' => Ok(Kharoshthi::LetterSha),
'๐จฎ' => Ok(Kharoshthi::LetterSsa),
'๐จฏ' => Ok(Kharoshthi::LetterSa),
'๐จฐ' => Ok(Kharoshthi::LetterZa),
'๐จฑ' => Ok(Kharoshthi::LetterHa),
'๐จฒ' => Ok(Kharoshthi::LetterKka),
'๐จณ' => Ok(Kharoshthi::LetterTttha),
'๐จด' => Ok(Kharoshthi::LetterTtta),
'๐จต' => Ok(Kharoshthi::LetterVha),
'๐จธ' => Ok(Kharoshthi::SignBarAbove),
'๐จน' => Ok(Kharoshthi::SignCauda),
'๐จบ' => Ok(Kharoshthi::SignDotBelow),
'๐จฟ' => Ok(Kharoshthi::Virama),
'๐ฉ' => Ok(Kharoshthi::DigitOne),
'๐ฉ' => Ok(Kharoshthi::DigitTwo),
'๐ฉ' => Ok(Kharoshthi::DigitThree),
'๐ฉ' => Ok(Kharoshthi::DigitFour),
'๐ฉ' => Ok(Kharoshthi::NumberTen),
'๐ฉ
' => Ok(Kharoshthi::NumberTwenty),
'๐ฉ' => Ok(Kharoshthi::NumberOneHundred),
'๐ฉ' => Ok(Kharoshthi::NumberOneThousand),
'๐ฉ' => Ok(Kharoshthi::FractionOneHalf),
'๐ฉ' => Ok(Kharoshthi::PunctuationDot),
'๐ฉ' => Ok(Kharoshthi::PunctuationSmallCircle),
'๐ฉ' => Ok(Kharoshthi::PunctuationCircle),
'๐ฉ' => Ok(Kharoshthi::PunctuationCrescentBar),
'๐ฉ' => Ok(Kharoshthi::PunctuationMangalam),
'๐ฉ' => Ok(Kharoshthi::PunctuationLotus),
'๐ฉ' => Ok(Kharoshthi::PunctuationDanda),
'๐ฉ' => Ok(Kharoshthi::PunctuationDoubleDanda),
'๐ฉ' => Ok(Kharoshthi::PunctuationLines),
_ => Err(()),
}
}
}
impl Into<u32> for Kharoshthi {
    /// Returns the character's Unicode code point.
    fn into(self) -> u32 {
        // A `char` *is* a Unicode scalar value, so a plain cast yields the
        // code point directly. The previous implementation rendered the char
        // with `escape_unicode`, stripped the `\u{...}` wrapper with string
        // replacements, and re-parsed the hex digits — several allocations
        // and a fallible parse to compute what is just a cast.
        let c: char = self.into();
        c as u32
    }
}
impl std::convert::TryFrom<u32> for Kharoshthi {
    type Error = ();
    /// Maps a code point to its variant, failing for values that are not
    /// valid `char`s or that fall outside the Kharoshthi block.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
impl Iterator for Kharoshthi {
    type Item = Self;
    /// Yields the variant for the next-higher code point, or `None` when no
    /// such variant exists.
    ///
    /// NOTE(review): `*self` is never advanced, so each call returns the same
    /// successor rather than walking through the block — confirm this is the
    /// intended `Iterator` semantics before relying on it.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let code_point: u32 = (*self).into();
        Self::try_from(code_point + 1).ok()
    }
}
impl Kharoshthi {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Self::LetterA
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // Derive the name from the `Debug` representation prefixed with the
        // block name, then convert to sentence case.
        let debug_repr = std::format!("Kharoshthi{:#?}", self);
        string_morph::to_sentence_case(&debug_repr)
    }
}
|
use badgeland::Badge;
/// Builds a badge labeled "Badge Maker" and prints its rendered form.
fn main() {
    println!("{}", Badge::new().text("Badge Maker"));
}
|
pub mod digital;
pub mod pin; |
use super::Sorter;
/// Zero-sized marker type whose [`Sorter`] impl performs selection sort.
pub struct SelectionSort;

impl Sorter for SelectionSort {
    /// In-place selection sort: repeatedly select the minimum of the
    /// unsorted suffix and swap it to the front. O(n²) comparisons,
    /// at most n-1 swaps.
    fn sort<T: Ord>(self, slice: &mut [T]) {
        for cursor in 0..slice.len() {
            // Index of the smallest element found so far in `slice[cursor..]`.
            let mut min_index = cursor;
            // Start one past `cursor`: the original scanned from `cursor`
            // itself, needlessly comparing the candidate against itself.
            for i in (cursor + 1)..slice.len() {
                if slice[i] < slice[min_index] {
                    min_index = i;
                }
            }
            // Guard the swap on the *indices*. The original compared the
            // values (`&slice[cursor] != &slice[min_value_index]`), paying an
            // extra `T` comparison; a same-index check is free and a swap of
            // equal values would have been harmless anyway.
            if min_index != cursor {
                slice.swap(cursor, min_index);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::{SelectionSort, Sorter};
    /// Sorts a vector containing duplicates and verifies the fully ordered
    /// result (duplicates preserved).
    #[test]
    fn it_works() {
        let mut things = vec![25, 4, 1, 3, 5, 2, 4, 6, 4, 9];
        SelectionSort.sort(&mut things);
        assert_eq!(things, &[1, 2, 3, 4, 4, 4, 5, 6, 9, 25]);
    }
}
|
//! Management of the index of a registry source
//!
//! This module contains management of the index and various operations, such as
//! actually parsing the index, looking for crates, etc. This is intended to be
//! abstract over remote indices (downloaded via git) and local registry indices
//! (which are all just present on the filesystem).
//!
//! ## Index Performance
//!
//! One important aspect of the index is that we want to optimize the "happy
//! path" as much as possible. Whenever you type `cargo build` Cargo will
//! *always* reparse the registry and learn about dependency information. This
//! is done because Cargo needs to learn about the upstream crates.io crates
//! that you're using and ensure that the preexisting `Cargo.lock` still matches
//! the current state of the world.
//!
//! Consequently, Cargo "null builds" (the index that Cargo adds to each build
//! itself) need to be fast when accessing the index. The primary performance
//! optimization here is to avoid parsing JSON blobs from the registry if we
//! don't need them. Most secondary optimizations are centered around removing
//! allocations and such, but avoiding parsing JSON is the #1 optimization.
//!
//! When we get queries from the resolver we're given a `Dependency`. This
//! dependency in turn has a version requirement, and with lock files that
//! already exist these version requirements are exact version requirements
//! `=a.b.c`. This means that we in theory only need to parse one line of JSON
//! per query in the registry, the one that matches version `a.b.c`.
//!
//! The crates.io index, however, is not amenable to this form of query. Instead
//! the crates.io index simply is a file where each line is a JSON blob. To
//! learn about the versions in each JSON blob we would need to parse the JSON,
//! defeating the purpose of trying to parse as little as possible.
//!
//! > Note that as a small aside even *loading* the JSON from the registry is
//! > actually pretty slow. For crates.io and remote registries we don't
//! > actually check out the git index on disk because that takes quite some
//! > time and is quite large. Instead we use `libgit2` to read the JSON from
//! > the raw git objects. This in turn can be slow (aka show up high in
//! > profiles) because libgit2 has to do deflate decompression and such.
//!
//! To solve all these issues a strategy is employed here where Cargo basically
//! creates an index into the index. The first time a package is queried about
//! (first time being for an entire computer) Cargo will load the contents
//! (slowly via libgit2) from the registry. It will then (slowly) parse every
//! single line to learn about its versions. Afterwards, however, Cargo will
//! emit a new file (a cache) which is amenable for speedily parsing in future
//! invocations.
//!
//! This cache file is currently organized by basically having the semver
//! version extracted from each JSON blob. That way Cargo can quickly and easily
//! parse all versions contained and which JSON blob they're associated with.
//! The JSON blob then doesn't actually need to get parsed unless the version is
//! parsed.
//!
//! Altogether the initial measurements of this shows a massive improvement for
//! Cargo null build performance. It's expected that the improvements earned
//! here will continue to grow over time in the sense that the previous
//! implementation (parse all lines each time) actually continues to slow down
//! over time as new versions of a crate are published. In any case when first
//! implemented a null build of Cargo itself would parse 3700 JSON blobs from
//! the registry and load 150 blobs from git. Afterwards it parses 150 JSON
//! blobs and loads 0 files from git. Removing 200ms or more from Cargo's startup
//! time is certainly nothing to sneeze at!
//!
//! Note that this is just a high-level overview, there's of course lots of
//! details like invalidating caches and whatnot which are handled below, but
//! hopefully those are more obvious inline in the code itself.
use crate::core::dependency::Dependency;
use crate::core::{PackageId, SourceId, Summary};
use crate::sources::registry::{LoadResponse, RegistryData, RegistryPackage, INDEX_V_MAX};
use crate::util::interning::InternedString;
use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq, ToSemver};
use anyhow::bail;
use cargo_util::{paths, registry::make_dep_path};
use log::{debug, info};
use semver::Version;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::io::ErrorKind;
use std::path::Path;
use std::str;
use std::task::Poll;
/// Crates.io treats hyphen and underscores as interchangeable, but the index and old Cargo do not.
/// Therefore, the index must store uncanonicalized version of the name so old Cargo's can find it.
/// This loop tries all possible combinations of switching hyphen and underscores to find the
/// uncanonicalized one. As all stored inputs have the correct spelling, we start with the spelling
/// as-provided.
struct UncanonicalizedIter<'s> {
    /// The name exactly as provided by the caller.
    input: &'s str,
    /// Number of `-`/`_` separator characters in `input`.
    num_hyphen_underscore: u32,
    /// Bitmask choosing, per separator position, whether to flip it;
    /// incremented on every `next` call to enumerate all combinations.
    hyphen_combination_num: u16,
}
impl<'s> UncanonicalizedIter<'s> {
    /// Creates an iterator over the hyphen/underscore spelling variations of
    /// `input`, starting with the spelling as provided.
    fn new(input: &'s str) -> Self {
        let separator_count = input.chars().filter(|&c| c == '-' || c == '_').count();
        UncanonicalizedIter {
            input,
            num_hyphen_underscore: separator_count as u32,
            hyphen_combination_num: 0,
        }
    }
}
impl<'s> Iterator for UncanonicalizedIter<'s> {
    type Item = String;
    fn next(&mut self) -> Option<Self::Item> {
        // Stop once the combination counter would only flip bits beyond the
        // number of separator positions actually present in the input.
        if self.hyphen_combination_num > 0
            && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore
        {
            return None;
        }
        let ret = Some(
            self.input
                .chars()
                // `s` counts which separator position we are currently at.
                .scan(0u16, |s, c| {
                    // the check against 15 here is to prevent
                    // shift overflow on inputs with more than 15 hyphens
                    if (c == '_' || c == '-') && *s <= 15 {
                        // Flip this separator iff its bit is set in the
                        // current combination number.
                        let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0;
                        let out = if (c == '_') ^ switch { '_' } else { '-' };
                        *s += 1;
                        Some(out)
                    } else {
                        Some(c)
                    }
                })
                .collect(),
        );
        self.hyphen_combination_num += 1;
        ret
    }
}
/// An input without separators yields only itself.
#[test]
fn no_hyphen() {
    assert_eq!(
        UncanonicalizedIter::new("test").collect::<Vec<_>>(),
        vec!["test".to_string()]
    )
}
/// Two separators yield all four flip combinations, with the original
/// spelling produced first.
#[test]
fn two_hyphen() {
    assert_eq!(
        UncanonicalizedIter::new("te-_st").collect::<Vec<_>>(),
        vec![
            "te-_st".to_string(),
            "te__st".to_string(),
            "te--st".to_string(),
            "te_-st".to_string()
        ]
    )
}
/// Inputs with more than 15 separators must not cause a shift overflow; the
/// iterator just keeps producing variants (here we only sample 100 of them).
#[test]
fn overflow_hyphen() {
    assert_eq!(
        UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st")
            .take(100)
            .count(),
        100
    )
}
/// Manager for handling the on-disk index.
///
/// Note that local and remote registries store the index differently. Local
/// is a simple on-disk tree of files of the raw index. Remote registries are
/// stored as a raw git repository. The different means of access are handled
/// via the [`RegistryData`] trait abstraction.
///
/// This transparently handles caching of the index in a more efficient format.
pub struct RegistryIndex<'cfg> {
    /// Identity of the registry this index belongs to.
    source_id: SourceId,
    /// Root directory of the index for the registry.
    path: Filesystem,
    /// Cache of summary data.
    ///
    /// This is keyed off the package name. The [`Summaries`] value handles
    /// loading the summary data. It keeps an optimized on-disk representation
    /// of the JSON files, which is created in an as-needed fashion. If it
    /// hasn't been cached already, it uses [`RegistryData::load`] to access
    /// the JSON files from the index, and then creates the optimized on-disk
    /// summary cache.
    summaries_cache: HashMap<InternedString, Summaries>,
    /// [`Config`] reference for convenience.
    config: &'cfg Config,
}
/// An internal cache of summaries for a particular package.
///
/// A list of summaries is loaded from disk via one of two methods:
///
/// 1. Primarily Cargo will parse the corresponding file for a crate in the
///    upstream crates.io registry. That's just a JSON blob per line which we
///    can parse, extract the version, and then store here.
///
/// 2. Alternatively, if Cargo has previously run, we'll have a cached index of
///    dependencies for the upstream index. This is a file that Cargo maintains
///    lazily on the local filesystem and is much faster to parse since it
///    doesn't involve parsing all of the JSON.
///
/// The outward-facing interface of this doesn't matter too much where it's
/// loaded from, but it's important when reading the implementation to note that
/// we try to parse as little as possible!
#[derive(Default)]
struct Summaries {
    /// A raw vector of uninterpreted bytes. This is what `Unparsed` start/end
    /// fields are indexes into. If a `Summaries` is loaded from the crates.io
    /// index then this field will be empty since nothing is `Unparsed`.
    raw_data: Vec<u8>,
    /// All known versions of a crate, keyed from their `Version` to the
    /// possibly parsed or unparsed version of the full summary.
    versions: HashMap<Version, MaybeIndexSummary>,
}
/// A lazily parsed `IndexSummary`.
enum MaybeIndexSummary {
    /// A summary which has not been parsed; the `start` and `end` are
    /// pointers into `Summaries::raw_data` which this is an entry of.
    Unparsed { start: usize, end: usize },
    /// An actually parsed summary.
    Parsed(IndexSummary),
}
/// A parsed representation of a summary from the index.
///
/// In addition to a full `Summary` we have information on whether it is `yanked`.
pub struct IndexSummary {
    /// The parsed package summary (name, version, dependencies, ...).
    pub summary: Summary,
    /// Whether this version has been yanked from the registry.
    pub yanked: bool,
    /// Schema version, see [`RegistryPackage`].
    v: u32,
}
/// A representation of the cache on disk that Cargo maintains of summaries.
/// Cargo will initially parse all summaries in the registry and will then
/// serialize that into this form and place it in a new location on disk,
/// ensuring that access in the future is much speedier.
#[derive(Default)]
struct SummariesCache<'a> {
    /// For each known version, the raw (still unparsed) JSON blob for it.
    versions: Vec<(Version, &'a [u8])>,
    /// Index version this cache was built against — presumably used to
    /// detect a stale cache (see the module docs on cache invalidation).
    index_version: &'a str,
}
impl<'cfg> RegistryIndex<'cfg> {
/// Creates an index manager for `source_id` rooted at `path`, with an
/// initially empty in-memory summaries cache.
pub fn new(
    source_id: SourceId,
    path: &Filesystem,
    config: &'cfg Config,
) -> RegistryIndex<'cfg> {
    RegistryIndex {
        config,
        source_id,
        summaries_cache: HashMap::new(),
        path: path.clone(),
    }
}
/// Returns the hash listed for a specified `PackageId`.
pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> Poll<CargoResult<&str>> {
    // Exact-version query: at most one index entry can match `pkg`.
    let req = OptVersionReq::exact(pkg.version());
    let summary = self.summaries(pkg.name(), &req, load)?;
    let summary = match summary {
        Poll::Ready(mut summary) => summary.next(),
        Poll::Pending => return Poll::Pending,
    };
    // A missing package and a package without a checksum are reported the
    // same way: the caller only needs to know no hash is available.
    Poll::Ready(Ok(summary
        .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?
        .summary
        .checksum()
        .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?))
}
/// Load a list of summaries for `name` package in this registry which
/// match `req`
///
/// This function will semantically parse the on-disk index, match all
/// versions, and then return an iterator over all summaries which matched.
/// Internally there's quite a few layers of caching to amortize this cost
/// though since this method is called quite a lot on null builds in Cargo.
pub fn summaries<'a, 'b>(
    &'a mut self,
    name: InternedString,
    req: &'b OptVersionReq,
    load: &mut dyn RegistryData,
) -> Poll<CargoResult<impl Iterator<Item = &'a IndexSummary> + 'b>>
where
    'a: 'b,
{
    // Copied out so the `move` closures below don't capture `self`.
    let source_id = self.source_id;
    let config = self.config;
    // First up actually parse what summaries we have available. If Cargo
    // has run previously this will parse a Cargo-specific cache file rather
    // than the registry itself. In effect this is intended to be a quite
    // cheap operation.
    let summaries = match self.load_summaries(name, load)? {
        Poll::Ready(summaries) => summaries,
        Poll::Pending => return Poll::Pending,
    };
    // Iterate over our summaries, extract all relevant ones which match our
    // version requirement, and then parse all corresponding rows in the
    // registry. As a reminder this `summaries` method is called for each
    // entry in a lock file on every build, so we want to absolutely
    // minimize the amount of work being done here and parse as little as
    // necessary.
    let raw_data = &summaries.raw_data;
    Poll::Ready(Ok(summaries
        .versions
        .iter_mut()
        .filter_map(move |(k, v)| if req.matches(k) { Some(v) } else { None })
        // Lazily parse each matching entry; a malformed entry is logged and
        // skipped rather than failing the whole query.
        .filter_map(
            move |maybe| match maybe.parse(config, raw_data, source_id) {
                Ok(summary) => Some(summary),
                Err(e) => {
                    info!("failed to parse `{}` registry package: {}", name, e);
                    None
                }
            },
        )
        // Drop entries whose index schema version is newer than this Cargo
        // understands.
        .filter(move |is| {
            if is.v > INDEX_V_MAX {
                debug!(
                    "unsupported schema version {} ({} {})",
                    is.v,
                    is.summary.name(),
                    is.summary.version()
                );
                false
            } else {
                true
            }
        })))
}
/// Returns the [`Summaries`] for `name`, loading (and memoizing) them from
/// the on-disk index — or its faster cache files — on first use.
fn load_summaries(
    &mut self,
    name: InternedString,
    load: &mut dyn RegistryData,
) -> Poll<CargoResult<&mut Summaries>> {
    // If we've previously loaded what versions are present for `name`, just
    // return that since our cache should still be valid.
    if self.summaries_cache.contains_key(&name) {
        return Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap()));
    }
    // Prepare the `RegistryData` which will lazily initialize internal data
    // structures.
    load.prepare()?;
    let root = load.assert_index_locked(&self.path);
    let cache_root = root.join(".cache");
    // See module comment in `registry/mod.rs` for why this is structured
    // the way it is.
    let fs_name = name
        .chars()
        .flat_map(|c| c.to_lowercase())
        .collect::<String>();
    let raw_path = make_dep_path(&fs_name, false);
    let mut any_pending = false;
    // Attempt to handle misspellings by searching for a chain of related
    // names to the original `raw_path` name. Only return summaries
    // associated with the first hit, however. The resolver will later
    // reject any candidates that have the wrong name, and with this it'll
    // along the way produce helpful "did you mean?" suggestions.
    for (i, path) in UncanonicalizedIter::new(&raw_path).take(1024).enumerate() {
        let summaries = Summaries::parse(
            root,
            &cache_root,
            path.as_ref(),
            self.source_id,
            load,
            self.config,
        )?;
        if summaries.is_pending() {
            if i == 0 {
                // If we have not heard back about the name as requested
                // then don't ask about other spellings yet.
                // This prevents us spamming all the variations in the
                // case where we have the correct spelling.
                return Poll::Pending;
            }
            any_pending = true;
        }
        if let Poll::Ready(Some(summaries)) = summaries {
            self.summaries_cache.insert(name, summaries);
            return Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap()));
        }
    }
    if any_pending {
        return Poll::Pending;
    }
    // If nothing was found then this crate doesn't exist, so just use an
    // empty `Summaries` list.
    self.summaries_cache.insert(name, Summaries::default());
    Poll::Ready(Ok(self.summaries_cache.get_mut(&name).unwrap()))
}
/// Clears the in-memory summaries cache.
///
/// Subsequent queries will re-read the on-disk index state.
pub fn clear_summaries_cache(&mut self) {
    self.summaries_cache.clear();
}
/// Queries the index for summaries matching `dep`, invoking `f` once per
/// candidate `Summary`. Yanked versions are skipped unless whitelisted.
pub fn query_inner(
    &mut self,
    dep: &Dependency,
    load: &mut dyn RegistryData,
    yanked_whitelist: &HashSet<PackageId>,
    f: &mut dyn FnMut(Summary),
) -> Poll<CargoResult<()>> {
    if self.config.offline() {
        match self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? {
            // Zero offline matches: fall through to the online-style query.
            Poll::Ready(0) => {}
            Poll::Ready(_) => return Poll::Ready(Ok(())),
            Poll::Pending => return Poll::Pending,
        }
        // If offline, and there are no matches, try again with online.
        // This is necessary for dependencies that are not used (such as
        // target-cfg or optional), but are not downloaded. Normally the
        // build should succeed if they are not downloaded and not used,
        // but they still need to resolve. If they are actually needed
        // then cargo will fail to download and an error message
        // indicating that the required dependency is unavailable while
        // offline will be displayed.
    }
    self.query_inner_with_online(dep, load, yanked_whitelist, f, true)
        .map_ok(|_| ())
}
/// Implementation of [`RegistryIndex::query_inner`]; returns the number of
/// summaries passed to `f`. When `online` is false, only candidates already
/// downloaded to disk are considered.
fn query_inner_with_online(
    &mut self,
    dep: &Dependency,
    load: &mut dyn RegistryData,
    yanked_whitelist: &HashSet<PackageId>,
    f: &mut dyn FnMut(Summary),
    online: bool,
) -> Poll<CargoResult<usize>> {
    let source_id = self.source_id;
    let summaries = match self.summaries(dep.package_name(), dep.version_req(), load)? {
        Poll::Ready(summaries) => summaries,
        Poll::Pending => return Poll::Pending,
    };
    let summaries = summaries
        // First filter summaries for `--offline`. If we're online then
        // everything is a candidate, otherwise if we're offline we're only
        // going to consider candidates which are actually present on disk.
        //
        // Note: This particular logic can cause problems with
        // optional dependencies when offline. If at least 1 version
        // of an optional dependency is downloaded, but that version
        // does not satisfy the requirements, then resolution will
        // fail. Unfortunately, whether or not something is optional
        // is not known here.
        .filter(|s| (online || load.is_crate_downloaded(s.summary.package_id())))
        // Next filter out all yanked packages. Some yanked packages may
        // leak through if they're in a whitelist (aka if they were
        // previously in `Cargo.lock`)
        .filter(|s| !s.yanked || yanked_whitelist.contains(&s.summary.package_id()))
        .map(|s| s.summary.clone());
    // Handle `cargo update --precise` here. If specified, our own source
    // will have a precise version listed of the form
    // `<pkg>=<p_req>-><f_req>` where `<pkg>` is the name of a crate on
    // this source, `<p_req>` is the version installed and `<f_req>` is the
    // version requested (argument to `--precise`).
    let name = dep.package_name().as_str();
    let precise = match source_id.precise() {
        Some(p) if p.starts_with(name) && p[name.len()..].starts_with('=') => {
            let mut vers = p[name.len() + 1..].splitn(2, "->");
            let current_vers = vers.next().unwrap().to_semver().unwrap();
            let requested_vers = vers.next().unwrap().to_semver().unwrap();
            Some((current_vers, requested_vers))
        }
        _ => None,
    };
    let summaries = summaries.filter(|s| match &precise {
        Some((current, requested)) => {
            if dep.version_req().matches(current) {
                // Unfortunately crates.io allows versions to differ only
                // by build metadata. This shouldn't be allowed, but since
                // it is, this will honor it if requested. However, if not
                // specified, then ignore it.
                let s_vers = s.version();
                match (s_vers.build.is_empty(), requested.build.is_empty()) {
                    (true, true) => s_vers == requested,
                    (true, false) => false,
                    (false, true) => {
                        // Strip out the metadata.
                        s_vers.major == requested.major
                            && s_vers.minor == requested.minor
                            && s_vers.patch == requested.patch
                            && s_vers.pre == requested.pre
                    }
                    (false, false) => s_vers == requested,
                }
            } else {
                true
            }
        }
        None => true,
    });
    // Feed every surviving candidate to the callback, counting as we go.
    let mut count = 0;
    for summary in summaries {
        f(summary);
        count += 1;
    }
    Poll::Ready(Ok(count))
}
/// Report whether the exact published version of `pkg` is marked as yanked
/// in this registry's index.
pub fn is_yanked(
    &mut self,
    pkg: PackageId,
    load: &mut dyn RegistryData,
) -> Poll<CargoResult<bool>> {
    // Restrict the lookup to exactly this version, then check whether any
    // matching index entry carries the yanked flag.
    let exact_req = OptVersionReq::exact(pkg.version());
    self.summaries(pkg.name(), &exact_req, load)
        .map_ok(|mut entries| entries.any(|entry| entry.yanked))
}
}
impl Summaries {
    /// Parse out a `Summaries` instances from on-disk state.
    ///
    /// This will attempt to prefer parsing a previous cache file that already
    /// exists from a previous invocation of Cargo (aka you're typing `cargo
    /// build` again after typing it previously). If parsing fails or the cache
    /// isn't found, then we take a slower path which loads the full descriptor
    /// for `relative` from the underlying index (aka typically libgit2 with
    /// crates.io) and then parse everything in there.
    ///
    /// * `root` - this is the root argument passed to `load`
    /// * `cache_root` - this is the root on the filesystem itself of where to
    ///   store cache files.
    /// * `relative` - this is the file we're loading from cache or the index
    ///   data
    /// * `source_id` - the registry's SourceId used when parsing JSON blobs to
    ///   create summaries.
    /// * `load` - the actual index implementation which may be very slow to
    ///   call. We avoid this if we can.
    pub fn parse(
        root: &Path,
        cache_root: &Path,
        relative: &Path,
        source_id: SourceId,
        load: &mut dyn RegistryData,
        config: &Config,
    ) -> Poll<CargoResult<Option<Summaries>>> {
        // First up, attempt to load the cache. This could fail for all manner
        // of reasons, but consider all of them non-fatal and just log their
        // occurrence in case anyone is debugging anything.
        let cache_path = cache_root.join(relative);
        let mut cached_summaries = None;
        let mut index_version = None;
        match fs::read(&cache_path) {
            Ok(contents) => match Summaries::parse_cache(contents) {
                Ok((s, v)) => {
                    cached_summaries = Some(s);
                    index_version = Some(v);
                }
                Err(e) => {
                    log::debug!("failed to parse {:?} cache: {}", relative, e);
                }
            },
            Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e),
        }
        // Ask the index backend whether the cached copy (identified by
        // `index_version`) is still current; this may itself be pending.
        let response = match load.load(root, relative, index_version.as_deref())? {
            Poll::Pending => return Poll::Pending,
            Poll::Ready(response) => response,
        };
        match response {
            LoadResponse::CacheValid => {
                log::debug!("fast path for registry cache of {:?}", relative);
                return Poll::Ready(Ok(cached_summaries));
            }
            LoadResponse::NotFound => {
                debug_assert!(cached_summaries.is_none());
                // The entry no longer exists in the index, so drop any stale
                // cache file. A file that's already gone is not an error.
                if let Err(e) = fs::remove_file(cache_path) {
                    if e.kind() != ErrorKind::NotFound {
                        log::debug!("failed to remove from cache: {}", e);
                    }
                }
                return Poll::Ready(Ok(None));
            }
            LoadResponse::Data {
                raw_data,
                index_version,
            } => {
                // This is the fallback path where we actually talk to the registry backend to load
                // information. Here we parse every single line in the index (as we need
                // to find the versions)
                log::debug!("slow path for {:?}", relative);
                let mut cache = SummariesCache::default();
                let mut ret = Summaries::default();
                ret.raw_data = raw_data;
                for line in split(&ret.raw_data, b'\n') {
                    // Attempt forwards-compatibility on the index by ignoring
                    // everything that we ourselves don't understand, that should
                    // allow future cargo implementations to break the
                    // interpretation of each line here and older cargo will simply
                    // ignore the new lines.
                    let summary = match IndexSummary::parse(config, line, source_id) {
                        Ok(summary) => summary,
                        Err(e) => {
                            // This should only happen when there is an index
                            // entry from a future version of cargo that this
                            // version doesn't understand. Hopefully, those future
                            // versions of cargo correctly set INDEX_V_MAX and
                            // CURRENT_CACHE_VERSION, otherwise this will skip
                            // entries in the cache preventing those newer
                            // versions from reading them (that is, until the
                            // cache is rebuilt).
                            log::info!("failed to parse {:?} registry package: {}", relative, e);
                            continue;
                        }
                    };
                    let version = summary.summary.package_id().version().clone();
                    cache.versions.push((version.clone(), line));
                    ret.versions.insert(version, summary.into());
                }
                if let Some(index_version) = index_version {
                    log::trace!("caching index_version {}", index_version);
                    let cache_bytes = cache.serialize(index_version.as_str());
                    // Once we have our `cache_bytes` which represents the `Summaries` we're
                    // about to return, write that back out to disk so future Cargo
                    // invocations can use it.
                    //
                    // This is opportunistic so we ignore failure here but are sure to log
                    // something in case of error.
                    if paths::create_dir_all(cache_path.parent().unwrap()).is_ok() {
                        let path = Filesystem::new(cache_path.clone());
                        config.assert_package_cache_locked(&path);
                        if let Err(e) = fs::write(cache_path, &cache_bytes) {
                            log::info!("failed to write cache: {}", e);
                        }
                    }
                    // If we've got debug assertions enabled read back in the cached values
                    // and assert they match the expected result.
                    #[cfg(debug_assertions)]
                    {
                        let readback = SummariesCache::parse(&cache_bytes)
                            .expect("failed to parse cache we just wrote");
                        assert_eq!(
                            readback.index_version, index_version,
                            "index_version mismatch"
                        );
                        assert_eq!(readback.versions, cache.versions, "versions mismatch");
                    }
                }
                Poll::Ready(Ok(Some(ret)))
            }
        }
    }
    /// Parses an open `File` which represents information previously cached by
    /// Cargo.
    pub fn parse_cache(contents: Vec<u8>) -> CargoResult<(Summaries, InternedString)> {
        let cache = SummariesCache::parse(&contents)?;
        let index_version = InternedString::new(cache.index_version);
        let mut ret = Summaries::default();
        for (version, summary) in cache.versions {
            // Record only byte offsets into `contents`; each JSON blob is
            // parsed lazily on first use (see `MaybeIndexSummary::parse`).
            let (start, end) = subslice_bounds(&contents, summary);
            ret.versions
                .insert(version, MaybeIndexSummary::Unparsed { start, end });
        }
        ret.raw_data = contents;
        return Ok((ret, index_version));
        // Returns the start/end offsets of `inner` with `outer`. Asserts that
        // `inner` is a subslice of `outer`.
        fn subslice_bounds(outer: &[u8], inner: &[u8]) -> (usize, usize) {
            let outer_start = outer.as_ptr() as usize;
            let outer_end = outer_start + outer.len();
            let inner_start = inner.as_ptr() as usize;
            let inner_end = inner_start + inner.len();
            assert!(inner_start >= outer_start);
            assert!(inner_end <= outer_end);
            (inner_start - outer_start, inner_end - outer_start)
        }
    }
}
// Implementation of serializing/deserializing the cache of summaries on disk.
// Currently the format looks like:
//
// +--------------------+----------------------+-------------+---+
// | cache version byte | index format version | git sha rev | 0 |
// +--------------------+----------------------+-------------+---+
//
// followed by...
//
// +----------------+---+------------+---+
// | semver version | 0 | JSON blob | 0 | ...
// +----------------+---+------------+---+
//
// The idea is that this is a very easy file for Cargo to parse in future
// invocations. The read from disk should be quite fast and then afterwards all
// we need to know is what versions correspond to which JSON blob.
//
// The leading version byte is intended to ensure that there's some level of
// future compatibility against changes to this cache format so if different
// versions of Cargo share the same cache they don't get too confused. The git
// sha lets us know when the file needs to be regenerated (it needs regeneration
// whenever the index itself updates).
//
// Cache versions:
// * `1`: The original version.
// * `2`: Added the "index format version" field so that if the index format
// changes, different versions of cargo won't get confused reading each
// other's caches.
// * `3`: Bumped the version to work around an issue where multiple versions of
// a package were published that differ only by semver metadata. For
// example, openssl-src 110.0.0 and 110.0.0+1.1.0f. Previously, the cache
// would be incorrectly populated with two entries, both 110.0.0. After
// this, the metadata will be correctly included. This isn't really a format
// change, just a version bump to clear the incorrect cache entries. Note:
// the index shouldn't allow these, but unfortunately crates.io doesn't
// check it.
/// Version byte written at the start of every cache file; see the cache
/// version history in the comment block above.
const CURRENT_CACHE_VERSION: u8 = 3;
impl<'a> SummariesCache<'a> {
    /// Deserialize a cache file previously produced by `serialize`.
    ///
    /// Returns an error (rather than panicking) on any malformed input so
    /// that callers can fall back to the slow path of re-reading the index.
    fn parse(data: &'a [u8]) -> CargoResult<SummariesCache<'a>> {
        // NB: keep this method in sync with `serialize` below
        let (first_byte, rest) = data
            .split_first()
            .ok_or_else(|| anyhow::format_err!("malformed cache"))?;
        if *first_byte != CURRENT_CACHE_VERSION {
            bail!("looks like a different Cargo's cache, bailing out");
        }
        let index_v_bytes = rest
            .get(..4)
            .ok_or_else(|| anyhow::anyhow!("cache expected 4 bytes for index version"))?;
        let index_v = u32::from_le_bytes(index_v_bytes.try_into().unwrap());
        if index_v != INDEX_V_MAX {
            bail!(
                "index format version {} doesn't match the version I know ({})",
                index_v,
                INDEX_V_MAX
            );
        }
        let rest = &rest[4..];
        // Entries are NUL-separated: first the last-index-update marker, then
        // alternating (semver version, JSON blob) pairs.
        let mut iter = split(rest, 0);
        let last_index_update = if let Some(update) = iter.next() {
            str::from_utf8(update)?
        } else {
            bail!("malformed file");
        };
        let mut ret = SummariesCache::default();
        ret.index_version = last_index_update;
        while let Some(version) = iter.next() {
            let version = str::from_utf8(version)?;
            let version = Version::parse(version)?;
            // Every version entry must be followed by its JSON blob; a
            // missing blob means the cache is truncated or corrupt. This
            // used to be an `unwrap()`, which panicked on a corrupt cache
            // instead of letting the caller treat it as a cache miss.
            let summary = match iter.next() {
                Some(summary) => summary,
                None => bail!("malformed cache: version entry without a summary"),
            };
            ret.versions.push((version, summary));
        }
        Ok(ret)
    }
    /// Serialize this cache into the byte format documented above.
    fn serialize(&self, index_version: &str) -> Vec<u8> {
        // NB: keep this method in sync with `parse` above
        // Capacity is only a hint: ~10 bytes of version text plus separators
        // per entry, plus each JSON blob.
        let size = self
            .versions
            .iter()
            .map(|(_version, data)| (10 + data.len()))
            .sum();
        let mut contents = Vec::with_capacity(size);
        contents.push(CURRENT_CACHE_VERSION);
        contents.extend(&u32::to_le_bytes(INDEX_V_MAX));
        contents.extend_from_slice(index_version.as_bytes());
        contents.push(0);
        for (version, data) in self.versions.iter() {
            contents.extend_from_slice(version.to_string().as_bytes());
            contents.push(0);
            contents.extend_from_slice(data);
            contents.push(0);
        }
        contents
    }
}
impl MaybeIndexSummary {
    /// Parses this "maybe a summary" into a `Parsed` for sure variant.
    ///
    /// Does nothing if this is already `Parsed`; otherwise the `raw_data`
    /// passed in is sliced with the stored `Unparsed` bounds and parsed,
    /// and the result is cached in place.
    fn parse(
        &mut self,
        config: &Config,
        raw_data: &[u8],
        source_id: SourceId,
    ) -> CargoResult<&IndexSummary> {
        // Copy the bounds out first so the borrow of `self` ends before we
        // overwrite it below.
        let bounds = match self {
            MaybeIndexSummary::Unparsed { start, end } => Some((*start, *end)),
            MaybeIndexSummary::Parsed(_) => None,
        };
        if let Some((start, end)) = bounds {
            let parsed = IndexSummary::parse(config, &raw_data[start..end], source_id)?;
            *self = MaybeIndexSummary::Parsed(parsed);
        }
        match self {
            MaybeIndexSummary::Parsed(summary) => Ok(summary),
            // We either started as `Parsed` or just stored `Parsed` above.
            MaybeIndexSummary::Unparsed { .. } => unreachable!(),
        }
    }
}
impl From<IndexSummary> for MaybeIndexSummary {
fn from(summary: IndexSummary) -> MaybeIndexSummary {
MaybeIndexSummary::Parsed(summary)
}
}
impl IndexSummary {
    /// Parses a line from the registry's index file into an `IndexSummary` for
    /// a package.
    ///
    /// The `line` provided is expected to be valid JSON.
    fn parse(config: &Config, line: &[u8], source_id: SourceId) -> CargoResult<IndexSummary> {
        // ****CAUTION**** Please be extremely careful with returning errors
        // from this function. Entries that error are not included in the
        // index cache, and can cause cargo to get confused when switching
        // between different versions that understand the index differently.
        // Make sure to consider the INDEX_V_MAX and CURRENT_CACHE_VERSION
        // values carefully when making changes here.
        let RegistryPackage {
            name,
            vers,
            cksum,
            deps,
            mut features,
            features2,
            yanked,
            links,
            v,
        } = serde_json::from_slice(line)?;
        // Index schema version; entries written before the field existed
        // default to 1.
        let v = v.unwrap_or(1);
        log::trace!("json parsed registry {}/{}", name, vers);
        let pkgid = PackageId::new(name, &vers, source_id)?;
        let deps = deps
            .into_iter()
            .map(|dep| dep.into_dep(source_id))
            .collect::<CargoResult<Vec<_>>>()?;
        // `features2` (when present) is merged into the main feature map,
        // extending any existing entries of the same name.
        if let Some(features2) = features2 {
            for (name, values) in features2 {
                features.entry(name).or_default().extend(values);
            }
        }
        let mut summary = Summary::new(config, pkgid, deps, &features, links)?;
        summary.set_checksum(cksum);
        Ok(IndexSummary {
            summary,
            // A missing `yanked` field means not yanked.
            yanked: yanked.unwrap_or(false),
            v,
        })
    }
}
/// Split `haystack` on `needle` bytes, yielding the segments between them.
///
/// An empty input yields nothing; a trailing delimiter does not produce a
/// final empty segment.
fn split(haystack: &[u8], needle: u8) -> impl Iterator<Item = &[u8]> {
    struct Split<'a> {
        remaining: &'a [u8],
        delimiter: u8,
    }
    impl<'a> Iterator for Split<'a> {
        type Item = &'a [u8];
        fn next(&mut self) -> Option<&'a [u8]> {
            if self.remaining.is_empty() {
                return None;
            }
            match memchr::memchr(self.delimiter, self.remaining) {
                Some(pos) => {
                    // Yield up to the delimiter; skip the delimiter itself.
                    let segment = &self.remaining[..pos];
                    self.remaining = &self.remaining[pos + 1..];
                    Some(segment)
                }
                None => {
                    // No delimiter left: yield everything and finish.
                    let segment = self.remaining;
                    self.remaining = &[][..];
                    Some(segment)
                }
            }
        }
    }
    Split {
        remaining: haystack,
        delimiter: needle,
    }
}
|
mod constants;
mod file_wrapper;
mod game;
mod keybindings;
mod rectangle;
mod run;
mod tile;
mod utility;
mod vector;
use crate::serialization;
pub use file_wrapper::FileWrapper;
use game::Game;
pub use keybindings::Keybindings;
pub use rectangle::Rectangle;
pub use run::run_internal;
use serialization::MapDistance;
use tile::Tile;
|
use serde::{Serialize, Deserialize};
use std::sync::atomic::{AtomicUsize, AtomicU64, AtomicU32, Ordering, AtomicBool};
use std::time::{SystemTime, UNIX_EPOCH, Duration};
lazy_static! {
    /// Process-wide metrics counter; all fields are atomics so it can be
    /// shared freely across threads.
    pub static ref PERFORMANCE_COUNTER: Counter = { Counter::default() };
}
/// Lock-free metrics store. Every field is an independent atomic, so
/// recording never blocks and reads (via `snapshot`) are best-effort
/// consistent rather than transactional.
#[derive(Default)]
pub struct Counter {
    scale_id: AtomicUsize,
    generated_transactions: AtomicUsize,
    confirmed_transactions: AtomicUsize,
    chain_depth: AtomicUsize,
    token: AtomicBool,
    propose_block: AtomicUsize, // side node block id
    sign_block: AtomicUsize, // scale node # block id 0 for idle
    submit_block: AtomicUsize, // is submitting blocks id 0 for idle
    block: AtomicU64,
    coll_block: AtomicUsize,
    // Per-phase timing state. Each phase stores the measurement start time
    // split into whole seconds (`*_sec`) and sub-second milliseconds
    // (`*_millis`), a completed-measurement count (`*_num`), and — where a
    // `*_loaded` flag exists — whether a measurement is currently in flight.
    // `*_anc` is the "anchor" block id whose stop event closes the
    // measurement.
    propose_sec: AtomicU64,
    propose_millis: AtomicU32,
    propose_num: AtomicUsize,
    propose_anc: AtomicUsize,
    sign_loaded: AtomicBool,
    sign_sec: AtomicU64,
    sign_millis: AtomicU32,
    sign_num: AtomicU64,
    sign_anc: AtomicU64,
    submit_loaded: AtomicBool,
    submit_sec: AtomicU64,
    submit_millis: AtomicU32,
    submit_num: AtomicU64,
    submit_anc: AtomicU64,
    block_loaded: AtomicBool,
    block_sec: AtomicU64,
    block_millis: AtomicU32,
    block_num: AtomicU64,
    coll_loaded: AtomicBool,
    coll_sec: AtomicU64,
    coll_millis: AtomicU32,
    coll_num: AtomicU64,
    // Accumulated latencies in milliseconds per phase.
    propose_latency: AtomicUsize,
    sign_latency: AtomicUsize,
    submit_latency: AtomicUsize, // time taken to submit
    coll_latency: AtomicUsize,
    block_latency: AtomicUsize,
    gas: AtomicUsize,
}
impl Counter {
    /// Record the id of the scale node this process is running as.
    pub fn record_scale_id(&self, scale_id: usize) {
        self.scale_id.store(scale_id, Ordering::Relaxed);
    }
    /// Count one generated transaction.
    pub fn record_generated_transaction(&self) {
        self.generated_transactions.fetch_add(1, Ordering::Relaxed);
    }
    /// Count one confirmed transaction.
    /// (The name keeps its historical misspelling: renaming it would break
    /// existing callers.)
    pub fn record_confirmeded_transaction(&self) {
        self.confirmed_transactions.fetch_add(1, Ordering::Relaxed);
    }
    /// Count `num` generated transactions at once.
    pub fn record_generated_transactions(&self, num: usize) {
        self.generated_transactions.fetch_add(num, Ordering::Relaxed);
    }
    /// Count `num` confirmed transactions at once.
    pub fn record_confirmeded_transactions(&self, num: usize) {
        self.confirmed_transactions.fetch_add(num, Ordering::Relaxed);
    }
    /// Note that the chain grew by one block.
    pub fn record_chain_update(&self) {
        self.chain_depth.fetch_add(1, Ordering::Relaxed);
    }
    /// Accumulate consumed gas.
    pub fn record_gas_update(&self, gas: usize) {
        self.gas.fetch_add(gas, Ordering::Relaxed);
    }
    // should not be used later
    pub fn store_chain_depth(&self, chain_len: usize) {
        self.chain_depth.store(chain_len, Ordering::Relaxed);
    }
    /// Record the current token-holding flag.
    pub fn record_token_update(&self, new_flag: bool) {
        self.token.store(new_flag, Ordering::Relaxed);
    }
    /// Current wall-clock time as (whole seconds, sub-second milliseconds)
    /// since the Unix epoch.
    fn get_times(&self) -> (u64, u32) {
        let dur = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap();
        (dur.as_secs(), dur.subsec_millis())
    }
    /// Elapsed milliseconds from (`psec`, `pmillis`) to (`sec`, `millis`).
    ///
    /// Saturates to zero instead of panicking if the clock reads a smaller
    /// millisecond component within the same second (the previous
    /// implementation could underflow `usize` in that case).
    fn subtract_times(&self, sec: u64, millis: u32, psec: u64, pmillis: u32) -> usize {
        let upper = sec.saturating_sub(psec) as usize * 1000 + millis as usize;
        upper.saturating_sub(pmillis as usize)
    }
    /// Start a block-proposal latency measurement. The id argument is
    /// currently unused; only the start timestamp is recorded.
    pub fn record_propose_block_update(&self, _id: u64) {
        let (sec, millis) = self.get_times();
        self.propose_sec.store(sec, Ordering::Relaxed);
        self.propose_millis.store(millis, Ordering::Relaxed);
    }
    /// Record which block is currently being proposed.
    pub fn record_propose_block_id(&self, id: usize) {
        self.propose_block.store(id, Ordering::Relaxed);
    }
    /// Close the proposal measurement and accumulate its latency.
    pub fn record_propose_block_stop(&self) {
        let (sec, millis) = self.get_times();
        let psec = self.propose_sec.load(Ordering::Relaxed);
        let pmillis = self.propose_millis.load(Ordering::Relaxed);
        let lat = self.subtract_times(sec, millis, psec, pmillis);
        self.propose_latency.fetch_add(lat, Ordering::Relaxed);
        self.propose_num.fetch_add(1, Ordering::Relaxed);
        info!("propose latency lat {}", lat);
    }
    /// Start a sign measurement anchored on `id` unless one is already in
    /// flight. `compare_exchange` replaces the deprecated
    /// `compare_and_swap`; `Ok(_)` means we observed `false` and won the
    /// race to set the in-flight flag.
    pub fn record_sign_block_update(&self, id: u64) {
        self.sign_block.store(id as usize, Ordering::Relaxed);
        if self
            .sign_loaded
            .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            self.sign_anc.store(id, Ordering::Relaxed);
            let (sec, millis) = self.get_times();
            self.sign_sec.store(sec, Ordering::Relaxed);
            self.sign_millis.store(millis, Ordering::Relaxed);
        }
    }
    /// Close the sign measurement if `id` is the anchored block.
    pub fn record_sign_block_stop(&self, id: usize) {
        if id as u64 == self.sign_anc.load(Ordering::Relaxed) {
            let (sec, millis) = self.get_times();
            let psec = self.sign_sec.load(Ordering::Relaxed);
            let pmillis = self.sign_millis.load(Ordering::Relaxed);
            let lat = self.subtract_times(sec, millis, psec, pmillis);
            self.sign_latency.fetch_add(lat, Ordering::Relaxed);
            self.sign_num.fetch_add(1, Ordering::Relaxed);
            self.sign_loaded.store(false, Ordering::Relaxed);
            info!("sign latency block id {} lat {}", id, lat);
        }
    }
    /// Start a submit measurement anchored on `id` unless one is in flight.
    pub fn record_submit_block_update(&self, id: u64) {
        self.submit_block.store(id as usize, Ordering::Relaxed);
        if self
            .submit_loaded
            .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            self.submit_anc.store(id, Ordering::Relaxed);
            let (sec, millis) = self.get_times();
            self.submit_sec.store(sec, Ordering::Relaxed);
            self.submit_millis.store(millis, Ordering::Relaxed);
        }
    }
    /// Close the submit measurement if `id` is the anchored block.
    pub fn record_submit_block_stop(&self, id: usize) {
        if id as u64 == self.submit_anc.load(Ordering::Relaxed) {
            let (sec, millis) = self.get_times();
            let psec = self.submit_sec.load(Ordering::Relaxed);
            let pmillis = self.submit_millis.load(Ordering::Relaxed);
            let lat = self.subtract_times(sec, millis, psec, pmillis);
            self.submit_latency.fetch_add(lat, Ordering::Relaxed);
            self.submit_num.fetch_add(1, Ordering::Relaxed);
            self.submit_loaded.store(false, Ordering::Relaxed);
            info!("submit latency block id {} lat {}", id, lat);
        }
    }
    /// Start an end-to-end block measurement for `id` unless one is in
    /// flight; the block id itself serves as the anchor here.
    pub fn record_block_update(&self, id: u64) {
        if self
            .block_loaded
            .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            self.block.store(id, Ordering::Relaxed);
            let (sec, millis) = self.get_times();
            self.block_sec.store(sec, Ordering::Relaxed);
            self.block_millis.store(millis, Ordering::Relaxed);
        }
    }
    /// Close the block measurement if `id` matches the anchored block, and
    /// reset the anchor to 0 (idle).
    pub fn record_block_stop(&self, id: u64) {
        if id == self.block.load(Ordering::Relaxed) {
            self.block.store(0, Ordering::Relaxed);
            let (sec, millis) = self.get_times();
            let psec = self.block_sec.load(Ordering::Relaxed);
            let pmillis = self.block_millis.load(Ordering::Relaxed);
            let lat = self.subtract_times(sec, millis, psec, pmillis);
            self.block_latency.fetch_add(lat, Ordering::Relaxed);
            self.block_num.fetch_add(1, Ordering::Relaxed);
            self.block_loaded.store(false, Ordering::Relaxed);
            info!("block id {} lat {}", id, lat);
        }
    }
    /// Start a collection measurement anchored on `id` unless one is in
    /// flight.
    pub fn record_coll_block_update(&self, id: u64) {
        if self
            .coll_loaded
            .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            self.coll_block.store(id as usize, Ordering::Relaxed);
            let (sec, millis) = self.get_times();
            self.coll_sec.store(sec, Ordering::Relaxed);
            self.coll_millis.store(millis, Ordering::Relaxed);
        }
    }
    /// Close the collection measurement if `id` matches the anchored block.
    pub fn record_coll_block_stop(&self, id: usize) {
        if id == self.coll_block.load(Ordering::Relaxed) {
            let (sec, millis) = self.get_times();
            let psec = self.coll_sec.load(Ordering::Relaxed);
            let pmillis = self.coll_millis.load(Ordering::Relaxed);
            let lat = self.subtract_times(sec, millis, psec, pmillis);
            self.coll_latency.fetch_add(lat, Ordering::Relaxed);
            self.coll_num.fetch_add(1, Ordering::Relaxed);
            self.coll_loaded.store(false, Ordering::Relaxed);
        }
    }
    /// Take a point-in-time copy of every counter. Reads are individually
    /// atomic but not mutually consistent.
    pub fn snapshot(&self) -> Snapshot {
        Snapshot {
            scale_id: self.scale_id.load(Ordering::Relaxed),
            generated_transactions: self.generated_transactions.load(Ordering::Relaxed),
            confirmed_transactions: self.confirmed_transactions.load(Ordering::Relaxed),
            chain_depth: self.chain_depth.load(Ordering::Relaxed),
            token: self.token.load(Ordering::Relaxed),
            propose_block: self.propose_block.load(Ordering::Relaxed),
            sign_block: self.sign_block.load(Ordering::Relaxed),
            submit_block: self.submit_block.load(Ordering::Relaxed),
            coll_block: self.coll_block.load(Ordering::Relaxed),
            propose_latency: self.propose_latency.load(Ordering::Relaxed),
            sign_latency: self.sign_latency.load(Ordering::Relaxed),
            submit_latency: self.submit_latency.load(Ordering::Relaxed),
            block_latency: self.block_latency.load(Ordering::Relaxed),
            coll_latency: self.coll_latency.load(Ordering::Relaxed),
            gas: self.gas.load(Ordering::Relaxed),
            propose_num: self.propose_num.load(Ordering::Relaxed),
            sign_num: self.sign_num.load(Ordering::Relaxed) as usize,
            submit_num: self.submit_num.load(Ordering::Relaxed) as usize,
            block_num: self.block_num.load(Ordering::Relaxed) as usize,
            coll_num: self.coll_num.load(Ordering::Relaxed) as usize,
        }
    }
}
/// Plain-data copy of `Counter` taken at one point in time, serializable
/// for reporting.
#[derive(Serialize, Deserialize, Debug)]
pub struct Snapshot {
    scale_id: usize,
    generated_transactions: usize,
    confirmed_transactions: usize,
    chain_depth: usize,
    token: bool,
    propose_block: usize,
    sign_block: usize, // scale node # block id 0 for idle
    submit_block: usize,
    coll_block: usize,
    // Accumulated per-phase latencies (milliseconds).
    propose_latency: usize,
    sign_latency: usize,
    submit_latency: usize,
    block_latency: usize,
    coll_latency: usize,
    gas: usize,
    // Completed-measurement counts per phase.
    propose_num: usize,
    sign_num: usize,
    submit_num: usize,
    block_num: usize,
    coll_num: usize,
}
|
// Diesel schema for the `transactions` table; primary key is `id`.
table! {
    transactions (id) {
        id -> Int4,
        // Stored as text, not a SQL date type.
        transaction_date -> Varchar,
        transaction_details -> Varchar,
        funds_out -> Numeric,
        funds_in -> Numeric,
    }
}
|
use std::fs::OpenOptions;
use std::fs::{read_dir, File};
use std::io::{Read, Write};
/// Build script: concatenate every migration's `up.sql` into a single
/// `migrations.sql` file, in sorted directory order.
fn main() {
    let mut ups = vec![];
    for entry in read_dir("migrations").unwrap() {
        let entry = entry.unwrap();
        // Each migration lives in its own directory containing an `up.sql`.
        if entry.metadata().unwrap().is_dir() {
            // `DirEntry::path()` already returns an owned `PathBuf`; the
            // previous `to_path_buf()` call was redundant.
            ups.push(entry.path().join("up.sql"));
        }
    }
    // Lexicographic order. NOTE(review): assumes directory names sort in
    // application order (e.g. timestamp prefixes) — confirm.
    ups.sort();
    let mut mig = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open("migrations.sql")
        .unwrap();
    for up in ups {
        let mut sql = String::new();
        File::open(up).unwrap().read_to_string(&mut sql).unwrap();
        writeln!(mig, "{}", sql).unwrap();
    }
    writeln!(mig, "SELECT true").unwrap();
    // Only rerun this build script when the script itself changes.
    println!("cargo:rerun-if-changed=build.rs");
}
|
use std::mem::MaybeUninit;
use crate::plan::Plan;
use crate::policy::largeobjectspace::LargeObjectSpace;
use crate::policy::mallocspace::MallocSpace;
use crate::policy::space::Space;
use crate::util::alloc::LargeObjectAllocator;
use crate::util::alloc::MallocAllocator;
use crate::util::alloc::{Allocator, BumpAllocator};
use crate::util::OpaquePointer;
use crate::vm::VMBinding;
// Fixed number of allocator slots of each kind owned by every mutator;
// plans select a subset of these slots (see `Allocators::new`).
const MAX_BUMP_ALLOCATORS: usize = 5;
const MAX_LARGE_OBJECT_ALLOCATORS: usize = 1;
const MAX_MALLOC_ALLOCATORS: usize = 1;
// The allocators set owned by each mutator. We provide a fixed number of allocators for each allocator type in the mutator,
// and each plan will select part of the allocators to use.
// Note that this struct is part of the Mutator struct.
// We are trying to make it fixed-sized so that VM bindings can easily define a Mutator type to have the exact same layout as our Mutator struct.
#[repr(C)]
pub struct Allocators<VM: VMBinding> {
    /// Bump-pointer allocator slots. Slots are `MaybeUninit`: only those
    /// named in the plan's space mapping are initialized (see `new`).
    pub bump_pointer: [MaybeUninit<BumpAllocator<VM>>; MAX_BUMP_ALLOCATORS],
    /// Large-object allocator slots (same initialization caveat).
    pub large_object: [MaybeUninit<LargeObjectAllocator<VM>>; MAX_LARGE_OBJECT_ALLOCATORS],
    /// Malloc-backed allocator slots (same initialization caveat).
    pub malloc: [MaybeUninit<MallocAllocator<VM>>; MAX_MALLOC_ALLOCATORS],
}
impl<VM: VMBinding> Allocators<VM> {
    /// Get a shared reference to the allocator identified by `selector`.
    ///
    /// # Safety
    /// The selector needs to be valid, and points to an allocator that has been initialized.
    pub unsafe fn get_allocator(&self, selector: AllocatorSelector) -> &dyn Allocator<VM> {
        match selector {
            AllocatorSelector::BumpPointer(index) => {
                self.bump_pointer[index as usize].assume_init_ref()
            }
            AllocatorSelector::LargeObject(index) => {
                self.large_object[index as usize].assume_init_ref()
            }
            AllocatorSelector::Malloc(index) => self.malloc[index as usize].assume_init_ref(),
        }
    }
    /// Get an exclusive reference to the allocator identified by `selector`.
    ///
    /// # Safety
    /// The selector needs to be valid, and points to an allocator that has been initialized.
    pub unsafe fn get_allocator_mut(
        &mut self,
        selector: AllocatorSelector,
    ) -> &mut dyn Allocator<VM> {
        match selector {
            AllocatorSelector::BumpPointer(index) => {
                self.bump_pointer[index as usize].assume_init_mut()
            }
            AllocatorSelector::LargeObject(index) => {
                self.large_object[index as usize].assume_init_mut()
            }
            AllocatorSelector::Malloc(index) => self.malloc[index as usize].assume_init_mut(),
        }
    }
    /// Create the allocator set for one mutator, initializing only the slots
    /// named in `space_mapping`; all other slots remain uninitialized.
    pub fn new(
        mutator_tls: OpaquePointer,
        plan: &'static dyn Plan<VM = VM>,
        space_mapping: &[(AllocatorSelector, &'static dyn Space<VM>)],
    ) -> Self {
        let mut ret = Allocators {
            // SAFETY: an array of `MaybeUninit<T>` does not require its
            // elements to be initialized, so `assume_init` on the array
            // itself is sound.
            bump_pointer: unsafe { MaybeUninit::uninit().assume_init() },
            large_object: unsafe { MaybeUninit::uninit().assume_init() },
            malloc: unsafe { MaybeUninit::uninit().assume_init() },
        };
        for &(selector, space) in space_mapping.iter() {
            match selector {
                AllocatorSelector::BumpPointer(index) => {
                    ret.bump_pointer[index as usize].write(BumpAllocator::new(
                        mutator_tls,
                        Some(space),
                        plan,
                    ));
                }
                AllocatorSelector::LargeObject(index) => {
                    // The mapping pairs this selector with a LargeObjectSpace;
                    // the downcast enforces that at runtime.
                    ret.large_object[index as usize].write(LargeObjectAllocator::new(
                        mutator_tls,
                        Some(space.downcast_ref::<LargeObjectSpace<VM>>().unwrap()),
                        plan,
                    ));
                }
                AllocatorSelector::Malloc(index) => {
                    ret.malloc[index as usize].write(MallocAllocator::new(
                        mutator_tls,
                        Some(space.downcast_ref::<MallocSpace<VM>>().unwrap()),
                        plan,
                    ));
                }
            }
        }
        ret
    }
}
// This type describe which allocator in the allocators set.
// For VM binding implementors, this type is equivalent to the following native types:
// #[repr(C)]
// struct AllocatorSelector {
// tag: AllocatorSelectorTag,
// payload: u8,
// }
// #[repr(u8)]
// enum AllocatorSelectorTag {
// BumpPointer,
// LargeObject,
// }
/// Identifies one allocator slot within `Allocators`; the `u8` payload is
/// the index into the corresponding fixed-size array.
#[repr(C, u8)]
#[derive(Copy, Clone, Debug)]
pub enum AllocatorSelector {
    BumpPointer(u8),
    LargeObject(u8),
    Malloc(u8),
}
|
//! Simple parsing functionality for extracting SBP
//! messages from binary streams
use byteorder::{LittleEndian, ReadBytesExt};
use nom::Err as NomErr;
use crate::{messages::SBP, Error, Result, SbpString};
/// Consume the entire remaining buffer as an SBP string, leaving `buf`
/// empty. This function never returns an error.
pub fn read_string(buf: &mut &[u8]) -> Result<SbpString> {
    let amount = buf.len();
    // Take everything; `tail` is the empty remainder.
    let (head, tail) = buf.split_at(amount);
    *buf = tail;
    Ok(SbpString(head.to_vec()))
}
pub fn read_string_limit(buf: &mut &[u8], n: usize) -> Result<SbpString> {
let n = std::cmp::min(n, buf.len());
let (mut head, tail) = buf.split_at(n);
read_string(&mut head).map(|sbp_string| {
*buf = tail;
sbp_string
})
}
/// Interpret the remaining buffer as an array of little-endian `u16`s.
///
/// Note: `buf` is not advanced, and a trailing odd byte is silently dropped
/// (`chunks_exact` ignores the remainder).
pub fn read_u16_array(buf: &mut &[u8]) -> Result<Vec<u16>> {
    // buf is in fact an array of u16, so at least 2 u8 elem, unless buf is empty
    // collect() guarantees that it will return Err if at least one Err is found while iterating
    // over the Vec https://doc.rust-lang.org/std/iter/trait.FromIterator.html#method.from_iter-14
    // map_err necessary to convert the generic read_u16's Error into our Error enum type
    // LittleEndian means chunks are read from right-to-left
    // (Previously the result was bound to a local and immediately returned;
    // the expression is now returned directly.)
    buf.chunks_exact(2)
        .map(|mut chunk| chunk.read_u16::<LittleEndian>().map_err(|e| e.into()))
        .collect()
}
/// Copy the entire remaining buffer into an owned byte vector without
/// advancing `buf`.
pub fn read_u8_array(buf: &mut &[u8]) -> Result<Vec<u8>> {
    let bytes: Vec<u8> = buf.iter().copied().collect();
    Ok(bytes)
}
pub fn read_u8_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<u8>> {
let mut v = Vec::new();
for _ in 0..n {
v.push(buf.read_u8()?);
}
Ok(v)
}
pub fn read_s8_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<i8>> {
let mut v = Vec::new();
for _ in 0..n {
v.push(buf.read_i8()?);
}
Ok(v)
}
pub fn read_s16_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<i16>> {
let mut v = Vec::new();
for _ in 0..n {
v.push(buf.read_i16::<LittleEndian>()?);
}
Ok(v)
}
pub fn read_u16_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<u16>> {
let mut v = Vec::new();
for _ in 0..n {
v.push(buf.read_u16::<LittleEndian>()?);
}
Ok(v)
}
pub fn read_float_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<f32>> {
let mut v = Vec::new();
for _ in 0..n {
v.push(buf.read_f32::<LittleEndian>()?);
}
Ok(v)
}
pub fn read_double_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<f64>> {
let mut v = Vec::new();
for _ in 0..n {
v.push(buf.read_f64::<LittleEndian>()?);
}
Ok(v)
}
/// Outcome of attempting to pull one SBP message out of a byte buffer.
#[derive(Debug)]
pub enum ParseResult {
    // Success: (bytes consumed, decoded message).
    Ok((usize, SBP)),
    // Failure: (bytes the caller should discard, error).
    Err((usize, Error)),
    // Not enough input yet to decide either way.
    Incomplete,
}
/// Attempt to parse a single SBP message from the front of `input`.
///
/// Returns the number of bytes consumed alongside the message or error;
/// `Incomplete` means more input is needed before anything can be decided.
/// (The explicit `<'a>` lifetime was removed — it is fully elidable.)
pub fn parse_sbp(input: &[u8]) -> ParseResult {
    // Frame the raw bytes first: preamble, type, sender, payload, CRC.
    let (new_input, mut frame) = match frame::parse(input) {
        Ok(res) => res,
        Err(e) => match e {
            NomErr::Incomplete(_) => return ParseResult::Incomplete,
            // On a framing error report only 1 byte consumed so the caller
            // can resynchronize on the next candidate preamble byte.
            NomErr::Error((_, kind)) | NomErr::Failure((_, kind)) => {
                return ParseResult::Err((1, Error::ParseError { kind }))
            }
        },
    };
    let bytes_read = input.len() - new_input.len();
    if !frame::check_crc(&frame) {
        return ParseResult::Err((
            bytes_read,
            Error::CrcError {
                msg_type: frame.1,
                sender_id: frame.2,
                crc: frame.4,
            },
        ));
    }
    // Decode the payload (frame.3) according to the message type (frame.1).
    let msg = match SBP::parse(frame.1, frame.2, &mut frame.3) {
        Ok(msg) => msg,
        Err(err) => return ParseResult::Err((bytes_read, err)),
    };
    ParseResult::Ok((bytes_read, msg))
}
pub mod frame {
    //! Framing layer: splits a raw byte stream into
    //! (preamble, message type, sender id, payload, CRC) tuples using nom
    //! streaming parsers. All multi-byte fields are little-endian.
    use nom::{bytes, error::ErrorKind, number, sequence::tuple, IResult};
    /// (preamble bytes, message type, sender id, payload, CRC as read).
    pub type Frame<'a> = (&'a [u8], u16, u16, &'a [u8], u16);
    pub type Error<'a> = (&'a [u8], ErrorKind);
    /// Parse one frame from `i`; nom's streaming parsers report
    /// `Incomplete` when more bytes are required.
    pub fn parse(i: &[u8]) -> IResult<&[u8], Frame, Error> {
        tuple((
            parse_preamble,
            parse_msg_type,
            parse_sender_id,
            parse_payload,
            parse_crc,
        ))(i)
    }
    /// Recompute CRC16 (XMODEM) over type, sender, payload length and
    /// payload, and compare against the CRC carried in the frame.
    pub fn check_crc((_preamble, msg_type, sender_id, payload, crc_in): &Frame) -> bool {
        let mut crc = crc16::State::<crc16::XMODEM>::new();
        crc.update(&msg_type.to_le_bytes());
        crc.update(&sender_id.to_le_bytes());
        crc.update(&[payload.len() as u8]);
        crc.update(payload);
        crc.get() == *crc_in
    }
    // 0x55 is the SBP frame preamble byte.
    fn parse_preamble(i: &[u8]) -> IResult<&[u8], &[u8], Error> {
        bytes::streaming::is_a("\x55")(i)
    }
    fn parse_msg_type(i: &[u8]) -> IResult<&[u8], u16, Error> {
        number::streaming::le_u16(i)
    }
    fn parse_sender_id(i: &[u8]) -> IResult<&[u8], u16, Error> {
        number::streaming::le_u16(i)
    }
    // Payload is prefixed by a single length byte.
    fn parse_payload(i: &[u8]) -> IResult<&[u8], &[u8], Error> {
        let (i, length) = number::streaming::le_u8(i)?;
        bytes::streaming::take(length)(i)
    }
    fn parse_crc(i: &[u8]) -> IResult<&[u8], u16, Error> {
        number::streaming::le_u16(i)
    }
}
#[cfg(test)]
mod tests {
    use std::io::Cursor;
    use crate::serialize::SbpSerialize;
    use super::*;

    /// Parse a single complete frame and check every field of the decoded
    /// `MsgBaselineECEF`, plus that the whole packet was consumed.
    #[test]
    fn test_parse_sbp() {
        let packet = [
            0x55u8, 0x0b, 0x02, 0xd3, 0x88, 0x14, 0x28, 0xf4, 0x7a, 0x13, 0x96, 0x62, 0xee, 0xff,
            0xbe, 0x40, 0x14, 0x00, 0xf6, 0xa3, 0x09, 0x00, 0x00, 0x00, 0x0e, 0x00, 0xdb, 0xbf,
        ];
        let baseline_ecef_expectation = crate::messages::navigation::MsgBaselineECEF {
            sender_id: Some(0x88d3),
            accuracy: 0,
            flags: 0,
            n_sats: 14,
            tow: 326825000,
            x: -1154410,
            y: 1327294,
            z: 631798,
        };
        let (consumed, msg) = match parse_sbp(&packet[..]) {
            ParseResult::Ok((consumed, SBP::MsgBaselineECEF(msg))) => (consumed, msg),
            err => panic!("unexpected parse result: {:?}", err),
        };
        assert_eq!(packet.len(), consumed);
        assert_eq!(msg.sender_id, baseline_ecef_expectation.sender_id);
        assert_eq!(msg.accuracy, baseline_ecef_expectation.accuracy);
        assert_eq!(msg.flags, baseline_ecef_expectation.flags);
        assert_eq!(msg.n_sats, baseline_ecef_expectation.n_sats);
        assert_eq!(msg.tow, baseline_ecef_expectation.tow);
        assert_eq!(msg.x, baseline_ecef_expectation.x);
        assert_eq!(msg.y, baseline_ecef_expectation.y);
        assert_eq!(msg.z, baseline_ecef_expectation.z);
    }

    /// Test parsing when we don't have enough data for a frame message:
    /// truncating the packet by one byte must yield `Incomplete`, while the
    /// full packet parses successfully.
    #[test]
    fn test_parse_sbp_eof() {
        let packet = [
            0x55u8, 0x0b, 0x02, 0xd3, 0x88, 0x14, 0x28, 0xf4, 0x7a, 0x13, 0x96, 0x62, 0xee, 0xff,
            0xbe, 0x40, 0x14, 0x00, 0xf6, 0xa3, 0x09, 0x00, 0x00, 0x00, 0x0e, 0x00, 0xdb, 0xbf,
        ];
        let res = parse_sbp(&packet[..packet.len() - 1]);
        assert!(matches!(res, ParseResult::Incomplete));
        let res = parse_sbp(&packet[..]);
        assert!(matches!(
            res,
            ParseResult::Ok((28, SBP::MsgBaselineECEF(_)))
        ));
    }

    /// A corrupted frame must surface as `Error::CrcError`, and the iterator
    /// must recover and parse the following valid frame.
    #[test]
    fn test_parse_sbp_crc_error() {
        let packet = vec![
            // Start with a mostly valid message, with a single byte error
            0x55, 0x0c, // This byte should be 0x0b, changed to intentionally cause a CRC error
            0x02, 0xd3, 0x88, 0x14, 0x28, 0xf4, 0x7a, 0x13, 0x96, 0x62, 0xee, 0xff, 0xbe, 0x40,
            0x14, 0x00, 0xf6, 0xa3, 0x09, 0x00, 0x00, 0x00, 0x0e, 0x00, 0xdb, 0xbf, 0xde, 0xad,
            0xbe, 0xef, // Include another valid message to properly parse
            0x55, 0x0b, 0x02, 0xd3, 0x88, 0x14, 0x28, 0xf4, 0x7a, 0x13, 0x96, 0x62, 0xee, 0xff,
            0xbe, 0x40, 0x14, 0x00, 0xf6, 0xa3, 0x09, 0x00, 0x00, 0x00, 0x0e, 0x00, 0xdb, 0xbf,
            0xde, 0xad, 0xbe, 0xef,
        ];
        let reader = Cursor::new(packet);
        let mut msgs = crate::iter_messages(reader);
        let res = msgs.next().unwrap().unwrap_err();
        assert!(matches!(res, Error::CrcError { .. }));
        let res = msgs.next().unwrap();
        assert!(res.is_ok());
    }

    /// Two back-to-back valid frames yield exactly two Ok messages.
    #[test]
    fn test_parser_iter() {
        let mut payload = vec![
            0x55u8, 0x0b, 0x02, 0xd3, 0x88, 0x14, 0x28, 0xf4, 0x7a, 0x13, 0x96, 0x62, 0xee, 0xff,
            0xbe, 0x40, 0x14, 0x00, 0xf6, 0xa3, 0x09, 0x00, 0x00, 0x00, 0x0e, 0x00, 0xdb, 0xbf,
        ];
        // Duplicate the frame so the stream contains two messages.
        payload.append(&mut payload.clone());
        let input = Cursor::new(payload);
        let mut count = 0;
        for msg in crate::iter_messages(input) {
            assert!(msg.is_ok());
            count += 1;
        }
        assert_eq!(count, 2);
    }

    /// A message whose string payload contains invalid UTF-8 must still parse,
    /// and the serialized size must be preserved.
    #[test]
    fn test_invalid_utf8() {
        let packet = vec![
            0x55, 0xa7, 0x0, 0x0, 0x10, 0x48, 0x8, 0x0, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f,
            0x6e, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xb6, 0xe8, 0xab,
        ];
        let reader = Cursor::new(packet);
        let mut msgs = crate::iter_messages(reader);
        let sbp_result = msgs.next().unwrap();
        assert!(sbp_result.is_ok());
        let sbp_message = sbp_result.unwrap();
        assert_eq!(sbp_message.sbp_size(), 72);
    }

    /// Lossy String conversion grows by the replacement-character expansion
    /// while the raw byte view keeps the original length.
    #[test]
    fn test_read_string_invalid_utf8() {
        // (9 * 8) - 2 = 70
        let buf = vec![
            0x73, 0x6f, 0x6c, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
            0x0, 0x0, 0xb6,
        ];
        let mut slice = &buf[..];
        let sbp_string = read_string(&mut slice).unwrap();
        let string: String = sbp_string.clone().into();
        let vec: Vec<u8> = sbp_string.into();
        // The last 0xb6 gets transformed into 3 UTF-8 bytes
        // (U+FFFD REPLACEMENT CHARACTER)
        assert_eq!(string.len(), 69 + 3);
        assert_eq!(vec.len(), 70);
    }

    /// `read_string` drains the whole slice; `read_string_limit` reads at
    /// most `limit` bytes per call, so repeated calls chunk the input.
    #[test]
    fn test_read_string() {
        let v = b"hi, imma string";
        let mut slice = &v[..];
        let string: String = read_string(&mut slice).unwrap().into();
        assert_eq!(string, "hi, imma string".to_string());
        let string: String = read_string(&mut slice).unwrap().into();
        assert_eq!(string, "".to_string());
        let v = b"hi, imma string";
        let mut slice = &v[..];
        let string: String = read_string_limit(&mut slice, 8).unwrap().into();
        assert_eq!(string, "hi, imma".to_string());
        let string: String = read_string_limit(&mut slice, 8).unwrap().into();
        assert_eq!(string, " string".to_string());
    }

    /// Four little-endian bytes decode into two u16 values.
    #[test]
    fn test_read_u16_array() {
        // A basic unit test for read_u16_array, LittleEndian convention assumed everywhere
        let mock_data: [u8; 4] = [0b00000001, 0b00010000, 0b00000010, 0b00000001];
        let mut expected_vec = Vec::with_capacity(2);
        // 0b00010000+0b00000001
        expected_vec.push(4097);
        expected_vec.push(258);
        let returned_vec = read_u16_array(&mut &mock_data[..]).unwrap();
        assert_eq!(expected_vec, returned_vec);
    }
}
|
pub mod cors;
pub mod trace;
|
pub mod act1;
|
//!
//! This module specifies the `Numeric` trait.
//!
use crate::prelude::*;
/// Common trait for all byte arrays and sequences.
pub trait SeqTrait<T: Clone>:
    Index<usize, Output = T> + IndexMut<usize, Output = T> + Sized
{
    /// Number of elements in the sequence.
    fn len(&self) -> usize;
    /// Borrowing iterator over the elements.
    fn iter(&self) -> core::slice::Iter<T>;
    /// Create a new sequence of length `len`.
    /// (Element initialization is implementor-defined — not visible here.)
    fn create(len: usize) -> Self;
    /// Update this sequence with `l` elements of `v`, starting at `start_in`,
    /// at `start_out`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hacspec_lib::*;
    ///
    /// let mut s = Seq::<u8>::new(5);
    /// let tmp = Seq::<u8>::from_native_slice(&[2, 3]);
    /// s = s.update_slice(2, &tmp, 1, 1);
    /// // assert_eq!(s, Seq::<u8>::from_array(&[0, 0, 3, 0, 0]));
    /// ```
    fn update_slice<A: SeqTrait<T>>(
        self,
        start_out: usize,
        v: &A,
        start_in: usize,
        len: usize,
    ) -> Self;
    /// Update this sequence with `v` starting at `start`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hacspec_lib::*;
    ///
    /// let mut s = Seq::<u8>::new(5);
    /// let tmp = Seq::<u8>::from_native_slice(&[2, 3]);
    /// s = s.update(2, &tmp);
    /// // assert_eq!(s, Seq::<u8>::from_array(&[0, 0, 2, 3, 0]));
    /// ```
    #[cfg_attr(feature = "use_attributes", in_hacspec)]
    fn update<A: SeqTrait<T>>(self, start: usize, v: &A) -> Self {
        let len = v.len();
        self.update_slice(start, v, 0, len)
    }
    /// Update the beginning of this sequence with all of `v`.
    #[cfg_attr(feature = "use_attributes", in_hacspec)]
    fn update_start<A: SeqTrait<T>>(self, v: &A) -> Self {
        let len = v.len();
        self.update_slice(0, v, 0, len)
    }
}
/// This trait extends the `Numeric` trait and is implemented by all integer
/// types. It offers bit manipulation, instantiation from literal, and convenient
/// constants.
pub trait Integer: Numeric {
    /// Bit width of the integer type.
    const NUM_BITS: usize;
    // Some useful values.
    // Not constants because math integers can't do that.
    #[allow(non_snake_case)]
    fn ZERO() -> Self;
    #[allow(non_snake_case)]
    fn ONE() -> Self;
    #[allow(non_snake_case)]
    fn TWO() -> Self;
    /// Get an integer with value `val`.
    fn from_literal(val: u128) -> Self;
    /// Read a hex string (starting with 0x) into an `Integer`.
    fn from_hex_string(s: &String) -> Self;
    /// Extract bit `i` of `self`, returned as a `Self` value.
    fn get_bit(self, i: usize) -> Self;
    /// Set bit `i` of `self` to `b`.
    fn set_bit(self, b: Self, i: usize) -> Self;
    /// Set bit `pos` of `self` to bit `yi` of `y`.
    fn set(self, pos: usize, y: Self, yi: usize) -> Self;
    /// Rotate the bits of `self` left by `n`.
    fn rotate_left(self, n: usize) -> Self;
    /// Rotate the bits of `self` right by `n`.
    fn rotate_right(self, n: usize) -> Self;
}
/// A secret integer with an associated public counterpart type;
/// `classify` lifts a public value into the secret type.
pub trait SecretInteger: Integer {
    type PublicVersion: PublicInteger;
    fn classify(x: Self::PublicVersion) -> Self;
}
/// `SecretInteger` refinement for `Copy` types.
pub trait SecretIntegerCopy: SecretInteger + Copy {
    type PublicVersionCopy: PublicIntegerCopy;
    fn classify(x: Self::PublicVersionCopy) -> Self;
}
/// A public integer with an associated secret counterpart type.
pub trait PublicInteger: Integer {
    type SecretVersion: Integer;
}
/// `PublicInteger` refinement for `Copy` types.
pub trait PublicIntegerCopy: PublicInteger + Copy {
    type SecretVersionCopy: Integer + Copy;
}
/// Marker trait for unsigned integers.
pub trait UnsignedInteger: Integer {}
/// Marker trait for unsigned `Copy` integers.
pub trait UnsignedIntegerCopy: UnsignedInteger + Copy {}
/// Marker trait for signed integers.
pub trait SignedInteger: Integer {}
/// Marker trait for signed `Copy` integers.
pub trait SignedIntegerCopy: SignedInteger + Copy {}
/// Unsigned secret integers: byte-level (de)serialization into secret `U8`
/// sequences, in both endiannesses.
pub trait UnsignedSecretInteger: UnsignedInteger + SecretInteger {
    fn to_le_bytes(self) -> Seq<U8>;
    fn to_be_bytes(self) -> Seq<U8>;
    fn from_le_bytes(x: &Seq<U8>) -> Self;
    fn from_be_bytes(x: &Seq<U8>) -> Self;
    /// Get byte `i` of this integer.
    #[inline]
    #[cfg_attr(feature = "use_attributes", in_hacspec)]
    fn get_byte(self, i: usize) -> Self {
        // Shift byte `i` down, then mask with (1 << 8) - 1 = 0xff.
        (self >> (i * 8)) & ((Self::ONE() << 8) - Self::ONE())
    }
}
/// `UnsignedSecretInteger` refinement for `Copy` types.
pub trait UnsignedSecretIntegerCopy: UnsignedSecretInteger + SecretIntegerCopy {}
/// Unsigned public integers: byte-level (de)serialization into plain `u8`
/// sequences, in both endiannesses.
pub trait UnsignedPublicInteger: UnsignedInteger + PublicInteger {
    fn to_le_bytes(self) -> Seq<u8>;
    fn to_be_bytes(self) -> Seq<u8>;
    fn from_le_bytes(x: &Seq<u8>) -> Self;
    fn from_be_bytes(x: &Seq<u8>) -> Self;
}
/// `UnsignedPublicInteger` refinement for `Copy` types.
pub trait UnsignedPublicIntegerCopy: UnsignedPublicInteger + PublicIntegerCopy {}
/// Modular-arithmetic operations shared by all numeric types.
pub trait ModNumeric {
    /// `(self - rhs) % n`
    fn sub_mod(self, rhs: Self, n: Self) -> Self;
    /// `(self + rhs) % n`
    fn add_mod(self, rhs: Self, n: Self) -> Self;
    /// `(self * rhs) % n`
    fn mul_mod(self, rhs: Self, n: Self) -> Self;
    /// `(self ^ exp) % n`
    fn pow_mod(self, exp: Self, n: Self) -> Self;
    /// `self % n`
    fn modulo(self, n: Self) -> Self;
    /// `self % n` that always returns a positive integer
    fn signed_modulo(self, n: Self) -> Self;
    /// `|self|`
    fn absolute(self) -> Self;
}
/// Marker trait for `Copy` numerics.
pub trait NumericCopy: Copy {}
/// The `Numeric` trait has to be implemented by all numeric objects.
pub trait Numeric:
    ModNumeric
    + Add<Self, Output = Self>
    + Sub<Self, Output = Self>
    + Mul<Self, Output = Self>
    + BitXor<Self, Output = Self>
    + BitOr<Self, Output = Self>
    + BitAnd<Self, Output = Self>
    + Shl<usize, Output = Self>
    + Shr<usize, Output = Self>
    + Not<Output = Self>
    + Default
    + Clone
    + Debug
{
    /// Return largest value that can be represented.
    fn max_val() -> Self;
    /// Wrapping (modular) addition.
    fn wrap_add(self, rhs: Self) -> Self;
    /// Wrapping (modular) subtraction.
    fn wrap_sub(self, rhs: Self) -> Self;
    /// Wrapping (modular) multiplication.
    fn wrap_mul(self, rhs: Self) -> Self;
    /// Wrapping (modular) division.
    fn wrap_div(self, rhs: Self) -> Self;
    /// `self ^ exp` where `exp` is a `u32`.
    fn exp(self, exp: u32) -> Self;
    /// `self ^ exp` where `exp` is a `Self`.
    fn pow_self(self, exp: Self) -> Self;
    /// Division.
    fn divide(self, rhs: Self) -> Self;
    /// Invert self modulo n.
    fn inv(self, n: Self) -> Self;
    // Comparison functions returning bool.
    fn equal(self, other: Self) -> bool;
    fn greater_than(self, other: Self) -> bool;
    fn greater_than_or_equal(self, other: Self) -> bool;
    fn less_than(self, other: Self) -> bool;
    fn less_than_or_equal(self, other: Self) -> bool;
    // Comparison functions returning a bit mask (0x0..0 or 0xF..F).
    fn not_equal_bm(self, other: Self) -> Self;
    fn equal_bm(self, other: Self) -> Self;
    fn greater_than_bm(self, other: Self) -> Self;
    fn greater_than_or_equal_bm(self, other: Self) -> Self;
    fn less_than_bm(self, other: Self) -> Self;
    fn less_than_or_equal_bm(self, other: Self) -> Self;
}
|
#[cfg(test)]
mod cli {
    use std::process::Command;
    use assert_cmd::prelude::*;
    use predicates::prelude::*;
    // NOTE(review): `Command::main_binary()` is deprecated in newer
    // `assert_cmd` releases in favor of `Command::cargo_bin(env!("CARGO_PKG_NAME"))`
    // — confirm against the pinned assert_cmd version before migrating.

    /// `-c <pattern>`: exit code 0 when at least one input line matches.
    #[test]
    fn should_return_exitcode_0_when_matches_are_found() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg("-c").arg(".name");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{}
{\"name\":\"jeff goldblum\",\"list\":[]}
{\"list\":[{\"name\":\"jeff goldblum\"},{\"name\":\"John Doe\"}]}\n",
        );
        assert_cmd.assert().success().code(predicate::eq(0));
    }

    /// `-c <pattern>`: exit code 1 when no input line matches.
    #[test]
    fn should_return_exitcode_1_when_no_matches_are_found() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg("-c").arg(".age");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{}
{\"name\":\"jeff goldblum\",\"list\":[]}
{\"list\":[{\"name\":\"jeff goldblum\"},{\"name\":\"John Doe\"}]}\n",
        );
        assert_cmd.assert().failure().code(predicate::eq(1));
    }

    /// `-q` (quiet): match found -> exit 0 and no stdout output.
    #[test]
    fn should_only_return_exitcode_when_a_match_is_found_and_quiet_mode_flag_is_specified() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg("-q").arg(".name");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{}
{\"name\":\"jeff goldblum\",\"list\":[]}
{\"list\":[{\"name\":\"jeff goldblum\"},{\"name\":\"John Doe\"}]}\n",
        );
        assert_cmd
            .assert()
            .success()
            .code(predicate::eq(0))
            .stdout("");
    }

    /// `-q` (quiet): no match -> exit 1 and no stdout output.
    #[test]
    fn should_only_return_exitcode_when_no_matches_are_found_and_quiet_mode_flag_is_specified() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg("-q").arg(".age");
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            "{}
{\"name\":\"jeff goldblum\",\"list\":[]}
{\"list\":[{\"name\":\"jeff goldblum\"},{\"name\":\"John Doe\"}]}\n",
        );
        assert_cmd
            .assert()
            .failure()
            .code(predicate::eq(1))
            .stdout("");
    }

    /// Multiple `-e` patterns are OR-combined: a line matching either
    /// pattern is printed, preserving input order.
    #[test]
    fn should_match_multiple_patterns_when_multiple_patterns_are_provided() {
        let mut cmd = Command::main_binary().unwrap();
        cmd.arg("-e").arg(r#"{"eye_color":"yellow"}"#);
        cmd.arg("-e").arg(r#"{"hair_color":"n/a"}"#);
        let mut stdin_cmd = cmd.with_stdin();
        let mut assert_cmd = stdin_cmd.buffer(
            r#"{"name":"Luke Skywalker","hair_color":"blond","eye_color":"blue"}
{"name":"C-3PO","hair_color":"n/a","eye_color":"yellow"}
{"name":"R2-D2","hair_color":"n/a","eye_color":"red"}
{"name":"Admiral Ackbar","hair_color":"n/a","eye_color":"yellow"}
{"name":"Obi-Wan Kenobi","hair_color":"auburn, white","eye_color":"blue-gray"}"#,
        );
        assert_cmd.assert().success().stdout(
            r#"{"name":"C-3PO","hair_color":"n/a","eye_color":"yellow"}
{"name":"R2-D2","hair_color":"n/a","eye_color":"red"}
{"name":"Admiral Ackbar","hair_color":"n/a","eye_color":"yellow"}
"#,
        );
    }
}
|
use std::env;
use std::fs;
use std::path::PathBuf;
/// Build script: when the `rt` feature is enabled, copy the device-specific
/// `device.x` linker fragment into `OUT_DIR` and register it on the linker
/// search path.
fn main() {
    if env::var_os("CARGO_FEATURE_RT").is_some() {
        let out = PathBuf::from(env::var_os("OUT_DIR").unwrap());
        println!("cargo:rustc-link-search={}", out.display());
        // Feature flag -> linker fragment, checked in declaration order;
        // the first enabled device feature wins.
        let devices = [
            ("CARGO_FEATURE_STM32L412", "src/stm32l412/device.x"),
            ("CARGO_FEATURE_STM32L4P5", "src/stm32l4p5/device.x"),
            ("CARGO_FEATURE_STM32L4R5", "src/stm32l4r5/device.x"),
            ("CARGO_FEATURE_STM32L4R9", "src/stm32l4r9/device.x"),
            ("CARGO_FEATURE_STM32L4X1", "src/stm32l4x1/device.x"),
            ("CARGO_FEATURE_STM32L4X2", "src/stm32l4x2/device.x"),
            ("CARGO_FEATURE_STM32L4X3", "src/stm32l4x3/device.x"),
            ("CARGO_FEATURE_STM32L4X5", "src/stm32l4x5/device.x"),
            ("CARGO_FEATURE_STM32L4X6", "src/stm32l4x6/device.x"),
        ];
        let device_file = devices
            .iter()
            .find(|(feature, _)| env::var_os(feature).is_some())
            .map(|(_, path)| *path)
            .expect("No device features selected");
        fs::copy(device_file, out.join("device.x")).unwrap();
        println!("cargo:rerun-if-changed={}", device_file);
    }
    println!("cargo:rerun-if-changed=build.rs");
}
|
use std::{cell::RefCell, rc::Rc};
use crate::prelude::World;
use cao_alloc::linear::LinearAllocator;
use cao_lang::prelude::*;
use super::*;
/// Create an empty `World` used as the baseline storage for tests.
fn init_basic_storage() -> World {
    World::new()
}
/// Fresh shared linear allocator (capacity 100_000_000 — units are
/// allocator-defined, presumably bytes) for VM test runs.
fn get_alloc() -> Rc<RefCell<LinearAllocator>> {
    Rc::new(RefCell::new(LinearAllocator::new(100_000_000)))
}
/// Build a table `{rq:1, rr:2, q:3, r:4}` in a cao-lang program and assert
/// that `parse_world_pos` decodes it as room (1,2), pos (3,4).
#[test]
fn test_parse_world_position() {
    let storage = init_basic_storage();
    let data = ScriptExecutionData::new(
        &storage,
        Default::default(),
        Default::default(),
        Default::default(),
        get_alloc(),
    );
    let mut vm = Vm::new(data).unwrap();
    // Native callback registered as "WorldPosition": receives the table built
    // by the script and performs the actual assertion.
    fn test_parse(
        _vm: &mut Vm<ScriptExecutionData>,
        inp: *mut FieldTable,
    ) -> Result<(), ExecutionError> {
        // SAFETY(review): `inp` is assumed to be a valid, live FieldTable
        // pointer handed over by the VM for the duration of this call —
        // guaranteed by the VM's native-call contract, not visible here.
        let res = unsafe { parse_world_pos(&*inp)? };
        assert_eq!(
            res,
            WorldPosition {
                room: Axial::new(1, 2),
                pos: Axial::new(3, 4)
            }
        );
        Ok(())
    }
    vm.register_function("WorldPosition", into_f1(test_parse));
    const PROGRAM: &str = r#"
lanes:
- cards:
- ty: CreateTable
- ty: SetVar
val: pos
- ty: ReadVar
val: pos
- ty: StringLiteral
val: rq
- ty: ScalarInt
val: 1
- ty: SetProperty
- ty: ReadVar
val: pos
- ty: StringLiteral
val: rr
- ty: ScalarInt
val: 2
- ty: SetProperty
- ty: ReadVar
val: pos
- ty: StringLiteral
val: q
- ty: ScalarInt
val: 3
- ty: SetProperty
- ty: ReadVar
val: pos
- ty: StringLiteral
val: r
- ty: ScalarInt
val: 4
- ty: SetProperty
- ty: ReadVar
val: pos
- ty: CallNative
val: "WorldPosition"
"#;
    let program = serde_yaml::from_str(PROGRAM).unwrap();
    let program = compile(&program, None).unwrap();
    vm.run(&program).unwrap();
}
/// Run a script that calls the native `say` function and check that a say
/// intent with the right entity and payload was recorded.
#[test]
fn test_say() {
    let mut storage = World::new();
    let entity_id = storage.insert_entity();
    let mut vm = Vm::new(ScriptExecutionData::new(
        &storage,
        Default::default(),
        entity_id,
        Default::default(),
        get_alloc(),
    ))
    .unwrap();
    const PROGRAM: &str = r#"
lanes:
- cards:
- ty: StringLiteral
val: "pog"
- ty: CallNative
val: "say"
"#;
    let program = serde_yaml::from_str(PROGRAM).unwrap();
    let program = compile(&program, None).unwrap();
    vm.register_function("say", into_f1(say));
    vm.run(&program).unwrap();
    let intent = vm.unwrap_aux().intents.say_intent.unwrap();
    assert_eq!(intent.entity, entity_id);
    assert_eq!(intent.payload.as_str(), "pog");
}
/// Calling an unregistered (over-long) native name must make the VM run fail.
/// Only "say" is registered; the script calls a garbage long name.
#[test]
fn test_say_bad_len() {
    let mut storage = World::new();
    let entity_id = storage.insert_entity();
    let mut vm = Vm::new(ScriptExecutionData::new(
        &storage,
        Default::default(),
        entity_id,
        Default::default(),
        get_alloc(),
    ))
    .unwrap();
    const PROGRAM: &str = r#"
lanes:
- cards:
- ty: StringLiteral
val: "pog"
- ty: CallNative
val: "sayasdsdadasdadasdasdasdasdsdsdsdsdsldkjskdjdlsjdklsjdklsjdklsjdaldasljdsldjkldsldjsldjkljaldjaldsljsljdsljd"
"#;
    let program = serde_yaml::from_str(PROGRAM).unwrap();
    let program = compile(&program, None).unwrap();
    vm.register_function("say", into_f1(say));
    vm.run(&program).unwrap_err();
}
|
use std::collections::{HashSet, VecDeque};
use crate::util::{lines, time};
/// Run and time both parts of day 6 against the puzzle input file.
pub fn day6() {
    println!("== Day 6 ==");
    let input = "src/day6/input.txt";
    time(part_a, input, "A");
    time(part_b, input, "B");
}
/// Part A: 1-based index just past the first run of 4 distinct characters
/// on the first line of the input file.
fn part_a(input: &str) -> usize {
    find_seq(lines(input)[0].as_str(), 4)
}
/// Find the first window of `size` consecutive, pairwise-distinct characters
/// in `input` and return the 1-based index of the window's last character.
/// Returns 0 when no such window exists (or when `size` is 0 / larger than
/// the input).
fn find_seq(input: &str, size: usize) -> usize {
    let chars: Vec<char> = input.chars().collect();
    if size == 0 || chars.len() < size {
        return 0;
    }
    chars
        .windows(size)
        // A window is a marker when collecting it into a set loses no elements.
        .position(|window| window.iter().collect::<HashSet<&char>>().len() == size)
        // `start` is the window's first index; its last character sits at
        // `start + size - 1`, so the 1-based answer is `start + size`.
        .map(|start| start + size)
        .unwrap_or(0)
}
/// Part B: same as part A but with a 14-character marker window.
fn part_b(input: &str) -> usize {
    find_seq(lines(input)[0].as_str(), 14)
}
#[cfg(test)]
mod tests {
    use super::*;
    // The #[ignore]d tests below depend on the real puzzle input file on
    // disk, so they only run when explicitly requested.
    #[ignore]
    #[test]
    fn runday() {
        day6();
    }
    #[ignore]
    #[test]
    fn real_a() {
        let input = "src/day6/input.txt";
        assert_eq!(1210, part_a(input));
    }
    #[ignore]
    #[test]
    fn real_b() {
        let input = "src/day6/input.txt";
        assert_eq!(3476, part_b(input));
    }
    #[test]
    fn part_a_test_input() {
        let input = "src/day6/test-input.txt";
        let result = part_a(input);
        assert_eq!(7, result);
    }
    // Published example cases for the 4-character marker.
    #[test]
    fn find_seq_test() {
        assert_eq!(5, find_seq("bvwbjplbgvbhsrlpgdmjqwftvncz", 4));
        assert_eq!(6, find_seq("nppdvjthqldpwncqszvftbrmjlhg", 4));
        assert_eq!(10, find_seq("nznrnfrfntjfmvfwmzdfjlvtqnbhcprsg", 4));
        assert_eq!(11, find_seq("zcfzfwzzqfrljwzlrfnpqdbhtmscgvjw", 4));
    }
    // Published example cases for the 14-character marker.
    #[test]
    fn part_b_test_input() {
        assert_eq!(19, find_seq("mjqjpqmgbljsphdztnvjfqwrcgsmlb", 14));
        assert_eq!(23, find_seq("bvwbjplbgvbhsrlpgdmjqwftvncz", 14));
        assert_eq!(23, find_seq("nppdvjthqldpwncqszvftbrmjlhg", 14));
        assert_eq!(29, find_seq("nznrnfrfntjfmvfwmzdfjlvtqnbhcprsg", 14));
        assert_eq!(26, find_seq("zcfzfwzzqfrljwzlrfnpqdbhtmscgvjw", 14));
    }
}
use std::time::SystemTime;
use bonsaidb::{
core::{
schema::{Collection, CollectionName, InvalidNameError, Schematic},
Error,
},
local::{config::Configuration, Database},
};
use serde::{Deserialize, Serialize};
/// A stored chat message: creation timestamp plus text contents.
#[derive(Debug, Serialize, Deserialize)]
struct Message {
    // Set to `SystemTime::now()` when the example inserts a message.
    pub timestamp: SystemTime,
    // The message text.
    pub contents: String,
}
impl Collection for Message {
    /// Fully-qualified collection name: authority "khonsulabs", name "messages".
    fn collection_name() -> Result<CollectionName, InvalidNameError> {
        CollectionName::new("khonsulabs", "messages")
    }
    /// This example defines no views on the collection.
    fn define_views(_schema: &mut Schematic) -> Result<(), Error> {
        Ok(())
    }
}
/// Example entry point: open a local database, insert one `Message`, read it
/// back by id, and print it.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let db = Database::<Message>::open_local(
        "basic.bonsaidb",
        Configuration::default(),
    )
    .await?;
    // Insert a new `Message` into the database. `Message` is a `Collection`
    // implementor, which makes them act in a similar fashion to tables in other
    // databases. `BonsaiDb` stores each "row" as a `Document`. This document
    // will have a unique ID, some other metadata, and your stored value. In
    // this case, `Message` implements `Serialize` and `Deserialize`, so we can
    // use convenience methods that return a `CollectionDocument`, moving all
    // needs of serialization behind the scenes.
    let document = Message {
        contents: String::from("Hello, World!"),
        timestamp: SystemTime::now(),
    }
    .insert_into(&db)
    .await?;
    // Retrieve the message using the id returned from the previous call. Both
    // `document` and `message_doc` should be identical.
    let message_doc = Message::get(document.header.id, &db)
        .await?
        .expect("couldn't retrieve stored item");
    println!(
        "Inserted message '{:?}' with id {}",
        message_doc.contents, message_doc.header.id
    );
    Ok(())
}
|
extern "C" {
    /// Defined outside Rust (C ABI) and resolved at link time.
    fn evenmorehello();
}
/// Print a greeting, then invoke the foreign `evenmorehello` hook.
pub fn helloer() {
    println!("I'm saying \"hello\" again!");
    // SAFETY: `evenmorehello` takes no arguments and returns nothing; we rely
    // on the linked C implementation upholding the declared C ABI signature.
    unsafe { evenmorehello() };
}
|
use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// Describes the photo of a chat
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ChatPhoto {
    // TDLib wire-format type tag, serialized as "@type".
    #[doc(hidden)]
    #[serde(rename(serialize = "@type", deserialize = "@type"))]
    td_name: String,
    // Request/response correlation id, serialized as "@extra".
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    extra: Option<String>,
    /// A small (160x160) chat photo. The file can be downloaded only before the photo is changed
    small: File,
    /// A big (640x640) chat photo. The file can be downloaded only before the photo is changed
    big: File,
}
impl RObject for ChatPhoto {
    #[doc(hidden)]
    fn td_name(&self) -> &'static str {
        "chatPhoto"
    }
    #[doc(hidden)]
    fn extra(&self) -> Option<String> {
        self.extra.clone()
    }
    /// Serialize this object to its TDLib JSON representation.
    fn to_json(&self) -> RTDResult<String> {
        Ok(serde_json::to_string(self)?)
    }
}
impl ChatPhoto {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
pub fn builder() -> RTDChatPhotoBuilder {
let mut inner = ChatPhoto::default();
inner.td_name = "chatPhoto".to_string();
inner.extra = Some(Uuid::new_v4().to_string());
RTDChatPhotoBuilder { inner }
}
pub fn small(&self) -> &File {
&self.small
}
pub fn big(&self) -> &File {
&self.big
}
}
#[doc(hidden)]
pub struct RTDChatPhotoBuilder {
    // Value being accumulated; `build` clones it out.
    inner: ChatPhoto,
}
impl RTDChatPhotoBuilder {
    /// Finish building, returning a clone of the accumulated value
    /// (the builder remains usable).
    pub fn build(&self) -> ChatPhoto {
        self.inner.clone()
    }
    /// Set the small (160x160) photo file.
    pub fn small<T: AsRef<File>>(&mut self, small: T) -> &mut Self {
        self.inner.small = small.as_ref().clone();
        self
    }
    /// Set the big (640x640) photo file.
    pub fn big<T: AsRef<File>>(&mut self, big: T) -> &mut Self {
        self.inner.big = big.as_ref().clone();
        self
    }
}
// Allow APIs taking `impl AsRef<ChatPhoto>` to accept either the value
// itself or an in-progress builder.
impl AsRef<ChatPhoto> for ChatPhoto {
    fn as_ref(&self) -> &ChatPhoto {
        self
    }
}
impl AsRef<ChatPhoto> for RTDChatPhotoBuilder {
    fn as_ref(&self) -> &ChatPhoto {
        &self.inner
    }
}
|
use crate::transaction::{PackageStatus, PackageStatusError};
use pahkat_types::package::Version;
/// Compare an installed version string against a candidate version.
///
/// Returns `RequiresUpdate` when the candidate is strictly newer, otherwise
/// `UpToDate` (note: a candidate older than the installed version is also
/// reported as `UpToDate`). Fails with `ParsingVersion` when the installed
/// version string cannot be parsed.
pub(crate) fn cmp(
    installed_version: &str,
    candidate_version: &Version,
) -> Result<PackageStatus, PackageStatusError> {
    let installed =
        Version::new(installed_version).map_err(|_| PackageStatusError::ParsingVersion)?;
    if candidate_version > &installed {
        Ok(PackageStatus::RequiresUpdate)
    } else {
        Ok(PackageStatus::UpToDate)
    }
}
|
use crate::player::Player;
/* On main led 7 segments
* 0
* --
* 1| 3|2
* --
* 5| |4
* --
* 6
*/
// we use u8 for bits because it is more visual and easy to edit than bool
// Segment patterns for digits 0-9 (and PINS_X for "invalid") on the main
// display, indexed per the segment map above.
const PINS_0: [u8; 7] = [1, 1, 1, 0, 1, 1, 1];
const PINS_1: [u8; 7] = [0, 0, 1, 0, 1, 0, 0];
const PINS_2: [u8; 7] = [1, 0, 1, 1, 0, 1, 1];
const PINS_3: [u8; 7] = [1, 0, 1, 1, 1, 0, 1];
const PINS_4: [u8; 7] = [0, 1, 1, 1, 1, 0, 0];
const PINS_5: [u8; 7] = [1, 1, 0, 1, 1, 0, 1];
const PINS_6: [u8; 7] = [1, 1, 0, 1, 1, 1, 1];
const PINS_7: [u8; 7] = [1, 0, 1, 0, 1, 0, 0];
const PINS_8: [u8; 7] = [1, 1, 1, 1, 1, 1, 1];
const PINS_9: [u8; 7] = [1, 1, 1, 1, 1, 0, 1];
const PINS_X: [u8; 7] = [1, 1, 0, 1, 0, 1, 1];
/* On ceiling 7 segments
 * 3
 * --
 * 2| 6|7.
 * -- 4
 * 1| |5.
 * --
 * 0
 */
// Both tables must be permutations of 0..=7: `mangle_pins` iterates over the
// table's values as output indices. The value 7 maps to "always off"
// (presumably the decimal point — see the `7.`/`5.` marks above; TODO confirm).
const CEILING_MANGLE: [usize; 8] = [6, 5, 1, 0, 7, 4, 3, 2];
const CEILING_UPWARDS_MANGLE: [usize; 8] = [0, 2, 4, 6, 7, 1, 3, 5];
/// Display and configuration state for the clock.
pub struct ClockData {
    // Current time; both initialized to 88 so every segment lights at boot.
    pub hours: u8,
    pub minutes: u8,
    pub has_alarm: bool,
    pub alarm_enabled: bool,
    // Error bitfield, 0..=15; rendered as four pins by `right_opts`.
    pub error: u8,
    pub regular_dim: u8, // percentage
    pub refresh_rate: u32, // hertz (regular 7 segments and ceiling led)
    pub ceiling_dim: u8, // percentage
    pub ceiling_upwards: bool,
    pub player: Player,
}
impl ClockData {
    /// Default state: "88:88" time (all segments on), alarm enabled but not
    /// set, 50% dimming, 100 Hz refresh.
    pub fn new() -> Self {
        ClockData {
            hours: 88,
            minutes: 88,
            has_alarm: false,
            alarm_enabled: true,
            error: 0,
            regular_dim: 50,
            refresh_rate: 100,
            ceiling_dim: 50,
            ceiling_upwards: true,
            player: Player::new(),
        }
    }
    /// Segment pattern for time digit `pos` (0 = hours tens .. 3 = minutes
    /// units); any other `pos` yields the PINS_X "invalid" pattern.
    fn get_time_pins(&self, pos: usize) -> [u8; 7] {
        let value = match pos {
            0 => (self.hours / 10) as u8,
            1 => (self.hours % 10) as u8,
            2 => (self.minutes / 10) as u8,
            3 => (self.minutes % 10) as u8,
            _ => 10,
        };
        match value {
            0 => PINS_0,
            1 => PINS_1,
            2 => PINS_2,
            3 => PINS_3,
            4 => PINS_4,
            5 => PINS_5,
            6 => PINS_6,
            7 => PINS_7,
            8 => PINS_8,
            9 => PINS_9,
            _ => PINS_X,
        }
    }
    /// Remap the 7-segment pattern for digit `pos` through `mangle` into the
    /// ceiling display's 8-pin layout.
    ///
    /// Relies on `mangle` being a permutation of 0..=7: iterating over its
    /// *values* then covers every output index exactly once. A table value of
    /// 7 forces that pin off (no 8th source segment exists).
    fn mangle_pins(&self, pos: usize, mangle: &[usize; 8]) -> [u8; 8] {
        let mut pins = [0; 8];
        let pin_list = self.get_time_pins(pos);
        for i in mangle {
            if mangle[*i] == 7 {
                pins[*i] = 0;
            } else {
                pins[*i] = pin_list[mangle[*i]];
            }
        }
        return pins;
    }
    /// Pins for main-display column `col`: digits at 0/2/4/6, alarm options
    /// at 1, the fixed colon separator at 3, error options at 5.
    pub fn get_row_pins_led(&self, col: usize) -> [u8; 7] {
        match col {
            0 => self.get_time_pins(0),
            1 => self.left_opts(),
            2 => self.get_time_pins(1),
            3 => [0, 1, 0, 0, 0, 1, 0],
            4 => self.get_time_pins(2),
            5 => self.right_opts(),
            6 => self.get_time_pins(3),
            _ => [0; 7],
        }
    }
    /// Pins for ceiling-display column `col`, orientation-dependent.
    ///
    /// NOTE(review): the branch naming looks inverted — `ceiling_upwards`
    /// selects CEILING_MANGLE while the else-branch uses
    /// CEILING_UPWARDS_MANGLE. Confirm against the hardware before changing.
    pub fn get_row_pins_ceiling(&self, col: usize) -> [u8; 8] {
        if self.ceiling_upwards {
            self.mangle_pins(col, &CEILING_MANGLE)
        } else {
            self.mangle_pins(col, &CEILING_UPWARDS_MANGLE)
        }
    }
    /// Alarm indicator pins: segment 5 = alarm set, segment 1 = alarm muted.
    fn left_opts(&self) -> [u8; 7] {
        let mut result = [0; 7];
        // left opts are alarm related
        if self.has_alarm {
            result[5] = 1;
        }
        if !self.alarm_enabled {
            result[1] = 1;
        }
        return result;
    }
    /// Error indicator pins: the error value (clamped to 15) is shown as a
    /// 4-bit pattern on segments 5, 4, 2, 1 (LSB first).
    #[rustfmt::skip]
    fn right_opts(&self) -> [u8; 7] {
        let mut result = [0; 7];
        // right opts are error related
        let err = if self.error < 16 { self.error } else { 15 };
        if err % 2 == 1 { result[5] = 1; }
        if (err / 2) % 2 == 1 { result[4] = 1; }
        if (err / 4) % 2 == 1 { result[2] = 1; }
        if (err / 8) % 2 == 1 { result[1] = 1; }
        return result;
    }
    /// Microseconds the PWM output should spend in the on (`up == true`) or
    /// off phase of one refresh period, per `regular_dim` percent duty cycle.
    pub fn pwm_time(&self, up: bool) -> u64 {
        let pct = if up {
            self.regular_dim as u64
        } else {
            100 - self.regular_dim as u64
        };
        // 1_000_000 / refresh_rate = full period in microseconds.
        return (1_000_000 / self.refresh_rate as u64) * pct / 100;
    }
}
|
use url::Url;
use WebSocket::{ Result, Error, ErrorKind };
use httparse;
use crypto::sha1;
use crypto::hmac::Hmac;
use crypto::mac::Mac;
use chrono::Utc;
use hex::ToHex;
use settings::auth::Authorization;
/// A WebSocket handshake request: the request path/query parsed as a URL,
/// plus the authorization settings used to validate it.
pub struct HttpData {
    // Parsed against a dummy `ws://localhost.com` base (only path/query matter).
    url: Url,
    auth: Authorization
}
impl HttpData {
pub fn new(path: &str, auth: Authorization) -> Result<Self> {
match Url::parse(&format!("ws://localhost.com{}", path)) {
Ok(url) => Ok(HttpData { url: url, auth: auth }),
Err(e) => Err(Error::new(ErrorKind::Http(httparse::Error::NewLine), format!("Cannot parse request: {}", e)))
}
}
pub fn get_group(&self) -> String {
let mut group = self.url.path().to_lowercase();
if group.starts_with("/") {
group.remove(0);
}
let len = match group.len() > 0 {
true => group.len()- 1,
_ => 0
};
if group.ends_with("/") {
group.remove(len);
}
group
}
pub fn validate(&self) -> Option<Error> {
let (token, public_key) = match self.get_token_and_public_key(
self.auth.get_token_name().unwrap_or("token".to_string()).as_str(),
self.auth.get_time_name().unwrap_or("nonce".to_string()).as_str()
) {
Some((t,k)) => (t,k),
_ => return Some(Error::new(ErrorKind::Http(httparse::Error::Token), format!("Not valid request.")))
};
let public_key_time: i64 = match public_key.parse() {
Ok(k) => k,
Err(_) => return Some(Error::new(ErrorKind::Http(httparse::Error::Token), format!("Not valid request. Public key is not integer: {:?}", public_key)))
};
if self.validate_time(public_key_time, self.auth.get_keep_alive()) == false {
return Some(Error::new(ErrorKind::Http(httparse::Error::Token), format!("Not valid request. Expired: {:?}", public_key)))
}
if self.validate_token(token.as_str(), public_key.as_str()) == false {
return Some(Error::new(ErrorKind::Http(httparse::Error::Token), format!("Not valid request. Token not valid: {}", token)))
}
None
}
fn get_token_and_public_key(&self, token_name: &str, public_key_name: &str) -> Option<(String, String)> {
let pairs = self.url.query_pairs();
let mut token: Option<String> = None;
let mut public_key: Option<String> = None;
for (key, value) in pairs {
if key == token_name {
token = Some(value.to_string());
}
if key == public_key_name {
public_key = Some(value.to_string());
}
}
match token.is_none() || public_key.is_none() {
true => {
error!(
"Not found token ([name={},value={:?}]) or public key ([name={},value={:?}]) in query {:?}",
token_name,
token,
public_key_name,
public_key,
self.url.query()
);
None
},
false => Some((token.unwrap_or("".to_string()), public_key.unwrap_or("".to_string())))
}
}
fn validate_token(&self, token: &str, public_key: &str) -> bool {
let mut auth = Hmac::new(sha1::Sha1::new(), self.auth.get_private_key().as_bytes());
auth.input(public_key.as_bytes());
if token != auth.result().code().to_hex() {
error!("Token not valid. Got [{}], should [{}]", token, auth.result().code().to_hex());
}
token == auth.result().code().to_hex()
}
fn validate_time(&self, nonce: i64, keep_alive: Option<i64>) -> bool {
let max_different_time = keep_alive.unwrap_or(120);
if 0 == max_different_time {
return true;
}
Utc::now().timestamp() - nonce < max_different_time
}
}
#[cfg(test)]
mod test {
    use utils::HttpData;
    use chrono::{ Utc, DateTime };
    use crypto::{ hmac, sha1 };
    use crypto::mac::Mac;
    use hex::ToHex;
    use settings::auth::Authorization;
    /// Default auth settings: fixed secret, default parameter names and
    /// keep-alive.
    fn get_auth_default() -> Authorization {
        Authorization {
            private_key: "usocksecret".to_string(),
            keep_alive: None,
            token_name: None,
            time_name: None
        }
    }
    /// Group extraction: lowercased path, leading/trailing slash stripped,
    /// query ignored.
    #[test]
    fn test_get_group() {
        let authorization_settings: Authorization = get_auth_default();
        let data: HttpData = HttpData::new("/hello/world?jytirr", authorization_settings.clone()).unwrap();
        assert_eq!(data.get_group(), "hello/world".to_string());
        let data: HttpData = HttpData::new("/hello/world/", authorization_settings.clone()).unwrap();
        assert_eq!(data.get_group(), "hello/world".to_string());
        let data: HttpData = HttpData::new("/heLlo/worlD/", authorization_settings).unwrap();
        assert_eq!(data.get_group(), "hello/world".to_string());
    }
    /// Nonce expiry: keep-alive 0 disables the check; the window is
    /// exclusive at exactly `keep_alive` seconds; `None` defaults to 120.
    #[test]
    fn test_validate_time() {
        let data: HttpData = HttpData::new("/hello/world?nonce=1504970846", get_auth_default()).unwrap();
        assert_eq!(true, data.validate_time(1504970846, Some(0)));
        assert_eq!(false, data.validate_time(1504970846, Some(120)));
        let time: DateTime<Utc> = Utc::now();
        assert_eq!(true, data.validate_time(time.timestamp(), Some(120)));
        assert_eq!(true, data.validate_time(time.timestamp() - 119i64, Some(120)));
        assert_eq!(false, data.validate_time(time.timestamp() - 120i64, Some(120)));
        assert_eq!(true, data.validate_time(time.timestamp() - 119i64, None));
    }
    /// The expected token is the precomputed HMAC-SHA1 of "1504970846" under
    /// the default secret.
    #[test]
    fn test_validate_token() {
        let data: HttpData = HttpData::new("/hello/world?nonce=1504970846", get_auth_default()).unwrap();
        assert_eq!(true, data.validate_token("c3c3358c4fe308b198ee875597b16606f1c728aa", "1504970846"));
    }
    /// Both parameters must be present (by exact name) for extraction to
    /// succeed.
    #[test]
    fn test_get_token_and_public_key() {
        let data: HttpData = HttpData::new("/hello/world?nonce=1504970846&my_token=token_value", get_auth_default()).unwrap();
        assert_eq!(Some(("token_value".to_string(), "1504970846".to_string())), data.get_token_and_public_key("my_token", "nonce"));
        assert_eq!(None, data.get_token_and_public_key("my_token", "nonc"));
        assert_eq!(None, data.get_token_and_public_key("my_toke", "nonce"));
    }
    /// End-to-end: a freshly HMAC-signed current nonce passes validation.
    #[test]
    fn test_validate() {
        let time = format!("{}", Utc::now().timestamp());
        let mut auth: hmac::Hmac<sha1::Sha1> = hmac::Hmac::new(sha1::Sha1::new(), "usocksecret".as_bytes());
        auth.input(time.as_bytes());
        let data: HttpData = HttpData::new(
            format!("/hello/world?nonce={}&token={}", time.as_str(), auth.result().code().to_hex()).as_str(),
            get_auth_default()
        ).unwrap();
        assert!(data.validate().is_none());
    }
}
|
use std::io::{Read, Write};
use dencode::{Decoder, Encoder, FramedRead, FramedWrite, IterSinkExt};
use serde_json::ser::Formatter;
use sbp::{
codec::{
json::{Json2JsonDecoder, Json2JsonEncoder, JsonDecoder, JsonEncoder},
sbp::{SbpDecoder, SbpEncoder},
},
Error, Result,
};
/// Convert a stream of SBP-JSON records from `input` into binary SBP frames
/// on `output`. See `maybe_send_buffered` for the `buffered`/`fatal_errors`
/// semantics.
pub fn json2sbp<R, W>(input: R, output: W, buffered: bool, fatal_errors: bool) -> Result<()>
where
    R: Read,
    W: Write,
{
    let source = FramedRead::new(input, JsonDecoder::new());
    let sink = SbpEncoder::framed(output);
    maybe_send_buffered(source, sink, buffered, fatal_errors)?;
    Ok(())
}
/// Re-serialize a stream of SBP-JSON records from `input` to `output` using
/// the given serde_json `formatter`. See `maybe_send_buffered` for the
/// `buffered`/`fatal_errors` semantics.
pub fn json2json<R, W, F>(
    input: R,
    output: W,
    formatter: F,
    buffered: bool,
    fatal_errors: bool,
) -> Result<()>
where
    R: Read,
    W: Write,
    F: Formatter + Clone,
{
    let source = FramedRead::new(input, Json2JsonDecoder {});
    let sink = FramedWrite::new(output, Json2JsonEncoder::new(formatter));
    maybe_send_buffered(source, sink, buffered, fatal_errors)?;
    Ok(())
}
/// Converts binary SBP frames into JSON, rendered with `formatter`.
pub fn sbp2json<R, W, F>(
    input: R,
    output: W,
    formatter: F,
    buffered: bool,
    fatal_errors: bool,
) -> Result<()>
where
    R: Read,
    W: Write,
    F: Formatter + Clone,
{
    let decoder = FramedRead::new(input, SbpDecoder {});
    let encoder = JsonEncoder::framed(output, formatter);
    maybe_send_buffered(decoder, encoder, buffered, fatal_errors)
}
/// Pumps every decoded frame from `source` into `sink`.
///
/// * `buffered == true` — forwards the entire stream in one `send_all` call.
/// * `buffered == false` — sends one message at a time; a decode error is
///   returned immediately when `fatal_errors` is set, otherwise it is logged
///   to stderr and the pump continues.
fn maybe_send_buffered<R, W, D, E>(
    mut source: FramedRead<R, D>,
    mut sink: FramedWrite<W, E>,
    buffered: bool,
    fatal_errors: bool,
) -> Result<()>
where
    R: Read,
    W: Write,
    D: Decoder<Error = Error>,
    E: Encoder<D::Item, Error = Error>,
{
    if buffered {
        sink.send_all(source)?;
    } else {
        while let Some(msg) = source.next() {
            match msg {
                Ok(msg) => {
                    sink.send(msg)?;
                }
                // Fatal mode aborts on the first error; otherwise best-effort.
                Err(e) if fatal_errors => return Err(e),
                Err(e) => eprintln!("error: {}", e),
            }
        }
    }
    Ok(())
}
|
use std::env;
use std::path::PathBuf;
extern crate spatialos_gdk_codegen;
/// Build script: runs spatialos code generation into cargo's `OUT_DIR`.
fn main() {
    // Read OUT_DIR once instead of twice; both paths live under it.
    let out_dir = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR not set by cargo"));
    let json_path = out_dir.join("json");
    let out_path = out_dir.join("generated.rs");
    spatialos_gdk_codegen::codegen(json_path, out_path);
}
|
use crate::snapshot_utils::ArchiveFormat;
use crate::snapshot_utils::SnapshotVersion;
use solana_sdk::clock::Slot;
use std::path::PathBuf;
/// Snapshot configuration and runtime information.
///
/// Controls how often full and incremental snapshots are produced, where
/// they are stored, and how many are retained.
#[derive(Clone, Debug)]
pub struct SnapshotConfig {
    /// Generate a new full snapshot archive every this many slots
    pub full_snapshot_archive_interval_slots: Slot,
    /// Generate a new incremental snapshot archive every this many slots
    pub incremental_snapshot_archive_interval_slots: Slot,
    /// Path to the directory where snapshot archives are stored
    pub snapshot_archives_dir: PathBuf,
    /// Path to the directory where bank snapshots are stored
    pub bank_snapshots_dir: PathBuf,
    /// The archive format to use for snapshots
    pub archive_format: ArchiveFormat,
    /// Snapshot version to generate
    pub snapshot_version: SnapshotVersion,
    /// Maximum number of full snapshot archives to retain
    pub maximum_full_snapshot_archives_to_retain: usize,
    /// Maximum number of incremental snapshot archives to retain
    /// NOTE: Incremental snapshots will only be kept for the latest full snapshot
    pub maximum_incremental_snapshot_archives_to_retain: usize,
}
|
use chrono::prelude::*;
use std::fs::File;
use std::io::prelude::*;
use uuid::Uuid;
#[cfg(feature = "mocks")]
use mocktopus::macros::*;
/// Current timestamp as a timezone-less `NaiveDateTime`.
///
/// NOTE(review): this calls `naive_local()` on a `Utc` time, which strips a
/// zero offset — so the result is UTC wall-clock time, not the machine's
/// local time, despite the method name. Confirm callers expect UTC.
pub fn now() -> chrono::naive::NaiveDateTime {
    Utc::now().naive_local()
}
/// Reads the entire file at `path` into a UTF-8 `String`.
///
/// # Errors
/// Returns any I/O error from opening/reading the file, or `InvalidData`
/// if its contents are not valid UTF-8.
///
/// Takes `&str` instead of `&String` (existing callers passing `&String`
/// still work via deref coercion) and delegates to `std::fs::read_to_string`,
/// which pre-sizes the buffer from file metadata.
pub fn read_file_to_string(path: &str) -> std::io::Result<String> {
    std::fs::read_to_string(path)
}
/// Reads the entire file at `path` into a byte vector.
///
/// # Errors
/// Returns any I/O error from opening or reading the file.
///
/// Takes `&str` instead of `&String` (existing callers passing `&String`
/// still work via deref coercion) and delegates to `std::fs::read`, which
/// pre-sizes the buffer from file metadata.
pub fn read_file_to_bytes(path: &str) -> std::io::Result<Vec<u8>> {
    std::fs::read(path)
}
/// Generates a random (v4) UUID rendered in "simple" form
/// (32 hex digits, no hyphens).
// `mockable` lets tests stub this out when the "mocks" feature is enabled.
#[cfg_attr(feature = "mocks", mockable)]
pub fn gen_uuid() -> String {
    Uuid::new_v4().to_simple().to_string()
}
|
#[cfg(feature = "serde1")]
mod serde;
use core::num::NonZeroU64;
/// Handle to an entity.
///
/// It has two parts, an index and a generation.
///
/// Both are packed into a single `NonZeroU64`: the top `GEN_LEN` (16) bits
/// hold the generation and the low 48 bits hold `index + 1`. The `+ 1`
/// keeps the stored value non-zero, so `Option<EntityId>` is free
/// (niche optimization).
#[derive(Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct EntityId(pub(super) NonZeroU64);
impl EntityId {
    // Number of bits used by the generation
    pub(crate) const GEN_LEN: u64 = 16;
    // Low 48 bits select the index (stored as index + 1, see `new`).
    pub(super) const INDEX_MASK: u64 = !0 >> Self::GEN_LEN;
    // High 16 bits hold the generation.
    pub(super) const GEN_MASK: u64 = !Self::INDEX_MASK;
    /// Returns the index part of the EntityId.
    /// ⚠️ You shouldn't use it to index a storage.
    #[inline]
    pub fn index(self) -> u64 {
        // Stored as index + 1 (to keep the value non-zero); undo the offset.
        (self.0.get() & Self::INDEX_MASK) - 1
    }
    /// Returns the index part of the EntityId as an usize.
    /// ⚠️ You shouldn't use it to index a storage.
    #[inline]
    pub fn uindex(self) -> usize {
        self.index() as usize
    }
    /// Returns the generation part of the EntityId.
    #[inline]
    pub fn gen(self) -> u64 {
        // Shift the top GEN_LEN bits down to value position.
        (self.0.get() & Self::GEN_MASK) >> (64 - Self::GEN_LEN)
    }
    /// Make a new EntityId with the given index (generation 0).
    #[inline]
    pub(crate) fn new(index: u64) -> Self {
        // `index + 1` must still fit below the generation bits.
        assert!(index < Self::INDEX_MASK);
        // SAFE never zero
        EntityId(unsafe { NonZeroU64::new_unchecked(index + 1) })
    }
    /// Make a new `EntityId` with the given generation and index.
    /// It must be alive in the `World` it is used with.
    #[cfg(feature = "serde1")]
    #[inline]
    pub(crate) fn new_from_pair_unchecked(index: u64, gen: u16) -> Self {
        assert!(index < Self::INDEX_MASK);
        // SAFE never zero
        EntityId(unsafe {
            NonZeroU64::new_unchecked((index + 1) | ((gen as u64) << (64 - Self::GEN_LEN)))
        })
    }
    /// Modify the index, leaving the generation bits untouched.
    #[cfg(not(test))]
    #[inline]
    pub(super) fn set_index(&mut self, index: u64) {
        assert!(index < Self::INDEX_MASK);
        // SAFE never zero
        self.0 = unsafe { NonZeroU64::new_unchecked((self.0.get() & Self::GEN_MASK) | (index + 1)) }
    }
    /// Modify the index, leaving the generation bits untouched.
    // NOTE(review): `index + 1 <= INDEX_MASK` here is equivalent to the
    // non-test bound `index < INDEX_MASK`; consider unifying the two.
    #[cfg(test)]
    pub(crate) fn set_index(&mut self, index: u64) {
        assert!(index + 1 <= Self::INDEX_MASK);
        // SAFE never zero
        self.0 = unsafe { NonZeroU64::new_unchecked((self.0.get() & Self::GEN_MASK) | (index + 1)) }
    }
    /// Increments the generation, returns Err if gen + 1 == gen::MAX().
    #[inline]
    pub(super) fn bump_gen(&mut self) -> Result<(), ()> {
        // The threshold has only the top GEN_LEN - 1 bits set; values below
        // it still have room to increment the generation.
        if self.0.get() < !(!0 >> (Self::GEN_LEN - 1)) {
            // SAFE never zero
            self.0 = unsafe {
                NonZeroU64::new_unchecked(
                    // Rebuild from parts: (index + 1) in the low bits, the
                    // incremented generation in the high bits.
                    (self.index() + 1) | ((self.gen() + 1) << (64 - Self::GEN_LEN)),
                )
            };
            Ok(())
        } else {
            Err(())
        }
    }
    #[cfg(test)]
    pub(crate) fn zero() -> Self {
        // Raw value 1 == index 0, generation 0.
        EntityId(NonZeroU64::new(1).unwrap())
    }
    /// Returns a dead EntityId, it can be used as a null entity.
    pub fn dead() -> Self {
        // SAFE not zero
        // All bits set — an index `new` can never produce (it asserts
        // index < INDEX_MASK), so this cannot collide with a live id.
        EntityId(unsafe { NonZeroU64::new_unchecked(core::u64::MAX) })
    }
    // Bucket arithmetic used by the crate's sparse-set storage.
    pub(crate) fn bucket(self) -> usize {
        self.uindex() / crate::sparse_set::BUCKET_SIZE
    }
    pub(crate) fn bucket_index(self) -> usize {
        self.uindex() % crate::sparse_set::BUCKET_SIZE
    }
    pub(crate) fn shared_bucket(self) -> usize {
        self.uindex() / crate::sparse_set::SHARED_BUCKET_SIZE
    }
    pub(crate) fn shared_bucket_index(self) -> usize {
        self.uindex() % crate::sparse_set::SHARED_BUCKET_SIZE
    }
}
impl core::fmt::Debug for EntityId {
    /// Renders both unpacked components, e.g. `EntityId { index: 3, gen: 0 }`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let (index, gen) = (self.index(), self.gen());
        write!(f, "EntityId {{ index: {}, gen: {} }}", index, gen)
    }
}
#[test]
fn entity_id() {
    // A fresh id starts at index 0, generation 0.
    let mut entity_id = EntityId::new(0);
    assert_eq!(entity_id.index(), 0);
    assert_eq!(entity_id.gen(), 0);
    // Changing the index must leave the generation untouched.
    entity_id.set_index(701);
    assert_eq!(entity_id.index(), 701);
    assert_eq!(entity_id.gen(), 0);
    // Bumping the generation must leave the index untouched.
    entity_id.bump_gen().unwrap();
    entity_id.bump_gen().unwrap();
    entity_id.bump_gen().unwrap();
    assert_eq!(entity_id.index(), 701);
    assert_eq!(entity_id.gen(), 3);
    // And vice versa once the generation is non-zero.
    entity_id.set_index(554);
    assert_eq!(entity_id.index(), 554);
    assert_eq!(entity_id.gen(), 3);
}
|
//! Types for change streams using sessions.
use serde::de::DeserializeOwned;
use crate::{
cursor::{BatchValue, NextInBatchFuture},
error::Result,
ClientSession,
SessionCursor,
};
use super::{
event::{ChangeStreamEvent, ResumeToken},
get_resume_token,
ChangeStreamData,
WatchArgs,
};
/// A [`SessionChangeStream`] is a change stream that was created with a ClientSession that must
/// be iterated using one. To iterate, use [`SessionChangeStream::next`]:
///
/// ```
/// # use mongodb::{bson::Document, Client, error::Result};
/// #
/// # async fn do_stuff() -> Result<()> {
/// # let client = Client::with_uri_str("mongodb://example.com").await?;
/// # let mut session = client.start_session(None).await?;
/// # let coll = client.database("foo").collection::<Document>("bar");
/// #
/// let mut cs = coll.watch_with_session(None, None, &mut session).await?;
/// while let Some(event) = cs.next(&mut session).await? {
/// println!("{:?}", event)
/// }
/// #
/// # Ok(())
/// # }
/// ```
///
/// If a [`SessionChangeStream`] is still open when it goes out of scope, it will automatically be
/// closed via an asynchronous [killCursors](https://www.mongodb.com/docs/manual/reference/command/killCursors/) command executed
/// from its [`Drop`](https://doc.rust-lang.org/std/ops/trait.Drop.html) implementation.
pub struct SessionChangeStream<T>
where
    T: DeserializeOwned + Unpin,
{
    /// Underlying session-bound cursor the change events are read from.
    cursor: SessionCursor<T>,
    /// Original watch arguments, kept so the stream can be re-created when
    /// a resumable error occurs.
    args: WatchArgs,
    /// Resume bookkeeping (cached resume token, whether a resume was
    /// already attempted, whether a document was returned).
    data: ChangeStreamData,
}
impl<T> SessionChangeStream<T>
where
    T: DeserializeOwned + Unpin + Send + Sync,
{
    /// Wraps a raw session cursor together with the arguments needed to
    /// re-create the stream on a resumable error.
    pub(crate) fn new(cursor: SessionCursor<T>, args: WatchArgs, data: ChangeStreamData) -> Self {
        Self { cursor, args, data }
    }
    /// Returns the cached resume token that can be used to resume after the most recently returned
    /// change.
    ///
    /// See the documentation
    /// [here](https://www.mongodb.com/docs/manual/changeStreams/#change-stream-resume-token) for more
    /// information on change stream resume tokens.
    pub fn resume_token(&self) -> Option<ResumeToken> {
        self.data.resume_token.clone()
    }
    /// Update the type streamed values will be parsed as.
    pub fn with_type<D: DeserializeOwned + Unpin + Send + Sync>(self) -> SessionChangeStream<D> {
        SessionChangeStream::new(self.cursor.with_type(), self.args, self.data)
    }
    /// Retrieve the next result from the change stream.
    /// The session provided must be the same session used to create the change stream.
    ///
    /// ```
    /// # use bson::{doc, Document};
    /// # use mongodb::Client;
    /// # fn main() {
    /// # async {
    /// # let client = Client::with_uri_str("foo").await?;
    /// # let coll = client.database("foo").collection::<Document>("bar");
    /// # let other_coll = coll.clone();
    /// # let mut session = client.start_session(None).await?;
    /// let mut cs = coll.watch_with_session(None, None, &mut session).await?;
    /// while let Some(event) = cs.next(&mut session).await? {
    ///     let id = bson::to_bson(&event.id)?;
    ///     other_coll.insert_one_with_session(doc! { "id": id }, None, &mut session).await?;
    /// }
    /// # Ok::<(), mongodb::error::Error>(())
    /// # };
    /// # }
    /// ```
    pub async fn next(&mut self, session: &mut ClientSession) -> Result<Option<T>> {
        loop {
            let maybe_next = self.next_if_any(session).await?;
            match maybe_next {
                Some(t) => return Ok(Some(t)),
                // Empty batch but the cursor is still open: poll again.
                None if self.is_alive() => continue,
                // Cursor exhausted: the stream is finished.
                None => return Ok(None),
            }
        }
    }
    /// Returns whether the change stream will continue to receive events.
    pub fn is_alive(&self) -> bool {
        !self.cursor.is_exhausted()
    }
    /// Retrieve the next result from the change stream, if any.
    ///
    /// Where calling `next` will internally loop until a change document is received,
    /// this will make at most one request and return `None` if the returned document batch is
    /// empty. This method should be used when storing the resume token in order to ensure the
    /// most up to date token is received, e.g.
    ///
    /// ```
    /// # use mongodb::{Client, Collection, bson::Document, error::Result};
    /// # async fn func() -> Result<()> {
    /// # let client = Client::with_uri_str("mongodb://example.com").await?;
    /// # let coll: Collection<Document> = client.database("foo").collection("bar");
    /// # let mut session = client.start_session(None).await?;
    /// let mut change_stream = coll.watch_with_session(None, None, &mut session).await?;
    /// let mut resume_token = None;
    /// while change_stream.is_alive() {
    ///     if let Some(event) = change_stream.next_if_any(&mut session).await? {
    ///         // process event
    ///     }
    ///     resume_token = change_stream.resume_token();
    /// }
    /// #
    /// # Ok(())
    /// # }
    /// ```
    pub async fn next_if_any(&mut self, session: &mut ClientSession) -> Result<Option<T>> {
        loop {
            // Borrow the session only for the duration of one poll; copy out
            // everything needed after the borrow ends.
            let (next, post_batch_token, client) = {
                let mut stream = self.cursor.stream(session);
                let next = NextInBatchFuture::new(&mut stream).await;
                let post_batch_token = stream.post_batch_resume_token().cloned();
                let client = stream.client().clone();
                (next, post_batch_token, client)
            };
            match next {
                Ok(bv) => {
                    // Cache the newest resume token before surfacing a value.
                    if let Some(token) = get_resume_token(&bv, post_batch_token.as_ref())? {
                        self.data.resume_token = Some(token);
                    }
                    match bv {
                        BatchValue::Some { doc, .. } => {
                            self.data.document_returned = true;
                            return Ok(Some(bson::from_slice(doc.as_bytes())?));
                        }
                        BatchValue::Empty | BatchValue::Exhausted => return Ok(None),
                    }
                }
                // A resumable error triggers at most one automatic resume:
                // re-run the original watch and swap in the new cursor.
                Err(e) if e.is_resumable() && !self.data.resume_attempted => {
                    self.data.resume_attempted = true;
                    let args = self.args.clone();
                    let new_stream: SessionChangeStream<ChangeStreamEvent<()>> = client
                        .execute_watch_with_session(
                            args.pipeline,
                            args.options,
                            args.target,
                            Some(self.data.take()),
                            session,
                        )
                        .await?;
                    let new_stream = new_stream.with_type::<T>();
                    // Point the old cursor's drop-time cleanup at the new
                    // cursor's server address — presumably so the kill is
                    // routed correctly after a topology change; confirm.
                    self.cursor
                        .set_drop_address(new_stream.cursor.address().clone());
                    self.cursor = new_stream.cursor;
                    self.args = new_stream.args;
                    // After a successful resume, another resume must be allowed.
                    self.data.resume_attempted = false;
                    continue;
                }
                Err(e) => return Err(e),
            }
        }
    }
}
|
extern crate bindgen;
use cmake;
use std::env;
use std::path::PathBuf;
/// Build script: compiles cyclonedds and cyclocut as static libraries via
/// CMake, links them into this crate, and generates Rust FFI bindings for
/// `wrapper.h` with bindgen.
fn main() {
    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    let mut dir_builder = std::fs::DirBuilder::new();
    // `recursive(true)` also makes `create` succeed if the dir already exists.
    dir_builder.recursive(true);
    // Build cyclonedds
    let cyclonedds_dir = out_dir.join("cyclonedds-build");
    dir_builder.create(&cyclonedds_dir).unwrap();
    let cyclonedds = cmake::Config::new("cyclonedds")
        .define("BUILD_SHARED_LIBS", "OFF")
        .define("BUILD_IDLC", "NO")
        .define("ENABLE_SSL", "NO")
        .define("CMAKE_INSTALL_LIBDIR", "lib")
        .out_dir(cyclonedds_dir)
        .build();
    let cyclonedds_include = cyclonedds.join("include");
    let cyclonedds_lib = cyclonedds.join("lib");
    // Add cyclonedds lib to link
    println!(
        "cargo:rustc-link-search=native={}",
        cyclonedds_lib.display()
    );
    println!("cargo:rustc-link-lib=static=ddsc");
    // Build cyclocut
    let cyclocut_dir = out_dir.join("cyclocut-build");
    dir_builder.create(&cyclocut_dir).unwrap();
    // NOTE(review): the cyclonedds paths are passed both as environment
    // variables and as CMake defines — presumably cyclocut reads one or the
    // other depending on version; confirm before removing either.
    let cyclocut = cmake::Config::new("cyclocut")
        .env("CYCLONE_INCLUDE", &cyclonedds_include)
        .env("CYCLONE_LIB", &cyclonedds_lib)
        .define("CYCLONE_INCLUDE", cyclonedds_include.clone())
        .define("CYCLONE_LIB", cyclonedds_lib.clone())
        .define("BUILD_CDDS_UTIL_EXAMPLES", "OFF")
        .define("BUILD_SHARED_LIBS", "OFF")
        .define("CMAKE_INSTALL_LIBDIR", "lib")
        .out_dir(cyclocut_dir)
        .build();
    let cyclocut_include = cyclocut.join("include");
    let cyclocut_lib = cyclocut.join("lib");
    // Add cyclocut lib to link
    println!("cargo:rustc-link-search=native={}", cyclocut_lib.display());
    println!("cargo:rustc-link-lib=static=cdds-util");
    // Generate bindings
    let bindings = bindgen::Builder::default()
        .header("wrapper.h")
        .clang_arg(format!("-I{}", cyclonedds_include.to_str().unwrap()))
        .clang_arg(format!("-I{}", cyclocut_include.to_str().unwrap()))
        .generate_comments(false)
        .generate()
        .expect("Unable to generate bindings");
    bindings
        .write_to_file(out_dir.join("bindings.rs"))
        .expect("Couldn't write bindings!");
}
|
/// Tumblr v2 user API endpoint URLs.
mod api {
    // `'static` on a `const` is redundant (clippy: redundant_static_lifetimes);
    // a `&str` in const position is already `'static`.
    // GET
    pub const INFO: &str = "https://api.tumblr.com/v2/user/info";
    pub const DASHBOARD: &str = "https://api.tumblr.com/v2/user/dashboard";
    pub const LIKES: &str = "https://api.tumblr.com/v2/user/likes";
    pub const FOLLOWING: &str = "https://api.tumblr.com/v2/user/following";
    // POST
    pub const FOLLOW: &str = "https://api.tumblr.com/v2/user/follow";
    pub const UNFOLLOW: &str = "https://api.tumblr.com/v2/user/unfollow";
    pub const LIKE: &str = "https://api.tumblr.com/v2/user/like";
    pub const UNLIKE: &str = "https://api.tumblr.com/v2/user/unlike";
}
// --- external ---
use serde_json::Value;
// --- custom ---
use super::{TumblrClient, build_oauth_headers, build_query, build_params};
/// Optional query parameters for the user dashboard endpoint.
/// All fields are optional strings borrowed from the caller.
#[derive(Default)]
pub struct GetUserDashboardOptionalParams<'a> {
    limit: Option<&'a str>,
    offset: Option<&'a str>,
    // `type` is a Rust keyword, hence the raw identifier.
    r#type: Option<&'a str>,
    since_id: Option<&'a str>,
    reblog_info: Option<&'a str>,
    notes_info: Option<&'a str>,
}
impl<'a> GetUserDashboardOptionalParams<'a> {
    // set_attr! is a project macro defined elsewhere in the crate —
    // presumably it expands to a builder-style setter per field; confirm.
    set_attr!(self, limit);
    set_attr!(self, offset);
    set_attr!(self, r#type);
    set_attr!(self, since_id);
    set_attr!(self, reblog_info);
    set_attr!(self, notes_info);
}
/// Optional query parameters (paging window) for the user likes endpoint.
#[derive(Default)]
pub struct GetUserLikesOptionalParams<'a> {
    limit: Option<&'a str>,
    offset: Option<&'a str>,
    before: Option<&'a str>,
    after: Option<&'a str>,
}
impl<'a> GetUserLikesOptionalParams<'a> {
    // set_attr! is a project macro — presumably a per-field setter; confirm.
    set_attr!(self, limit);
    set_attr!(self, offset);
    set_attr!(self, before);
    set_attr!(self, after);
}
/// Optional query parameters (paging) for the user following endpoint.
#[derive(Default)]
pub struct GetUserFollowingOptionalParams<'a> {
    limit: Option<&'a str>,
    offset: Option<&'a str>,
}
impl<'a> GetUserFollowingOptionalParams<'a> {
    // set_attr! is a project macro — presumably a per-field setter; confirm.
    set_attr!(self, limit);
    set_attr!(self, offset);
}
impl TumblrClient {
    /// GET /user/info — fetch the authenticated user's account info.
    pub fn get_user_info(&self) -> Value {
        // Every call signs the request with OAuth 1.0a headers built from
        // the client's consumer and token key pairs.
        let headers = build_oauth_headers(
            "GET",
            api::INFO,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            None,
        );
        self.get(api::INFO, Some(headers))
            .json()
            .unwrap()
    }
    /// GET /user/dashboard — optionally filtered/paged via `optional_params`.
    pub fn get_user_dashboard(&self, optional_params: Option<GetUserDashboardOptionalParams>) -> Value {
        // set_params! is a project macro — presumably it keeps only the
        // fields that are `Some`; confirm against its definition.
        let params = if let Some(optional_params) = optional_params {
            set_params![
                ("limit", optional_params.limit),
                ("offset", optional_params.offset),
                ("type", optional_params.r#type),
                ("since_id", optional_params.since_id),
                ("reblog_info", optional_params.reblog_info),
                ("notes_info", optional_params.notes_info)
            ]
        } else { vec![] };
        // The query string goes into the URL; the same params must also be
        // included in the OAuth signature.
        let url = build_query(api::DASHBOARD, &params);
        let headers = build_oauth_headers(
            "GET",
            api::DASHBOARD,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(params)),
        );
        self.get(&url, Some(headers))
            .json()
            .unwrap()
    }
    /// GET /user/likes — the user's liked posts, optionally paged.
    pub fn get_user_likes(&self, optional_params: Option<GetUserLikesOptionalParams>) -> Value {
        let params = if let Some(optional_params) = optional_params {
            set_params![
                ("limit", optional_params.limit),
                ("offset", optional_params.offset),
                ("before", optional_params.before),
                ("after", optional_params.after)
            ]
        } else { vec![] };
        let url = build_query(api::LIKES, &params);
        let headers = build_oauth_headers(
            "GET",
            api::LIKES,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(params)),
        );
        self.get(&url, Some(headers))
            .json()
            .unwrap()
    }
    /// GET /user/following — blogs the user follows, optionally paged.
    pub fn get_user_following(&self, optional_params: Option<GetUserFollowingOptionalParams>) -> Value {
        let params = if let Some(optional_params) = optional_params {
            set_params![
                ("limit", optional_params.limit),
                ("offset", optional_params.offset)
            ]
        } else { vec![] };
        let url = build_query(api::FOLLOWING, &params);
        let headers = build_oauth_headers(
            "GET",
            api::FOLLOWING,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(params)),
        );
        self.get(&url, Some(headers))
            .json()
            .unwrap()
    }
    /// POST /user/follow — follow the blog at `url`.
    pub fn follow_blog(&self, url: &str) -> Value {
        // POST endpoints send the parameters as a form body; the same pairs
        // are signed into the OAuth headers.
        let form = vec![("url", url)];
        let headers = build_oauth_headers(
            "POST",
            api::FOLLOW,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(form.clone())),
        );
        self.post(api::FOLLOW, headers, &form)
            .json()
            .unwrap()
    }
    /// POST /user/unfollow — unfollow the blog at `url`.
    pub fn unfollow_blog(&self, url: &str) -> Value {
        let form = vec![("url", url)];
        let headers = build_oauth_headers(
            "POST",
            api::UNFOLLOW,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(form.clone())),
        );
        self.post(api::UNFOLLOW, headers, &form)
            .json()
            .unwrap()
    }
    /// POST /user/like — like the post identified by `id` + `reblog_key`.
    pub fn like_post(&self, id: &str, reblog_key: &str) -> Value {
        let form = vec![("id", id), ("reblog_key", reblog_key)];
        let headers = build_oauth_headers(
            "POST",
            api::LIKE,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(form.clone())),
        );
        self.post(api::LIKE, headers, &form)
            .json()
            .unwrap()
    }
    /// POST /user/unlike — remove a like from the post `id` + `reblog_key`.
    pub fn unlike_post(&self, id: &str, reblog_key: &str) -> Value {
        let form = vec![("id", id), ("reblog_key", reblog_key)];
        let headers = build_oauth_headers(
            "POST",
            api::UNLIKE,
            &self.keys.consumer(),
            Some(&self.keys.token()),
            Some(&build_params(form.clone())),
        );
        self.post(api::UNLIKE, headers, &form)
            .json()
            .unwrap()
    }
}
|
use pasture_core::{
containers::{
InterleavedPointBufferMutExt, PerAttributePointBufferMutExt, PerAttributeVecPointStorage,
PointBufferExt,
},
nalgebra::Vector3,
};
use pasture_core::{
containers::{InterleavedVecPointStorage, PerAttributePointBuffer},
layout::{
attributes::{INTENSITY, POSITION_3D},
PointType,
},
};
use pasture_derive::PointType;
/// We define a simple point type here that has two attributes: 3D position and intensity
// `repr(C)` fixes the field order/layout — presumably required so the
// in-memory struct matches the layout the derive describes; confirm.
#[repr(C)]
#[derive(PointType, Debug)]
struct SimplePoint {
    #[pasture(BUILTIN_POSITION_3D)]
    pub position: Vector3<f64>,
    #[pasture(BUILTIN_INTENSITY)]
    pub intensity: u16,
}
/// Walkthrough of pasture point buffers: builds the same two points in an
/// interleaved buffer and a per-attribute buffer, then demonstrates typed
/// iteration, mutation, attribute iteration, and slicing on each.
fn main() {
    // Create some points
    let points = vec![
        SimplePoint {
            position: Vector3::new(1.0, 2.0, 3.0),
            intensity: 42,
        },
        SimplePoint {
            position: Vector3::new(-1.0, -2.0, -3.0),
            intensity: 84,
        },
    ];
    // By default, our data is in interleaved format, because a struct is a form of interleaved data. So
    // let's create a buffer to hold our points:
    {
        let mut buffer = InterleavedVecPointStorage::new(SimplePoint::layout());
        // We can add interleaved data like so:
        buffer.push_points(points.as_slice());
        println!("Iterating over interleaved points:");
        // The buffer itself is not strongly typed, but there are some helper methods in the `PointBufferExt` trait to access the data in
        // a strongly typed fashion. `iter_point<T>` creates an iterator over strongly typed points in the buffer:
        for point in buffer.iter_point::<SimplePoint>() {
            println!("{:?}", point);
        }
        // The iterator returned by `iter_point<T>` iterates over the points by value. Let's try mutating the points instead. For this, we
        // can use the `InterleavedPointBufferMutExt` trait. `iter_point_mut<T>` creates an iterator over strongly typed mutable references
        // to the points in the buffer:
        for point_mut in buffer.iter_point_mut::<SimplePoint>() {
            point_mut.intensity *= 2;
        }
        // We can also directly slice our buffer (also see the docs of the `slice` method which explains the syntax)
        println!("Iterating over interleaved points slice:");
        let sliced = buffer.slice(1..2);
        for point in sliced.iter_point::<SimplePoint>() {
            println!("{:?}", point);
        }
    }
    // There are several different types of point buffers. Most code in Pasture can deal with any of these buffer types, though
    // sometimes this is not possible due to memory layout concerns or general performance.
    // Let's try a different type of buffer:
    {
        let mut buffer = PerAttributeVecPointStorage::new(SimplePoint::layout());
        // This buffer stores points with a different memory layout internally (PerAttribute as opposed to Interleaved). We can
        // still add our strongly typed points to it:
        buffer.push_points(points.as_slice());
        //... and iterate it:
        println!("Iterating over per-attribute points:");
        for point in buffer.iter_point::<SimplePoint>() {
            println!("{:?}", point);
        }
        // With the PerAttribute memory layout, we can iterate over specific attributes and even mutate them, instead of always
        // iterating over the whole point. This can give better performance in many cases.
        // As the buffer is not strongly typed, we need to specify the type of the attribute, similar to the call to `iter_point<T>`
        // before. In addition, we have to give Pasture an 'attribute specifier' to determine which attribute we want:
        println!("Iterating over a single attribute:");
        for position in buffer.iter_attribute::<Vector3<f64>>(&POSITION_3D) {
            // Notice that `iter_attribute<T>` returns `T` by value. It is available for all point buffer types, at the expense of
            // only receiving a copy of the attribute.
            println!("Position: {:?}", position);
        }
        // There are several builtin attribute specifiers in the namespace `pasture_core::layout::attributes`. These are the ones that
        // are used when you `#[derive(PointType)]` and say `#[pasture(BUILTIN_XYZ)]`. An attribute specifier internally uses a unique
        // name to identify the attribute, as well as the default datatype of the attribute. Using the builtin specifiers guarantees that
        // all attributes are always correctly addressed.
        // Let's try mutating a specific attribute. This is only possible for a buffer that stores data in PerAttribute memory layout. We
        // can use the `PerAttributePointBufferMutExt` extension trait, which gives us a method to obtain an iterator over mutable references
        // to attribute values:
        for intensity in buffer.iter_attribute_mut::<u16>(&INTENSITY) {
            *intensity *= 2;
        }
        // Just as with the Interleaved buffer, we can slice (but make sure the `PerAttributePointBuffer` trait is in scope!):
        println!("Iterating over per-attribute point slice:");
        let sliced = buffer.slice(1..2);
        for point in sliced.iter_point::<SimplePoint>() {
            println!("{:?}", point);
        }
    }
}
|
extern crate env_logger;
extern crate svg2polylines;
use std::env;
use std::fs;
use std::io::Read;
use std::process::exit;
use svg2polylines::Polyline;
/// CLI entry point: reads the SVG file named by the single argument and
/// prints every polyline extracted from it.
fn main() {
    // Logging
    env_logger::init();
    // Argument parsing: exactly one argument (the SVG path) is required.
    let args: Vec<_> = env::args().collect();
    if args.len() != 2 {
        println!("Usage: {} <path/to/file.svg>", args[0]);
        exit(1);
    }
    // Load file
    let mut svg_source = String::new();
    fs::File::open(&args[1])
        .unwrap()
        .read_to_string(&mut svg_source)
        .unwrap();
    // Parse data
    let polylines: Vec<Polyline> = svg2polylines::parse(&svg_source);
    // Print data
    println!("Found {} polylines.", polylines.len());
    for polyline in &polylines {
        println!("- {:?}", polyline);
    }
}
|
/// Icon image paths for a UI theme.
#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]
pub struct ThemeImages {
    // Icon shown for directories in file listings.
    directory_icon: String,
    // Icon shown for regular files.
    file_icon: String,
    // Icon for the save action.
    save_icon: String,
    // Icon for the settings action.
    settings_icon: String,
}
impl ThemeImages {
    /// Builds a theme image set from explicit icon paths.
    pub fn new(
        directory_icon: String,
        file_icon: String,
        save_icon: String,
        settings_icon: String,
    ) -> Self {
        Self {
            directory_icon,
            file_icon,
            save_icon,
            settings_icon,
        }
    }

    /// Path of the icon shown for directories.
    pub fn directory_icon(&self) -> String {
        self.directory_icon.to_owned()
    }

    /// Path of the icon shown for regular files.
    pub fn file_icon(&self) -> String {
        self.file_icon.to_owned()
    }

    /// Path of the save-action icon.
    pub fn save_icon(&self) -> String {
        self.save_icon.to_owned()
    }

    /// Path of the settings-action icon.
    pub fn settings_icon(&self) -> String {
        self.settings_icon.to_owned()
    }
}
impl Default for ThemeImages {
    /// Built-in icon set under `default/images/` — paths are relative,
    /// presumably resolved against the application's asset root; confirm.
    fn default() -> Self {
        Self {
            directory_icon: "default/images/directory-64x64.png".to_string(),
            file_icon: "default/images/file-64x64.png".to_string(),
            save_icon: "default/images/save-16x16.png".to_string(),
            settings_icon: "default/images/settings-16x16.png".to_string(),
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// Single fixture shared by all getter tests; each field gets a unique
    /// marker value so a mixed-up getter is caught. Replaces the fixture
    /// that was duplicated verbatim in each test.
    fn sample() -> ThemeImages {
        ThemeImages::new(
            "foo".to_owned(),
            "bar".to_owned(),
            "baz".to_owned(),
            "foz".to_owned(),
        )
    }
    #[test]
    fn assert_directory_icon() {
        assert_eq!(sample().directory_icon(), "foo");
    }
    #[test]
    fn assert_file_icon() {
        assert_eq!(sample().file_icon(), "bar");
    }
    #[test]
    fn assert_save_icon() {
        assert_eq!(sample().save_icon(), "baz");
    }
    #[test]
    fn assert_settings_icon() {
        assert_eq!(sample().settings_icon(), "foz");
    }
}
|
use crate::{
bin_u32,
classification::{depth::DepthBlock, quotes::QuoteClassifiedBlock},
debug,
input::InputBlock,
};
use std::marker::PhantomData;
// Block size in bytes — one bit per byte fits exactly in a `u32` mask.
const SIZE: usize = 32;
/// Works on a 32-byte slice, but uses a heuristic to quickly
/// respond to queries and not count the depth exactly unless
/// needed.
///
/// The heuristic checks if it is possible to achieve the queried
/// depth within the block by counting the number of opening
/// and closing structural characters. This can be done much
/// more quickly than precise depth calculation.
pub(crate) struct DepthVector32<'a, B: InputBlock<'a, SIZE>> {
    /// The quote-classified source block this depth information refers to.
    pub(crate) quote_classified: QuoteClassifiedBlock<B, u32, SIZE>,
    /// One bit per opening structural character in the block.
    pub(crate) opening_mask: u32,
    /// Number of bits set in `opening_mask`.
    pub(crate) opening_count: u32,
    /// One bit per closing structural character in the block.
    pub(crate) closing_mask: u32,
    /// Current byte position within the block.
    pub(crate) idx: usize,
    /// Running depth at `idx`.
    pub(crate) depth: i32,
    /// Carries the input lifetime `'a` without storing a reference.
    pub(crate) phantom: PhantomData<&'a ()>,
}
impl<'a, B: InputBlock<'a, SIZE>> DepthBlock<'a> for DepthVector32<'a, B> {
    #[inline(always)]
    fn advance_to_next_depth_decrease(&mut self) -> bool {
        // Position of the next closing character; SIZE (32) means none left.
        let next_closing = self.closing_mask.trailing_zeros() as usize;
        if next_closing == SIZE {
            return false;
        }
        bin_u32!("opening_mask", self.opening_mask);
        bin_u32!("closing_mask", self.closing_mask);
        // Shift both masks past the closing character (inclusive); the extra
        // `>>= 1` drops the closing bit itself.
        self.opening_mask >>= next_closing;
        self.closing_mask >>= next_closing;
        self.opening_mask >>= 1;
        self.closing_mask >>= 1;
        bin_u32!("new opening_mask", self.opening_mask);
        bin_u32!("new closing_mask", self.closing_mask);
        // Depth changes by (openings consumed) minus 1 for the closing
        // character we just passed.
        let new_opening_count = self.opening_mask.count_ones() as i32;
        let delta = (self.opening_count as i32) - new_opening_count - 1;
        self.opening_count = new_opening_count as u32;
        debug!("next_closing: {next_closing}");
        debug!("new_opening_count: {new_opening_count}");
        debug!("delta: {delta}");
        self.depth += delta;
        self.idx += next_closing + 1;
        true
    }
    #[inline(always)]
    fn get_depth(&self) -> isize {
        self.depth as isize
    }
    #[inline(always)]
    fn depth_at_end(&self) -> isize {
        // Remaining openings minus remaining closings, applied to the
        // current depth, gives the depth after the whole block.
        (((self.opening_count as i32) - (self.closing_mask.count_ones() as i32)) + self.depth) as isize
    }
    #[inline(always)]
    fn add_depth(&mut self, depth: isize) {
        self.depth += depth as i32;
    }
    #[inline(always)]
    fn estimate_lowest_possible_depth(&self) -> isize {
        // Lower bound: pretend every remaining closing applies before any
        // remaining opening.
        (self.depth - (self.closing_mask.count_ones() as i32)) as isize
    }
}
|
extern crate cc;
/// Compile-time switch: `true` when built with the `sse4` cargo feature.
#[cfg(feature = "sse4")]
fn is_enable_sse() -> bool {
    true
}
/// Fallback when the `sse4` feature is disabled.
#[cfg(not(feature = "sse4"))]
fn is_enable_sse() -> bool {
    false
}
/// Build script: compiles the vendored picohttpparser C sources, passing
/// `-msse4` to the C compiler when the `sse4` feature is enabled.
fn main() {
    let mut build = cc::Build::new();
    if is_enable_sse() {
        build.flag("-msse4");
    }
    build
        .file("deps/picohttpparser/picohttpparser.c")
        .include("deps/picohttpparser")
        .compile("libpicohttpparser.a");
}
|
use std::cell::RefCell;
use std::rc::Rc;
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use monkey::compiler;
use monkey::evaluator;
use monkey::evaluator::objects;
use monkey::lexer;
use monkey::parser;
use monkey::vm;
/// Lexes, parses, and compiles `input` to bytecode, runs it on the VM, and
/// returns the last value popped off the VM stack.
fn run_vm(input: String) -> objects::Object {
    // Fresh constant pool, global space, and symbol table per run so that
    // benchmark iterations do not share state.
    let mut constants: Vec<objects::Object> = vec![];
    let mut globals: vm::GlobalSpace = Default::default();
    let symbol_table = Rc::new(RefCell::new(compiler::SymbolTable::new_with_builtin()));
    let lexer = lexer::Lexer::new(input);
    let mut parser = parser::Parser::new(lexer);
    let program = parser.parse_program().unwrap();
    let mut comp = compiler::Compiler::new_with_state(Rc::clone(&symbol_table), &mut constants);
    comp.compile(program.into()).unwrap();
    let bytecode: vm::bytecode::Bytecode = comp.into();
    let mut machine = vm::VM::new_with_globals_store(bytecode, &mut globals);
    machine.run().unwrap();
    machine.last_popped_stack_elem().clone()
}
/// Lexes and parses `input`, then runs it through the tree-walking
/// evaluator with a fresh environment, returning the resulting object.
fn run_evaluator(input: String) -> objects::Object {
    let env = Rc::new(RefCell::new(evaluator::env::Environment::new(None)));
    let lexer = lexer::Lexer::new(input);
    let mut parser = parser::Parser::new(lexer);
    let program = parser.parse_program().unwrap();
    let evaluated = evaluator::eval_node(&program.into(), env);
    evaluated.unwrap()
}
// Naive doubly-recursive fibonacci written in Monkey. The three inputs are
// identical except for the argument (5 / 20 / 35), scaling the workload.
const INPUT1: &str = "
let fibonacci = fn(x) {
if (x == 0) {
0
} else {
if (x == 1) {
return 1;
} else {
fibonacci(x - 1) + fibonacci(x - 2);
}
}
};
fibonacci(5);
";
const INPUT2: &str = "
let fibonacci = fn(x) {
if (x == 0) {
0
} else {
if (x == 1) {
return 1;
} else {
fibonacci(x - 1) + fibonacci(x - 2);
}
}
};
fibonacci(20);
";
const INPUT3: &str = "
let fibonacci = fn(x) {
if (x == 0) {
0
} else {
if (x == 1) {
return 1;
} else {
fibonacci(x - 1) + fibonacci(x - 2);
}
}
};
fibonacci(35);
";
/// Benchmarks the tree-walking evaluator against the bytecode VM on the
/// same three fibonacci workloads.
fn bench_fibs(c: &mut Criterion) {
    let mut group = c.benchmark_group("Fibonacci");
    // `p` is the fibonacci argument, used only to label the benchmark ids.
    for (input, p) in [(INPUT1, 5), (INPUT2, 20), (INPUT3, 35)].iter() {
        group.bench_with_input(BenchmarkId::new("Evaluator", p), input, |b, input| {
            b.iter(|| run_evaluator(black_box(input.to_string())))
        });
        group.bench_with_input(BenchmarkId::new("VM", p), input, |b, input| {
            b.iter(|| run_vm(black_box(input.to_string())))
        });
    }
    group.finish();
}
criterion_group!(benches, bench_fibs);
criterion_main!(benches);
|
use super::Collector;
use DocId;
use Result;
use Score;
use SegmentLocalId;
use SegmentReader;
/// `CountCollector` collector only counts how many
/// documents match the query.
///
/// ```rust
/// #[macro_use]
/// extern crate tantivy;
/// use tantivy::schema::{SchemaBuilder, TEXT};
/// use tantivy::{Index, Result};
/// use tantivy::collector::CountCollector;
/// use tantivy::query::QueryParser;
///
/// # fn main() { example().unwrap(); }
/// fn example() -> Result<()> {
///     let mut schema_builder = SchemaBuilder::new();
///     let title = schema_builder.add_text_field("title", TEXT);
///     let schema = schema_builder.build();
///     let index = Index::create_in_ram(schema);
///     {
///         let mut index_writer = index.writer(3_000_000)?;
///         index_writer.add_document(doc!(
///             title => "The Name of the Wind",
///         ));
///         index_writer.add_document(doc!(
///             title => "The Diary of Muadib",
///         ));
///         index_writer.add_document(doc!(
///             title => "A Dairy Cow",
///         ));
///         index_writer.add_document(doc!(
///             title => "The Diary of a Young Girl",
///         ));
///         index_writer.commit().unwrap();
///     }
///
///     index.load_searchers()?;
///     let searcher = index.searcher();
///
///     {
///         let mut count_collector = CountCollector::default();
///         let query_parser = QueryParser::for_index(&index, vec![title]);
///     	   let query = query_parser.parse_query("diary")?;
///     	   searcher.search(&*query, &mut count_collector).unwrap();
///
///     	   assert_eq!(count_collector.count(), 2);
///     }
///
///     Ok(())
/// }
/// ```
#[derive(Default)]
pub struct CountCollector {
    // Running number of collected documents across all segments.
    count: usize,
}
impl CountCollector {
    /// Returns the count of documents that were
    /// collected.
    pub fn count(&self) -> usize {
        self.count
    }
}
impl Collector for CountCollector {
    // Counting needs no per-segment state, so this is a no-op.
    fn set_segment(&mut self, _: SegmentLocalId, _: &SegmentReader) -> Result<()> {
        Ok(())
    }
    // Every matching document bumps the counter; doc id and score are ignored.
    fn collect(&mut self, _: DocId, _: Score) {
        self.count += 1;
    }
    // Scores are never read, so the query engine may skip score computation.
    fn requires_scoring(&self) -> bool {
        false
    }
}
#[cfg(test)]
mod tests {
    use collector::{Collector, CountCollector};
    /// Each `collect` call increments the count by exactly one, reading the
    /// count is non-destructive, and the collector never requests scoring.
    #[test]
    fn test_count_collector() {
        let mut count_collector = CountCollector::default();
        assert_eq!(count_collector.count(), 0);
        count_collector.collect(0u32, 1f32);
        assert_eq!(count_collector.count(), 1);
        assert_eq!(count_collector.count(), 1);
        count_collector.collect(1u32, 1f32);
        assert_eq!(count_collector.count(), 2);
        assert!(!count_collector.requires_scoring());
    }
}
|
#![allow(missing_docs)]
use thiserror::Error;
/// Errors raised by the actix-web integration layer.
#[derive(Error, Debug)]
pub enum Error {
    #[error("couldn't convert request from actix-web format to perseus format")]
    RequestConversionFailed {
        // Underlying actix-web error that caused the conversion to fail.
        #[source]
        source: actix_web::client::HttpError,
    },
}
|
use std::{
collections::HashMap,
fmt::Display,
sync::{Arc, Mutex},
};
use crate::{
interpreter::{Interpreter, InterpreterError},
token::Token,
};
use super::{CallableValue, RuntimeValue, UserFunction};
/// Backing storage for a class declaration; shared through `ClassDefinition`.
#[derive(Debug)]
pub struct ClassDefinitionStorage {
    // Identifier token from the `class` declaration (used by Display and Eq).
    name: Token,
    // Parent class, if declared; consulted as a fallback by `find_method`.
    superclass: Option<ClassDefinition>,
    // Methods declared directly on this class, keyed by method name.
    methods: HashMap<String, UserFunction>,
}
/// Cheaply cloneable handle to a class declaration (`Arc`-shared storage).
#[derive(Debug, Clone)]
pub struct ClassDefinition(Arc<ClassDefinitionStorage>);
impl Display for ClassDefinition {
    /// Renders as `<class NAME>` using the class's declared name token.
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = &self.0.name.lexeme;
        write!(formatter, "<class {}>", name)
    }
}
impl PartialEq for ClassDefinition {
    // Classes compare equal by their name token alone.
    // NOTE(review): two distinct classes sharing a name token would compare
    // equal — confirm class names are unique in their scope.
    fn eq(&self, other: &Self) -> bool {
        self.0.name == other.0.name
    }
}
impl CallableValue for ClassDefinition {
    /// Calling a class constructs a new instance; if the class (or an
    /// ancestor) defines `init`, it runs bound to the new instance with the
    /// given arguments. The call evaluates to the instance itself.
    fn call(
        &self,
        interpreter: &mut Interpreter,
        args: Vec<RuntimeValue>,
    ) -> Result<RuntimeValue, InterpreterError> {
        let instance = ClassInstance::new(self);
        let initializer = self.find_method("init");
        if let Some(fun) = initializer {
            // `init`'s own return value is deliberately discarded.
            fun.bind(&instance).call(interpreter, args)?;
        }
        Ok(RuntimeValue::Instance(instance))
    }
    /// Arity is inherited from `init`; a class without `init` takes no args.
    fn arity(&self) -> usize {
        let initializer = self.find_method("init");
        initializer.as_ref().map(CallableValue::arity).unwrap_or(0)
    }
}
impl ClassDefinition {
pub fn new(
name: &Token,
superclass: Option<ClassDefinition>,
methods: HashMap<String, UserFunction>,
) -> Self {
Self(
ClassDefinitionStorage {
name: name.clone(),
superclass,
methods,
}
.into(),
)
}
pub fn find_method(&self, name: &str) -> Option<UserFunction> {
let self_method = self.0.methods.get(name).cloned();
match (&self_method, &self.0.superclass) {
(Some(_), _) => self_method,
(None, Some(sc)) => sc.find_method(name),
(None, None) => None,
}
}
}
/// Backing storage for one object instance; shared through `ClassInstance`.
#[derive(Debug)]
struct ClassInstanceStorage {
    // NOTE(review): `ClassDefinition` is already `Arc`-backed, so wrapping it
    // in another `Arc` adds a second indirection — possibly unintentional.
    class: Arc<ClassDefinition>,
    // Instance fields; the Mutex gives interior mutability for `set`.
    fields: Mutex<HashMap<String, RuntimeValue>>,
}
/// Cheaply cloneable handle to an object instance (`Arc`-shared storage).
#[derive(Debug, Clone)]
pub struct ClassInstance(Arc<ClassInstanceStorage>);
impl Display for ClassInstance {
    // Renders as `instance NAME(field1, field2, ...)`. Field order follows
    // HashMap key iteration and is therefore unspecified.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "instance {}({})",
            &self.0.class.0.name.lexeme,
            self.0
                .fields
                .lock()
                .unwrap()
                .keys()
                .cloned()
                .collect::<Vec<String>>()
                .join(", ")
        )
    }
}
impl PartialEq for ClassInstance {
    /// Instances are equal when they share a class and their fields match.
    ///
    /// Comparing an instance against itself (a clone of the same handle)
    /// short-circuits via `Arc::ptr_eq`: the original code locked
    /// `self.0.fields` and `other.0.fields` simultaneously, which for the
    /// same underlying `Mutex` is a same-thread re-lock — documented by std
    /// to deadlock or panic.
    fn eq(&self, other: &Self) -> bool {
        if Arc::ptr_eq(&self.0, &other.0) {
            return true;
        }
        let map: &HashMap<String, RuntimeValue> = &self.0.fields.lock().unwrap();
        let other_map: &HashMap<String, RuntimeValue> = &other.0.fields.lock().unwrap();
        self.0.class == other.0.class && map == other_map
    }
}
impl ClassInstance {
pub fn new(class: &ClassDefinition) -> Self {
Self(
ClassInstanceStorage {
class: class.clone().into(),
fields: HashMap::new().into(),
}
.into(),
)
}
pub fn get(&self, name: &Token) -> Option<RuntimeValue> {
let field = self.0.fields.lock().unwrap().get(&name.lexeme).cloned();
match field {
Some(_) => field,
None => self
.0
.class
.find_method(&name.lexeme)
.map(|it| it.bind(self))
.map(RuntimeValue::UserFunction),
}
}
pub fn set(&self, name: &Token, value: RuntimeValue) {
self.0
.fields
.lock()
.unwrap()
.insert(name.lexeme.clone(), value);
}
}
|
use crate::GetFieldExt;
#[cfg(feature="alloc")]
use crate::pmr::Box;
use core_extensions::{SelfOps,Void};
use core_extensions::type_asserts::AssertEq;
/// `fields_mut` through a `Box`ed tuple (with a nested `Box`) must yield
/// simultaneously usable, disjoint `&mut` borrows of every requested field.
#[cfg(feature="alloc")]
#[test]
fn boxed_fields() {
    let mut f = Box::new((0, 1, Box::new((20, 21)), 3));
    let (f_0, f_1, f_2_0, f_2_1, f_3) = f.fields_mut(fp!(0, 1, 2.0, 2.1, 3));
    // Write through every reference once...
    *f_0 = 0;
    *f_1 = 0;
    *f_2_0 = 0;
    *f_2_1 = 0;
    *f_3 = 0;
    // ...then again with distinct values to prove the borrows don't alias.
    *f_0 = 5;
    *f_1 = 6;
    *f_2_0 = 7;
    *f_2_1 = 8;
    *f_3 = 9;
    assert_eq!(f.0, 5);
    assert_eq!(f.1, 6);
    assert_eq!((f.2).0, 7);
    assert_eq!((f.2).1, 8);
    assert_eq!(f.3, 9);
}
/// Wraps a value in a one-element tuple (used to build nested tuples).
fn wrap_single<T>(value: T) -> (T,) {
    (value,)
}
/// Nested field paths (`a.aa.0` etc.) resolve through both named structs
/// and tuples, and the returned `&mut`s stay usable after writes.
#[test]
fn deeply_nested(){
    {
        let mut f=make_struct!{
            a:make_struct!{
                aa:(101,103),
                ab:"hello",
            },
            b:false,
        };
        let (f_aa_0,f_aa_1,f_ab,f_b)=f.fields_mut(fp!( a.aa.0, a.aa.1, a.ab, b ));
        assert_eq!(f_aa_0 , &mut 101);
        assert_eq!(f_aa_1, &mut 103);
        assert_eq!(f_ab, &mut "hello");
        // Mutate through the references, then confirm both the references
        // and the original struct observe the writes.
        *f_aa_0*=3;
        *f_aa_1*=2;
        *f_ab="shoot";
        *f_b=true;
        assert_eq!(f_aa_0 , &mut 303);
        assert_eq!(f_aa_1, &mut 206);
        assert_eq!(f_ab, &mut "shoot");
        assert_eq!(f_b, &mut true);
        assert_eq!(f.a.aa.0, 303);
        assert_eq!(f.a.aa.1, 206);
        assert_eq!(f.a.ab, "shoot");
        assert_eq!(f.b, true);
    }
    {
        // Four levels of single-element tuples: path `0.0.0.0`.
        let mut this=10
            .piped(wrap_single)
            .piped(wrap_single)
            .piped(wrap_single)
            .piped(wrap_single);
        assert_eq!( (((this.0).0).0).0, 10 );
        let num=this.field_mut(fp!(0.0.0.0));
        *num*=2;
        assert_eq!( (((this.0).0).0).0, 20 );
    }
}
/// `fields_mut(fp!())` with an empty path list is the identity case: it
/// borrows nothing and returns the unit tuple.
#[test]
fn identity_getters(){
    #[cfg(feature="alloc")]
    {
        let mut this=Box::new((0,1));
        let ()=this.fields_mut(fp!());
    }
    // The cases below are intentionally commented out (identity paths via
    // `fp!(())` are not currently supported/exercised); kept for reference.
    /*
    {
        let other=this.field_mut(fp!(()));
        *other=Default::default();
        assert_eq!(this, Default::default());
    }
    {
        let _:FieldPathSet<(),UniquePaths>=
            fp!();
        let _:FieldPath<()>=
            fp!(());
        let _:FieldPathSet<(FieldPath<()>,FieldPath<()>),AliasedPaths>=
            fp!((),());
        let _:FieldPathSet<(FieldPath<()>,FieldPath<()>,FieldPath<()>),AliasedPaths>=
            fp!((),(),());
    }
    */
}
// Named aliases for the nested tuple field paths used by
// `get_nested_field_types` below.
field_path_aliases!{
    FP_0_0=0.0,
    FP_0_1=0.1,
    FP_0_2_0=0.2.0,
    FP_0_2_1=0.2.1,
    FP_0_2_1_0=0.2.1.0,
    FP_0_2_1_1=0.2.1.1,
}
/// Compile-time checks (via `AssertEq`) that `GetFieldType*` and
/// `RevGetFieldType` resolve nested tuple field paths to the right types.
#[test]
fn get_nested_field_types(){
    use crate::{
        GetFieldType,GetFieldType2,GetFieldType3,GetFieldType4,
        RevGetFieldType,
    };
    type FP0=FP!(0);
    type FP1=FP!(1);
    type FP2=FP!(2);
    type Unary<T>=(T,);
    type SStr=&'static str;
    type VVec=&'static [()];
    {
        // Depth 1.
        type Tuple=Unary<SStr>;
        let _:AssertEq<GetFieldType<Tuple,FP0>,SStr>;
    }
    {
        // Depth 2, both step-wise and path-alias forms.
        type Tuple=Unary<(Void,SStr)>;
        let _:AssertEq<GetFieldType2<Tuple,FP0,FP0>,Void>;
        let _:AssertEq<GetFieldType2<Tuple,FP0,FP1>,SStr>;
        let _:AssertEq<RevGetFieldType<FP_0_0,Tuple>,Void>;
        let _:AssertEq<RevGetFieldType<FP_0_1,Tuple>,SStr>;
    }
    {
        // Depth 3.
        type Tuple=Unary<((),(),(u64,SStr))>;
        let _:AssertEq<GetFieldType3<Tuple,FP0,FP2,FP0>,u64>;
        let _:AssertEq<GetFieldType3<Tuple,FP0,FP2,FP1>,SStr>;
        let _:AssertEq<RevGetFieldType<FP_0_2_0,Tuple>,u64>;
        let _:AssertEq<RevGetFieldType<FP_0_2_1,Tuple>,SStr>;
    }
    {
        // Depth 4.
        type Tuple=Unary<((),(),((),(SStr,VVec)))>;
        let _:AssertEq<GetFieldType4<Tuple,FP0,FP2,FP1,FP0>,SStr>;
        let _:AssertEq<GetFieldType4<Tuple,FP0,FP2,FP1,FP1>,VVec>;
        let _:AssertEq<RevGetFieldType<FP_0_2_1_0,Tuple>,SStr>;
        let _:AssertEq<RevGetFieldType<FP_0_2_1_1,Tuple>,VVec>;
    }
}
|
pub mod adt_blog;
pub mod blog;
pub mod idiomatic_blog;
/*************************************
* 17.1 examples (encapsulation)
*************************************/
/// A collection of integers that keeps a cached running average, exposing
/// only operations that maintain the invariant (encapsulation example).
#[derive(PartialEq, Debug)]
pub struct AveragedCollection {
    list: Vec<i32>,
    average: f64,
}
impl AveragedCollection {
    /// Adds `value` and refreshes the cached average.
    pub fn add(&mut self, value: i32) {
        self.list.push(value);
        self.update_average();
    }
    /// Removes and returns the most recently added value, refreshing the
    /// cached average; `None` when the collection is empty.
    pub fn remove(&mut self) -> Option<i32> {
        let removed = self.list.pop();
        if removed.is_some() {
            self.update_average();
        }
        removed
    }
    /// Returns the cached average (0.0 for an empty collection).
    pub fn average(&self) -> f64 {
        self.average
    }
    /// Recomputes the cached average.
    ///
    /// Bug fix: the original divided unconditionally, so removing the last
    /// element computed `0.0 / 0.0 == NaN`. An empty collection now reports
    /// an average of 0.0.
    fn update_average(&mut self) {
        if self.list.is_empty() {
            self.average = 0.0;
        } else {
            let total: i32 = self.list.iter().sum();
            self.average = total as f64 / self.list.len() as f64;
        }
    }
}
/***************************************************
* 17.2 examples (duck-typing, open for extension)
**************************************************/
// if we wrote a GUI library like this...
// A minimal GUI library illustrating trait objects and open extension.
pub mod gui {
    /// Anything that can be rendered to the screen.
    pub trait Draw {
        fn draw(&self);
    }

    /// A screen holding an arbitrary mix of drawable components.
    ///
    /// `Box<dyn Draw>` is a trait object: `run` dispatches `draw` dynamically
    /// instead of through monomorphized static dispatch, letting callers mix
    /// component types at a small runtime cost. Only "object-safe" traits
    /// qualify — methods must not return `Self` or take generic type
    /// parameters, since a trait object erases the concrete type.
    pub struct Screen {
        pub components: Vec<Box<dyn Draw>>,
    }

    impl Screen {
        /// Draws every component, in insertion order.
        pub fn run(&self) {
            self.components.iter().for_each(|component| component.draw());
        }
    }

    /// A clickable button.
    pub struct Button {
        pub width: u32,
        pub height: u32,
        pub label: String,
    }

    impl Draw for Button {
        fn draw(&self) {
            // code to draw the button
        }
    }
    // Library users can extend `Draw` with their own types (see main.rs).
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Adding one element updates both the list and the cached average.
    #[test]
    fn test_add() {
        let mut c = AveragedCollection {
            list: vec![],
            average: 0.0,
        };
        c.add(1);
        assert_eq!(
            c,
            AveragedCollection {
                list: vec![1],
                average: 1.0
            }
        )
    }
    /// Removing pops the last element and re-derives the average.
    #[test]
    fn test_remove() {
        let mut c = AveragedCollection {
            list: vec![1, 2, 3],
            average: 2.0,
        };
        let x = c.remove();
        assert_eq!(x, Some(3));
        assert_eq!(
            c,
            AveragedCollection {
                list: vec![1, 2],
                average: 1.5
            }
        )
    }
    // Placeholder: extension of the gui lib is demonstrated in main.rs.
    #[test]
    fn test_extending_gui_lib() {}
}
|
use super::preludes::*;
use vm::convert::ToBytes;
use vm::opcode;
/// A single byte of compiled bytecode.
pub type Instruction = u8;
/// A flat byte sequence of compiled instructions (opcodes plus operands).
#[derive(Clone, Debug, Default, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct Instructions(pub Vec<Instruction>);
impl From<Vec<Instruction>> for Instructions {
fn from(value: Vec<Instruction>) -> Self {
Instructions(value)
}
}
impl From<opcode::Constant> for Instructions {
fn from(value: opcode::Constant) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::JumpNotTruthy> for Instructions {
fn from(value: opcode::JumpNotTruthy) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::Jump> for Instructions {
fn from(value: opcode::Jump) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::GetGlobal> for Instructions {
fn from(value: opcode::GetGlobal) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::SetGlobal> for Instructions {
fn from(value: opcode::SetGlobal) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::Array> for Instructions {
fn from(value: opcode::Array) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::Hash> for Instructions {
fn from(value: opcode::Hash) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::GetLocal> for Instructions {
fn from(value: opcode::GetLocal) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::SetLocal> for Instructions {
fn from(value: opcode::SetLocal) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::Call> for Instructions {
fn from(value: opcode::Call) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::GetBuiltin> for Instructions {
fn from(value: opcode::GetBuiltin) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::Closure> for Instructions {
fn from(value: opcode::Closure) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<opcode::GetFree> for Instructions {
fn from(value: opcode::GetFree) -> Self {
value.to_bytes().to_vec().into()
}
}
impl<T: ToBytes<1, 0>> From<T> for Instructions {
fn from(value: T) -> Self {
value.to_bytes().to_vec().into()
}
}
impl From<Instructions> for Vec<Instruction> {
    // Unwraps into the underlying byte vector.
    fn from(value: Instructions) -> Self {
        value.0
    }
}
impl From<Vec<opcode::Opcode>> for Instructions {
    // Serializes each opcode to its byte encoding and concatenates them.
    fn from(value: Vec<opcode::Opcode>) -> Self {
        value
            .into_iter()
            .flat_map(|v| v.to_bytes())
            .collect::<Vec<_>>()
            .into()
    }
}
impl Display for Instructions {
    /// Disassembles the byte stream: one line per instruction, formatted as
    /// a zero-padded 4-digit byte offset followed by the decoded opcode.
    // NOTE(review): "ยฅn" appears to be a mojibake'd "\n" (yen sign renders
    // as backslash on JIS layouts). The unit tests pin these exact bytes,
    // so it is deliberately left unchanged here.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut pos = 0;
        let mut buf = String::new();
        while pos < self.0.len() {
            let op = opcode::Opcode::try_from(&self.0[pos..]);
            match &op {
                Ok(op) => {
                    let msg = format!("{:>04} {}ยฅn", pos, op);
                    buf.push_str(msg.as_str());
                    // Advance past the opcode byte plus its operand bytes.
                    pos = pos + 1 + op.readsize();
                }
                Err(e) => {
                    // Undecodable byte: report the error and stop.
                    let msg = format!("{:>04} Error: {}ยฅn", pos, e);
                    buf.push_str(msg.as_str());
                    break;
                }
            };
        }
        write!(f, "{}", buf)
    }
}
impl From<Vec<Instructions>> for Instructions {
    /// Concatenates multiple instruction sequences into one.
    fn from(value: Vec<Instructions>) -> Self {
        // `into_iter` yields owned `Instructions`, so each inner Vec can be
        // moved directly; the original `.0.to_vec()` cloned every buffer.
        value
            .into_iter()
            .flat_map(|v| v.0)
            .collect::<Vec<_>>()
            .into()
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// Disassembling a stream containing every opcode yields one line per
    /// instruction; byte offsets advance by 1 + operand width.
    #[test]
    fn test_instructions_string() {
        let instructions: Vec<Instructions> = vec![
            opcode::Add.into(),
            opcode::Constant(2).into(),
            opcode::Constant(65535).into(),
            opcode::Pop.into(),
            opcode::Sub.into(),
            opcode::Mul.into(),
            opcode::Div.into(),
            opcode::Equal.into(),
            opcode::NotEqual.into(),
            opcode::GreaterThan.into(),
            opcode::JumpNotTruthy(1).into(),
            opcode::Jump(2).into(),
            opcode::Null.into(),
            opcode::GetGlobal(65535).into(),
            opcode::SetGlobal(65535).into(),
            opcode::Array(65535).into(),
            opcode::Hash(65535).into(),
            opcode::Index.into(),
            opcode::Call(254).into(),
            opcode::ReturnValue.into(),
            opcode::Return.into(),
            opcode::GetLocal(254).into(),
            opcode::SetLocal(254).into(),
            opcode::GetBuiltin(254).into(),
            opcode::Closure(65535, 255).into(),
            opcode::GetFree(254).into(),
            opcode::CurrentClosure.into(),
        ];
        let instructions = Instructions::from(instructions);
        // "ยฅn" is the exact (mojibake) line terminator Display emits.
        let expected = "\
0000 Addยฅn\
0001 Constant 2ยฅn\
0004 Constant 65535ยฅn\
0007 Popยฅn\
0008 Subยฅn\
0009 Mulยฅn\
0010 Divยฅn\
0011 Equalยฅn\
0012 NotEqualยฅn\
0013 GreaterThanยฅn\
0014 JumpNotTruthy 1ยฅn\
0017 Jump 2ยฅn\
0020 Nullยฅn\
0021 GetGlobal 65535ยฅn\
0024 SetGlobal 65535ยฅn\
0027 Array 65535ยฅn\
0030 Hash 65535ยฅn\
0033 Indexยฅn\
0034 Call 254ยฅn\
0036 ReturnValueยฅn\
0037 Returnยฅn\
0038 GetLocal 254ยฅn\
0040 SetLocal 254ยฅn\
0042 GetBuiltin 254ยฅn\
0044 Closure 65535 255ยฅn\
0048 GetFree 254ยฅn\
0050 CurrentClosureยฅn";
        assert_eq!(instructions.to_string(), expected);
    }
}
|
/**
 * cargo build --example channel-3
 * cargo run --example channel-3
 *
 * Passing data between threads with message passing (mpsc channels):
 * https://doc.rust-jp.rs/book/second-edition/ch16-02-message-passing.html
 *
 * Main "plays ball" with two child threads: a counter bounces over four
 * one-directional channels, each hop adding one more decimal digit of 1s,
 * so every println states the expected running total.
 */
use std::thread;
use std::sync::mpsc;
// Fix: locals renamed to snake_case (the original camelCase names trigger
// `non_snake_case` compiler warnings). All printed strings are preserved
// byte-for-byte, including the original's "Child |"/"Child1 |" prefix
// inconsistency.
fn main() {
    // One channel per direction: child -> main and main -> child, per child.
    let (child1_tx, child1_rx) = mpsc::channel();
    let (child2_tx, child2_rx) = mpsc::channel();
    let (main_tx1, main_rx1) = mpsc::channel();
    let (main_tx2, main_rx2) = mpsc::channel();
    let child1_handle = thread::spawn(move || {
        let mut child_ball = main_rx1.recv().unwrap();
        println!("Child1 | Expected: 1, Got: {}.", child_ball);
        child_ball += 10;
        child1_tx.send(child_ball).unwrap();
        child_ball = main_rx1.recv().unwrap();
        println!("Child | Expected: 11111, Got: {}, Finished.", child_ball);
        child_ball += 100000;
        child1_tx.send(child_ball).unwrap();
    });
    let child2_handle = thread::spawn(move || {
        let mut child_ball = main_rx2.recv().unwrap();
        println!("Child | Expected: 111, Got: {}.", child_ball);
        child_ball += 1000;
        child2_tx.send(child_ball).unwrap();
        child_ball = main_rx2.recv().unwrap();
        println!("Child | Expected: 1111111, Got: {}, Finished.", child_ball);
        child_ball += 10000000;
        child2_tx.send(child_ball).unwrap();
    });
    let mut main_ball = 1;
    main_tx1.send(main_ball).unwrap();
    main_ball = child1_rx.recv().unwrap();
    println!("Main | Expected: 11, Got: {}.", main_ball);
    main_ball += 100;
    main_tx2.send(main_ball).unwrap();
    main_ball = child2_rx.recv().unwrap();
    println!("Main | Expected: 1111, Got: {}.", main_ball);
    main_ball += 10000;
    main_tx1.send(main_ball).unwrap();
    main_ball = child1_rx.recv().unwrap();
    println!("Main | Expected: 111111, Got: {}.", main_ball);
    main_ball += 1000000;
    main_tx2.send(main_ball).unwrap();
    main_ball = child2_rx.recv().unwrap();
    println!("Main | Expected: 11111111, Got: {}.", main_ball);
    child1_handle.join().unwrap();
    child2_handle.join().unwrap();
    println!("Main | Finished.");
}
#![no_std]
#![feature(start)]
#![no_main]
use ferr_os_librust::{io,
syscall,
interfaces::keyboard};
mod action;
mod direction;
mod errors;
mod state;
mod snake;
mod point_generator;
mod game;
use action::Action;
use direction::Dir;
use errors::SnakeError;
use state::State;
use snake::Snake;
use point_generator::PointGenerator;
use game::Game;
extern crate alloc;
extern crate rand;
use alloc::{collections::vec_deque::VecDeque,
string::String,
format,
vec::Vec
};
use rand::RngCore;
/// Playfield width in cells.
pub const WIDTH: usize = 59;
/// Playfield height in cells.
pub const HEIGHT: usize = 20;
/// Total cell count of the screen buffer.
pub const SIZE : usize = WIDTH as usize * HEIGHT as usize;
/// Program entry point: initializes the heap allocator, decodes the
/// kernel-provided argument block, and runs `main`.
#[no_mangle]
pub extern "C" fn _start(heap_address: u64, heap_size: u64, args: u64, args_number: u64) {
    ferr_os_librust::allocator::init(heap_address, heap_size);
    let arguments = ferr_os_librust::env::retrieve_arguments(args_number, args);
    main(arguments);
}
/// Renders row `y` of the screen buffer as one newline-terminated string.
fn buffer_to_line(buffer: [State; SIZE], y: usize) -> String {
    let start = y * WIDTH;
    let row = &buffer[start..start + WIDTH];
    let mut rendered = String::new();
    for cell in row {
        rendered.push_str(&cell.to_string());
    }
    rendered.push('\n');
    rendered
}
/// Plays the two-tone "fruit collected" jingle.
// NOTE(review): unlike `annoying`, this does not check MUTE — confirm the
// pickup sound is meant to play even with -S. SOUND_FD is written once in
// `main` before any game sound plays.
fn get_point(_g: &mut Game) {
    unsafe{ io::push_sound(SOUND_FD, 350, 2, 0);
    io::push_sound(SOUND_FD, 500, 2, 2)};
}
/// Background "music": on roughly half of the ticks, plays a short blip at
/// a random pitch — unless muted with the -S flag.
fn annoying(g: &mut Game) {
    if unsafe {!MUTE} {
        // Pitch is one of {50, 100, 150, 200}.
        let pitch = ((g.rng.rng.next_u64()) % 4)*50+ 50;
        if g.rng.rng.next_u64() %4 < 2 {
            unsafe{io::push_sound(SOUND_FD, pitch, 2, 0)}
        }
    }
}
/// Plays the descending four-note game-over jingle.
// (Name kept as-is — "loose" is a typo for "lose", but `end_screen` calls it.)
fn loose(_g: &mut Game) {
    unsafe{io::push_sound(SOUND_FD, 500, 3, 0)};
    unsafe{io::push_sound(SOUND_FD, 400, 3, 3)};
    unsafe{io::push_sound(SOUND_FD, 300, 3, 6)};
    unsafe{io::push_sound(SOUND_FD, 200, 8, 9)};
}
// File descriptor of the sound device; written once during startup in `main`.
static mut SOUND_FD : u64 = 0_u64;
// When true (-S flag), `annoying` suppresses the background blips.
static mut MUTE: bool = false;
/// Parses flags, wires stdout to the screen device and opens the sound
/// device, then runs the game loop followed by the game-over screen.
#[inline(never)]
fn main(args: Vec<String>) {
    if args.len() > 3 {
        io::_print(&String::from("Got too many arguments, try -h for help\n"));
        return;
    } else if args.len() == 3 {
        // NOTE(review): flags are only parsed when exactly 3 args are
        // present — confirm what `retrieve_arguments` yields, since a plain
        // `prog -h` (2 entries) would appear to skip this branch.
        if &args[1] == "-h" {
            io::_print(&String::from("-S Mute background music\n"));
            io::_print(&String::from("-h Show this\n"));
            return;
        } else if &args[1] == "-S" {
            unsafe { MUTE = true };
        } else {
            io::_print(&String::from("Incorrect argument, try -h for help\n"));
            return;
        }
    }
    unsafe {
        // Redirect STD_OUT to the screen device and size the window to the
        // playfield plus a one-cell border.
        let fd = syscall::open(&String::from("/hard/screen"), io::OpenFlags::OWR);
        syscall::set_layer(10);
        syscall::dup2(io::STD_OUT, fd);
        syscall::close(fd);
        syscall::set_screen_size(HEIGHT+2,WIDTH+2);
        syscall::set_screen_pos(1,0);
        SOUND_FD = syscall::open(&String::from("/hard/sound"), io::OpenFlags::OWR) as u64;
    }
    let mut game = Game::init();
    game.display();
    main_loop(&mut game);
    end_screen(&mut game);
}
/// Drains pending keyboard input and returns the decoded characters
/// (keys currently held; the "released" half of the translation is unused).
fn get_inputs() -> String {
    let v = io::read_input(io::STD_IN, 512);
    let mut begin = String::new();
    let mut _end = String::new();
    keyboard::translate(v, &mut begin, &mut _end);
    begin
}
/// Maps a key to a game action using AZERTY-style ZQSD movement keys;
/// anything else is a no-op.
fn char_to_action(c:char) -> Action {
    match c.to_ascii_lowercase() {
        'q' => Action::Turn(Dir::Left),
        's' => Action::Turn(Dir::Down),
        'd' => Action::Turn(Dir::Right),
        'z' => Action::Turn(Dir::Up),
        _ => Action::Nop,
    }
}
/// Blocks for `n` scheduler ticks by issuing `n` sleep syscalls.
fn sleep(n: usize) {
    for _ in 0..n {
        unsafe { syscall::sleep() }
    }
}
/// Fixed-rate game loop: sleep, apply all buffered key presses, advance the
/// simulation one step, play background sound, redraw — until game over.
fn main_loop(g:&mut Game) {
    while !g.ended {
        sleep(75);
        for c in get_inputs().chars() {
            g.do_action(char_to_action(c));
        }
        g.update();
        annoying(g);
        g.display();
    }
}
/// Moves the terminal cursor `n` cells in direction `d` using the ANSI CSI
/// cursor-movement escape sequence (ESC [ n A/B/C/D).
fn move_cursor(d:Dir, n: usize) {
    let end = match d {
        Dir::Left => 'D',
        Dir::Right => 'C',
        Dir::Down => 'B',
        Dir::Up => 'A'
    };
    io::_print(&format!("\x1B[{}{}",n,end))
}
/// Game-over screen: plays the losing jingle, scrolls the board away, shows
/// the score, then busy-waits until any key is pressed.
fn end_screen(g: &mut Game) {
    loose(g);
    for _ in 0..HEIGHT{
        io::_print(&String::from("\n"));
    }
    io::_print(&String::from(
        " ----------==========GAME OVER==========---------- \n"));
    io::_print(&format!("Collected {} fruit",g.score));
    loop {
        if !get_inputs().is_empty(){
            return
        }
    }
}
|
// svd2rust-generated register map (RCC-style clock controller): field order
// and the `_reservedN` padding encode exact hardware offsets — do not
// reorder, insert, or remove fields.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Clock control register"]
    pub cr: CR,
    #[doc = "0x04 - Internal clock sources calibration register"]
    pub icscr: ICSCR,
    #[doc = "0x08 - Clock configuration register"]
    pub cfgr: CFGR,
    #[doc = "0x0c - PLL configuration register"]
    pub pllcfgr: PLLCFGR,
    #[doc = "0x10 - PLLSAI1 configuration register"]
    pub pllsai1cfgr: PLLSAI1CFGR,
    #[doc = "0x14 - PLLSAI2 configuration register"]
    pub pllsai2cfgr: PLLSAI2CFGR,
    #[doc = "0x18 - Clock interrupt enable register"]
    pub cier: CIER,
    #[doc = "0x1c - Clock interrupt flag register"]
    pub cifr: CIFR,
    #[doc = "0x20 - Clock interrupt clear register"]
    pub cicr: CICR,
    _reserved9: [u8; 0x04],
    #[doc = "0x28 - AHB1 peripheral reset register"]
    pub ahb1rstr: AHB1RSTR,
    #[doc = "0x2c - AHB2 peripheral reset register"]
    pub ahb2rstr: AHB2RSTR,
    #[doc = "0x30 - AHB3 peripheral reset register"]
    pub ahb3rstr: AHB3RSTR,
    _reserved12: [u8; 0x04],
    #[doc = "0x38 - APB1 peripheral reset register 1"]
    pub apb1rstr1: APB1RSTR1,
    #[doc = "0x3c - APB1 peripheral reset register 2"]
    pub apb1rstr2: APB1RSTR2,
    #[doc = "0x40 - APB2 peripheral reset register"]
    pub apb2rstr: APB2RSTR,
    _reserved15: [u8; 0x04],
    #[doc = "0x48 - AHB1 peripheral clock enable register"]
    pub ahb1enr: AHB1ENR,
    #[doc = "0x4c - AHB2 peripheral clock enable register"]
    pub ahb2enr: AHB2ENR,
    #[doc = "0x50 - AHB3 peripheral clock enable register"]
    pub ahb3enr: AHB3ENR,
    _reserved18: [u8; 0x04],
    #[doc = "0x58 - APB1ENR1"]
    pub apb1enr1: APB1ENR1,
    #[doc = "0x5c - APB1 peripheral clock enable register 2"]
    pub apb1enr2: APB1ENR2,
    #[doc = "0x60 - APB2ENR"]
    pub apb2enr: APB2ENR,
    _reserved21: [u8; 0x04],
    #[doc = "0x68 - AHB1 peripheral clocks enable in Sleep and Stop modes register"]
    pub ahb1smenr: AHB1SMENR,
    #[doc = "0x6c - AHB2 peripheral clocks enable in Sleep and Stop modes register"]
    pub ahb2smenr: AHB2SMENR,
    #[doc = "0x70 - AHB3 peripheral clocks enable in Sleep and Stop modes register"]
    pub ahb3smenr: AHB3SMENR,
    _reserved24: [u8; 0x04],
    #[doc = "0x78 - APB1SMENR1"]
    pub apb1smenr1: APB1SMENR1,
    #[doc = "0x7c - APB1 peripheral clocks enable in Sleep and Stop modes register 2"]
    pub apb1smenr2: APB1SMENR2,
    #[doc = "0x80 - APB2SMENR"]
    pub apb2smenr: APB2SMENR,
    _reserved27: [u8; 0x04],
    #[doc = "0x88 - CCIPR"]
    pub ccipr: CCIPR,
    _reserved28: [u8; 0x04],
    #[doc = "0x90 - BDCR"]
    pub bdcr: BDCR,
    #[doc = "0x94 - CSR"]
    pub csr: CSR,
    #[doc = "0x98 - Clock recovery RC register"]
    pub crrcr: CRRCR,
    #[doc = "0x9c - Peripherals independent clock configuration register"]
    pub ccipr2: CCIPR2,
}
#[doc = "CR (rw) register accessor: Clock control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "Clock control register"]
pub mod cr;
#[doc = "ICSCR (rw) register accessor: Internal clock sources calibration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`icscr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icscr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`icscr`]
module"]
pub type ICSCR = crate::Reg<icscr::ICSCR_SPEC>;
#[doc = "Internal clock sources calibration register"]
pub mod icscr;
#[doc = "CFGR (rw) register accessor: Clock configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cfgr`]
module"]
pub type CFGR = crate::Reg<cfgr::CFGR_SPEC>;
#[doc = "Clock configuration register"]
pub mod cfgr;
#[doc = "PLLCFGR (rw) register accessor: PLL configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pllcfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pllcfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pllcfgr`]
module"]
pub type PLLCFGR = crate::Reg<pllcfgr::PLLCFGR_SPEC>;
#[doc = "PLL configuration register"]
pub mod pllcfgr;
#[doc = "PLLSAI1CFGR (rw) register accessor: PLLSAI1 configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pllsai1cfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pllsai1cfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pllsai1cfgr`]
module"]
pub type PLLSAI1CFGR = crate::Reg<pllsai1cfgr::PLLSAI1CFGR_SPEC>;
#[doc = "PLLSAI1 configuration register"]
pub mod pllsai1cfgr;
#[doc = "PLLSAI2CFGR (rw) register accessor: PLLSAI2 configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pllsai2cfgr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pllsai2cfgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pllsai2cfgr`]
module"]
pub type PLLSAI2CFGR = crate::Reg<pllsai2cfgr::PLLSAI2CFGR_SPEC>;
#[doc = "PLLSAI2 configuration register"]
pub mod pllsai2cfgr;
#[doc = "CIER (rw) register accessor: Clock interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cier`]
module"]
pub type CIER = crate::Reg<cier::CIER_SPEC>;
#[doc = "Clock interrupt enable register"]
pub mod cier;
#[doc = "CIFR (r) register accessor: Clock interrupt flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cifr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cifr`]
module"]
pub type CIFR = crate::Reg<cifr::CIFR_SPEC>;
#[doc = "Clock interrupt flag register"]
pub mod cifr;
#[doc = "CICR (w) register accessor: Clock interrupt clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cicr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cicr`]
module"]
pub type CICR = crate::Reg<cicr::CICR_SPEC>;
#[doc = "Clock interrupt clear register"]
pub mod cicr;
#[doc = "AHB1RSTR (rw) register accessor: AHB1 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb1rstr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb1rstr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb1rstr`]
module"]
pub type AHB1RSTR = crate::Reg<ahb1rstr::AHB1RSTR_SPEC>;
#[doc = "AHB1 peripheral reset register"]
pub mod ahb1rstr;
#[doc = "AHB2RSTR (rw) register accessor: AHB2 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb2rstr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb2rstr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb2rstr`]
module"]
pub type AHB2RSTR = crate::Reg<ahb2rstr::AHB2RSTR_SPEC>;
#[doc = "AHB2 peripheral reset register"]
pub mod ahb2rstr;
#[doc = "AHB3RSTR (rw) register accessor: AHB3 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb3rstr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb3rstr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb3rstr`]
module"]
pub type AHB3RSTR = crate::Reg<ahb3rstr::AHB3RSTR_SPEC>;
#[doc = "AHB3 peripheral reset register"]
pub mod ahb3rstr;
#[doc = "APB1RSTR1 (rw) register accessor: APB1 peripheral reset register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1rstr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1rstr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb1rstr1`]
module"]
pub type APB1RSTR1 = crate::Reg<apb1rstr1::APB1RSTR1_SPEC>;
#[doc = "APB1 peripheral reset register 1"]
pub mod apb1rstr1;
#[doc = "APB1RSTR2 (rw) register accessor: APB1 peripheral reset register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1rstr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1rstr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb1rstr2`]
module"]
pub type APB1RSTR2 = crate::Reg<apb1rstr2::APB1RSTR2_SPEC>;
#[doc = "APB1 peripheral reset register 2"]
pub mod apb1rstr2;
#[doc = "APB2RSTR (rw) register accessor: APB2 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb2rstr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb2rstr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb2rstr`]
module"]
pub type APB2RSTR = crate::Reg<apb2rstr::APB2RSTR_SPEC>;
#[doc = "APB2 peripheral reset register"]
pub mod apb2rstr;
#[doc = "AHB1ENR (rw) register accessor: AHB1 peripheral clock enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb1enr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb1enr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb1enr`]
module"]
pub type AHB1ENR = crate::Reg<ahb1enr::AHB1ENR_SPEC>;
#[doc = "AHB1 peripheral clock enable register"]
pub mod ahb1enr;
#[doc = "AHB2ENR (rw) register accessor: AHB2 peripheral clock enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb2enr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb2enr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb2enr`]
module"]
pub type AHB2ENR = crate::Reg<ahb2enr::AHB2ENR_SPEC>;
#[doc = "AHB2 peripheral clock enable register"]
pub mod ahb2enr;
#[doc = "AHB3ENR (rw) register accessor: AHB3 peripheral clock enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb3enr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb3enr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb3enr`]
module"]
pub type AHB3ENR = crate::Reg<ahb3enr::AHB3ENR_SPEC>;
#[doc = "AHB3 peripheral clock enable register"]
pub mod ahb3enr;
#[doc = "APB1ENR1 (rw) register accessor: APB1ENR1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1enr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1enr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb1enr1`]
module"]
pub type APB1ENR1 = crate::Reg<apb1enr1::APB1ENR1_SPEC>;
#[doc = "APB1ENR1"]
pub mod apb1enr1;
#[doc = "APB1ENR2 (rw) register accessor: APB1 peripheral clock enable register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1enr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1enr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb1enr2`]
module"]
pub type APB1ENR2 = crate::Reg<apb1enr2::APB1ENR2_SPEC>;
#[doc = "APB1 peripheral clock enable register 2"]
pub mod apb1enr2;
#[doc = "APB2ENR (rw) register accessor: APB2ENR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb2enr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb2enr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb2enr`]
module"]
pub type APB2ENR = crate::Reg<apb2enr::APB2ENR_SPEC>;
#[doc = "APB2ENR"]
pub mod apb2enr;
#[doc = "AHB1SMENR (rw) register accessor: AHB1 peripheral clocks enable in Sleep and Stop modes register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb1smenr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb1smenr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb1smenr`]
module"]
pub type AHB1SMENR = crate::Reg<ahb1smenr::AHB1SMENR_SPEC>;
#[doc = "AHB1 peripheral clocks enable in Sleep and Stop modes register"]
pub mod ahb1smenr;
#[doc = "AHB2SMENR (rw) register accessor: AHB2 peripheral clocks enable in Sleep and Stop modes register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb2smenr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb2smenr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb2smenr`]
module"]
pub type AHB2SMENR = crate::Reg<ahb2smenr::AHB2SMENR_SPEC>;
#[doc = "AHB2 peripheral clocks enable in Sleep and Stop modes register"]
pub mod ahb2smenr;
#[doc = "AHB3SMENR (rw) register accessor: AHB3 peripheral clocks enable in Sleep and Stop modes register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb3smenr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb3smenr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ahb3smenr`]
module"]
pub type AHB3SMENR = crate::Reg<ahb3smenr::AHB3SMENR_SPEC>;
#[doc = "AHB3 peripheral clocks enable in Sleep and Stop modes register"]
pub mod ahb3smenr;
#[doc = "APB1SMENR1 (rw) register accessor: APB1SMENR1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1smenr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1smenr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb1smenr1`]
module"]
pub type APB1SMENR1 = crate::Reg<apb1smenr1::APB1SMENR1_SPEC>;
#[doc = "APB1SMENR1"]
pub mod apb1smenr1;
#[doc = "APB1SMENR2 (rw) register accessor: APB1 peripheral clocks enable in Sleep and Stop modes register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb1smenr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb1smenr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb1smenr2`]
module"]
pub type APB1SMENR2 = crate::Reg<apb1smenr2::APB1SMENR2_SPEC>;
#[doc = "APB1 peripheral clocks enable in Sleep and Stop modes register 2"]
pub mod apb1smenr2;
#[doc = "APB2SMENR (rw) register accessor: APB2SMENR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb2smenr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb2smenr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`apb2smenr`]
module"]
pub type APB2SMENR = crate::Reg<apb2smenr::APB2SMENR_SPEC>;
#[doc = "APB2SMENR"]
pub mod apb2smenr;
#[doc = "CCIPR (rw) register accessor: CCIPR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccipr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccipr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccipr`]
module"]
pub type CCIPR = crate::Reg<ccipr::CCIPR_SPEC>;
#[doc = "CCIPR"]
pub mod ccipr;
#[doc = "BDCR (rw) register accessor: BDCR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bdcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bdcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bdcr`]
module"]
pub type BDCR = crate::Reg<bdcr::BDCR_SPEC>;
#[doc = "BDCR"]
pub mod bdcr;
#[doc = "CSR (rw) register accessor: CSR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`csr`]
module"]
pub type CSR = crate::Reg<csr::CSR_SPEC>;
#[doc = "CSR"]
pub mod csr;
#[doc = "CRRCR (rw) register accessor: Clock recovery RC register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crrcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crrcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`crrcr`]
module"]
pub type CRRCR = crate::Reg<crrcr::CRRCR_SPEC>;
#[doc = "Clock recovery RC register"]
pub mod crrcr;
#[doc = "CCIPR2 (rw) register accessor: Peripherals independent clock configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccipr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccipr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccipr2`]
module"]
pub type CCIPR2 = crate::Reg<ccipr2::CCIPR2_SPEC>;
#[doc = "Peripherals independent clock configuration register"]
pub mod ccipr2;
|
#![allow(clippy::upper_case_acronyms)]
// Table B.1
/// JPEG marker kinds (Table B.1). A marker is the byte that follows 0xFF in
/// the codestream; the exact byte for each variant is given by the
/// `From<Marker> for u8` impl below.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Marker {
    /// The 0x00 code — presumably the byte following a stuffed 0xFF in
    /// entropy-coded data (TODO confirm against the decoder).
    ZERO,
    /// Start Of Frame markers
    SOF(SOFType),
    /// Reserved for JPEG extensions
    JPG,
    /// Define Huffman table(s)
    DHT,
    /// Define arithmetic coding conditioning(s)
    DAC,
    /// Restart with modulo 8 count `m`
    RST(u8),
    /// Start of image
    SOI,
    /// End of image
    EOI,
    /// Start of scan
    SOS,
    /// Define quantization table(s)
    DQT,
    /// Define number of lines
    DNL,
    /// Define restart interval
    DRI,
    /// Define hierarchical progression
    DHP,
    /// Expand reference component(s)
    EXP,
    /// Reserved for application segments
    APP(u8),
    /// Reserved for JPEG extensions
    JPGn(u8),
    /// Comment
    COM,
    /// For temporary private use in arithmetic coding
    TEM,
    /// Reserved
    RES,
    /// Fill byte
    FILL,
}
/// Start-of-frame flavours. The `SOF(n)` noted on each variant is the low
/// nibble of the marker byte (0xC0..=0xCF range, interleaved with DHT, JPG
/// and DAC) — see the `From<Marker> for u8` impl.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SOFType {
    /// SOF(0)
    BaselineDCT,
    /// SOF(1)
    ExtendedSequentialDCT,
    /// SOF(2)
    ProgressiveDCT,
    /// SOF(3)
    Lossless,
    /// SOF(5)
    DifferentialSequentialDCT,
    /// SOF(6)
    DifferentialProgressiveDCT,
    /// SOF(7)
    DifferentialLossless,
    /// SOF(9)
    ExtendedSequentialDCTArithmetic,
    /// SOF(10)
    ProgressiveDCTArithmetic,
    /// SOF(11)
    LosslessArithmeticCoding,
    /// SOF(13)
    DifferentialSequentialDCTArithmetic,
    /// SOF(14)
    DifferentialProgressiveDCTArithmetic,
    /// SOF(15)
    DifferentialLosslessArithmetic,
}
impl From<Marker> for u8 {
    /// Map a marker to its one-byte code (the byte that follows 0xFF in the
    /// stream), per Table B.1.
    fn from(marker: Marker) -> Self {
        use self::{Marker::*, SOFType::*};
        match marker {
            ZERO => 0x00,
            TEM => 0x01,
            RES => 0x02,
            // SOF(n) variants share the 0xC0..=0xCF range with DHT/JPG/DAC.
            SOF(BaselineDCT) => 0xC0,
            SOF(ExtendedSequentialDCT) => 0xC1,
            SOF(ProgressiveDCT) => 0xC2,
            SOF(Lossless) => 0xC3,
            DHT => 0xC4,
            SOF(DifferentialSequentialDCT) => 0xC5,
            SOF(DifferentialProgressiveDCT) => 0xC6,
            SOF(DifferentialLossless) => 0xC7,
            JPG => 0xC8,
            SOF(ExtendedSequentialDCTArithmetic) => 0xC9,
            SOF(ProgressiveDCTArithmetic) => 0xCA,
            SOF(LosslessArithmeticCoding) => 0xCB,
            DAC => 0xCC,
            SOF(DifferentialSequentialDCTArithmetic) => 0xCD,
            SOF(DifferentialProgressiveDCTArithmetic) => 0xCE,
            SOF(DifferentialLosslessArithmetic) => 0xCF,
            // NOTE(review): assumes the payload is in range (RST: 0..=7,
            // APP: 0..=15, JPGn: 0..=13); out-of-range values alias other
            // marker codes, and large ones overflow `+` (panic in debug
            // builds) — confirm callers enforce the range.
            RST(v) => 0xD0 + v,
            SOI => 0xD8,
            EOI => 0xD9,
            SOS => 0xDA,
            DQT => 0xDB,
            DNL => 0xDC,
            DRI => 0xDD,
            DHP => 0xDE,
            EXP => 0xDF,
            APP(v) => 0xE0 + v,
            JPGn(v) => 0xF0 + v,
            COM => 0xFE,
            FILL => 0xFF,
        }
    }
}
|
pub mod auth_service;
pub mod link;
pub mod session;
pub mod user;
|
pub mod icmp;
pub mod tcp;
pub mod udp;
use std::collections::HashMap;
use std::net::IpAddr;
use std::net::Ipv4Addr;
use pnet::packet::ip::{IpNextHeaderProtocol, IpNextHeaderProtocols};
/// Dispatch an IP payload to the handler for its transport protocol.
///
/// UDP, TCP and ICMP payloads are forwarded to the matching submodule (TCP
/// additionally receives the shared stream map); any other protocol is
/// logged with its protocol number and payload length.
pub fn handle_transport_protocol(interface_name: &str,
                                 source: IpAddr,
                                 destination: IpAddr,
                                 protocol: IpNextHeaderProtocol,
                                 packet: &[u8],
                                 stream_hash_map: &mut HashMap<u64, tcp::tcp_stream>) {
    match protocol {
        IpNextHeaderProtocols::Udp =>
            udp::handle_udp_packet(interface_name, source, destination, packet),
        IpNextHeaderProtocols::Tcp =>
            tcp::handle_tcp_packet(interface_name, source, destination, packet, stream_hash_map),
        IpNextHeaderProtocols::Icmp =>
            icmp::handle_icmp_packet(interface_name, source, destination, packet),
        _ => {
            // Label the address family from the source address.
            let family = match source {
                IpAddr::V4(..) => "IPv4",
                _ => "IPv6",
            };
            println!("[{}]: Unknown {} packet: {} > {}; protocol: {:?} length: {}",
                     interface_name,
                     family,
                     source,
                     destination,
                     protocol,
                     packet.len())
        }
    }
}
|
// svd2rust-generated reader type aliases for the INTR_CAUSE register and its
// single-bit fields.
#[doc = "Reader of register INTR_CAUSE"]
pub type R = crate::R<u32, super::INTR_CAUSE>;
#[doc = "Reader of field `M`"]
pub type M_R = crate::R<bool, bool>;
#[doc = "Reader of field `S`"]
pub type S_R = crate::R<bool, bool>;
#[doc = "Reader of field `TX`"]
pub type TX_R = crate::R<bool, bool>;
#[doc = "Reader of field `RX`"]
pub type RX_R = crate::R<bool, bool>;
#[doc = "Reader of field `I2C_EC`"]
pub type I2C_EC_R = crate::R<bool, bool>;
#[doc = "Reader of field `SPI_EC`"]
pub type SPI_EC_R = crate::R<bool, bool>;
// Field accessors: each reads one bit of INTR_CAUSE as a boolean, using a
// bit-mask test instead of shift-then-mask.
impl R {
    #[doc = "Bit 0 - Master interrupt active ('interrupt_master'): INTR_M_MASKED != 0."]
    #[inline(always)]
    pub fn m(&self) -> M_R {
        M_R::new((self.bits & (1 << 0)) != 0)
    }
    #[doc = "Bit 1 - Slave interrupt active ('interrupt_slave'): INTR_S_MASKED != 0."]
    #[inline(always)]
    pub fn s(&self) -> S_R {
        S_R::new((self.bits & (1 << 1)) != 0)
    }
    #[doc = "Bit 2 - Transmitter interrupt active ('interrupt_tx'): INTR_TX_MASKED != 0."]
    #[inline(always)]
    pub fn tx(&self) -> TX_R {
        TX_R::new((self.bits & (1 << 2)) != 0)
    }
    #[doc = "Bit 3 - Receiver interrupt active ('interrupt_rx'): INTR_RX_MASKED != 0."]
    #[inline(always)]
    pub fn rx(&self) -> RX_R {
        RX_R::new((self.bits & (1 << 3)) != 0)
    }
    #[doc = "Bit 4 - Externally clock I2C interrupt active ('interrupt_i2c_ec'): INTR_I2C_EC_MASKED != 0."]
    #[inline(always)]
    pub fn i2c_ec(&self) -> I2C_EC_R {
        I2C_EC_R::new((self.bits & (1 << 4)) != 0)
    }
    #[doc = "Bit 5 - Externally clocked SPI interrupt active ('interrupt_spi_ec'): INTR_SPI_EC_MASKED != 0."]
    #[inline(always)]
    pub fn spi_ec(&self) -> SPI_EC_R {
        SPI_EC_R::new((self.bits & (1 << 5)) != 0)
    }
}
|
use std::str::FromStr;
/// A tag name recognised by this crate — either a standard OpenTracing tag
/// or an Ikrelln-specific one.
#[derive(Debug)]
pub struct KnownTag {
    /// The tag's string key, e.g. "http.method" or "test.suite".
    pub tag: String,
}
impl From<OpenTracingTag> for KnownTag {
    /// Wrap a standard OpenTracing tag as an owned `KnownTag`.
    fn from(tag: OpenTracingTag) -> KnownTag {
        KnownTag {
            tag: <&'static str>::from(tag).to_owned(),
        }
    }
}
impl From<IkrellnTags> for KnownTag {
    /// Wrap an Ikrelln-specific tag as an owned `KnownTag`.
    fn from(tag: IkrellnTags) -> KnownTag {
        KnownTag {
            tag: <&'static str>::from(tag).to_owned(),
        }
    }
}
// OpenTracing semantics v1.1
// https://github.com/opentracing/specification/blob/master/semantic_conventions.md#span-tags-table
/// Standard span tags from the OpenTracing semantic conventions. Each
/// variant converts to/from its string key via the `From`/`FromStr` impls
/// below (e.g. `HttpMethod` <-> "http.method").
#[derive(Clone)]
pub enum OpenTracingTag {
    Component,
    DbInstance,
    DbStatement,
    DbType,
    DbUser,
    Error,
    HttpMethod,
    HttpStatusCode,
    HttpUrl,
    MessageBusDestination,
    PeerAddress,
    PeerHostname,
    PeerIpv4,
    PeerIpv6,
    PeerPort,
    PeerService,
    SamplingPriority,
    SpanKind,
}
impl From<OpenTracingTag> for &'static str {
    /// The canonical OpenTracing string key for each tag.
    fn from(tag: OpenTracingTag) -> &'static str {
        match tag {
            OpenTracingTag::Component => "component",
            OpenTracingTag::DbInstance => "db.instance",
            OpenTracingTag::DbStatement => "db.statement",
            OpenTracingTag::DbType => "db.type",
            OpenTracingTag::DbUser => "db.user",
            OpenTracingTag::Error => "error",
            OpenTracingTag::HttpMethod => "http.method",
            OpenTracingTag::HttpStatusCode => "http.status_code",
            OpenTracingTag::HttpUrl => "http.url",
            OpenTracingTag::MessageBusDestination => "message_bus.destination",
            OpenTracingTag::PeerAddress => "peer.address",
            OpenTracingTag::PeerHostname => "peer.hostname",
            OpenTracingTag::PeerIpv4 => "peer.ipv4",
            OpenTracingTag::PeerIpv6 => "peer.ipv6",
            OpenTracingTag::PeerPort => "peer.port",
            OpenTracingTag::PeerService => "peer.service",
            OpenTracingTag::SamplingPriority => "sampling.priority",
            OpenTracingTag::SpanKind => "span.kind",
        }
    }
}
/// Error returned when a string is not a standard OpenTracing tag key.
/// Derives `Debug` so it satisfies the conventional `FromStr::Err` contract
/// (required for `str::parse(..).unwrap()` / `expect(..)`).
#[derive(Debug)]
pub struct NonOpenTracingTag;
impl FromStr for OpenTracingTag {
    type Err = NonOpenTracingTag;

    /// Parse a standard OpenTracing tag key (e.g. "http.method") into its
    /// variant; any other string yields `NonOpenTracingTag`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "component" => Ok(OpenTracingTag::Component),
            "db.instance" => Ok(OpenTracingTag::DbInstance),
            "db.statement" => Ok(OpenTracingTag::DbStatement),
            "db.type" => Ok(OpenTracingTag::DbType),
            "db.user" => Ok(OpenTracingTag::DbUser),
            "error" => Ok(OpenTracingTag::Error),
            "http.method" => Ok(OpenTracingTag::HttpMethod),
            "http.status_code" => Ok(OpenTracingTag::HttpStatusCode),
            "http.url" => Ok(OpenTracingTag::HttpUrl),
            "message_bus.destination" => Ok(OpenTracingTag::MessageBusDestination),
            "peer.address" => Ok(OpenTracingTag::PeerAddress),
            "peer.hostname" => Ok(OpenTracingTag::PeerHostname),
            "peer.ipv4" => Ok(OpenTracingTag::PeerIpv4),
            "peer.ipv6" => Ok(OpenTracingTag::PeerIpv6),
            "peer.port" => Ok(OpenTracingTag::PeerPort),
            "peer.service" => Ok(OpenTracingTag::PeerService),
            "sampling.priority" => Ok(OpenTracingTag::SamplingPriority),
            "span.kind" => Ok(OpenTracingTag::SpanKind),
            // `_` (not `&_`) is the idiomatic catch-all for a `&str` match.
            _ => Err(NonOpenTracingTag),
        }
    }
}
/// Ikrelln-specific test-reporting tags; each maps to/from a "test.*"
/// string key via the `From`/`FromStr` impls below.
#[derive(Clone)]
pub enum IkrellnTags {
    Class,
    Environment,
    Name,
    Result,
    StepParameters,
    StepStatus,
    StepType,
    Suite,
}
impl From<IkrellnTags> for &'static str {
    /// The canonical "test.*" string key for each Ikrelln tag.
    fn from(tag: IkrellnTags) -> &'static str {
        match tag {
            IkrellnTags::Class => "test.class",
            IkrellnTags::Environment => "test.environment",
            IkrellnTags::Name => "test.name",
            IkrellnTags::Result => "test.result",
            IkrellnTags::StepParameters => "test.step_parameters",
            IkrellnTags::StepStatus => "test.step_status",
            IkrellnTags::StepType => "test.step_type",
            IkrellnTags::Suite => "test.suite",
        }
    }
}
/// Error returned when a string is not an Ikrelln "test.*" tag key.
/// Derives `Debug` so it satisfies the conventional `FromStr::Err` contract
/// (required for `str::parse(..).unwrap()` / `expect(..)`).
#[derive(Debug)]
pub struct NonIkrellnTag;
impl FromStr for IkrellnTags {
    type Err = NonIkrellnTag;

    /// Parse an Ikrelln "test.*" key into its variant; any other string
    /// yields `NonIkrellnTag`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "test.class" => Ok(IkrellnTags::Class),
            "test.environment" => Ok(IkrellnTags::Environment),
            "test.name" => Ok(IkrellnTags::Name),
            "test.result" => Ok(IkrellnTags::Result),
            "test.step_parameters" => Ok(IkrellnTags::StepParameters),
            "test.step_status" => Ok(IkrellnTags::StepStatus),
            "test.step_type" => Ok(IkrellnTags::StepType),
            "test.suite" => Ok(IkrellnTags::Suite),
            // `_` (not `&_`) is the idiomatic catch-all for a `&str` match.
            _ => Err(NonIkrellnTag),
        }
    }
}
|
mod passport;
pub mod validate;
/// Part 1: parse the bundled input and report how many passports are valid,
/// then dump every parse result for inspection.
pub fn solve_1() {
    let input = include_str!("input.txt");
    let parsed = passport::parse(input);
    println!(
        // Fixed wording: this module parses passports, not passwords.
        "{} valid passports found (of {} total).",
        parsed.iter().filter(|p| p.is_ok()).count(),
        parsed.len()
    );
    for pass in parsed {
        println!("{:?}", pass);
    }
}
|
use common::tokio::time::Instant;
use std::time::Duration;
use super::super::{TIMER_G, TIMER_H, TIMER_T2};
/// State for the "Completed" phase of a transaction, tracking response
/// retransmissions against timers G, H and T2.
#[derive(Debug, Clone, Copy)]
pub struct Completed {
    /// When this state was entered; drives the TIMER_H timeout.
    pub entered_at: Instant,
    /// Number of retransmissions performed so far.
    pub retransmissions_count: u8,
    /// When the response was last (re)transmitted.
    pub last_retransmission_at: Instant,
}
impl Completed {
    /// Interval to wait before the next retransmission: TIMER_G doubled once
    /// per retransmission already performed, capped at TIMER_T2 (presumably
    /// the SIP Timer G/T2 rule from RFC 3261 §17.2.1 — confirm).
    ///
    /// Computed directly as `TIMER_G * 2^count` (the original summed
    /// 2^count copies of TIMER_G, which was exponential-time and panicked on
    /// `i32::pow` overflow for large counts); saturating arithmetic keeps the
    /// result at the TIMER_T2 cap instead of panicking.
    pub fn next_retransmission(&self) -> Duration {
        let factor = 2u32.saturating_pow(self.retransmissions_count.into());
        std::cmp::min(
            Duration::from_millis(TIMER_G).saturating_mul(factor),
            Duration::from_millis(TIMER_T2),
        )
    }

    /// Backward-compatible alias for [`Self::next_retransmission`], kept for
    /// callers using the original (misspelled) name.
    pub fn next_retrasmission(&self) -> Duration {
        self.next_retransmission()
    }

    /// True once this state has been active for at least TIMER_H.
    pub fn has_timedout(&self) -> bool {
        self.entered_at.elapsed() >= Duration::from_millis(TIMER_H)
    }

    /// True when the last retransmission is older than the current backoff
    /// interval.
    pub fn should_retransmit(&self) -> bool {
        self.last_retransmission_at.elapsed() > self.next_retransmission()
    }

    /// Record a retransmission: bump the count (saturating at u8::MAX) and
    /// stamp the time.
    pub fn retransmit(self) -> Self {
        Self {
            retransmissions_count: self.retransmissions_count.saturating_add(1),
            last_retransmission_at: Instant::now(),
            ..self
        }
    }
}
impl Default for Completed {
    /// Fresh state entered "now" with no retransmissions performed yet.
    fn default() -> Self {
        Self {
            entered_at: Instant::now(),
            retransmissions_count: 0,
            last_retransmission_at: Instant::now(),
        }
    }
}
|
// svd2rust-generated reader/writer type aliases for DDRCTRL_DFITMG0 and its
// fields. The FieldWriter const parameters are the field width in bits and
// the bit offset `O`.
#[doc = "Register `DDRCTRL_DFITMG0` reader"]
pub type R = crate::R<DDRCTRL_DFITMG0_SPEC>;
#[doc = "Register `DDRCTRL_DFITMG0` writer"]
pub type W = crate::W<DDRCTRL_DFITMG0_SPEC>;
#[doc = "Field `DFI_TPHY_WRLAT` reader - DFI_TPHY_WRLAT"]
pub type DFI_TPHY_WRLAT_R = crate::FieldReader;
#[doc = "Field `DFI_TPHY_WRLAT` writer - DFI_TPHY_WRLAT"]
pub type DFI_TPHY_WRLAT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>;
#[doc = "Field `DFI_TPHY_WRDATA` reader - DFI_TPHY_WRDATA"]
pub type DFI_TPHY_WRDATA_R = crate::FieldReader;
#[doc = "Field `DFI_TPHY_WRDATA` writer - DFI_TPHY_WRDATA"]
pub type DFI_TPHY_WRDATA_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>;
#[doc = "Field `DFI_T_RDDATA_EN` reader - DFI_T_RDDATA_EN"]
pub type DFI_T_RDDATA_EN_R = crate::FieldReader;
#[doc = "Field `DFI_T_RDDATA_EN` writer - DFI_T_RDDATA_EN"]
pub type DFI_T_RDDATA_EN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `DFI_T_CTRL_DELAY` reader - DFI_T_CTRL_DELAY"]
pub type DFI_T_CTRL_DELAY_R = crate::FieldReader;
#[doc = "Field `DFI_T_CTRL_DELAY` writer - DFI_T_CTRL_DELAY"]
pub type DFI_T_CTRL_DELAY_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>;
// svd2rust-generated field readers: each extracts its field's bits from the
// 32-bit register value.
impl R {
    #[doc = "Bits 0:5 - DFI_TPHY_WRLAT"]
    #[inline(always)]
    pub fn dfi_tphy_wrlat(&self) -> DFI_TPHY_WRLAT_R {
        DFI_TPHY_WRLAT_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bits 8:13 - DFI_TPHY_WRDATA"]
    #[inline(always)]
    pub fn dfi_tphy_wrdata(&self) -> DFI_TPHY_WRDATA_R {
        DFI_TPHY_WRDATA_R::new(((self.bits >> 8) & 0x3f) as u8)
    }
    #[doc = "Bits 16:22 - DFI_T_RDDATA_EN"]
    #[inline(always)]
    pub fn dfi_t_rddata_en(&self) -> DFI_T_RDDATA_EN_R {
        DFI_T_RDDATA_EN_R::new(((self.bits >> 16) & 0x7f) as u8)
    }
    #[doc = "Bits 24:28 - DFI_T_CTRL_DELAY"]
    #[inline(always)]
    pub fn dfi_t_ctrl_delay(&self) -> DFI_T_CTRL_DELAY_R {
        DFI_T_CTRL_DELAY_R::new(((self.bits >> 24) & 0x1f) as u8)
    }
}
// svd2rust-generated field writers; the second const generic on each writer
// type is the field's bit offset.
impl W {
    #[doc = "Bits 0:5 - DFI_TPHY_WRLAT"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_tphy_wrlat(&mut self) -> DFI_TPHY_WRLAT_W<DDRCTRL_DFITMG0_SPEC, 0> {
        DFI_TPHY_WRLAT_W::new(self)
    }
    #[doc = "Bits 8:13 - DFI_TPHY_WRDATA"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_tphy_wrdata(&mut self) -> DFI_TPHY_WRDATA_W<DDRCTRL_DFITMG0_SPEC, 8> {
        DFI_TPHY_WRDATA_W::new(self)
    }
    #[doc = "Bits 16:22 - DFI_T_RDDATA_EN"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_t_rddata_en(&mut self) -> DFI_T_RDDATA_EN_W<DDRCTRL_DFITMG0_SPEC, 16> {
        DFI_T_RDDATA_EN_W::new(self)
    }
    #[doc = "Bits 24:28 - DFI_T_CTRL_DELAY"]
    #[inline(always)]
    #[must_use]
    pub fn dfi_t_ctrl_delay(&mut self) -> DFI_T_CTRL_DELAY_W<DDRCTRL_DFITMG0_SPEC, 24> {
        DFI_T_CTRL_DELAY_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe by svd2rust convention: raw bit patterns bypass the per-field
    // writers and their validity guarantees.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DDRCTRL DFI timing register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrctrl_dfitmg0::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ddrctrl_dfitmg0::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRCTRL_DFITMG0_SPEC;
impl crate::RegisterSpec for DDRCTRL_DFITMG0_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrctrl_dfitmg0::R`](R) reader structure"]
impl crate::Readable for DDRCTRL_DFITMG0_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ddrctrl_dfitmg0::W`](W) writer structure"]
impl crate::Writable for DDRCTRL_DFITMG0_SPEC {
    // No write-one-to-clear / write-zero-to-clear fields on this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DDRCTRL_DFITMG0 to value 0x0702_0002"]
impl crate::Resettable for DDRCTRL_DFITMG0_SPEC {
    const RESET_VALUE: Self::Ux = 0x0702_0002;
}
|
use hyper::{Client, Url};
use serde_json;
use ::{Package};
use ::package::PackageError;
/// A package search request holding the query string.
pub struct Search {
    /// The search query as entered by the user.
    query: String
}
impl Search {
    /// Build a search over `query` (copied into owned storage).
    pub fn new(query: &str) -> Search {
        Search {
            query: query.to_owned(),
        }
    }

    /// Run the search, delegating to `Package::search`.
    pub fn execute(&self) -> Result<Vec<Package>, PackageError> {
        Package::search(&self.query)
    }
}
|
use std::sync::{Arc, Mutex};
use std::vec::Vec;
use std::{panic, vec};
use crate::{Busy, Error, PinState, Ready, Reset, Transaction, Transactional};
use embedded_hal::blocking::delay::DelayMs;
use embedded_hal::blocking::spi;
use embedded_hal::digital::v2;
/// Base mock type
///
/// Owns the shared expectation/call log and hands out ids to the
/// `Spi`/`Pin`/`Delay` handles it creates.
pub struct Mock {
    /// State shared with every handle created from this mock.
    inner: Arc<Mutex<Inner>>,
    /// Next id to assign to a created handle.
    count: Id,
}
/// Identifier distinguishing individual mock peripherals in transactions.
pub type Id = u32;
/// Mock Transactional SPI implementation
#[derive(Clone, Debug)]
pub struct Spi {
    /// Id identifying this peripheral in recorded transactions.
    id: Id,
    /// State shared with the parent `Mock`.
    inner: Arc<Mutex<Inner>>,
}
/// Mock Pin implementation
#[derive(Clone, Debug)]
pub struct Pin {
    /// Id identifying this peripheral in recorded transactions.
    id: Id,
    /// State shared with the parent `Mock`.
    inner: Arc<Mutex<Inner>>,
}
/// Mock Delay implementation
#[derive(Clone, Debug)]
pub struct Delay {
    /// Id identifying this peripheral in recorded transactions.
    id: Id,
    /// State shared with the parent `Mock`.
    inner: Arc<Mutex<Inner>>,
}
/// Mock transaction type for setting and checking expectations
#[derive(Clone, Debug, PartialEq)]
pub enum MockTransaction {
    /// Placeholder / no expectation.
    None,
    /// Transactional SPI write: (peripheral id, prefix, outgoing data).
    SpiWrite(Id, Vec<u8>, Vec<u8>),
    /// Transactional SPI read: (peripheral id, prefix, incoming data).
    SpiRead(Id, Vec<u8>, Vec<u8>),
    /// Composite transactional exec: (peripheral id, sub-operations).
    SpiExec(Id, Vec<MockExec>),
    /// Busy-pin query answered with the given state.
    Busy(Id, PinState),
    /// Ready-pin query answered with the given state.
    Ready(Id, PinState),
    /// Reset pin driven to the given state.
    Reset(Id, PinState),
    /// Plain SPI write of the given bytes.
    Write(Id, Vec<u8>),
    /// Plain SPI transfer: (peripheral id, outgoing, incoming).
    Transfer(Id, Vec<u8>, Vec<u8>),
    /// Input-pin is-high query answered with the given value.
    IsHigh(Id, bool),
    /// Input-pin is-low query answered with the given value.
    IsLow(Id, bool),
    /// Output pin driven high.
    SetHigh(Id),
    /// Output pin driven low.
    SetLow(Id),
    /// Delay of the given number of milliseconds.
    DelayMs(u32),
}
impl MockTransaction {
pub fn spi_write<A, B>(spi: &Spi, prefix: A, outgoing: B) -> Self
where
A: AsRef<[u8]>,
B: AsRef<[u8]>,
{
MockTransaction::SpiWrite(spi.id, prefix.as_ref().to_vec(), outgoing.as_ref().to_vec())
}
pub fn spi_read<A, B>(spi: &Spi, prefix: A, incoming: B) -> Self
where
A: AsRef<[u8]>,
B: AsRef<[u8]>,
{
MockTransaction::SpiRead(spi.id, prefix.as_ref().to_vec(), incoming.as_ref().to_vec())
}
pub fn busy(spi: &Spi, value: PinState) -> Self {
MockTransaction::Busy(spi.id, value)
}
pub fn ready(spi: &Spi, value: PinState) -> Self {
MockTransaction::Ready(spi.id, value)
}
pub fn reset(spi: &Spi, value: PinState) -> Self {
MockTransaction::Reset(spi.id, value)
}
pub fn delay_ms(v: u32) -> Self {
MockTransaction::DelayMs(v)
}
pub fn write<B>(spi: &Spi, outgoing: B) -> Self
where
B: AsRef<[u8]>,
{
MockTransaction::Write(spi.id, outgoing.as_ref().to_vec())
}
pub fn transfer<B>(spi: &Spi, outgoing: B, incoming: B) -> Self
where
B: AsRef<[u8]>,
{
MockTransaction::Transfer(
spi.id,
outgoing.as_ref().to_vec(),
incoming.as_ref().to_vec(),
)
}
pub fn is_high(pin: &Pin, value: bool) -> Self {
MockTransaction::IsHigh(pin.id, value)
}
pub fn is_low(pin: &Pin, value: bool) -> Self {
MockTransaction::IsLow(pin.id, value)
}
pub fn set_high(pin: &Pin) -> Self {
MockTransaction::SetHigh(pin.id)
}
pub fn set_low(pin: &Pin) -> Self {
MockTransaction::SetLow(pin.id)
}
}
/// MockExec type for composing mock exec transactions
#[derive(Clone, Debug, PartialEq)]
pub enum MockExec {
    /// Outgoing bytes of a write sub-operation.
    SpiWrite(Vec<u8>),
    /// Incoming bytes for a read sub-operation.
    SpiRead(Vec<u8>),
}
impl<'a> From<&Transaction<'a>> for MockExec {
    /// Snapshot a transaction's buffer into an owned `MockExec`.
    ///
    /// Bug fix: the original built `Vec::with_capacity(d.len())` (length 0)
    /// and then called `copy_from_slice`, which panics whenever the source
    /// is non-empty because the slice lengths differ. `to_vec()` performs
    /// the intended allocate-and-copy.
    fn from(t: &Transaction<'a>) -> Self {
        match t {
            Transaction::Read(d) => MockExec::SpiRead(d.to_vec()),
            Transaction::Write(d) => MockExec::SpiWrite(d.to_vec()),
        }
    }
}
/// Shared mock state: the scripted expectations and the calls actually made.
#[derive(Clone, Debug, PartialEq)]
struct Inner {
    /// Index of the next expectation to serve.
    index: usize,
    /// Scripted transactions, in order.
    expected: Vec<MockTransaction>,
    /// Transactions recorded at runtime, in order.
    actual: Vec<MockTransaction>,
}
impl Inner {
    /// Assert that the recorded calls exactly match the scripted
    /// expectations (panics with a diff on mismatch).
    fn finalise(&mut self) {
        assert_eq!(self.expected, self.actual);
    }
}
impl Mock {
/// Create a new mock instance
pub fn new() -> Self {
Self {
inner: Arc::new(Mutex::new(Inner {
index: 0,
expected: Vec::new(),
actual: Vec::new(),
})),
count: 0,
}
}
/// Set expectations on the instance
pub fn expect<T>(&mut self, transactions: T)
where
T: AsRef<[MockTransaction]>,
{
let expected: Vec<_> = transactions.as_ref().to_vec();
let actual = vec![];
let i = Inner {
index: 0,
expected,
actual,
};
*self.inner.lock().unwrap() = i;
}
pub fn spi(&mut self) -> Spi {
let id = self.count;
self.count += 1;
Spi {
inner: self.inner.clone(),
id,
}
}
pub fn pin(&mut self) -> Pin {
let id = self.count;
self.count += 1;
Pin {
inner: self.inner.clone(),
id,
}
}
pub fn delay(&mut self) -> Delay {
let id = self.count;
self.count += 1;
Delay {
inner: self.inner.clone(),
id,
}
}
/// Finalise expectations
/// This will cause previous expectations to be evaluated
pub fn finalise(&self) {
let mut i = self.inner.lock().unwrap();
i.finalise();
}
}
impl Transactional for Spi {
    type Error = Error<(), ()>;

    /// Read data from a specified address
    ///
    /// When the current expectation is a `SpiRead` whose buffer matches
    /// `data` in length, its bytes are copied into `data`; the call is then
    /// recorded and the expectation index advanced.
    fn spi_read(&mut self, prefix: &[u8], data: &mut [u8]) -> Result<(), Self::Error> {
        let mut i = self.inner.lock().unwrap();
        let index = i.index;
        // Length-guard the copy (consistent with `transfer`): a mis-sized
        // expectation then surfaces as a mismatch in `finalise()` instead of
        // a panic here (`copy_from_slice` panics on unequal lengths).
        if let Some(MockTransaction::SpiRead(_id, _outgoing, incoming)) = i.expected.get(index) {
            if incoming.len() == data.len() {
                data.copy_from_slice(incoming);
            }
        }
        // Save actual call
        i.actual
            .push(MockTransaction::SpiRead(self.id, prefix.into(), data.into()));
        // Update expectation index
        i.index += 1;
        Ok(())
    }

    /// Write data to a specified register address
    ///
    /// The call is recorded and the expectation index advanced.
    fn spi_write(&mut self, prefix: &[u8], data: &[u8]) -> Result<(), Self::Error> {
        let mut i = self.inner.lock().unwrap();
        // Save actual call
        i.actual
            .push(MockTransaction::SpiWrite(self.id, prefix.into(), data.into()));
        // Update expectation index
        i.index += 1;
        Ok(())
    }

    /// Execute the provided transactions
    ///
    /// Records the whole batch, then fills each `Read` transaction's buffer
    /// from the pairwise-matching expected `SpiRead`.
    fn spi_exec(&mut self, transactions: &mut [Transaction]) -> Result<(), Self::Error> {
        let mut i = self.inner.lock().unwrap();
        let index = i.index;
        // Save actual calls
        let t: Vec<MockExec> = transactions.iter().map(MockExec::from).collect();
        i.actual.push(MockTransaction::SpiExec(self.id, t));
        // Load expected reads. `get` (not indexing) so an unexpected extra
        // call shows up in `finalise()` rather than panicking here;
        // transactions beyond the expectation list are left untouched.
        if let Some(MockTransaction::SpiExec(_id, e)) = i.expected.get(index) {
            for (t, x) in transactions.iter_mut().zip(e.iter()) {
                if let (Transaction::Read(v), MockExec::SpiRead(d)) = (t, x) {
                    // Length-guarded copy, as in `spi_read`.
                    if v.len() == d.len() {
                        v.copy_from_slice(d);
                    }
                }
            }
        }
        // Update expectation index
        i.index += 1;
        Ok(())
    }
}
impl Busy for Spi {
    type Error = Error<(), ()>;

    /// Check peripheral busy status
    ///
    /// Answers with the queued `Busy` expectation's state (defaulting to
    /// low), records the call, and advances the expectation index.
    fn get_busy(&mut self) -> Result<PinState, Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        let idx = inner.index;
        let state = if let Some(MockTransaction::Busy(_id, s)) = inner.expected.get(idx) {
            s.clone()
        } else {
            PinState::Low
        };
        inner.actual.push(MockTransaction::Busy(self.id, state.clone()));
        inner.index += 1;
        Ok(state)
    }
}
impl Ready for Spi {
    type Error = Error<(), ()>;

    /// Check peripheral ready status
    ///
    /// Answers with the queued `Ready` expectation's state (defaulting to
    /// low), records the call, and advances the expectation index.
    fn get_ready(&mut self) -> Result<PinState, Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        let idx = inner.index;
        let state = if let Some(MockTransaction::Ready(_id, s)) = inner.expected.get(idx) {
            s.clone()
        } else {
            PinState::Low
        };
        inner.actual.push(MockTransaction::Ready(self.id, state.clone()));
        inner.index += 1;
        Ok(state)
    }
}
impl Reset for Spi {
    type Error = Error<(), ()>;
    /// Record a reset pin state change (the original doc comment was a
    /// copy-paste of the Ready one).
    fn set_reset(&mut self, state: PinState) -> Result<(), Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        inner.actual.push(MockTransaction::Reset(self.id, state));
        inner.index += 1;
        Ok(())
    }
}
impl DelayMs<u32> for Spi {
    /// Record a delay request of `t` milliseconds (no actual sleeping).
    fn delay_ms(&mut self, t: u32) {
        let mut inner = self.inner.lock().unwrap();
        inner.actual.push(MockTransaction::DelayMs(t));
        inner.index += 1;
    }
}
impl spi::Transfer<u8> for Spi {
    type Error = Error<(), ()>;
    fn transfer<'w>(&mut self, data: &'w mut [u8]) -> Result<&'w [u8], Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        let index = inner.index;
        // Snapshot the outgoing bytes before they are overwritten below.
        // (Previously this local was confusingly named `incoming`.)
        let sent: Vec<_> = data.into();
        // If the next expectation is a transfer of matching length, emulate
        // the bus by replacing `data` with the expected incoming bytes.
        if let Some(MockTransaction::Transfer(_id, _outgoing, incoming)) =
            inner.expected.get(index)
        {
            if incoming.len() == data.len() {
                data.copy_from_slice(incoming);
            }
        }
        // Record the call as (sent, received).
        inner
            .actual
            .push(MockTransaction::Transfer(self.id, sent, data.into()));
        inner.index += 1;
        Ok(data)
    }
}
impl spi::Write<u8> for Spi {
    type Error = Error<(), ()>;
    /// Record a raw SPI write of `data`.
    fn write<'w>(&mut self, data: &[u8]) -> Result<(), Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        inner.actual.push(MockTransaction::Write(self.id, data.to_vec()));
        inner.index += 1;
        Ok(())
    }
}
impl v2::InputPin for Pin {
    type Error = ();
    fn is_high(&self) -> Result<bool, Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        // Answer with the expected level when the next expectation matches
        // this pin query kind; otherwise report `false`.
        let level = match inner.expected.get(inner.index) {
            Some(MockTransaction::IsHigh(_id, v)) => *v,
            _ => false,
        };
        inner.actual.push(MockTransaction::IsHigh(self.id, level));
        inner.index += 1;
        Ok(level)
    }
    fn is_low(&self) -> Result<bool, Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        let level = match inner.expected.get(inner.index) {
            Some(MockTransaction::IsLow(_id, v)) => *v,
            _ => false,
        };
        inner.actual.push(MockTransaction::IsLow(self.id, level));
        inner.index += 1;
        Ok(level)
    }
}
impl v2::OutputPin for Pin {
    type Error = ();
    /// Record a set-high call; output pins have no expectation-driven result.
    fn set_high(&mut self) -> Result<(), Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        inner.actual.push(MockTransaction::SetHigh(self.id));
        inner.index += 1;
        Ok(())
    }
    /// Record a set-low call.
    fn set_low(&mut self) -> Result<(), Self::Error> {
        let mut inner = self.inner.lock().unwrap();
        inner.actual.push(MockTransaction::SetLow(self.id));
        inner.index += 1;
        Ok(())
    }
}
impl DelayMs<u32> for Delay {
    /// Record a delay request of `t` milliseconds (no actual sleeping).
    fn delay_ms(&mut self, t: u32) {
        let mut inner = self.inner.lock().unwrap();
        inner.actual.push(MockTransaction::DelayMs(t));
        inner.index += 1;
    }
}
#[cfg(test)]
mod test {
// NOTE(review): `use std::*` is redundant with the explicit imports below —
// candidate for cleanup.
use std::*;
use std::{panic, vec};
use super::*;
// Happy path: a queued SpiRead expectation fills the caller's buffer.
#[test]
fn test_transactional_read() {
let mut m = Mock::new();
let mut s = m.spi();
let prefix = vec![0xFF];
let data = vec![0xAA, 0xBB];
m.expect(vec![MockTransaction::spi_read(
&s,
prefix.clone(),
data.clone(),
)]);
let mut d = [0u8; 2];
s.spi_read(&prefix, &mut d).expect("read failure");
m.finalise();
assert_eq!(&data, &d);
}
// A read performed against a write expectation must fail finalisation.
#[test]
#[should_panic]
fn test_transactional_read_expect_write() {
let mut m = Mock::new();
let mut s = m.spi();
let prefix = vec![0xFF];
let data = vec![0xAA, 0xBB];
m.expect(vec![MockTransaction::spi_write(
&s,
prefix.clone(),
data.clone(),
)]);
let mut d = [0u8; 2];
s.spi_read(&prefix, &mut d).expect("read failure");
m.finalise();
assert_eq!(&data, &d);
}
// Happy path: a write matching the expectation finalises cleanly.
#[test]
fn test_transactional_write() {
let mut m = Mock::new();
let mut s = m.spi();
let prefix = vec![0xFF];
let data = vec![0xAA, 0xBB];
m.expect(vec![MockTransaction::spi_write(
&s,
prefix.clone(),
data.clone(),
)]);
s.spi_write(&prefix, &data).expect("write failure");
m.finalise();
}
// A write performed against a read expectation must fail finalisation.
#[test]
#[should_panic]
fn test_transactional_write_expect_read() {
let mut m = Mock::new();
let mut s = m.spi();
let prefix = vec![0xFF];
let data = vec![0xAA, 0xBB];
m.expect(vec![MockTransaction::spi_read(
&s,
prefix.clone(),
data.clone(),
)]);
s.spi_write(&prefix, &data).expect("write failure");
m.finalise();
}
// embedded-hal blocking Write trait routes through the same mock.
#[test]
fn test_standard_write() {
use embedded_hal::blocking::spi::Write;
let mut m = Mock::new();
let mut s = m.spi();
let data = vec![0xAA, 0xBB];
m.expect(vec![MockTransaction::write(&s, data.clone())]);
s.write(&data).expect("write failure");
m.finalise();
}
// embedded-hal blocking Transfer replaces the buffer with expected incoming bytes.
#[test]
fn test_standard_transfer() {
use embedded_hal::blocking::spi::Transfer;
let mut m = Mock::new();
let mut s = m.spi();
let outgoing = vec![0xAA, 0xBB];
let incoming = vec![0xCC, 0xDD];
m.expect(vec![MockTransaction::transfer(
&s,
outgoing.clone(),
incoming.clone(),
)]);
let mut d = outgoing.clone();
s.transfer(&mut d).expect("read failure");
m.finalise();
assert_eq!(&incoming, &d);
}
// Input pins answer from expectations; output pins are recorded only.
#[test]
fn test_pins() {
use embedded_hal::digital::v2::{InputPin, OutputPin};
let mut m = Mock::new();
let mut p = m.pin();
m.expect(vec![
MockTransaction::is_high(&p, true),
MockTransaction::is_low(&p, false),
MockTransaction::set_high(&p),
MockTransaction::set_low(&p),
]);
assert_eq!(true, p.is_high().unwrap());
assert_eq!(false, p.is_low().unwrap());
p.set_high().unwrap();
p.set_low().unwrap();
m.finalise();
}
// Expectations are keyed by pin id: using the wrong pin must fail.
#[test]
#[should_panic]
fn test_incorrect_pin() {
use embedded_hal::digital::v2::InputPin;
let mut m = Mock::new();
let p1 = m.pin();
let p2 = m.pin();
m.expect(vec![MockTransaction::is_high(&p1, true)]);
p2.is_high().unwrap();
m.finalise();
}
}
|
#[cfg(test)]
mod url_test;
use crate::errors::*;
use util::Error;
use std::borrow::Cow;
use std::convert::From;
use std::fmt;
/// The type of server used in the ice.URL structure.
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum SchemeType {
    /// The URL represents a STUN server.
    Stun,
    /// The URL represents a STUNS (secure) server.
    Stuns,
    /// The URL represents a TURN server.
    Turn,
    /// The URL represents a TURNS (secure) server.
    Turns,
    /// Default public constant to use for "enum" like struct comparisons when no value was defined.
    Unknown,
}
impl Default for SchemeType {
    /// An unset scheme defaults to `Unknown`.
    fn default() -> Self {
        SchemeType::Unknown
    }
}
impl From<&str> for SchemeType {
    /// Defines a procedure for creating a new `SchemeType` from a raw
    /// string naming the scheme type.
    fn from(raw: &str) -> Self {
        match raw {
            "stun" => SchemeType::Stun,
            "stuns" => SchemeType::Stuns,
            "turn" => SchemeType::Turn,
            "turns" => SchemeType::Turns,
            _ => SchemeType::Unknown,
        }
    }
}
impl fmt::Display for SchemeType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            SchemeType::Stun => "stun",
            SchemeType::Stuns => "stuns",
            SchemeType::Turn => "turn",
            SchemeType::Turns => "turns",
            SchemeType::Unknown => "unknown",
        };
        write!(f, "{}", name)
    }
}
/// The transport protocol type that is used in the `ice::url::Url` structure.
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum ProtoType {
    /// The URL uses a UDP transport.
    Udp,
    /// The URL uses a TCP transport.
    Tcp,
    /// Unrecognised transport.
    Unknown,
}
impl Default for ProtoType {
    /// The default transport is UDP.
    fn default() -> Self {
        ProtoType::Udp
    }
}
impl From<&str> for ProtoType {
    /// Defines a procedure for creating a new `ProtoType` from a raw
    /// string naming the transport protocol type.
    fn from(raw: &str) -> Self {
        match raw {
            "udp" => ProtoType::Udp,
            "tcp" => ProtoType::Tcp,
            _ => ProtoType::Unknown,
        }
    }
}
impl fmt::Display for ProtoType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            ProtoType::Udp => "udp",
            ProtoType::Tcp => "tcp",
            ProtoType::Unknown => "unknown",
        };
        write!(f, "{}", name)
    }
}
/// Represents a STUN (rfc7064) or TURN (rfc7065) URL.
#[derive(Debug, Clone, Default)]
pub struct Url {
/// URL scheme (stun/stuns/turn/turns).
pub scheme: SchemeType,
/// Host, stored without IPv6 brackets (stripped in `parse_url`).
pub host: String,
/// Port; defaulted in `parse_url` when absent from the raw URL.
pub port: u16,
/// Username; left empty by `parse_url`, filled in by callers.
pub username: String,
/// Password; left empty by `parse_url`, filled in by callers.
pub password: String,
/// Transport protocol (from the TURN `?transport=` query, or the scheme default).
pub proto: ProtoType,
}
impl fmt::Display for Url {
    /// Formats the URL back into `scheme:host:port[?transport=proto]` form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `host` is stored without brackets, so any colon marks an IPv6
        // literal and the host must be re-bracketed to keep the output
        // unambiguous. The previous check (`contains("::")`) missed
        // fully-expanded IPv6 addresses such as "2001:db8:0:1:1:1:1:1".
        let host = if self.host.contains(':') {
            format!("[{}]", self.host)
        } else {
            self.host.clone()
        };
        if self.scheme == SchemeType::Turn || self.scheme == SchemeType::Turns {
            write!(
                f,
                "{}:{}:{}?transport={}",
                self.scheme, host, self.port, self.proto
            )
        } else {
            write!(f, "{}:{}:{}", self.scheme, host, self.port)
        }
    }
}
impl Url {
/// Parses a STUN or TURN urls following the ABNF syntax described in
/// [IETF rfc-7064](https://tools.ietf.org/html/rfc7064) and
/// [IETF rfc-7065](https://tools.ietf.org/html/rfc7065) respectively.
pub fn parse_url(raw: &str) -> Result<Self, Error> {
// work around for url crate
if raw.contains("//") {
return Err(ERR_INVALID_URL.to_owned());
}
let mut s = raw.to_string();
let pos = raw.find(':');
if let Some(p) = pos {
s.replace_range(p..=p, "://");
} else {
return Err(ERR_SCHEME_TYPE.to_owned());
}
let raw_parts = url::Url::parse(&s)?;
let scheme = raw_parts.scheme().into();
let host = if let Some(host) = raw_parts.host_str() {
host.trim()
.trim_start_matches('[')
.trim_end_matches(']')
.to_owned()
} else {
return Err(ERR_HOST.to_owned());
};
let port = if let Some(port) = raw_parts.port() {
port
} else if scheme == SchemeType::Stun || scheme == SchemeType::Turn {
3478
} else {
5349
};
let mut q_args = raw_parts.query_pairs();
let proto = match scheme {
SchemeType::Stun => {
if q_args.count() > 0 {
return Err(ERR_STUN_QUERY.to_owned());
}
ProtoType::Udp
}
SchemeType::Stuns => {
if q_args.count() > 0 {
return Err(ERR_STUN_QUERY.to_owned());
}
ProtoType::Tcp
}
SchemeType::Turn => {
if q_args.count() > 1 {
return Err(ERR_INVALID_QUERY.to_owned());
}
if let Some((key, value)) = q_args.next() {
if key == Cow::Borrowed("transport") {
let proto: ProtoType = value.as_ref().into();
if proto == ProtoType::Unknown {
return Err(ERR_PROTO_TYPE.to_owned());
}
proto
} else {
return Err(ERR_INVALID_QUERY.to_owned());
}
} else {
ProtoType::Udp
}
}
SchemeType::Turns => {
if q_args.count() > 1 {
return Err(ERR_INVALID_QUERY.to_owned());
}
if let Some((key, value)) = q_args.next() {
if key == Cow::Borrowed("transport") {
let proto: ProtoType = value.as_ref().into();
if proto == ProtoType::Unknown {
return Err(ERR_PROTO_TYPE.to_owned());
}
proto
} else {
return Err(ERR_INVALID_QUERY.to_owned());
}
} else {
ProtoType::Tcp
}
}
SchemeType::Unknown => {
return Err(ERR_SCHEME_TYPE.to_owned());
}
};
Ok(Self {
scheme,
host,
port,
username: "".to_owned(),
password: "".to_owned(),
proto,
})
}
/*
fn parse_proto(raw:&str) ->Result<ProtoType, Error> {
let qArgs= raw.split('=');
if qArgs.len() != 2 {
return Err(ERR_INVALID_QUERY.to_owned());
}
var proto ProtoType
if rawProto := qArgs.Get("transport"); rawProto != "" {
if proto = NewProtoType(rawProto); proto == ProtoType(0) {
return ProtoType(Unknown), ErrProtoType
}
return proto, nil
}
if len(qArgs) > 0 {
return ProtoType(Unknown), ErrInvalidQuery
}
return proto, nil
}*/
/// Returns whether the this URL's scheme describes secure scheme or not.
#[must_use]
pub fn is_secure(&self) -> bool {
self.scheme == SchemeType::Stuns || self.scheme == SchemeType::Turns
}
}
|
use isahc::{prelude::*, Request};
/// Fetches the `link` column of the `anime` table from a Supabase REST
/// endpoint and returns the response body with all "[" and "]" removed.
///
/// NOTE(review): this sends a request body on a GET request, which many
/// servers ignore or reject — confirm this is intentional. The body is also
/// not valid JSON (`you up` is unquoted); verify against the API.
pub fn notmain() -> Result<String, isahc::Error> {
let mut response =
Request::get("https://hurrxycxigvviayjhlxr.supabase.co/rest/v1/anime?select=link")
.header("apikey", "anon-key")
.header("Authorization", "Bearer token")
.body(r#"{"never": "gonna","give": you up}"#,)?
.send()?;
// NOTE(review): `text()` failure panics here instead of being returned as
// an error — consider propagating. Stripping brackets assumes the body is
// a flat JSON array; confirm against the response shape.
let data = response.text().unwrap().replace("[", "").replace("]", "");
Ok(data)
}
|
//! PEG parser for name variants.
use super::types::*;
peg::parser! {
grammar name_parser() for str {
// Single whitespace / digit primitives (quiet: excluded from error reports).
rule space() = quiet!{[' ' | '\n' | '\r' | '\t']}
rule digit() = quiet!{['0'..='9']}
// A trailing "," or "." followed only by whitespace and end-of-input.
rule trailing_junk() = [',' | '.'] space()* ![_]
// "1900-1980" or open-ended "1900-", captured verbatim.
rule year_range() -> String
= range:$(digit()+ "-" digit()*) { range.to_owned() }
// A year range, optionally comma-separated and/or parenthesised.
rule year_tag() -> String
= (space()* ",")? space()* "("? y:year_range() ")"? {
y
}
// What may legally terminate a name: junk (no year) or a year tag.
rule ending() -> Option<String>
= trailing_junk() { None }
/ y:year_tag() { Some(y) }
// Comma-separated "Last, Rest" form; a lone "Last," collapses to Single.
// `rest` stops before anything that parses as an `ending()`.
rule cs_name() -> NameFmt
= last:$([^',']*) "," space()* rest:$(([_] !ending())* [^',']?) {
if rest.trim().is_empty() {
NameFmt::Single(last.trim().to_owned())
} else {
NameFmt::TwoPart(last.trim().to_owned(), rest.trim().to_owned())
}
}
// Name without a comma, again stopping before an `ending()`.
rule single_name() -> NameFmt
= name:$(([_] !ending())* [^',' | '.']?) { NameFmt::Single(name.trim().to_owned()) }
// Optional "!!!tag!!!" prefix is skipped before either name form.
rule name() -> NameFmt = ("!!!" ['a'..='z' | 'A'..='Z']+ "!!!" space()*)? n:(cs_name() / single_name()) { n }
// Entry: either a bare year tag (empty name) or a name with optional year.
#[no_eof]
pub rule name_entry() -> NameEntry
= year:year_tag() { NameEntry { name:NameFmt::Empty, year: Some(year) } }
/ name:name() year:ending()? { NameEntry { name, year: year.flatten() } }
}
}
/// Parses `name` into a `NameEntry`, converting grammar errors via `?`.
pub fn parse_name_entry(name: &str) -> Result<NameEntry, NameError> {
    Ok(name_parser::name_entry(name)?)
}
|
use crate::enums::{
Align, CallbackTrigger, Color, Damage, Event, Font, FrameType, LabelType, Shortcut,
};
use crate::image::Image;
use crate::prelude::*;
use crate::utils::FlString;
use fltk_sys::button::*;
use std::{
ffi::{CStr, CString},
mem,
os::raw,
};
/// Creates a normal button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct Button {
// Raw FLTK widget pointer; managed via the derived widget traits.
inner: *mut Fl_Button,
// Tracker used to detect deletion of the underlying C++ widget.
tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
/// Defines the button type, which can be changed dynamically using the `set_type()`.
#[repr(i32)]
#[derive(WidgetType, Debug, Copy, Clone, PartialEq)]
pub enum ButtonType {
/// Normal button
Normal = 0,
/// Toggle button
Toggle = 1,
/// Radio button
// NOTE(review): 102 is not sequential — presumably mirrors FLTK's
// FL_RADIO_BUTTON constant; confirm against fltk-sys before changing.
Radio = 102,
/// Hidden button
Hidden = 3,
}
/// Creates a radio button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct RadioButton {
    inner: *mut Fl_Radio_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl RadioButton {
    /// Check whether a `RadioButton` is toggled
    pub fn is_toggled(&self) -> bool {
        assert!(!self.was_deleted());
        // Non-zero from the C API means toggled.
        unsafe { Fl_Radio_Button_is_toggled(self.inner) != 0 }
    }
    /// Sets whether the `RadioButton` is toggled or not
    pub fn toggle(&mut self, val: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Radio_Button_toggle(self.inner, i32::from(val)) }
    }
}
/// Creates a radio round button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct RadioRoundButton {
    inner: *mut Fl_Radio_Round_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl RadioRoundButton {
    /// Check whether a `RadioRoundButton` is toggled
    pub fn is_toggled(&self) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Radio_Round_Button_is_toggled(self.inner) != 0 }
    }
    /// Sets whether the `RadioRoundButton` is toggled or not
    pub fn toggle(&mut self, val: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Radio_Round_Button_toggle(self.inner, i32::from(val)) }
    }
}
/// Creates a radio light button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct RadioLightButton {
    inner: *mut Fl_Radio_Light_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl RadioLightButton {
    /// Check whether a `RadioLightButton` is toggled
    pub fn is_toggled(&self) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Radio_Light_Button_is_toggled(self.inner) != 0 }
    }
    /// Sets whether the `RadioLightButton` is toggled or not
    pub fn toggle(&mut self, val: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Radio_Light_Button_toggle(self.inner, i32::from(val)) }
    }
}
/// Creates a round button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct RoundButton {
    inner: *mut Fl_Round_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl RoundButton {
    /// Check whether a `RoundButton` is toggled
    pub fn is_toggled(&self) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Round_Button_is_toggled(self.inner) != 0 }
    }
    /// Sets whether the `RoundButton` is toggled or not
    pub fn toggle(&mut self, val: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Round_Button_toggle(self.inner, i32::from(val)) }
    }
}
/// Creates a check button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct CheckButton {
    inner: *mut Fl_Check_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl CheckButton {
    /// Check whether a `CheckButton` is checked
    pub fn is_checked(&self) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Button_is_checked(self.inner) != 0 }
    }
    /// Set whether `CheckButton` is checked or not
    pub fn set_checked(&self, checked: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Check_Button_set_checked(self.inner, i32::from(checked)) }
    }
}
/// Creates a toggle button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct ToggleButton {
    inner: *mut Fl_Toggle_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl ToggleButton {
    /// Check whether a `ToggleButton` is toggled
    pub fn is_toggled(&self) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Toggle_Button_is_toggled(self.inner) != 0 }
    }
    /// Sets whether the `ToggleButton` is toggled or not
    pub fn toggle(&mut self, val: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Toggle_Button_toggle(self.inner, i32::from(val)) }
    }
}
/// Creates a light button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct LightButton {
    inner: *mut Fl_Light_Button,
    tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
impl LightButton {
    /// Check whether a `LightButton` is on
    pub fn is_on(&self) -> bool {
        assert!(!self.was_deleted());
        unsafe { Fl_Light_Button_is_on(self.inner) != 0 }
    }
    /// Sets whether the `LightButton` is on or not
    pub fn turn_on(&mut self, on: bool) {
        assert!(!self.was_deleted());
        unsafe { Fl_Light_Button_turn_on(self.inner, i32::from(on)) }
    }
}
/// Creates a repeat button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct RepeatButton {
// Raw FLTK widget pointer; managed via the derived widget traits.
inner: *mut Fl_Repeat_Button,
// Tracker used to detect deletion of the underlying C++ widget.
tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
/// Creates a return button
#[derive(WidgetBase, WidgetExt, ButtonExt, Debug)]
pub struct ReturnButton {
inner: *mut Fl_Return_Button,
tracker: *mut fltk_sys::fl::Fl_Widget_Tracker,
}
|
// Copyright (c) 2016 Anatoly Ikorsky
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
use bytes::{BufMut, BytesMut};
use futures_core::{ready, stream};
use mysql_common::proto::codec::PacketCodec as PacketCodecInner;
use native_tls::{Certificate, Identity, TlsConnector};
use pin_project::{pin_project, project};
use tokio::net::TcpStream;
use tokio::prelude::*;
use tokio_util::codec::{Decoder, Encoder, Framed, FramedParts};
use std::{
fmt,
fs::File,
io::Read,
mem::MaybeUninit,
net::ToSocketAddrs,
ops::{Deref, DerefMut},
path::Path,
pin::Pin,
task::{Context, Poll},
time::Duration,
};
use crate::{
error::*,
io::{
futures::{new_connecting_tcp_stream, new_write_packet, WritePacket},
socket::Socket,
},
opts::SslOpts,
};
mod async_tls;
pub mod futures;
mod socket;
// Newtype over mysql_common's packet codec so this crate's `Error` type can
// be used in the Decoder/Encoder impls below.
#[derive(Debug, Default)]
pub struct PacketCodec(PacketCodecInner);
impl Deref for PacketCodec {
type Target = PacketCodecInner;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for PacketCodec {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl Decoder for PacketCodec {
type Item = Vec<u8>;
type Error = Error;
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>> {
Ok(self.0.decode(src)?)
}
}
impl Encoder for PacketCodec {
type Item = Vec<u8>;
type Error = Error;
fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> Result<()> {
Ok(self.0.encode(item, dst)?)
}
}
// A connection transport: plain TCP, TLS over TCP, or a local socket.
// `#[pin_project]` generates the pinned projections used by the
// AsyncRead/AsyncWrite impls below.
#[pin_project]
#[derive(Debug)]
pub enum Endpoint {
Plain(#[pin] TcpStream),
Secure(#[pin] self::async_tls::TlsStream<TcpStream>),
Socket(#[pin] Socket),
}
impl Endpoint {
    /// Returns `true` for a TLS-wrapped endpoint.
    pub fn is_secure(&self) -> bool {
        // `matches!` replaces the manual `if let … { true } else { false }`
        // (clippy::match_like_matches_macro).
        matches!(self, Endpoint::Secure(_))
    }
    /// Sets TCP keepalive on the underlying stream; no-op for local sockets.
    pub fn set_keepalive_ms(&self, ms: Option<u32>) -> Result<()> {
        let ms = ms.map(|val| Duration::from_millis(u64::from(val)));
        match *self {
            Endpoint::Plain(ref stream) => stream.set_keepalive(ms)?,
            Endpoint::Secure(ref stream) => stream.get_ref().get_ref().set_keepalive(ms)?,
            Endpoint::Socket(_) => (/* inapplicable */),
        }
        Ok(())
    }
    /// Sets TCP_NODELAY on the underlying stream; no-op for local sockets.
    pub fn set_tcp_nodelay(&self, val: bool) -> Result<()> {
        match *self {
            Endpoint::Plain(ref stream) => stream.set_nodelay(val)?,
            Endpoint::Secure(ref stream) => stream.get_ref().get_ref().set_nodelay(val)?,
            Endpoint::Socket(_) => (/* inapplicable */),
        }
        Ok(())
    }
    /// Upgrades a plain TCP endpoint to TLS using `ssl_opts`.
    ///
    /// Local-socket endpoints are returned unchanged; the final match cannot
    /// see `Secure`/`Socket` because of the early return above.
    pub async fn make_secure(self, domain: String, ssl_opts: SslOpts) -> Result<Self> {
        if let Endpoint::Socket(_) = self {
            // inapplicable
            return Ok(self);
        }
        let mut builder = TlsConnector::builder();
        // Optional custom root certificate (DER) for the server chain.
        match ssl_opts.root_cert_path() {
            Some(root_cert_path) => {
                let mut root_cert_der = vec![];
                let mut root_cert_file = File::open(root_cert_path)?;
                root_cert_file.read_to_end(&mut root_cert_der)?;
                let root_cert = Certificate::from_der(&*root_cert_der)?;
                builder.add_root_certificate(root_cert);
            }
            None => (),
        }
        // Optional client identity (PKCS#12 bundle).
        if let Some(pkcs12_path) = ssl_opts.pkcs12_path() {
            let der = std::fs::read(pkcs12_path)?;
            let identity = Identity::from_pkcs12(&*der, ssl_opts.password().unwrap_or(""))?;
            builder.identity(identity);
        }
        builder.danger_accept_invalid_hostnames(ssl_opts.skip_domain_validation());
        builder.danger_accept_invalid_certs(ssl_opts.accept_invalid_certs());
        let tls_connector = builder.build()?;
        let tls_stream = match self {
            Endpoint::Plain(stream) => {
                self::async_tls::connect_async(&tls_connector, &*domain, stream).await?
            }
            Endpoint::Secure(_) | Endpoint::Socket(_) => unreachable!(),
        };
        Ok(Endpoint::Secure(tls_stream))
    }
}
impl From<TcpStream> for Endpoint {
    /// Wraps a plain TCP stream.
    fn from(stream: TcpStream) -> Self {
        Self::Plain(stream)
    }
}
impl From<Socket> for Endpoint {
    /// Wraps a local socket connection.
    fn from(socket: Socket) -> Self {
        Self::Socket(socket)
    }
}
impl From<self::async_tls::TlsStream<TcpStream>> for Endpoint {
    /// Wraps an already-established TLS stream.
    fn from(stream: self::async_tls::TlsStream<TcpStream>) -> Self {
        Self::Secure(stream)
    }
}
impl AsyncRead for Endpoint {
// `#[project]` (pin-project 0.4 era) generates an enum of pinned variant
// projections; each call is forwarded to the pinned inner stream.
#[project]
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context,
buf: &mut [u8],
) -> Poll<StdResult<usize, tokio::io::Error>> {
#[project]
match self.project() {
Endpoint::Plain(stream) => stream.poll_read(cx, buf),
Endpoint::Secure(stream) => stream.poll_read(cx, buf),
Endpoint::Socket(stream) => stream.poll_read(cx, buf),
}
}
// Delegates tokio 0.2's uninitialized-buffer opt-in to the inner stream.
unsafe fn prepare_uninitialized_buffer(&self, buf: &mut [MaybeUninit<u8>]) -> bool {
match self {
Endpoint::Plain(stream) => stream.prepare_uninitialized_buffer(buf),
Endpoint::Secure(stream) => stream.prepare_uninitialized_buffer(buf),
Endpoint::Socket(stream) => stream.prepare_uninitialized_buffer(buf),
}
}
#[project]
fn poll_read_buf<B>(
self: Pin<&mut Self>,
cx: &mut Context,
buf: &mut B,
) -> Poll<StdResult<usize, tokio::io::Error>>
where
B: BufMut,
{
#[project]
match self.project() {
Endpoint::Plain(stream) => stream.poll_read_buf(cx, buf),
Endpoint::Secure(stream) => stream.poll_read_buf(cx, buf),
Endpoint::Socket(stream) => stream.poll_read_buf(cx, buf),
}
}
}
impl AsyncWrite for Endpoint {
// Mirrors the AsyncRead impl: every method forwards to the pinned inner
// stream selected by the projected variant.
#[project]
fn poll_write(
self: Pin<&mut Self>,
cx: &mut Context,
buf: &[u8],
) -> Poll<StdResult<usize, tokio::io::Error>> {
#[project]
match self.project() {
Endpoint::Plain(stream) => stream.poll_write(cx, buf),
Endpoint::Secure(stream) => stream.poll_write(cx, buf),
Endpoint::Socket(stream) => stream.poll_write(cx, buf),
}
}
#[project]
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll<StdResult<(), tokio::io::Error>> {
#[project]
match self.project() {
Endpoint::Plain(stream) => stream.poll_flush(cx),
Endpoint::Secure(stream) => stream.poll_flush(cx),
Endpoint::Socket(stream) => stream.poll_flush(cx),
}
}
#[project]
fn poll_shutdown(
self: Pin<&mut Self>,
cx: &mut Context,
) -> Poll<StdResult<(), tokio::io::Error>> {
#[project]
match self.project() {
Endpoint::Plain(stream) => stream.poll_shutdown(cx),
Endpoint::Secure(stream) => stream.poll_shutdown(cx),
Endpoint::Socket(stream) => stream.poll_shutdown(cx),
}
}
}
/// Stream connected to MySql server.
pub struct Stream {
    // Set once the peer closes; poll_next then yields `None`.
    closed: bool,
    // Always `Some` except transiently inside `make_secure`.
    codec: Option<Box<Framed<Endpoint, PacketCodec>>>,
}
impl fmt::Debug for Stream {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let endpoint = self.codec.as_ref().unwrap().get_ref();
        write!(f, "Stream (endpoint={:?})", endpoint)
    }
}
impl Stream {
// Wraps an endpoint in a framed MySql packet codec.
fn new<T: Into<Endpoint>>(endpoint: T) -> Self {
let endpoint = endpoint.into();
Self {
closed: false,
codec: Box::new(Framed::new(endpoint, PacketCodec::default())).into(),
}
}
/// Connects over TCP to `addr`.
pub async fn connect_tcp<S>(addr: S) -> Result<Stream>
where
S: ToSocketAddrs,
{
new_connecting_tcp_stream(addr).await
}
/// Connects over a local socket at `path`.
pub async fn connect_socket<P: AsRef<Path>>(path: P) -> Result<Stream> {
Ok(Stream::new(Socket::new(path).await?))
}
/// Returns a future that writes `data` as one MySql packet, consuming `self`.
pub fn write_packet(self, data: Vec<u8>) -> WritePacket {
new_write_packet(self, data)
}
/// Forwards to `Endpoint::set_keepalive_ms`.
pub fn set_keepalive_ms(&self, ms: Option<u32>) -> Result<()> {
self.codec.as_ref().unwrap().get_ref().set_keepalive_ms(ms)
}
/// Forwards to `Endpoint::set_tcp_nodelay`.
pub fn set_tcp_nodelay(&self, val: bool) -> Result<()> {
self.codec.as_ref().unwrap().get_ref().set_tcp_nodelay(val)
}
/// Upgrades the transport to TLS.
///
/// `codec` is taken out (the only point where it becomes `None`), the raw
/// endpoint is upgraded, then the same codec state is re-wrapped so packet
/// sequencing carries across the handshake.
pub async fn make_secure(mut self, domain: String, ssl_opts: SslOpts) -> Result<Self> {
let codec = self.codec.take().unwrap();
let FramedParts { io, codec, .. } = codec.into_parts();
let endpoint = io.make_secure(domain, ssl_opts).await?;
let codec = Framed::new(endpoint, codec);
self.codec = Some(Box::new(codec));
Ok(self)
}
/// `true` if the underlying endpoint is TLS-wrapped.
pub fn is_secure(&self) -> bool {
self.codec.as_ref().unwrap().get_ref().is_secure()
}
/// Resets the packet sequence id (start of a new command).
pub fn reset_seq_id(&mut self) {
if let Some(codec) = self.codec.as_mut() {
codec.codec_mut().reset_seq_id();
}
}
/// Synchronises the compressed/uncompressed sequence ids.
pub fn sync_seq_id(&mut self) {
if let Some(codec) = self.codec.as_mut() {
codec.codec_mut().sync_seq_id();
}
}
/// Updates the codec's max_allowed_packet limit.
pub fn set_max_allowed_packet(&mut self, max_allowed_packet: usize) {
if let Some(codec) = self.codec.as_mut() {
codec.codec_mut().max_allowed_packet = max_allowed_packet;
}
}
/// Enables packet compression at the given level.
pub fn compress(&mut self, level: crate::Compression) {
if let Some(codec) = self.codec.as_mut() {
codec.codec_mut().compress(level);
}
}
}
impl stream::Stream for Stream {
    type Item = Result<Vec<u8>>;
    /// Yields the next packet from the codec, or `None` once closed.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        if self.closed {
            return Poll::Ready(None);
        }
        let codec = self.codec.as_mut().unwrap();
        match ready!(Pin::new(codec).poll_next(cx)) {
            Some(Ok(packet)) => Poll::Ready(Some(Ok(packet))),
            Some(Err(err)) => Poll::Ready(Some(Err(err))),
            None => Poll::Ready(None),
        }
    }
}
|
use criterion::{criterion_group, criterion_main, Criterion};
use nu_plugin::{EncodingType, PluginResponse};
use nu_protocol::{Span, Value};
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
fn new_test_data(row_cnt: usize, col_cnt: usize) -> Value {
    let columns: Vec<String> = (0..col_cnt).map(|x| format!("col_{x}")).collect();
    let vals: Vec<Value> = (0..col_cnt as i64).map(Value::test_int).collect();
    // Every row is identical: the same column names paired with 0..col_cnt.
    let rows = std::iter::repeat_with(|| Value::test_record(columns.clone(), vals.clone()))
        .take(row_cnt)
        .collect();
    Value::List {
        vals: rows,
        span: Span::test_data(),
    }
}
/// Benchmarks json/msgpack encoding over a grid of table sizes.
fn bench_encoding(c: &mut Criterion) {
    let mut group = c.benchmark_group("Encoding");
    // {100, 1000, 10000} rows x {5, 10, 15} columns, in the same order as
    // listing the pairs explicitly.
    let test_cnt_pairs: Vec<(usize, usize)> = [100, 1000, 10000]
        .into_iter()
        .flat_map(|rows| [5, 10, 15].into_iter().map(move |cols| (rows, cols)))
        .collect();
    for (row_cnt, col_cnt) in test_cnt_pairs {
        for fmt in ["json", "msgpack"] {
            group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
                let mut res = vec![];
                let test_data = PluginResponse::Value(Box::new(new_test_data(row_cnt, col_cnt)));
                let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
                b.iter(|| encoder.encode_response(&test_data, &mut res))
            });
        }
    }
    group.finish();
}
/// Benchmarks json/msgpack decoding over a grid of table sizes.
fn bench_decoding(c: &mut Criterion) {
    let mut group = c.benchmark_group("Decoding");
    let test_cnt_pairs: Vec<(usize, usize)> = [100, 1000, 10000]
        .into_iter()
        .flat_map(|rows| [5, 10, 15].into_iter().map(move |cols| (rows, cols)))
        .collect();
    for (row_cnt, col_cnt) in test_cnt_pairs {
        for fmt in ["json", "msgpack"] {
            group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
                // Encode once up front; each iteration re-reads the buffer.
                let mut res = vec![];
                let test_data = PluginResponse::Value(Box::new(new_test_data(row_cnt, col_cnt)));
                let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
                encoder.encode_response(&test_data, &mut res).unwrap();
                let mut binary_data = std::io::Cursor::new(res);
                b.iter(|| {
                    binary_data.set_position(0);
                    encoder.decode_response(&mut binary_data)
                })
            });
        }
    }
    group.finish();
}
criterion_group!(benches, bench_encoding, bench_decoding);
criterion_main!(benches);
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate rocket;
use rocket::{get, routes};
use rocket::http::RawStr;
use std::fs;
#[derive(Deserialize, Debug)]
#[derive(FromForm)]
// NOTE(review): this struct is not referenced by the `oai` route in the code
// visible here — presumably intended for richer OAI-PMH query handling;
// confirm before removing.
struct OaiQuery {
// OAI-PMH verb (e.g. Identify, ListRecords).
verb: Option<String>,
#[serde(rename = "metadataPrefix")]
metadata_prefix: Option<String>,
set: Option<String>,
}
#[get("/?<verb>")]
/// Serves the canned response file `data/<verb>` (lowercased) for an
/// OAI-style `?verb=` query, or "not found" when the file is absent.
fn oai(verb: &RawStr) -> String {
    // SECURITY: `verb` comes straight from the query string and is
    // interpolated into a filesystem path. Reject separators and parent-dir
    // components to prevent path traversal (e.g. `?verb=../../etc/passwd`).
    let verb = verb.as_str();
    if verb.contains('/') || verb.contains('\\') || verb.contains("..") {
        return "not found".to_string();
    }
    let filename = format!("data/{}", verb);
    // `unwrap_or_else` avoids allocating the fallback unless the read fails.
    fs::read_to_string(filename.to_lowercase()).unwrap_or_else(|_| "not found".to_string())
}
/// Launches the Rocket server with the single `oai` route mounted at `/`.
fn main() {
    rocket::ignite().mount("/", routes![oai]).launch();
}
|
pub mod bitutil;
pub mod flat_map;
|
// NOTE(review): svd2rust-style generated accessors for the CR2 register —
// prefer regenerating from the SVD over hand-editing this file.
#[doc = "Register `CR2` reader"]
pub type R = crate::R<CR2_SPEC>;
#[doc = "Register `CR2` writer"]
pub type W = crate::W<CR2_SPEC>;
#[doc = "Field `ADDM7` reader - 7-bit Address Detection/4-bit Address Detection"]
pub type ADDM7_R = crate::BitReader<ADDM7_A>;
#[doc = "7-bit Address Detection/4-bit Address Detection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ADDM7_A {
#[doc = "0: 4-bit address detection"]
Bit4 = 0,
#[doc = "1: 7-bit address detection"]
Bit7 = 1,
}
impl From<ADDM7_A> for bool {
#[inline(always)]
fn from(variant: ADDM7_A) -> Self {
// Bit4 = 0 -> false, Bit7 = 1 -> true.
variant as u8 != 0
}
}
impl ADDM7_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ADDM7_A {
match self.bits {
false => ADDM7_A::Bit4,
true => ADDM7_A::Bit7,
}
}
#[doc = "4-bit address detection"]
#[inline(always)]
pub fn is_bit4(&self) -> bool {
*self == ADDM7_A::Bit4
}
#[doc = "7-bit address detection"]
#[inline(always)]
pub fn is_bit7(&self) -> bool {
*self == ADDM7_A::Bit7
}
}
#[doc = "Field `ADDM7` writer - 7-bit Address Detection/4-bit Address Detection"]
pub type ADDM7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ADDM7_A>;
impl<'a, REG, const O: u8> ADDM7_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "4-bit address detection"]
#[inline(always)]
pub fn bit4(self) -> &'a mut crate::W<REG> {
self.variant(ADDM7_A::Bit4)
}
#[doc = "7-bit address detection"]
#[inline(always)]
pub fn bit7(self) -> &'a mut crate::W<REG> {
self.variant(ADDM7_A::Bit7)
}
}
// Generated accessors for the 2-bit STOP field of CR2.
#[doc = "Field `STOP` reader - STOP bits"]
pub type STOP_R = crate::FieldReader<STOP_A>;
#[doc = "STOP bits\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum STOP_A {
#[doc = "0: 1 stop bit"]
Stop1 = 0,
#[doc = "2: 2 stop bit"]
Stop2 = 2,
}
impl From<STOP_A> for u8 {
#[inline(always)]
fn from(variant: STOP_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for STOP_A {
type Ux = u8;
}
impl STOP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<STOP_A> {
// Field values 1 and 3 have no named variant and yield `None`.
match self.bits {
0 => Some(STOP_A::Stop1),
2 => Some(STOP_A::Stop2),
_ => None,
}
}
#[doc = "1 stop bit"]
#[inline(always)]
pub fn is_stop1(&self) -> bool {
*self == STOP_A::Stop1
}
#[doc = "2 stop bit"]
#[inline(always)]
pub fn is_stop2(&self) -> bool {
*self == STOP_A::Stop2
}
}
#[doc = "Field `STOP` writer - STOP bits"]
pub type STOP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, STOP_A>;
impl<'a, REG, const O: u8> STOP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "1 stop bit"]
#[inline(always)]
pub fn stop1(self) -> &'a mut crate::W<REG> {
self.variant(STOP_A::Stop1)
}
#[doc = "2 stop bit"]
#[inline(always)]
pub fn stop2(self) -> &'a mut crate::W<REG> {
self.variant(STOP_A::Stop2)
}
}
// Generated accessors for the single-bit SWAP field of CR2.
#[doc = "Field `SWAP` reader - Swap TX/RX pins"]
pub type SWAP_R = crate::BitReader<SWAP_A>;
#[doc = "Swap TX/RX pins\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SWAP_A {
#[doc = "0: TX/RX pins are used as defined in standard pinout"]
Standard = 0,
#[doc = "1: The TX and RX pins functions are swapped"]
Swapped = 1,
}
impl From<SWAP_A> for bool {
#[inline(always)]
fn from(variant: SWAP_A) -> Self {
variant as u8 != 0
}
}
impl SWAP_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SWAP_A {
match self.bits {
false => SWAP_A::Standard,
true => SWAP_A::Swapped,
}
}
#[doc = "TX/RX pins are used as defined in standard pinout"]
#[inline(always)]
pub fn is_standard(&self) -> bool {
*self == SWAP_A::Standard
}
#[doc = "The TX and RX pins functions are swapped"]
#[inline(always)]
pub fn is_swapped(&self) -> bool {
*self == SWAP_A::Swapped
}
}
#[doc = "Field `SWAP` writer - Swap TX/RX pins"]
pub type SWAP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SWAP_A>;
impl<'a, REG, const O: u8> SWAP_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "TX/RX pins are used as defined in standard pinout"]
#[inline(always)]
pub fn standard(self) -> &'a mut crate::W<REG> {
self.variant(SWAP_A::Standard)
}
#[doc = "The TX and RX pins functions are swapped"]
#[inline(always)]
pub fn swapped(self) -> &'a mut crate::W<REG> {
self.variant(SWAP_A::Swapped)
}
}
#[doc = "Field `RXINV` reader - RX pin active level inversion"]
pub type RXINV_R = crate::BitReader<RXINV_A>;
#[doc = "RX pin active level inversion\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RXINV_A {
#[doc = "0: RX pin signal works using the standard logic levels"]
Standard = 0,
#[doc = "1: RX pin signal values are inverted"]
Inverted = 1,
}
impl From<RXINV_A> for bool {
#[inline(always)]
fn from(variant: RXINV_A) -> Self {
variant as u8 != 0
}
}
impl RXINV_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RXINV_A {
match self.bits {
false => RXINV_A::Standard,
true => RXINV_A::Inverted,
}
}
#[doc = "RX pin signal works using the standard logic levels"]
#[inline(always)]
pub fn is_standard(&self) -> bool {
*self == RXINV_A::Standard
}
#[doc = "RX pin signal values are inverted"]
#[inline(always)]
pub fn is_inverted(&self) -> bool {
*self == RXINV_A::Inverted
}
}
#[doc = "Field `RXINV` writer - RX pin active level inversion"]
pub type RXINV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RXINV_A>;
impl<'a, REG, const O: u8> RXINV_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "RX pin signal works using the standard logic levels"]
#[inline(always)]
pub fn standard(self) -> &'a mut crate::W<REG> {
self.variant(RXINV_A::Standard)
}
#[doc = "RX pin signal values are inverted"]
#[inline(always)]
pub fn inverted(self) -> &'a mut crate::W<REG> {
self.variant(RXINV_A::Inverted)
}
}
#[doc = "Field `TXINV` reader - TX pin active level inversion"]
pub type TXINV_R = crate::BitReader<TXINV_A>;
#[doc = "TX pin active level inversion\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TXINV_A {
#[doc = "0: TX pin signal works using the standard logic levels"]
Standard = 0,
#[doc = "1: TX pin signal values are inverted"]
Inverted = 1,
}
impl From<TXINV_A> for bool {
#[inline(always)]
fn from(variant: TXINV_A) -> Self {
variant as u8 != 0
}
}
impl TXINV_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TXINV_A {
match self.bits {
false => TXINV_A::Standard,
true => TXINV_A::Inverted,
}
}
#[doc = "TX pin signal works using the standard logic levels"]
#[inline(always)]
pub fn is_standard(&self) -> bool {
*self == TXINV_A::Standard
}
#[doc = "TX pin signal values are inverted"]
#[inline(always)]
pub fn is_inverted(&self) -> bool {
*self == TXINV_A::Inverted
}
}
#[doc = "Field `TXINV` writer - TX pin active level inversion"]
pub type TXINV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TXINV_A>;
impl<'a, REG, const O: u8> TXINV_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "TX pin signal works using the standard logic levels"]
#[inline(always)]
pub fn standard(self) -> &'a mut crate::W<REG> {
self.variant(TXINV_A::Standard)
}
#[doc = "TX pin signal values are inverted"]
#[inline(always)]
pub fn inverted(self) -> &'a mut crate::W<REG> {
self.variant(TXINV_A::Inverted)
}
}
#[doc = "Field `DATAINV` reader - Binary data inversion"]
pub type DATAINV_R = crate::BitReader<DATAINV_A>;
#[doc = "Binary data inversion\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum DATAINV_A {
#[doc = "0: Logical data from the data register are send/received in positive/direct logic"]
Positive = 0,
#[doc = "1: Logical data from the data register are send/received in negative/inverse logic"]
Negative = 1,
}
impl From<DATAINV_A> for bool {
#[inline(always)]
fn from(variant: DATAINV_A) -> Self {
variant as u8 != 0
}
}
impl DATAINV_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DATAINV_A {
match self.bits {
false => DATAINV_A::Positive,
true => DATAINV_A::Negative,
}
}
#[doc = "Logical data from the data register are send/received in positive/direct logic"]
#[inline(always)]
pub fn is_positive(&self) -> bool {
*self == DATAINV_A::Positive
}
#[doc = "Logical data from the data register are send/received in negative/inverse logic"]
#[inline(always)]
pub fn is_negative(&self) -> bool {
*self == DATAINV_A::Negative
}
}
#[doc = "Field `DATAINV` writer - Binary data inversion"]
pub type DATAINV_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DATAINV_A>;
impl<'a, REG, const O: u8> DATAINV_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Logical data from the data register are send/received in positive/direct logic"]
#[inline(always)]
pub fn positive(self) -> &'a mut crate::W<REG> {
self.variant(DATAINV_A::Positive)
}
#[doc = "Logical data from the data register are send/received in negative/inverse logic"]
#[inline(always)]
pub fn negative(self) -> &'a mut crate::W<REG> {
self.variant(DATAINV_A::Negative)
}
}
#[doc = "Field `MSBFIRST` reader - Most significant bit first"]
pub type MSBFIRST_R = crate::BitReader<MSBFIRST_A>;
#[doc = "Most significant bit first\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MSBFIRST_A {
#[doc = "0: data is transmitted/received with data bit 0 first, following the start bit"]
Lsb = 0,
#[doc = "1: data is transmitted/received with MSB (bit 7/8/9) first, following the start bit"]
Msb = 1,
}
impl From<MSBFIRST_A> for bool {
#[inline(always)]
fn from(variant: MSBFIRST_A) -> Self {
variant as u8 != 0
}
}
impl MSBFIRST_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MSBFIRST_A {
match self.bits {
false => MSBFIRST_A::Lsb,
true => MSBFIRST_A::Msb,
}
}
#[doc = "data is transmitted/received with data bit 0 first, following the start bit"]
#[inline(always)]
pub fn is_lsb(&self) -> bool {
*self == MSBFIRST_A::Lsb
}
#[doc = "data is transmitted/received with MSB (bit 7/8/9) first, following the start bit"]
#[inline(always)]
pub fn is_msb(&self) -> bool {
*self == MSBFIRST_A::Msb
}
}
#[doc = "Field `MSBFIRST` writer - Most significant bit first"]
pub type MSBFIRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MSBFIRST_A>;
impl<'a, REG, const O: u8> MSBFIRST_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "data is transmitted/received with data bit 0 first, following the start bit"]
#[inline(always)]
pub fn lsb(self) -> &'a mut crate::W<REG> {
self.variant(MSBFIRST_A::Lsb)
}
#[doc = "data is transmitted/received with MSB (bit 7/8/9) first, following the start bit"]
#[inline(always)]
pub fn msb(self) -> &'a mut crate::W<REG> {
self.variant(MSBFIRST_A::Msb)
}
}
#[doc = "Field `ADD` reader - Address of the LPUART node"]
pub type ADD_R = crate::FieldReader;
#[doc = "Field `ADD` writer - Address of the LPUART node"]
pub type ADD_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 8, O>;
// Read accessors: each method extracts its field from the cached CR2 value.
impl R {
    #[doc = "Bit 4 - 7-bit Address Detection/4-bit Address Detection"]
    #[inline(always)]
    pub fn addm7(&self) -> ADDM7_R {
        ADDM7_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bits 12:13 - STOP bits"]
    #[inline(always)]
    pub fn stop(&self) -> STOP_R {
        STOP_R::new(((self.bits >> 12) & 3) as u8)
    }
    #[doc = "Bit 15 - Swap TX/RX pins"]
    #[inline(always)]
    pub fn swap(&self) -> SWAP_R {
        SWAP_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - RX pin active level inversion"]
    #[inline(always)]
    pub fn rxinv(&self) -> RXINV_R {
        RXINV_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - TX pin active level inversion"]
    #[inline(always)]
    pub fn txinv(&self) -> TXINV_R {
        TXINV_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - Binary data inversion"]
    #[inline(always)]
    pub fn datainv(&self) -> DATAINV_R {
        DATAINV_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - Most significant bit first"]
    #[inline(always)]
    pub fn msbfirst(&self) -> MSBFIRST_R {
        MSBFIRST_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bits 24:31 - Address of the LPUART node"]
    #[inline(always)]
    pub fn add(&self) -> ADD_R {
        ADD_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
// Write accessors: each method returns a field-writer proxy positioned at the
// field's bit offset (the const generic argument).
impl W {
    #[doc = "Bit 4 - 7-bit Address Detection/4-bit Address Detection"]
    #[inline(always)]
    #[must_use]
    pub fn addm7(&mut self) -> ADDM7_W<CR2_SPEC, 4> {
        ADDM7_W::new(self)
    }
    #[doc = "Bits 12:13 - STOP bits"]
    #[inline(always)]
    #[must_use]
    pub fn stop(&mut self) -> STOP_W<CR2_SPEC, 12> {
        STOP_W::new(self)
    }
    #[doc = "Bit 15 - Swap TX/RX pins"]
    #[inline(always)]
    #[must_use]
    pub fn swap(&mut self) -> SWAP_W<CR2_SPEC, 15> {
        SWAP_W::new(self)
    }
    #[doc = "Bit 16 - RX pin active level inversion"]
    #[inline(always)]
    #[must_use]
    pub fn rxinv(&mut self) -> RXINV_W<CR2_SPEC, 16> {
        RXINV_W::new(self)
    }
    #[doc = "Bit 17 - TX pin active level inversion"]
    #[inline(always)]
    #[must_use]
    pub fn txinv(&mut self) -> TXINV_W<CR2_SPEC, 17> {
        TXINV_W::new(self)
    }
    #[doc = "Bit 18 - Binary data inversion"]
    #[inline(always)]
    #[must_use]
    pub fn datainv(&mut self) -> DATAINV_W<CR2_SPEC, 18> {
        DATAINV_W::new(self)
    }
    #[doc = "Bit 19 - Most significant bit first"]
    #[inline(always)]
    #[must_use]
    pub fn msbfirst(&mut self) -> MSBFIRST_W<CR2_SPEC, 19> {
        MSBFIRST_W::new(self)
    }
    #[doc = "Bits 24:31 - Address of the LPUART node"]
    #[inline(always)]
    #[must_use]
    pub fn add(&mut self) -> ADD_W<CR2_SPEC, 24> {
        ADD_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
// Marker type describing the CR2 register: 32-bit, readable, writable,
// resets to 0.
#[doc = "Control register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR2_SPEC;
impl crate::RegisterSpec for CR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr2::R`](R) reader structure"]
impl crate::Readable for CR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr2::W`](W) writer structure"]
impl crate::Writable for CR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR2 to value 0"]
impl crate::Resettable for CR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use hyper::{
server::Server,
service::{make_service_fn, service_fn},
Error,
};
use std::{convert::Infallible, fmt, sync::Arc};
use crate::{middleware::NotFound, Context, Router};
/// An HTTP application: per-request shared state plus a router.
pub struct Trek<State> {
    // Shared application state, handed to every request `Context`.
    state: State,
    // Route table; also owns the application-level middleware stack.
    router: Router<Context<State>>,
}
impl<State: Send + Sync + 'static> Trek<State> {
    /// Creates an application that shares `state` with every request.
    pub fn with_state(state: State) -> Self {
        Self {
            state,
            router: Router::new(),
        }
    }
    /// Mutable access to the router, for registering routes and middleware.
    pub fn router(&mut self) -> &mut Router<Context<State>> {
        &mut self.router
    }
    /// Resolves `addr`, binds a hyper server to it, and serves requests until
    /// the server stops.
    ///
    /// # Errors
    /// Returns an I/O error when address resolution, binding, or serving
    /// fails.
    #[cfg(feature = "tokio")]
    pub async fn run(self, addr: impl std::net::ToSocketAddrs) -> std::io::Result<()> {
        // Propagate resolution failures instead of panicking (was `.unwrap()`);
        // the function already returns `io::Result`, so `?` is the right tool.
        let addr = addr
            .to_socket_addrs()?
            .next()
            .ok_or(std::io::ErrorKind::InvalidInput)?;
        let builder = Server::try_bind(&addr).map_err(|e| {
            error!("error bind to {}: {}", addr, e);
            std::io::Error::new(std::io::ErrorKind::Other, e)
        })?;
        info!("Trek is running on http://{}", addr);
        // Shared across connections/tasks, hence the `Arc`s.
        let state = Arc::new(self.state);
        let router = Arc::new(self.router);
        let not_found = Arc::new(NotFound::new());
        Ok(builder
            .serve(make_service_fn(move |_socket| {
                let state = state.clone();
                let router = router.clone();
                let not_found = not_found.clone();
                async move {
                    Ok::<_, Infallible>(service_fn(move |req| {
                        let state = state.clone();
                        let path = req.uri().path().to_owned();
                        let method = req.method().to_owned();
                        let middleware = router.middleware.clone();
                        let mut cx = Context::new(state, req, vec![], middleware.clone());
                        // Route match extends the middleware chain and fills
                        // path parameters; a miss appends the 404 handler.
                        match router.find(&path, method) {
                            Some((m, p)) => {
                                cx.middleware.append(&mut m.clone());
                                cx.params = p
                                    .iter()
                                    .map(|(k, v)| ((*k).to_string(), (*v).to_string()))
                                    .collect();
                            }
                            None => {
                                cx.middleware.push(not_found.clone());
                            }
                        };
                        async move { Ok::<_, Error>(cx.next().await) }
                    }))
                }
            }))
            .await
            .map_err(|e| {
                error!("server error: {}", e);
                std::io::Error::new(std::io::ErrorKind::Other, e)
            })?)
    }
    /// async-std backend.
    // TODO(review): this is an unimplemented stub — it accepts an address and
    // immediately returns `Ok(())` without serving anything.
    #[cfg(feature = "async-std")]
    pub async fn run(self, addr: impl async_std::net::ToSocketAddrs) -> std::io::Result<()> {
        Ok(())
    }
}
impl Trek<()> {
    /// Creates an application with no shared state (unit state).
    pub fn new() -> Self {
        Self::with_state(())
    }
}
impl Default for Trek<()> {
    /// Equivalent to [`Trek::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<State> fmt::Debug for Trek<State> {
    // `state` is deliberately omitted: `State` carries no `Debug` bound here.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_struct("Trek")
            .field("router", &self.router)
            .finish()
    }
}
|
/// A renderer abstracts all of `gfxal` away from the client: using a
/// renderer should not require any raw gfx-hal calls, or any calls inside
/// gfxal. That said, implementations should still be powerful.
pub trait Renderer {
    /// Begins the scene; use this for work that must happen before anything
    /// else. Default implementation does nothing.
    fn begin_scene(&mut self) {}
    /// Ends the scene; use this to finish whatever was happening before the
    /// underlying graphics context ends its scene. Default does nothing.
    fn end_scene(&mut self) {}
}
|
mod utils;
mod fraction;
mod primes;
mod fib;
mod queue;
mod poker;
mod euler;
use euler::phi;
use fraction::Fraction;
use utils::check_permutation;
/// Scans odd n in 3..=999_999 for the n minimising n/phi(n) where phi(n) is a
/// digit permutation of n (NOTE(review): this looks like Project Euler 70 —
/// confirm the intended search bound, the classic problem uses 10^7).
fn main() {
    let mut res = 0;
    // Sentinel larger than any achievable ratio, so the first hit replaces it.
    let mut min = Fraction::new(10, 1);
    for i in 2..=500_000 {
        // Progress indicator on the raw loop counter.
        if i % 10_000 == 0 {
            println!("i: {}", i);
        }
        // Shadowing maps the counter onto odd candidates only: 3, 5, ..., 999_999.
        let i = i * 2 - 1;
        let p = phi(i);
        if check_permutation(p as u128, i as u128) {
            let r = Fraction::new(i as u128, p as u128);
            if r < min {
                println!("min: {:?}", r);
                min = r;
                res = i;
            }
        }
    }
    println!("{}", res);
}
use crate::prelude::*;
/// Re-exports for glob imports.
pub mod prelude {
    pub use super::BitvecConst;
}
/// A constant bitvec term expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BitvecConst {
    /// The constant bitvec value.
    pub val: Bitvec,
}
impl BitvecConst {
    /// Creates a `BitvecConst` of the given bit width whose value is zero.
    pub fn zero(ty: BitvecTy) -> BitvecConst {
        Bitvec::zero(ty.width()).into()
    }
    /// Creates a `BitvecConst` of the given bit width whose value is one.
    pub fn one(ty: BitvecTy) -> BitvecConst {
        Bitvec::one(ty.width()).into()
    }
    /// Creates a `BitvecConst` of the given bit width with every bit set.
    pub fn all_set(ty: BitvecTy) -> BitvecConst {
        Bitvec::all_set(ty.width()).into()
    }
}
// Blanket conversion: anything convertible into a `Bitvec` wraps directly
// into a `BitvecConst`.
impl<T> From<T> for BitvecConst
where
    T: Into<Bitvec>
{
    fn from(val: T) -> Self {
        BitvecConst{ val: val.into() }
    }
}
// A constant is a leaf expression: no children, arity 0.
impl Children for BitvecConst {
    fn children(&self) -> ChildrenIter {
        ChildrenIter::none()
    }
}
impl ChildrenMut for BitvecConst {
    fn children_mut(&mut self) -> ChildrenIterMut {
        ChildrenIterMut::none()
    }
}
impl IntoChildren for BitvecConst {
    fn into_children(self) -> IntoChildrenIter {
        IntoChildrenIter::none()
    }
}
// The expression's type is the type of its stored value.
impl HasType for BitvecConst {
    fn ty(&self) -> Type {
        self.val.ty()
    }
}
impl HasKind for BitvecConst {
    fn kind(&self) -> ExprKind {
        ExprKind::BitvecConst
    }
}
impl HasArity for BitvecConst {
    fn arity(&self) -> usize {
        0
    }
}
// Lift into the expression sum type.
impl From<BitvecConst> for AnyExpr {
    fn from(bitvec_const: BitvecConst) -> AnyExpr {
        AnyExpr::BitvecConst(bitvec_const)
    }
}
|
//! This example demonstrates using [`IterTable`], an [allocation](https://doc.rust-lang.org/nomicon/vec/vec-alloc.html)
//! free [`Table`] alternative that translates an iterator into a display.
//!
//! * Note how [`IterTable`] supports the familiar `.with()` syntax for applying display
//! modifications.
//!
//! * [`IterTable`] supports manual configuration of:
//! * Record sniffing (default 1000 rows)
//! * Row cutoff
//! * Row height
//! * Column cutoff
//! * Column width
use std::io::BufRead;
use tabled::{settings::Style, tables::IterTable};
/// Renders this example's own source file as an `IterTable`, one row per line.
fn main() {
    // The example reads itself: `file!()` is the path of this source file.
    let path = file!();
    let file = std::fs::File::open(path).unwrap();
    let reader = std::io::BufReader::new(file);
    // Each row is [line number, status, content-or-error-message].
    let rows = reader.lines().enumerate().map(|(i, line)| match line {
        Ok(text) => [i.to_string(), String::from("ok"), text],
        Err(err) => [i.to_string(), String::from("error"), err.to_string()],
    });
    IterTable::new(rows)
        .with(Style::ascii_rounded())
        .build(std::io::stdout())
        .unwrap();
    println!()
}
|
// implements the color space submodule
/// Supported conversions between color spaces.
///
/// Variant names deliberately follow the `SRC_to_DST` convention used by the
/// submodule, so the camel-case lint is silenced rather than renaming them
/// (renaming would break existing callers).
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum ColorTransform {
    BGR_to_RGB,
    BGR_to_HSV,
    BGR_to_GREY,
    RGB_to_BGR,
    RGB_to_HSV,
    RGB_to_GREY,
    HSV_to_BGR,
    HSV_to_RGB,
    HSV_to_GREY,
}
/// Applies the requested color-space conversion.
// TODO(review): unimplemented stub — the image parameter and return type are
// still commented out, and the argument is currently ignored.
pub fn color_space(/*image: &Image,*/ color_space: ColorTransform) /*->Image*/ {
}
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use libc;
use std::cmp;
use std::mem;
use super::error::Result;
use super::memory::{allocate_aligned, free_aligned};
/// Memory pool for allocating memory. It's also responsible for tracking memory usage.
pub trait MemoryPool {
    /// Allocate memory.
    ///
    /// The implementation should ensure that the allocated memory is aligned.
    fn allocate(&self, size: usize) -> Result<*mut u8>;
    /// Reallocate memory.
    ///
    /// If the implementation doesn't support reallocating aligned memory in place, it
    /// allocates new memory and copies the old contents into it, freeing the old block.
    fn reallocate(&self, old_size: usize, new_size: usize, pointer: *const u8)
        -> Result<*const u8>;
    /// Free memory previously returned by this pool.
    fn free(&self, ptr: *const u8);
}
/// Implementation of memory pool using the libc API.
#[allow(dead_code)]
struct LibcMemoryPool;
impl MemoryPool for LibcMemoryPool {
    fn allocate(&self, size: usize) -> Result<*mut u8> {
        allocate_aligned(size as i64)
    }
    /// Allocate-copy-free reallocation: a fresh aligned block is filled with
    /// the overlapping prefix of the old block, then the old block is freed.
    fn reallocate(
        &self,
        old_size: usize,
        new_size: usize,
        pointer: *const u8,
    ) -> Result<*const u8> {
        let result = self.allocate(new_size)?;
        // Plain `as` casts replace the previous `mem::transmute` calls:
        // converting between raw pointer types (including const-ness) is
        // exactly what pointer casts are for, with none of transmute's
        // extra soundness obligations.
        unsafe {
            // SAFETY: `result` is freshly allocated with `new_size` bytes and
            // cannot overlap `pointer`; both are valid for at least
            // `min(old_size, new_size)` bytes.
            libc::memcpy(
                result as *mut libc::c_void,
                pointer as *const libc::c_void,
                cmp::min(old_size, new_size),
            );
        }
        free_aligned(pointer);
        Ok(result)
    }
    fn free(&self, ptr: *const u8) {
        free_aligned(ptr)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected alignment of blocks returned by `allocate_aligned`.
    const ALIGNMENT: usize = 64;
    #[test]
    fn test_allocate() {
        let memory_pool = LibcMemoryPool {};
        for _ in 0..10 {
            let p = memory_pool.allocate(1024).unwrap();
            // make sure this is 64-byte aligned
            assert_eq!(0, (p as usize) % ALIGNMENT);
            memory_pool.free(p);
        }
    }
    #[test]
    fn test_reallocate() {
        let memory_pool = LibcMemoryPool {};
        for _ in 0..10 {
            let p1 = memory_pool.allocate(1024).unwrap();
            let p2 = memory_pool.reallocate(1024, 2048, p1).unwrap();
            // make sure this is 64-byte aligned
            // (p1 is freed by reallocate; only its address is inspected here,
            // the memory is never dereferenced)
            assert_eq!(0, (p1 as usize) % ALIGNMENT);
            assert_eq!(0, (p2 as usize) % ALIGNMENT);
            memory_pool.free(p2);
        }
    }
}
|
use apllodb_shared_components::UnaryOperator;
use apllodb_sql_parser::apllodb_ast;
use crate::ast_translator::AstTranslator;
impl AstTranslator {
    /// Translates a parsed AST unary operator into its engine counterpart.
    /// The exhaustive match will fail to compile if the AST gains a variant,
    /// forcing this mapping to be updated.
    pub(crate) fn unary_operator(ast_unary_operator: apllodb_ast::UnaryOperator) -> UnaryOperator {
        match ast_unary_operator {
            apllodb_ast::UnaryOperator::Minus => UnaryOperator::Minus,
        }
    }
}
|
#![feature(collection_placement)]
#![feature(placement_in_syntax)]
#![feature(test)]
// #[macro_use]
// extern crate serde_derive;
#[macro_use]
extern crate log;
extern crate flexi_logger;
#[macro_use]
extern crate serde_json;
extern crate rand;
extern crate serde;
extern crate test;
extern crate veloci;
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use veloci::*;
static TEST_FOLDER: &str = "bench_taschenbuch";
/// Loads the benchmark persistence from disk, creating and indexing a
/// 6-million-document corpus on first use.
fn load_persistence_disk() -> persistence::Persistence {
    use std::path::Path;
    if Path::new(TEST_FOLDER).exists() {
        return persistence::Persistence::load(TEST_FOLDER.to_string()).expect("Could not load persistence");
    }
    let object = r#"{"type":"taschenbuch","title":"mein buch"}"#.to_owned() + "\n";
    // `str::repeat` sizes the buffer once up front, instead of growing it
    // across 6 million `+=` appends as before.
    let data = object.repeat(6_000_000);
    let mut pers = persistence::Persistence::create_type(TEST_FOLDER.to_string(), persistence::PersistenceType::Persistent).unwrap();
    println!("{:?}", create::create_indices_from_str(&mut pers, &data, "[]", None, true));
    pers
}
/// Runs a free-style search for `term` against `pers` and resolves the hits
/// into full documents.
fn search_freestyle(term: &str, pers: &persistence::Persistence) -> Vec<search::DocWithHit> {
    let params = query_generator::SearchQueryGeneratorParameters {
        search_term: term.to_string(),
        ..Default::default()
    };
    let request = query_generator::search_query(pers, params);
    let hits = search::search(request, pers).unwrap();
    search::to_documents(pers, &hits.data, None, &hits)
}
/// Criterion benchmark entry: searches the 6M-document corpus loaded (or
/// created) by `load_persistence_disk`.
fn searches(c: &mut Criterion) {
    // veloci::trace::enable_log();
    let pers = load_persistence_disk();
    // c.bench_function("jmdict_search_anschauen", |b| b.iter(|| search("anschauen", &pers, 1)));
    // c.bench_function("jmdict_search_haus", |b| b.iter(|| search("haus", &pers, 1)));
    c.bench_function("jmdict_search_taschenbuch", move |b| b.iter(|| search_freestyle("taschenbuch", &pers)));
}
criterion_group!(benches, searches);
criterion_main!(benches);
|
use crate::{ApertureShape, Camera, Device};
use cgmath::prelude::*;
use cgmath::{Matrix4, Point3, Vector3};
use js_sys::Error;
use zerocopy::{AsBytes, FromBytes};
// GPU-side camera uniform. `repr(C, align(16))` fixes the field order and
// 16-byte alignment; the exact layout contract lives on the shader side —
// confirm there before changing fields.
#[repr(align(16), C)]
#[derive(AsBytes, FromBytes, Debug, Default)]
pub struct CameraData {
    // Encoded by `aperture_settings`: element 0 is the shape kind
    // (-1 point, 0 circle, 1 n-gon), element 3 the radius.
    aperture_settings: [f32; 4],
    // Camera-to-world transform (inverse of the look-at view matrix).
    camera_transform: [[f32; 4]; 4],
    // [field_of_view, focal_distance, focal_curvature, 0.0]
    camera_settings: [f32; 4],
}
impl Device {
    /// Packs the camera's pose, aperture, and lens settings into `CameraData`
    /// and uploads it to the camera uniform buffer.
    pub(crate) fn update_camera(&mut self, camera: &Camera) -> Result<(), Error> {
        let mut data = CameraData::default();
        let position: Point3<f32> = camera.position.into();
        let mut direction: Vector3<f32> = camera.direction.into();
        let mut up_vector: Vector3<f32> = camera.up_vector.into();
        // Normalise so downstream math can assume unit vectors.
        direction = direction.normalize();
        up_vector = up_vector.normalize();
        // Matrix4::look_at uses a right-handed coordinate system, which is wrong for
        // us. The easiest way to work around it is by negating the camera direction.
        let xfm: Matrix4<f32> = Transform::look_at(position, position - direction, up_vector);
        data.aperture_settings = aperture_settings(&camera.aperture);
        // Invert the view matrix to get the camera-to-world transform.
        data.camera_transform = xfm.inverse_transform().unwrap().into();
        data.camera_settings[0] = camera.field_of_view;
        data.camera_settings[1] = camera.focal_distance;
        data.camera_settings[2] = camera.focal_curvature;
        data.camera_settings[3] = 0.0;
        self.camera_buffer.write(&data)
    }
}
/// Packs the aperture shape into one vec4 for the camera uniform: element 0
/// encodes the kind (-1 = point, 0 = circle, 1 = n-gon), element 3 the
/// radius; the n-gon additionally carries side count and rotation.
fn aperture_settings(aperture: &ApertureShape) -> [f32; 4] {
    match aperture {
        ApertureShape::Point => [-1.0, 0.0, 0.0, 0.0],
        ApertureShape::Circle { radius } => [0.0, 0.0, 0.0, *radius],
        ApertureShape::Ngon {
            sides,
            rotation,
            radius,
        } => [1.0, *sides as f32, *rotation as f32, *radius],
    }
}
|
#![no_std]
#![no_main]
#![feature(abi_x86_interrupt)]
#![feature(custom_test_frameworks)]
#![test_runner(xagima::testing::runner)]
#![reexport_test_harness_main = "test_main"]
#![feature(default_alloc_error_handler)]
use bootloader::BootInfo;
use core::panic::PanicInfo;
// This binary is a should-panic test: panicking is the SUCCESS path, so the
// panic handler reports success and halts.
#[panic_handler]
fn panic(_: &PanicInfo) -> ! {
    xagima::testing::success();
    xagima::halt();
}
// Kernel entry point: initialise, run the test harness, and if control ever
// returns (i.e. no panic happened) report FAILURE — the test is expected to
// panic.
#[no_mangle]
pub extern "C" fn _start(_boot_info: &'static BootInfo) -> ! {
    xagima::init(_boot_info);
    test_main();
    xagima::testing::fail();
    xagima::halt();
}
#[test_case]
fn test() {
    // Intentionally write through an invalid pointer to provoke a fault,
    // which should end in the panic handler (the success path above).
    unsafe {
        *(0xdeadbeef as *mut u64) = 1234;
    }
}
|
#![doc = include_str!("../README.md")]
/// Python bindings (compiled only with the `python` feature).
#[cfg(feature = "python")]
pub mod py;
/// Mathematical methods.
pub mod math;
/// Models for polymer physics.
pub mod physics;
|
use ggez;
use ggez::{event, timer};
use ggez::graphics;
use ggez::nalgebra as na;
use ggez::{Context, GameResult};
use ggez::conf::NumSamples;
use ggez::input::keyboard::KeyCode;
use ggez::event::KeyMods;
/// Accumulated keyboard input, interpreted as an offset from the window
/// centre on each axis.
#[derive(Debug)]
struct InputState {
    // Horizontal offset (A/D keys).
    xaxi: f32,
    // Vertical offset (W/S keys).
    yaxi: f32
}
impl Default for InputState {
fn default() -> Self {
InputState {
xaxi: 0.0,
yaxi: 0.0,
}
}
}
/// Top-level game state: currently just the accumulated input.
struct MainState {
    input: InputState
}
impl MainState {
    /// Builds the initial game state with zeroed input axes.
    fn new() -> GameResult<MainState> {
        Ok(MainState {
            input: InputState::default(),
        })
    }
}
impl event::EventHandler for MainState {
    /// Fixed-timestep update loop at 60 updates per second; currently only
    /// prints the step length (placeholder for game logic).
    fn update(&mut self, ctx: &mut Context) -> GameResult {
        const DESIRED_FPS: u32 = 60;
        while timer::check_update_time(ctx, DESIRED_FPS) {
            let seconds = 1.0 / (DESIRED_FPS as f32);
            println!("{}", seconds)
        }
        Ok(())
    }
    /// Clears the screen and draws a circle at the window centre, offset by
    /// the accumulated input axes.
    fn draw(&mut self, ctx: &mut Context) -> GameResult {
        graphics::clear(ctx, [0.2, 0.2, 0.2, 1.0].into());
        let (width, height) = ggez::graphics::size(ctx);
        let circle = graphics::Mesh::new_circle(
            ctx,
            graphics::DrawMode::fill(),
            na::Point2::new(width/2.0, height/2.0),
            50.0,
            1.0,
            [1.0,1.0,1.0,1.0].into()
        )?;
        // The input axes act as the draw offset for the mesh.
        graphics::draw(ctx, &circle, (na::Point2::new(self.input.xaxi, self.input.yaxi),))?;
        graphics::present(ctx)?;
        Ok(())
    }
    /// WASD moves the circle by 3 units per key press; Escape quits.
    fn key_down_event(
        &mut self,
        ctx: &mut Context,
        keycode: KeyCode,
        _keymod: KeyMods,
        _repeat: bool,
    ) {
        match keycode {
            KeyCode::W => {
                println!("W");
                self.input.yaxi -= 3.0;
            }
            KeyCode::S => {
                println!("S");
                self.input.yaxi += 3.0;
            }
            KeyCode::D => {
                println!("D");
                self.input.xaxi += 3.0;
            }
            KeyCode::A => {
                println!("A");
                self.input.xaxi -= 3.0;
            }
            KeyCode::Escape => event::quit(ctx),
            _ => {}
        }
    }
}
/// Creates the window (16x MSAA, vsync) and runs the game loop.
pub fn main() -> GameResult {
    let ws = ggez::conf::WindowSetup {
        title: "Test".to_string(),
        samples: NumSamples::Sixteen,
        vsync: true,
        icon: "".to_string(),
        srgb: false
    };
    let cb = ggez::ContextBuilder::new("aaaa", "ggez")
        .window_setup(ws);
    let (ctx, event_loop) = &mut cb.build()?;
    let state = &mut MainState::new()?;
    // Blocks until the event loop exits (e.g. via Escape).
    event::run(ctx, event_loop, state)
}
// DMA interrupt flag clear register (IFCR) reader types (svd2rust-style
// generated code). Every field has only the value 1 (`Clear`), so the
// `variant()` accessors return `Option` and yield `None` for 0.
#[doc = "Register `IFCR` reader"]
pub type R = crate::R<IFCR_SPEC>;
// ---- CGIF1: clear channel 1 global interrupt flag ----
#[doc = "Field `CGIF1` reader - Clear channel 1 global interrupt flag"]
pub type CGIF1_R = crate::BitReader<CGIF1_A>;
#[doc = "Clear channel 1 global interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CGIF1_A {
    #[doc = "1: Clears the GIF, TEIF, HTIF, TCIF flags in the ISR register"]
    Clear = 1,
}
impl From<CGIF1_A> for bool {
    #[inline(always)]
    fn from(variant: CGIF1_A) -> Self {
        variant as u8 != 0
    }
}
impl CGIF1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CGIF1_A> {
        match self.bits {
            true => Some(CGIF1_A::Clear),
            _ => None,
        }
    }
    #[doc = "Clears the GIF, TEIF, HTIF, TCIF flags in the ISR register"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == CGIF1_A::Clear
    }
}
// ---- CTCIF1: clear channel 1 transfer complete flag ----
#[doc = "Field `CTCIF1` reader - Clear channel 1 transfer complete flag"]
pub type CTCIF1_R = crate::BitReader<CTCIF1_A>;
#[doc = "Clear channel 1 transfer complete flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CTCIF1_A {
    #[doc = "1: Clears the TCIF flag in the ISR register"]
    Clear = 1,
}
impl From<CTCIF1_A> for bool {
    #[inline(always)]
    fn from(variant: CTCIF1_A) -> Self {
        variant as u8 != 0
    }
}
impl CTCIF1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CTCIF1_A> {
        match self.bits {
            true => Some(CTCIF1_A::Clear),
            _ => None,
        }
    }
    #[doc = "Clears the TCIF flag in the ISR register"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == CTCIF1_A::Clear
    }
}
// ---- CHTIF1: clear channel 1 half transfer flag ----
#[doc = "Field `CHTIF1` reader - Clear channel 1 half transfer flag"]
pub type CHTIF1_R = crate::BitReader<CHTIF1_A>;
#[doc = "Clear channel 1 half transfer flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CHTIF1_A {
    #[doc = "1: Clears the HTIF flag in the ISR register"]
    Clear = 1,
}
impl From<CHTIF1_A> for bool {
    #[inline(always)]
    fn from(variant: CHTIF1_A) -> Self {
        variant as u8 != 0
    }
}
impl CHTIF1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CHTIF1_A> {
        match self.bits {
            true => Some(CHTIF1_A::Clear),
            _ => None,
        }
    }
    #[doc = "Clears the HTIF flag in the ISR register"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == CHTIF1_A::Clear
    }
}
// ---- CTEIF1: clear channel 1 transfer error flag ----
#[doc = "Field `CTEIF1` reader - Clear channel 1 transfer error flag"]
pub type CTEIF1_R = crate::BitReader<CTEIF1_A>;
#[doc = "Clear channel 1 transfer error flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CTEIF1_A {
    #[doc = "1: Clears the TEIF flag in the ISR register"]
    Clear = 1,
}
impl From<CTEIF1_A> for bool {
    #[inline(always)]
    fn from(variant: CTEIF1_A) -> Self {
        variant as u8 != 0
    }
}
impl CTEIF1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CTEIF1_A> {
        match self.bits {
            true => Some(CTEIF1_A::Clear),
            _ => None,
        }
    }
    #[doc = "Clears the TEIF flag in the ISR register"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == CTEIF1_A::Clear
    }
}
// Channels 2-5 share the channel-1 reader types; only the bit offsets differ.
#[doc = "Field `CGIF2` reader - Clear channel 2 global interrupt flag"]
pub use CGIF1_R as CGIF2_R;
#[doc = "Field `CGIF3` reader - Clear channel 3 global interrupt flag"]
pub use CGIF1_R as CGIF3_R;
#[doc = "Field `CGIF4` reader - Clear channel 4 global interrupt flag"]
pub use CGIF1_R as CGIF4_R;
#[doc = "Field `CGIF5` reader - Clear channel 5 global interrupt flag"]
pub use CGIF1_R as CGIF5_R;
#[doc = "Field `CHTIF2` reader - Clear channel 2 half transfer flag"]
pub use CHTIF1_R as CHTIF2_R;
#[doc = "Field `CHTIF3` reader - Clear channel 3 half transfer flag"]
pub use CHTIF1_R as CHTIF3_R;
#[doc = "Field `CHTIF4` reader - Clear channel 4 half transfer flag"]
pub use CHTIF1_R as CHTIF4_R;
#[doc = "Field `CHTIF5` reader - Clear channel 5 half transfer flag"]
pub use CHTIF1_R as CHTIF5_R;
#[doc = "Field `CTCIF2` reader - Clear channel 2 transfer complete flag"]
pub use CTCIF1_R as CTCIF2_R;
#[doc = "Field `CTCIF3` reader - Clear channel 3 transfer complete flag"]
pub use CTCIF1_R as CTCIF3_R;
#[doc = "Field `CTCIF4` reader - Clear channel 4 transfer complete flag"]
pub use CTCIF1_R as CTCIF4_R;
#[doc = "Field `CTCIF5` reader - Clear channel 5 transfer complete flag"]
pub use CTCIF1_R as CTCIF5_R;
#[doc = "Field `CTEIF2` reader - Clear channel 2 transfer error flag"]
pub use CTEIF1_R as CTEIF2_R;
#[doc = "Field `CTEIF3` reader - Clear channel 3 transfer error flag"]
pub use CTEIF1_R as CTEIF3_R;
#[doc = "Field `CTEIF4` reader - Clear channel 4 transfer error flag"]
pub use CTEIF1_R as CTEIF4_R;
#[doc = "Field `CTEIF5` reader - Clear channel 5 transfer error flag"]
pub use CTEIF1_R as CTEIF5_R;
impl R {
    /// Returns `true` when bit `offset` of the raw register value is set.
    #[inline(always)]
    fn bit_at(&self, offset: u32) -> bool {
        (self.bits >> offset) & 1 != 0
    }
    #[doc = "Bit 0 - Clear channel 1 global interrupt flag"]
    #[inline(always)]
    pub fn cgif1(&self) -> CGIF1_R {
        CGIF1_R::new(self.bit_at(0))
    }
    #[doc = "Bit 1 - Clear channel 1 transfer complete flag"]
    #[inline(always)]
    pub fn ctcif1(&self) -> CTCIF1_R {
        CTCIF1_R::new(self.bit_at(1))
    }
    #[doc = "Bit 2 - Clear channel 1 half transfer flag"]
    #[inline(always)]
    pub fn chtif1(&self) -> CHTIF1_R {
        CHTIF1_R::new(self.bit_at(2))
    }
    #[doc = "Bit 3 - Clear channel 1 transfer error flag"]
    #[inline(always)]
    pub fn cteif1(&self) -> CTEIF1_R {
        CTEIF1_R::new(self.bit_at(3))
    }
    #[doc = "Bit 4 - Clear channel 2 global interrupt flag"]
    #[inline(always)]
    pub fn cgif2(&self) -> CGIF2_R {
        CGIF2_R::new(self.bit_at(4))
    }
    #[doc = "Bit 5 - Clear channel 2 transfer complete flag"]
    #[inline(always)]
    pub fn ctcif2(&self) -> CTCIF2_R {
        CTCIF2_R::new(self.bit_at(5))
    }
    #[doc = "Bit 6 - Clear channel 2 half transfer flag"]
    #[inline(always)]
    pub fn chtif2(&self) -> CHTIF2_R {
        CHTIF2_R::new(self.bit_at(6))
    }
    #[doc = "Bit 7 - Clear channel 2 transfer error flag"]
    #[inline(always)]
    pub fn cteif2(&self) -> CTEIF2_R {
        CTEIF2_R::new(self.bit_at(7))
    }
    #[doc = "Bit 8 - Clear channel 3 global interrupt flag"]
    #[inline(always)]
    pub fn cgif3(&self) -> CGIF3_R {
        CGIF3_R::new(self.bit_at(8))
    }
    #[doc = "Bit 9 - Clear channel 3 transfer complete flag"]
    #[inline(always)]
    pub fn ctcif3(&self) -> CTCIF3_R {
        CTCIF3_R::new(self.bit_at(9))
    }
    #[doc = "Bit 10 - Clear channel 3 half transfer flag"]
    #[inline(always)]
    pub fn chtif3(&self) -> CHTIF3_R {
        CHTIF3_R::new(self.bit_at(10))
    }
    #[doc = "Bit 11 - Clear channel 3 transfer error flag"]
    #[inline(always)]
    pub fn cteif3(&self) -> CTEIF3_R {
        CTEIF3_R::new(self.bit_at(11))
    }
    #[doc = "Bit 12 - Clear channel 4 global interrupt flag"]
    #[inline(always)]
    pub fn cgif4(&self) -> CGIF4_R {
        CGIF4_R::new(self.bit_at(12))
    }
    #[doc = "Bit 13 - Clear channel 4 transfer complete flag"]
    #[inline(always)]
    pub fn ctcif4(&self) -> CTCIF4_R {
        CTCIF4_R::new(self.bit_at(13))
    }
    #[doc = "Bit 14 - Clear channel 4 half transfer flag"]
    #[inline(always)]
    pub fn chtif4(&self) -> CHTIF4_R {
        CHTIF4_R::new(self.bit_at(14))
    }
    #[doc = "Bit 15 - Clear channel 4 transfer error flag"]
    #[inline(always)]
    pub fn cteif4(&self) -> CTEIF4_R {
        CTEIF4_R::new(self.bit_at(15))
    }
    #[doc = "Bit 16 - Clear channel 5 global interrupt flag"]
    #[inline(always)]
    pub fn cgif5(&self) -> CGIF5_R {
        CGIF5_R::new(self.bit_at(16))
    }
    #[doc = "Bit 17 - Clear channel 5 transfer complete flag"]
    #[inline(always)]
    pub fn ctcif5(&self) -> CTCIF5_R {
        CTCIF5_R::new(self.bit_at(17))
    }
    #[doc = "Bit 18 - Clear channel 5 half transfer flag"]
    #[inline(always)]
    pub fn chtif5(&self) -> CHTIF5_R {
        CHTIF5_R::new(self.bit_at(18))
    }
    #[doc = "Bit 19 - Clear channel 5 transfer error flag"]
    #[inline(always)]
    pub fn cteif5(&self) -> CTEIF5_R {
        CTEIF5_R::new(self.bit_at(19))
    }
}
#[doc = "high interrupt status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ifcr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// NOTE(review): the SVD-sourced description above says "high interrupt
// status register", but this spec models IFCR (the DMA interrupt flag
// clear register) — confirm against the device's reference manual / SVD.
pub struct IFCR_SPEC;
// The raw register value is 32 bits wide.
impl crate::RegisterSpec for IFCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ifcr::R`](R) reader structure"]
impl crate::Readable for IFCR_SPEC {}
#[doc = "`reset()` method sets IFCR to value 0"]
impl crate::Resettable for IFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use clap::{App, Arg, ArgMatches, Values};
pub mod check;
pub mod generate;
pub mod import;
pub mod solve;
use std::fs::File;
use std::io::{stdout, BufReader, BufWriter, Write};
use std::process;
use std::str::FromStr;
use vrp_cli::extensions::check::check_pragmatic_solution;
/// Wraps the output destination in a buffered, type-erased writer:
/// the given file when present, otherwise stdout.
pub(crate) fn create_write_buffer(out_file: Option<File>) -> BufWriter<Box<dyn Write>> {
    let sink: Box<dyn Write> = match out_file {
        Some(file) => Box::new(file),
        None => Box::new(stdout()),
    };
    BufWriter::new(sink)
}
/// Opens `path` for reading; on failure prints a user-facing error
/// (using `description` to name the file's role) and exits the process.
fn open_file(path: &str, description: &str) -> File {
    match File::open(path) {
        Ok(file) => file,
        Err(err) => {
            eprintln!("Cannot open {} file '{}': '{}'", description, path, err.to_string());
            process::exit(1);
        }
    }
}
/// Creates (or truncates) `path` for writing; on failure prints a
/// user-facing error (using `description`) and exits the process.
fn create_file(path: &str, description: &str) -> File {
    match File::create(path) {
        Ok(file) => file,
        Err(err) => {
            eprintln!("Cannot create {} file '{}': '{}'", description, path, err.to_string());
            process::exit(1);
        }
    }
}
/// Reads the optional CLI argument `arg_name` and parses it as a float type.
///
/// Returns `Ok(None)` when the argument was not supplied, `Ok(Some(v))` on a
/// successful parse, and `Err` with a message mentioning `arg_desc` otherwise.
fn parse_float_value<T: FromStr<Err = std::num::ParseFloatError>>(
    matches: &ArgMatches,
    arg_name: &str,
    arg_desc: &str,
) -> Result<Option<T>, String> {
    match matches.value_of(arg_name) {
        None => Ok(None),
        Some(arg) => arg
            .parse::<T>()
            .map(Some)
            .map_err(|err| format!("cannot get float value, error: '{}': '{}'", err.to_string(), arg_desc)),
    }
}
/// Reads the optional CLI argument `arg_name` and parses it as an integer type.
///
/// Returns `Ok(None)` when the argument was not supplied, `Ok(Some(v))` on a
/// successful parse, and `Err` with a message mentioning `arg_desc` otherwise.
fn parse_int_value<T: FromStr<Err = std::num::ParseIntError>>(
    matches: &ArgMatches,
    arg_name: &str,
    arg_desc: &str,
) -> Result<Option<T>, String> {
    match matches.value_of(arg_name) {
        None => Ok(None),
        Some(arg) => arg
            .parse::<T>()
            .map(Some)
            .map_err(|err| format!("cannot get integer value, error: '{}': '{}'", err.to_string(), arg_desc)),
    }
}
/// Validates a solution file against its problem definition.
///
/// Opens the problem/solution/matrix files named by the CLI arguments and,
/// for the "pragmatic" format, runs the pragmatic checker. Any failure to
/// open a file terminates the process (see `open_file`).
fn check_solution(
    matches: &ArgMatches,
    input_format: &str,
    problem_arg_name: &str,
    solution_arg_name: &str,
    matrix_arg_name: &str,
) -> Result<(), String> {
    // The problem argument may carry multiple paths; open them all.
    let problem_files = matches
        .values_of(problem_arg_name)
        .map(|paths: Values| paths.map(|path| BufReader::new(open_file(path, "problem"))).collect::<Vec<_>>());
    // The solution argument takes at most one path.
    let solution_file = matches.value_of(solution_arg_name).map(|path| BufReader::new(open_file(path, "solution")));
    // Routing matrices are optional; `None` when the argument is absent.
    let matrix_files = matches
        .values_of(matrix_arg_name)
        .map(|paths: Values| paths.map(|path| BufReader::new(open_file(path, "routing matrix"))).collect());
    match (input_format, problem_files, solution_file) {
        // Exactly one problem file plus one solution file: run the checker.
        ("pragmatic", Some(mut problem_files), Some(solution_file)) if problem_files.len() == 1 => {
            check_pragmatic_solution(problem_files.swap_remove(0), solution_file, matrix_files)
        }
        // Right format, wrong number of files.
        ("pragmatic", _, _) => {
            Err(vec!["pragmatic format expects one problem, one solution file, and optionally matrices".to_string()])
        }
        _ => Err(vec![format!("unknown format: '{}'", input_format)]),
    }
    // Collapse the checker's list of errors into a single message.
    .map_err(|err| format!("checker found {} errors:\n{}", err.len(), err.join("\n")))
}
|
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(heap_api)]
#![feature(raw)]
#![feature(unique)]
//! # mo-gc
//!
//! A pauseless, concurrent, generational, parallel mark-and-sweep garbage collector.
//!
//! This is an experimental design to research an idea into a pauseless garbage collector.
//!
//! The GC handles multiple OS thread mutators without stopping their worlds. It does this by
//! deferring reference counting of stack-rooted pointers to the GC thread through a journal
//! of stack root changes. The journal itself is fast to write to, adding an amortized 25% to
//! the cost of `Box::new()` using jemalloc for a 64 byte object.
//!
//! Thus the mutator never needs to be stopped for its stack to be scanned or for any collection
//! phase.
//!
//! See [project TODO](https://github.com/pliniker/mo-gc/blob/master/TODO.md) for limitations.
//!
//! ## Usage
//!
//! Usage is best illustrated by the examples provided.
extern crate bitmaptrie;
extern crate num_cpus;
extern crate scoped_pool;
extern crate time;
mod appthread;
mod constants;
mod gcthread;
mod heap;
mod journal;
mod parheap;
mod statistics;
mod trace;
mod youngheap;
pub use appthread::{AppThread, Gc, GcAtomic, GcBox, GcRoot};
pub use constants::*;
pub use gcthread::GcThread;
pub use heap::{CollectOps, TraceOps, TraceStack};
pub use journal::{make_journal, Receiver, Sender};
pub use parheap::ParHeap;
pub use statistics::StatsLogger;
pub use trace::Trace;
pub use youngheap::YoungHeap;
|
use crate::engine::cache::{CachedEntities, CachedEntity};
use crate::frame::frame::ConcreteGraphicsPipeline;
use crate::scene::camera::CameraMatrices;
use crate::scene::lights::Light;
use crate::scene::lights::PointLight;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSetBuf;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSetImg;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSetSampler;
use vulkano::image::AttachmentImage;
use vulkano::image::SwapchainImage;
use winit::window::Window;
use std::convert::TryInto;
use std::sync::Arc;
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::DynamicState;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::device::Queue;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::Subpass;
use vulkano::image::ImageViewAccess;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::pipeline::GraphicsPipelineAbstract;
use vulkano::sampler::{Filter, MipmapMode, Sampler, SamplerAddressMode};
// type FullImage = ImageViewAccess + Send + Sync + Clone + 'static;
// Concrete type of the descriptor set built in `LightingSystem::new`: an
// empty slot, then three sampled G-buffer attachment images (each paired
// with its sampler), then the UBO buffer. The nesting mirrors vulkano's
// builder-chain type accumulation (one tuple layer per `add_*` call).
type PDS = PersistentDescriptorSet<(
    (
        (
            (
                (
                    (
                        ((), PersistentDescriptorSetImg<Arc<AttachmentImage>>),
                        PersistentDescriptorSetSampler,
                    ),
                    PersistentDescriptorSetImg<Arc<AttachmentImage>>,
                ),
                PersistentDescriptorSetSampler,
            ),
            PersistentDescriptorSetImg<Arc<AttachmentImage>>,
        ),
        PersistentDescriptorSetSampler,
    ),
    PersistentDescriptorSetBuf<Arc<CpuAccessibleBuffer<fs::ty::UBO>>>,
)>;
/// Allows applying a directional light source to a scene.
pub struct LightingSystem {
    // Queue the secondary command buffers built by `draw` are created for.
    gfx_queue: Arc<Queue>,
    // Lighting-pass graphics pipeline (deferred.vert / deferred.frag).
    pipeline: Arc<ConcreteGraphicsPipeline>,
    // Nearest-filter sampler shared by all G-buffer bindings.
    default_sampler: Arc<Sampler>,
    // Uniform buffer with lights, view position, and debug display level;
    // rewritten on every `draw` call.
    buff: Arc<CpuAccessibleBuffer<fs::ty::UBO>>,
    // Descriptor set binding the G-buffer images and `buff`; built once.
    set: Arc<PDS>,
}
impl LightingSystem {
    /// Initializes the lighting system.
    ///
    /// Builds the lighting graphics pipeline for `subpass`, a nearest-filter
    /// sampler, a uniform buffer pre-filled with the default point lights and
    /// `color_debug_level`, and a persistent descriptor set binding the three
    /// G-buffer attachments (`position_buffer`, `normals_input`,
    /// `albedo_input`) plus that uniform buffer.
    ///
    /// Panics if shader loading, pipeline/sampler/buffer/descriptor-set
    /// creation fails, or if the number of default lights does not match the
    /// shader's fixed-size light array.
    pub fn new(
        gfx_queue: Arc<Queue>,
        subpass: Subpass<Arc<dyn RenderPassAbstract + Send + Sync + 'static>>,
        position_buffer: Arc<AttachmentImage>,
        normals_input: Arc<AttachmentImage>,
        albedo_input: Arc<AttachmentImage>,
        color_debug_level: i32,
    ) -> LightingSystem {
        // Fixed typo in the trace message ("insance" -> "instance").
        log::trace!("instance of {}", std::any::type_name::<Self>());
        let pipeline = {
            let vs = vs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");
            let fs = fs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");
            Arc::new(
                GraphicsPipeline::start()
                    .vertex_input_single_buffer()
                    .vertex_shader(vs.main_entry_point(), ())
                    .triangle_list()
                    .viewports_dynamic_scissors_irrelevant(1)
                    .fragment_shader(fs.main_entry_point(), ())
                    .render_pass(subpass)
                    .build(gfx_queue.device().clone())
                    .unwrap(),
            )
        };
        // Nearest filtering with clamp-to-edge for sampling the G-buffer.
        let default_sampler = Sampler::new(
            pipeline.device().clone(),
            Filter::Nearest,
            Filter::Nearest,
            MipmapMode::Linear,
            SamplerAddressMode::ClampToEdge,
            SamplerAddressMode::ClampToEdge,
            SamplerAddressMode::ClampToEdge,
            0.0,
            1.0,
            0.0,
            1.0,
        )
        .unwrap();
        // Pack the default lights into the shader's light struct layout.
        let default_lights = PointLight::default_lights();
        let mut packed_lights = Vec::new();
        for l in default_lights {
            match l {
                Light::Point(pl) => packed_lights.push(fs::ty::Light {
                    position: pl.position.into(),
                    color: pl.color.into(),
                    radius: pl.radius,
                }),
            }
        }
        // `lights` is a fixed-size array in the shader; `try_into` panics if
        // the count differs (same invariant as before).
        let push_constants = fs::ty::UBO {
            lights: packed_lights.try_into().unwrap(),
            viewPos: [1.0, 1.0, 1.0, 0.0],
            displayDebugTarget: color_debug_level,
        };
        let buff = CpuAccessibleBuffer::from_data(
            pipeline.device().clone(),
            BufferUsage::uniform_buffer(),
            false,
            push_constants,
        )
        .unwrap();
        let layout = pipeline.layout().descriptor_set_layout(0).unwrap();
        // Binding order must match the fragment shader's descriptor layout:
        // an empty slot, then position/normals/albedo images, then the UBO.
        let set = Arc::new(
            PersistentDescriptorSet::start(layout.clone())
                .add_empty()
                .unwrap()
                .add_sampled_image(position_buffer, default_sampler.clone())
                .unwrap()
                .add_sampled_image(normals_input, default_sampler.clone())
                .unwrap()
                .add_sampled_image(albedo_input, default_sampler.clone())
                .unwrap()
                .add_buffer(buff.clone())
                .unwrap()
                .build()
                .unwrap(),
        );
        LightingSystem {
            gfx_queue,
            pipeline,
            default_sampler,
            buff,
            set,
        }
    }
    /// Builds a secondary command buffer that draws the lighting pass for the
    /// first cached entity, after refreshing the uniform buffer with the
    /// current `lights` and camera position.
    ///
    /// NOTE(review): `_albedo_input`, `_normals_input`, `_depth_input`,
    /// `_update_images_pds`, and `_color_debug_level` are currently unused —
    /// the descriptor set and debug level are fixed at construction time.
    /// They are underscore-prefixed (callers pass positionally, so the
    /// interface is unchanged) pending a decision on refreshing the set here.
    pub fn draw(
        &self,
        _albedo_input: Arc<AttachmentImage>,
        _normals_input: Arc<AttachmentImage>,
        _depth_input: Arc<AttachmentImage>,
        lights: &[Light],
        matrices_buff: &CameraMatrices,
        cached_scene: &CachedEntities,
        dynamic_state: &DynamicState,
        _update_images_pds: bool,
        _color_debug_level: i32,
    ) -> AutoCommandBuffer {
        // Scope the CPU-visible buffer write so the lock is released before
        // command-buffer recording starts.
        {
            // The shader appears to expect the negated camera position —
            // preserved from the original code; TODO confirm convention.
            let eye = matrices_buff.camera_position;
            let view_pos = [eye[0] * -1.0, eye[1] * -1.0, eye[2] * -1.0, 0.0];
            let mut packed_lights = Vec::new();
            for l in lights {
                match l {
                    Light::Point(pl) => packed_lights.push(fs::ty::Light {
                        position: pl.position.into(),
                        color: pl.color.into(),
                        radius: pl.radius,
                    }),
                }
            }
            let mut content = self.buff.write().unwrap();
            content.lights = packed_lights.try_into().unwrap();
            content.viewPos = view_pos;
        }
        let mut builder = AutoCommandBufferBuilder::secondary_graphics_one_time_submit(
            self.gfx_queue.device().clone(),
            self.gfx_queue.family(),
            self.pipeline.clone().subpass(),
        )
        .unwrap();
        // NOTE(review): only the first cached entity is drawn (a loop over all
        // entities was commented out upstream); this indexing panics when
        // `cached_scene.entities` is empty — confirm intended behavior.
        let cached_entity = &cached_scene.entities[0];
        match cached_entity {
            CachedEntity::Regular(r) => {
                builder
                    .draw(
                        self.pipeline.clone(),
                        dynamic_state,
                        r.vert_params.clone(),
                        self.set.clone(),
                        (),
                    )
                    .unwrap();
            }
            CachedEntity::Indexed(i) => {
                builder
                    .draw_indexed(
                        self.pipeline.clone(),
                        dynamic_state,
                        i.vert_params.clone(),
                        i.indices.clone(),
                        self.set.clone(),
                        (),
                    )
                    .unwrap();
            }
        }
        builder.build().unwrap()
    }
}
// Vertex type for the lighting pass: 2D position only (screen-space quad).
#[derive(Default, Debug, Clone)]
struct Vertex {
    position: [f32; 2],
}
// Generates the vulkano vertex-definition impl for `Vertex`'s `position` field.
vulkano::impl_vertex!(Vertex, position);
// Vertex shader for the deferred lighting pass; compiled at build time by
// `vulkano_shaders` (provides `vs::Shader::load`).
mod vs {
    vulkano_shaders::shader! {
        ty: "vertex",
        path: "src/kikansha/frame/shaders/deferred.vert"
    }
}
// Fragment shader for the deferred lighting pass; compiled at build time by
// `vulkano_shaders` (provides `fs::Shader::load` and the generated `fs::ty::*`
// types such as `UBO` and `Light` used above).
mod fs {
    vulkano_shaders::shader! {
        ty: "fragment",
        types_meta: {
            #[derive(Clone, Debug, Copy)]
        },
        path: "src/kikansha/frame/shaders/deferred.frag"
    }
}
|
use crate::error::Error;
use std::cell::RefCell;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use tar::Builder;
/// Owning collection of file paths gathered from a manifest directory;
/// iterable by value, reference, or mutable reference (see impls below).
pub struct ManifestIterator {
    paths: Vec<PathBuf>,
}
// move
// Consuming iteration: yields owned `PathBuf`s, dropping the collection.
impl IntoIterator for ManifestIterator {
    type Item = PathBuf;
    type IntoIter = std::vec::IntoIter<Self::Item>;
    fn into_iter(self) -> Self::IntoIter {
        self.paths.into_iter()
    }
}
// ref
// Borrowing iteration: yields `&PathBuf`, leaving the collection intact.
impl<'a> IntoIterator for &'a ManifestIterator {
    type Item = &'a PathBuf;
    type IntoIter = std::slice::Iter<'a, PathBuf>;
    fn into_iter(self) -> Self::IntoIter {
        self.paths.iter()
    }
}
// mut ref
// Mutable borrowing iteration: yields `&mut PathBuf` for in-place edits.
impl<'a> IntoIterator for &'a mut ManifestIterator {
    type Item = &'a mut PathBuf;
    type IntoIter = std::slice::IterMut<'a, PathBuf>;
    fn into_iter(self) -> Self::IntoIter {
        self.paths.iter_mut()
    }
}
/// A manifest rooted at a directory; `files()` lists its contents recursively.
pub struct Manifest<'a> {
    // Root directory; validated to be a directory in `Manifest::new`.
    path: &'a Path,
}
impl<'a> Manifest<'a> {
pub fn new(maybe_path: &'a Path) -> Result<Manifest, Error> {
if !maybe_path.is_dir() {
return Err(Error::PathNotADir(maybe_path.to_string_lossy().to_string()));
}
Ok(Manifest { path: maybe_path })
}
pub fn files(&mut self) -> Result<ManifestIterator, Error> {
let rc = RefCell::new(Vec::<PathBuf>::new());
let mut working_path = PathBuf::new();
working_path.push(self.path);
Manifest::visit_paths(&working_path, &rc)?;
Ok(ManifestIterator { paths: rc.take() })
}
fn visit_paths(path: &'a Path, rc: &RefCell<Vec<PathBuf>>) -> Result<(), Error> {
if path.is_dir() {
for entry in fs::read_dir(path)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
Manifest::visit_paths(&path, rc)?;
} else {
let mut paths = rc.borrow_mut();
paths.push(path);
}
}
}
Ok(())
}
}
/// Streams every path yielded by `iterator` into `archive_file` as a tar
/// archive, naming each entry by its path relative to `profile_path`.
///
/// Fails with `Error::OpenFile` if a file cannot be opened and
/// `Error::ArchiveFileName` if a path is not under `profile_path`.
pub fn create_and_write_archive<I>(
    iterator: I,
    archive_file: &File,
    profile_path: &Path,
) -> Result<(), Error>
where
    I: IntoIterator<Item = PathBuf>,
{
    let mut archive = Builder::new(archive_file);
    for path in iterator {
        let mut file = match File::open(&path) {
            Ok(file) => file,
            Err(error) => return Err(Error::OpenFile(error, path.to_string_lossy().to_string())),
        };
        // Store the path relative to the profile root so the archive does
        // not embed absolute paths.
        let archive_file_name = match path.strip_prefix(profile_path) {
            Ok(archive_file_name) => archive_file_name,
            Err(e) => return Err(Error::ArchiveFileName(e, path)),
        };
        archive.append_file(archive_file_name, &mut file)?;
    }
    // Writes the tar trailer; without this the archive would be truncated.
    archive.finish()?;
    Ok(())
}
|
//! This is a mod for storing and parsing configuration
//!
//! According to shadowsocks' official documentation, the standard configuration
//! file should be in JSON format:
//!
//! ```ignore
//! {
//! "server": "127.0.0.1",
//! "server_port": 1080,
//! "local_port": 8388,
//! "password": "the-password",
//! "connect_timeout": 30,
//! "write_timeout": 30,
//! "read_timeout": 30,
//! "method": "aes-256-cfb",
//! "local_address": "127.0.0.1"
//! }
//! ```
//!
//! But this configuration is not for using multiple shadowsocks server, so we
//! introduce an extended configuration file format:
//!
//! ```ignore
//! {
//! "servers": [
//! {
//! "address": "127.0.0.1",
//! "port": 1080,
//! "password": "hellofuck",
//! "method": "bf-cfb"
//! },
//! {
//! "address": "127.0.0.1",
//! "port": 1081,
//! "password": "hellofuck",
//! "method": "aes-128-cfb"
//! }
//! ],
//! "local_port": 8388,
//! "local_address": "127.0.0.1"
//! }
//! ```
//!
//! These defined servers will be used with a load-balancing algorithm.
use std::{
fmt::{self, Debug, Display, Formatter},
net::SocketAddr,
str::FromStr,
string::ToString,
time::Duration,
};
use bytes::Bytes;
use crypto::CipherType;
use tracing::trace;
/// Server address
#[derive(Clone, Debug)]
pub enum ServerAddr {
    /// IP Address
    SocketAddr(SocketAddr),
    /// Domain name address, eg. example.com:8080
    /// (stored as the unresolved `(domain, port)` pair)
    DomainName(String, u16),
}
impl ServerAddr {
    /// Get address for server listener
    /// Panic if address is domain name
    pub fn listen_addr(&self) -> &SocketAddr {
        match self {
            ServerAddr::SocketAddr(s) => s,
            ServerAddr::DomainName(..) => panic!("Cannot use domain name as server listen address"),
        }
    }
    /// Get string representation of domain
    pub fn host(&self) -> String {
        match self {
            ServerAddr::SocketAddr(s) => s.ip().to_string(),
            ServerAddr::DomainName(dm, _) => dm.clone(),
        }
    }
    /// Get port
    pub fn port(&self) -> u16 {
        match self {
            ServerAddr::SocketAddr(s) => s.port(),
            ServerAddr::DomainName(_, p) => *p,
        }
    }
}
/// Parse `ServerAddr` error
///
/// Returned by `ServerAddr::from_str` when the input is neither a socket
/// address nor a valid `domain:port` pair.
#[derive(Debug)]
pub struct ServerAddrError;
impl FromStr for ServerAddr {
    type Err = ServerAddrError;
    /// Parses either a socket address (`"127.0.0.1:1080"`, including IPv6
    /// forms handled by `SocketAddr`) or a `domain:port` pair
    /// (`"example.com:8080"`).
    fn from_str(s: &str) -> Result<ServerAddr, ServerAddrError> {
        if let Ok(addr) = s.parse::<SocketAddr>() {
            return Ok(ServerAddr::SocketAddr(addr));
        }
        // Split on the FIRST ':' only. The previous `split(':')` silently
        // dropped trailing segments, so "host:80:junk" parsed as
        // ("host", 80); with `splitn(2, ..)` the port parse now rejects it.
        // An empty domain (":80") is rejected as well.
        let mut sp = s.splitn(2, ':');
        match (sp.next(), sp.next()) {
            (Some(dn), Some(port)) if !dn.is_empty() => match port.parse::<u16>() {
                Ok(port) => Ok(ServerAddr::DomainName(dn.to_owned(), port)),
                Err(..) => Err(ServerAddrError),
            },
            _ => Err(ServerAddrError),
        }
    }
}
impl Display for ServerAddr {
    /// Formats as `ip:port` (socket addresses) or `domain:port`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match self {
            ServerAddr::SocketAddr(a) => Display::fmt(a, f),
            ServerAddr::DomainName(d, port) => write!(f, "{}:{}", d, port),
        }
    }
}
/// Configuration for a server
#[derive(Clone, Debug)]
pub struct ServerConfig {
/// Server address
addr: ServerAddr,
/// Encryption password (key)
password: String,
/// Encryption type (method)
method: CipherType,
/// Connection timeout
connect_timeout: Duration,
/// Read timeout
read_timeout: Duration,
/// Write timeout
write_timeout: Duration,
/// Encryption key
enc_key: Bytes,
}
impl ServerConfig {
    /// Creates a new ServerConfig.
    ///
    /// The encryption key is derived from `pwd` with the cipher's
    /// key-derivation routine (`CipherType::bytes_to_key`).
    pub fn new(
        addr: ServerAddr,
        pwd: String,
        method: CipherType,
        connect_timeout: Duration,
        read_timeout: Duration,
        write_timeout: Duration,
    ) -> ServerConfig {
        let enc_key = method.bytes_to_key(pwd.as_bytes());
        // SECURITY: the previous trace line logged the plaintext password
        // and the derived key; never write secrets to the log.
        trace!("Initialized server config for addr: {}", addr);
        ServerConfig {
            addr,
            password: pwd,
            method,
            connect_timeout,
            read_timeout,
            write_timeout,
            enc_key,
        }
    }
    /// Create a basic config with 30-second connect/read/write timeouts.
    pub fn basic(addr: SocketAddr, password: String, method: CipherType) -> ServerConfig {
        ServerConfig::new(
            ServerAddr::SocketAddr(addr),
            password,
            method,
            Duration::from_secs(30),
            Duration::from_secs(30),
            Duration::from_secs(30),
        )
    }
    /// Set encryption method (and password), re-deriving the key.
    pub fn set_method(&mut self, t: CipherType, pwd: String) {
        self.password = pwd;
        self.method = t;
        self.enc_key = t.bytes_to_key(self.password.as_bytes());
    }
    /// Set server addr
    pub fn set_addr(&mut self, a: ServerAddr) {
        self.addr = a;
    }
    /// Get server address
    pub fn addr(&self) -> &ServerAddr {
        &self.addr
    }
    /// Get encryption key
    pub fn key(&self) -> &[u8] {
        &self.enc_key[..]
    }
    /// Get password
    pub fn password(&self) -> &str {
        self.password.as_str()
    }
    /// Get method
    pub fn method(&self) -> CipherType {
        self.method
    }
    /// Get connect timeout
    pub fn connect_timeout(&self) -> Duration {
        self.connect_timeout
    }
    /// Get read timeout
    pub fn read_timeout(&self) -> Duration {
        self.read_timeout
    }
    /// Get write timeout
    pub fn write_timeout(&self) -> Duration {
        self.write_timeout
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.