text stringlengths 8 4.13M |
|---|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::error::Error;
use std::fmt::Debug;
use primitives::errors::{CommonError, CommonErrorKind, Display};
/// Executor-level error kinds; each converts into a [`CommonError`]
/// tagged with `CommonErrorKind::Executor` (see the `From` impl below).
#[derive(Debug, Display)]
pub enum ErrorKind {
    #[display(fmt = "Invalid txs: {:?}", _0)]
    InvalidTxs(String),
    #[display(fmt = "Invalid tx witness: {}", _0)]
    InvalidTxWitness(String),
    #[display(fmt = "Invalid tx module: {}", _0)]
    InvalidTxModule(String),
    #[display(fmt = "Invalid tx method: {}", _0)]
    InvalidTxMethod(String),
    #[display(fmt = "Invalid tx params: {}", _0)]
    InvalidTxParams(String),
    // Carries a stringified `ApplicationError` when a module error is
    // surfaced as a `CommonError`.
    #[display(fmt = "{}", _0)]
    Application(String),
}

// `Display` and `Debug` come from the derives; nothing extra needed.
impl Error for ErrorKind {}

impl From<ErrorKind> for CommonError {
    fn from(error: ErrorKind) -> Self {
        CommonError::new(CommonErrorKind::Executor, Box::new(error))
    }
}
/// Result alias used by module calls.
pub type ModuleResult<T> = Result<T, ModuleError>;
/// Module result whose payload is opaque encoded bytes.
pub type OpaqueModuleResult = ModuleResult<Vec<u8>>;

/// Error returned by a module call, split by how callers should react.
#[derive(Debug)]
pub enum ModuleError {
    /// System error, should not be accepted
    System(CommonError),
    /// Application error, should be accepted
    Application(ApplicationError),
}

/// Application-level (acceptable) failures raised by module code.
#[derive(Debug, Display)]
pub enum ApplicationError {
    #[display(fmt = "Invalid address: {}", _0)]
    InvalidAddress(String),
    #[display(fmt = "Unsigned")]
    Unsigned,
    // Free-form message; produced from plain strings via `From<String>` /
    // `From<&str>` conversions elsewhere in this file.
    #[display(fmt = "{}", msg)]
    User { msg: String },
}
// Conversions into `ModuleError` so `?` works from common errors, executor
// error kinds, application errors and plain strings.
impl From<CommonError> for ModuleError {
    fn from(v: CommonError) -> Self {
        ModuleError::System(v)
    }
}

impl From<ErrorKind> for ModuleError {
    fn from(v: ErrorKind) -> Self {
        ModuleError::System(v.into())
    }
}

impl From<ApplicationError> for ModuleError {
    fn from(v: ApplicationError) -> Self {
        ModuleError::Application(v)
    }
}

impl From<String> for ModuleError {
    fn from(v: String) -> Self {
        ModuleError::Application(ApplicationError::User { msg: v })
    }
}

impl From<&str> for ModuleError {
    fn from(v: &str) -> Self {
        ModuleError::Application(ApplicationError::User { msg: v.to_string() })
    }
}

impl From<ModuleError> for CommonError {
    fn from(error: ModuleError) -> Self {
        match error {
            // System errors already carry a CommonError.
            ModuleError::System(e) => e,
            // Application errors are stringified into `ErrorKind::Application`.
            ModuleError::Application(e) => ErrorKind::Application(e.to_string()).into(),
        }
    }
}
|
/// An axis-aligned rectangle described by its two side lengths.
#[derive(Debug)]
pub struct Rectangle {
    length: u32,
    width: u32,
}

impl Rectangle {
    /// Reports whether `other` fits strictly inside `self` (no rotation):
    /// both of `other`'s sides must be strictly smaller than `self`'s.
    pub fn can_hold(&self, other: &Rectangle) -> bool {
        other.length < self.length && other.width < self.width
    }
}
/// A guess constrained to the range `1..=100`, validated at construction.
#[derive(Debug)]
pub struct Guess {
    value: u32,
}

impl Guess {
    /// Creates a new `Guess`.
    ///
    /// # Panics
    /// Panics when `value` is outside `1..=100`, naming the violated bound
    /// in the panic message.
    pub fn new(value: u32) -> Guess {
        assert!(
            value >= 1,
            "Guess value must be greater than or equal to 1, got {}.",
            value
        );
        assert!(
            value <= 100,
            "Guess value must be less than or equal to 100, got {}.",
            value
        );
        Guess { value }
    }
}
/// Returns `a + 2`, delegating to the private adder.
pub fn add_two(a: i32) -> i32 {
    internal_adder(a, 2)
}

/// Sums its two operands.
fn internal_adder(lhs: i32, rhs: i32) -> i32 {
    lhs + rhs
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn larger_can_hold_smaller() {
        let larger = Rectangle { length: 8, width: 7 };
        let smaller = Rectangle { length: 6, width: 6 };
        // `assert!` is the idiomatic form for boolean results
        // (was `assert_eq!(…, true)`).
        assert!(larger.can_hold(&smaller));
    }

    #[test]
    fn smaller_cant_hold_larger() {
        let larger = Rectangle { length: 8, width: 7 };
        let smaller = Rectangle { length: 6, width: 6 };
        assert!(!smaller.can_hold(&larger));
    }

    #[test]
    #[should_panic(expected = "Guess value must be less than or equal to 100")]
    fn greater_than_100() {
        Guess::new(200);
    }

    // Renamed from `greater_than_1`: this test feeds 0 — a value *less*
    // than 1 — and expects the lower-bound panic.
    #[test]
    #[should_panic(expected = "Guess value must be greater than or equal to 1")]
    fn less_than_1() {
        Guess::new(0);
    }
}
|
/// Part 1: total number of used squares — the sum of the popcounts of all
/// 128 row hashes `input-0` .. `input-127`.
pub fn solve_puzzle_part_1(input: &str) -> String {
    let total: u32 = (0..128)
        .map(|row| knot_hash(format!("{}-{}", input, row).as_bytes()))
        .map(|hash| {
            hash.iter()
                .map(|&byte| u32::from(hamming_weight(byte)))
                .sum::<u32>()
        })
        .sum();
    total.to_string()
}
/// Part 2: number of connected regions on the 128x128 grid.
///
/// Each row is the knot hash of `input-<row>`, expanded MSB-first into
/// '#' (bit set / used) and '.' (bit clear / free) cells.
pub fn solve_puzzle_part_2(input: &str) -> String {
    let mut disk: Vec<Vec<char>> = (0..128)
        .map(|row| {
            let hash = knot_hash(format!("{}-{}", input, row).as_bytes());
            hash.iter()
                .flat_map(|&byte| {
                    // Expand every byte into its 8 bits, most significant first.
                    (0..8).rev().map(move |bit| {
                        if (byte >> bit) & 1 == 1 { '#' } else { '.' }
                    })
                })
                .collect()
        })
        .collect();
    regions(&mut disk).to_string()
}
/// Counts 4-connected regions of '#' cells, consuming them: every visited
/// cell is overwritten with 'X'. Panics (assert) if the grid is ragged.
fn regions(disk: &mut Vec<Vec<char>>) -> u32 {
    let mut count = 0;
    for r in 0..disk.len() {
        assert!(disk[r].len() == disk[0].len());
        for c in 0..disk[r].len() {
            if disk[r][c] != '#' {
                continue;
            }
            // Found an unvisited region: flood-fill it away with an
            // explicit stack instead of recursion.
            count += 1;
            let mut stack = vec![(r, c)];
            while let Some((row, col)) = stack.pop() {
                if disk[row][col] != '#' {
                    continue;
                }
                disk[row][col] = 'X';
                if col > 0 {
                    stack.push((row, col - 1));
                }
                if row > 0 {
                    stack.push((row - 1, col));
                }
                if col + 1 < disk[row].len() {
                    stack.push((row, col + 1));
                }
                if row + 1 < disk.len() {
                    stack.push((row + 1, col));
                }
            }
        }
    }
    count
}
/// Number of set bits (population count) of `x`.
///
/// Uses the standard library's `count_ones` (typically a single popcount
/// instruction) instead of the previous hand-rolled SWAR reduction.
/// The result is at most 8, so the `u8` cast cannot truncate.
fn hamming_weight(x: u8) -> u8 {
    x.count_ones() as u8
}
/// Computes the 16-byte knot hash (AoC 2017 day 10) of `input`:
/// 64 rounds over a 256-element list, then XOR-fold into 16 blocks.
fn knot_hash(input: &[u8]) -> Vec<u8> {
    // Input bytes as lengths, followed by the standard suffix.
    const SUFFIX: [usize; 5] = [17, 31, 73, 47, 23];
    let mut list: Vec<u8> = (0..=255u8).collect();
    let lengths: Vec<usize> = input
        .iter()
        .map(|&b| usize::from(b))
        .chain(SUFFIX.iter().copied())
        .collect();
    let mut current_position = 0;
    let mut skip_size = 0;
    for _ in 0..64 {
        round(&mut list, &lengths, &mut current_position, &mut skip_size);
    }
    // Dense hash: XOR each block of 16 sparse-hash bytes.
    list.chunks(16)
        .map(|block| block.iter().fold(0u8, |acc, &b| acc ^ b))
        .collect()
}

/// One knot-hash round: for each length, reverse that many elements of the
/// circular list starting at the current position, then advance by
/// `length + skip_size` and bump the skip size.
fn round(
    list: &mut Vec<u8>,
    lengths: &[usize],
    current_position: &mut usize,
    skip_size: &mut usize,
) {
    let n = list.len();
    for &length in lengths {
        let start = *current_position;
        // Reverse the section by swapping symmetric pairs (wrapping).
        for offset in 0..length / 2 {
            let left = (start + offset) % n;
            let right = (start + length - 1 - offset) % n;
            list.swap(left, right);
        }
        *current_position = (start + length + *skip_size) % n;
        *skip_size += 1;
    }
}
|
/**
 * In C, a dangling pointer (a reference whose referent is gone) is something
 * the programmer must guard against by hand. Rust's lifetimes rule it out:
 * moving a value away while it is still borrowed is a compile-time error.
 */
fn main() {
    // `s` would normally live until the end of this function.
    let s = "owned data".to_string();
    // A `{}` block delimits a shorter lifetime scope.
    {
        // `s` is moved here, so its lifetime ends at this point;
        // `t` lives until the end of this block.
        let t = s;
    }
    // From here on, neither `t` nor `s` is accessible.
    {
        let s = "owned data".to_string();
        // Borrow `s`. The reference dies at the end of this block,
        // and `s` must outlive it.
        let ref_s = &s;
        // Uncommenting this move would be a compile error while `ref_s` lives:
        // let t = s;
    }
}
|
//extern crate rand
//extern crate failure;
use std::str::Lines;
use std::str;
use std::env::args;
use std::error::Error;
use std::io::Read;
use std::io::prelude;
use std::fs::File;
// use self::failure::Error;
//use rand::Rng;
pub use lib::asm_info::CodeInfo;
fn convert_line(line: Vec<String>, mut info: &CodeInfo) -> Result<String, Box<Error>> {
let mut tmpBuf = String::new();
match info.jump_codes.contains_key(&line[0]) {
true => {
if line.len() > 2 {
panic!("Incorrect number of arguments. Check line #{}",info.line_num);
}
else {
tmpBuf.push_str(&info.jump_codes.get(&line[0])
.expect("Failed to get value").get_func_code());
tmpBuf.push_str(&line[1].split("0x")
.nth(1).expect("Failed to get value"));
}
},
_ => (),
}
match info.jr_codes.contains_key(&line[0]) {
true => {
if line.len() != 2 {
panic!("Incorrect number of arguments. Check line #{}",info.line_num);
}
else {
tmpBuf.push_str(&info.reg_codes.get(&line[0])
.expect("Failed to get value").get_opcode());//Jump Reg Opcode
tmpBuf.push_str(&info.regs.get(&line[1])
.expect("Failed to get value"));//Reg
tmpBuf.push_str("");//Two other regs and shift amount
tmpBuf.push_str(&info.reg_codes.get(&line[0])
.expect("Failed to get value").get_func_code());//Func Code
}
},
_ => (),
}
match info.shift_reg_codes.contains_key(&line[0]) {
true => {
if line.len() != 4 {
panic!("Incorrect number of arguments. Check line #{}",info.line_num);
}
else {
tmpBuf.push_str(&info.regs.get(&line[1])
.expect("Failed to get value"));//Reg
tmpBuf.push_str(&info.regs.get(&line[2])
.expect("Failed to get value"));//reg
tmpBuf.push_str("00000");// Zero'd out reg
tmpBuf.push_str(&line[3]);//Shift Amount
tmpBuf.push_str(&info.reg_codes.get(&line[0])
.expect("Failed to get value").get_func_code());//Func Code
}
},
_ => (),
}
match info.reg_codes.contains_key(&line[0]) {
true => {
if line.len() != 4 {
panic!("Incorrect number of arguments. Check line #{}",info.line_num);
}
else {
tmpBuf.push_str(&info.reg_codes.get(&line[0])
.expect("Failed to get value").get_opcode());//Most other OP Codes
line.iter()
.skip(1)
.map(|x|
match info.regs.contains_key(x) {
true => {
line.iter()
.skip(1)
.map(|x| tmpBuf.push_str(&info.regs.get(x)
.expect("Failed to get value.")));
tmpBuf.push_str("00000");//Shift Amount
tmpBuf.push_str(&info.reg_codes.get(&line[0])
.expect("Failed to get value").get_func_code());//Func Code
},
_ => panic!("Incorrect arguments. Check line #{}",info.line_num),
});
}
},
_ => (),
}
match info.imm_codes.contains_key(&line[0]) {
true => {},
_ => (),
}
/* match info.pseudo_opcodes.contains_key(&line[0]) {
}*/
Ok(tmpBuf)
}
/// Assembles every line of the program, recording `label:` addresses into
/// `info.labels` and concatenating the encodings from `convert_line`.
///
/// NOTE(review): fixes two defects — the original `match` used
/// `_ => continue`, which skipped encoding for every line WITHOUT a label,
/// and labeled lines that carried an instruction were encoded twice.
fn assemble_loop(prog_lines: Lines, mut info: CodeInfo) -> Result<String, Box<Error>> {
    let mut address = 10000000;
    let mut buf = String::new();
    for raw_line in prog_lines {
        info.line_num += 1;
        // Address advances before use, matching the original numbering.
        address += 4;
        let mut line: Vec<String> = raw_line
            .split(" ")
            .map(|token| token.to_string())
            .collect();
        // A leading `label:` records the current address; any instruction on
        // the same line is kept for encoding.
        if line[0].contains(":") {
            info.labels.insert(
                line[0]
                    .split(":")
                    .nth(0)
                    .expect("Failed to get value")
                    .to_string(),
                address,
            );
            let rest = line[0]
                .split(":")
                .nth(1)
                .expect("Failed to get value")
                .to_string();
            if rest.is_empty() {
                // Label-only line: nothing to encode.
                continue;
            }
            line[0] = rest;
        }
        buf.push_str(&convert_line(line, &info)?);
    }
    Ok(buf)
}
/// Entry point: reads the source file named by the first CLI argument,
/// assembles it and prints the result.
///
/// Returns `Err(())` for a missing path argument or an assembly failure
/// instead of panicking on them.
pub fn run() -> Result<(), ()> {
    let info = CodeInfo::new();
    let args: Vec<String> = args().collect();
    // Fail gracefully instead of panicking on a missing index.
    if args.len() < 2 {
        eprintln!("Usage: assembler <input-file>");
        return Err(());
    }
    let mut buf = String::new();
    let mut f = File::open(&args[1]).expect("Failed to open file");
    f.read_to_string(&mut buf).expect("Failed to write to buffer");
    let prog_lines = buf.lines();
    // Surface assembly errors through the `Result` instead of unwrapping.
    let assembly = assemble_loop(prog_lines, info).map_err(|_| ())?;
    println!("{}", assembly);
    Ok(())
}
|
// Copyright (C) 2020 Sebastian Dröge <sebastian@centricular.com>
//
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use super::*;
use std::fmt;
/// `Seek-Style` header ([RFC 7826 section 18.47](https://tools.ietf.org/html/rfc7826#section-18.47)).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum SeekStyle {
    /// The `RAP` seek style.
    Rap,
    /// The `CoRAP` seek style.
    CoRap,
    /// The `First-Prior` seek style.
    FirstPrior,
    /// The `Next` seek style.
    Next,
    /// Any other seek-style token, stored verbatim.
    Extension(String),
}
impl SeekStyle {
    /// Returns the wire token for this seek style.
    pub fn as_str(&self) -> &str {
        match self {
            SeekStyle::Rap => "RAP",
            SeekStyle::CoRap => "CoRAP",
            SeekStyle::FirstPrior => "First-Prior",
            SeekStyle::Next => "Next",
            SeekStyle::Extension(ref s) => s.as_str(),
        }
    }
}

// Displays exactly the token from `as_str`.
impl fmt::Display for SeekStyle {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
impl std::str::FromStr for SeekStyle {
    type Err = HeaderParseError;

    /// Parses a seek-style token. Unknown tokens become `Extension`,
    /// so this parse never actually fails.
    fn from_str(s: &str) -> Result<Self, HeaderParseError> {
        let style = match s {
            "RAP" => SeekStyle::Rap,
            "CoRAP" => SeekStyle::CoRap,
            "First-Prior" => SeekStyle::FirstPrior,
            "Next" => SeekStyle::Next,
            other => SeekStyle::Extension(other.to_owned()),
        };
        Ok(style)
    }
}
impl super::TypedHeader for SeekStyle {
    /// Extracts and parses the `Seek-Style` header; `Ok(None)` when absent.
    fn from_headers(headers: impl AsRef<Headers>) -> Result<Option<Self>, HeaderParseError> {
        let headers = headers.as_ref();
        let header = match headers.get(&SEEK_STYLE) {
            None => return Ok(None),
            Some(header) => header,
        };
        // NOTE: `FromStr` above never errors (unknown tokens become
        // `Extension`), so this `map_err` is effectively dead code.
        let seek_style = header.as_str().parse().map_err(|_| HeaderParseError)?;
        Ok(Some(seek_style))
    }

    /// Serializes this seek style into the header map under `Seek-Style`.
    fn insert_into(&self, mut headers: impl AsMut<Headers>) {
        let headers = headers.as_mut();
        headers.insert(SEEK_STYLE, self.to_string());
    }
}
|
use jargon_args::Jargon;
use std::process::exit;
use std::collections::HashMap;
// API endpoint reporting the caller's IP, country and country code.
const URL: &str = "https://api.myip.com";
// Usage text printed for `-h`/`--help`.
const HELP: &str = "\
myip [OPTIONS]
OPTIONS:
-a, --all Display all IP information
-C, --code Display two letter country code
-c, --country Display only country
-h, --help Display help information
-r, --raw Display raw API response
-v, --version Display version information
";
// Version banner prefix; the crate version is appended at print time.
const VERSION: &str = "\
myip version";

/// CLI entry point: parses flags, queries the API and prints the requested
/// subset of the JSON response.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut j: Jargon = Jargon::from_env();
    // Help/version short-circuit before any network access.
    if j.contains(["-h", "--help"]) {
        print!("{}", HELP);
        exit(0);
    } else if j.contains(["-v", "--version"]) {
        println!("{} {}\nThanks to https://myip.com/ for hosting this API.",
            VERSION, env!("CARGO_PKG_VERSION")
        );
        exit(0);
    }
    let all: bool = j.contains(["-a", "--all"]);
    let country: bool = j.contains(["-c", "--country"]);
    let cc: bool = j.contains(["-C", "--code"]);
    let raw: bool = j.contains(["-r", "--raw"]);
    // Response is a flat string-to-string JSON object.
    // NOTE(review): the `resp[...]` indexing below panics if the API ever
    // omits a key.
    let resp = reqwest::get(URL)
        .await?
        .json::<HashMap<String, String>>()
        .await?;
    if all {
        println!("IP:\t\t{}\nCountry:\t{}\nCC:\t\t{}",
            resp["ip"], resp["country"], resp["cc"]);
    } else if country {
        println!("{}", resp["country"]);
    } else if cc {
        println!("{}", resp["cc"]);
    } else if raw {
        println!("{:#?}", resp);
    } else {
        // Default: just the IP.
        println!("{}", resp["ip"])
    }
    Ok(())
}
|
pub mod api;
use crate::models::user::UserAuth;
use crate::models::DataPoolSqlite;
use crate::services::file::list_files_service;
use actix_web::*;
use actix_web::{web, HttpResponse, Result};
use log::error;
use tera::{Context, Tera};
// register page
/// Renders the static registration page.
pub async fn register_page(tml: web::Data<Tera>) -> Result<HttpResponse> {
    let data = tml
        .render("register.html", &Context::new())
        .map_err(|_| error::ErrorInternalServerError("Template error"))?;
    Ok(HttpResponse::Ok().content_type("text/html").body(data))
}

/// Renders the static home page.
pub async fn home_page(tml: web::Data<Tera>) -> Result<HttpResponse> {
    let data = tml
        .render("home.html", &Context::new())
        .map_err(|_| error::ErrorInternalServerError("Template error"))?;
    Ok(HttpResponse::Ok().content_type("text/html").body(data))
}

/// Renders the static login page.
pub async fn login_page(tml: web::Data<Tera>) -> Result<HttpResponse> {
    let s = tml
        .render("login.html", &Context::new())
        .map_err(|_| error::ErrorInternalServerError("Template error"))?;
    Ok(HttpResponse::Ok().content_type("text/html").body(s))
}

/// Renders the static upload page.
pub async fn upload_page(tml: web::Data<Tera>) -> Result<HttpResponse> {
    let s = tml
        .render("upload.html", &Context::new())
        .map_err(|_| error::ErrorInternalServerError("Template error"))?;
    Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
/// Renders the access page for a shared link; the `linkID` path segment is
/// forwarded to the template as `link`.
pub async fn access_page(tml: web::Data<Tera>, req: HttpRequest) -> Result<HttpResponse> {
    let link: String = match req.match_info().query("linkID").parse() {
        Ok(l) => l,
        Err(e) => {
            // Parsing into `String` is infallible in practice; the error arm
            // is kept for symmetry with the other handlers.
            error!("message error : {}", e);
            return Err(error::ErrorInternalServerError("parse link error"));
        }
    };
    let mut ctx = Context::new();
    ctx.insert("link", &link);
    let s = tml
        .render("access.html", &ctx)
        .map_err(|_| error::ErrorInternalServerError("Template error"))?;
    Ok(HttpResponse::Ok().content_type("text/html").body(s))
}
/// Renders the authenticated user's file list.
///
/// Service failures are only logged: the page is still rendered, just
/// without the `files` context entry.
pub async fn list_file_page(
    pool: DataPoolSqlite,
    tml: web::Data<Tera>,
    user_auth: UserAuth,
) -> Result<HttpResponse> {
    let mut ctx = Context::new();
    match list_files_service(&pool, user_auth.id).await {
        Ok(r) => {
            ctx.insert("files", &r);
        }
        Err(e) => {
            error!("{}", e);
        }
    }
    let data = tml
        .render("list.html", &ctx)
        .map_err(|_| error::ErrorInternalServerError("Template error"))?;
    Ok(HttpResponse::Ok().content_type("text/html").body(data))
}
|
use crate::{
components::level::LevelPrefabData, resources::prefabs::PrefabRegistry, utils::hierarchy_util,
};
use amethyst::{
assets::{AssetLoaderSystemData, Handle, Prefab},
controls::HideCursor,
core::Transform,
ecs::Entity,
input::{is_key_down, is_mouse_button_down},
prelude::*,
renderer::rendy::mesh::{Indices, MeshBuilder, Normal, Position, TexCoord},
renderer::{
mtl::MaterialDefaults,
palette::LinSrgba,
rendy::texture::palette::load_from_linear_rgba,
shape::FromShape,
types::{Mesh, MeshData},
Material, Texture,
},
ui::UiPrefab,
winit::{MouseButton, VirtualKeyCode},
};
/// Root gameplay state: holds the entities for the loaded level scene and
/// the FPS counter overlay.
///
/// `Default` is derived (both fields start as `None`), replacing the
/// equivalent hand-written impl.
#[derive(Default)]
pub struct MainGameState {
    scene: Option<Entity>,
    fps_display: Option<Entity>,
}
/// Quick-and-dirty resource used to pass the generated debug material
/// between systems.
///
/// `Default` is derived (the field starts as `None`), replacing the
/// equivalent hand-written impl.
#[derive(Default)]
pub struct Hax {
    pub the_material: Option<Handle<Material>>,
}
impl SimpleState for MainGameState {
    /// Spawns the level scene and the FPS overlay, then pre-builds a green
    /// debug material and stores its handle in the `Hax` resource.
    fn on_start(&mut self, data: StateData<GameData>) {
        let StateData { world, .. } = data;
        let scene_handle = world
            .read_resource::<PrefabRegistry<Prefab<LevelPrefabData>>>()
            .get_prefab("default_level")
            .expect("level prefab not found")
            .clone();
        let menu_prefab = world
            .read_resource::<PrefabRegistry<UiPrefab>>()
            .get_prefab("fps_widget") // todo: move ids to config file
            .expect("fps prefab not found")
            .clone();
        self.scene = Some(world.create_entity().with(scene_handle).build());
        // (dropped a redundant `.clone()` — the handle is not used again)
        self.fps_display = Some(world.create_entity().with(menu_prefab).build());
        {
            let default_mat = world.read_resource::<MaterialDefaults>().0.clone();
            // Solid green albedo texture for the debug material.
            let albedo = world.exec(|loader: AssetLoaderSystemData<Texture>| {
                loader.load_from_data(
                    load_from_linear_rgba(LinSrgba::new(0.1, 0.9, 0.1, 1.0)).into(),
                    (),
                )
            });
            let mat = world.exec(|loader: AssetLoaderSystemData<Material>| {
                loader.load_from_data(
                    Material {
                        albedo,
                        ..default_mat.clone()
                    },
                    (),
                )
            });
            {
                let mut hax = world.write_resource::<Hax>();
                hax.the_material = Some(mat);
            }
        }
    }

    /// Deletes the ui and scene.
    ///
    /// Fix: the original only deleted the scene hierarchy and leaked the
    /// FPS overlay entity; both are torn down now.
    fn on_stop(&mut self, data: StateData<GameData>) {
        if let Some(root) = self.scene.take() {
            hierarchy_util::delete_hierarchy(root, data.world).expect("failed to delete scene");
        }
        if let Some(fps) = self.fps_display.take() {
            hierarchy_util::delete_hierarchy(fps, data.world)
                .expect("failed to delete fps display");
        }
    }

    /// Escape releases the mouse cursor; a left click recaptures it.
    fn handle_event(
        &mut self,
        data: StateData<'_, GameData<'_, '_>>,
        event: StateEvent,
    ) -> SimpleTrans {
        let StateData { world, .. } = data;
        if let StateEvent::Window(event) = &event {
            if is_key_down(&event, VirtualKeyCode::Escape) {
                let mut hide_cursor = world.write_resource::<HideCursor>();
                hide_cursor.hide = false;
            } else if is_mouse_button_down(&event, MouseButton::Left) {
                let mut hide_cursor = world.write_resource::<HideCursor>();
                hide_cursor.hide = true;
            }
        }
        Trans::None
    }
}
|
use ray::Ray;
use super::intersection::Intersection;

/// Anything a ray can be intersected with.
pub trait Geometry {
    /// Returns the intersection of `ray` with this object, or `None` when
    /// the ray misses. (Parameter name added: anonymous trait parameters
    /// are rejected by Rust 2018 and later.)
    fn intersect(&self, ray: &Ray) -> Option<Intersection>;
}
|
// NOTE(review): this is pre-1.0 Rust (circa 2011-2012) syntax — `iter`
// generator functions with `put`, the `uint` type, `1u` literal suffixes and
// structural record types like `{a: T, b: uint}`. It does not compile under
// any modern rustc; kept verbatim as a historical sample.

// Generator yielding each integer in `[lo, hi)`.
iter range(lo: uint, hi: uint) -> uint {
    let lo_ = lo;
    while lo_ < hi { put lo_; lo_ += 1u; }
}

// Allocates (and immediately discards) an empty bucket 256 times;
// `index` and `hash_fn` are unused.
fn create_index<T>(index: [{a: T, b: uint}], hash_fn: fn(T) -> uint) {
    for each i: uint in range(0u, 256u) { let bucket: [T] = []; }
}

fn main() { }
|
#![feature(type_alias_impl_trait)]
#![feature(trait_alias)]
#![feature(core_intrinsics)]
#![feature(array_chunks)]
#![allow(unused)]
#![deny(unused_must_use)]
#[macro_use]
extern crate serde;
extern crate derive as _der;
use std::sync::Arc;
use actix::{Message, SystemService, Actor};
use crate::node::NodeController;
use bytes::Bytes;
// Hidden support module re-exporting the names that generated code (from the
// `DynHandler` derive, presumably) needs to resolve regardless of the user's
// own imports — TODO confirm against the derive crate's output.
#[doc(hidden)]
pub mod derive {
    pub use futures::FutureExt;
    pub use actix::prelude::*;
    pub use futures::future::BoxFuture;
    pub use bytes::{BytesMut, Bytes};
    pub use prost::Message as ProstMessage;
    pub use crate::process::{Pid, PidRecipient, DynHandler, Dispatcher};
    pub use crate::node::NodeId;
    pub use crate::util::RpcMethod;
    pub use crate::process::DispatchError;
}
pub use _der::DynHandler;
pub use process::{Pid, Process};
mod import;
pub mod proto;
pub mod process;
pub mod node;
pub mod util;
pub mod suspend;
pub mod global;
pub mod memkv;
use uuid::Uuid;
pub use crate::process::DispatchError;
/// A broadcast message: optional target process, numeric method id and an
/// encoded payload.
#[derive(Debug, Clone)]
pub struct Broadcast {
    // Optional target process id.
    pub(crate) procid: Option<Uuid>,
    // Numeric RPC method identifier.
    pub(crate) method: u32,
    // Encoded message payload.
    pub(crate) body: Bytes,
}
impl Broadcast {
    /// Convenience constructor for a broadcast message.
    pub fn make(procid: Option<Uuid>, method: u32, body: Bytes) -> Self {
        Self { procid, method, body }
    }
}
// Broadcasting yields unit on success; failures surface as `DispatchError`.
impl Message for Broadcast {
    type Result = Result<(), DispatchError>;
}
/// Send a specified message to a node.
pub struct NodeDispatch<M> {
    // Destination node id.
    pub(crate) nodeid: Uuid,
    // The message forwarded to that node.
    pub(crate) inner: M,
}

// The wrapper keeps the inner message's success type `R`; dispatch
// failures fold into the same `DispatchError`.
impl<M, R> Message for NodeDispatch<M>
where
    M: Message<Result = Result<R, DispatchError>> + 'static,
    R: 'static,
{
    type Result = Result<R, DispatchError>;
}
/*
// Send a specified message to a specified process
pub struct ProcDispatch<M> {
pub(crate) procid: Uuid,
pub(crate) inner: M,
}
impl<M, R> Message for ProcDispatch<M>
where M: Message<Result=Result<R, DispatchError>> + 'static,
R: 'static
{
type Result = Result<R, DispatchError>;
}
*/
//pub struct DynHandler<M> {}
/// Dispatch a message to appropriate handler
///
/// if `id.is_nil() && !wait_for_response` then the response is returned as soon as local
/// link sent the message over the wire
///
/// Otherwise sets up a correlation counter and waits for response with a timeout(to prevent DOS attacks on correlation cache)
#[derive(Debug, Clone)]
pub struct MethodCall {
    // Optional target process id.
    pub(crate) procid: Option<Uuid>,
    // Numeric RPC method identifier.
    pub(crate) method: u32,
    // Encoded request payload.
    pub(crate) body: Bytes,
}

// A method call resolves to the raw encoded response bytes.
impl Message for MethodCall {
    type Result = Result<Bytes, DispatchError>;
}
use std::{
fs::{read, File},
io::Write,
path::PathBuf,
};
use anyhow::Context;
use console::style;
use indicatif::ProgressBar;
use rcgen::{
generate_simple_self_signed, BasicConstraints, Certificate as GenCertificate,
CertificateParams, DistinguishedName, DnType, ExtendedKeyUsagePurpose, IsCa, KeyPair,
};
use rustls::{Certificate, PrivateKey};
use crate::{mitm::DOMAIN_INTERCEPT, style::SPINNER_STYLE};
pub const CERT_FILENAME: &str = "ca.cer";
const KEY_FILENAME: &str = "ca.key";
/// Set up the certificate to intercept traffic. This will first look for `CERT_FILENAME`
/// and `KEY_FILENAME` in the current directory and use the file as-is as the root CA certificate
/// if they exist. Otherwise new CA certificate/key will be generated and exported.
/// A certificate specifically for the website will then be signed by the CA
pub fn setup_certificate() -> anyhow::Result<(Certificate, PrivateKey)> {
    let cert_path: PathBuf = [".", CERT_FILENAME].iter().collect();
    let key_path: PathBuf = [".", KEY_FILENAME].iter().collect();
    // Reuse the previously exported CA when both DER files exist; otherwise
    // generate a fresh CA and export it next to the binary.
    let ca_cert = if cert_path.exists() && key_path.exists() {
        let pb = ProgressBar::new_spinner().with_style(
            SPINNER_STYLE
                .clone()
                .template("{spinner:.green} {wide_msg}"),
        );
        pb.set_message("读取已保存的自签发根证书及私钥");
        pb.enable_steady_tick(5);
        let cert_der = read(&cert_path)
            .with_context(|| format!("无法读取证书文件 {}", style(CERT_FILENAME).dim()))?;
        let key_der = read(&key_path)
            .with_context(|| format!("无法读取私钥文件 {}", style(KEY_FILENAME).dim()))?;
        // Rebuild the rcgen certificate from the stored key pair + cert DER.
        let key_pair = KeyPair::from_der(&key_der).context("无效的证书私钥")?;
        let params =
            CertificateParams::from_ca_cert_der(&cert_der, key_pair).context("无效的根证书")?;
        pb.finish_with_message("已加载自签发根证书及私钥");
        GenCertificate::from_params(params).context("无效的根证书")?
    } else {
        let pb = ProgressBar::new_spinner().with_style(
            SPINNER_STYLE
                .clone()
                .template("{spinner:.green} {wide_msg}"),
        );
        pb.set_message("生成自签发根证书及私钥");
        pb.enable_steady_tick(5);
        let params = generate_ca_cerficate_params();
        let cert = GenCertificate::from_params(params).context("无法生成自签发证书")?;
        pb.set_message("保存自签发证书及私钥");
        // Persist both halves so later runs reuse the same CA.
        let cert_der = cert.serialize_der().context("无法导出根证书")?;
        let key_der = cert.serialize_private_key_der();
        let mut cert_file = File::create(&cert_path).context("无法创建证书文件")?;
        cert_file.write_all(&cert_der).context("无法写入证书")?;
        cert_file.sync_all().context("无法写入证书")?;
        drop(cert_file);
        let mut key_file = File::create(&key_path).context("无法创建私钥文件")?;
        key_file.write_all(&key_der).context("无法写入私钥")?;
        key_file.sync_all().context("无法写入私钥")?;
        drop(key_file);
        pb.finish_with_message(&format!(
            "已保存生成的自签发根证书到 {},私钥到 {}",
            style(CERT_FILENAME).dim(),
            style(KEY_FILENAME).dim()
        ));
        println!(
            "{} 请将证书 {} 加入系统的根证书信任库中",
            style("[提醒]").green(),
            style(CERT_FILENAME).dim()
        );
        println!("{} 证书私钥泄露可能会导致安全问题", style("[警告]").red());
        cert
    };
    // Leaf certificate covering the intercepted domains, signed by the CA.
    let cert = generate_simple_self_signed(
        DOMAIN_INTERCEPT
            .iter()
            .cloned()
            .map(ToOwned::to_owned)
            .collect::<Vec<String>>(),
    )
    .context("无法生成网站用证书")?;
    let cert_der = cert
        .serialize_der_with_signer(&ca_cert)
        .context("无法签发网站用证书")?;
    let key_der = cert.serialize_private_key_der();
    Ok((Certificate(cert_der), PrivateKey(key_der)))
}
/// Generate certificate parameters for root CA certificate
fn generate_ca_cerficate_params() -> CertificateParams {
let mut distinguished_name = DistinguishedName::new();
distinguished_name.push(DnType::CommonName, "DO_NOT_TRUST Genshin Exporter CA");
// TODO: fork `rcgen` and add support for [Key Usage Extension](https://tools.ietf.org/html/rfc5280#section-4.2.1.3)
let mut params = CertificateParams::new(
DOMAIN_INTERCEPT
.iter()
.cloned()
.map(ToOwned::to_owned)
.collect::<Vec<String>>(),
);
params.distinguished_name = distinguished_name;
params.is_ca = IsCa::Ca(BasicConstraints::Constrained(0));
params
.extended_key_usages
.push(ExtendedKeyUsagePurpose::ServerAuth);
params
}
|
use crate::common::factories::prelude::*;
use common::rsip::{self, headers::*, prelude::*, Method, Uri, Version};
use std::{convert::TryInto, net::IpAddr as StdIpAddr};
/// Builds a 200 response with randomized headers, optionally overriding the
/// URIs inside the `From` and `To` headers.
pub fn response(from_uri: Option<Uri>, to_uri: Option<Uri>) -> rsip::Response {
    let mut headers: rsip::Headers = Randomized::default();
    if let Some(from_uri) = from_uri {
        // NOTE(review): the `mut` on this binding looks unnecessary —
        // `with_uri` appears to take the header by value; confirm before
        // removing.
        let mut typed_from_header = rsip::header_opt!(headers.iter(), Header::From)
            .expect("from header")
            .typed()
            .expect("typed from header");
        headers.unique_push(typed_from_header.with_uri(from_uri).into());
    }
    if let Some(to_uri) = to_uri {
        let mut typed_to_header = rsip::header_opt!(headers.iter(), Header::To)
            .expect("to header")
            .typed()
            .expect("typed to header");
        headers.unique_push(typed_to_header.with_uri(to_uri).into());
    }
    rsip::Response {
        status_code: 200.into(),
        version: Version::V2,
        headers,
        body: vec![],
    }
}
pub fn trying_response_from(request: rsip::Request) -> rsip::Response {
let mut headers: rsip::Headers = Default::default();
headers.push(request.via_header().expect("via header").clone().into());
headers.push(From::new(request.to_header().expect("to header").clone()).into());
headers.push(To::new(request.from_header().expect("from header").clone()).into());
headers.push(
request
.call_id_header()
.expect("call_id header")
.clone()
.into(),
);
headers.push(
typed::CSeq::from((
request
.cseq_header()
.expect("cseq header")
.typed()
.expect("cseq typed header")
.seq,
request.method,
))
.into(),
);
headers.push(MaxForwards::default().into());
rsip::Response {
status_code: 100.into(),
headers,
version: Default::default(),
body: Default::default(),
}
}
pub fn ok_response_from(request: rsip::Request) -> rsip::Response {
let mut headers: rsip::Headers = Default::default();
headers.push(request.via_header().expect("via header").clone().into());
headers.push(From::new(request.to_header().expect("to header").clone()).into());
headers.push(To::new(request.from_header().expect("from header").clone()).into());
headers.push(
request
.call_id_header()
.expect("call_id header")
.clone()
.into(),
);
headers.push(
typed::CSeq::from((
request
.cseq_header()
.expect("cseq header")
.typed()
.expect("typed cseq header")
.seq,
request.method,
))
.into(),
);
headers.push(MaxForwards::default().into());
rsip::Response {
status_code: 200.into(),
headers,
version: Default::default(),
body: Default::default(),
}
}
pub fn request_failure_response_from(request: rsip::Request) -> rsip::Response {
let mut headers: rsip::Headers = Default::default();
headers.push(request.via_header().expect("via header").clone().into());
headers.push(From::new(request.to_header().expect("to header").clone()).into());
headers.push(To::new(request.from_header().expect("from header").clone()).into());
headers.push(
request
.call_id_header()
.expect("call_id header")
.clone()
.into(),
);
headers.push(
typed::CSeq::from((
request
.cseq_header()
.expect("cseq header")
.typed()
.expect("cseq typed header")
.seq,
request.method,
))
.into(),
);
headers.push(MaxForwards::default().into());
rsip::Response {
status_code: 404.into(),
headers,
version: Default::default(),
body: Default::default(),
}
}
pub fn redirection_response_from(request: rsip::Request) -> rsip::Response {
let mut headers: rsip::Headers = Default::default();
headers.push(request.via_header().expect("via header").clone().into());
headers.push(From::new(request.to_header().expect("to header").clone()).into());
headers.push(To::new(request.from_header().expect("from header").clone()).into());
headers.push(
request
.call_id_header()
.expect("call_id header")
.clone()
.into(),
);
headers.push(
typed::CSeq::from((
request
.cseq_header()
.expect("cseq header")
.typed()
.expect("cseq typed header")
.seq,
request.method,
))
.into(),
);
headers.push(MaxForwards::default().into());
rsip::Response {
status_code: 301.into(),
headers,
version: Default::default(),
body: Default::default(),
}
}
|
// svd2rust-generated accessors for the RAMECC monitor 4 status register
// (M4SR); only a reader is generated here, exposing the full 32-bit
// "failing data low" word.
#[doc = "Register `M4SR` reader"]
pub type R = crate::R<M4SR_SPEC>;
#[doc = "Field `FDATAL` reader - Failing data low"]
pub type FDATAL_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - Failing data low"]
    #[inline(always)]
    pub fn fdatal(&self) -> FDATAL_R {
        // The field spans the whole register, so the raw bits are passed on.
        FDATAL_R::new(self.bits)
    }
}
#[doc = "RAMECC monitor x status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`m4sr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct M4SR_SPEC;
impl crate::RegisterSpec for M4SR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`m4sr::R`](R) reader structure"]
impl crate::Readable for M4SR_SPEC {}
#[doc = "`reset()` method sets M4SR to value 0"]
impl crate::Resettable for M4SR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[doc = "Register `CCMR2_Output` reader"]
pub type R = crate::R<CCMR2_OUTPUT_SPEC>;
#[doc = "Register `CCMR2_Output` writer"]
pub type W = crate::W<CCMR2_OUTPUT_SPEC>;
#[doc = "Field `CC3S` reader - Capture/Compare 3 selection"]
pub type CC3S_R = crate::FieldReader<CC3S_A>;
#[doc = "Capture/Compare 3 selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CC3S_A {
#[doc = "0: CC3 channel is configured as output"]
Output = 0,
}
impl From<CC3S_A> for u8 {
#[inline(always)]
fn from(variant: CC3S_A) -> Self {
variant as _
}
}
impl crate::FieldSpec for CC3S_A {
type Ux = u8;
}
impl CC3S_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<CC3S_A> {
match self.bits {
0 => Some(CC3S_A::Output),
_ => None,
}
}
#[doc = "CC3 channel is configured as output"]
#[inline(always)]
pub fn is_output(&self) -> bool {
*self == CC3S_A::Output
}
}
#[doc = "Field `CC3S` writer - Capture/Compare 3 selection"]
// 2-bit field writer at const offset O within the owning register REG.
pub type CC3S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, CC3S_A>;
impl<'a, REG, const O: u8> CC3S_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "CC3 channel is configured as output"]
    #[inline(always)]
    pub fn output(self) -> &'a mut crate::W<REG> {
        self.variant(CC3S_A::Output)
    }
}
#[doc = "Field `OC3FE` reader - Output compare 3 fast enable"]
pub type OC3FE_R = crate::BitReader;
#[doc = "Field `OC3FE` writer - Output compare 3 fast enable"]
pub type OC3FE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OC3PE` reader - Output compare 3 preload enable"]
pub type OC3PE_R = crate::BitReader<OC3PE_A>;
// Two-valued enum for the single-bit OC3PE field (no #[repr]: converted via bool).
#[doc = "Output compare 3 preload enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OC3PE_A {
    #[doc = "0: Preload register on CCR3 disabled. New values written to CCR3 are taken into account immediately"]
    Disabled = 0,
    #[doc = "1: Preload register on CCR3 enabled. Preload value is loaded into active register on each update event"]
    Enabled = 1,
}
impl From<OC3PE_A> for bool {
    #[inline(always)]
    fn from(variant: OC3PE_A) -> Self {
        variant as u8 != 0
    }
}
// Accessors for the OC3PE bit reader; the field is total over bool, so
// `variant()` always yields a named value.
impl OC3PE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OC3PE_A {
        if self.bits {
            OC3PE_A::Enabled
        } else {
            OC3PE_A::Disabled
        }
    }
    #[doc = "Preload register on CCR3 disabled. New values written to CCR3 are taken into account immediately"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        !self.is_enabled()
    }
    #[doc = "Preload register on CCR3 enabled. Preload value is loaded into active register on each update event"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == OC3PE_A::Enabled
    }
}
#[doc = "Field `OC3PE` writer - Output compare 3 preload enable"]
pub type OC3PE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OC3PE_A>;
impl<'a, REG, const O: u8> OC3PE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Preload register on CCR3 disabled. New values written to CCR3 are taken into account immediately"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(OC3PE_A::Disabled)
    }
    #[doc = "Preload register on CCR3 enabled. Preload value is loaded into active register on each update event"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(OC3PE_A::Enabled)
    }
}
#[doc = "Field `OC3M` reader - Output compare 3 mode"]
pub type OC3M_R = crate::FieldReader<OC3M_A>;
// Enumerated values for the low 3 bits of the OC3M field; the 4th mode bit
// lives separately in OC3M_3 (bit 16 of the register).
#[doc = "Output compare 3 mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum OC3M_A {
    #[doc = "0: The comparison between the output compare register TIMx_CCRy and the counter TIMx_CNT has no effect on the outputs / OpmMode1: Retriggerable OPM mode 1 - In up-counting mode, the channel is active until a trigger event is detected (on TRGI signal). In down-counting mode, the channel is inactive"]
    Frozen = 0,
    #[doc = "1: Set channel to active level on match. OCyREF signal is forced high when the counter matches the capture/compare register / OpmMode2: Inversely to OpmMode1"]
    ActiveOnMatch = 1,
    #[doc = "2: Set channel to inactive level on match. OCyREF signal is forced low when the counter matches the capture/compare register / Reserved"]
    InactiveOnMatch = 2,
    #[doc = "3: OCyREF toggles when TIMx_CNT=TIMx_CCRy / Reserved"]
    Toggle = 3,
    #[doc = "4: OCyREF is forced low / CombinedPwmMode1: OCyREF has the same behavior as in PWM mode 1. OCyREFC is the logical OR between OC1REF and OC2REF"]
    ForceInactive = 4,
    #[doc = "5: OCyREF is forced high / CombinedPwmMode2: OCyREF has the same behavior as in PWM mode 2. OCyREFC is the logical AND between OC1REF and OC2REF"]
    ForceActive = 5,
    #[doc = "6: In upcounting, channel is active as long as TIMx_CNT<TIMx_CCRy else inactive. In downcounting, channel is inactive as long as TIMx_CNT>TIMx_CCRy else active / AsymmetricPwmMode1: OCyREF has the same behavior as in PWM mode 1. OCyREFC outputs OC1REF when the counter is counting up, OC2REF when it is counting down"]
    PwmMode1 = 6,
    #[doc = "7: Inversely to PwmMode1 / AsymmetricPwmMode2: Inversely to AsymmetricPwmMode1"]
    PwmMode2 = 7,
}
impl From<OC3M_A> for u8 {
    #[inline(always)]
    fn from(variant: OC3M_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for OC3M_A {
    type Ux = u8;
}
// Accessors for the OC3M field reader. All 8 encodings of the 3-bit field are
// named, so `variant()` is total.
impl OC3M_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OC3M_A {
        match self.bits {
            0 => OC3M_A::Frozen,
            1 => OC3M_A::ActiveOnMatch,
            2 => OC3M_A::InactiveOnMatch,
            3 => OC3M_A::Toggle,
            4 => OC3M_A::ForceInactive,
            5 => OC3M_A::ForceActive,
            6 => OC3M_A::PwmMode1,
            7 => OC3M_A::PwmMode2,
            // Unreachable: `R::oc3m` masks the raw value with `& 7`, so bits > 7
            // cannot be constructed.
            _ => unreachable!(),
        }
    }
    #[doc = "The comparison between the output compare register TIMx_CCRy and the counter TIMx_CNT has no effect on the outputs / OpmMode1: Retriggerable OPM mode 1 - In up-counting mode, the channel is active until a trigger event is detected (on TRGI signal). In down-counting mode, the channel is inactive"]
    #[inline(always)]
    pub fn is_frozen(&self) -> bool {
        *self == OC3M_A::Frozen
    }
    #[doc = "Set channel to active level on match. OCyREF signal is forced high when the counter matches the capture/compare register / OpmMode2: Inversely to OpmMode1"]
    #[inline(always)]
    pub fn is_active_on_match(&self) -> bool {
        *self == OC3M_A::ActiveOnMatch
    }
    #[doc = "Set channel to inactive level on match. OCyREF signal is forced low when the counter matches the capture/compare register / Reserved"]
    #[inline(always)]
    pub fn is_inactive_on_match(&self) -> bool {
        *self == OC3M_A::InactiveOnMatch
    }
    #[doc = "OCyREF toggles when TIMx_CNT=TIMx_CCRy / Reserved"]
    #[inline(always)]
    pub fn is_toggle(&self) -> bool {
        *self == OC3M_A::Toggle
    }
    #[doc = "OCyREF is forced low / CombinedPwmMode1: OCyREF has the same behavior as in PWM mode 1. OCyREFC is the logical OR between OC1REF and OC2REF"]
    #[inline(always)]
    pub fn is_force_inactive(&self) -> bool {
        *self == OC3M_A::ForceInactive
    }
    #[doc = "OCyREF is forced high / CombinedPwmMode2: OCyREF has the same behavior as in PWM mode 2. OCyREFC is the logical AND between OC1REF and OC2REF"]
    #[inline(always)]
    pub fn is_force_active(&self) -> bool {
        *self == OC3M_A::ForceActive
    }
    #[doc = "In upcounting, channel is active as long as TIMx_CNT<TIMx_CCRy else inactive. In downcounting, channel is inactive as long as TIMx_CNT>TIMx_CCRy else active / AsymmetricPwmMode1: OCyREF has the same behavior as in PWM mode 1. OCyREFC outputs OC1REF when the counter is counting up, OC2REF when it is counting down"]
    #[inline(always)]
    pub fn is_pwm_mode1(&self) -> bool {
        *self == OC3M_A::PwmMode1
    }
    #[doc = "Inversely to PwmMode1 / AsymmetricPwmMode2: Inversely to AsymmetricPwmMode1"]
    #[inline(always)]
    pub fn is_pwm_mode2(&self) -> bool {
        *self == OC3M_A::PwmMode2
    }
}
#[doc = "Field `OC3M` writer - Output compare 3 mode"]
// `FieldWriterSafe`: every 3-bit value is a defined mode, so no unsafe raw
// write path is needed for this field.
pub type OC3M_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O, OC3M_A>;
impl<'a, REG, const O: u8> OC3M_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "The comparison between the output compare register TIMx_CCRy and the counter TIMx_CNT has no effect on the outputs / OpmMode1: Retriggerable OPM mode 1 - In up-counting mode, the channel is active until a trigger event is detected (on TRGI signal). In down-counting mode, the channel is inactive"]
    #[inline(always)]
    pub fn frozen(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::Frozen)
    }
    #[doc = "Set channel to active level on match. OCyREF signal is forced high when the counter matches the capture/compare register / OpmMode2: Inversely to OpmMode1"]
    #[inline(always)]
    pub fn active_on_match(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::ActiveOnMatch)
    }
    #[doc = "Set channel to inactive level on match. OCyREF signal is forced low when the counter matches the capture/compare register / Reserved"]
    #[inline(always)]
    pub fn inactive_on_match(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::InactiveOnMatch)
    }
    #[doc = "OCyREF toggles when TIMx_CNT=TIMx_CCRy / Reserved"]
    #[inline(always)]
    pub fn toggle(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::Toggle)
    }
    #[doc = "OCyREF is forced low / CombinedPwmMode1: OCyREF has the same behavior as in PWM mode 1. OCyREFC is the logical OR between OC1REF and OC2REF"]
    #[inline(always)]
    pub fn force_inactive(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::ForceInactive)
    }
    #[doc = "OCyREF is forced high / CombinedPwmMode2: OCyREF has the same behavior as in PWM mode 2. OCyREFC is the logical AND between OC1REF and OC2REF"]
    #[inline(always)]
    pub fn force_active(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::ForceActive)
    }
    #[doc = "In upcounting, channel is active as long as TIMx_CNT<TIMx_CCRy else inactive. In downcounting, channel is inactive as long as TIMx_CNT>TIMx_CCRy else active / AsymmetricPwmMode1: OCyREF has the same behavior as in PWM mode 1. OCyREFC outputs OC1REF when the counter is counting up, OC2REF when it is counting down"]
    #[inline(always)]
    pub fn pwm_mode1(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::PwmMode1)
    }
    #[doc = "Inversely to PwmMode1 / AsymmetricPwmMode2: Inversely to AsymmetricPwmMode1"]
    #[inline(always)]
    pub fn pwm_mode2(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_A::PwmMode2)
    }
}
#[doc = "Field `OC3CE` reader - Output compare 3 clear enable"]
pub type OC3CE_R = crate::BitReader;
#[doc = "Field `OC3CE` writer - Output compare 3 clear enable"]
pub type OC3CE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CC4S` reader - Capture/Compare 4 selection"]
pub type CC4S_R = crate::FieldReader<CC4S_A>;
// As with CC3S: only the output-mode encoding of this 2-bit field is named.
#[doc = "Capture/Compare 4 selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CC4S_A {
    #[doc = "0: CC4 channel is configured as output"]
    Output = 0,
}
impl From<CC4S_A> for u8 {
    #[inline(always)]
    fn from(variant: CC4S_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for CC4S_A {
    type Ux = u8;
}
// Accessors for the CC4S field reader; mirrors `CC3S_R`, with only the
// output-mode encoding represented.
impl CC4S_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CC4S_A> {
        if self.bits == 0 {
            Some(CC4S_A::Output)
        } else {
            None
        }
    }
    #[doc = "CC4 channel is configured as output"]
    #[inline(always)]
    pub fn is_output(&self) -> bool {
        *self == CC4S_A::Output
    }
}
#[doc = "Field `CC4S` writer - Capture/Compare 4 selection"]
pub type CC4S_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, CC4S_A>;
impl<'a, REG, const O: u8> CC4S_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "CC4 channel is configured as output"]
    #[inline(always)]
    pub fn output(self) -> &'a mut crate::W<REG> {
        self.variant(CC4S_A::Output)
    }
}
#[doc = "Field `OC4FE` reader - Output compare 4 fast enable"]
pub type OC4FE_R = crate::BitReader;
#[doc = "Field `OC4FE` writer - Output compare 4 fast enable"]
pub type OC4FE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OC4PE` reader - Output compare 4 preload enable"]
pub type OC4PE_R = crate::BitReader<OC4PE_A>;
// Single-bit enable enum for channel 4 preload; mirrors OC3PE_A.
#[doc = "Output compare 4 preload enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OC4PE_A {
    #[doc = "0: Preload register on CCR4 disabled. New values written to CCR4 are taken into account immediately"]
    Disabled = 0,
    #[doc = "1: Preload register on CCR4 enabled. Preload value is loaded into active register on each update event"]
    Enabled = 1,
}
impl From<OC4PE_A> for bool {
    #[inline(always)]
    fn from(variant: OC4PE_A) -> Self {
        variant as u8 != 0
    }
}
// Accessors for the OC4PE bit reader; mirrors `OC3PE_R`.
impl OC4PE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OC4PE_A {
        if self.bits {
            OC4PE_A::Enabled
        } else {
            OC4PE_A::Disabled
        }
    }
    #[doc = "Preload register on CCR4 disabled. New values written to CCR4 are taken into account immediately"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        !self.is_enabled()
    }
    #[doc = "Preload register on CCR4 enabled. Preload value is loaded into active register on each update event"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == OC4PE_A::Enabled
    }
}
#[doc = "Field `OC4PE` writer - Output compare 4 preload enable"]
pub type OC4PE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OC4PE_A>;
impl<'a, REG, const O: u8> OC4PE_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Preload register on CCR4 disabled. New values written to CCR4 are taken into account immediately"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(OC4PE_A::Disabled)
    }
    #[doc = "Preload register on CCR4 enabled. Preload value is loaded into active register on each update event"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(OC4PE_A::Enabled)
    }
}
#[doc = "Field `OC4M` reader - Output compare 4 mode"]
// OC4M shares the OC3M encodings, so its reader/writer are re-exported aliases.
pub use OC3M_R as OC4M_R;
#[doc = "Field `OC4M` writer - Output compare 4 mode"]
pub use OC3M_W as OC4M_W;
#[doc = "Field `OC4CE` reader - Output compare 4 clear enable"]
pub type OC4CE_R = crate::BitReader;
#[doc = "Field `OC4CE` writer - Output compare 4 clear enable"]
pub type OC4CE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OC3M_3` reader - Output Compare 3 mode - bit 3"]
pub type OC3M_3_R = crate::BitReader<OC3M_3_A>;
// High (4th) mode bit for OC3: selects the extended mode table when set.
#[doc = "Output Compare 3 mode - bit 3\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OC3M_3_A {
    #[doc = "0: Normal output compare mode (modes 0-7)"]
    Normal = 0,
    #[doc = "1: Extended output compare mode (modes 7-15)"]
    Extended = 1,
}
impl From<OC3M_3_A> for bool {
    #[inline(always)]
    fn from(variant: OC3M_3_A) -> Self {
        variant as u8 != 0
    }
}
// Accessors for the OC3M_3 bit reader (the 4th OC3 mode bit).
impl OC3M_3_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OC3M_3_A {
        if self.bits {
            OC3M_3_A::Extended
        } else {
            OC3M_3_A::Normal
        }
    }
    #[doc = "Normal output compare mode (modes 0-7)"]
    #[inline(always)]
    pub fn is_normal(&self) -> bool {
        !self.is_extended()
    }
    #[doc = "Extended output compare mode (modes 7-15)"]
    #[inline(always)]
    pub fn is_extended(&self) -> bool {
        *self == OC3M_3_A::Extended
    }
}
#[doc = "Field `OC3M_3` writer - Output Compare 3 mode - bit 3"]
pub type OC3M_3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OC3M_3_A>;
impl<'a, REG, const O: u8> OC3M_3_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Normal output compare mode (modes 0-7)"]
    #[inline(always)]
    pub fn normal(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_3_A::Normal)
    }
    #[doc = "Extended output compare mode (modes 7-15)"]
    #[inline(always)]
    pub fn extended(self) -> &'a mut crate::W<REG> {
        self.variant(OC3M_3_A::Extended)
    }
}
// NOTE(review): unlike OC3M_3, the OC4M high bit has plain bool accessors with
// no enumerated type — presumably an SVD asymmetry; confirm against the SVD.
#[doc = "Field `OC4M_4` reader - Output Compare 4 mode - bit 3"]
pub type OC4M_4_R = crate::BitReader;
#[doc = "Field `OC4M_4` writer - Output Compare 4 mode - bit 3"]
pub type OC4M_4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-side accessors: each field is extracted from the cached register value
// by shifting its bit offset down and masking to the field width.
impl R {
    #[doc = "Bits 0:1 - Capture/Compare 3 selection"]
    #[inline(always)]
    pub fn cc3s(&self) -> CC3S_R {
        CC3S_R::new((self.bits & 3) as u8)
    }
    #[doc = "Bit 2 - Output compare 3 fast enable"]
    #[inline(always)]
    pub fn oc3fe(&self) -> OC3FE_R {
        OC3FE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Output compare 3 preload enable"]
    #[inline(always)]
    pub fn oc3pe(&self) -> OC3PE_R {
        OC3PE_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 4:6 - Output compare 3 mode"]
    #[inline(always)]
    pub fn oc3m(&self) -> OC3M_R {
        // Mask `& 7` guarantees the reader only ever sees values 0-7.
        OC3M_R::new(((self.bits >> 4) & 7) as u8)
    }
    #[doc = "Bit 7 - Output compare 3 clear enable"]
    #[inline(always)]
    pub fn oc3ce(&self) -> OC3CE_R {
        OC3CE_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bits 8:9 - Capture/Compare 4 selection"]
    #[inline(always)]
    pub fn cc4s(&self) -> CC4S_R {
        CC4S_R::new(((self.bits >> 8) & 3) as u8)
    }
    #[doc = "Bit 10 - Output compare 4 fast enable"]
    #[inline(always)]
    pub fn oc4fe(&self) -> OC4FE_R {
        OC4FE_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Output compare 4 preload enable"]
    #[inline(always)]
    pub fn oc4pe(&self) -> OC4PE_R {
        OC4PE_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bits 12:14 - Output compare 4 mode"]
    #[inline(always)]
    pub fn oc4m(&self) -> OC4M_R {
        OC4M_R::new(((self.bits >> 12) & 7) as u8)
    }
    #[doc = "Bit 15 - Output compare 4 clear enable"]
    #[inline(always)]
    pub fn oc4ce(&self) -> OC4CE_R {
        OC4CE_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - Output Compare 3 mode - bit 3"]
    #[inline(always)]
    pub fn oc3m_3(&self) -> OC3M_3_R {
        OC3M_3_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 24 - Output Compare 4 mode - bit 3"]
    #[inline(always)]
    pub fn oc4m_4(&self) -> OC4M_4_R {
        OC4M_4_R::new(((self.bits >> 24) & 1) != 0)
    }
}
// Write-side accessors: each method returns a typed field-writer proxy bound
// to this register at the field's const bit offset.
impl W {
    #[doc = "Bits 0:1 - Capture/Compare 3 selection"]
    #[inline(always)]
    #[must_use]
    pub fn cc3s(&mut self) -> CC3S_W<CCMR2_OUTPUT_SPEC, 0> {
        CC3S_W::new(self)
    }
    #[doc = "Bit 2 - Output compare 3 fast enable"]
    #[inline(always)]
    #[must_use]
    pub fn oc3fe(&mut self) -> OC3FE_W<CCMR2_OUTPUT_SPEC, 2> {
        OC3FE_W::new(self)
    }
    #[doc = "Bit 3 - Output compare 3 preload enable"]
    #[inline(always)]
    #[must_use]
    pub fn oc3pe(&mut self) -> OC3PE_W<CCMR2_OUTPUT_SPEC, 3> {
        OC3PE_W::new(self)
    }
    #[doc = "Bits 4:6 - Output compare 3 mode"]
    #[inline(always)]
    #[must_use]
    pub fn oc3m(&mut self) -> OC3M_W<CCMR2_OUTPUT_SPEC, 4> {
        OC3M_W::new(self)
    }
    #[doc = "Bit 7 - Output compare 3 clear enable"]
    #[inline(always)]
    #[must_use]
    pub fn oc3ce(&mut self) -> OC3CE_W<CCMR2_OUTPUT_SPEC, 7> {
        OC3CE_W::new(self)
    }
    #[doc = "Bits 8:9 - Capture/Compare 4 selection"]
    #[inline(always)]
    #[must_use]
    pub fn cc4s(&mut self) -> CC4S_W<CCMR2_OUTPUT_SPEC, 8> {
        CC4S_W::new(self)
    }
    #[doc = "Bit 10 - Output compare 4 fast enable"]
    #[inline(always)]
    #[must_use]
    pub fn oc4fe(&mut self) -> OC4FE_W<CCMR2_OUTPUT_SPEC, 10> {
        OC4FE_W::new(self)
    }
    #[doc = "Bit 11 - Output compare 4 preload enable"]
    #[inline(always)]
    #[must_use]
    pub fn oc4pe(&mut self) -> OC4PE_W<CCMR2_OUTPUT_SPEC, 11> {
        OC4PE_W::new(self)
    }
    #[doc = "Bits 12:14 - Output compare 4 mode"]
    #[inline(always)]
    #[must_use]
    pub fn oc4m(&mut self) -> OC4M_W<CCMR2_OUTPUT_SPEC, 12> {
        OC4M_W::new(self)
    }
    #[doc = "Bit 15 - Output compare 4 clear enable"]
    #[inline(always)]
    #[must_use]
    pub fn oc4ce(&mut self) -> OC4CE_W<CCMR2_OUTPUT_SPEC, 15> {
        OC4CE_W::new(self)
    }
    #[doc = "Bit 16 - Output Compare 3 mode - bit 3"]
    #[inline(always)]
    #[must_use]
    pub fn oc3m_3(&mut self) -> OC3M_3_W<CCMR2_OUTPUT_SPEC, 16> {
        OC3M_3_W::new(self)
    }
    #[doc = "Bit 24 - Output Compare 4 mode - bit 3"]
    #[inline(always)]
    #[must_use]
    pub fn oc4m_4(&mut self) -> OC4M_4_W<CCMR2_OUTPUT_SPEC, 24> {
        OC4M_4_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe`: bypasses the typed field writers; the caller must supply a bit
    // pattern that is valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "capture/compare mode register 2 (output mode)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccmr2_output::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccmr2_output::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker type describing the CCMR2_Output register to the generic
// register machinery.
pub struct CCMR2_OUTPUT_SPEC;
impl crate::RegisterSpec for CCMR2_OUTPUT_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`ccmr2_output::R`](R) reader structure"]
impl crate::Readable for CCMR2_OUTPUT_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ccmr2_output::W`](W) writer structure"]
impl crate::Writable for CCMR2_OUTPUT_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CCMR2_Output to value 0"]
impl crate::Resettable for CCMR2_OUTPUT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Crate root.
// NOTE(review): the `#![feature(...)]` gates below require an old *nightly*
// toolchain; `conservative_impl_trait`, `collections_bound`, `btree_range` and
// `step_by` have since been stabilized or removed on modern compilers.
#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
#![feature(conservative_impl_trait)]
#![feature(collections_bound)]
#![feature(btree_range)]
#![feature(step_by)]
#![allow(dead_code)]
// External dependencies (pre-2018-edition style `extern crate` declarations).
extern crate chrono;
extern crate nom;
// Internal modules (definitions not visible in this chunk).
mod time_unit;
mod schedule;
// Public API: re-export `Schedule` as the crate's entry point.
pub use schedule::Schedule;
|
//! Representation of the S-expression tree.
#![deny(missing_docs)]
#![deny(unsafe_code)]
use std::str::FromStr;
use std::fmt::{self, Display, Debug, Formatter};
// Convenience alias for the common case of symbols stored as owned `String`s.
/**
 * A single value with symbols represented using `String`.
 *
 * ```rust
 * use atoms::StringValue;
 *
 * let int = StringValue::int(12);
 * let float = StringValue::float(13.0);
 * let string = StringValue::string("fourteen");
 * // Symbols may not always be valid
 * let symbol = StringValue::symbol("fifteen").unwrap();
 *
 * // A list
 * let cons = StringValue::cons(
 *     int,
 *     StringValue::cons(
 *         float,
 *         StringValue::cons(
 *             string,
 *             StringValue::final_cons(
 *                 symbol
 *             )
 *         )
 *     )
 * );
 * ```
 */
pub type StringValue = Value<String>;
/**
 * A single value with a variable representation of symbols.
 *
 * ```rust
 * use atoms::Value;
 *
 * // Represent symbols as `String`
 * let int = Value::<String>::int(12);
 * let float = Value::<String>::float(13.0);
 * let string = Value::<String>::string("fourteen");
 * // Symbols may not always be valid
 * let symbol = Value::<String>::symbol("fifteen").unwrap();
 *
 * // A list
 * let cons = Value::<String>::cons(
 *     int,
 *     Value::<String>::cons(
 *         float,
 *         Value::<String>::cons(
 *             string,
 *             Value::<String>::cons(
 *                 symbol,
 *                 Value::<String>::nil()
 *             )
 *         )
 *     )
 * );
 * ```
 */
// Only PartialEq/PartialOrd are derived (not Eq/Ord) because `Float(f64)`
// cannot satisfy total equality/ordering (NaN).
#[derive(PartialEq, Clone, PartialOrd)]
pub enum Value<Sym: Sized + ToString + FromStr> {
    /// A quoted UTF-8 string value
    Str(String),
    /// An unquoted, case-sensitive symbol
    Symbol(Sym),
    /// An integer value
    Int(i64),
    /// A floating point value
    Float(f64),
    /// A Cons cell
    Cons(Box<Value<Sym>>, Box<Value<Sym>>),
    /// A Nil value
    Nil,
}
impl<Sym: Sized + ToString + FromStr> Value<Sym> {
    /**
     * Create a new symbol from a string.
     *
     * ```rust
     * use atoms::StringValue;
     * let symbol = StringValue::symbol("symbol").unwrap();
     * assert_eq!(symbol.to_string(), "symbol");
     * ```
     *
     * Depending on the type used to represent the symbol, this may fail to
     * produce a symbol and return a `None`. This will be presented as an error
     * by the parser.
     */
    pub fn symbol(s: &str) -> Option<Value<Sym>> {
        let sym = Sym::from_str(s);
        match sym {
            Ok(sym) => Some(Value::Symbol(sym)),
            Err(_) => None,
        }
    }
    /**
     * Create a new string
     *
     * ```rust
     * use atoms::StringValue;
     * let string = StringValue::string("string");
     * assert_eq!(string.to_string(), "\"string\"");
     * ```
     */
    pub fn string<S: Into<String>>(s: S) -> Value<Sym> {
        Value::Str(s.into())
    }
    /**
     * Create a new integer
     *
     * ```rust
     * use atoms::StringValue;
     * let int = StringValue::int(42);
     * assert_eq!(int.to_string(), "42");
     * ```
     */
    pub fn int<I: Into<i64>>(i: I) -> Value<Sym> {
        Value::Int(i.into())
    }
    /**
     * Create a new float
     *
     * ```rust
     * use atoms::StringValue;
     * let float = StringValue::float(13.0);
     * assert_eq!(float.to_string(), "13.0");
     * ```
     */
    pub fn float<F: Into<f64>>(f: F) -> Value<Sym> {
        Value::Float(f.into())
    }
    /**
     * Shorthand for creating cons-cell lists out of `Vec`s.
     *
     * ```rust
     * use atoms::StringValue;
     * let ints = vec![
     *     StringValue::int(1),
     *     StringValue::int(2),
     *     StringValue::int(3),
     *     StringValue::int(4),
     *     StringValue::int(5),
     *     StringValue::int(6)
     * ];
     * let list = StringValue::list(ints);
     * assert_eq!(list.to_string(), "(1 2 3 4 5 6)");
     * ```
     */
    pub fn list<V: Into<Value<Sym>>>(mut source_vec: Vec<V>) -> Value<Sym> {
        // Build back-to-front: the end of the list is a nil, and each element
        // is consed onto the accumulated tail.
        let mut result = Value::Nil;
        while let Some(value) = source_vec.pop() {
            result = Value::cons(value.into(), result)
        }
        result
    }
    /**
     * Shorthand to convert a vec into a cons-cell list with a given map.
     *
     * ```rust
     * use atoms::StringValue;
     * let ints = vec![1, 2, 3, 4, 5, 6];
     * let list = StringValue::into_list(ints, |c| StringValue::int(*c));
     * assert_eq!(list.to_string(), "(1 2 3 4 5 6)");
     * ```
     */
    pub fn into_list<A, V: Into<Value<Sym>>, F>(source_vec: Vec<A>, map: F) -> Value<Sym>
    where F: Fn(&A) -> V {
        // Convert all members into values
        let converted = source_vec.iter().map(map).collect();
        Value::list(converted)
    }
    /**
     * Create a cons cell.
     *
     * The two sides may be converted from different types, as long as each
     * converts into a `Value`.
     *
     * ```rust
     * use atoms::StringValue;
     * let cons = StringValue::cons(
     *     StringValue::int(12),
     *     StringValue::string("13")
     * );
     * assert_eq!(cons.to_string(), "(12 . \"13\")");
     * ```
     */
    // Generalized: two independent type parameters so `cons(12_i64_into, "x")`
    // style calls need not convert through the same intermediate type.
    // Backward compatible with the previous single-parameter signature.
    pub fn cons<L, R>(left: L, right: R) -> Value<Sym>
    where L: Into<Value<Sym>>, R: Into<Value<Sym>> {
        Value::Cons(Box::new(left.into()), Box::new(right.into()))
    }
    /**
     * Create a cons cell with only a left element.
     *
     * This creates a cons cell with the right element being a nil. This is
     * useful it you are manually constructing lists.
     *
     * ```rust
     * use atoms::StringValue;
     * let cons = StringValue::final_cons(
     *     StringValue::int(12),
     * );
     * assert_eq!(cons.to_string(), "(12)");
     * ```
     */
    pub fn final_cons<V: Into<Value<Sym>>>(left: V) -> Value<Sym> {
        Value::cons(left.into(), Value::Nil)
    }
    /**
     * Create a nil.
     *
     * ```rust
     * use atoms::StringValue;
     * assert_eq!(StringValue::nil().to_string(), "()");
     * ```
     */
    pub fn nil() -> Value<Sym> {
        Value::Nil
    }
    /**
     * Check if a value is a nil
     *
     * ```rust
     * use atoms::StringValue;
     * assert!(StringValue::nil().is_nil());
     * assert!(!StringValue::final_cons(StringValue::nil()).is_nil());
     * ```
     */
    pub fn is_nil(&self) -> bool {
        match *self {
            Value::Nil => true,
            _ => false,
        }
    }
    /**
     * Check if a value is a valid list.
     *
     * A value is a list if:
     * * it is a `Value::Nil`, or
     * * it is a `Value::Cons` with a rightmost element this is a list.
     *
     * ```rust
     * use atoms::StringValue;
     *
     * // `Nil` is a valid list
     * assert!(StringValue::nil().is_list());
     *
     * // `final_cons` ensures we get a valid list
     * assert!(StringValue::cons(
     *     StringValue::int(12),
     *     StringValue::final_cons(
     *         StringValue::float(13.0)
     *     )
     * ).is_list());
     *
     * // Manually terminated lists are valid
     * assert!(StringValue::cons(
     *     StringValue::int(12),
     *     StringValue::cons(
     *         StringValue::float(13.0),
     *         StringValue::nil()
     *     )
     * ).is_list());
     *
     * // These are not lists
     * assert!(!StringValue::int(12).is_list());
     * assert!(!StringValue::float(12.0).is_list());
     * assert!(!StringValue::string("12").is_list());
     * assert!(!StringValue::symbol("sym").unwrap().is_list());
     * assert!(!StringValue::cons(
     *     StringValue::nil(),
     *     StringValue::symbol("sym").unwrap()
     * ).is_list());
     * ```
     */
    pub fn is_list(&self) -> bool {
        // Recurse down the right spine; a proper list terminates in Nil.
        match *self {
            Value::Nil => true,
            Value::Cons(_, ref right) => right.is_list(),
            _ => false,
        }
    }
}
// Render a value as S-expression text: atoms print directly, cons chains are
// delegated to `display_cons`, which handles list vs. dotted-pair notation.
impl<Sym> Display for Value<Sym> where Sym: ToString + FromStr + Sized {
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        match *self {
            Value::Nil => write!(f, "()"),
            Value::Int(ref i) => write!(f, "{}", i),
            Value::Float(ref fl) => format_float(f, *fl),
            Value::Str(ref text) => write!(f, "\"{}\"", escape_string(&text)),
            Value::Symbol(ref sym) => {
                // Spaces inside a symbol are backslash-escaped before the
                // generic escaping pass so the symbol round-trips unquoted.
                let rendered = escape_string(&sym.to_string().replace(" ", "\\ "));
                write!(f, "{}", rendered)
            }
            Value::Cons(ref car, ref cdr) => display_cons(car, cdr, true, f),
        }
    }
}
// Debug output deliberately reuses `Display`: an S-expression is already its
// own most readable representation.
impl<Sym> Debug for Value<Sym> where Sym: ToString + FromStr + Sized {
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        write!(f, "{}", self)
    }
}
/// Recursively render a cons chain.
///
/// `root` is true only for the outermost cell, which emits the opening paren.
/// A chain terminating in `Nil` prints as a list `(a b c)`; any other
/// terminator prints dotted-pair style `(a b . c)`.
// Idiom fix: the deprecated `try!` macro is replaced by the `?` operator;
// behavior is unchanged.
fn display_cons<Sym: ToString + FromStr + Sized>(left: &Value<Sym>,
                                                 right: &Value<Sym>,
                                                 root: bool, f: &mut Formatter)
                                                 -> Result<(), fmt::Error> {
    if root {
        write!(f, "(")?;
    }
    // Write the left value
    write!(f, "{}", left)?;
    // Write the right value
    match *right {
        Value::Nil => write!(f, ")"),
        Value::Cons(ref left, ref right) => {
            write!(f, " ")?;
            display_cons(left, right, false, f)
        }
        _ => write!(f, " . {})", right),
    }
}
/// Escape every character of `text` using `char::escape_default` (so tabs
/// become `\t`, quotes become `\"`, etc.) and collect the result.
///
/// Idiom fix: the parameter was a bare (pre-`dyn`) trait object `&AsRef<str>`;
/// a generic bound is the modern, monomorphized equivalent and accepts every
/// previous call site (`&String`, `&&String`, …) plus plain `&str`.
fn escape_string<S: AsRef<str> + ?Sized>(text: &S) -> String {
    text.as_ref()
        .chars()
        .flat_map(|c| c.escape_default())
        .collect()
}
// Write a float so that integral values keep a trailing ".0" (e.g. "13.0"),
// matching how the parser distinguishes floats from ints; non-integral values
// use the default shortest representation.
fn format_float<F: Into<f64>>(f: &mut Formatter, fl: F) -> Result<(), fmt::Error> {
    let value: f64 = fl.into();
    let is_integral = value.fract() == 0f64;
    if is_integral {
        write!(f, "{:.1}", value)
    } else {
        write!(f, "{}", value)
    }
}
// Round-trip formatting checks for every `Value` constructor, including the
// escaping of control characters in strings and symbols and both proper-list
// and dotted-pair cons rendering.
#[test]
fn value_fmt_test() {
    assert_eq!(format!("{:?}", Value::<String>::int(13)), "13");
    assert_eq!(format!("{:?}", Value::<String>::int(-13)), "-13");
    // Integral floats must keep a ".0" suffix to stay distinguishable from ints.
    assert_eq!(format!("{:?}", Value::<String>::float(13.0)), "13.0");
    assert_eq!(format!("{:?}", Value::<String>::float(13.125)), "13.125");
    assert_eq!(format!("{:?}", Value::<String>::float(13.333)), "13.333");
    assert_eq!(format!("{:?}", Value::<String>::float(-13.333)), "-13.333");
    assert_eq!(format!("{:?}", Value::<String>::string("text")), "\"text\"");
    assert_eq!(format!("{:?}", Value::<String>::string("hello\tthere\nfriend")), "\"hello\\tthere\\nfriend\"");
    assert_eq!(format!("{:?}", Value::<String>::symbol("text").unwrap()), "text");
    assert_eq!(format!("{:?}", Value::<String>::symbol("hello\tthere\nfriend").unwrap()), "hello\\tthere\\nfriend");
    assert_eq!(format!("{:?}", Value::<String>::list(vec![
        Value::<String>::int(13),
        Value::<String>::float(13.333),
        Value::<String>::string("text"),
        Value::<String>::symbol("symbol").unwrap(),
    ])), "(13 13.333 \"text\" symbol)");
    // Improper list (non-Nil terminator) renders with dotted-pair notation.
    assert_eq!(format!("{:?}", Value::<String>::cons(
        Value::int(13),
        Value::cons(
            Value::float(13.333),
            Value::cons(
                Value::string("text"),
                Value::symbol("symbol").unwrap()
            )
        )
    )), "(13 13.333 \"text\" . symbol)");
}
|
// Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Portions Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE-BSD-3-Clause file.
//
// Copyright © 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0 AND BSD-3-Clause
//
extern crate anyhow;
extern crate arch;
extern crate devices;
extern crate epoll;
extern crate kvm_ioctls;
extern crate libc;
extern crate linux_loader;
extern crate net_util;
extern crate signal_hook;
#[cfg(feature = "pci_support")]
extern crate vfio;
extern crate vm_allocator;
extern crate vm_memory;
extern crate vm_virtio;
use crate::config::VmConfig;
use crate::cpu;
use crate::device_manager::{get_win_size, Console, DeviceManager, DeviceManagerError};
use anyhow::anyhow;
use arch::RegionType;
use devices::{ioapic, HotPlugNotificationType};
use kvm_bindings::{kvm_enable_cap, kvm_userspace_memory_region, KVM_CAP_SPLIT_IRQCHIP};
use kvm_ioctls::*;
use linux_loader::cmdline::Cmdline;
use linux_loader::loader::KernelLoader;
use signal_hook::{iterator::Signals, SIGINT, SIGTERM, SIGWINCH};
use std::ffi::CString;
use std::fs::{File, OpenOptions};
use std::io;
use std::ops::Deref;
use std::os::unix::io::FromRawFd;
use std::sync::{Arc, Mutex, RwLock};
use std::{result, str, thread};
use vm_allocator::{GsiApic, SystemAllocator};
use vm_device::{Migratable, MigratableError, Pausable, Snapshotable};
use vm_memory::guest_memory::FileOffset;
use vm_memory::{
Address, Bytes, Error as MmapError, GuestAddress, GuestMemory, GuestMemoryMmap,
GuestMemoryRegion, GuestUsize,
};
use vmm_sys_util::eventfd::EventFd;
use vmm_sys_util::terminal::Terminal;
// Lowest GSI handed to devices on x86_64.
// NOTE(review): presumably IRQs below 5 are left for legacy devices — confirm
// against the SystemAllocator / GsiApic setup.
const X86_64_IRQ_BASE: u32 = 5;
// CPUID feature bits
const TSC_DEADLINE_TIMER_ECX_BIT: u8 = 24; // tsc deadline timer ecx bit.
const HYPERVISOR_ECX_BIT: u8 = 31; // Hypervisor ecx bit.
// 64 bit direct boot entry offset for bzImage
const KERNEL_64BIT_ENTRY_OFFSET: u64 = 0x200;
/// Errors associated with VM management
#[derive(Debug)]
pub enum Error {
    /// Cannot open the VM file descriptor.
    VmFd(io::Error),
    /// Cannot create the KVM instance
    VmCreate(kvm_ioctls::Error),
    /// Cannot set the VM up
    VmSetup(kvm_ioctls::Error),
    /// Cannot open the kernel image
    KernelFile(io::Error),
    /// Mmap backed guest memory error
    GuestMemory(MmapError),
    /// Cannot load the kernel in memory
    KernelLoad(linux_loader::loader::Error),
    /// Cannot load the command line in memory
    CmdLine,
    /// Internal state lock was poisoned.
    /// NOTE(review): name-inferred — confirm this is produced when locking the
    /// `RwLock<VmState>` fails.
    PoisonedState,
    /// Cannot create a device manager.
    DeviceManager(DeviceManagerError),
    /// Write to the console failed.
    Console(vmm_sys_util::errno::Error),
    /// Cannot setup terminal in raw mode.
    SetTerminalRaw(vmm_sys_util::errno::Error),
    /// Cannot setup terminal in canonical mode.
    SetTerminalCanon(vmm_sys_util::errno::Error),
    /// Cannot create the system allocator
    CreateSystemAllocator,
    /// Failed parsing network parameters
    ParseNetworkParameters,
    /// Memory is overflow
    MemOverflow,
    /// Failed to create shared file.
    SharedFileCreate(io::Error),
    /// Failed to set shared file length.
    SharedFileSetLen(io::Error),
    /// Failed to allocate a memory range.
    MemoryRangeAllocation,
    /// Failed to allocate the IOAPIC memory range.
    IoapicRangeAllocation,
    /// Cannot spawn a signal handler thread
    SignalHandlerSpawn(io::Error),
    /// Failed to join on vCPU threads
    ThreadCleanup,
    /// Failed to create a new KVM instance
    KvmNew(kvm_ioctls::Error),
    /// VM is not created
    VmNotCreated,
    /// VM is not running
    VmNotRunning,
    /// Cannot clone EventFd.
    EventFdClone(io::Error),
    /// Invalid VM state transition
    InvalidStateTransition(VmState, VmState),
    /// Error from CPU handling
    CpuManager(cpu::Error),
    /// Capability missing
    CapabilityMissing(Cap),
    /// Cannot pause devices
    PauseDevices(MigratableError),
    /// Cannot resume devices
    ResumeDevices(MigratableError),
    /// Cannot pause CPUs
    PauseCpus(MigratableError),
    /// Cannot resume cpus
    ResumeCpus(MigratableError),
    /// Cannot pause VM
    Pause(MigratableError),
    /// Cannot resume VM
    Resume(MigratableError),
}
/// Result type used by all VM management operations.
pub type Result<T> = result::Result<T, Error>;

/// References describing a created VM, handed to the device manager.
pub struct VmInfo<'a> {
    /// Guest RAM mapping.
    pub memory: &'a Arc<RwLock<GuestMemoryMmap>>,
    /// KVM VM file descriptor.
    pub vm_fd: &'a Arc<VmFd>,
    /// Configuration the VM was created from.
    pub vm_cfg: Arc<Mutex<VmConfig>>,
    /// Start of the guest address range reserved for devices
    /// (computed in `Vm::new` from the end of RAM).
    pub start_of_device_area: GuestAddress,
    /// End (inclusive) of the guest address range reserved for devices.
    pub end_of_device_area: GuestAddress,
}
/// Lifecycle states of a VM. Legal state changes are encoded in
/// `VmState::valid_transition`.
#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq)]
pub enum VmState {
    /// Created but not yet booted.
    Created,
    /// vCPUs are running.
    Running,
    /// Shut down; may be booted again.
    Shutdown,
    /// Paused; may be resumed or shut down.
    Paused,
}
impl VmState {
    /// Checks whether moving from `self` to `new_state` is a legal
    /// lifecycle change, returning `Error::InvalidStateTransition`
    /// for any forbidden pair.
    fn valid_transition(self, new_state: VmState) -> Result<()> {
        let allowed = match (self, new_state) {
            // A freshly created VM can only be booted.
            (VmState::Created, VmState::Running) => true,
            // A running VM can be paused or shut down.
            (VmState::Running, VmState::Paused) => true,
            (VmState::Running, VmState::Shutdown) => true,
            // A shut-down VM can only be rebooted.
            (VmState::Shutdown, VmState::Running) => true,
            // A paused VM can resume or shut down.
            (VmState::Paused, VmState::Running) => true,
            (VmState::Paused, VmState::Shutdown) => true,
            // Everything else (including self-transitions) is forbidden.
            _ => false,
        };
        if allowed {
            Ok(())
        } else {
            Err(Error::InvalidStateTransition(self, new_state))
        }
    }
}
/// A KVM-backed virtual machine and the resources it owns.
pub struct Vm {
    // Kernel image file, loaded into guest memory by `load_kernel`.
    kernel: File,
    // Guest RAM.
    memory: Arc<RwLock<GuestMemoryMmap>>,
    // Helper threads (currently only the signal handler), joined on shutdown.
    threads: Vec<thread::JoinHandle<()>>,
    // All devices managed on behalf of the guest.
    devices: DeviceManager,
    // Configuration this VM was built from.
    config: Arc<Mutex<VmConfig>>,
    // Whether stdin is a TTY (drives raw/canonical terminal switching).
    on_tty: bool,
    // Handle used to stop the signal-handler thread on shutdown.
    signals: Option<Signals>,
    // Current lifecycle state; see `VmState`.
    state: RwLock<VmState>,
    // vCPU lifecycle management.
    cpu_manager: Arc<Mutex<cpu::CpuManager>>,
}
/// Returns the number of physical address bits supported by the host CPU,
/// reduced by any bits reserved when AMD SME is enabled. Falls back to 36
/// bits when the extended CPUID leaf 0x8000_0008 is unavailable.
fn get_host_cpu_phys_bits() -> u8 {
    use core::arch::x86_64;
    unsafe {
        let leaf = x86_64::__cpuid(0x8000_0000);

        // Detect and handle AMD SME (Secure Memory Encryption) properly.
        // Some physical address bits may become reserved when the feature is enabled.
        // See AMD64 Architecture Programmer's Manual Volume 2, Section 7.10.1
        let reduced = if leaf.eax >= 0x8000_001f
            && leaf.ebx == 0x6874_7541 // Vendor ID: AuthenticAMD ("Auth")
            && leaf.ecx == 0x444d_4163 // ("cAMD")
            && leaf.edx == 0x6974_6e65 // ("enti")
            // Leaf 0x8000_001f EAX bit 0: SME is supported.
            && x86_64::__cpuid(0x8000_001f).eax & 0x1 != 0
        {
            // EBX bits 11:6 give the count of reserved physical address bits.
            (x86_64::__cpuid(0x8000_001f).ebx >> 6) & 0x3f
        } else {
            0
        };
        if leaf.eax >= 0x8000_0008 {
            // Leaf 0x8000_0008 EAX bits 7:0: physical address bits.
            let leaf = x86_64::__cpuid(0x8000_0008);
            ((leaf.eax & 0xff) - reduced) as u8
        } else {
            // Extended leaf not available; assume 36 physical address bits.
            36
        }
    }
}
impl Vm {
pub fn new(
config: Arc<Mutex<VmConfig>>,
exit_evt: EventFd,
reset_evt: EventFd,
) -> Result<Self> {
let kvm = Kvm::new().map_err(Error::KvmNew)?;
// Check required capabilities:
if !kvm.check_extension(Cap::SignalMsi) {
return Err(Error::CapabilityMissing(Cap::SignalMsi));
}
if !kvm.check_extension(Cap::TscDeadlineTimer) {
return Err(Error::CapabilityMissing(Cap::TscDeadlineTimer));
}
if !kvm.check_extension(Cap::SplitIrqchip) {
return Err(Error::CapabilityMissing(Cap::SplitIrqchip));
}
let kernel = File::open(&config.lock().unwrap().kernel.as_ref().unwrap().path)
.map_err(Error::KernelFile)?;
let fd = kvm.create_vm().map_err(Error::VmCreate)?;
let fd = Arc::new(fd);
// Init guest memory
let arch_mem_regions = arch::arch_memory_regions(config.lock().unwrap().memory.size);
let ram_regions: Vec<(GuestAddress, usize)> = arch_mem_regions
.iter()
.filter(|r| r.2 == RegionType::Ram)
.map(|r| (r.0, r.1))
.collect();
let sub_regions: Vec<(GuestAddress, usize)> = arch_mem_regions
.iter()
.filter(|r| r.2 == RegionType::SubRegion)
.map(|r| (r.0, r.1))
.collect();
// Check the number of reserved regions, and only take the first one
// that's acrtually a 32-bit hole.
let mut mem_hole = (GuestAddress(0), 0);
for region in sub_regions.iter() {
if region.0.unchecked_add(region.1 as u64).raw_value() <= 0x1_0000_0000 {
mem_hole = (region.0, region.1);
break;
}
}
let guest_memory = match config.lock().unwrap().memory.file {
Some(ref file) => {
let mut mem_regions = Vec::<(GuestAddress, usize, Option<FileOffset>)>::new();
for region in ram_regions.iter() {
if file.is_file() {
let file = OpenOptions::new()
.read(true)
.write(true)
.open(file)
.map_err(Error::SharedFileCreate)?;
file.set_len(region.1 as u64)
.map_err(Error::SharedFileSetLen)?;
mem_regions.push((region.0, region.1, Some(FileOffset::new(file, 0))));
} else if file.is_dir() {
let fs_str = format!("{}{}", file.display(), "/tmpfile_XXXXXX");
let fs = std::ffi::CString::new(fs_str).unwrap();
let mut path = fs.as_bytes_with_nul().to_owned();
let path_ptr = path.as_mut_ptr() as *mut _;
let fd = unsafe { libc::mkstemp(path_ptr) };
unsafe { libc::unlink(path_ptr) };
let f = unsafe { File::from_raw_fd(fd) };
f.set_len(region.1 as u64)
.map_err(Error::SharedFileSetLen)?;
mem_regions.push((region.0, region.1, Some(FileOffset::new(f, 0))));
}
}
GuestMemoryMmap::with_files(&mem_regions).map_err(Error::GuestMemory)?
}
None => GuestMemoryMmap::new(&ram_regions).map_err(Error::GuestMemory)?,
};
guest_memory
.with_regions(|index, region| {
let mem_region = kvm_userspace_memory_region {
slot: index as u32,
guest_phys_addr: region.start_addr().raw_value(),
memory_size: region.len() as u64,
userspace_addr: region.as_ptr() as u64,
flags: 0,
};
// Safe because the guest regions are guaranteed not to overlap.
unsafe {
fd.set_user_memory_region(mem_region)
.map_err(|e| io::Error::from_raw_os_error(e.errno()))
}?;
// Mark the pages as mergeable if explicitly asked for.
if config.lock().unwrap().memory.mergeable {
// Safe because the address and size are valid since the
// mmap succeeded.
let ret = unsafe {
libc::madvise(
region.as_ptr() as *mut libc::c_void,
region.len() as libc::size_t,
libc::MADV_MERGEABLE,
)
};
if ret != 0 {
let err = io::Error::last_os_error();
// Safe to unwrap because the error is constructed with
// last_os_error(), which ensures the output will be Some().
let errno = err.raw_os_error().unwrap();
if errno == libc::EINVAL {
warn!("kernel not configured with CONFIG_KSM");
} else {
warn!("madvise error: {}", err);
}
warn!("failed to mark pages as mergeable");
}
}
Ok(())
})
.map_err(|_: io::Error| Error::GuestMemory(MmapError::NoMemoryRegion))?;
// Set TSS
fd.set_tss_address(arch::x86_64::layout::KVM_TSS_ADDRESS.raw_value() as usize)
.map_err(Error::VmSetup)?;
let mut cpuid_patches = Vec::new();
// Create split irqchip
// Only the local APIC is emulated in kernel, both PICs and IOAPIC
// are not.
let mut cap: kvm_enable_cap = Default::default();
cap.cap = KVM_CAP_SPLIT_IRQCHIP;
cap.args[0] = ioapic::NUM_IOAPIC_PINS as u64;
fd.enable_cap(&cap).map_err(Error::VmSetup)?;
// Patch tsc deadline timer bit
cpuid_patches.push(cpu::CpuidPatch {
function: 1,
index: 0,
flags_bit: None,
eax_bit: None,
ebx_bit: None,
ecx_bit: Some(TSC_DEADLINE_TIMER_ECX_BIT),
edx_bit: None,
});
// Patch hypervisor bit
cpuid_patches.push(cpu::CpuidPatch {
function: 1,
index: 0,
flags_bit: None,
eax_bit: None,
ebx_bit: None,
ecx_bit: Some(HYPERVISOR_ECX_BIT),
edx_bit: None,
});
// Supported CPUID
let mut cpuid = kvm
.get_supported_cpuid(kvm_bindings::KVM_MAX_CPUID_ENTRIES)
.map_err(Error::VmSetup)?;
cpu::CpuidPatch::patch_cpuid(&mut cpuid, cpuid_patches);
let ioapic = GsiApic::new(
X86_64_IRQ_BASE,
ioapic::NUM_IOAPIC_PINS as u32 - X86_64_IRQ_BASE,
);
// Let's allocate 64 GiB of addressable MMIO space, starting at 0.
let mut allocator = SystemAllocator::new(
GuestAddress(0),
1 << 16 as GuestUsize,
GuestAddress(0),
1 << get_host_cpu_phys_bits(),
mem_hole.0,
mem_hole.1 as GuestUsize,
vec![ioapic],
)
.ok_or(Error::CreateSystemAllocator)?;
// Allocate RAM and Reserved address ranges.
for region in arch_mem_regions.iter() {
allocator
.allocate_mmio_addresses(Some(region.0), region.1 as GuestUsize, None)
.ok_or(Error::MemoryRangeAllocation)?;
}
let end_of_device_area = GuestAddress((1 << get_host_cpu_phys_bits()) - 1);
let mem_end = guest_memory.end_addr();
let start_of_device_area = if mem_end < arch::layout::MEM_32BIT_RESERVED_START {
arch::layout::RAM_64BIT_START
} else {
mem_end.unchecked_add(1)
};
// Convert the guest memory into an Arc. The point being able to use it
// anywhere in the code, no matter which thread might use it.
// Add the RwLock aspect to guest memory as we might want to perform
// additions to the memory during runtime.
let guest_memory = Arc::new(RwLock::new(guest_memory));
let vm_info = VmInfo {
memory: &guest_memory,
vm_fd: &fd,
vm_cfg: config.clone(),
start_of_device_area,
end_of_device_area,
};
let device_manager = DeviceManager::new(
&vm_info,
allocator,
ram_regions.len() as u32,
&exit_evt,
&reset_evt,
)
.map_err(Error::DeviceManager)?;
let on_tty = unsafe { libc::isatty(libc::STDIN_FILENO as i32) } != 0;
let boot_vcpus = config.lock().unwrap().cpus.boot_vcpus;
let max_vcpus = config.lock().unwrap().cpus.max_vcpus;
let cpu_manager = cpu::CpuManager::new(
boot_vcpus,
max_vcpus,
&device_manager,
guest_memory.clone(),
fd,
cpuid,
reset_evt,
)
.map_err(Error::CpuManager)?;
Ok(Vm {
kernel,
memory: guest_memory,
devices: device_manager,
config,
on_tty,
threads: Vec::with_capacity(1),
signals: None,
state: RwLock::new(VmState::Created),
cpu_manager,
})
}
fn load_kernel(&mut self) -> Result<GuestAddress> {
let mut cmdline = Cmdline::new(arch::CMDLINE_MAX_SIZE);
cmdline
.insert_str(self.config.lock().unwrap().cmdline.args.clone())
.map_err(|_| Error::CmdLine)?;
for entry in self.devices.cmdline_additions() {
cmdline.insert_str(entry).map_err(|_| Error::CmdLine)?;
}
let cmdline_cstring = CString::new(cmdline).map_err(|_| Error::CmdLine)?;
let mem = self.memory.read().unwrap();
let entry_addr = match linux_loader::loader::Elf::load(
mem.deref(),
None,
&mut self.kernel,
Some(arch::layout::HIGH_RAM_START),
) {
Ok(entry_addr) => entry_addr,
Err(linux_loader::loader::Error::InvalidElfMagicNumber) => {
linux_loader::loader::BzImage::load(
mem.deref(),
None,
&mut self.kernel,
Some(arch::layout::HIGH_RAM_START),
)
.map_err(Error::KernelLoad)?
}
_ => panic!("Invalid elf file"),
};
linux_loader::loader::load_cmdline(
mem.deref(),
arch::layout::CMDLINE_START,
&cmdline_cstring,
)
.map_err(|_| Error::CmdLine)?;
let boot_vcpus = self.cpu_manager.lock().unwrap().boot_vcpus();
let _max_vcpus = self.cpu_manager.lock().unwrap().max_vcpus();
#[allow(unused_mut, unused_assignments)]
let mut rsdp_addr: Option<GuestAddress> = None;
#[cfg(feature = "acpi")]
{
rsdp_addr = Some(crate::acpi::create_acpi_tables(
&mem,
&self.devices,
&self.cpu_manager,
));
}
match entry_addr.setup_header {
Some(hdr) => {
arch::configure_system(
&mem,
arch::layout::CMDLINE_START,
cmdline_cstring.to_bytes().len() + 1,
boot_vcpus,
Some(hdr),
rsdp_addr,
)
.map_err(|_| Error::CmdLine)?;
let load_addr = entry_addr
.kernel_load
.raw_value()
.checked_add(KERNEL_64BIT_ENTRY_OFFSET)
.ok_or(Error::MemOverflow)?;
Ok(GuestAddress(load_addr))
}
None => {
arch::configure_system(
&mem,
arch::layout::CMDLINE_START,
cmdline_cstring.to_bytes().len() + 1,
boot_vcpus,
None,
rsdp_addr,
)
.map_err(|_| Error::CmdLine)?;
Ok(entry_addr.kernel_load)
}
}
}
pub fn shutdown(&mut self) -> Result<()> {
let mut state = self.state.try_write().map_err(|_| Error::PoisonedState)?;
let new_state = VmState::Shutdown;
state.valid_transition(new_state)?;
if self.on_tty {
// Don't forget to set the terminal in canonical mode
// before to exit.
io::stdin()
.lock()
.set_canon_mode()
.map_err(Error::SetTerminalCanon)?;
}
// Trigger the termination of the signal_handler thread
if let Some(signals) = self.signals.take() {
signals.close();
}
self.cpu_manager
.lock()
.unwrap()
.shutdown()
.map_err(Error::CpuManager)?;
// Wait for all the threads to finish
for thread in self.threads.drain(..) {
thread.join().map_err(|_| Error::ThreadCleanup)?
}
*state = new_state;
Ok(())
}
pub fn resize(&mut self, desired_vcpus: u8) -> Result<()> {
self.cpu_manager
.lock()
.unwrap()
.resize(desired_vcpus)
.map_err(Error::CpuManager)?;
self.devices
.notify_hotplug(HotPlugNotificationType::CPUDevicesChanged)
.map_err(Error::DeviceManager)?;
self.config.lock().unwrap().cpus.boot_vcpus = desired_vcpus;
Ok(())
}
fn os_signal_handler(signals: Signals, console_input_clone: Arc<Console>, on_tty: bool) {
for signal in signals.forever() {
match signal {
SIGWINCH => {
let (col, row) = get_win_size();
console_input_clone.update_console_size(col, row);
}
SIGTERM | SIGINT => {
if on_tty {
io::stdin()
.lock()
.set_canon_mode()
.expect("failed to restore terminal mode");
}
std::process::exit((signal != SIGTERM) as i32);
}
_ => (),
}
}
}
pub fn boot(&mut self) -> Result<()> {
let current_state = self.get_state()?;
if current_state == VmState::Paused {
return self.resume().map_err(Error::Resume);
}
let new_state = VmState::Running;
current_state.valid_transition(new_state)?;
let entry_addr = self.load_kernel()?;
self.cpu_manager
.lock()
.unwrap()
.start_boot_vcpus(entry_addr)
.map_err(Error::CpuManager)?;
if self.devices.console().input_enabled() {
let console = self.devices.console().clone();
let signals = Signals::new(&[SIGWINCH, SIGINT, SIGTERM]);
match signals {
Ok(signals) => {
self.signals = Some(signals.clone());
let on_tty = self.on_tty;
self.threads.push(
thread::Builder::new()
.name("signal_handler".to_string())
.spawn(move || Vm::os_signal_handler(signals, console, on_tty))
.map_err(Error::SignalHandlerSpawn)?,
);
}
Err(e) => error!("Signal not found {}", e),
}
if self.on_tty {
io::stdin()
.lock()
.set_raw_mode()
.map_err(Error::SetTerminalRaw)?;
}
}
let mut state = self.state.try_write().map_err(|_| Error::PoisonedState)?;
*state = new_state;
Ok(())
}
/// Gets an Arc to the guest memory owned by this VM.
pub fn get_memory(&self) -> Arc<RwLock<GuestMemoryMmap>> {
self.memory.clone()
}
pub fn handle_stdin(&self) -> Result<()> {
let mut out = [0u8; 64];
let count = io::stdin()
.lock()
.read_raw(&mut out)
.map_err(Error::Console)?;
if self.devices.console().input_enabled() {
self.devices
.console()
.queue_input_bytes(&out[..count])
.map_err(Error::Console)?;
}
Ok(())
}
/// Gets a thread-safe reference counted pointer to the VM configuration.
pub fn get_config(&self) -> Arc<Mutex<VmConfig>> {
Arc::clone(&self.config)
}
/// Get the VM state. Returns an error if the state is poisoned.
pub fn get_state(&self) -> Result<VmState> {
self.state
.try_read()
.map_err(|_| Error::PoisonedState)
.map(|state| *state)
}
}
impl Pausable for Vm {
    /// Pauses the VM: vCPUs first, then devices, then records the state.
    fn pause(&mut self) -> std::result::Result<(), MigratableError> {
        let mut state = self
            .state
            .try_write()
            .map_err(|e| MigratableError::Pause(anyhow!("Could not get VM state: {}", e)))?;
        let new_state = VmState::Paused;

        state
            .valid_transition(new_state)
            .map_err(|e| MigratableError::Pause(anyhow!("Invalid transition: {:?}", e)))?;

        self.cpu_manager.lock().unwrap().pause()?;
        self.devices.pause()?;

        *state = new_state;
        Ok(())
    }

    /// Resumes the VM: devices first, then vCPUs, then records the state.
    fn resume(&mut self) -> std::result::Result<(), MigratableError> {
        let mut state = self
            .state
            .try_write()
            .map_err(|e| MigratableError::Resume(anyhow!("Could not get VM state: {}", e)))?;
        let new_state = VmState::Running;

        state
            .valid_transition(new_state)
            // Fixed: this error previously used MigratableError::Pause,
            // misreporting a failed resume as a failed pause.
            .map_err(|e| MigratableError::Resume(anyhow!("Invalid transition: {:?}", e)))?;

        self.devices.resume()?;
        self.cpu_manager.lock().unwrap().resume()?;

        // And we're back to the Running state.
        *state = new_state;
        Ok(())
    }
}
// Snapshot and migration support rely entirely on the traits' default
// (no-op) methods for now.
impl Snapshotable for Vm {}
impl Migratable for Vm {}
#[cfg(test)]
mod tests {
    use super::*;

    /// Exhaustively checks the allowed/forbidden transitions out of `state`.
    ///
    /// The match below has no catch-all arm on purpose: adding a new
    /// `VmState` variant must force this test to be updated.
    fn test_vm_state_transitions(state: VmState) {
        match state {
            VmState::Created => {
                // Check the transitions from Created
                assert!(state.valid_transition(VmState::Created).is_err());
                assert!(state.valid_transition(VmState::Running).is_ok());
                assert!(state.valid_transition(VmState::Shutdown).is_err());
                assert!(state.valid_transition(VmState::Paused).is_err());
            }
            VmState::Running => {
                // Check the transitions from Running
                assert!(state.valid_transition(VmState::Created).is_err());
                assert!(state.valid_transition(VmState::Running).is_err());
                assert!(state.valid_transition(VmState::Shutdown).is_ok());
                assert!(state.valid_transition(VmState::Paused).is_ok());
            }
            VmState::Shutdown => {
                // Check the transitions from Shutdown
                assert!(state.valid_transition(VmState::Created).is_err());
                assert!(state.valid_transition(VmState::Running).is_ok());
                assert!(state.valid_transition(VmState::Shutdown).is_err());
                assert!(state.valid_transition(VmState::Paused).is_err());
            }
            VmState::Paused => {
                // Check the transitions from Paused
                assert!(state.valid_transition(VmState::Created).is_err());
                assert!(state.valid_transition(VmState::Running).is_ok());
                assert!(state.valid_transition(VmState::Shutdown).is_ok());
                assert!(state.valid_transition(VmState::Paused).is_err());
            }
        }
    }

    #[test]
    fn test_vm_created_transitions() {
        test_vm_state_transitions(VmState::Created);
    }

    #[test]
    fn test_vm_running_transitions() {
        test_vm_state_transitions(VmState::Running);
    }

    #[test]
    fn test_vm_shutdown_transitions() {
        test_vm_state_transitions(VmState::Shutdown);
    }

    #[test]
    fn test_vm_paused_transitions() {
        test_vm_state_transitions(VmState::Paused);
    }
}
#[allow(unused)]
/// Minimal standalone KVM smoke test: runs a few real-mode instructions
/// that compute 2 + 3, write the ASCII result ('5') and a newline to
/// port 0x3f8, then halt.
///
/// NOTE(review): the run loop below never breaks — even on `Hlt` — so
/// this helper only returns if `vcpu_fd.run()` panics.
pub fn test_vm() {
    // This example based on https://lwn.net/Articles/658511/
    let code = [
        0xba, 0xf8, 0x03, /* mov $0x3f8, %dx */
        0x00, 0xd8, /* add %bl, %al */
        0x04, b'0', /* add $'0', %al */
        0xee, /* out %al, (%dx) */
        0xb0, b'\n', /* mov $'\n', %al */
        0xee, /* out %al, (%dx) */
        0xf4, /* hlt */
    ];

    // One 4 KiB RAM region at guest address 0x1000.
    let mem_size = 0x1000;
    let load_addr = GuestAddress(0x1000);
    let mem = GuestMemoryMmap::new(&[(load_addr, mem_size)]).unwrap();

    let kvm = Kvm::new().expect("new KVM instance creation failed");
    let vm_fd = kvm.create_vm().expect("new VM fd creation failed");

    // Register the region with KVM.
    mem.with_regions(|index, region| {
        let mem_region = kvm_userspace_memory_region {
            slot: index as u32,
            guest_phys_addr: region.start_addr().raw_value(),
            memory_size: region.len() as u64,
            userspace_addr: region.as_ptr() as u64,
            flags: 0,
        };

        // Safe because the guest regions are guaranteed not to overlap.
        unsafe { vm_fd.set_user_memory_region(mem_region) }
    })
    .expect("Cannot configure guest memory");
    mem.write_slice(&code, load_addr)
        .expect("Writing code to memory failed");

    let vcpu_fd = vm_fd.create_vcpu(0).expect("new VcpuFd failed");

    // Flat real-mode segments starting at 0.
    let mut vcpu_sregs = vcpu_fd.get_sregs().expect("get sregs failed");
    vcpu_sregs.cs.base = 0;
    vcpu_sregs.cs.selector = 0;
    vcpu_fd.set_sregs(&vcpu_sregs).expect("set sregs failed");

    // rip points at the code; rax/rbx are the two addends.
    let mut vcpu_regs = vcpu_fd.get_regs().expect("get regs failed");
    vcpu_regs.rip = 0x1000;
    vcpu_regs.rax = 2;
    vcpu_regs.rbx = 3;
    vcpu_regs.rflags = 2;
    vcpu_fd.set_regs(&vcpu_regs).expect("set regs failed");

    // Dispatch on every VM exit; only port I/O and HLT are interesting here.
    loop {
        match vcpu_fd.run().expect("run failed") {
            VcpuExit::IoIn(addr, data) => {
                println!(
                    "IO in -- addr: {:#x} data [{:?}]",
                    addr,
                    str::from_utf8(&data).unwrap()
                );
            }
            VcpuExit::IoOut(addr, data) => {
                println!(
                    "IO out -- addr: {:#x} data [{:?}]",
                    addr,
                    str::from_utf8(&data).unwrap()
                );
            }
            VcpuExit::MmioRead(_addr, _data) => {}
            VcpuExit::MmioWrite(_addr, _data) => {}
            VcpuExit::Unknown => {}
            VcpuExit::Exception => {}
            VcpuExit::Hypercall => {}
            VcpuExit::Debug => {}
            VcpuExit::Hlt => {
                println!("HLT");
            }
            VcpuExit::IrqWindowOpen => {}
            VcpuExit::Shutdown => {}
            VcpuExit::FailEntry => {}
            VcpuExit::Intr => {}
            VcpuExit::SetTpr => {}
            VcpuExit::TprAccess => {}
            VcpuExit::S390Sieic => {}
            VcpuExit::S390Reset => {}
            VcpuExit::Dcr => {}
            VcpuExit::Nmi => {}
            VcpuExit::InternalError => {}
            VcpuExit::Osi => {}
            VcpuExit::PaprHcall => {}
            VcpuExit::S390Ucontrol => {}
            VcpuExit::Watchdog => {}
            VcpuExit::S390Tsch => {}
            VcpuExit::Epr => {}
            VcpuExit::SystemEvent => {}
            VcpuExit::S390Stsi => {}
            VcpuExit::IoapicEoi(_vector) => {}
            VcpuExit::Hyperv => {}
        }
        // r => panic!("unexpected exit reason: {:?}", r),
    }
}
|
#[doc = "Register `SWIER2` reader"]
pub type R = crate::R<SWIER2_SPEC>;
#[doc = "Register `SWIER2` writer"]
pub type W = crate::W<SWIER2_SPEC>;
#[doc = "Field `SWI34` reader - Software interrupt on event"]
pub type SWI34_R = crate::BitReader<SWI34W_A>;
#[doc = "Software interrupt on event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SWI34W_A {
    #[doc = "1: Generates an interrupt request"]
    Pend = 1,
}
impl From<SWI34W_A> for bool {
    #[inline(always)]
    fn from(variant: SWI34W_A) -> Self {
        // The only variant, `Pend`, has discriminant 1, so this is
        // always `true` in practice.
        variant as u8 != 0
    }
}
impl SWI34_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SWI34W_A> {
        // The field only has an enumerated value for the set state.
        if self.bits {
            Some(SWI34W_A::Pend)
        } else {
            None
        }
    }
    #[doc = "Generates an interrupt request"]
    #[inline(always)]
    pub fn is_pend(&self) -> bool {
        *self == SWI34W_A::Pend
    }
}
#[doc = "Field `SWI34` writer - Software interrupt on event"]
pub type SWI34_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SWI34W_A>;
impl<'a, REG, const O: u8> SWI34_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Generates an interrupt request"]
    #[inline(always)]
    pub fn pend(self) -> &'a mut crate::W<REG> {
        self.variant(SWI34W_A::Pend)
    }
}
// SWI40/SWI41/SWI45 are identical single-bit fields, so they reuse the
// SWI34 reader/writer types (the bit offset is supplied at the use site).
#[doc = "Field `SWI40` reader - Software interrupt on event"]
pub use SWI34_R as SWI40_R;
#[doc = "Field `SWI41` reader - Software interrupt on event"]
pub use SWI34_R as SWI41_R;
#[doc = "Field `SWI45` reader - Software interrupt on event 45"]
pub use SWI34_R as SWI45_R;
#[doc = "Field `SWI40` writer - Software interrupt on event"]
pub use SWI34_W as SWI40_W;
#[doc = "Field `SWI41` writer - Software interrupt on event"]
pub use SWI34_W as SWI41_W;
#[doc = "Field `SWI45` writer - Software interrupt on event 45"]
pub use SWI34_W as SWI45_W;
// Read accessors: each extracts one software-interrupt bit from the
// register value at the bit position stated in its doc attribute.
impl R {
    #[doc = "Bit 2 - Software interrupt on event"]
    #[inline(always)]
    pub fn swi34(&self) -> SWI34_R {
        SWI34_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 8 - Software interrupt on event"]
    #[inline(always)]
    pub fn swi40(&self) -> SWI40_R {
        SWI40_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Software interrupt on event"]
    #[inline(always)]
    pub fn swi41(&self) -> SWI41_R {
        SWI41_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 13 - Software interrupt on event 45"]
    #[inline(always)]
    pub fn swi45(&self) -> SWI45_R {
        SWI45_R::new(((self.bits >> 13) & 1) != 0)
    }
}
// Write accessors: each returns a typed single-bit writer at the bit
// position stated in its doc attribute.
impl W {
    #[doc = "Bit 2 - Software interrupt on event"]
    #[inline(always)]
    #[must_use]
    pub fn swi34(&mut self) -> SWI34_W<SWIER2_SPEC, 2> {
        SWI34_W::new(self)
    }
    #[doc = "Bit 8 - Software interrupt on event"]
    #[inline(always)]
    #[must_use]
    pub fn swi40(&mut self) -> SWI40_W<SWIER2_SPEC, 8> {
        SWI40_W::new(self)
    }
    #[doc = "Bit 9 - Software interrupt on event"]
    #[inline(always)]
    #[must_use]
    pub fn swi41(&mut self) -> SWI41_W<SWIER2_SPEC, 9> {
        SWI41_W::new(self)
    }
    #[doc = "Bit 13 - Software interrupt on event 45"]
    #[inline(always)]
    #[must_use]
    pub fn swi45(&mut self) -> SWI45_W<SWIER2_SPEC, 13> {
        SWI45_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe: bypasses the typed field writers; the caller must
        // ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "software interrupt event register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`swier2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`swier2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SWIER2_SPEC;
impl crate::RegisterSpec for SWIER2_SPEC {
    // The register is accessed as a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`swier2::R`](R) reader structure"]
impl crate::Readable for SWIER2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`swier2::W`](W) writer structure"]
impl crate::Writable for SWIER2_SPEC {
    // No bits have special clear-on-write-zero/one semantics.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SWIER2 to value 0"]
impl crate::Resettable for SWIER2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/*!
Type-level representations of a chain of field accesses (`FieldPath`),
and multiple field accesses (`FieldPathSet`).
*/
#![allow(non_snake_case, non_camel_case_types)]
use core_extensions::MarkerType;
use std_::marker::PhantomData;
use crate::type_level::_private::TString;
use crate::type_level::collection_traits::{
Append,Append_,
PushBack,PushBack_,
ToTList,ToTList_,
ToTString_,
};
////////////////////////////////////////////////////////////////////////////////

mod sealed{
    // Sealing trait: prevents downstream crates from implementing the
    // path marker traits below.
    pub trait Sealed{}
}
use self::sealed::Sealed;

// The only path-like types allowed to implement the sealed traits.
impl<T> Sealed for TString<T>{}
impl<T> Sealed for FieldPath<T>{}
impl<T,U> Sealed for FieldPathSet<T,U>{}

/// A marker trait only implemented by FieldPath
pub trait IsFieldPath:Sealed+Copy{}
impl<T> IsFieldPath for FieldPath<T>{}

/// A marker trait only implemented by FieldPathSet
pub trait IsFieldPathSet:Sealed+Copy{
    /// Whether the paths in the set can contain duplicate paths.
    type PathUniqueness;
}
impl<T,U> IsFieldPathSet for FieldPathSet<T,U>{
    type PathUniqueness=U;
}
////////////////////////////////////////////////////////////////////////////////
impl<T> TString<T>{
    /// Constructs the TString.
    pub const NEW:Self=TString(PhantomData);

    /// Converts this type-level string into a single-element `FieldPath`.
    #[inline(always)]
    pub const fn to_path(self)->FieldPath<(TString<T>,)>{
        FieldPath::NEW
    }

    /// Converts this type-level string into a single-path `FieldPathSet`.
    #[inline(always)]
    pub const fn to_set(self)->FieldPathSet<(FieldPath<(TString<T>,)>,),UniquePaths>{
        FieldPath::NEW.to_set()
    }
}

impl<T> Copy for TString<T>{}
impl<T> Clone for TString<T>{
    #[inline(always)]
    fn clone(&self)->Self{
        *self
    }
}

// TString only wraps PhantomData, so it is an inhabited zero-sized type.
unsafe impl<T> MarkerType for TString<T>{}

// A TString is already its own type-level string representation.
impl<T> ToTString_ for TString<T>{
    type Output=Self;
}
////////////////////////////////////////////////////////////////////////////////
/// A type-level representation of a chain of field accesses,like `.a.b.c.d`.
///
pub struct FieldPath<T>(PhantomData<T>);

/// A FieldPath for accessing a single `Str` field.
#[doc(hidden)]
pub type FieldPath1<Str>=FieldPath<(Str,)>;

impl<T> Copy for FieldPath<T>{}
impl<T> Clone for FieldPath<T>{
    #[inline(always)]
    fn clone(&self)->Self{
        *self
    }
}

impl<T> FieldPath<T>{
    /// The singleton value of this zero-sized path type.
    pub const NEW:Self=FieldPath(PhantomData);

    /// Constructs this path value.
    #[inline(always)]
    pub const fn new()->FieldPath<T>{
        FieldPath(PhantomData)
    }
}

// FieldPath only wraps PhantomData, so it is an inhabited zero-sized type.
unsafe impl<T> MarkerType for FieldPath<T>{}

// A single-element path converts back into its type-level string.
#[doc(hidden)]
impl<S> ToTString_ for FieldPath<(TString<S>,)>{
    type Output=TString<S>;
}

impl<T> ToTList_ for FieldPath<T>
where
    T:ToTList_
{
    type Output=ToTList<T>;
}

// Appends a single type-level string to the path.
#[doc(hidden)]
impl<T,S> PushBack_<TString<S>> for FieldPath<T>
where
    T:PushBack_<TString<S>>
{
    type Output=FieldPath<PushBack<T,TString<S>>>;
}

// Appends a single-element FieldPath to the path.
impl<T,S> PushBack_<FieldPath<(S,)>> for FieldPath<T>
where
    T:PushBack_<S>
{
    type Output=FieldPath<PushBack<T,S>>;
}

// Concatenates two paths.
impl<T,U> Append_<FieldPath<U>> for FieldPath<T>
where
    T:Append_<U>
{
    type Output=FieldPath<Append<T,U>>;
}
impl<T> FieldPath<T>{
    /// Constructs a new FieldPath with `_other` appended at the end.
    ///
    /// Currently this can only be a single element FieldPath
    /// (ie:`fp!(a)`/`fp!(foo)`/`fp!(bar)`)
    #[inline(always)]
    pub fn push<U,V>(self,_other:U)->FieldPath<V>
    where
        Self:PushBack_<U,Output=FieldPath<V>>
    {
        MarkerType::MTVAL
    }

    /// Constructs a new FieldPath with `_other` appended at the end.
    #[inline(always)]
    pub fn append<U>(self,_other:FieldPath<U>)->FieldPath<Append<T,U>>
    where
        T:Append_<U>
    {
        MarkerType::MTVAL
    }

    /// Converts this `FieldPath` to a `FieldPathSet`.
    #[inline(always)]
    pub const fn to_set(self)->FieldPathSet<(Self,),UniquePaths>{
        // Safety: a set containing exactly one path trivially has no
        // duplicate or prefixed paths, so `UniquePaths` is upheld.
        unsafe{
            FieldPathSet::new_unchecked()
        }
    }
}

impl<S> FieldPath<(TString<S>,)>{
    /// Recovers the type-level string of a single-element path.
    #[doc(hidden)]
    pub const fn to_tstr(self)->TString<S>{
        MarkerType::MTVAL
    }
}
////////////////////////////////////////////////////////////////////////////////
/// A list of `FieldPath`s whose uniqueness is determined by `U`.
///
/// If `U=UniquePaths` then all the `FieldPath`s are unique,
/// and this can be passed to `GetFieldExt::fields_mut`,
/// since you can't have aliasing mutable references to the same field.
///
/// If `U=AliasedPaths` then there might be repeated `FieldPath`s,
/// and this cannot be passed to `GetFieldExt::fields_mut`,
/// because it might borrow the same field mutably twice.
///
pub struct FieldPathSet<T,U>(PhantomData<(T,U)>);

/// A marker type indicating that FieldPathSet contains unique paths,
/// in which no path is a prefix of any other path in the set,
/// this is required to call `GetFieldExt::fields_mut`.
pub struct UniquePaths;

/// A marker type indicating that FieldPathSet may not contain unique `FieldPath`s,
/// which means that its not safe to pass the FieldPathSet to `GetFieldExt::fields_mut`
/// (this is why it requires `FieldPathSet<_,UniquePaths>`).
pub struct AliasedPaths;

impl<T,U> Copy for FieldPathSet<T,U>{}
impl<T,U> Clone for FieldPathSet<T,U>{
    #[inline(always)]
    fn clone(&self)->Self{
        *self
    }
}

// `MarkerType` is not implemented for `FieldPathSet<T,UniquePaths>`
// because `FieldPathSet<T,UniquePaths>` ought only be constructible
// by satisfying the safety requirements of `FieldPathSet::<T,UniquePaths>::new`,
// which aren't cheaply enforceable on the type level.
//
// impl<T> !MarkerType for FieldPathSet<T,UniquePaths>{}
unsafe impl<T> MarkerType for FieldPathSet<T,AliasedPaths>{}

impl<T,U> FieldPathSet<T,U>{
    // The constructor function used by proc macros,
    // which take responsibility for the uniqueness invariant.
    #[doc(hidden)]
    #[inline(always)]
    pub const unsafe fn new_unchecked()->Self{
        FieldPathSet(PhantomData)
    }
}
impl<T> FieldPathSet<T,UniquePaths>{
    /// Constructs a `FieldPathSet`.
    ///
    /// # Safety
    ///
    /// `T` must be a tuple of `FieldPaths<_>`s,
    /// where none of them is a subset of each other.
    #[inline(always)]
    pub const unsafe fn new()->Self{
        FieldPathSet(PhantomData)
    }

    /// Converts a `FieldPathSet<T,UniquePaths>` to a `FieldPathSet<T,AliasedPaths>`
    ///
    /// This direction is always safe: an aliased set makes no uniqueness claim.
    #[inline(always)]
    pub const fn downgrade(self)->FieldPathSet<T,AliasedPaths>{
        FieldPathSet(PhantomData)
    }
}

impl<T> FieldPathSet<T,AliasedPaths>{
    /// Constructs a `FieldPathSet`.
    #[inline(always)]
    pub const fn new()->Self{
        FieldPathSet(PhantomData)
    }

    /// Converts a `FieldPathSet<T,AliasedPaths>` to a `FieldPathSet<T,UniquePaths>`
    ///
    /// # Safety
    ///
    /// You must ensure that all the `FieldPath`s are unique,
    /// there must be no `FieldPath` that is a prefix of any other `FieldPath`.
    #[inline(always)]
    pub const unsafe fn upgrade_unchecked(self)->FieldPathSet<T,UniquePaths>{
        FieldPathSet(PhantomData)
    }
}

impl<T,U> FieldPathSet<(FieldPath<T>,),U> {
    /// Converts a `FieldPathSet` containing a single `FieldPath`
    /// into that `FieldPath`.
    #[inline(always)]
    pub const fn to_path(self)->FieldPath<T>{
        MarkerType::MTVAL
    }
}
impl<T,U> FieldPathSet<T,U>{
    /// Constructs a new FieldPathSet with `_other` appended at the end.
    ///
    /// Currently this accepts:
    ///
    /// - A FieldPath
    /// (ie:`fp!(a)`/`fp!(foo)`/`fp!(bar)`)
    ///
    /// - A FieldPathSet containing a single FieldPath
    /// (ie:`fp!(a).to_set()`/`fp!(foo).to_set()`/`fp!(bar).to_set()`)
    ///
    /// The result is always `AliasedPaths`: the pushed path may duplicate
    /// one already in the set.
    #[inline(always)]
    pub fn push<O,Out>(self,_other:O)->FieldPathSet<Out,AliasedPaths>
    where
        Self:PushBack_<O,Output=FieldPathSet<Out,AliasedPaths>>
    {
        MarkerType::MTVAL
    }

    /// Constructs a new FieldPathSet with the `_other` FieldPathSet
    /// appended at the end.
    #[inline(always)]
    pub fn append<T2,U2>(
        self,
        _other:FieldPathSet<T2,U2>
    )->FieldPathSet<Append<T,T2>,AliasedPaths>
    where
        T:Append_<T2>
    {
        MarkerType::MTVAL
    }
}

impl<T,U> ToTList_ for FieldPathSet<T,U>
where
    T:ToTList_
{
    type Output=ToTList<T>;
}

// Pushing a path onto a set discards the uniqueness claim.
impl<T,U,P> PushBack_<FieldPath<P>> for FieldPathSet<T,U>
where
    T:PushBack_<FieldPath<P>>
{
    type Output=FieldPathSet<PushBack<T,FieldPath<P>>,AliasedPaths>;
}

// Pushing a single-path set onto a set discards the uniqueness claim.
impl<T,U,P,U2> PushBack_<FieldPathSet<(P,),U2>> for FieldPathSet<T,U>
where
    T:PushBack_<P>
{
    type Output=FieldPathSet<PushBack<T,P>,AliasedPaths>;
}

// Concatenating two sets discards the uniqueness claim.
impl<T,T2,U,U2> Append_<FieldPathSet<T2,U2>> for FieldPathSet<T,U>
where
    T:Append_<T2>
{
    type Output=FieldPathSet<Append<T,T2>,AliasedPaths>;
}
////////////////////////////////////////////////////////////////////////////////
// A single-element path converts into its type-level string and back.
#[doc(hidden)]
impl<S> From<FieldPath<(TString<S>,)>> for TString<S>{
    #[inline(always)]
    fn from(_this:FieldPath<(TString<S>,)>)->Self{
        MarkerType::MTVAL
    }
}

#[doc(hidden)]
impl<S> From<TString<S>> for FieldPath<(TString<S>,)>{
    #[inline(always)]
    fn from(_this:TString<S>)->Self{
        MarkerType::MTVAL
    }
}

// A single-path set converts into its only path, regardless of uniqueness.
impl<T,U> From<FieldPathSet<(FieldPath<T>,),U>> for FieldPath<T>{
    #[inline(always)]
    fn from(_this:FieldPathSet<(FieldPath<T>,),U>)->Self{
        MarkerType::MTVAL
    }
}

// A single path is trivially a unique set.
impl<P> From<FieldPath<P>> for FieldPathSet<(FieldPath<P>,),UniquePaths>{
    #[inline(always)]
    fn from(this:FieldPath<P>)->Self{
        this.to_set()
    }
}
////////////////////////////////////////////////////////////////////////////////
#[cfg(test)]
mod tests; |
mod manager;
pub use self::manager::Manager;
use crate::api::Api;
use crate::http::client::Client;
use std::ops::{Deref, DerefMut};
/// A connection to one host, bundling the raw HTTP client with the typed
/// API layer built on top of it (exposed via `Deref`/`DerefMut`).
pub struct Connection {
// Public, so callers can bypass the `Api` layer and issue raw requests.
pub client: Client,
api: Api,
}
impl Connection {
    /// Opens a connection to `host`: builds the HTTP client first, then
    /// layers the API wrapper over it.
    pub fn new(host: &str) -> Connection {
        let client = Client::new(host);
        Connection {
            api: Api::new_with_client(&client),
            client,
        }
    }
}
/// Lets `Api` methods be called directly on a `Connection`.
impl Deref for Connection {
    type Target = Api;

    fn deref(&self) -> &Self::Target {
        &self.api
    }
}
impl DerefMut for Connection {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.api
}
}
|
use std::process::Command; // Run programs
use assert_cmd::prelude::*; // Add methods on commands
use predicates::prelude::*; // Used for writing assertions
// Fix: `Box<std::error::Error>` uses a bare trait object, which is
// deprecated since Rust 2018 and a hard error in edition 2021; every
// return type now spells `Box<dyn std::error::Error>`.

/// `-h` prints the usage banner and the `--eval` option description.
#[test]
fn command_help_shows_usage_and_options() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.arg("-h")
        .assert()
        .success()
        .stdout(predicate::str::contains("Simple Mixed Numbers Calculator"))
        .stdout(predicate::str::contains("-e, --eval <expression> The expression to evaluate"));
    Ok(())
}

/// The short `-e` flag evaluates a single expression.
#[test]
fn run_with_short_eval_arg() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.arg("-e")
        .arg("1/2 * 3_3/4");
    cmd.assert()
        .success()
        .stdout(predicate::str::contains("= 1_7/8"));
    Ok(())
}

/// The long `--eval` flag behaves the same as `-e`.
#[test]
fn run_with_long_eval_arg() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.arg("--eval")
        .arg("2_3/8 + 9/8");
    cmd.assert()
        .success()
        .stdout(predicate::str::contains("= 3_1/2"));
    Ok(())
}

/// A malformed expression exits non-zero with a parse error on stderr.
#[test]
fn run_with_unparseable_expression_prints_error() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.arg("-e")
        .arg("1 / 2 - 3_3/4");
    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("Error: Unparseable operation!"));
    Ok(())
}

/// A zero denominator exits non-zero with a dedicated error on stderr.
#[test]
fn run_with_fraction_with_zero_denominator_prints_error() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.arg("-e")
        .arg("1/0 / 3_3/4");
    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("Error: Fraction with zero denominator!"));
    Ok(())
}

/// With no arguments the binary enters REPL mode and evaluates each line
/// from stdin until 'q'.
#[test]
fn run_with_no_args_start_repl_mode() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.with_stdin()
        .buffer("1/2 * 3_3/4\n2_3/8 + 9/8\nq")
        .assert()
        .success()
        .stdout(predicate::str::contains("Starting repl mode. Type 'q' to quit"))
        .stdout(predicate::str::contains("= 1_7/8"))
        .stdout(predicate::str::contains("= 3_1/2"));
    Ok(())
}

/// REPL mode reports per-line errors on stderr but keeps running and still
/// exits successfully.
#[test]
fn run_repl_mode_with_errors() -> Result<(), Box<dyn std::error::Error>> {
    let mut cmd = Command::main_binary()?;
    cmd.with_stdin()
        .buffer("5/2\n1/2 * 3_3/4\n2_1/2 + 3_2/0\n2_3/8 + 9/8\nq")
        .assert()
        .success()
        .stdout(predicate::str::contains("Starting repl mode. Type 'q' to quit"))
        .stderr(predicate::str::contains("Error: Unparseable operation!"))
        .stdout(predicate::str::contains("= 1_7/8"))
        .stderr(predicate::str::contains("Error: Fraction with zero denominator!"))
        .stdout(predicate::str::contains("= 3_1/2"));
    Ok(())
}
|
/// Solves the Day 09 Part 1 puzzle with respect to the given input.
///
/// Prints the summed risk level (height + 1) of every low point — a cell
/// strictly lower than all of its in-bounds orthogonal neighbours.
pub fn part_1(input: String) {
    let (heights, w, h) = parse_input(input);
    let mut risk_sum = 0;
    for row in 0..h {
        for col in 0..w {
            let cell = heights[row][col];
            // Gather the heights of every in-bounds orthogonal neighbour.
            let mut adjacent = Vec::<u32>::new();
            if row > 0 {
                adjacent.push(heights[row - 1][col]);
            }
            if row + 1 < h {
                adjacent.push(heights[row + 1][col]);
            }
            if col > 0 {
                adjacent.push(heights[row][col - 1]);
            }
            if col + 1 < w {
                adjacent.push(heights[row][col + 1]);
            }
            // A low point is strictly lower than every neighbour.
            if adjacent.iter().all(|&n| n > cell) {
                risk_sum += cell + 1;
            }
        }
    }
    println!("{}", risk_sum);
}
/// Solves the Day 09 Part 2 puzzle with respect to the given input.
///
/// Flood-fills every basin (maximal region of cells with height < 9) and
/// prints the product of the three largest basin sizes.
pub fn part_2(input: String) {
    let (heights, w, h) = parse_input(input);
    let mut visited = vec![vec![false; w]; h];
    let mut basins = Vec::<usize>::new();
    for r in 0..h {
        for c in 0..w {
            if !visited[r][c] {
                let basin = dfs(&heights, &mut visited, h, w, r, c);
                if basin > 0 {
                    basins.push(basin);
                }
            }
        }
    }
    // Single descending sort instead of sort + reverse; unstable is fine
    // (and allocation-free) since equal sizes are interchangeable.
    basins.sort_unstable_by(|a, b| b.cmp(a));
    // The puzzle guarantees at least three basins; indexing panics otherwise.
    let product = basins[0] * basins[1] * basins[2];
    println!("{}", product);
}
/// Parses the heightmap (along with its width and height) from the given input.
///
/// Returns `(heights, width, height)`. Empty input yields an empty map with
/// zero dimensions instead of panicking (the old code indexed `heights[0]`).
///
/// Panics if any character is not a decimal digit.
fn parse_input(input: String) -> (Vec<Vec<u32>>, usize, usize) {
    let heights: Vec<Vec<u32>> = input
        .lines()
        .map(|line| line.chars().map(|c| c.to_digit(10).unwrap()).collect())
        .collect();
    let h = heights.len();
    let w = heights.first().map_or(0, |row| row.len());
    (heights, w, h)
}
/// Converts a single decimal digit character into its numeric value.
///
/// Panics when `digit` is not an ASCII digit `0`-`9`.
fn char_to_u32(digit: char) -> u32 {
    digit.to_digit(10).unwrap()
}
/// Returns the remaining area of the basin containing location (r, c).
///
/// Depth-first flood fill: cells of height 9 are basin boundaries and
/// already-visited cells count once, so repeated calls return 0.
fn dfs(
    heights: &Vec<Vec<u32>>,
    visited: &mut Vec<Vec<bool>>,
    h: usize,
    w: usize,
    r: usize,
    c: usize,
) -> usize {
    // Boundary ridge or already counted: contributes nothing.
    if visited[r][c] || heights[r][c] == 9 {
        return 0;
    }
    visited[r][c] = true;
    // Count this cell, then expand into each in-bounds neighbour.
    let mut total = 1;
    if r + 1 < h {
        total += dfs(heights, visited, h, w, r + 1, c);
    }
    if c + 1 < w {
        total += dfs(heights, visited, h, w, r, c + 1);
    }
    if r > 0 {
        total += dfs(heights, visited, h, w, r - 1, c);
    }
    if c > 0 {
        total += dfs(heights, visited, h, w, r, c - 1);
    }
    total
}
|
// svd2rust-generated register reader for COUNTR; kept byte-identical since
// this file is machine-generated from the SVD description.
#[doc = "Register `COUNTR` reader"]
pub type R = crate::R<COUNTR_SPEC>;
#[doc = "Field `COUNT` reader - COUNT"]
pub type COUNT_R = crate::FieldReader<u32>;
impl R {
// Extracts the full 32-bit COUNT field from the raw register value.
#[doc = "Bits 0:31 - COUNT"]
#[inline(always)]
pub fn count(&self) -> COUNT_R {
COUNT_R::new(self.bits)
}
}
#[doc = "TAMP monotonic counter register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`countr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct COUNTR_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for COUNTR_SPEC {
type Ux = u32;
}
// Read-only: only `Readable` is implemented, there is no `Writable` impl here.
#[doc = "`read()` method returns [`countr::R`](R) reader structure"]
impl crate::Readable for COUNTR_SPEC {}
#[doc = "`reset()` method sets COUNTR to value 0"]
impl crate::Resettable for COUNTR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
mod element;
mod html_entity;
pub use element::*;
pub use html_entity::*;
|
use crate::days::day6::{default_input, parse_input};
/// Entry point for Day 6 part 1: prints the summed per-group distinct
/// "yes"-answer count for the default puzzle input.
pub fn run() {
println!("{}", customs_groups_str(default_input()).unwrap())
}
/// Parses the raw puzzle text into groups (via `parse_input`, defined in the
/// day6 module) and delegates to `customs_groups`. As written the result is
/// always `Ok` because `customs_groups` never returns `Err`.
pub fn customs_groups_str(input : &str) -> Result<usize, ()> {
customs_groups(parse_input(input))
}
/// For each group, counts the distinct questions (letters a-z) that anyone
/// in the group answered "yes" to, and returns the sum over all groups.
///
/// Assumes every answer character is a lowercase ASCII letter; anything
/// else makes the index computation panic or hit the wrong slot.
pub fn customs_groups(groups : Vec<Vec<&str>>) -> Result<usize, ()> {
    let total = groups
        .iter()
        .map(|group| {
            // One flag per letter of the alphabet.
            let mut seen = [false; 26];
            group
                .iter()
                .flat_map(|person| person.chars())
                .for_each(|answer| seen[(answer as u8 - b'a') as usize] = true);
            seen.iter().filter(|&&answered| answered).count()
        })
        .sum();
    Ok(total)
}
#[cfg(test)]
mod tests {
use super::*;
// Pins the accepted part-1 answer for the committed puzzle input.
#[test]
fn part1_answer() {
assert_eq!(6612, customs_groups_str(default_input()).unwrap())
} |
use std::os::raw::c_ulong;
use pyo3::prelude::*;
// Link directly against the system zlib ("libz") for this one symbol.
#[link(name = "z")]
extern "C" {
// zlib's `zlibCompileFlags`-style flags accessor; declared by hand here —
// presumably because the wrapper crate does not expose it (TODO confirm).
fn gzflags() -> c_ulong;
}
/// Adds two non-negative integers, touching both linked zlib entry points
/// so the native libraries cannot be dropped by the linker.
#[pyfunction]
fn add(x: usize, y: usize) -> usize {
    // SAFETY: zlibVersion takes no arguments and returns a pointer to a
    // static version string; it has no preconditions.
    let _version = unsafe { libz_sys::zlibVersion() };
    // SAFETY: gzflags takes no arguments and only reports compile-time
    // flags; it has no preconditions.
    let _flags = unsafe { gzflags() };
    // Fix: previous code bound `let sum = x + y;` only to return it.
    x + y
}
/// Python module initializer: registers the `add` function with the module.
#[pymodule]
fn lib_with_disallowed_lib(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_wrapped(wrap_pyfunction!(add))?;
Ok(())
}
|
#[macro_use]
extern crate may;
use may::coroutine::yield_now;
// Demonstrates nested coroutines: a parent coroutine spawns 100 children,
// each yielding once before finishing; the parent joins them all.
fn main() {
// `go!` (from the `may` crate) spawns a coroutine and returns a join handle.
let h = go!(move || {
println!("hi, I'm parent");
// Spawn 100 children, keeping their handles so we can join them below.
let v = (0..100)
.map(|i| {
go!(move || {
println!("hi, I'm child{i}");
// Yield so sibling coroutines get scheduled before we finish.
yield_now();
println!("bye from child{i}");
})
})
.collect::<Vec<_>>();
yield_now();
// wait child finish
for i in v {
i.join().unwrap();
}
println!("bye from parent");
});
h.join().unwrap();
}
|
use anyhow::{anyhow, Result};
use pasture_core::math::Alignable;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use std::{
collections::HashMap,
convert::TryFrom,
convert::TryInto,
io::{BufRead, Seek, SeekFrom, Write},
};
use super::{read_json_header, write_json_header};
/// A reference to data inside a FeatureTable binary body
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct FeatureTableDataReference {
// Offset of the referenced data from the start of the binary body.
#[serde(rename = "byteOffset")]
pub byte_offset: usize,
// Optional component type name (e.g. "FLOAT" in the test fixture below);
// omitted from the JSON entirely when `None`.
#[serde(rename = "componentType", skip_serializing_if = "Option::is_none")]
pub component_type: Option<String>,
}
/// Different possible values for an entry in a 3D Tiles FeatureTable
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum FeatureTableValue {
// A scalar JSON value stored inline in the header.
SingleValue(serde_json::Value),
// A JSON array stored inline in the header.
Array(Vec<serde_json::Value>),
// A reference into the binary body (an object carrying "byteOffset").
DataReference(FeatureTableDataReference),
}
impl TryFrom<Value> for FeatureTableValue {
type Error = anyhow::Error;
fn try_from(val: Value) -> Result<Self> {
if val.is_array() {
let as_array = val.as_array().unwrap();
return Ok(FeatureTableValue::Array(as_array.clone()));
}
if val.is_object() {
let as_obj = val.as_object().unwrap();
// Object can mean single-value entry OR reference to binary data. The latter is identified by the
// presence of a 'byteOffset' key
if as_obj.contains_key("byteOffset") {
let data_reference = serde_json::from_value::<FeatureTableDataReference>(val)?;
return Ok(FeatureTableValue::DataReference(data_reference));
} else {
return Ok(FeatureTableValue::SingleValue(val));
}
}
Ok(FeatureTableValue::SingleValue(val))
}
}
impl TryFrom<&Value> for FeatureTableValue {
type Error = anyhow::Error;
fn try_from(val: &Value) -> Result<Self> {
if val.is_array() {
let as_array = val.as_array().unwrap();
return Ok(FeatureTableValue::Array(as_array.clone()));
}
if val.is_object() {
let as_obj = val.as_object().unwrap();
// Object can mean single-value entry OR reference to binary data. The latter is identified by the
// presence of a 'byteOffset' key
if as_obj.contains_key("byteOffset") {
let data_reference =
serde_json::from_value::<FeatureTableDataReference>(val.clone())?;
return Ok(FeatureTableValue::DataReference(data_reference));
} else {
return Ok(FeatureTableValue::SingleValue(val.clone()));
}
}
Ok(FeatureTableValue::SingleValue(val.clone()))
}
}
/// Converts a FeatureTable entry back into a raw JSON value.
///
/// Fix: implements `From` instead of `Into` (clippy `from_over_into`); the
/// standard library's blanket impl still provides `Into<Value>` for
/// callers, so this is fully backward compatible.
impl From<FeatureTableValue> for Value {
    fn from(value: FeatureTableValue) -> Self {
        match value {
            FeatureTableValue::SingleValue(val) => val,
            FeatureTableValue::Array(arr) => Value::Array(arr),
            FeatureTableValue::DataReference(data_reference) => {
                serde_json::to_value(data_reference)
                    .expect("Could not convert FeatureTableDataReference to JSON Value")
            }
        }
    }
}
/// Borrowed counterpart: converts without consuming the entry, cloning the
/// stored JSON data.
///
/// Fix: implements `From` instead of `Into` (clippy `from_over_into`); the
/// blanket impl still provides `Into<Value>` for existing `.into()` callers.
impl From<&FeatureTableValue> for Value {
    fn from(value: &FeatureTableValue) -> Self {
        match value {
            FeatureTableValue::SingleValue(val) => val.clone(),
            FeatureTableValue::Array(arr) => Value::Array(arr.clone()),
            FeatureTableValue::DataReference(data_reference) => {
                serde_json::to_value(data_reference)
                    .expect("Could not convert FeatureTableDataReference to JSON Value")
            }
        }
    }
}
/// 3D Tiles feature table structure
pub type FeatureTableHeader = HashMap<String, FeatureTableValue>;
/// Deserialize a `FeatureTableHeader` from the given `reader`. If successful, returns the serialized header and the
/// `reader` will be at the start of the binary body of the 3D Tiles FeatureTable. See the [3D Tiles documentation](https://github.com/CesiumGS/3d-tiles/blob/master/specification/TileFormats/FeatureTable/README.md)
/// for more information. If this operation fails, the reader will be in an undefined state.
pub fn deser_feature_table_header<R: BufRead + Seek>(
mut reader: R,
feature_table_header_size: usize,
header_start_position_in_file: usize,
) -> Result<FeatureTableHeader> {
let feature_table_header_json = read_json_header(&mut reader, feature_table_header_size)?;
// Read the potential padding bytes so we end at an 8-byte boundary in the file. since the 'reader' can be a
// sub-reader that does not refer to the whole file, we need the start position of the header in the file as
// an extra parameter
let current_position_in_file = header_start_position_in_file + feature_table_header_size;
let padding_bytes = current_position_in_file.align_to(8) - current_position_in_file;
if padding_bytes > 0 {
reader.seek(SeekFrom::Current(padding_bytes as i64))?;
}
let feature_table_obj = feature_table_header_json
.as_object()
.ok_or(anyhow!("FeatureTable JSON header was no JSON object"))?;
// Convert the object to our `FeatureTableHeader` type
Ok(feature_table_obj
.iter()
.map(|(k, v)| -> Result<(String, FeatureTableValue)> {
let feature_table_value: FeatureTableValue = v.try_into()?;
Ok((k.clone(), feature_table_value))
})
.collect::<Result<HashMap<_, _>, _>>()?)
}
/// Serializes the given `FeatureTableHeader` to the given `writer`. If successful, the `writer` will be at the appropriate
/// position for writing the FeatureTable body (i.e. required padding spaces have been written as per the [3D Tiles documentation](https://github.com/CesiumGS/3d-tiles/blob/master/specification/TileFormats/FeatureTable/README.md)).
pub fn ser_feature_table_header<W: Write>(
mut writer: W,
feature_table_header: &FeatureTableHeader,
header_start_position_in_file: usize,
) -> Result<()> {
let header_as_map = feature_table_header
.iter()
.map(|(k, v)| -> (String, Value) { (k.clone(), v.into()) })
.collect::<Map<_, _>>();
let header_json_obj = Value::Object(header_as_map);
// FeatureTable header is the first thing written after the .pnts header
write_json_header(&mut writer, &header_json_obj, header_start_position_in_file)
}
#[cfg(test)]
mod tests {
use std::io::{BufReader, BufWriter, Cursor, SeekFrom};
use super::*;
use serde_json::json;
// Builds a header exercising all three entry kinds: single value, array,
// and binary-body reference.
fn dummy_feature_table_header() -> FeatureTableHeader {
let mut header = FeatureTableHeader::new();
header.insert(
"SINGLE_FIELD".into(),
FeatureTableValue::SingleValue(json!(23)),
);
header.insert(
"ARRAY_FIELD".into(),
FeatureTableValue::Array(vec![json!(1), json!(2), json!(3)]),
);
header.insert(
"REFERENCE_FIELD".into(),
FeatureTableValue::DataReference(FeatureTableDataReference {
byte_offset: 42,
component_type: Some("FLOAT".into()),
}),
);
header
}
// Round-trip: serialize, verify 8-byte alignment, deserialize, compare.
#[test]
fn test_3dtiles_feature_table_io() -> Result<()> {
let expected_header = dummy_feature_table_header();
let mut writer = BufWriter::new(Cursor::new(vec![]));
ser_feature_table_header(&mut writer, &expected_header, 0)?;
// Make sure that the header is written with padding bytes so that we are at an 8-byte boundary
let header_size_in_file = writer.seek(SeekFrom::Current(0))? as usize;
assert_eq!(header_size_in_file % 8, 0);
let mut cursor = writer.into_inner()?;
cursor.seek(SeekFrom::Start(0))?;
let mut reader = BufReader::new(cursor);
let actual_header = deser_feature_table_header(&mut reader, header_size_in_file, 0)?;
assert_eq!(expected_header, actual_header);
Ok(())
}
}
|
use super::KafkaConfig;
use std::collections::HashMap;
use std::ascii::AsciiExt;
use std::iter::FromIterator;
use std::time::Duration;
use std::fs::File;
//use std::io::prelude::*;
use std::result;
use kafka::consumer::{Consumer, FetchOffset, GroupOffsetStorage, Message, MessageSets};
use std::io::{Error, Read, Write};
use std::str::from_utf8;
/// Wraps a `kafka` crate consumer together with the settings it was built from.
struct KafkaConsumer {
// Broker configuration this consumer was created with.
kafka_config: KafkaConfig,
// Consumer-group name. NOTE(review): never read in the visible code.
consumer_name: String,
// NOTE(review): `Box` around a sized `Consumer` is unnecessary; kept as-is.
consumer: Box<Consumer>,
}
/// A single record fetched from Kafka, with key and value decoded as UTF-8.
struct KafkaConsumerMessage {
// Offset of the record within its partition.
pub offset: i64,
// NOTE(review): `Box<String>` double-indirects; a plain `String` would do.
pub key: Box<String>,
pub value: Box<String>,
}
impl KafkaConsumer {
    /// Creates a consumer subscribed to partitions 0 and 1 of `topic_name`
    /// on the given comma-separated broker list, committing offsets under
    /// `consumer_group_name`.
    ///
    /// Fix: the previous version parsed its arguments and then ignored
    /// them, always connecting to a hard-coded `localhost:9092` with group
    /// `my-group`; the parameters are now actually used, and the config is
    /// recorded instead of left empty.
    ///
    /// Panics if the consumer cannot be created.
    pub fn new(
        consumer_group_name: &str,
        topic_name: &str,
        brokers_with_port: &str,
    ) -> KafkaConsumer {
        let brokers: Vec<String> = brokers_with_port
            .split(',')
            .map(|host| host.to_string())
            .collect();
        let consumer = Consumer::from_hosts(brokers)
            .with_topic_partitions(topic_name.to_owned(), &[0, 1])
            .with_fallback_offset(FetchOffset::Earliest)
            .with_group(consumer_group_name.to_owned())
            .with_offset_storage(GroupOffsetStorage::Kafka)
            .create()
            .unwrap();
        KafkaConsumer {
            kafka_config: KafkaConfig {
                broker_with_ip: brokers_with_port.to_string(),
            },
            consumer_name: consumer_group_name.to_string(),
            consumer: Box::new(consumer),
        }
    }
    /// Polls the broker once and returns all fetched messages without
    /// committing their offsets; call `commit` to persist progress.
    ///
    /// Panics if polling fails or a key/value is not valid UTF-8.
    /// (The unused `'a` lifetime parameter was removed.)
    pub fn read_without_commit(&mut self) -> Vec<KafkaConsumerMessage> {
        let mut vec = Vec::new();
        for ms in self.consumer.poll().unwrap().iter() {
            for m in ms.messages() {
                let km = KafkaConsumerMessage {
                    offset: m.offset,
                    key: Box::new(from_utf8(m.key).unwrap().to_owned()),
                    value: Box::new(from_utf8(m.value).unwrap().to_owned()),
                };
                vec.push(km)
            }
            // Mark the message set as consumed locally (not yet committed).
            self.consumer.consume_messageset(ms);
        }
        vec
    }
    /// Commits everything consumed so far to the group's offset storage.
    pub fn commit(&mut self) {
        // Best-effort: any broker error is deliberately ignored here.
        self.consumer.commit_consumed();
    }
}
|
use std::collections::{HashMap, HashSet};
use std::io::{self};
// AoC 2015 day 19: part 1 counts distinct single-substitution molecules,
// part 2 greedily reduces the molecule back to "e" using reversed rules.
fn main() -> io::Result<()> {
// (file, expected part-1 answer, expected part-2 answer)
let files_results = vec![("test.txt", 7, 6), ("input.txt", 535, 212)];
for (f, result_1, result_2) in files_results.into_iter() {
println!("File: {}", f);
let file_content: Vec<String> = std::fs::read_to_string(f)?
.lines()
.map(|x| x.to_string())
.collect();
// Parse "X => Y" replacement rules until the blank separator line.
let mut substitutes: HashMap<&str, Vec<&str>> = HashMap::new();
for line in file_content.iter() {
if line.is_empty() {
break;
}
let splitted: Vec<&str> = line.split(" => ").collect();
let reference = &splitted[0];
let replacement = &splitted[1];
substitutes
.entry(reference)
.and_modify(|x| x.push(replacement))
.or_insert(vec![replacement]);
}
// The molecule is the last line of the file.
let molecule = file_content.last().unwrap();
let mut uniques: HashSet<String> = HashSet::new();
for (i, ch) in molecule.chars().enumerate() {
// Element names are one uppercase letter optionally followed by one
// lowercase letter; a lowercase char extends the previous char's key.
// NOTE(review): `(i - 1, i + 1)` underflows if the molecule starts
// with a lowercase character — assumed never to happen in the inputs.
let (left, right) = if ch.to_lowercase().collect::<Vec<_>>() == vec![ch] {
(i - 1, i + 1)
} else {
(i, i + 1)
};
// Part 1: record every molecule reachable by one substitution.
match substitutes.get(&molecule[left..right]) {
Some(repl) => {
for r in repl.iter() {
let mut s = String::from(molecule);
s.replace_range(left..right, r);
uniques.insert(s);
}
}
None => (),
}
}
assert_eq!(uniques.len(), result_1);
// Part 2: invert the rule map (replacement -> source) for reduction.
let reversed: HashMap<&str, &str> = substitutes
.iter()
.flat_map(|(&k, v)| v.iter().map(move |&vv| (vv, k)))
.collect();
let str_1 = "e";
let mut str_2 = molecule.to_string();
let longest_key_len = reversed.keys().map(|x| x.len()).max().unwrap();
let mut main_offset = 0;
let mut min_changes = 0;
// Greedy reduction: repeatedly try windows ending `main_offset` chars
// from the right, widening/shifting until a reverse rule matches.
while str_1 != str_2 {
println!("Line remaining: {}, main_offset {}", str_2, main_offset);
let mut i = 0;
while i < longest_key_len {
// Candidate window start: longest keys are tried first (small `i`).
let offset = if str_2.len() + i >= longest_key_len {
str_2.len() + i - longest_key_len
} else {
str_2.len()
};
let beginning = if main_offset > offset {
0
} else {
offset - main_offset
};
let end = str_2.len() - main_offset;
// NOTE(review): byte-index slicing; fine for ASCII puzzle input.
let tmp_str = &str_2[beginning..end];
match reversed.get(tmp_str) {
Some(val) => {
println!("Before {}", str_2);
println!("Replaced {} with {}", tmp_str, val);
str_2.replace_range(beginning..end, val);
println!("After {}", str_2);
min_changes += 1;
// Restart scanning from the right end after a replacement.
main_offset = 0;
break;
}
None => (),
}
i += 1;
}
// No window matched at this offset: slide one char further left.
if i == longest_key_len {
main_offset += 1;
}
}
println!("Min changes: {}", min_changes);
assert_eq!(min_changes, result_2);
}
Ok(())
}
|
#![allow(dead_code)]
mod gears;
extern crate image;
use image::{ImageBuffer, RgbImage, Rgb};
// Model name (resolved to ./models/<name>.mdl) and output image dimensions.
const MDL_NAME: &str = "piramid";
const WIDTH: u32 = 100;
const HEIGHT: u32 = 100;
// Loads a .mdl model and plots its triangle vertices as red pixels into a
// PNG, with the origin shifted to the image centre.
fn main() {
let input_mdl = format!("./models/{}.mdl", MDL_NAME);
let piramid = gears::model::Model::from_mdl(&*input_mdl);
println!("{:?}", piramid);
let mut imgbuf: RgbImage = ImageBuffer::new(WIDTH, HEIGHT);
// Maps model space to pixel space by translating to the image centre.
// NOTE(review): `p.x as u32` — if coordinates can be negative (type of
// `Point.x` is not visible here), this cast wraps or saturates before the
// bounds check below; confirm against `gears::point::Point`.
let get_point_coordinates = |p: &gears::point::Point| -> [u32; 2] {
[
(p.x as u32) + (WIDTH / 2),
(p.y as u32) + (HEIGHT / 2),
]
};
for t in piramid.triangles {
for p in t.vertices.iter() {
let coordinates = get_point_coordinates(p);
// Skip vertices that land outside the image.
if coordinates[0] >= WIDTH || coordinates[1] >= HEIGHT {
continue
}
let pixel = imgbuf.get_pixel_mut(coordinates[0], coordinates[1]);
*pixel = Rgb([255, 0, 0]);
}
}
let output_png = format!("./out/{}.png", MDL_NAME);
imgbuf.save(output_png).unwrap();
}
|
use board;
use moves;
use movement;
// Parses several FEN positions and checks the square-layout invariants
// after each one.
#[test]
fn layout() {
use fen;
use zobrist;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
verify_squares(&main_board);
fen::parse("8/4r1p1/5pBp/2k2P2/3p4/1Pn2KP1/3R1P2/8 w - - 0 1", &mut main_board);
verify_squares(&main_board);
fen::parse("4k3/8/8/8/8/8/4P3/4K3 w - - 5 39", &mut main_board);
verify_squares(&main_board);
fen::parse("r1bq1rk1/pp3ppp/3n4/2p1N3/2B5/7P/PPP2PP1/R1BQR1K1 w - h2 0 33", &mut main_board);
verify_squares(&main_board);
}
// Checks the 10x12 mailbox layout: playable squares (ranks 2-9, files 1-8
// in mailbox coordinates) hold a valid piece code, everything else is the
// off-board sentinel.
fn verify_squares (cboard: &board::chessboard) {
for x in 0..board::full_board_size {
if x > 20 && x < 99 && x % 10 != 0 && x % 10 != 9 {
assert!(cboard.layout[x] >= board::piece::Empty as u8);
assert!(cboard.layout[x] <= board::piece::p as u8);
} else {
assert_eq!(cboard.layout[x], board::void_square);
}
}
}
// Zobrist keys must depend only on the position, not on the FEN move
// counters, and must differ between different positions.
#[test]
fn hashing() {
use fen;
use zobrist;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
let first_hash = main_board.zobrist;
// Same position, different halfmove/fullmove counters: same hash.
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 40 32", &mut main_board);
let second_hash = main_board.zobrist;
assert!(first_hash == second_hash);
fen::parse("8/p7/Pp1p1rk1/1Pp2N2/4P1K1/3P3P/8/8 b - - 0 53", &mut main_board);
let third_hash = main_board.zobrist;
assert!(first_hash != third_hash);
}
// After every made move, the incrementally-updated zobrist key must match
// a from-scratch recomputation.
#[test]
fn move_make_hash() {
use fen;
use zobrist;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
let move1 = moves::_move::new(22, 43, 0, 0, false, false, false, 0);
movement::make(&move1, &mut main_board);
assert_eq!(zobrist::hash(&main_board), main_board.zobrist);
let move2 = moves::_move::new(97, 78, 0, 0, false, false, false, 0);
movement::make(&move2, &mut main_board);
assert_eq!(zobrist::hash(&main_board), main_board.zobrist);
let move3 = moves::_move::new(35, 45, 0, 0, false, false, false, 0);
movement::make(&move3, &mut main_board);
assert_eq!(zobrist::hash(&main_board), main_board.zobrist);
let move4 = moves::_move::new(82, 72, 0, 0, false, false, false, 0);
movement::make(&move4, &mut main_board);
assert_eq!(zobrist::hash(&main_board), main_board.zobrist);
}
// After each undo, the incrementally-updated zobrist key must match a
// from-scratch recomputation.
#[test]
fn move_undo_hash() {
    use fen;
    use zobrist;
    zobrist::init();
    let mut main_board : board::chessboard = board::init();
    fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
    let move1 = moves::_move::new(22, 43, 0, 0, false, false, false, 0);
    movement::make(&move1, &mut main_board);
    let move2 = moves::_move::new(97, 78, 0, 0, false, false, false, 0);
    movement::make(&move2, &mut main_board);
    let move3 = moves::_move::new(35, 45, 0, 0, false, false, false, 0);
    movement::make(&move3, &mut main_board);
    let move4 = moves::_move::new(82, 72, 0, 0, false, false, false, 0);
    movement::make(&move4, &mut main_board);
    // Fix: `_` replaces the unused `x` binding (unused-variable warning).
    for _ in 0..4 {
        movement::undo(&mut main_board);
        assert_eq!(zobrist::hash(&main_board), main_board.zobrist);
    }
}
// Asserts that the board's incrementally-maintained zobrist key matches a
// from-scratch recomputation.
fn verify_hash(main_board : &board::chessboard) {
    use zobrist;
    // Fix: pass the reference directly instead of `&main_board`, which
    // created a `&&chessboard` and relied on deref coercion.
    assert_eq!(zobrist::hash(main_board), main_board.zobrist);
}
// Parses several positions and checks piece-list/piece-count consistency
// after each one.
#[test]
fn piece_list() {
use fen;
use zobrist;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
verify_pl(&main_board);
fen::parse("8/p7/Pp1p1rk1/1Pp2N2/4P1K1/3P3P/8/8 b - - 0 53", &mut main_board);
verify_pl(&main_board);
fen::parse("r1bq1rk1/pp3ppp/3n4/2p1N3/2B5/7P/PPP2PP1/R1BQR1K1 w - h2 0 33", &mut main_board);
verify_pl(&main_board);
fen::parse("rnbqkb1r/pp1p1pPp/8/2p1pP2/1P1P4/3P3P/P1P1P3/RNBQKBNR w KQkq - 0 1", &mut main_board);
verify_pl(&main_board);
}
// Piece-list invariants: exactly one king per side, every listed square
// actually holds the piece in the layout, and non-empty lists end on a
// real (non-zero) square.
fn verify_pl(cboard: &board::chessboard) {
assert_eq!(cboard.piece_count[board::piece::k as usize], 1);
assert_eq!(cboard.piece_count[board::piece::K as usize], 1);
for i in 1..board::piece::p as usize{
for x in 0..cboard.piece_count[i]{
assert_eq!(cboard.layout[cboard.piece_list[i as usize][x as usize] as usize], i as u8);
}
if cboard.piece_count[i] != 0 {
assert!(cboard.piece_list[i][cboard.piece_count[i] as usize - 1] != 0);
}
}
}
// Castling rights must always fit in the 4-bit mask.
// NOTE(review): both FEN strings are identical — the second parse was
// presumably meant to exercise a different castling configuration; confirm
// and vary it.
#[test]
fn castling() {
use fen;
use zobrist;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
verify_castle(&main_board);
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
verify_castle(&main_board);
}
// Castling rights are a 4-bit mask (KQkq), so the value is at most 15.
fn verify_castle(cboard: &board::chessboard) {
assert!(cboard.castling <= 15);
}
// Knights shuffle out and back: repetition must only be detected once the
// starting position recurs, and must clear when a new position arises.
#[test]
fn repetition() {
use fen;
use zobrist;
use think;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
movement::make(&moves::_move::new(22, 43, 0, 0, false, false, false,0), &mut main_board);
assert_eq!(think::repetition(&main_board), false);
movement::make(&moves::_move::new(92, 73, 0, 0, false, false, false, 0), &mut main_board);
assert_eq!(think::repetition(&main_board), false);
movement::make(&moves::_move::new(43, 22, 0, 0, false, false, false, 0), &mut main_board);
assert_eq!(think::repetition(&main_board), false);
// Both knights returned: the start position has now occurred twice.
movement::make(&moves::_move::new(73, 92, 0, 0, false, false, false, 0), &mut main_board);
assert!(think::repetition(&main_board));
movement::make(&moves::_move::new(92, 73, 0, 0, false, false, false, 0), &mut main_board);
assert!(think::repetition(&main_board));
movement::make(&moves::_move::new(73, 65, 0, 0, false, false, false, 0), &mut main_board);
assert_eq!(think::repetition(&main_board), false);
}
/// Runs every board-consistency check; the asserts inside the helpers panic
/// on a corrupt board, so reaching the end means the board is sane.
pub fn sane(cboard: &board::chessboard) -> bool {
    verify_squares(cboard);
    verify_pl(cboard);
    verify_castle(cboard);
    verify_hash(cboard);
    // Fix: return the literal directly instead of binding a useless local.
    true
}
// Leaf-node counter for perft runs.
// NOTE(review): `static mut` requires `unsafe` everywhere and is not
// thread-safe; an `AtomicU64` (or returning the count) would be safer.
static mut leafnodes : f64 = 0f64;
// Counts leaf nodes of the legal move tree to `depth` by making/undoing
// every generated move; illegal moves are skipped when `make` rejects them.
pub fn perft(depth : i32, cboard : &mut board::chessboard) {
if depth == 0 {
unsafe {
leafnodes += 1f64;
return;
}
}
let mut move_list : moves::movelist = moves::movelist::new();
moves::generator(&mut move_list, cboard);
for x in 0..move_list.count as usize {
if !movement::make(&move_list.all[x], cboard) {
continue
}
perft(depth - 1, cboard);
movement::undo(cboard);
}
}
// Runs a perft to `depth` from the current position, printing and returning
// the leaf-node count. Duplicates perft's top level so the root moves reset
// the shared `leafnodes` counter exactly once.
pub fn perft_test(depth: i32, cboard : &mut board::chessboard) -> f64 {
unsafe {
println!("Perft test to depth: {}", depth);
leafnodes = 0f64;
let mut move_list : moves::movelist = moves::movelist::new();
moves::generator(&mut move_list, cboard);
for x in 0..move_list.count as usize {
if !movement::make(&move_list.all[x], cboard) {
continue
}
perft(depth - 1, cboard);
movement::undo(cboard);
}
println!("Test Complete: {} nodes", leafnodes);
leafnodes
}
}
// Validates move generation against well-known perft reference counts
// (Kiwipete, a promotion-heavy position, and the start position).
#[test]
pub fn perft_suite() {
use fen;
use zobrist;
use think;
zobrist::init();
let mut main_board : board::chessboard = board::init();
fen::parse("r3k2r/p1ppqpb1/bn2pnp1/3PN3/1p2P3/2N2Q1p/PPPBBPPP/R3K2R w KQkq - 0 1", &mut main_board);
assert_eq!(perft_test(3, &mut main_board), 97862f64);
fen::parse("n1n5/PPPk4/8/8/8/8/4Kppp/5N1N b - - 0 1", &mut main_board);
assert_eq!(perft_test(4, &mut main_board), 182838f64);
fen::parse("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1", &mut main_board);
assert_eq!(perft_test(4, &mut main_board), 197281f64);
}
|
use std::collections::hash_map::Entry;
use std::collections::{HashSet, HashMap};
use std::path::Path;
use std::sync::{Arc, RwLock};
use meilidb_schema::Schema;
mod custom_settings;
mod docs_words_index;
mod documents_addition;
mod documents_deletion;
mod documents_index;
mod error;
mod index;
mod main_index;
mod raw_index;
mod synonyms_addition;
mod synonyms_deletion;
mod synonyms_index;
mod words_index;
pub use self::error::Error;
pub use self::index::Index;
pub use self::custom_settings::CustomSettings;
use self::docs_words_index::DocsWordsIndex;
use self::documents_addition::DocumentsAddition;
use self::documents_deletion::DocumentsDeletion;
use self::synonyms_addition::SynonymsAddition;
use self::synonyms_deletion::SynonymsDeletion;
use self::documents_index::DocumentsIndex;
use self::index::InnerIndex;
use self::main_index::MainIndex;
use self::raw_index::{RawIndex, InnerRawIndex};
use self::words_index::WordsIndex;
use self::synonyms_index::SynonymsIndex;
/// A RocksDB-backed database holding multiple named indexes, each opened
/// lazily and cached behind a read/write lock.
pub struct Database {
// Already-opened indexes, keyed by name.
cache: RwLock<HashMap<String, Arc<Index>>>,
// The shared RocksDB handle; each index maps to a set of column families.
inner: Arc<rocksdb::DB>,
}
impl Database {
    /// Opens (creating if missing) the RocksDB database at `path` and
    /// eagerly opens every index whose column families already exist.
    pub fn start_default<P: AsRef<Path>>(path: P) -> Result<Database, Error> {
        let path = path.as_ref();
        let cache = RwLock::new(HashMap::new());
        let options = {
            let mut options = rocksdb::Options::default();
            options.create_if_missing(true);
            options
        };
        // A brand-new database has no column families to list yet.
        // (`unwrap_or_default` replaces the eager `unwrap_or(Vec::new())`.)
        let cfs = rocksdb::DB::list_cf(&options, path).unwrap_or_default();
        let inner = Arc::new(rocksdb::DB::open_cf(&options, path, &cfs)?);
        let database = Database { cache, inner };
        // Column families are named `<index>` or `<index>-<kind>`; the text
        // before the first '-' is the index name ("default" is RocksDB's own
        // column family, not one of ours). `.next()` replaces `.nth(0)`.
        let mut indexes: Vec<_> = cfs.iter()
            .filter_map(|c| c.split('-').next().filter(|&c| c != "default"))
            .collect();
        indexes.sort_unstable();
        indexes.dedup();
        for index in indexes {
            database.open_index(index)?;
        }
        Ok(database)
    }
    /// Returns the registered index names, or `None` if the "indexes" key
    /// has never been written.
    pub fn indexes(&self) -> Result<Option<HashSet<String>>, Error> {
        let bytes = match self.inner.get("indexes")? {
            Some(bytes) => bytes,
            None => return Ok(None),
        };
        let indexes = bincode::deserialize(&bytes)?;
        Ok(Some(indexes))
    }
    /// Persists the full set of registered index names.
    fn set_indexes(&self, value: &HashSet<String>) -> Result<(), Error> {
        let bytes = bincode::serialize(value)?;
        self.inner.put("indexes", bytes)?;
        Ok(())
    }
    /// Returns the index named `name` if it is registered, opening and
    /// caching it on first access; `Ok(None)` if it does not exist.
    pub fn open_index(&self, name: &str) -> Result<Option<Arc<Index>>, Error> {
        // Fast path: only a read lock when the index is already cached.
        {
            let cache = self.cache.read().unwrap();
            if let Some(index) = cache.get(name).cloned() {
                return Ok(Some(index))
            }
        }
        // Slow path: the entry API guards against a racing thread that
        // opened the same index between the two lock acquisitions.
        let mut cache = self.cache.write().unwrap();
        let index = match cache.entry(name.to_string()) {
            Entry::Occupied(occupied) => {
                occupied.get().clone()
            },
            Entry::Vacant(vacant) => {
                if !self.indexes()?.map_or(false, |x| x.contains(name)) {
                    return Ok(None)
                }
                // Each index spans six column families; every `cf_handle`
                // call just asserts the family exists before wrapping it.
                let main = {
                    self.inner.cf_handle(name).expect("cf not found");
                    MainIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(name)))
                };
                let synonyms = {
                    let cf_name = format!("{}-synonyms", name);
                    self.inner.cf_handle(&cf_name).expect("cf not found");
                    SynonymsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let words = {
                    let cf_name = format!("{}-words", name);
                    self.inner.cf_handle(&cf_name).expect("cf not found");
                    WordsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let docs_words = {
                    let cf_name = format!("{}-docs-words", name);
                    self.inner.cf_handle(&cf_name).expect("cf not found");
                    DocsWordsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let documents = {
                    let cf_name = format!("{}-documents", name);
                    self.inner.cf_handle(&cf_name).expect("cf not found");
                    DocumentsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let custom = {
                    let cf_name = format!("{}-custom", name);
                    self.inner.cf_handle(&cf_name).expect("cf not found");
                    CustomSettings(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let raw_index = RawIndex { main, synonyms, words, docs_words, documents, custom };
                let index = Index::from_raw(raw_index)?;
                vacant.insert(Arc::new(index)).clone()
            },
        };
        Ok(Some(index))
    }
    /// Creates (or reopens) the index named `name` with the given schema,
    /// registering it and creating all of its column families.
    ///
    /// Returns `Error::SchemaDiffer` if the index already stores a
    /// different schema.
    pub fn create_index(&self, name: &str, schema: Schema) -> Result<Arc<Index>, Error> {
        let mut cache = self.cache.write().unwrap();
        let index = match cache.entry(name.to_string()) {
            Entry::Occupied(occupied) => {
                occupied.get().clone()
            },
            Entry::Vacant(vacant) => {
                let main = {
                    self.inner.create_cf(name, &rocksdb::Options::default())?;
                    MainIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(name)))
                };
                // Re-creating an existing index is allowed only with an
                // identical schema.
                if let Some(prev_schema) = main.schema()? {
                    if prev_schema != schema {
                        return Err(Error::SchemaDiffer)
                    }
                }
                main.set_schema(&schema)?;
                let synonyms = {
                    let cf_name = format!("{}-synonyms", name);
                    self.inner.create_cf(&cf_name, &rocksdb::Options::default())?;
                    SynonymsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let words = {
                    let cf_name = format!("{}-words", name);
                    self.inner.create_cf(&cf_name, &rocksdb::Options::default())?;
                    WordsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let docs_words = {
                    let cf_name = format!("{}-docs-words", name);
                    self.inner.create_cf(&cf_name, &rocksdb::Options::default())?;
                    DocsWordsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let documents = {
                    let cf_name = format!("{}-documents", name);
                    self.inner.create_cf(&cf_name, &rocksdb::Options::default())?;
                    DocumentsIndex(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                let custom = {
                    let cf_name = format!("{}-custom", name);
                    self.inner.create_cf(&cf_name, &rocksdb::Options::default())?;
                    CustomSettings(InnerRawIndex::new(self.inner.clone(), Arc::from(cf_name)))
                };
                // Record the new index in the registry before caching it.
                let mut indexes = self.indexes()?.unwrap_or_else(HashSet::new);
                indexes.insert(name.to_string());
                self.set_indexes(&indexes)?;
                let raw_index = RawIndex { main, synonyms, words, docs_words, documents, custom };
                let index = Index::from_raw(raw_index)?;
                vacant.insert(Arc::new(index)).clone()
            },
        };
        Ok(index)
    }
}
|
// Copyright 2018-2019 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use crate::backend::traits::BackendDatabase;
/// Thin wrapper over an LMDB database handle, implementing the generic
/// `BackendDatabase` trait for the LMDB backend.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct DatabaseImpl(pub(crate) lmdb::Database);
// Marker impl: the trait carries no required methods here.
impl BackendDatabase for DatabaseImpl {}
|
use input::get_name;
use output::{goodbye, hello};
mod day_kind;
mod input;
mod output;
/// Entry point: read the user's name, then greet and bid farewell.
fn main() {
    let user = get_name();
    hello(&user);
    goodbye(&user);
}
|
// Copyright 2021 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
use super::{
keys::{PublicKey, Signature, SignatureShare},
token::Token,
utils, Error, Result,
};
use crate::SectionElders;
use crdts::Dot;
use serde::{Deserialize, Serialize};
use std::fmt::{self, Debug, Display, Formatter};
use threshold_crypto::PublicKeySet;
use tiny_keccak::{Hasher, Sha3};
/// Debit ID, unique per debiting key (a `Dot` of that key).
pub type DebitId = Dot<PublicKey>;
/// Credit ID is the 32-byte SHA3-256 hash of the DebitId.
pub type CreditId = [u8; 256 / 8];
/// Msg, containing any data to the recipient.
pub type Msg = String;
/// Contains info on who the replicas
/// of this wallet are, and the wallet history at them.
#[derive(Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct WalletHistory {
/// The section elders holding the replicas of this wallet.
pub replicas: SectionElders,
/// The credits and debits recorded at those replicas.
pub history: ActorHistory,
}
impl Debug for WalletHistory {
// Manual impl: prints only the replicas' public key instead of the
// full key set, keeping debug output compact.
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"WalletHistory {{ replicas: PkSet {{ public_key: {:?} }}, history: {:?} }}",
self.replicas.key_set.public_key(),
self.history
)
}
}
/// A cmd to transfer of tokens between two keys.
#[derive(Clone, Hash, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Debug)]
pub struct Transfer {
/// The amount to transfer.
pub amount: Token,
/// The destination key to transfer to.
pub to: PublicKey,
/// Debit ID, containing the source key.
pub debit_id: DebitId,
/// Msg, containing any data to the recipient.
pub msg: Msg,
}
impl Transfer {
    /// The source half of the transfer: the debit id and amount.
    pub fn debit(&self) -> Debit {
        Debit {
            id: self.debit_id,
            amount: self.amount,
        }
    }
    /// The destination half of the transfer: recipient, amount and message.
    ///
    /// # Errors
    /// Fails if the credit id (hash of the debit id) cannot be derived.
    pub fn credit(&self) -> Result<Credit> {
        Ok(Credit {
            id: self.debit().credit_id()?,
            amount: self.amount,
            recipient: self.to,
            // `msg` is already a `String`: clone it directly instead of
            // round-tripping through `Display` with `to_string`.
            msg: self.msg.clone(),
        })
    }
}
/// A debit of tokens at a key.
#[derive(Clone, Hash, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Debug)]
pub struct Debit {
/// Debit ID, containing the source key.
pub id: DebitId,
/// The amount to debit.
pub amount: Token,
}
impl Debit {
    /// Get the debit id
    pub fn id(&self) -> DebitId {
        self.id
    }
    /// Get the amount of this debit
    pub fn amount(&self) -> Token {
        self.amount
    }
    /// Get the key to be debited
    pub fn sender(&self) -> PublicKey {
        self.id.actor
    }
    /// Derives the credit id: the SHA3-256 hash of the serialised debit id.
    ///
    /// # Errors
    /// Fails if the debit id cannot be serialised.
    pub fn credit_id(&self) -> Result<CreditId> {
        // Bind the owned bytes directly; the previous `&utils::serialise(..)?`
        // bound a reference and then took a reference to it again.
        let id_bytes = utils::serialise(&self.id)?;
        let mut hasher = Sha3::v256();
        let mut output = [0; 32];
        hasher.update(&id_bytes);
        hasher.finalize(&mut output);
        Ok(output)
    }
}
/// A credit of tokens to a key.
/// (Fixed copy-paste: this is the credit side, not a debit.)
#[derive(Clone, Hash, Eq, PartialEq, PartialOrd, Serialize, Deserialize, Debug)]
pub struct Credit {
/// Unique id for the credit, being the hash of the DebitId.
pub id: CreditId,
/// The amount to credit.
pub amount: Token,
/// The recipient key
pub recipient: PublicKey,
/// Msg, containing any data to the recipient.
pub msg: Msg,
}
impl Credit {
/// Get the credit id (hash of the originating debit id)
pub fn id(&self) -> &CreditId {
&self.id
}
/// Get the amount of this credit
pub fn amount(&self) -> Token {
self.amount
}
/// Get the key to be credited
pub fn recipient(&self) -> PublicKey {
self.recipient
}
}
/// The history of a transfer Actor: all agreed credits and debits.
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct ActorHistory {
/// All the credits.
pub credits: Vec<CreditAgreementProof>,
/// All the debits.
pub debits: Vec<TransferAgreementProof>,
}
impl ActorHistory {
/// Returns empty history.
pub fn empty() -> Self {
Self {
credits: vec![],
debits: vec![],
}
}
/// Returns `true` if the history contains no elements.
pub fn is_empty(&self) -> bool {
self.credits.is_empty() && self.debits.is_empty()
}
/// Returns the number of elements in the history, also referred to
/// as its 'length'.
pub fn len(&self) -> usize {
self.credits.len() + self.debits.len()
}
}
/// The aggregated Replica signatures over the Actor's credit.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct CreditAgreementProof {
/// The cmd generated by sender Actor.
pub signed_credit: SignedCredit,
/// Quorum of Replica sigs over the credit.
pub debiting_replicas_sig: Signature,
/// PublicKeySet of the replica when it validated the debit.
pub debiting_replicas_keys: ReplicaPublicKeySet,
}
impl Debug for CreditAgreementProof {
// Prints the credit amount plus both signatures.
fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
write!(formatter, "CreditAgreementProof::")?;
write!(formatter, "Credit({})", self.signed_credit.amount())?;
write!(formatter, "ActorSignature::")?;
// Calling `Debug::fmt` directly forwards the formatter, so flags
// such as `{:#?}` propagate to the nested signature output.
Debug::fmt(&self.signed_credit.actor_signature, formatter)?;
write!(formatter, "ReplicaSignature::")?;
Debug::fmt(&self.debiting_replicas_sig, formatter)
}
}
impl Display for CreditAgreementProof {
// `Display` output is identical to the `Debug` output.
fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
Debug::fmt(self, formatter)
}
}
impl CreditAgreementProof {
/// Get the credit id
pub fn id(&self) -> &CreditId {
self.signed_credit.id()
}
/// Get the amount of this credit
pub fn amount(&self) -> Token {
self.signed_credit.amount()
}
/// Get the recipient of this credit
pub fn recipient(&self) -> PublicKey {
self.signed_credit.recipient()
}
/// Get the PublicKeySet of the replica that validated this credit
pub fn replica_keys(&self) -> ReplicaPublicKeySet {
// Returns an owned clone of the key set.
self.debiting_replicas_keys.clone()
}
}
/// The aggregated Replica signatures over both halves (debit and credit)
/// of the Actor's transfer cmd.
#[derive(Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct TransferAgreementProof {
/// The debit generated by sender Actor.
pub signed_debit: SignedDebit,
/// The credit generated by sender Actor.
pub signed_credit: SignedCredit,
/// Quorum of Replica sigs over the debit.
pub debit_sig: Signature,
/// Quorum of Replica sigs over the credit.
pub credit_sig: Signature,
/// PublicKeySet of the replica when it validated the transfer.
pub debiting_replicas_keys: ReplicaPublicKeySet,
}
impl TransferAgreementProof {
/// Get the debit id
pub fn id(&self) -> DebitId {
self.signed_debit.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.signed_debit.amount()
}
/// Get the sender of this transfer
pub fn sender(&self) -> PublicKey {
self.signed_debit.sender()
}
/// Get the recipient of this transfer
pub fn recipient(&self) -> PublicKey {
self.signed_credit.recipient()
}
/// Get the PublicKeySet of the replica that validated this transfer
pub fn replica_keys(&self) -> ReplicaPublicKeySet {
self.debiting_replicas_keys.clone()
}
/// Get the corresponding credit agreement proof.
pub fn credit_proof(&self) -> CreditAgreementProof {
// Extracts just the credit side (credit + its quorum sig + keys),
// e.g. for propagation to the crediting section.
CreditAgreementProof {
signed_credit: self.signed_credit.clone(),
debiting_replicas_sig: self.credit_sig.clone(),
debiting_replicas_keys: self.replica_keys(),
}
}
}
impl Debug for TransferAgreementProof {
// Manual impl: prints only the replicas' public key instead of the
// full key set, keeping debug output compact.
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"TransferAgreementProof {{ signed_debit: {:?}, signed_credit: {:?}, debit_sig: {:?}, credit_sig: {:?}, debiting_replicas_keys: PkSet {{ public_key: {:?} }} }}",
self.signed_debit,
self.signed_credit,
self.debit_sig,
self.credit_sig,
self.debiting_replicas_keys.public_key()
)
}
}
/// An Actor cmd: the debit and credit halves of a transfer,
/// each signed by the Actor.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SignedTransfer {
/// The debit.
pub debit: SignedDebit,
/// The credit.
pub credit: SignedCredit,
}
impl SignedTransfer {
    /// Get the debit id
    pub fn id(&self) -> DebitId {
        self.debit.id()
    }
    /// Get the amount of this transfer
    pub fn amount(&self) -> Token {
        self.debit.amount()
    }
    /// Get the sender of this transfer
    pub fn sender(&self) -> PublicKey {
        // Delegate to `SignedDebit::sender` (which returns `id().actor`)
        // for consistency with the other accessors here.
        self.debit.sender()
    }
    /// Get the credit id of this debit.
    pub fn credit_id(&self) -> Result<CreditId> {
        self.debit.credit_id()
    }
}
/// An Actor cmd: a debit signed by the Actor.
#[derive(Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SignedDebit {
/// The debit.
pub debit: Debit,
/// Actor signature over the debit.
pub actor_signature: Signature,
}
impl SignedDebit {
    /// Get the debit id
    pub fn id(&self) -> DebitId {
        self.debit.id()
    }
    /// Get the amount of this transfer
    pub fn amount(&self) -> Token {
        self.debit.amount()
    }
    /// Get the sender of this transfer
    pub fn sender(&self) -> PublicKey {
        self.debit.sender()
    }
    /// Get the credit id of this debit.
    pub fn credit_id(&self) -> Result<CreditId> {
        self.debit.credit_id()
    }
    /// Tries to represent the signed debit as a share.
    ///
    /// # Errors
    /// Returns `Error::InvalidSignature` if the actor signature is not a BLS share.
    pub fn as_share(&self) -> Result<SignedDebitShare> {
        // Match by reference so the signature is only cloned when it is
        // actually a share, instead of cloning it unconditionally.
        if let Signature::BlsShare(share) = &self.actor_signature {
            Ok(SignedDebitShare {
                debit: self.debit.clone(),
                actor_signature: share.clone(),
            })
        } else {
            Err(Error::InvalidSignature)
        }
    }
}
/// An Actor cmd: a credit signed by the Actor.
#[derive(Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SignedCredit {
/// The credit.
pub credit: Credit,
/// Actor signature over the transfer.
pub actor_signature: Signature,
}
impl SignedCredit {
    /// Get the credit id
    pub fn id(&self) -> &CreditId {
        self.credit.id()
    }
    /// Get the amount of this credit
    pub fn amount(&self) -> Token {
        // Go through the accessor for consistency with the other getters.
        self.credit.amount()
    }
    /// Get the recipient of this credit
    pub fn recipient(&self) -> PublicKey {
        self.credit.recipient()
    }
    /// Tries to represent the signed credit as a share.
    ///
    /// # Errors
    /// Returns `Error::InvalidSignature` if the actor signature is not a BLS share.
    pub fn as_share(&self) -> Result<SignedCreditShare> {
        // Match by reference so the signature is only cloned when it is
        // actually a share, instead of cloning it unconditionally.
        if let Signature::BlsShare(share) = &self.actor_signature {
            Ok(SignedCreditShare {
                credit: self.credit.clone(),
                actor_signature: share.clone(),
            })
        } else {
            Err(Error::InvalidSignature)
        }
    }
}
// ------------------------------------------------------------
// MULTI SIG
// ------------------------------------------------------------
/// An Actor cmd: one actor's share of a multisig transfer.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct SignedTransferShare {
/// The debit.
debit: SignedDebitShare,
/// The credit.
credit: SignedCreditShare,
/// The public key set of the actors jointly controlling the wallet.
actors: PublicKeySet,
}
impl SignedTransferShare {
/// Creates a valid transfer share out of its parts.
///
/// # Errors
/// Returns `Error::InvalidOperation` if the debit and credit amounts differ,
/// if the credit id is not the hash of the debit id, or if the two signature
/// shares carry different share indices.
pub fn new(
debit: SignedDebitShare,
credit: SignedCreditShare,
actors: PublicKeySet,
) -> Result<Self> {
// Debit and credit must move the same amount.
if debit.amount() != credit.amount() {
return Err(Error::InvalidOperation);
}
// The credit must be derived from this debit.
if debit.credit_id()? != *credit.id() {
return Err(Error::InvalidOperation);
}
// Both halves must be signed by the same share index.
let debit_sig_index = debit.actor_signature.index;
let credit_sig_index = credit.actor_signature.index;
if debit_sig_index != credit_sig_index {
return Err(Error::InvalidOperation);
}
Ok(Self {
debit,
credit,
actors,
})
}
/// Get the debit id
pub fn id(&self) -> DebitId {
self.debit.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.debit.amount()
}
/// Get the sender of this transfer
pub fn sender(&self) -> PublicKey {
self.debit.id().actor
}
/// Get the credit id of this debit.
pub fn credit_id(&self) -> Result<CreditId> {
self.debit.credit_id()
}
/// Get the debit share.
pub fn debit(&self) -> &SignedDebitShare {
&self.debit
}
/// Get the credit share.
pub fn credit(&self) -> &SignedCreditShare {
&self.credit
}
/// Get the share index (identical for debit and credit, enforced in `new`).
pub fn share_index(&self) -> usize {
self.debit.actor_signature.index
}
/// Get the public key set of the actors.
pub fn actors(&self) -> &PublicKeySet {
&self.actors
}
}
impl Debug for SignedTransferShare {
// Manual impl: prints only the actors' public key instead of the
// full key set, keeping debug output compact.
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"SignedTransferShare {{ debit: {:?}, credit: {:?}, actors: PkSet {{ public_key: {:?} }} }}",
self.debit,
self.credit,
self.actors.public_key()
)
}
}
/// An Actor cmd: a debit signed with one actor's signature share.
#[derive(Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SignedDebitShare {
/// The debit.
pub debit: Debit,
/// Actor signature share over the debit.
pub actor_signature: SignatureShare,
}
impl SignedDebitShare {
/// Get the debit id
pub fn id(&self) -> DebitId {
self.debit.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.debit.amount()
}
/// Get the sender of this transfer
pub fn sender(&self) -> PublicKey {
self.debit.sender()
}
/// Get the credit id of this debit.
pub fn credit_id(&self) -> Result<CreditId> {
self.debit.credit_id()
}
/// Get the index of this signature share.
pub fn share_index(&self) -> usize {
self.actor_signature.index
}
}
/// An Actor cmd: a credit signed with one actor's signature share.
#[derive(Clone, Hash, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct SignedCreditShare {
/// The credit.
pub credit: Credit,
/// Actor signature share over the transfer.
pub actor_signature: SignatureShare,
}
impl SignedCreditShare {
    /// Get the credit id
    pub fn id(&self) -> &CreditId {
        self.credit.id()
    }
    /// Get the amount of this credit
    pub fn amount(&self) -> Token {
        // Go through the accessor for consistency with the other getters.
        self.credit.amount()
    }
    /// Get the recipient of this credit
    pub fn recipient(&self) -> PublicKey {
        self.credit.recipient()
    }
    /// Get the index of this signature share.
    pub fn share_index(&self) -> usize {
        self.actor_signature.index
    }
}
// ------------------------------------------------------------
// Replica
// ------------------------------------------------------------
/// Events raised by the Replica.
// Variants embed full proofs/validations, hence the size-lint allow.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub enum ReplicaEvent {
/// The event raised when
/// a multisig validation has been proposed.
TransferValidationProposed(TransferValidationProposed),
/// The event raised when
/// ValidateTransfer cmd has been successful.
TransferValidated(TransferValidated),
/// The event raised when
/// RegisterTransfer cmd has been successful.
TransferRegistered(TransferRegistered),
/// The event raised when
/// PropagateTransfer cmd has been successful.
TransferPropagated(TransferPropagated),
}
/// The debiting Replica event raised when
/// ProposeTransferValidation cmd has been successful.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct TransferValidationProposed {
/// The debit signed by the initiating Actor.
pub signed_debit: SignedDebitShare,
/// The credit signed by the initiating Actor.
pub signed_credit: SignedCreditShare,
/// When the proposals accumulate, we have an agreed transfer.
/// `None` until enough proposals have accumulated.
pub agreed_transfer: Option<SignedTransfer>,
}
impl TransferValidationProposed {
/// Get the debit id
pub fn id(&self) -> DebitId {
self.signed_debit.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.signed_debit.amount()
}
/// Get the sender of this transfer
pub fn sender(&self) -> PublicKey {
self.signed_debit.sender()
}
/// Get the recipient of this transfer
pub fn recipient(&self) -> PublicKey {
self.signed_credit.recipient()
}
}
/// The debiting Replica event raised when
/// ValidateTransfer cmd has been successful.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct TransferValidated {
/// The debit initiated by the Actor.
pub signed_debit: SignedDebit,
/// The corresponding credit, signed by the Actor.
pub signed_credit: SignedCredit,
/// Replica signature share over the signed debit.
pub replica_debit_sig: SignatureShare,
/// Replica signature share over the signed credit.
pub replica_credit_sig: SignatureShare,
/// The PK Set of the Replicas
pub replicas: PublicKeySet,
}
impl Debug for TransferValidated {
// Manual impl: prints only the replicas' public key instead of the
// full key set, keeping debug output compact.
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"TransferValidated {{ signed_debit: {:?}, signed_credit: {:?}, replica_debit_sig: {:?}, replica_credit_sig: {:?}, replicas: PkSet {{ public_key: {:?} }} }}",
self.signed_debit,
self.signed_credit,
self.replica_debit_sig,
self.replica_credit_sig,
self.replicas.public_key()
)
}
}
impl TransferValidated {
/// Get the debit id
pub fn id(&self) -> DebitId {
self.signed_debit.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.signed_debit.amount()
}
/// Get the sender of this transfer
pub fn sender(&self) -> PublicKey {
self.signed_debit.sender()
}
/// Get the recipient of this transfer
pub fn recipient(&self) -> PublicKey {
self.signed_credit.recipient()
}
}
/// The debiting Replica event raised when
/// RegisterTransfer cmd has been successful.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct TransferRegistered {
/// The transfer proof (quorum-signed debit and credit).
pub transfer_proof: TransferAgreementProof,
}
impl TransferRegistered {
// All accessors delegate to the wrapped transfer proof.
/// Get the debit id
pub fn id(&self) -> DebitId {
self.transfer_proof.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.transfer_proof.amount()
}
/// Get the sender of this transfer
pub fn sender(&self) -> PublicKey {
self.transfer_proof.sender()
}
/// Get the recipient of this transfer
pub fn recipient(&self) -> PublicKey {
self.transfer_proof.recipient()
}
}
/// The crediting Replica event raised when
/// PropagateTransfer cmd has been successful.
#[derive(Clone, Eq, PartialEq, Serialize, Deserialize, Debug)]
pub struct TransferPropagated {
/// The debiting Replicas' proof of the credit.
pub credit_proof: CreditAgreementProof,
}
impl TransferPropagated {
// All accessors delegate to the wrapped credit proof.
/// Get the credit id
pub fn id(&self) -> &CreditId {
self.credit_proof.id()
}
/// Get the amount of this transfer
pub fn amount(&self) -> Token {
self.credit_proof.amount()
}
/// Get the recipient of this credit
pub fn recipient(&self) -> PublicKey {
self.credit_proof.recipient()
}
}
/// Public Key Set for a group of transfer replicas.
pub type ReplicaPublicKeySet = PublicKeySet;
/// The Replica event raised when
/// we learn of a new group PK set.
#[derive(Clone, Hash, Eq, PartialEq, PartialOrd, Serialize, Deserialize)]
pub struct KnownGroupAdded {
/// The PublicKeySet of the group.
pub group: PublicKeySet,
}
impl Debug for KnownGroupAdded {
// Manual impl: prints only the group's public key instead of the
// full key set, keeping debug output compact.
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"KnownGroupAdded {{ group: PkSet {{ public_key: {:?} }} }}",
self.group.public_key()
)
}
}
/// Notification of a credit sent to a recipient:
/// a newtype wrapping the quorum-signed credit proof.
#[derive(Eq, PartialEq, Clone, Serialize, Deserialize, Debug)]
pub struct CreditNotification(pub CreditAgreementProof);
|
use hydroflow::hydroflow_syntax;
/// Builds and runs a small dataflow: squares of 0..10 that exceed 10
/// (16, 25, 36, 49, 64, 81), each expanded to the pair (n, n+1) and printed.
pub fn main() {
let mut flow = hydroflow_syntax! {
source_iter(0..10)
// Keep only squares greater than 10.
-> filter_map(|n| {
let n2 = n * n;
if n2 > 10 {
Some(n2)
}
else {
None
}
})
// Emit each surviving square and its successor.
-> flat_map(|n| (n..=n+1))
-> for_each(|n| println!("G'day {}", n));
};
// Run until no more work is immediately available.
flow.run_available();
}
|
// Copyright 2019 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{time::Duration, any::type_name, convert::{TryFrom, TryInto}};
#[cfg(feature = "bit-vec")]
use bitvec::{vec::BitVec, order::Lsb0};
use criterion::{Criterion, black_box, Bencher, criterion_group, criterion_main};
use parity_scale_codec::*;
use parity_scale_codec_derive::{Encode, Decode};
/// Benchmarks compact-encoding a u128 into a stack buffer, one million times
/// per iteration.
fn array_vec_write_u128(b: &mut Bencher) {
    b.iter(|| {
        // Renamed the loop variable: it previously shadowed the `Bencher` `b`.
        for i in 0..black_box(1_000_000) {
            let base = 0xffff_ffff_ffff_ffff_ffff_u128;
            // XOR with the loop counter so each encoded value differs.
            Compact(base ^ i).using_encoded(|encoded| {
                black_box(encoded).len()
            });
        }
    });
}
/// Shared driver: times `f` writing a 10 KiB byte array into a fresh `Vec`,
/// 10 000 times per iteration.
fn test_vec<F: Fn(&mut Vec<u8>, &[u8])>(b: &mut Bencher, f: F) {
    let f = black_box(f);
    let input = black_box([0xff; 10240]);
    b.iter(|| {
        for _round in 0..black_box(10_000) {
            let mut sink = Vec::<u8>::new();
            f(&mut sink, &input);
        }
    });
}
// Benchmarks filling the Vec via the `Output::write` trait method.
fn vec_write_as_output(b: &mut Bencher) {
test_vec(b, |vec, a| {
Output::write(vec, a);
});
}
// Benchmarks filling the Vec via `Extend::extend`.
fn vec_extend(b: &mut Bencher) {
test_vec(b, |vec, a| {
vec.extend(a);
});
}
// Benchmarks filling the Vec via `Vec::extend_from_slice`.
fn vec_extend_from_slice(b: &mut Bencher) {
test_vec(b, |vec, a| {
vec.extend_from_slice(a);
});
}
/// An `Input` wrapper that reports no known remaining length, forcing the
/// decoder down the unknown-length code path.
struct NoLimitInput<'a>(&'a [u8]);
impl<'a> Input for NoLimitInput<'a> {
// Always report the length as unknown.
fn remaining_len(&mut self) -> Result<Option<usize>, Error> {
Ok(None)
}
// Delegate reads to the underlying slice.
fn read(&mut self, into: &mut [u8]) -> Result<(), Error> {
self.0.read(into)
}
}
// A representative multi-field event used by the append benchmarks.
#[derive(Encode, Decode)]
enum Event {
ComplexEvent(Vec<u8>, u32, i32, u128, i8),
}
// Baseline for appending: decode the whole encoded Vec, push one event,
// then re-encode everything — O(n) work per append.
fn vec_append_with_decode_and_encode(b: &mut Bencher) {
let data = b"PCX";
b.iter(|| {
let mut encoded_events_vec = Vec::new();
for _ in 0..1000 {
// First pass decodes an empty buffer; `unwrap_or_default` yields vec![].
let mut events = Vec::<Event>::decode(&mut &encoded_events_vec[..])
.unwrap_or_default();
events.push(Event::ComplexEvent(data.to_vec(), 4, 5, 6, 9));
encoded_events_vec = events.encode();
}
})
}
// Counterpart benchmark: append via `EncodeAppend::append_or_new`, which
// avoids decoding the existing buffer on every append.
fn vec_append_with_encode_append(b: &mut Bencher) {
    let data = b"PCX";
    b.iter(|| {
        // Seed the buffer with the first event, then append the remaining 999.
        let events = vec![Event::ComplexEvent(data.to_vec(), 4, 5, 6, 9)];
        let mut encoded_events_vec = events.encode();
        for _ in 1..1000 {
            encoded_events_vec = <Vec<Event> as EncodeAppend>::append_or_new(
                encoded_events_vec,
                &[Event::ComplexEvent(data.to_vec(), 4, 5, 6, 9)],
            ).unwrap();
        }
    });
}
/// Benchmarks encoding and decoding `Vec<T>` across a range of lengths,
/// plus decoding through an input that reports no length limit.
fn encode_decode_vec<T: TryFrom<u8> + Codec>(c: &mut Criterion) where T::Error: std::fmt::Debug {
let mut g = c.benchmark_group("vec_encode");
for vec_size in [1, 2, 5, 32, 1024, 2048, 16384] {
g.bench_with_input(&format!("{}/{}", type_name::<T>(), vec_size), &vec_size, |b, &vec_size| {
// Cycle 0..=127 so every u8 value converts losslessly into any T.
let vec: Vec<T> = (0..=127u8)
.cycle()
.take(vec_size)
.map(|v| v.try_into().unwrap())
.collect();
let vec = black_box(vec);
b.iter(|| vec.encode())
});
}
drop(g);
let mut g = c.benchmark_group("vec_decode");
for vec_size in [1, 2, 5, 32, 1024, 2048, 16384] {
g.bench_with_input(&format!("{}/{}", type_name::<T>(), vec_size), &vec_size, |b, &vec_size| {
let vec: Vec<T> = (0..=127u8)
.cycle()
.take(vec_size)
.map(|v| v.try_into().unwrap())
.collect();
// Pre-encode once; only decoding is timed.
let vec = vec.encode();
let vec = black_box(vec);
b.iter(|| {
let _: Vec<T> = Decode::decode(&mut &vec[..]).unwrap();
})
});
}
drop(g);
let mut g = c.benchmark_group("vec_decode_no_limit");
for vec_size in [16384, 131072] {
g.bench_with_input(&format!("vec_decode_no_limit_{}/{}", type_name::<T>(), vec_size), &vec_size, |b, &vec_size| {
let vec: Vec<T> = (0..=127u8)
.cycle()
.take(vec_size)
.map(|v| v.try_into().unwrap())
.collect();
let vec = vec.encode();
let vec = black_box(vec);
b.iter(|| {
// NoLimitInput exercises the decoder path with unknown remaining length.
let _: Vec<T> = Decode::decode(&mut NoLimitInput(&vec[..])).unwrap();
})
});
}
}
/// Benchmarks encoding and decoding a vector of multi-field structs
/// across a range of lengths.
fn encode_decode_complex_type(c: &mut Criterion) {
#[derive(Encode, Decode, Clone)]
struct ComplexType {
_val: u32,
_other_val: u128,
_vec: Vec<u32>,
}
// Three fixture values, cycled to fill vectors of any requested size.
let complex_types = vec![
ComplexType { _val: 3, _other_val: 345634635, _vec: vec![1, 2, 3, 5, 6, 7] },
ComplexType { _val: 1000, _other_val: 0980345634635, _vec: vec![1, 2, 3, 5, 6, 7] },
ComplexType { _val: 43564, _other_val: 342342345634635, _vec: vec![1, 2, 3, 5, 6, 7] },
];
let mut g = c.benchmark_group("vec_encode_complex_type");
for vec_size in [1, 2, 5, 32, 1024, 2048, 16384] {
let complex_types = complex_types.clone();
g.bench_with_input(format!("vec_encode_complex_type/{}", vec_size), &vec_size, move |b, &vec_size| {
let vec: Vec<ComplexType> = complex_types.clone().into_iter().cycle().take(vec_size).collect();
let vec = black_box(vec);
b.iter(|| vec.encode())
});
}
drop(g);
let mut g = c.benchmark_group("vec_decode_complex_type");
for vec_size in [1, 2, 5, 32, 1024, 2048, 16384] {
let complex_types = complex_types.clone();
g.bench_with_input(format!("vec_decode_complex_type/{}", vec_size), &vec_size, move |b, &vec_size| {
let vec: Vec<ComplexType> = complex_types.clone().into_iter().cycle().take(vec_size).collect();
// Pre-encode once; only decoding is timed.
let vec = vec.encode();
let vec = black_box(vec);
b.iter(|| {
let _: Vec<ComplexType> = Decode::decode(&mut &vec[..]).unwrap();
})
});
}
}
// Registers the simple (non-grouped) benchmark functions with criterion.
fn bench_fn(c: &mut Criterion) {
c.bench_function("vec_write_as_output", vec_write_as_output);
c.bench_function("vec_extend", vec_extend);
c.bench_function("vec_extend_from_slice", vec_extend_from_slice);
c.bench_function("vec_append_with_decode_and_encode", vec_append_with_decode_and_encode);
c.bench_function("vec_append_with_encode_append", vec_append_with_encode_append);
c.bench_function("array_vec_write_u128", array_vec_write_u128);
}
/// Benchmarks encoding and decoding `BitVec<u8, Lsb0>`; only compiled into
/// real work when the `bit-vec` feature is enabled.
fn encode_decode_bitvec_u8(c: &mut Criterion) {
// Silences the unused-parameter warning when `bit-vec` is disabled.
let _ = c;
#[cfg(feature = "bit-vec")]
{
let mut g = c.benchmark_group("bitvec_u8_encode");
for size in [1, 2, 5, 32, 1024] {
g.bench_with_input(size.to_string(), &size, |b, &size| {
// Alternating true/false bits, repeated to the requested length.
let vec: BitVec<u8, Lsb0> = [true, false]
.iter()
.cloned()
.cycle()
.take(size)
.collect();
let vec = black_box(vec);
b.iter(|| vec.encode())
});
}
}
#[cfg(feature = "bit-vec")]
{
let mut g = c.benchmark_group("bitvec_u8_decode");
for size in [1, 2, 5, 32, 1024] {
g.bench_with_input(size.to_string(), &size, |b, &size| {
let vec: BitVec<u8, Lsb0> = [true, false]
.iter()
.cloned()
.cycle()
.take(size)
.collect();
// Pre-encode once; only decoding is timed.
let vec = vec.encode();
let vec = black_box(vec);
b.iter(|| {
let _: BitVec<u8, Lsb0> = Decode::decode(&mut &vec[..]).unwrap();
})
});
}
}
}
// Registers every benchmark target; warm-up is shortened and plotting
// disabled to keep runs quick.
criterion_group!{
name = benches;
config = Criterion::default().warm_up_time(Duration::from_millis(500)).without_plots();
targets = encode_decode_vec::<u8>, encode_decode_vec::<u16>, encode_decode_vec::<u32>, encode_decode_vec::<u64>,
encode_decode_vec::<i8>, encode_decode_vec::<i16>, encode_decode_vec::<i32>, encode_decode_vec::<i64>,
bench_fn, encode_decode_bitvec_u8, encode_decode_complex_type
}
criterion_main!(benches);
|
// Copyright 2013-2018, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use Cancellable;
use Error;
use ffi;
use glib;
use glib::object::IsA;
use glib::translate::*;
use std::ptr;
use glib_ffi;
use gobject_ffi;
use SocketListener;
use Socket;
/// Manually written extension trait adding `accept_socket_async` to
/// `SocketListener` implementors.
pub trait SocketListenerExtManual {
/// Asynchronously accepts a connection; `callback` receives the accepted
/// `Socket` plus an optional source object, or an `Error`.
fn accept_socket_async<'a, P: Into<Option<&'a Cancellable>>, Q: FnOnce(Result<(Socket, Option<glib::Object>), Error>) + Send + 'static>(&self, cancellable: P, callback: Q);
}
impl<O: IsA<SocketListener>> SocketListenerExtManual for O {
// Boxes the callback, hands it to GIO as `user_data`, and reclaims it
// exactly once in the trampoline when the async accept finishes.
fn accept_socket_async<'a, P: Into<Option<&'a Cancellable>>, Q: FnOnce(Result<(Socket, Option<glib::Object>), Error>) + Send + 'static>(&self, cancellable: P, callback: Q) {
let cancellable = cancellable.into();
let cancellable = cancellable.to_glib_none();
// Double-box so a thin `*mut` pointer can cross the C boundary.
let user_data: Box<Box<Q>> = Box::new(Box::new(callback));
unsafe extern "C" fn accept_socket_async_trampoline<Q: FnOnce(Result<(Socket, Option<glib::Object>), Error>) + Send + 'static>(_source_object: *mut gobject_ffi::GObject, res: *mut ffi::GAsyncResult, user_data: glib_ffi::gpointer)
{
callback_guard!();
let mut error = ptr::null_mut();
let mut source_object = ptr::null_mut();
let res = ffi::g_socket_listener_accept_socket_finish(_source_object as *mut _, res, &mut source_object, &mut error);
// A null `error` signals success; ownership of `res`/`error` follows
// the GLib full/none transfer annotations used below.
let result = if error.is_null() { Ok((from_glib_full(res), from_glib_none(source_object))) } else { Err(from_glib_full(error)) };
// Reclaim the boxed callback created above; runs at most once (FnOnce).
let callback: Box<Box<Q>> = Box::from_raw(user_data as *mut _);
callback(result);
}
let callback = accept_socket_async_trampoline::<Q>;
unsafe {
ffi::g_socket_listener_accept_socket_async(self.to_glib_none().0, cancellable.0, Some(callback), Box::into_raw(user_data) as *mut _);
}
}
}
|
pub mod if_statement;
pub mod loops;
pub mod match_statement;
pub mod combination_lock; |
use super::Client;
use crate::{
bson::Document,
client::session::ClusterTime,
error::Result,
options::{SessionOptions, TransactionOptions},
runtime,
ClientSession as AsyncClientSession,
};
/// A MongoDB client session. This struct represents a logical session used for ordering sequential
/// operations. To create a `ClientSession`, call `start_session` on a
/// [`Client`](../struct.Client.html).
///
/// `ClientSession` instances are not thread safe or fork safe. They can only be used by one thread
/// or process at a time.
pub struct ClientSession {
// The wrapped asynchronous session; every sync method delegates to it.
pub(crate) async_client_session: AsyncClientSession,
}
impl From<AsyncClientSession> for ClientSession {
// Wraps an async session in its synchronous counterpart.
fn from(async_client_session: AsyncClientSession) -> Self {
Self {
async_client_session,
}
}
}
impl ClientSession {
/// The client used to create this session.
pub fn client(&self) -> Client {
    // Convert the async client into its sync wrapper.
    let async_client = self.async_client_session.client();
    async_client.into()
}
/// The id document of this session.
pub fn id(&self) -> &Document {
self.async_client_session.id()
}
/// The highest cluster time this session has seen so far.
/// This will be `None` if this session has not been used in an operation yet.
pub fn cluster_time(&self) -> Option<&ClusterTime> {
self.async_client_session.cluster_time()
}
/// The options used to create this session, if any were provided.
pub fn options(&self) -> Option<&SessionOptions> {
self.async_client_session.options()
}
/// Set the cluster time to the provided one if it is greater than this session's highest seen
/// cluster time or if this session's cluster time is `None`.
pub fn advance_cluster_time(&mut self, to: &ClusterTime) {
self.async_client_session.advance_cluster_time(to)
}
/// Starts a new transaction on this session with the given `TransactionOptions`. If no options
/// are provided, the session's `defaultTransactionOptions` will be used. This session must
/// be passed into each operation within the transaction; otherwise, the operation will be
/// executed outside of the transaction.
///
/// ```rust
/// # use mongodb::{bson::{doc, Document}, error::Result, sync::{Client, ClientSession}};
/// #
/// # async fn do_stuff() -> Result<()> {
/// # let client = Client::with_uri_str("mongodb://example.com")?;
/// # let coll = client.database("foo").collection::<Document>("bar");
/// # let mut session = client.start_session(None)?;
/// session.start_transaction(None)?;
/// let result = coll.insert_one_with_session(doc! { "x": 1 }, None, &mut session)?;
/// session.commit_transaction()?;
/// # Ok(())
/// # }
/// ```
pub fn start_transaction(
&mut self,
options: impl Into<Option<TransactionOptions>>,
) -> Result<()> {
// Drive the async implementation to completion on the internal runtime.
runtime::block_on(self.async_client_session.start_transaction(options))
}
/// Commits the transaction that is currently active on this session.
///
/// ```rust
/// # use mongodb::{bson::{doc, Document}, error::Result, sync::{Client, ClientSession}};
/// #
/// # async fn do_stuff() -> Result<()> {
/// # let client = Client::with_uri_str("mongodb://example.com")?;
/// # let coll = client.database("foo").collection::<Document>("bar");
/// # let mut session = client.start_session(None)?;
/// session.start_transaction(None)?;
/// let result = coll.insert_one_with_session(doc! { "x": 1 }, None, &mut session)?;
/// session.commit_transaction()?;
/// # Ok(())
/// # }
/// ```
///
/// This operation will retry once upon failure if the connection and encountered error support
/// retryability. See the documentation
/// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on
/// retryable writes.
pub fn commit_transaction(&mut self) -> Result<()> {
// Drive the async implementation to completion on the internal runtime.
runtime::block_on(self.async_client_session.commit_transaction())
}
/// Aborts the transaction that is currently active on this session. Any open transaction will
/// be aborted automatically in the `Drop` implementation of `ClientSession`.
///
/// ```rust
/// # use mongodb::{bson::{doc, Document}, error::Result, sync::{Client, ClientSession, Collection}};
/// #
/// # async fn do_stuff() -> Result<()> {
/// # let client = Client::with_uri_str("mongodb://example.com")?;
/// # let coll = client.database("foo").collection::<Document>("bar");
/// # let mut session = client.start_session(None)?;
/// session.start_transaction(None)?;
/// match execute_transaction(coll, &mut session) {
/// Ok(_) => session.commit_transaction()?,
/// Err(_) => session.abort_transaction()?,
/// }
/// # Ok(())
/// # }
///
/// fn execute_transaction(coll: Collection<Document>, session: &mut ClientSession) -> Result<()> {
/// coll.insert_one_with_session(doc! { "x": 1 }, None, session)?;
/// coll.delete_one_with_session(doc! { "y": 2 }, None, session)?;
/// Ok(())
/// }
/// ```
///
/// This operation will retry once upon failure if the connection and encountered error support
/// retryability. See the documentation
/// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on
/// retryable writes.
pub fn abort_transaction(&mut self) -> Result<()> {
// Drive the async implementation to completion on the internal runtime.
runtime::block_on(self.async_client_session.abort_transaction())
}
/// Starts a transaction, runs the given callback, and commits or aborts the transaction.
/// Transient transaction errors will cause the callback or the commit to be retried;
/// other errors will cause the transaction to be aborted and the error returned to the
/// caller. If the callback needs to provide its own error information, the
/// [`Error::custom`](crate::error::Error::custom) method can accept an arbitrary payload that
/// can be retrieved via [`Error::get_custom`](crate::error::Error::get_custom).
pub fn with_transaction<R, F>(
    &mut self,
    mut callback: F,
    options: impl Into<Option<TransactionOptions>>,
) -> Result<R>
where
    F: for<'a> FnMut(&'a mut ClientSession) -> Result<R>,
{
    let options = options.into();
    // Overall retry budget for the whole convenient-transaction loop.
    let timeout = std::time::Duration::from_secs(120);
    let start = std::time::Instant::now();
    use crate::{
        client::session::TransactionState,
        error::{TRANSIENT_TRANSACTION_ERROR, UNKNOWN_TRANSACTION_COMMIT_RESULT},
    };
    'transaction: loop {
        self.start_transaction(options.clone())?;
        let ret = match callback(self) {
            Ok(v) => v,
            Err(e) => {
                // Only abort if the callback actually left a transaction open.
                if matches!(
                    self.async_client_session.transaction.state,
                    TransactionState::Starting | TransactionState::InProgress
                ) {
                    self.abort_transaction()?;
                }
                // Transient errors allow the whole transaction (callback
                // included) to be retried while the budget lasts.
                if e.contains_label(TRANSIENT_TRANSACTION_ERROR) && start.elapsed() < timeout {
                    continue 'transaction;
                }
                return Err(e);
            }
        };
        // The callback may have committed or aborted on its own; if so, the
        // result is final and there is nothing left to commit.
        if matches!(
            self.async_client_session.transaction.state,
            TransactionState::None
                | TransactionState::Aborted
                | TransactionState::Committed { .. }
        ) {
            return Ok(ret);
        }
        'commit: loop {
            match self.commit_transaction() {
                Ok(()) => return Ok(ret),
                Err(e) => {
                    if e.is_max_time_ms_expired_error() || start.elapsed() >= timeout {
                        return Err(e);
                    }
                    // Outcome of the commit is unknown: retry just the commit.
                    if e.contains_label(UNKNOWN_TRANSACTION_COMMIT_RESULT) {
                        continue 'commit;
                    }
                    // Transient failure during commit: restart the whole
                    // transaction, callback included.
                    if e.contains_label(TRANSIENT_TRANSACTION_ERROR) {
                        continue 'transaction;
                    }
                    return Err(e);
                }
            }
        }
    }
}
}
|
use rand::{Rng, distributions::{IndependentSample, Range}};
use std::fmt::{self, Display};
use std::str::FromStr;
use std::string::ToString;
/// A single parsed dice expression, e.g. `2x3d6+1 vs Skill-12`.
#[derive(Clone, Debug)]
pub struct DiceExpr {
    // How many times the whole expression is rolled.
    rolls: i64,
    // Number of dice thrown per roll.
    dice: i64,
    // Number of sides on each die.
    sides: i64,
    // Optional modifier: arithmetic (+ - * x × / \ ÷) or keep-best/worst (b/w).
    modifier: Option<String>,
    // Operand for `modifier` (the amount, or how many dice to keep).
    value: i64,
    // Whether this is a "versus" (success) roll against `target`.
    versus: bool,
    // Label for the skill being tested (parser defaults this to "Skill").
    tag: String,
    // Target number for versus rolls.
    target: i64,
}
impl DiceExpr {
    /// Rolls the expression once and renders the outcome as a display line.
    ///
    /// Recognised "versus" forms (GURPS 3d6, d20 with optional advantage/
    /// disadvantage, percentile) report success/failure margins; everything
    /// else shows the sum plus the individual die results.
    fn roll_once<R>(&self, mut rng: &mut R) -> String where R: Rng {
        // Uniform over [1, sides] (Range's upper bound is exclusive).
        let die = Range::new(1, self.sides + 1);
        let mut rolls: Vec<i64> = Vec::with_capacity(self.dice as usize);
        for _ in 0 .. self.dice {
            rolls.push(die.ind_sample(&mut rng));
        }
        let mut sum = rolls.iter().fold(0, |s, x| s + x);
        if let Some(ref m) = self.modifier {
            match m.as_str() {
                // "b": keep only the best `value` dice.
                "b" => {
                    let index = ::std::cmp::min(self.value, self.dice) as usize;
                    rolls.sort_by(|a, b| b.cmp(a));
                    sum = rolls[0..index].iter().fold(0, |s, x| s + x);
                }
                // "w": keep only the worst `value` dice.
                "w" => {
                    let index = ::std::cmp::min(self.value, self.dice) as usize;
                    rolls.sort_by(|a, b| a.cmp(b));
                    sum = rolls[0..index].iter().fold(0, |s, x| s + x);
                }
                "+" => sum += self.value,
                "-" => sum -= self.value,
                "*" | "x" | "×" => sum *= self.value,
                // checked_div: dividing by zero yields 0 instead of panicking.
                "/" | "\\" | "÷" => sum = sum.checked_div(self.value).unwrap_or(0),
                // The parser's regex only produces the modifiers handled above.
                _ => unreachable!(),
            }
        }
        if self.versus && self.dice == 3 && self.sides == 6 {
            // GURPS 4th Edition success roll.
            let margin = self.target - sum; // Roll under.
            let skill = format!("{}-{}", self.tag.trim(), self.target);
            // Critical success: 3-4 always; 5 at skill 15+; 6 at skill 16+.
            if sum < 5 || (self.target > 14 && sum < 6) || (self.target > 15 && sum < 7) {
                format!(
                    "{:>2} vs {}: Success by {} (CRITICAL SUCCESS)",
                    sum,
                    skill,
                    margin
                )
            // 18 always crit-fails; 17 crit-fails below skill 16; a miss by
            // 10 or more is also critical.
            } else if sum > 16 || margin <= -10 {
                if self.target > 15 && sum == 17 {
                    format!(
                        "{:>2} vs {}: Margin of {} (Automatic Failure)",
                        sum,
                        skill,
                        margin
                    )
                } else {
                    format!(
                        "{:>2} vs {}: Failure by {} (CRITICAL FAILURE)",
                        sum,
                        skill,
                        margin.abs()
                    )
                }
            } else if margin < 0 {
                format!("{:>2} vs {}: Failure by {}", sum, skill, margin.abs())
            } else {
                format!("{:>2} vs {}: Success by {}", sum, skill, margin)
            }
        // d20: plain 1d20, or 2d20 keep-best/worst 1 (advantage/disadvantage).
        } else if self.versus && self.sides == 20 &&
            (self.dice == 1 ||
                (self.dice == 2 && self.value == 1 &&
                    (matches!(self.modifier, Some(ref x) if x == "b") ||
                        matches!(self.modifier, Some(ref x) if x == "w"))))
        {
            // Generic d20 system success roll.
            let margin = sum - self.target; // Roll over.
            let skill = format!("{}{}", self.tag.trim(), self.target);
            if margin < 0 {
                format!("{:>2} vs {}: Failure by {}", sum, skill, margin.abs())
            } else {
                format!("{:>2} vs {}: Success by {}", sum, skill, margin)
            }
        } else if self.versus && self.dice == 1 && self.sides == 100 {
            // Generic percentile system success roll.
            let margin = self.target - sum; //Roll under.
            let skill = format!("{}-{}", self.tag.trim(), self.target);
            if margin < 0 {
                format!("{:>3} vs {}: Failure by {}", sum, skill, margin.abs())
            } else {
                format!("{:>3} vs {}: Success by {}", sum, skill, margin)
            }
        } else {
            // Plain roll: echo the expression, the total and each die.
            format!("{}: {:>3} {:?}", self, sum, rolls)
        }
    }
    /// Rolls the expression `rolls` times, producing one line per roll.
    fn roll<R>(&self, mut rng: &mut R) -> Vec<String> where R: Rng {
        let mut results = Vec::with_capacity(self.rolls as usize);
        for _ in 0..self.rolls {
            results.push(self.roll_once(&mut rng));
        }
        results
    }
}
impl Display for DiceExpr {
    /// Renders the expression back into dice notation, mirroring the input
    /// syntax (e.g. `2x3d6+1 vs Skill-12`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = String::new();
        if self.rolls > 1 {
            out.push_str(&format!("{}x", self.rolls));
        }
        out.push_str(&format!("{}d{}", self.dice, self.sides));
        if let Some(ref m) = self.modifier {
            out.push_str(&format!("{}{}", m, self.value));
        }
        if self.versus {
            out.push_str(&format!(" vs {}-{}", self.tag.trim(), self.target));
        }
        f.write_str(&out)
    }
}
/// An ordered collection of dice expressions parsed from one input string.
#[derive(Clone, Debug)]
pub struct DiceVec{ inner: Vec<DiceExpr> }
impl DiceVec {
    /// Creates an empty expression list.
    pub fn new() -> Self {
        DiceVec { inner: Vec::new() }
    }
    /// Rolls every contained expression in order and gathers all result lines.
    pub fn roll<R>(&self, mut rng: &mut R) -> Vec<String> where R: Rng {
        let mut results = Vec::new();
        for expr in &self.inner {
            results.extend(expr.roll(&mut rng));
        }
        results
    }
}
/// Error returned when no dice expression could be extracted from the input.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Fail, Hash, Ord, PartialOrd)]
#[fail(display = "provided string did not contain a valid dice expression")]
pub struct ParseDiceError;
impl FromStr for DiceVec {
    type Err = ParseDiceError;
    /// Extracts every dice expression found anywhere in `s`.
    ///
    /// Missing pieces fall back to GURPS-flavoured defaults: 1 repetition of
    /// 3d6, value 0, tag "Skill", target 0. Errors only when no expression
    /// matches at all.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use regex::Regex;
        lazy_static! {
            // Compiled once per process; `(?x)` verbose mode permits the
            // inline comments inside the pattern.
            static ref RE: Regex = Regex::new(r"(?x)
                (?: (?P<rolls>\d+) [*x] )?                    # repeated rolls
                (?: (?P<dice>\d+) d (?P<sides>\d+)? )         # number, optional sides
                (?: (?P<modifier>[-+*x×÷/\\bw]) (?P<value>\d+) )?   # modifier and value
                (?: \s* (?: (?P<vs>vs?) \s*?                  # versus
                    (?P<tag>\S+?.*?)? [\s-] )                 # tag
                    (?P<target>-?\d+) )?                      # target
            ").unwrap();
        }
        let mut dice: DiceVec = DiceVec::new();
        for cap in RE.captures_iter(s) {
            dice.inner.push(DiceExpr {
                rolls: cap.name("rolls")
                    .map(|c| c.as_str())
                    .unwrap_or("1")
                    .parse()
                    .unwrap_or(1),
                dice: cap.name("dice")
                    .map(|c| c.as_str())
                    .unwrap_or("3")
                    .parse()
                    .unwrap_or(3),
                sides: cap.name("sides")
                    .map(|c| c.as_str())
                    .unwrap_or("6")
                    .parse()
                    .unwrap_or(6),
                modifier: cap.name("modifier").map(|c| c.as_str().to_string()),
                value: cap.name("value")
                    .map(|c| c.as_str())
                    .unwrap_or("0")
                    .parse()
                    .unwrap_or(0),
                versus: cap.name("vs").map(|c| c.as_str()).is_some(),
                tag: cap.name("tag")
                    .map(|c| c.as_str())
                    .unwrap_or("Skill")
                    .to_string(),
                target: cap.name("target")
                    .map(|c| c.as_str())
                    .unwrap_or("0")
                    .parse()
                    .unwrap_or(0),
            });
        }
        // No match anywhere in the input -> parse failure.
        if dice.inner.is_empty() {
            Err(ParseDiceError)
        } else {
            Ok(dice)
        }
    }
}
impl Display for DiceVec {
    /// Joins the contained expressions with `", "` and delegates to `str`'s
    /// `Display`, so formatter flags (width/alignment) apply to the whole
    /// joined list, not to each element.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut parts: Vec<String> = Vec::with_capacity(self.inner.len());
        for expr in &self.inner {
            parts.push(expr.to_string());
        }
        parts.join(", ").fmt(f)
    }
}
|
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United State of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================
// This file was auto-created by LmcpGen. Modifications will be overwritten.
use avtas::lmcp::{Error, ErrorType, Lmcp, LmcpSubscription, SrcLoc, Struct, StructInfo};
use std::fmt::Debug;
/// LMCP message `uxas.messages.route.GraphNode` (auto-generated layout).
#[derive(Clone, Debug, Default)]
#[repr(C)]
pub struct GraphNode {
    // Identifier of this node (presumably unique within the route graph --
    // confirm against the MDM definition).
    pub node_id: i64,
    // Geographic position of the node.
    pub coordinates: Box<::afrl::cmasi::location3d::Location3DT>,
    // IDs of edges associated with this node.
    pub associated_edges: Vec<i64>,
}
impl PartialEq for GraphNode {
    /// Field-wise equality over every LMCP field.
    fn eq(&self, other: &GraphNode) -> bool {
        self.node_id == other.node_id
            && self.coordinates == other.coordinates
            && self.associated_edges == other.associated_edges
    }
}
impl LmcpSubscription for GraphNode {
    // Fully-qualified LMCP topic name used for pub/sub routing.
    fn subscription() -> &'static str { "uxas.messages.route.GraphNode" }
}
impl Struct for GraphNode {
    // Wire identification header for this message type (series/version/type
    // IDs as emitted by LmcpGen); matched against incoming buffers in deser.
    fn struct_info() -> StructInfo {
        StructInfo {
            exist: 1,
            series: 5931053054693474304u64,
            version: 4,
            struct_ty: 1,
        }
    }
}
impl Lmcp for GraphNode {
    // Serializes the struct-info header followed by each field in declaration
    // order; returns the total number of bytes written. `get!` bails out with
    // an error when the buffer is too short.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        let mut pos = 0;
        {
            let x = Self::struct_info().ser(buf)?;
            pos += x;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.node_id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.coordinates.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos ..));
            let writeb: usize = self.associated_edges.ser(r)?;
            pos += writeb;
        }
        Ok(pos)
    }
    // Deserializes a GraphNode, validating the struct-info header first;
    // returns the value and the number of bytes consumed.
    fn deser(buf: &[u8]) -> Result<(GraphNode, usize), Error> {
        let mut pos = 0;
        let (si, u) = StructInfo::deser(buf)?;
        pos += u;
        if si == GraphNode::struct_info() {
            let mut out: GraphNode = Default::default();
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.node_id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Box<::afrl::cmasi::location3d::Location3DT>, usize) = Lmcp::deser(r)?;
                out.coordinates = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos ..));
                let (x, readb): (Vec<i64>, usize) = Lmcp::deser(r)?;
                out.associated_edges = x;
                pos += readb;
            }
            Ok((out, pos))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    // Serialized size in bytes. The constant 15 is fixed framing overhead
    // (presumably the struct-info header plus field framing emitted by
    // LmcpGen -- confirm against the generator).
    fn size(&self) -> usize {
        let mut size = 15;
        size += self.node_id.size();
        size += self.coordinates.size();
        size += self.associated_edges.size();
        size
    }
}
/// Object-safe accessor trait for GraphNode, enabling trait-object use
/// (`Box<GraphNodeT>`) with downcasts via the `as_*` methods.
pub trait GraphNodeT: Debug + Send {
    // Downcast hooks; the blanket defaults return None, the concrete
    // GraphNode impl overrides them with Some(self).
    fn as_uxas_messages_route_graph_node(&self) -> Option<&GraphNode> { None }
    fn as_mut_uxas_messages_route_graph_node(&mut self) -> Option<&mut GraphNode> { None }
    fn node_id(&self) -> i64;
    fn node_id_mut(&mut self) -> &mut i64;
    fn coordinates(&self) -> &Box<::afrl::cmasi::location3d::Location3DT>;
    fn coordinates_mut(&mut self) -> &mut Box<::afrl::cmasi::location3d::Location3DT>;
    fn associated_edges(&self) -> &Vec<i64>;
    fn associated_edges_mut(&mut self) -> &mut Vec<i64>;
}
impl Clone for Box<GraphNodeT> {
    /// Clones via downcast to the concrete GraphNode; any trait object that
    /// is not a GraphNode is unreachable by construction.
    fn clone(&self) -> Box<GraphNodeT> {
        match GraphNodeT::as_uxas_messages_route_graph_node(self.as_ref()) {
            Some(node) => Box::new(node.clone()),
            None => unreachable!(),
        }
    }
}
impl Default for Box<GraphNodeT> {
    // Default trait object wraps a default-constructed concrete GraphNode.
    fn default() -> Box<GraphNodeT> { Box::new(GraphNode::default()) }
}
impl PartialEq for Box<GraphNodeT> {
    /// Trait objects compare equal only when both downcast to GraphNode and
    /// the underlying values are equal.
    fn eq(&self, other: &Box<GraphNodeT>) -> bool {
        match (
            GraphNodeT::as_uxas_messages_route_graph_node(self.as_ref()),
            GraphNodeT::as_uxas_messages_route_graph_node(other.as_ref()),
        ) {
            (Some(lhs), Some(rhs)) => lhs == rhs,
            _ => false,
        }
    }
}
impl Lmcp for Box<GraphNodeT> {
    // Delegates to the concrete GraphNode; non-GraphNode trait objects are
    // unreachable by construction.
    fn ser(&self, buf: &mut[u8]) -> Result<usize, Error> {
        if let Some(x) = GraphNodeT::as_uxas_messages_route_graph_node(self.as_ref()) {
            x.ser(buf)
        } else {
            unreachable!()
        }
    }
    // Validates the struct-info header, then deserializes a concrete
    // GraphNode and boxes it as a trait object.
    fn deser(buf: &[u8]) -> Result<(Box<GraphNodeT>, usize), Error> {
        let (si, _) = StructInfo::deser(buf)?;
        if si == GraphNode::struct_info() {
            let (x, readb) = GraphNode::deser(buf)?;
            Ok((Box::new(x), readb))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }
    // Size of the underlying concrete value's serialized form.
    fn size(&self) -> usize {
        if let Some(x) = GraphNodeT::as_uxas_messages_route_graph_node(self.as_ref()) {
            x.size()
        } else {
            unreachable!()
        }
    }
}
impl GraphNodeT for GraphNode {
    // Concrete type: downcasts succeed, accessors expose the fields directly.
    fn as_uxas_messages_route_graph_node(&self) -> Option<&GraphNode> { Some(self) }
    fn as_mut_uxas_messages_route_graph_node(&mut self) -> Option<&mut GraphNode> { Some(self) }
    fn node_id(&self) -> i64 { self.node_id }
    fn node_id_mut(&mut self) -> &mut i64 { &mut self.node_id }
    fn coordinates(&self) -> &Box<::afrl::cmasi::location3d::Location3DT> { &self.coordinates }
    fn coordinates_mut(&mut self) -> &mut Box<::afrl::cmasi::location3d::Location3DT> { &mut self.coordinates }
    fn associated_edges(&self) -> &Vec<i64> { &self.associated_edges }
    fn associated_edges_mut(&mut self) -> &mut Vec<i64> { &mut self.associated_edges }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::*;
    impl Arbitrary for GraphNode {
        // Random instance for property tests; the boxed coordinates are
        // generated via the concrete Location3D's Arbitrary impl.
        fn arbitrary<G: Gen>(_g: &mut G) -> GraphNode {
            GraphNode {
                node_id: Arbitrary::arbitrary(_g),
                coordinates: Box::new(::afrl::cmasi::location3d::Location3D::arbitrary(_g)),
                associated_edges: Arbitrary::arbitrary(_g),
            }
        }
    }
    quickcheck! {
        // Property: ser writes exactly size() bytes. Inputs whose edge list
        // cannot fit the wire format's length field (presumably a u16 --
        // confirm against LmcpGen) are discarded.
        fn serializes(x: GraphNode) -> Result<TestResult, Error> {
            use std::u16;
            if x.associated_edges.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            Ok(TestResult::from_bool(sx == x.size()))
        }
        // Property: deser(ser(x)) == x and consumes the same byte count.
        fn roundtrips(x: GraphNode) -> Result<TestResult, Error> {
            use std::u16;
            if x.associated_edges.len() > (u16::MAX as usize) { return Ok(TestResult::discard()); }
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            let (y, sy) = GraphNode::deser(&buf)?;
            Ok(TestResult::from_bool(sx == sy && x == y))
        }
    }
}
|
use shorthand::ShortHand;
// Marker type that deliberately lacks a `Clone` impl.
struct NotClone;
#[derive(ShortHand)]
struct Example {
    // Enabling `clone` on a non-`Clone` field should be rejected by the
    // derive (presumably a trybuild-style compile-fail case -- confirm).
    #[shorthand(enable(clone))]
    value: NotClone,
}
// Present only so the file compiles as a standalone test target.
fn main() {}
|
use std::collections::{HashMap, HashSet};
use std::iter::FromIterator;
use std::fs::File;
use std::io::{self, BufReader};
use std::io::prelude::*;
use std::error::Error;
/// Parses an orbit-map file where each line reads `A)B` (B orbits A).
///
/// Returns a map from each body to the list of bodies directly orbiting it.
/// Every body appears as a key, even leaves (with an empty child list), so
/// later lookups never miss.
///
/// # Errors
/// Returns any I/O error from opening or reading the file. Malformed lines
/// (no `)` separator) are skipped rather than panicking.
fn parse_input(file: &str) -> Result<HashMap<String, Vec<String>>, Box<dyn Error>> {
    let mut map: HashMap<String, Vec<String>> = HashMap::new();
    let f = BufReader::new(File::open(file)?);
    for line in f.lines() {
        // Propagate read errors via `?` (previously `unwrap`, which defeated
        // the function's Result return type).
        let line = line?;
        let mut planets = line.splitn(2, ')');
        // Previously `planets[1]` panicked on lines without a separator.
        let (orbited, orbiting) = match (planets.next(), planets.next()) {
            (Some(a), Some(b)) => (a.to_owned(), b.to_owned()),
            _ => continue,
        };
        // Ensure the child exists as a key too.
        map.entry(orbiting.clone()).or_default();
        map.entry(orbited).or_default().push(orbiting);
    }
    Ok(map)
}
/// Day 6 part 1: total number of direct and indirect orbits, counted by
/// walking the tree from the universal Center Of Mass ("COM").
pub fn part1() -> Result<u32, Box<dyn Error>> {
    let root = "COM";
    // NOTE(review): path is relative to the process working directory.
    let input_map = parse_input("src/level6/input.txt")?;
    Ok(visit(root, 0, &input_map))
}
/// Day 6 part 2: prints the bodies unique to YOU's and SAN's ancestor
/// chains; the printed set size is the number of orbital transfers needed.
pub fn part2() -> Result<(), Box<dyn Error>> {
    let input_map = parse_input("src/level6/input.txt")?;
    let parent_map = parent_map(&input_map);
    let your_ancestors = get_ancestors("YOU", &parent_map);
    let san_ancestors = get_ancestors("SAN", &parent_map);
    let your_ancestors: HashSet<&str> = HashSet::from_iter(your_ancestors.iter().cloned());
    let san_ancestors: HashSet<&str> = HashSet::from_iter(san_ancestors.iter().cloned());
    // Ancestors in exactly one chain = hops from each side to the common one.
    let difference: HashSet<_> = your_ancestors.symmetric_difference(&san_ancestors).collect();
    println!("{:?}", difference);
    println!("{:?}", difference.len());
    Ok(())
}
/// Returns the chain of ancestors of `target_node`, nearest first, ending at
/// the root (a node with no entry in `parent_map`).
///
/// Iterative rather than recursive: the previous version allocated a fresh
/// `Vec` per level (O(n^2) total work on a chain of length n) and grew the
/// call stack with the chain depth.
fn get_ancestors<'a>(target_node: &str, parent_map: &'a HashMap<String, String>) -> Vec<&'a str> {
    let mut ancestors = Vec::new();
    let mut current = target_node;
    while let Some(parent) = parent_map.get(current) {
        ancestors.push(parent.as_str());
        current = parent;
    }
    ancestors
}
/// Inverts a child-list map into a child -> parent map.
fn parent_map(map: &HashMap<String, Vec<String>>) -> HashMap<String, String> {
    map.iter()
        .flat_map(|(parent, children)| {
            children
                .iter()
                .map(move |child| (child.clone(), parent.clone()))
        })
        .collect()
}
/// Sums the orbit depths of every body in the subtree rooted at `node`.
/// Each body contributes its own depth (number of hops from the root), so
/// the total equals the count of direct plus indirect orbits.
fn visit(node: &str, depth: u32, map: &HashMap<String, Vec<String>>) -> u32 {
    let depth = depth + 1;
    // A node with no entry simply has no children (previously `map[node]`
    // panicked on a missing key).
    map.get(node).map_or(0, |children| {
        children
            .iter()
            .map(|child| depth + visit(child, depth, map))
            .sum()
    })
}
|
/// Manhattan (taxicab) distance between two grid points.
fn distance(a: &(i32, i32), b: &(i32, i32)) -> i32 {
    let dx = (a.0 - b.0).abs();
    let dy = (a.1 - b.1).abs();
    dx + dy
}
/// Advent of Code 2018 day 6: reads `x, y` coordinates from stdin and prints
/// the size of the largest *finite* closest-point region.
fn main() {
    use std::io::{self, BufRead};
    let stdin = io::stdin();
    let mut coordinates = Vec::new();
    for line in stdin.lock().lines() {
        let line = line.unwrap();
        let mut xy = line.split(", ").map(|x| x.parse::<i32>().unwrap());
        let x = xy.next().unwrap();
        let y = xy.next().unwrap();
        coordinates.push((x, y));
    }
    // Fixed scan window; assumes all input coordinates fall well inside
    // these bounds -- TODO confirm against the actual puzzle input.
    let xmin = -50;
    let ymin = -50;
    let xmax = 500;
    let ymax = 500;
    let mut area = vec![0; coordinates.len()];
    // First pass: credit each grid cell to its uniquely-closest coordinate.
    for x in xmin..xmax {
        for y in ymin..ymax {
            let min_dist = coordinates.iter()
                .map(|xy| distance(&xy, &(x, y))).min().unwrap();
            let mut closest_points = coordinates.iter().enumerate()
                .filter(|&(_i, &xy)| distance(&xy, &(x, y)) <= min_dist);
            let first_el = closest_points.next().unwrap().0;
            // A tie (a second equally-near coordinate) counts for nobody.
            area[first_el] += match closest_points.next() {
                Some(_) => 0,
                None => 1,
            }
        }
    }
    // Second pass: a coordinate that uniquely owns any border cell extends to
    // infinity; zero its area so it cannot win.
    for seed in xmin..xmax {
        for &(x, y) in vec![(xmin, seed), (xmax-1, seed), (seed, ymin), (seed, ymax-1)].iter(){
            let min_dist = coordinates.iter()
                .map(|xy| distance(&xy, &(x, y))).min().unwrap();
            let mut closest_points = coordinates.iter().enumerate()
                .filter(|&(_i, &xy)| distance(&xy, &(x, y)) <= min_dist);
            let first_el = closest_points.next().unwrap().0;
            area[first_el] = match closest_points.next() {
                Some(_) => area[first_el],
                None => 0,
            }
        }
    }
    println!("{:?}", area.iter().max());
}
|
extern crate chrono;
extern crate getopts;
// std libs
use std::io;
use std::fs::{self, DirEntry};
use std::path::Path;
use std::os::unix::fs::PermissionsExt;
use std::env;
// getops
use getopts::Options;
// chrono
use chrono::prelude::NaiveDateTime;
/// print_all_file_info
///
/// Prints all sorts of file information
/// similar to running `ls -l`
///
/// @param entry: &DirEntry
/// @return std::io::Result<()>
fn print_all_file_info(entry: &DirEntry) {
    let name = entry
        .path()
        .file_name()
        .unwrap()
        .to_string_lossy()
        .into_owned();
    // Grab the metadata from the file
    // NOTE(review): `expect` panics if the file disappears mid-listing.
    let meta = std::fs::metadata(&entry.path())
        .expect(&format!("Unable to get info about {:?}", &entry.path()));
    // get the permissions
    let perm = meta.permissions();
    // get the last modified date in seconds, converted from u64 to i64
    let mod_date_secs = meta.modified()
        .unwrap()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_secs() as i64;
    // get the last modified date
    let time = NaiveDateTime::from_timestamp(mod_date_secs, 0);
    let files_in_dir = get_file_count_in_dir(&entry.path());
    // Icon for file or directory
    let mut icon = "f";
    // Change to directory if it is a directory
    if meta.is_dir() {
        icon = "d";
    }
    // Tab-separated columns: type, entry count, size, mode, mtime, name.
    // NOTE(review): `perm.mode()` prints in decimal, not octal like ls.
    println!(
        "{icon}\t{files_in}\t{size}\t{mode}\t{time} {name}",
        icon = icon,
        files_in = files_in_dir,
        size = meta.len(),
        mode = perm.mode(),
        name = name,
        time = time,
    );
}
/// get_file_count_in_dir
/// "ls -l" shows a file count. A single file counts as "1" and inside directories
/// everything adds +1 to the count, including the default "." and ".." folders.
fn get_file_count_in_dir(dir: &Path) -> usize {
    // If it is no directory then it is a file which counts as "1".
    let mut res: usize = 1;
    if dir.is_dir() {
        // On *nix systems every directory has "." and ".." inside. Since we
        // set res to 1 above we add +1 here to account for both.
        res += 1;
        // BUG FIX: the old code did `for _ in fs::read_dir(dir)`, which
        // iterates the `io::Result` itself -- that yields at most ONE item
        // (the `ReadDir` handle), not one item per directory entry, so the
        // count was always wrong. Count the actual entries instead.
        if let Ok(entries) = fs::read_dir(dir) {
            res += entries.count();
        }
    }
    res
}
/// print_file_info
/// Wrapper function for `print_all_file_info`
/// In the future this function should run the `ls` like code
/// or the `tree` like code.
fn print_file_info(entry: &DirEntry) {
    // Currently only the detailed (`ls -l`-style) view exists.
    print_all_file_info(entry);
}
/// list_files_and_dirs
/// List all files and folders in current directory
/// with one detailed line per entry; silently does nothing if `dir` is not
/// a directory.
fn list_files_and_dirs(dir: &Path) -> io::Result<()> {
    if dir.is_dir() {
        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            print_file_info(&entry);
        }
    }
    Ok(())
}
/// list_files_and_dirs_silent
/// List the names of all files and folders in the given directory on a
/// single tab-separated line (no metadata), like a bare `ls`.
/// NOTE(review): no trailing newline is printed after the list.
fn list_files_and_dirs_silent(dir: &Path) -> io::Result<()> {
    if dir.is_dir() {
        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            print!("{}\t", entry
                .path()
                .file_name()
                .unwrap()
                .to_string_lossy()
                .into_owned()
            );
        }
    }
    Ok(())
}
// Prints the getopts-generated usage text for this program to stdout.
fn print_usage(program: &str, opts: Options) {
    let brief = format!("Usage: {} [options]", program);
    print!("{}", opts.usage(&brief));
}
/// Entry point: parses command-line flags and lists the current directory,
/// either in detailed (`ls -l`-like) or simple name-only form.
fn main() {
    let args: Vec<String> = env::args().collect();
    let program = args[0].clone();
    let mut opts = Options::new();
    opts.optflag("h", "help", "print this help menu");
    opts.optflag("s", "simple", "only show file names");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        // `panic!` with a non-literal first argument is deprecated and a hard
        // error in the 2021 edition; route it through a format string.
        Err(f) => panic!("{}", f),
    };
    if matches.opt_present("h") {
        print_usage(&program, opts);
        return;
    }
    // Listing errors are deliberately ignored (best-effort output).
    if matches.opt_present("s") {
        let _r = list_files_and_dirs_silent(&Path::new("."));
    } else {
        let _r = list_files_and_dirs(&Path::new("."));
    }
}
|
#[macro_use]
extern crate structure;
use std::fs::File;
use std::io::{Read,Seek,SeekFrom};
/// Image metadata probed from a file header.
#[derive(Debug)]
pub struct ImInfo {
    // Width in pixels; 0 if the format was not recognized.
    width: i64,
    // Height in pixels; 0 if the format was not recognized.
    height: i64,
    // One of "gif", "png", "bmp", "jpeg", or "unknown".
    format: String,
}
pub fn imsz(fname: &str) -> ImInfo {
let mut info = ImInfo { width: 0, height: 0, format: String::from("unknown")};
let mut file = File::open(fname).unwrap();
let mut preamble = [0u8; 26];
file.read(&mut preamble).unwrap();
if preamble[..6] == *b"GIF87a" || preamble[..6] == *b"GIF89a" {
info.format = "gif".to_string();
let s = structure!("<HH");
let (w, h): (u16, u16) = s.unpack(&preamble[6..10]).unwrap();
info.width = w.into();
info.height = h.into();
} else if preamble[..8] == *b"\x89PNG\r\n\x1a\n" {
info.format = "png".to_string();
let s = structure!(">II");
if preamble[12..16] == *b"IHDR" {
let (w, h): (u32, u32) = s.unpack(&preamble[16..24]).unwrap();
info.width = w.into();
info.height = h.into();
} else {
let (w, h): (u32, u32) = s.unpack(&preamble[8..16]).unwrap();
info.width = w.into();
info.height = h.into();
}
} else if preamble[..2] == *b"BM" {
info.format = "bmp".to_string();
let s = structure!("<I");
let header_size: u32 = s.unpack(&preamble[14..18]).unwrap().0;
if header_size == 12 {
let s = structure!("<HH");
let (w, h): (u16, u16) = s.unpack(&preamble[18..22]).unwrap();
info.width = w.into();
info.height = h.into();
} else {
let s = structure!("<ii");
let (w, h): (i32, i32) = s.unpack(&preamble[18..26]).unwrap();
info.width = w.into();
// h is negative when stored upside down
info.height = h.abs().into();
}
} else if preamble[..2] == *b"\xff\xd8" {
info.format = "jpeg".to_string();
let _ = file.seek(SeekFrom::Start(2));
let mut buf1: [u8; 1] = [0];
let mut buf2: [u8; 2] = [0; 2];
let mut buf4: [u8; 4] = [0; 4];
file.read(&mut buf1).unwrap();
while buf1[0] != b'\xda' && buf1[0] != 0 {
while buf1[0] != b'\xff' {
file.read(&mut buf1).unwrap();
}
while buf1[0] == b'\xff' {
file.read(&mut buf1).unwrap();
}
if buf1[0] >= 0xc0 && buf1[0] <= 0xc3 {
let _ = file.seek(SeekFrom::Current(3));
let s = structure!(">HH");
file.read(&mut buf4).unwrap();
let (w, h): (u16, u16) = s.unpack(&buf4).unwrap();
info.width = w.into();
info.height = h.into();
break;
}
file.read(&mut buf2).unwrap();
let s = structure!(">H");
let b: u16 = s.unpack(&buf2).unwrap().0;
let offset: i64 = (b - 2).into();
let _ = file.seek(SeekFrom::Current(offset));
file.read(&mut buf1).unwrap();
}
}
return info
} |
use std::sync::RwLock;
// ------------------------------------------------------------------------------------------------
// Public Macros
// ------------------------------------------------------------------------------------------------
///
/// Used by the library to report user messages, in interactive mode this will write to `stdout`
/// otherwise it will log at level `info`.
///
/// Accepts `format!`-style arguments; expands to `reporter::report_message`.
///
#[macro_export]
macro_rules! reportln {
    ($($arg:tt)*) => ({
        $crate::reporter::report_message(&format!($($arg)*), false);
    })
}
///
/// Used by the library to report user messages, in interactive mode this will write to `stderr`
/// otherwise it will log at level `error`.
///
/// Accepts `format!`-style arguments; expands to `reporter::report_message`.
///
#[macro_export]
macro_rules! ereportln {
    ($($arg:tt)*) => ({
        $crate::reporter::report_message(&format!($($arg)*), true);
    })
}
// ------------------------------------------------------------------------------------------------
// Public Functions
// ------------------------------------------------------------------------------------------------
lazy_static! {
    // Process-wide flag; defaults to non-interactive (log-based) reporting.
    static ref IS_INTERACTIVE: RwLock<bool> = RwLock::new(false);
}
///
/// Set whether the library is part of an interactive tool or not. This affects the behavior of
/// the `reportln` and `ereportln` macros.
///
pub fn set_is_interactive(is_interactive: bool) {
    *IS_INTERACTIVE.write().unwrap() = is_interactive;
}
///
/// Returns whether the library is part of an interactive tool or not.
///
pub fn is_interactive() -> bool {
reportln!("{}", "str");
*IS_INTERACTIVE.read().unwrap()
}
// Backing function for the `reportln!`/`ereportln!` macros: interactive mode
// writes to stdout/stderr, otherwise messages go to the `log` crate at
// info/error level.
#[doc(hidden)]
pub fn report_message(msg: &str, error: bool) {
    if is_interactive() {
        if error {
            eprintln!("{}", msg);
        } else {
            println!("{}", msg);
        }
    } else if error {
        error!("{}", msg);
    } else {
        info!("{}", msg);
    }
}
|
#[doc = "Register `CFBLR` reader"]
pub type R = crate::R<CFBLR_SPEC>;
#[doc = "Register `CFBLR` writer"]
pub type W = crate::W<CFBLR_SPEC>;
#[doc = "Field `CFBLL` reader - Color Frame Buffer Line Length"]
pub type CFBLL_R = crate::FieldReader<u16>;
#[doc = "Field `CFBLL` writer - Color Frame Buffer Line Length"]
// 13-bit field at bit offset 0.
pub type CFBLL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 13, O, u16>;
#[doc = "Field `CFBP` reader - Color Frame Buffer Pitch in bytes"]
pub type CFBP_R = crate::FieldReader<u16>;
#[doc = "Field `CFBP` writer - Color Frame Buffer Pitch in bytes"]
// 13-bit field at bit offset 16.
pub type CFBP_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 13, O, u16>;
impl R {
    #[doc = "Bits 0:12 - Color Frame Buffer Line Length"]
    #[inline(always)]
    pub fn cfbll(&self) -> CFBLL_R {
        // Mask off the low 13 bits.
        CFBLL_R::new((self.bits & 0x1fff) as u16)
    }
    #[doc = "Bits 16:28 - Color Frame Buffer Pitch in bytes"]
    #[inline(always)]
    pub fn cfbp(&self) -> CFBP_R {
        // Shift down to bit 0, then mask off 13 bits.
        CFBP_R::new(((self.bits >> 16) & 0x1fff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:12 - Color Frame Buffer Line Length"]
    #[inline(always)]
    #[must_use]
    pub fn cfbll(&mut self) -> CFBLL_W<CFBLR_SPEC, 0> {
        CFBLL_W::new(self)
    }
    #[doc = "Bits 16:28 - Color Frame Buffer Pitch in bytes"]
    #[inline(always)]
    #[must_use]
    pub fn cfbp(&mut self) -> CFBP_W<CFBLR_SPEC, 16> {
        CFBP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Layerx Color Frame Buffer Length Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfblr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfblr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFBLR_SPEC;
impl crate::RegisterSpec for CFBLR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfblr::R`](R) reader structure"]
impl crate::Readable for CFBLR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfblr::W`](W) writer structure"]
impl crate::Writable for CFBLR_SPEC {
    // No write-1-to-clear / write-0-to-clear semantics on this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFBLR to value 0"]
impl crate::Resettable for CFBLR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Implementation submodules of the variable store.
mod file;
mod guid_group;
mod memory;
mod store_value;
mod variable;
mod vendor_group;
// Internal building blocks shared by the store implementations.
use self::guid_group::GuidGroup;
use self::store_value::StoreValue;
use self::variable::VariableStore;
use self::vendor_group::VendorGroup;
// Public API: file-backed and in-memory store front-ends.
pub use self::file::FileStore;
pub use self::memory::MemoryStore;
|
use crate::syntax_kind::SyntaxKindId;
use text_unit::TextRange;
use text_unit::TextUnit;
use core::fmt;
use std::fmt::Formatter;
use std::fmt::Error;
use smol_str::SmolStr;
use crate::syntax::SyntaxDefinition;
use crate::escape_str;
use rowan::Types;
/// Compact token record: kind plus length only. Positions are implicit in
/// the stream order and reconstructed by `convert_to_fixed`.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct TokenInfo {
    pub token_type: SyntaxKindId,
    // Length of the token's text in text units.
    pub len: TextUnit
}
/// Converts length-only tokens into "fat" tokens carrying their absolute
/// range and a copy of their text, by accumulating offsets from zero.
pub fn convert_to_fixed<'a>(text: &'a str, tokens: Vec<TokenInfo>, def: &'a SyntaxDefinition) -> Vec<FixedToken<'a>> {
    let mut result = Vec::with_capacity(tokens.len());
    let mut offset = TextUnit::from(0);
    for info in &tokens {
        let range = TextRange::from_to(offset, offset + info.len);
        offset += info.len;
        result.push(FixedToken::new(info.token_type, range, SmolStr::new(&text[range]), def));
    }
    result
}
/// Fat fixed representation of token used to display it
pub struct FixedToken<'a> {
    pub token_type: SyntaxKindId,
    // Absolute range of the token within the source text.
    pub range: TextRange,
    // Owned copy of the token's text.
    pub text: SmolStr,
    // Syntax definition used to resolve the token type's display name.
    pub def: &'a SyntaxDefinition
}
impl<'a> FixedToken<'a> {
    // Plain field-by-field constructor.
    pub fn new(token_type: SyntaxKindId, range: TextRange, text: SmolStr, def: &'a SyntaxDefinition) -> Self {
        FixedToken { token_type, range, text, def }
    }
}
impl <'a>fmt::Display for FixedToken<'a> {
    // Renders as `NAME[range]@"escaped text"` (rowan-style dump format).
    fn fmt<'b>(&self, f: &mut Formatter<'b>) -> Result<(), Error> {
        let name = self.def._syntax_kind_info(self.token_type).name;
        write!(f, "{}{}@{}", name, self.range, escape_str(self.text.as_str()))
    }
}
|
use super::*;
use cgmath::{Point3, Matrix4};
// Unit under test.
type Subject = Illuminator;
mod radiance_and_direction {
    use super::*;
    #[test]
    fn it_builds_a_vector_pointing_towards_the_light() {
        let origin = Point3::new(4.0, 5.0, 6.0);
        let intersection = Intersection::new(0.0, origin, Vector3::new(0.0, 0.0, 0.0));
        let light = Light::new(5.0);
        // Light positioned at (1, 2, 3) by the transform.
        let translation = Matrix4::from_translation(Vector3::new(1.0, 2.0, 3.0));
        let transform = Transform::new(translation);
        let (_, direction) = Subject::radiance_and_direction(&intersection, &light, &transform);
        // (1,2,3) - (4,5,6) = (-3,-3,-3), normalized.
        assert_eq!(direction, Vector3::new(-3.0, -3.0, -3.0).normalize());
    }
    #[test]
    fn it_calculates_radiance_by_dividing_the_lights_power_by_the_radius_squared() {
        let origin = Point3::new(4.0, 5.0, 6.0);
        let intersection = Intersection::new(0.0, origin, Vector3::new(0.0, 0.0, 0.0));
        let light = Light::new(5.0);
        let translation = Matrix4::from_translation(Vector3::new(1.0, 2.0, 3.0));
        let transform = Transform::new(translation);
        let (radiance, _) = Subject::radiance_and_direction(&intersection, &light, &transform);
        // Squared distance is 9 + 9 + 9 = 27, so radiance = power / 27.
        assert_eq!(radiance, 5.0 / 27.0);
    }
}
|
pub mod sip_server;
/// Prints an incoming UDP packet's peer address and payload for debugging.
pub fn debug(udp_tuple: &models::server::UdpTuple) {
    println!(
        "\nRequest from {}: \n{}",
        udp_tuple.peer,
        // Lossy conversion: a debug printer should never panic on non-UTF-8
        // payloads (the previous `expect("string")` did). Invalid bytes are
        // rendered as U+FFFD, and the intermediate `to_vec` copy is avoided.
        String::from_utf8_lossy(&udp_tuple.bytes)
    );
}
|
#[macro_use]
extern crate log;
// Emulator building blocks, wired together by the `gameboy` module.
mod cpu;
mod gameboy;
mod instruction;
mod mmu;
mod registers;
// Public entry point: the only type this crate root re-exports.
pub use gameboy::Gameboy;
|
//! The module contains [`Peaker`] trait and its implementations to be used in [`Height`] and [`Width`].
//!
//! [`Width`]: crate::settings::width::Width
//! [`Height`]: crate::settings::height::Height
/// A strategy of width function.
/// It determines the order how the function is applied.
pub trait Peaker {
    /// Creates a new instance.
    fn create() -> Self;
    /// This function returns a column index which will be changed.
    /// Or `None` if no changes are necessary.
    fn peak(&mut self, min_widths: &[usize], widths: &[usize]) -> Option<usize>;
}
/// A Peaker which goes over column 1 by 1.
#[derive(Debug, Default, Clone)]
pub struct PriorityNone {
    // Round-robin cursor: index of the next column to consider.
    i: usize,
}
impl Peaker for PriorityNone {
    fn create() -> Self {
        Self { i: 0 }
    }
    // Returns the next non-empty column in round-robin order, or `None`
    // when every column width is zero.
    fn peak(&mut self, _: &[usize], widths: &[usize]) -> Option<usize> {
        // Guard: an empty slice previously panicked on `widths[i]`.
        if widths.is_empty() {
            return None;
        }
        let mut i = self.i;
        // Guard: a stale cursor from a previous call could be out of range
        // if `widths` shrank between calls.
        if i >= widths.len() {
            i = 0;
        }
        let mut count_empty = 0;
        while widths[i] == 0 {
            i += 1;
            if i >= widths.len() {
                i = 0;
            }
            count_empty += 1;
            // Every column is empty -- nothing left to change.
            if count_empty == widths.len() {
                return None;
            }
        }
        let col = i;
        // Advance the cursor (wrapping) so the next call starts at the
        // following column.
        i += 1;
        if i >= widths.len() {
            i = 0;
        }
        self.i = i;
        Some(col)
    }
}
/// A Peaker which goes over the biggest column first.
#[derive(Debug, Default, Clone)]
pub struct PriorityMax;
impl Peaker for PriorityMax {
    fn create() -> Self {
        Self
    }
    // Returns the widest column, or `None` when all widths are zero.
    fn peak(&mut self, _: &[usize], widths: &[usize]) -> Option<usize> {
        // `max_by_key` yields `None` for an empty slice, which previously
        // caused an `unwrap` panic; a zero width means nothing to change.
        (0..widths.len())
            .max_by_key(|&i| widths[i])
            .filter(|&col| widths[col] != 0)
    }
}
/// A Peaker which goes over the smallest column first.
#[derive(Debug, Default, Clone)]
pub struct PriorityMin;
impl Peaker for PriorityMin {
    fn create() -> Self {
        Self
    }
    fn peak(&mut self, min_widths: &[usize], widths: &[usize]) -> Option<usize> {
        // Only columns still above their minimum width are candidates.
        // `min_by_key` yields `None` when no candidate remains; the previous
        // code unwrapped it and panicked once every column reached its
        // minimum (or on an empty table).
        let col = (0..widths.len())
            .filter(|&i| min_widths.is_empty() || widths[i] > min_widths[i])
            .min_by_key(|&i| widths[i])?;
        if widths[col] == 0 {
            None
        } else {
            Some(col)
        }
    }
}
|
use azure_core::AddAsHeader;
use http::request::Builder;
/// The raw 16-byte MD5 digest of a blob's content, sent as the
/// `x-ms-blob-content-md5` header.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct BlobContentMD5([u8; 16]);
impl From<md5::Digest> for BlobContentMD5 {
    fn from(md5: md5::Digest) -> Self {
        BlobContentMD5(md5.0)
    }
}
impl AddAsHeader for BlobContentMD5 {
    // The digest is base64-encoded before being placed in the header.
    fn add_as_header(&self, builder: Builder) -> Builder {
        builder.header("x-ms-blob-content-md5", base64::encode(self.0))
    }
    fn add_as_header2(
        &self,
        request: &mut azure_core::Request,
    ) -> Result<(), azure_core::HTTPHeaderError> {
        // `from_str` can only fail on invalid header characters, which
        // base64 output never contains; the `?` is for trait conformance.
        request.headers_mut().append(
            "x-ms-blob-content-md5",
            http::header::HeaderValue::from_str(&base64::encode(self.0))?,
        );
        Ok(())
    }
}
|
/// Converts `n` into raindrop speak: "Pling" when divisible by 3, "Plang"
/// by 5, "Plong" by 7 (concatenated in that order); otherwise the decimal
/// representation of `n` itself.
pub fn raindrops(n: u32) -> String {
    const SOUNDS: [(u32, &str); 3] = [(3, "Pling"), (5, "Plang"), (7, "Plong")];
    let drops: String = SOUNDS
        .iter()
        .filter(|(factor, _)| n % factor == 0)
        .map(|(_, sound)| *sound)
        .collect();
    if drops.is_empty() {
        n.to_string()
    } else {
        drops
    }
}
|
/*!
```rudra-poc
[target]
crate = "http"
version = "0.1.19"
[report]
issue_url = "https://github.com/hyperium/http/issues/352"
issue_date = 2019-11-16
rustsec_url = "https://github.com/RustSec/advisory-db/pull/217"
rustsec_id = "RUSTSEC-2019-0033"
[[bugs]]
analyzer = "Manual"
bug_class = "Other"
rudra_report_locations = []
```
!*/
#![forbid(unsafe_code)]
use http::header::{HeaderMap, HOST};
fn main() {
    // Proof-of-concept for hyperium/http#352 (RUSTSEC-2019-0033):
    // `HeaderMap::reserve` performs unchecked capacity arithmetic.
    // NOTE: this program is intentionally buggy and does not terminate —
    // do not "fix" it; it exists to reproduce the advisory.
    let mut map = HeaderMap::<u32>::with_capacity(32);
    dbg!(map.capacity());
    // map size becomes larger than MAX_SIZE
    map.reserve(50000);
    dbg!(map.capacity());
    // debug mode: panics with integer overflow
    // release mode: the map size silently overflows to 0
    map.reserve(std::usize::MAX - 100000);
    map.insert("host", 42);
    // this calls grow(0), which causes infinite loop
    map.reserve(std::usize::MAX - 100000);
}
|
use std::collections::HashMap;
use std::hash::Hash;
use std::io;
use std::io::prelude::*;
/// Returns every key that is present in all of the given maps.
///
/// The result is in arbitrary (hash) order and is empty when `hms` is empty.
pub fn find_common_keys<K, V>(hms: &[HashMap<K, V>]) -> Vec<K>
    where K: Clone + Eq + Hash
{
    // Tally how many maps each key appears in (a key occurs at most once
    // per map), then keep keys whose tally equals the number of maps.
    let mut occurrences: HashMap<&K, usize> = HashMap::new();
    for hm in hms {
        for key in hm.keys() {
            *occurrences.entry(key).or_insert(0) += 1;
        }
    }
    occurrences
        .into_iter()
        .filter(|&(_, count)| count == hms.len())
        .map(|(key, _)| key.clone())
        .collect()
}
/// Formats a byte count as an approximate human-readable size.
///
/// Values above 99 MiB render as "~NMB", above 99 KiB as "~NKB", and
/// everything else as "~NB" (integer division, truncating).
pub fn format_num_bytes(num: u64) -> String {
    const KB: u64 = 1024;
    const MB: u64 = 1024 * 1024;
    match num {
        n if n > 99 * MB => format!("~{}MB", n / MB),
        n if n > 99 * KB => format!("~{}KB", n / KB),
        n => format!("~{}B", n),
    }
}
/// Returns `true` when all readers yield exactly the same byte stream.
///
/// Panics on any I/O error (`unwrap` on `fill_buf`) and when `rs` is empty.
/// NOTE(review): each round compares the bytes buffered for the first reader
/// against the other readers' buffers; a short read on one underlying stream
/// could make identical streams compare unequal. Fine for in-memory readers
/// and regular files — confirm before using on network streams.
pub fn readers_identical<R>(rs: &mut [R]) -> bool
    where R: Read
{
    // This approach is slow:
    // if f1.bytes().zip(f2.bytes()).all(|(b1, b2)| b1.unwrap() == b2.unwrap()) {
    let mut brs: Vec<_> = rs.iter_mut()
        .map(|r| io::BufReader::with_capacity(512, r))
        .collect();
    loop {
        let numread = {
            // Borrow all buffers at once; the block scope drops the
            // immutable borrows before `consume` needs mutable access.
            let bufs: Vec<_> = brs.iter_mut()
                .map(|buf| buf.fill_buf().unwrap())
                .collect();
            let basebuf = bufs[0];
            let numread = basebuf.len();
            if numread == 0 {
                // First reader exhausted: the streams are identical only if
                // every other reader is exhausted too. (Previously this
                // returned `true` unconditionally, so a stream that was a
                // strict prefix of the others was reported as identical.)
                return bufs.iter().all(|buf| buf.is_empty());
            }
            if !bufs.iter().all(|buf| &basebuf == buf) {
                return false;
            }
            numread
        };
        // Mark the compared bytes as consumed in every reader.
        for br in &mut brs {
            br.consume(numread);
        }
    }
}
/// Borrows each `String` as a `&str`, preserving order.
pub fn to_string_slices(strings: &[String]) -> Vec<&str> {
    strings.iter().map(String::as_str).collect()
}
|
// Machine-generated (svd2rust-style) read-only accessor for OPTSR2_CUR,
// the FLASH option status register 2. The code below is generated output;
// only these explanatory comments were added.
#[doc = "Register `OPTSR2_CUR` reader"]
pub type R = crate::R<OPTSR2_CUR_SPEC>;
#[doc = "Field `SRAM2_RST` reader - SRAM2 erase when system reset"]
pub type SRAM2_RST_R = crate::BitReader;
#[doc = "Field `BKPRAM_ECC` reader - Backup RAM ECC detection and correction disable"]
pub type BKPRAM_ECC_R = crate::BitReader;
#[doc = "Field `SRAM2_ECC` reader - SRAM2 ECC detection and correction disable"]
pub type SRAM2_ECC_R = crate::BitReader;
#[doc = "Field `SRAM1_RST` reader - SRAM1 erase upon system reset"]
pub type SRAM1_RST_R = crate::BitReader;
#[doc = "Field `SRAM1_ECC` reader - SRAM1 ECC detection and correction disable"]
pub type SRAM1_ECC_R = crate::BitReader;
// Each accessor extracts a single bit of the 32-bit register value.
impl R {
    #[doc = "Bit 3 - SRAM2 erase when system reset"]
    #[inline(always)]
    pub fn sram2_rst(&self) -> SRAM2_RST_R {
        SRAM2_RST_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Backup RAM ECC detection and correction disable"]
    #[inline(always)]
    pub fn bkpram_ecc(&self) -> BKPRAM_ECC_R {
        BKPRAM_ECC_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 6 - SRAM2 ECC detection and correction disable"]
    #[inline(always)]
    pub fn sram2_ecc(&self) -> SRAM2_ECC_R {
        SRAM2_ECC_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 9 - SRAM1 erase upon system reset"]
    #[inline(always)]
    pub fn sram1_rst(&self) -> SRAM1_RST_R {
        SRAM1_RST_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - SRAM1 ECC detection and correction disable"]
    #[inline(always)]
    pub fn sram1_ecc(&self) -> SRAM1_ECC_R {
        SRAM1_ECC_R::new(((self.bits >> 10) & 1) != 0)
    }
}
#[doc = "FLASH option status register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`optsr2_cur::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OPTSR2_CUR_SPEC;
impl crate::RegisterSpec for OPTSR2_CUR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`optsr2_cur::R`](R) reader structure"]
impl crate::Readable for OPTSR2_CUR_SPEC {}
#[doc = "`reset()` method sets OPTSR2_CUR to value 0"]
impl crate::Resettable for OPTSR2_CUR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use combine::*;
use combine::stream::Stream;
use combine::parser::char::*;
use crate::expr::*;
// Parses one identifier-continuation character: an apostrophe or an
// alphanumeric (apostrophes are allowed inside identifiers here).
parser! {
    pub fn id_cont[I]()(I) -> char where [
        I: Stream<Item=char>,
    ] {
        char('\'').or(alpha_num())
    }
}
// Parses an identifier: a letter followed by continuation characters,
// consuming trailing whitespace.
parser! {
    pub fn id[I]()(I) -> String where [
        I: Stream<Item=char>,
    ] {
        letter()
            .and(many(id_cont()))
            .map(|(c, cs): (char, String)| {
                let mut s = c.to_string();
                s.push_str(&cs);
                s
            }).skip(spaces())
    }
}
// Parses exactly the keyword `kw`, rejecting identifiers that merely start
// with it (`not_followed_by(id_cont())` ensures e.g. `lets` is not `let`);
// consumes trailing whitespace.
parser! {
    pub fn keyword[I](kw: &'static str)(I) -> () where [
        I: Stream<Item=char>,
    ] {
        attempt(string(&kw).skip(not_followed_by(id_cont())))
            .skip(spaces())
            .map(drop)
    }
}
// Parses a complete expression. Leading whitespace is consumed here once,
// so the sub-parsers only need to handle trailing whitespace.
parser! {
    pub fn expr[I]()(I) -> Expr where [
        I: Stream<Item=char>,
    ] {
        spaces().with(expr_function())
    }
}
// Parses a `;`-separated (optionally `;`-terminated) list of bindings, as
// used by `let ... in`.
parser! {
    pub fn bindings[I]()(I) -> Bindings where [
        I: Stream<Item=char>,
    ] {
        let binding = choice! {
            // TODO normal binds
            // `inherit (source)? attr...` — the parenthesized source
            // expression is optional.
            keyword("inherit")
                .skip(spaces())
                .with(optional(
                    between(char('('), char(')').skip(spaces()), expr())))
                .and(sep_end_by(id(), spaces())) // TODO not quite right, attrs is more complicated than ids
                .map(|(source, attrs)| Binding::Inherit {
                    source: source.map(Box::new),
                    attrs
                })
        };
        sep_end_by(binding.skip(spaces()), char(';').skip(spaces()))
    }
}
// Parses the binder layer of the grammar: lambdas (`x: body`,
// `{ formals }: body`, `name@{ formals }: body`, `{ formals }@name: body`),
// `assert cond; body`, and `let bindings in body`, falling through to
// `expr_if` when none of those match.
parser! {
    pub fn expr_function[I]()(I) -> Expr where [
        I: Stream<Item=char>,
    ] {
        // A `{ ... }` formal-parameter list (closure so it can be reused).
        let formal_parameters = ||
            between(char('{').skip(spaces()),
                    char('}').skip(spaces()),
                    formals());
        let function_parameters = (choice! {
            // `{ formals }` optionally followed by `@name`.
            formal_parameters()
                .and(optional(char('@').skip(spaces()).with(id())))
                .map(|(fs, n)| Pattern::Set {
                    name: n,
                    parameters: fs,
                }),
            // `name@{ formals }` — wrapped in attempt() so a plain
            // identifier pattern can still match when the `@` is absent.
            attempt(id().skip(char('@')))
                .skip(spaces())
                .and(formal_parameters())
                .map(|(n, fs)| Pattern::Set {
                    name: Some(n),
                    parameters: fs,
                }),
            // Single-identifier parameter.
            id().map(Pattern::Name)
        }).skip(spaces());
        (choice! {
            // `pattern: body` lambda.
            attempt(function_parameters.skip(char(':')))
                .skip(spaces())
                .and(expr_function())
                .map(|(p, e)| Expr::Bound(Bound {
                    binder: Binder::Function(p),
                    body: Box::new(e),
                })),
            // `assert cond; body`.
            keyword("assert")
                .with(expr())
                .skip(char(';'))
                .skip(spaces())
                .and(expr_function())
                .map(|(a, e)| {
                    Expr::Bound(Bound {
                        binder: Binder::Assert(Box::new(a)),
                        body: Box::new(e),
                    })
                }),
            // `let bindings in body`.
            keyword("let")
                .with(bindings())
                .skip(keyword("in"))
                .and(expr_function())
                .map(|(bs, e)| {
                    Expr::Bound(Bound {
                        binder: Binder::Let(bs),
                        body: Box::new(e),
                    })
                }),
            expr_if()
        }).skip(spaces())
    }
}
// Parses a conditional (`if c then t else f`) or the temporary PLACEHOLDER
// keyword, which stands in for not-yet-implemented expression forms and
// evaluates to the literal 3.
parser! {
    pub fn expr_if[I]()(I) -> Expr where [
        I: Stream<Item=char>,
    ] {
        (choice! {
            keyword("PLACEHOLDER").map(|_| Expr::Literal(Literal::Int(3))),
            keyword("if").with(expr())
                .skip(keyword("then")).and(expr())
                .skip(keyword("else")).and(expr())
                .map(|((p, t), f)| {
                    Expr::Conditional(Conditional {
                        cond: Box::new(p),
                        then_: Box::new(t),
                        else_: Box::new(f),
                    })
                })
        }).skip(spaces())
    }
}
// Operator-expression layer; currently a stub that always yields the
// literal 5 without consuming input.
parser! {
    pub fn expr_op[I]()(I) -> Expr where [
        I: Stream<Item=char>,
    ] {
        choice! {
            value(Expr::Literal(Literal::Int(5)))
        }
    }
}
// Parses the body of a formal-parameter list: comma-separated entries that
// are either `...` (ellipsis) or `name` with an optional `? default`.
parser! {
    pub fn formals[I]()(I) -> Vec<Parameter> where [
        I: Stream<Item=char>,
    ] {
        let parameter = choice! {
            string("...").skip(spaces()).map(|_| Parameter::Ellipsis),
            id().and(optional(spaces().skip(char('?')).with(expr())))
                .skip(spaces())
                .map(|(x, d)| Parameter::Named {
                    name: x,
                    default: d.map(Box::new),
                })
        };
        sep_by(parameter, char(',').skip(spaces())).skip(spaces())
    }
}
#[cfg(test)]
mod tests {
    use combine::stream::state::{State, SourcePosition};
    use combine::easy::Stream as EasyStream;
    use super::*;
    /// Runs parser `p` on `input`, failing the test (via `expect`) on a
    /// parse error; prints the parsed value for manual inspection.
    fn test<P>(mut p: P, input: &'static str)
        where
            P: Parser<Input=EasyStream<State<&'static str, SourcePosition>>>,
            P::Output: std::fmt::Debug,
    {
        println!("{:?}", p.easy_parse(State::new(input)).expect("success"))
    }
    // Smoke tests: each sample must parse without error (results are not
    // asserted structurally).
    #[test]
    fn expr_function_() {
        test(expr_function(), "x: y: PLACEHOLDER");
        test(expr_function(), "{ x, y ? PLACEHOLDER, ... }: PLACEHOLDER");
        test(expr_function(), "n@{ x, y ? PLACEHOLDER, ... }: PLACEHOLDER");
        test(expr_function(), "{ x, y ? PLACEHOLDER, ... }@n: PLACEHOLDER");
        test(expr_function(), "assert PLACEHOLDER; PLACEHOLDER");
        test(expr_function(), "let inherit w z; inherit (PLACEHOLDER) w z; in PLACEHOLDER");
    }
    #[test]
    fn expr_if_() {
        test(expr_if(), "if PLACEHOLDER then PLACEHOLDER else PLACEHOLDER");
    }
}
|
use crate::solutions::Solution;
use itertools::Itertools;
/// Advent of Code 2019, day 4 solver.
pub struct Day04 {}
/// Parses "low-high" into the inclusive password range `[low, high]`.
///
/// Panics on malformed input.
fn bounds(input: &str) -> [i32; 2] {
    let mut parts = input.split('-');
    let mut next = || parts.next().unwrap().parse::<i32>().unwrap();
    [next(), next()]
}
// Full-on iterator implementation (slower)
// fn valid(password: &i32) -> bool {
// let check = |(increasing, has_double), (a, b)| (increasing && a <= b, has_double || a == b);
//
// let (increasing, has_double) = password
// .to_string()
// .chars()
// .tuple_windows::<(_, _)>()
// .fold((true, false), check);
//
// increasing && has_double
// }
/// Part-one password rule: digits never decrease and at least one digit
/// appears two or more times in a row.
fn valid(password: &i32) -> bool {
    // ASCII digit bytes compare the same way the characters do.
    let digits = password.to_string().into_bytes();
    digits.windows(2).all(|pair| pair[0] <= pair[1])
        && digits.windows(2).any(|pair| pair[0] == pair[1])
}
/// Part-two password rule: digits never decrease and some digit forms a run
/// of exactly two (a longer run does not satisfy the double requirement).
fn valid_without_groups(password: &i32) -> bool {
    let digits = password.to_string().into_bytes();
    if !digits.windows(2).all(|pair| pair[0] <= pair[1]) {
        return false;
    }
    // Run-length scan looking for a maximal run of length exactly 2.
    let mut run = 1;
    let mut has_double = false;
    for pair in digits.windows(2) {
        if pair[0] == pair[1] {
            run += 1;
        } else {
            has_double = has_double || run == 2;
            run = 1;
        }
    }
    // Account for a run that extends to the final digit.
    has_double || run == 2
}
impl Solution for Day04 {
    /// Counts passwords in the input range satisfying the part-one rules.
    fn part_one(&self, input: &str) -> String {
        let [low, high] = bounds(input);
        (low..=high).filter(valid).count().to_string()
    }
    /// Counts passwords in the input range satisfying the stricter
    /// part-two rules.
    fn part_two(&self, input: &str) -> String {
        let [low, high] = bounds(input);
        (low..=high)
            .filter(valid_without_groups)
            .count()
            .to_string()
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn bounds_from_input() {
assert_eq!(bounds("146810-612564"), [146810, 612564]);
}
#[test]
fn example_data_part_one() {
assert!(valid(&111111));
assert!(valid(&223450) == false);
assert!(valid(&123789) == false);
}
#[test]
fn example_data_part_two() {
assert!(valid_without_groups(&112233));
assert!(valid_without_groups(&123444) == false);
assert!(valid_without_groups(&111122));
}
}
|
use std::{
cmp,
collections::HashMap,
fmt,
io::{self, Write},
};
use console::{style, StyledObject};
use enum_map::EnumMap;
use crate::{
data_type::{Item, ItemType, Pull, Rarity},
report::Report,
};
/// Contains a summary of basic stats regarding a gacha log
#[derive(Debug)]
pub struct Summary {
    /// total number of pulls
    pub len: usize,
    /// stats for the corresponding rarity (three/four/five star)
    pub stats_per_rarity: EnumMap<Rarity, StatsForRarity>,
    /// stats for the corresponding item type (weapon/character)
    pub stats_per_type: EnumMap<ItemType, StatsForType>,
}
/// Helper enum so that a string can be written both to console with style
/// and to a file without style
enum StylizedString {
    Styled(StyledObject<String>),
    UnStyled(String),
}
impl fmt::Display for StylizedString {
    // Both variants defer to the inner value's own `Display`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Styled(obj) => write!(f, "{}", obj),
            Self::UnStyled(s) => write!(f, "{}", s),
        }
    }
}
impl StylizedString {
    /// Apply the style specified by `f`; if the variant is `UnStyled`,
    /// nothing happens.
    fn with_style<F>(self, f: F) -> Self
    where
        F: FnOnce(StyledObject<String>) -> StyledObject<String>,
    {
        match self {
            Self::Styled(obj) => Self::Styled(f(obj)),
            // `s @ _` was redundant; a plain binding covers the remaining
            // variant (clippy: redundant_pattern).
            unstyled => unstyled,
        }
    }
}
impl Summary {
    /// Pretty-prints the summary to `output`; `with_style` enables ANSI
    /// colors (used for terminal output, disabled when writing to a file).
    fn write_to<T: Write>(&self, output: &mut T, with_style: bool) -> io::Result<()> {
        // Every value goes through `stylizer` so one formatting path serves
        // both the styled and the plain case.
        let stylizer: Box<dyn Fn(String) -> StylizedString> = if with_style {
            Box::new(|s| StylizedString::Styled(style(s)))
        } else {
            Box::new(|s| StylizedString::UnStyled(s))
        };
        // Totals per rarity.
        writeln!(
            output,
            "你一共进行了{}抽,其中五星{}抽,四星{}抽,三星{}抽",
            stylizer(self.len.to_string()).with_style(StyledObject::blue),
            stylizer(self.stats_per_rarity[Rarity::Five].num.to_string())
                .with_style(StyledObject::yellow),
            stylizer(self.stats_per_rarity[Rarity::Four].num.to_string())
                .with_style(StyledObject::magenta),
            stylizer(self.stats_per_rarity[Rarity::Three].num.to_string())
                .with_style(StyledObject::blue),
        )?;
        // Overall five/four-star rates as percentages.
        writeln!(
            output,
            "综合出率五星{}%,四星{}%",
            stylizer(format!(
                "{:.2}",
                self.stats_per_rarity[Rarity::Five].num as f64 / self.len as f64 * 100.0
            ))
            .with_style(StyledObject::yellow),
            stylizer(format!(
                "{:.2}",
                self.stats_per_rarity[Rarity::Four].num as f64 / self.len as f64 * 100.0
            ))
            .with_style(StyledObject::magenta),
        )?;
        // Per-type breakdown: weapons.
        writeln!(
            output,
            "共抽出{}个武器,其中五星{}抽,四星{}抽",
            stylizer(self.stats_per_type[ItemType::Weapon].num.to_string())
                .with_style(StyledObject::blue),
            stylizer(
                self.stats_per_type[ItemType::Weapon].num_per_rarity[Rarity::Five].to_string()
            )
            .with_style(StyledObject::yellow),
            stylizer(
                self.stats_per_type[ItemType::Weapon].num_per_rarity[Rarity::Four].to_string()
            )
            .with_style(StyledObject::magenta),
        )?;
        // Per-type breakdown: characters.
        writeln!(
            output,
            "共抽出{}个角色,其中五星{}抽,四星{}抽",
            stylizer(self.stats_per_type[ItemType::Character].num.to_string())
                .with_style(StyledObject::blue),
            stylizer(
                self.stats_per_type[ItemType::Character].num_per_rarity[Rarity::Five].to_string()
            )
            .with_style(StyledObject::yellow),
            stylizer(
                self.stats_per_type[ItemType::Character].num_per_rarity[Rarity::Four].to_string()
            )
            .with_style(StyledObject::magenta),
        )?;
        // Longest streaks.
        writeln!(
            output,
            "最多连续抽出{}个五星,连续抽出{}个四星",
            stylizer(
                self.stats_per_rarity[Rarity::Five]
                    .longest_streak
                    .to_string()
            )
            .with_style(StyledObject::yellow),
            stylizer(
                self.stats_per_rarity[Rarity::Four]
                    .longest_streak
                    .to_string()
            )
            .with_style(StyledObject::magenta),
        )?;
        // Droughts (pulls without seeing the rarity).
        writeln!(
            output,
            "最多{}抽未抽出五星,目前{}抽未抽出五星,{}抽未抽出四星",
            stylizer(
                self.stats_per_rarity[Rarity::Five]
                    .longest_drought
                    .to_string()
            )
            .with_style(StyledObject::red),
            stylizer(
                self.stats_per_rarity[Rarity::Five]
                    .current_drought
                    .to_string()
            )
            .with_style(StyledObject::red),
            stylizer(
                self.stats_per_rarity[Rarity::Four]
                    .current_drought
                    .to_string()
            )
            .with_style(StyledObject::red),
        )?;
        // Item occurrence lists (was `len() > 0`; clippy: len_zero).
        if !self.stats_per_rarity[Rarity::Five].sorted_occurrence.is_empty() {
            writeln!(output, "抽出的五星次数:",)?;
            for (item, count) in self.stats_per_rarity[Rarity::Five].sorted_occurrence.iter() {
                writeln!(
                    output,
                    "    {}: {}次",
                    stylizer(item.name.clone()).with_style(StyledObject::yellow),
                    stylizer(count.to_string()).with_style(StyledObject::blue),
                )?;
            }
        }
        if !self.stats_per_rarity[Rarity::Four].sorted_occurrence.is_empty() {
            writeln!(output, "抽出的四星次数:",)?;
            for (item, count) in self.stats_per_rarity[Rarity::Four].sorted_occurrence.iter() {
                writeln!(
                    output,
                    "    {}: {}次",
                    stylizer(item.name.clone()).with_style(StyledObject::magenta),
                    stylizer(count.to_string()).with_style(StyledObject::blue),
                )?;
            }
        }
        output.flush()?;
        Ok(())
    }
}
impl Report for Summary {
    /// Builds the summary by folding every pull through an intermediate
    /// accumulator, then finalizing it (sorting occurrence lists).
    fn new(log: &Vec<Pull>) -> Self {
        log.iter()
            .fold(IntermediateSummary::default(), |mut summary, pull| {
                summary.update(pull);
                summary
            })
            .into()
    }
    /// Prints the summary to stdout with ANSI styling enabled.
    fn print(&self) {
        self.write_to(&mut io::stdout(), true).unwrap();
    }
    /// Writes the summary without styling (suitable for files).
    fn write<T: Write>(&self, output: &mut T) -> io::Result<()> {
        self.write_to(output, false)
    }
}
/// Intermediate summary while folding
#[derive(Debug, Default)]
struct IntermediateSummary {
    /// total number of pulls
    len: usize,
    /// stats for the corresponding rarity, still accumulating
    stats_per_rarity: EnumMap<Rarity, IntermediateStatsForRarity>,
    /// stats for the corresponding item type
    stats_per_type: EnumMap<ItemType, StatsForType>,
}
impl IntermediateSummary {
    /// Folds one pull into every per-rarity accumulator — each accumulator
    /// must see every pull so its streak/drought counters advance even when
    /// the pull's rarity does not match — and into the pulled item type's
    /// stats.
    fn update(&mut self, pull: &Pull) {
        self.len += 1;
        for (rarity, stats) in self.stats_per_rarity.iter_mut() {
            stats.update(rarity, pull);
        }
        self.stats_per_type[pull.item.item_type].update(pull);
    }
}
// `From` (rather than `Into`) is the idiomatic direction to implement; the
// blanket impl still provides `IntermediateSummary: Into<Summary>`, so the
// existing `.into()` call sites keep working.
impl From<IntermediateSummary> for Summary {
    /// Finalizes the accumulated stats into the public `Summary` shape.
    fn from(summary: IntermediateSummary) -> Self {
        // Finalize each per-rarity accumulator (sorts its occurrence list).
        let mut stats_per_rarity = EnumMap::new();
        stats_per_rarity.extend(
            summary.stats_per_rarity
                .into_iter()
                .map(|(rarity, stats)| (rarity, stats.into())),
        );
        Summary {
            len: summary.len,
            stats_per_rarity,
            stats_per_type: summary.stats_per_type,
        }
    }
}
/// Statistics classified by rarity
#[derive(Default, Debug)]
pub struct StatsForRarity {
    /// total pulls in this rarity
    pub num: usize,
    /// length of the run of this rarity currently in progress
    pub current_streak: usize,
    /// longest consecutive run of this rarity ever seen
    pub longest_streak: usize,
    /// pulls since this rarity last appeared
    pub current_drought: usize,
    /// most pulls ever seen without this rarity appearing
    pub longest_drought: usize,
    /// items of this rarity with their pull counts, most frequent first
    pub sorted_occurrence: Vec<(Item, usize)>,
}
/// Intermediate statistics classified by rarity
#[derive(Default, Debug)]
struct IntermediateStatsForRarity {
    /// total pulls in this rarity
    num: usize,
    // The streak/drought counters mirror the fields of `StatsForRarity`.
    current_streak: usize,
    longest_streak: usize,
    current_drought: usize,
    longest_drought: usize,
    // Raw per-item pull counts; sorted only at finalization time.
    occurrences: HashMap<Item, usize>,
}
// The impl previously declared an unused lifetime parameter `<'a>`;
// nothing in the impl referenced it, so it has been removed.
impl IntermediateStatsForRarity {
    /// Folds one pull into the running stats, treating it as a hit when its
    /// rarity matches `rarity` and as a miss otherwise.
    fn update(&mut self, rarity: Rarity, pull: &Pull) {
        if rarity == pull.item.rarity {
            // Hit: extend the streak, reset the drought, count the item.
            self.num += 1;
            self.current_streak += 1;
            self.longest_streak = cmp::max(self.current_streak, self.longest_streak);
            self.current_drought = 0;
            *self.occurrences.entry(pull.item.clone()).or_insert(0) += 1;
        } else {
            // Miss: reset the streak, extend the drought.
            self.current_streak = 0;
            self.current_drought += 1;
            self.longest_drought = cmp::max(self.current_drought, self.longest_drought);
        }
    }
}
// Implemented as `From` instead of `Into` (idiomatic direction); the blanket
// impl still provides `Into<StatsForRarity>` for existing `.into()` callers.
impl From<IntermediateStatsForRarity> for StatsForRarity {
    /// Finalizes the accumulator: orders items by how often they dropped,
    /// most frequent first.
    fn from(mut stats: IntermediateStatsForRarity) -> Self {
        let mut sorted_occurrence: Vec<(Item, usize)> = stats.occurrences.drain().collect();
        sorted_occurrence.sort_by_key(|(_, cnt)| cmp::Reverse(*cnt));
        StatsForRarity {
            num: stats.num,
            current_streak: stats.current_streak,
            longest_streak: stats.longest_streak,
            current_drought: stats.current_drought,
            longest_drought: stats.longest_drought,
            sorted_occurrence,
        }
    }
}
/// Statistics for one item type, including a per-rarity breakdown.
#[derive(Default, Debug)]
pub struct StatsForType {
    /// total pulls of this item type
    pub num: usize,
    /// pulls of this item type, split by rarity
    pub num_per_rarity: EnumMap<Rarity, usize>,
}
impl StatsForType {
    /// Counts one pull of this item type.
    fn update(&mut self, pull: &Pull) {
        self.num += 1;
        self.num_per_rarity[pull.item.rarity] += 1;
    }
}
|
// use std::net::{TcpListener, TcpStream, UdpSocket};
use std::io::{Read};
use crc::{crc32, Hasher32};
/// A space packet with a 6-byte primary header followed by a payload.
/// Bit widths, as packed by `to_vec`: version 3, packet_type 1,
/// sec_hdr_flag 1, apid 11, seq_flags 2, seq_num 14, data_len 16.
/// NOTE(review): the layout matches the CCSDS primary header — confirm the
/// intended standard before relying on that.
#[derive(Debug)]
pub struct SpacePacket {
    pub version: u8,
    pub packet_type: u8,
    pub sec_hdr_flag: u8,
    pub apid: u16,
    pub seq_flags: u8,
    pub seq_num: u16,
    // Header length field value: payload bytes minus one, plus 4 CRC bytes.
    pub data_len: u16,
    pub payload: Vec<u8>,
}
// pub fn print_space_packet(sp: &mut SpacePacket) {
// eprintln!("{:?}", sp);
// }
impl SpacePacket {
    /// Serializes the packet: 6-byte primary header, payload, then a
    /// big-endian CRC32 (IEEE) computed over header + payload.
    pub fn to_vec(&self) -> Vec<u8> {
        // Length field counts payload bytes minus one, plus the 4 CRC bytes.
        let full_payload_len: u16 = self.payload.len() as u16 - 1 + 4;
        let mut pickle = Vec::with_capacity(6 + full_payload_len as usize + 1);
        eprintln!("length of payload is {}", self.data_len);
        // Byte 0: version(3) | packet type(1) | sec-hdr flag(1) | APID high 3 bits.
        // BUG FIX: `x & 1 << 4` parses as `x & (1 << 4)` (i.e. `x & 16`),
        // which silently dropped the packet-type and sec-hdr-flag bits;
        // each single bit must be masked first, then shifted into place.
        pickle.push(
            self.version << 5
                | (self.packet_type & 1) << 4
                | (self.sec_hdr_flag & 1) << 3
                | (self.apid >> 8) as u8 & 0x7,
        );
        pickle.push((self.apid & 0xff) as u8);
        // Bytes 2-3: sequence flags (2 bits) + 14-bit sequence number; mask
        // the high byte so an out-of-range seq_num cannot clobber the flags.
        pickle.push(self.seq_flags << 6 | (self.seq_num >> 8) as u8 & 0x3F);
        pickle.push((self.seq_num & 0xff) as u8);
        pickle.push((full_payload_len >> 8) as u8);
        pickle.push((full_payload_len & 0xff) as u8);
        pickle.extend(self.payload.iter().cloned());
        // CRC over everything serialized so far, appended big-endian.
        let mut digest = crc32::Digest::new(crc32::IEEE);
        digest.write(&pickle);
        let crc = digest.sum32();
        pickle.push((crc >> 24) as u8);
        pickle.push((crc >> 16 & 0xff) as u8);
        pickle.push((crc >> 8 & 0xff) as u8);
        pickle.push((crc & 0xff) as u8);
        eprintln!("length of vector is {}", pickle.len());
        pickle
    }
}
/// Reads one space packet from `stream` and verifies its trailing CRC32.
///
/// The 6-byte primary header is read first to learn the payload length,
/// then the payload plus 4-byte CRC. The returned packet's `payload` has
/// the CRC bytes stripped. Panics on I/O errors (`unwrap` on `read`).
pub fn read_space_packet(stream: &mut std::net::TcpStream) -> Result<SpacePacket, &'static str> {
    let mut digest = crc32::Digest::new(crc32::IEEE);
    // Read exactly 6 header bytes; `read` may return short counts.
    let mut buffer = vec![0u8; 6];
    let mut total_read: usize = 0;
    while total_read < 6 {
        let count = stream.read(&mut buffer[total_read..]).unwrap();
        if count == 0 {
            return Err("unexpected EOF");
        }
        total_read += count;
    }
    digest.write(&buffer);
    // Primary header layout: version(3) type(1) sec-hdr(1) apid(11)
    // seq_flags(2) seq_num(14) data_len(16).
    let version = buffer[0] >> 5;
    let packet_type = buffer[0] >> 4 & 0x1;
    let sec_hdr_flag = buffer[0] >> 3 & 0x1;
    // BUG FIX: `buffer[0] as u16 & 3 << 8` masked an 8-bit value with 0x300,
    // which is always 0, so the APID's high bits were lost. The low 3 bits
    // of byte 0 hold the APID high bits (as written by `to_vec`): mask them
    // first, then shift into position.
    let apid: u16 = ((u16::from(buffer[0]) & 0x07) << 8) | u16::from(buffer[1]);
    let seq_flags = buffer[2] >> 6;
    // BUG FIX: the sequence number's high bits are the low 6 bits of byte 2
    // (mask 0x3F, matching the writer's `seq_num >> 8`), not bit 6 (`& 64`);
    // mask first, then shift the whole high byte into place.
    let seq_num: u16 = ((u16::from(buffer[2]) & 0x3F) << 8) | u16::from(buffer[3]);
    let data_len: u16 = (u16::from(buffer[4]) << 8 | u16::from(buffer[5])) + 1;
    // Must at least cover the 4 CRC bytes plus one payload byte.
    if data_len < 5 {
        return Err("Invalid payload length");
    }
    // Read exactly `data_len` bytes: payload followed by the CRC.
    let mut payload = vec![0u8; data_len as usize];
    total_read = 0;
    while total_read < data_len as usize {
        let count = stream.read(&mut payload[total_read..]).unwrap();
        if count == 0 {
            return Err("unexpected EOF");
        }
        total_read += count;
    }
    // The last 4 bytes carry the sender's big-endian CRC32.
    let len = payload.len();
    let mut pkt_crc = payload[len - 4] as u32;
    pkt_crc = pkt_crc << 8 | payload[len - 3] as u32;
    pkt_crc = pkt_crc << 8 | payload[len - 2] as u32;
    pkt_crc = pkt_crc << 8 | payload[len - 1] as u32;
    // CRC covers header + payload, excluding the CRC bytes themselves.
    digest.write(&payload[0..len - 4]);
    let crc = digest.sum32();
    payload.drain(len - 4..len);
    if crc != pkt_crc {
        return Err("bad CRC on packet");
    }
    Ok(SpacePacket {
        version,
        packet_type,
        sec_hdr_flag,
        apid,
        seq_flags,
        seq_num,
        data_len,
        payload,
    })
}
|
use super::Auth;
use anyhow::Result;
use async_trait::async_trait;
use sha2::{Digest, Sha224};
use std::collections::HashSet;
/// Authenticates against a fixed password list loaded from configuration.
pub struct ConfigAuthenticator {
    // Hex-encoded SHA-224 digests of the accepted passwords.
    store: HashSet<String>,
}
impl ConfigAuthenticator {
    /// Builds an authenticator from plain-text passwords; each password is
    /// stored as the hex encoding of its SHA-224 digest.
    pub fn new(passwords: Vec<String>) -> Result<ConfigAuthenticator> {
        let store = passwords
            .into_iter()
            .map(|p| {
                let mut hasher = Sha224::new();
                hasher.update(p.into_bytes());
                hex::encode(hasher.finalize())
            })
            .collect();
        Ok(ConfigAuthenticator { store })
    }
}
#[async_trait]
impl Auth for ConfigAuthenticator {
    /// A password is accepted when it matches one of the stored entries.
    /// NOTE(review): the store holds hex-encoded SHA-224 digests and
    /// `password` is compared against them directly — the caller is
    /// presumably expected to pass the already-hashed value; confirm.
    async fn auth(&self, password: &str) -> Result<bool> {
        Ok(self.store.contains(password))
    }
    /// Traffic-stat hook; this implementation records nothing.
    /// NOTE(review): the boolean result of `auth` is discarded here — only
    /// its error is propagated; confirm ignoring a `false` is intended.
    async fn stat(&self, password: &str, _: u64, _: u64) -> Result<()> {
        self.auth(password).await?;
        Ok(())
    }
}
|
use crate::projects_and_tasks::{list_with_names::ListWithNames, task::Task};
/// An ordered collection of [`Task`]s.
#[derive(Debug, PartialEq, Clone)]
pub struct Tasks {
    tasks: Vec<Task>,
}
impl ListWithNames<Task> for Tasks {
    // Exposes the tasks to the generic `ListWithNames` machinery.
    fn items(&self) -> std::slice::Iter<Task> {
        self.tasks.iter()
    }
}
/// Builder for [`Tasks`].
pub struct TasksBuilder {
    tasks: Vec<Task>,
}
// `Default` mirrors `new()` (clippy: new_without_default) so the builder
// composes with `..Default::default()`-style code.
impl Default for TasksBuilder {
    fn default() -> Self {
        Self::empty()
    }
}
impl TasksBuilder {
    /// Creates a builder with no tasks.
    pub fn new() -> Self {
        Self::empty()
    }
    /// Creates a builder with no tasks.
    pub fn empty() -> Self {
        Self { tasks: vec![] }
    }
    /// Replaces the builder's task list.
    pub fn with_tasks(mut self, tasks: Vec<Task>) -> Self {
        self.tasks = tasks;
        self
    }
    /// Finalizes the builder into a [`Tasks`] collection.
    pub fn build(self) -> Tasks {
        Tasks { tasks: self.tasks }
    }
}
|
use ocl;
use std::ffi::CString;
use std::marker::Unsize;
use super::{OpenCL, OpenCLDevice};
use super::super::super::compute_device::ComputeDevice;
use super::super::super::context::{Context, ContextCtor};
use super::super::super::error::{Error, ErrorKind, Result};
use super::super::super::extension_package::{ExtensionPackage, ExtensionPackageCtor};
use super::super::super::hardware::Hardware;
/// Defines a Open CL context.
///
/// A context is responsible for managing OpenCL objects and resources (command-queues, program
/// objects, kernel objects, executing kernels, etc.). The usual configuration is a single context
/// encapsulating multiple devices. The resources, such as [buffers][buffer] and [events][event],
/// can be shared across multiple devices in a single context. Other possible setups include:
///
/// * a single context for multiple devices
/// * a single context for a single device
/// * a context for each device
///
/// note: multi-platform contexts are not supported in OpenCL.
///
/// ## Programs
///
/// An OpenCL context can have multiple programs associated with it. Programs can be compiled
/// individually to avoid possible name clashes due to using packages from multiple package
/// authors.
///
/// [buffer]: ./frameworks/opencl/struct.Memory.html
/// [event]: ./frameworks/opencl/struct.Event.html
pub struct OpenCLContext<P> {
    /// The context.
    context: ocl::Context,
    /// The index of the _active_ device.
    active: usize,
    /// A list of devices associated with the context.
    selected_devices: Vec<OpenCLDevice>,
    /// The `Device`s' corresponding `Hardware`.
    selected_hardware: Vec<Hardware>,
    /// The extension package built for this context.
    ///
    /// Stored on the context (rather than in a global) because:
    /// a) the compiled program depends on the selected devices;
    /// b) a lazily-initialized static would have to construct its own
    ///    context, and mutating a shared one would not be worth the cost
    ///    and trouble.
    extension_package: P,
}
impl<P> OpenCLContext<P> {
    /// Returns the currently active device.
    pub fn device(&self) -> &OpenCLDevice {
        &self.selected_devices[self.active]
    }
    /// Returns the extension package owned by this context.
    pub fn extension_package(&self) -> &P {
        &self.extension_package
    }
    /// Builds and returns a program.
    ///
    /// The program is compiled (with no extra compiler options) for every
    /// device selected in this context.
    pub fn program(&self, src_strings: Vec<CString>) -> Result<ocl::Program> {
        let cmplr_opts = CString::new("").unwrap();
        let device_ids: Vec<_> = self.selected_devices.iter().map(|d| d.device.clone()).collect();
        Ok(ocl::Program::new(
            self.context.core(),
            src_strings,
            Some(&device_ids),
            cmplr_opts
        )?)
    }
}
impl<Package> Context for OpenCLContext<Package>
    where Package: ExtensionPackage,
          OpenCLContext<Package>: Unsize<Package::Extension> {
    type Package = Package;
    /// Returns the active compute device.
    fn active_codev(&self) -> &ComputeDevice {
        &self.selected_devices[self.active]
    }
    /// The context itself acts as the extension object, via the `Unsize`
    /// coercion required by the impl bounds.
    fn extension(&self) -> &<Package as ExtensionPackage>::Extension {
        self
    }
    /// Makes the device at `index` the active one; errors when `index` is
    /// out of range.
    fn activate(&mut self, index: usize) -> Result {
        if index >= self.selected_devices.len() {
            return Err(Error::new(ErrorKind::Other, "device index out of range"));
        }
        self.active = index;
        Ok(())
    }
}
impl<P> ContextCtor<P> for OpenCLContext<P>
    where P: 'static + ExtensionPackage + ExtensionPackageCtor<OpenCLContext<()>>,
          OpenCLContext<P>: Unsize<P::Extension> {
    type F = OpenCL<P>;
    /// Creates a context covering the selected hardware.
    ///
    /// A command queue (with profiling enabled) is created per device. The
    /// context is first assembled with a unit `()` extension package so that
    /// `P::package` can build the real package against the selected devices,
    /// after which the final context is put together.
    fn new(framework: &Self::F, selection: &[Hardware]) -> Result<Self> {
        let props = ocl::builders::ContextProperties::new().platform(framework.implementation);
        let s = ocl::builders::DeviceSpecifier::Indices(selection.iter().map(|h| h.id).collect());
        let ctx = ocl::Context::new(Some(props), Some(s), None, None)?;
        let mut devices = vec![];
        for hardware in selection.iter() {
            let d = ocl::Device::by_idx_wrap(framework.implementation, hardware.id);
            // One queue per device, sharing the single context.
            let queue = ocl::Queue::new(&ctx, d, Some(ocl::flags::QUEUE_PROFILING_ENABLE))?;
            devices.push(OpenCLDevice {
                device: d,
                context: ctx.clone(),
                queue,
            });
        }
        // Bootstrap context with a unit package; used only to build `P`.
        let mut unpackaged = OpenCLContext {
            context: ctx,
            active: 0,
            selected_devices: devices,
            selected_hardware: selection.to_vec(),
            extension_package: (),
        };
        let package = P::package(&mut unpackaged)?;
        Ok(OpenCLContext {
            context: unpackaged.context,
            active: unpackaged.active,
            selected_devices: unpackaged.selected_devices,
            selected_hardware: unpackaged.selected_hardware,
            extension_package: package,
        })
    }
}
use nanoda_lib::utils::{ List, List::*, Env };
use nanoda_lib::name::Name;
use nanoda_lib::level::{ LevelsPtr, Level };
use nanoda_lib::param;
// Membership: "u" is an element of the list ["u", "v"] (result is printed,
// not asserted).
#[test]
fn util_test2() {
    let mut env = Env::new(false);
    let mut env = env.as_compiler();
    let l1 = param!(["u", "v"], &mut env);
    let p1 = param!("u", &mut env);
    let r = l1.mem(p1, &env);
    println!("mem result : {}\n", r);
}
// `pos` returns the zero-based index of a level in the list, or `None`
// when the level is absent.
#[test]
fn pos_test0() {
    let mut env = Env::new(false);
    let mut live = env.as_compiler();
    let l = param!(["a", "b", "c", "d", "e"], &mut live);
    let param_a = param!("a", &mut live);
    let param_b = param!("b", &mut live);
    let param_c = param!("c", &mut live);
    let param_d = param!("d", &mut live);
    let param_e = param!("e", &mut live);
    let param_z = param!("z", &mut live);
    assert_eq!(l.pos(param_a, &live), Some(0));
    assert_eq!(l.pos(param_b, &live), Some(1));
    assert_eq!(l.pos(param_c, &live), Some(2));
    assert_eq!(l.pos(param_d, &live), Some(3));
    assert_eq!(l.pos(param_e, &live), Some(4));
    assert_eq!(l.pos(param_z, &live), None);
}
// Concatenating two non-empty lists preserves order.
#[test]
fn concat_test0() {
    let mut env = Env::new(false);
    let mut live = env.as_compiler();
    let l1 = param!(["a", "b"], &mut live);
    let l2 = param!(["c", "d"], &mut live);
    let target = param!(["a", "b", "c", "d"], &mut live);
    assert_eq!(l1.concat(l2, &mut live), target);
}
// Concatenating Nil on the right is a no-op.
#[test]
fn concat_test1() {
    let mut env = Env::new(false);
    let mut live = env.as_compiler();
    let l1 = param!(["a", "b"], &mut live);
    let l2 = Nil::<Level>.alloc(&mut live);
    let target = param!(["a", "b"], &mut live);
    assert_eq!(l1.concat(l2, &mut live), target);
}
// Concatenating Nil on the left is a no-op.
#[test]
fn concat_test2() {
    let mut env = Env::new(false);
    let mut live = env.as_compiler();
    let l1 = Nil::<Level>.alloc(&mut live);
    let l2 = param!(["a", "b"], &mut live);
    let target = param!(["a", "b"], &mut live);
    assert_eq!(l1.concat(l2, &mut live), target);
}
|
use std::ops::*;
use super::angle::Angle;
use super::common::*;
// Component-wise binary operator: `Vec2D op Vec2D -> Vec2D`.
macro_rules! Op {
    ($op_trait: ident, $op_func: ident, $op: tt) => {
        impl $op_trait for Vec2D {
            type Output = Vec2D;
            fn $op_func(self, rhs: Vec2D) -> Vec2D {
                Vec2D::new(self.x $op rhs.x, self.y $op rhs.y)
            }
        }
    };
}
// Component-wise compound assignment: `Vec2D op= Vec2D`.
macro_rules! OpAsn {
    ($op_trait: ident, $op_func: ident, $op: tt) => {
        impl $op_trait for Vec2D {
            fn $op_func(&mut self, rhs: Vec2D) {
                self.x $op rhs.x;
                self.y $op rhs.y;
            }
        }
    };
}
// Scalar broadcast operator: `Vec2D op f64 -> Vec2D`, the scalar is applied
// to both components.
macro_rules! OpNum {
    ($op_trait: ident, $op_func: ident, $op: tt) => {
        impl $op_trait<f64> for Vec2D {
            type Output = Vec2D;
            fn $op_func(self, rhs: f64) -> Vec2D {
                Vec2D::new(self.x $op rhs, self.y $op rhs)
            }
        }
    };
}
// Scalar compound assignment: `Vec2D op= f64`.
macro_rules! OpNumAssign {
    ($op_trait: ident, $op_func: ident, $op: tt) => {
        impl $op_trait<f64> for Vec2D {
            fn $op_func(&mut self, rhs: f64) {
                self.x $op rhs;
                self.y $op rhs;
            }
        }
    };
}
/// A 2-D vector with `f64` components.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct Vec2D {
    pub x: f64,
    pub y: f64
}
impl Vec2D {
    /// The zero vector.
    pub fn zero() -> Vec2D {
        Vec2D { x: 0.0, y: 0.0 }
    }
    pub fn new(x: f64, y: f64) -> Vec2D {
        Vec2D { x, y }
    }
    /// Builds a vector from an `(x, y)` tuple.
    pub fn from_tuple(coords: (f64, f64)) -> Vec2D {
        Vec2D { x: coords.0, y: coords.1 }
    }
    /// Misspelled alias of [`Vec2D::from_tuple`], kept so existing callers
    /// keep compiling; prefer `from_tuple` in new code.
    pub fn from_topule(coords: (f64, f64)) -> Vec2D {
        Self::from_tuple(coords)
    }
    pub fn as_tuple(&self) -> (f64, f64) {
        (self.x, self.y)
    }
    /// Squared length; avoids the `sqrt` when only comparing magnitudes.
    pub fn length_sqr(&self) -> f64 {
        self.x.powi(2) + self.y.powi(2)
    }
    pub fn length(&self) -> f64 {
        self.length_sqr().sqrt()
    }
    /// Rescales the vector to `new_length`, keeping its direction.
    /// NOTE: for the zero vector the components become NaN (0/0).
    pub fn set_length(&mut self, new_length: f64) {
        let cur_length = self.length();
        let multiplier = new_length / cur_length;
        self.x *= multiplier;
        self.y *= multiplier;
    }
    /// The direction of the vector as a normalized [`Angle`].
    pub fn angle(&self) -> Angle {
        Angle::from_atan2(self.y, self.x).normalized()
    }
    pub fn dot(&self, other: &Vec2D) -> f64 {
        self.x * other.x + self.y * other.y
    }
    /// 2-D cross product (the z-component of the 3-D cross product).
    pub fn cross(&self, other: &Vec2D) -> f64 {
        self.x * other.y - self.y * other.x
    }
    /// The vector rotated by 90 degrees.
    pub fn orthogonal(&self) -> Vec2D {
        Vec2D::new(-self.y, self.x)
    }
    pub fn distance_sqr(&self, other: &Vec2D) -> f64 {
        (self.x - other.x).powi(2) + (self.y - other.y).powi(2)
    }
    pub fn distance(&self, other: &Vec2D) -> f64 {
        self.distance_sqr(other).sqrt()
    }
    /// Signed angle from `self` to `other`, via atan2 of cross and dot.
    pub fn angle_between(&self, other: &Vec2D) -> Angle {
        let cross = self.cross(other);
        let dot = self.dot(other);
        Angle::from_atan2(cross, dot)
    }
}
impl Normalizable for Vec2D {
    /// Scales the vector to unit length in place; the zero vector is left
    /// untouched (guarding the division by zero).
    fn normalize(&mut self) {
        let len = self.length();
        if len != 0.0 {
            self.x /= len;
            self.y /= len;
        }
    }
    /// Returns a unit-length copy; the zero vector normalizes to zero.
    // Style fix: the original mixed expression style with a stray `return`.
    fn normalized(&self) -> Vec2D {
        let len = self.length();
        if len != 0.0 {
            Vec2D::new(self.x / len, self.y / len)
        } else {
            Vec2D::zero()
        }
    }
}
impl Rotatable<f64> for Vec2D {
    type Output = Vec2D;
    /// Rotates the vector in place by `angle` radians (counter-clockwise),
    /// reusing the pure `rotated` implementation.
    fn rotate(&mut self, angle: f64) {
        *self = self.rotated(angle);
    }
    /// Returns a copy rotated by `angle` radians using the standard 2D
    /// rotation matrix.
    fn rotated(&self, angle: f64) -> Vec2D {
        let (sin, cos) = (angle.sin(), angle.cos());
        Vec2D::new(
            self.x * cos - self.y * sin,
            self.x * sin + self.y * cos,
        )
    }
}
impl Rotatable<&Angle> for Vec2D {
    type Output = Vec2D;
    /// Rotates the vector in place by `angle`, reusing the pure `rotated`
    /// implementation.
    fn rotate(&mut self, angle: &Angle) {
        *self = self.rotated(angle);
    }
    /// Returns a copy rotated by `angle` using the standard 2D rotation
    /// matrix built from the angle's sine and cosine.
    fn rotated(&self, angle: &Angle) -> Vec2D {
        let (sin, cos) = (angle.sin(), angle.cos());
        Vec2D::new(
            self.x * cos - self.y * sin,
            self.x * sin + self.y * cos,
        )
    }
}
impl AlmostEq for Vec2D {
    /// Component-wise approximate equality within tolerance `eps`.
    fn is_eq(&self, rhs: &Vec2D, eps: f64) -> bool
    {
        self.x.is_eq(&rhs.x, eps) && self.y.is_eq(&rhs.y, eps)
    }
}
// Component-wise vector-vector operators.
Op!(Add, add, +);
Op!(Sub, sub, -);
Op!(Mul, mul, *);
// Component-wise vector-vector compound assignment.
OpAsn!(AddAssign, add_assign, +=);
OpAsn!(SubAssign, sub_assign, -=);
OpAsn!(MulAssign, mul_assign, *=);
// Vector-scalar operators.
OpNum!(Add, add, +);
OpNum!(Sub, sub, -);
OpNum!(Mul, mul, *);
OpNum!(Div, div, /);
// Vector-scalar compound assignment.
OpNumAssign!(AddAssign, add_assign, +=);
OpNumAssign!(SubAssign, sub_assign, -=);
OpNumAssign!(MulAssign, mul_assign, *=);
OpNumAssign!(DivAssign, div_assign, /=);
#[cfg(test)]
mod test {
    use rand::Rng;
    use super::*;
    // Generates `cnt` random (x, y) pairs in [0, 1) for property-style tests.
    fn gen_xys(cnt: i32) -> Vec<(f64, f64)> {
        let mut rng = rand::thread_rng();
        (0..cnt).map(move |_| {
            let x = rng.gen::<f64>();
            let y = rng.gen::<f64>();
            (x, y)
        }).collect()
    }
    // Construction from components and tuples round-trips through as_tuple.
    #[test]
    fn create_and_tup() {
        for (x, y) in gen_xys(100) {
            let v1 = Vec2D::new(x, y);
            let v2 = Vec2D::from_topule((x, y));
            assert_eq!(v1.x, x);
            assert_eq!(v1.y, y);
            assert_eq!(v1, v2);
            assert_eq!(v1.as_tuple(), (x, y));
        }
    }
    // normalized() maps zero to zero and everything else to unit length.
    #[test]
    fn normalization() {
        let z = Vec2D::zero().normalized();
        assert!(z.length().is_eq(&0.0, EPS));
        for (x, y) in gen_xys(100) {
            let v = Vec2D::new(x, y);
            assert!(v.normalized().length().is_eq(&1.0, EPS));
        }
    }
    // length/length_sqr match the analytic values; set_length rescales.
    #[test]
    fn length() {
        let mut rng = rand::thread_rng();
        for (x, y) in gen_xys(100) {
            let mut v = Vec2D::new(x, y);
            let len_sqr = x * x + y * y;
            assert_eq!(v.length_sqr(), len_sqr);
            assert_eq!(v.length(), len_sqr.sqrt());
            let new_len = rng.gen::<f64>();
            v.set_length(new_len);
            assert!(v.length().is_eq(&new_len, EPS));
        }
    }
    // angle() of the four axis-aligned unit vectors hits 0/90/180/270 degrees.
    #[test]
    fn angles() {
        let tests = vec!{
            (Vec2D::new(1.0, 0.0), 0.0),
            (Vec2D::new(0.0, 1.0), 90.0),
            (Vec2D::new(-1.0, 0.0), 180.0),
            (Vec2D::new(0.0, -1.0), 270.0)
        };
        for (v, a) in tests {
            let angle = Angle::from_deg(a).normalized();
            assert!(v.angle().is_eq(&angle, EPS));
        }
    }
    // rotated() and in-place rotate() agree and land on the expected axes.
    #[test]
    fn rotations() {
        let v = Vec2D::new(1.0, 0.0);
        let tests = vec!{
            (Vec2D::new(1.0, 0.0), 0.0),
            (Vec2D::new(0.0, 1.0), 90.0),
            (Vec2D::new(-1.0, 0.0), 180.0),
            (Vec2D::new(0.0, -1.0), 270.0)
        };
        for (tv, a) in tests {
            let rv = v.rotated(&Angle::from_deg(a));
            let mut temp = Vec2D::new(1.0, 0.0);
            temp.rotate(&Angle::from_deg(a));
            assert!(rv.is_eq(&tv, EPS));
            assert!(temp.is_eq(&tv, EPS));
        }
    }
    // Macro-generated vector-vector and vector-scalar operators are
    // component-wise.
    #[test]
    fn ops() {
        let coords1 = gen_xys(100);
        let coords2 = gen_xys(100);
        let mut rng = rand::thread_rng();
        for (c1, c2) in coords1.iter().zip(coords2) {
            let (x1, y1) = c1;
            let (x2, y2) = c2;
            let v1 = Vec2D::new(*x1, *y1);
            let v2 = Vec2D::new(x2, y2);
            let rand = rng.gen::<f64>();
            let sum = v1 + v2;
            assert_eq!(sum.as_tuple(), (x1 + x2, y1 + y2));
            let dif = v1 - v2;
            assert_eq!(dif.as_tuple(), (x1 - x2, y1 - y2));
            let mul = v1 * v2;
            assert_eq!(mul.as_tuple(), (x1 * x2, y1 * y2));
            let sum_num = v1 + rand;
            assert_eq!(sum_num.as_tuple(), (x1 + rand, y1 + rand));
            let diff_num = v1 - rand;
            assert_eq!(diff_num.as_tuple(), (x1 - rand, y1 - rand));
            let mul_num = v1 * rand;
            assert_eq!(mul_num.as_tuple(), (x1 * rand, y1 * rand));
            let div_num = v1 / rand;
            assert_eq!(div_num.as_tuple(), (x1 / rand, y1 / rand));
        }
    }
}
// A fairly easy way to declare extensible trait hierarchies in Rust with
// support for arbitrary dynamic upcasting and downcasting (just like
// traditional inheritance). This is based on Simon Marlow's "An Extensible
// Dynamically-Typed Hierarchy of Exceptions":
// https://simonmar.github.io/bib/papers/ext-exceptions.pdf
//
// Pros: Simple, no unsafe magic other than what's encapsulated in mopa.
// Cons: Inefficient due to an indirection for each level of the hierarchy.
//
// Alternative approaches:
//
// - https://crates.io/crates/query_interface
// - http://idubrov.name/rust/2018/06/16/dynamic-casting-traits.html
#[macro_use]
extern crate mopa;
use mopa::Any;
// root type
// root type
pub trait Object: Any + std::fmt::Debug {
    /// Moves `self` to the heap as the root `Object` type. Subtypes override
    /// this (via `declare_supertype!`) to box through their supertype first,
    /// building the chain that `downcast_object_ref` later unwinds.
    fn upcast_object(self) -> Box<Object> where Self: Sized {
        Box::new(self)
    }
    /// Attempts to recover `&Self` from a root-typed reference using mopa's
    /// `downcast_ref`; overridden by subtypes to walk back down level by level.
    fn downcast_object_ref(obj: &Object) -> Option<&Self> where Self: Sized {
        obj.downcast_ref()
    }
}
// Generates the `downcast_ref` machinery for `Object`.
mopafy!(Object);
// Declares `$supertype` as the parent of `$type` in the extensible hierarchy:
// upcasting boxes through the supertype (adding one level of indirection per
// hierarchy level), and downcasting unwinds one box at each level.
macro_rules! declare_supertype {
    ($type:ty, $supertype:ty) => {
        impl Object for $type {
            // Box `self`, then let the supertype continue boxing upward.
            fn upcast_object(self) -> Box<Object> where Self: Sized {
                Box::<$supertype>::upcast_object(Box::new(self))
            }
            // Recover the supertype's box first, then downcast one more level.
            fn downcast_object_ref(obj: &Object) -> Option<&Self> where Self: Sized {
                Box::<$supertype>::downcast_object_ref(obj)?.downcast_ref()
            }
        }
    }
}
// child trait
trait Animal: Object {
    fn animal(&self);
}
mopafy!(Animal);
// Forward `Animal` through boxes so `Box<dyn Animal>` itself is an `Animal`.
impl<T: Animal + ?Sized> Animal for Box<T> where Box<T>: Object {
    fn animal(&self) { (**self).animal() }
}
// `Box<Animal>` sits directly under the root, so the default `Object`
// implementation (plain boxing) suffices.
impl Object for Box<Animal> {}
// grandchild trait
trait Canine: Animal {
    fn canine(&self);
}
mopafy!(Canine);
// Forward `Canine` through boxes, mirroring the `Animal` forwarding impl.
impl<T: Canine + ?Sized> Canine for Box<T> where Box<T>: Object {
    fn canine(&self) { (**self).canine() }
}
declare_supertype!(Box<Canine>, Animal);
// great-grandchild type
#[derive(Debug)]
struct Wolf;
// Wolf's supertype is Canine, completing the Object > Animal > Canine > Wolf
// chain.
declare_supertype!(Wolf, Canine);
impl Animal for Wolf {
    fn animal(&self) { println!("I can do animal things."); }
}
impl Canine for Wolf {
    fn canine(&self) { println!("I can do canine things."); }
}
impl Wolf {
    // Inherent method only reachable after downcasting all the way to Wolf.
    fn wolf(&self) { println!("I can do wolf things."); }
}
fn main() {
    // Upcast a concrete Wolf all the way to the root, then demonstrate
    // downcasting back to every level of the hierarchy.
    let wolf: Box<Object> = Wolf.upcast_object();
    Box::<Animal>::downcast_object_ref(&*wolf).unwrap().animal();
    Box::<Canine>::downcast_object_ref(&*wolf).unwrap().canine();
    Wolf::downcast_object_ref(&*wolf).unwrap().wolf();
}
|
pub(crate) mod from_headers;
// HTTP header names used by the Cosmos DB REST API. The trailing bracketed
// comment on each line documents the value type carried in that header.
pub(crate) const HEADER_VERSION: &str = "x-ms-version"; // Cow[str]
pub(crate) const HEADER_DATE: &str = "x-ms-date"; // [String]
pub(crate) const HEADER_DOCUMENTDB_IS_UPSERT: &str = "x-ms-documentdb-is-upsert"; // [bool]
pub(crate) const HEADER_INDEXING_DIRECTIVE: &str = "x-ms-indexing-directive"; // [IndexingDirective]
pub(crate) const HEADER_CONSISTENCY_LEVEL: &str = "x-ms-consistency-level"; // [ConsistencyLevel]
// NOTE(review): the bracketed type below says ContinuationToken; presumably
// this carries a session token — verify against the from_headers parsers.
pub(crate) const HEADER_SESSION_TOKEN: &str = "x-ms-session-token"; // [ContinuationToken]
pub(crate) const HEADER_ALLOW_MULTIPLE_WRITES: &str = "x-ms-cosmos-allow-tentative-writes"; // [bool]
pub(crate) const HEADER_A_IM: &str = "A-IM"; // Cow[str]
pub(crate) const HEADER_ACTIVITY_ID: &str = "x-ms-activity-id"; // [String]
// NOTE(review): const name says PARTITIONRANGEID but the header value is
// "partitionkeyrangeid" — the value is what goes on the wire; confirm the
// name mismatch is intentional before renaming.
pub(crate) const HEADER_DOCUMENTDB_PARTITIONRANGEID: &str = "x-ms-documentdb-partitionkeyrangeid"; // [String]
pub(crate) const HEADER_DOCUMENTDB_PARTITIONKEY: &str = "x-ms-documentdb-partitionkey"; // [String]
pub(crate) const HEADER_NUMBER_OF_READ_REGIONS: &str = "x-ms-number-of-read-regions";
pub(crate) const HEADER_REQUEST_CHARGE: &str = "x-ms-request-charge"; // [f64]
pub(crate) const HEADER_OFFER_THROUGHPUT: &str = "x-ms-offer-throughput"; // [u64]
pub(crate) const HEADER_OFFER_TYPE: &str = "x-ms-offer-type"; // [&str]
#[allow(dead_code)]
pub(crate) const HEADER_DOCUMENTDB_ISQUERY: &str = "x-ms-documentdb-isquery"; // [bool]
pub(crate) const HEADER_DOCUMENTDB_QUERY_ENABLECROSSPARTITION: &str =
    "x-ms-documentdb-query-enablecrosspartition"; // [bool]
pub(crate) const HEADER_DOCUMENTDB_QUERY_PARALLELIZECROSSPARTITIONQUERY: &str =
    "x-ms-documentdb-query-parallelizecrosspartitionquery"; // [bool]
pub(crate) const HEADER_DOCUMENTDB_EXPIRY_SECONDS: &str = "x-ms-documentdb-expiry-seconds"; // [u64]
pub(crate) const HEADER_CONTENT_PATH: &str = "x-ms-content-path"; // [String]
pub(crate) const HEADER_ALT_CONTENT_PATH: &str = "x-ms-alt-content-path"; // [String]
pub(crate) const HEADER_LAST_STATE_CHANGE_UTC: &str = "x-ms-last-state-change-utc"; // [DateTime<UTC>]
pub(crate) const HEADER_RESOURCE_QUOTA: &str = "x-ms-resource-quota"; // [ResourceQuota]
pub(crate) const HEADER_RESOURCE_USAGE: &str = "x-ms-resource-usage"; // [ResourceQuota]
pub(crate) const HEADER_QUORUM_ACKED_LSN: &str = "x-ms-quorum-acked-lsn"; // [u64]
pub(crate) const HEADER_CURRENT_WRITE_QUORUM: &str = "x-ms-current-write-quorum"; // [u64]
pub(crate) const HEADER_CURRENT_REPLICA_SET_SIZE: &str = "x-ms-current-replica-set-size"; // [u64]
pub(crate) const HEADER_SCHEMA_VERSION: &str = "x-ms-schemaversion"; // [String]
pub(crate) const HEADER_SERVICE_VERSION: &str = "x-ms-serviceversion"; // [String]
pub(crate) const HEADER_GATEWAY_VERSION: &str = "x-ms-gatewayversion"; // [String]
pub(crate) const HEADER_COLLECTION_PARTITION_INDEX: &str = "collection-partition-index"; // [u64]
pub(crate) const HEADER_COLLECTION_SERVICE_INDEX: &str = "collection-service-index"; // [u64]
pub(crate) const HEADER_LSN: &str = "lsn"; // [u64]
pub(crate) const HEADER_GLOBAL_COMMITTED_LSN: &str = "x-ms-global-committed-lsn"; // [u64]
pub(crate) const HEADER_ITEM_LSN: &str = "x-ms-item-lsn"; // [u64]
pub(crate) const HEADER_TRANSPORT_REQUEST_ID: &str = "x-ms-transport-request-id"; // [u64]
pub(crate) const HEADER_COSMOS_LLSN: &str = "x-ms-cosmos-llsn"; // [u64]
pub(crate) const HEADER_COSMOS_ITEM_LLSN: &str = "x-ms-cosmos-item-llsn"; // [u64]
pub(crate) const HEADER_COSMOS_QUORUM_ACKED_LLSN: &str = "x-ms-cosmos-quorum-acked-llsn"; // [u64]
pub(crate) const HEADER_ROLE: &str = "x-ms-xp-role"; // [u64]
pub(crate) const HEADER_MAX_MEDIA_STORAGE_USAGE_MB: &str = "x-ms-max-media-storage-usage-mb"; // [u64]
pub(crate) const HEADER_MEDIA_STORAGE_USAGE_MB: &str = "x-ms-media-storage-usage-mb"; // [u64]
|
use std::rc::Rc;
//use std::ops::{Shl, ShlAssign, Shr, ShrAssign, Rem, RemAssign, BitOrAssign, BitXor, Not, Sub, BitAnd, BitOr};
use num::clamp;
use num::PrimInt;
use num::cast::NumCast;
use crate::shape::*;
use crate::lens::*;
use crate::types::*;
use crate::scope::*;
/// A cursor ("scope") over a vector of words, where each word holds
/// `bits_used` significant bits and `pos` addresses a single global bit.
#[derive(Clone, PartialEq, Eq)]
struct BitWordScope<B> {
    /// Backing words.
    vec: Vec<B>,
    /// Number of significant bits per word.
    /// NOTE(review): assumed to be <= the bit width of `B` — confirm callers.
    bits_used: usize,
    /// Global bit cursor: word index is `pos / bits_used`, bit index is
    /// `pos % bits_used`.
    pos: usize,
}
impl<B> BitWordScope<B> {
    /// Creates a scope over `vec`, treating each word as holding `bits_used`
    /// significant bits; the cursor starts at bit position 0.
    fn with_words(vec: Vec<B>, bits_used: usize) -> BitWordScope<B> {
        // Field-init shorthand replaces the redundant `vec: vec` /
        // `bits_used: bits_used` forms.
        BitWordScope {
            vec,
            bits_used,
            pos: 0,
        }
    }
}
impl<B: PrimInt> BitWordScope<B> {
    /// Lens focused on the single bit under the scope's cursor
    /// (view/set via the free functions below).
    fn bit_lens() -> Lens<BitWordScope<B>, bool> {
        lens(Rc::new(|vec: &BitWordScope<B>| get_bitword_scope_bits(vec)),
             Rc::new(|vec: &mut BitWordScope<B>, a: bool| set_bitword_scope_bits(vec, a)))
    }
}
impl<B: PrimInt> BitWordScope<B> {
    /// Lens focused on the whole word under the scope's cursor.
    fn lens() -> Lens<BitWordScope<B>, B> {
        lens(Rc::new(|vec: &BitWordScope<B>| get_bitword_scope(vec)),
             Rc::new(|vec: &mut BitWordScope<B>, a: B| set_bitword_scope(vec, a)))
    }
}
impl<B> Shape for BitWordScope<B> {
    type Shape = usize;
    /// The shape of the scope is the number of backing words (not bits).
    fn shape(&self) -> usize {
        self.vec.len()
    }
}
/// Reads the bit addressed by the scope's cursor: `pos / bits_used` selects
/// the word, `pos % bits_used` the bit within it.
fn get_bitword_scope_bits<B: PrimInt>(bitword_scope: &BitWordScope<B>) -> bool {
    let index = bitword_scope.pos / bitword_scope.bits_used;
    // Both operands are already usize, so the old `as usize` cast was a no-op.
    let bit_index = bitword_scope.pos % bitword_scope.bits_used;
    (bitword_scope.vec[index] & (B::one() << bit_index)) != B::zero()
}
/// Writes the bit addressed by the scope's cursor: clears the target bit in
/// its word, then ORs in `a` at the same position.
fn set_bitword_scope_bits<B: PrimInt>(scope: &mut BitWordScope<B>, a: bool) {
    let word_index = scope.pos / scope.bits_used;
    let bit_index: usize = scope.pos % scope.bits_used;
    let mask = B::one() << bit_index;
    // bool -> 0/1 in the word's integer type.
    let bit: B = NumCast::from::<u8>(a as u8).unwrap();
    scope.vec[word_index] = (scope.vec[word_index] & !mask) | (bit << bit_index);
}
/// Returns the whole word under the scope's cursor.
fn get_bitword_scope<B: PrimInt>(scope: &BitWordScope<B>) -> B {
    scope.vec[scope.pos / scope.bits_used]
}
/// Overwrites the whole word under the scope's cursor with `a`.
fn set_bitword_scope<B: PrimInt>(scope: &mut BitWordScope<B>, a: B) {
    let word_index = scope.pos / scope.bits_used;
    scope.vec[word_index] = a;
}
impl<B> Scope<usize> for BitWordScope<B> {
    /// Moves the cursor to an absolute bit position, clamped to the last
    /// addressable bit.
    ///
    /// `saturating_sub` avoids the underflow panic the old `total - 1`
    /// computation hit when the backing vector is empty; `min` replaces
    /// `num::clamp` since a usize lower bound of 0 is vacuous.
    fn adjust(&mut self, pos: usize) {
        let last_bit = (self.vec.len() * self.bits_used).saturating_sub(1);
        self.pos = pos.min(last_bit);
    }
}
impl<B> Scope<isize> for BitWordScope<B> {
    /// Moves the cursor by a signed offset, clamped to `[0, last bit]`.
    ///
    /// `saturating_sub` avoids the underflow the old `total - 1` computation
    /// hit when the backing vector is empty (the bound then degenerates to 0).
    fn adjust(&mut self, offset: isize) {
        let last_bit = (self.bits_used * self.vec.len()).saturating_sub(1) as isize;
        let target = (self.pos as isize) + offset;
        self.pos = target.max(0).min(last_bit) as usize;
    }
}
#[test]
fn test_bit_word_scope() {
    // 5 words, 3 significant bits each => 15 addressable bit positions (0..=14).
    let mut bit_word_scope: BitWordScope<u8> = BitWordScope::with_words(vec![1,2,3,4,0x7], 3);
    let lens = BitWordScope::lens();
    // Cursor starts at bit 0 => word 0, which holds 1.
    let current: u8 = (lens.view)(&bit_word_scope);
    assert_eq!(current, 1);
    // Viewing is non-destructive.
    let current: u8 = (lens.view)(&bit_word_scope);
    assert_eq!(current, 1);
    (lens.set)(&mut bit_word_scope, 100);
    let current: u8 = (lens.view)(&bit_word_scope);
    assert_eq!(current, 100);
    // Absolute move to bit 1: still word 0 (1 / 3 == 0), so the set below
    // rewrites word 0.
    bit_word_scope.adjust(1usize);
    (lens.set)(&mut bit_word_scope, 100);
    assert_eq!(bit_word_scope.vec[0], 100);
    // Relative move far past the end clamps to the last bit (14) => word 4.
    bit_word_scope.adjust(100isize);
    let current: u8 = (lens.view)(&bit_word_scope);
    assert_eq!(current, 7);
    // Absolute move past the end clamps the same way.
    bit_word_scope.adjust(100usize);
    let current: u8 = (lens.view)(&bit_word_scope);
    assert_eq!(current, 0x07);
}
|
use std::collections::HashMap;
/// Extra HTTP headers sent with each request.
type Headers = HashMap<String, String>;
/// Connection settings for an OPA (Open Policy Agent) server.
pub struct Opa {
    host: String,
    port: i16,
    version: String,
    ssl: bool,
    headers: Option<Headers>,
}
impl Opa {
    /// Creates the default configuration: `http://localhost:8181`, API
    /// version `v1`, SSL off, no extra headers.
    pub fn new() -> Self {
        Self {
            host: "localhost".to_string(),
            port: 8181,
            version: "v1".to_string(),
            ssl: false,
            headers: None,
        }
    }
    /// Get the host
    ///
    /// Default: localhost
    pub fn host(&self) -> String {
        self.host.clone()
    }
    /// Get the port
    ///
    /// Default: 8181
    pub fn port(&self) -> i16 {
        self.port
    }
    /// Get the version
    ///
    /// Default: v1
    pub fn version(&self) -> String {
        self.version.clone()
    }
    /// Whether HTTPS should be used.
    pub fn ssl(&self) -> bool {
        self.ssl
    }
    /// Extra headers, if any were configured.
    pub fn headers(&self) -> Option<Headers> {
        self.headers.clone()
    }
    /// Base URL of the server.
    ///
    /// Fix: the previous implementation hard-coded the `http` scheme and
    /// ignored the `ssl` flag; it now emits `https` when `ssl` is set.
    pub(crate) fn ip(&self) -> String {
        let scheme = if self.ssl { "https" } else { "http" };
        format!("{}://{}:{}", scheme, self.host, self.port)
    }
}
impl Default for Opa {
    /// Same as [`Opa::new`]; provided so the type follows the standard
    /// `Default` convention.
    fn default() -> Self {
        Self::new()
    }
}
|
use serde::{Deserialize, Serialize};
use common::result::Result;
use crate::application::dtos::{AuthorDto, CategoryDto, PublicationDto};
use crate::domain::author::{AuthorId, AuthorRepository};
use crate::domain::category::CategoryRepository;
use crate::domain::content_manager::{ContentManagerId, ContentManagerRepository};
use crate::domain::publication::{PublicationRepository, Status};
/// Optional filters for a publication search; absent fields apply no filter.
#[derive(Deserialize)]
pub struct SearchCommand {
    /// Keep only publications by this author id.
    author_id: Option<String>,
    /// Keep only publications in this category id.
    category_id: Option<String>,
    /// Keep only publications whose current status renders to this string.
    status: Option<String>,
    /// Keep only publications whose name contains this substring.
    name: Option<String>,
}
/// Result of a publication search: the matching publications as DTOs.
#[derive(Serialize)]
pub struct SearchResponse {
    publications: Vec<PublicationDto>,
}
/// Publication search use case; borrows its repository dependencies.
pub struct Search<'a> {
    author_repo: &'a dyn AuthorRepository,
    category_repo: &'a dyn CategoryRepository,
    content_manager_repo: &'a dyn ContentManagerRepository,
    publication_repo: &'a dyn PublicationRepository,
}
impl<'a> Search<'a> {
    /// Wires the use case to its repository dependencies.
    pub fn new(
        author_repo: &'a dyn AuthorRepository,
        category_repo: &'a dyn CategoryRepository,
        content_manager_repo: &'a dyn ContentManagerRepository,
        publication_repo: &'a dyn PublicationRepository,
    ) -> Self {
        Search {
            author_repo,
            category_repo,
            content_manager_repo,
            publication_repo,
        }
    }
    /// Searches publications, applying the optional filters in `cmd` and a
    /// visibility rule: non-published publications are only visible to
    /// content managers and to their own author.
    pub async fn exec(&self, auth_id: String, cmd: SearchCommand) -> Result<SearchResponse> {
        let content_manager_id = ContentManagerId::new(&auth_id)?;
        // The caller is a content manager iff the lookup succeeds.
        let is_content_manager = self
            .content_manager_repo
            .find_by_id(&content_manager_id)
            .await
            .is_ok();
        let author_id = AuthorId::new(&auth_id)?;
        let mut publications = self.publication_repo.find_all().await?;
        // `Vec::retain` filters in place; the previous code rebuilt the whole
        // vector (`into_iter().filter().collect()`) for every criterion.
        if let Some(author_id) = cmd.author_id {
            publications.retain(|publication| publication.author_id().value() == author_id);
        }
        if let Some(category_id) = cmd.category_id {
            publications
                .retain(|publication| publication.header().category_id().value() == category_id);
        }
        if let Some(status) = cmd.status {
            publications.retain(|publication| {
                publication.status_history().current().status().to_string() == status
            });
        }
        if let Some(name) = cmd.name {
            publications.retain(|publication| publication.header().name().value().contains(&name));
        }
        // Visibility: content managers and the publication's own author see
        // everything; everyone else only sees published items.
        publications.retain(|publication| {
            is_content_manager
                || publication.author_id() == &author_id
                || matches!(
                    publication.status_history().current().status(),
                    Status::Published { .. }
                )
        });
        let mut publication_dtos = Vec::new();
        for publication in publications.iter() {
            let author = self.author_repo.find_by_id(publication.author_id()).await?;
            let category = self
                .category_repo
                .find_by_id(publication.header().category_id())
                .await?;
            publication_dtos.push(
                PublicationDto::from(publication)
                    .author(AuthorDto::from(&author))
                    .category(CategoryDto::from(&category)),
            );
        }
        Ok(SearchResponse {
            publications: publication_dtos,
        })
    }
}
|
use airhobot::prelude::*;
use std::path::PathBuf;
use structopt::StructOpt;
///
/// KEYBOARD SHORTCUTS:
///
/// 1: select field
/// 2: pick pusher color
/// 3: pick puck color
/// 4: simulate puck (place two points in the field)
/// 5: move pusher
/// c: show controls
/// r: reload config
/// f: next frame
/// s: save state
/// q: quit
///
/// use the spacebar for pause
// NOTE(review): the `conflicts_with` attributes below each pass a single
// comma-joined string ("image, video" etc.), but structopt/clap treats that
// as ONE argument name, and `cam_id` does not match any declared field (the
// field is `cam`). These conflicts likely never fire as written — confirm
// against the structopt docs; `conflicts_with_all` with separate names is
// probably what was intended.
#[derive(StructOpt, Debug)]
#[structopt(name = "AirHoBot", verbatim_doc_comment)]
pub struct Args {
    /// config file
    #[structopt(short, long, default_value = "airhobot.toml")]
    pub config_file: PathBuf,
    /// verbose logging (use -vv for trace logging)
    #[structopt(short, long, parse(from_occurrences))]
    pub verbose: u8,
    /// show frame for (in millis)
    #[structopt(short, long, default_value = "50")]
    pub delay: u64,
    /// Use the cam with the given id as input source
    #[structopt(long, conflicts_with = "image, video")]
    pub cam: Option<i32>,
    /// Use the image as input source
    #[structopt(long, conflicts_with = "cam_id, video")]
    pub image: Option<PathBuf>,
    /// Use the video as input source
    #[structopt(long, conflicts_with = "image, cam_id")]
    pub video: Option<PathBuf>,
}
impl Args {
pub fn source(&self) -> Result<Source> {
self.cam
.map(Source::cam)
.or(self.video.as_ref().map(Source::video))
.or(self.image.as_ref().map(Source::image))
.unwrap_or(Err(Error::Arguments {
msg: "input source missing - use the `-h` flag for help".into(),
}))
}
}
|
extern crate rustc_serialize;
extern crate hamming;
extern crate crypto;
extern crate itertools;
use rustc_serialize::hex::{ToHex, FromHex};
use rustc_serialize::base64::{FromBase64, ToBase64, Config, CharacterSet, Newline};
use std::collections::BTreeMap;
use crypto::{ symmetriccipher, buffer, aes, blockmodes };
use crypto::buffer::{ ReadBuffer, WriteBuffer, BufferResult };
use itertools::Itertools;
// Standard base64 alphabet with `=` padding and LF line endings, no line
// wrapping — used by all challenges that encode/decode base64.
const BASE64_CONFIG: Config = Config {
    char_set : CharacterSet::Standard,
    newline : Newline::LF,
    pad : true,
    line_length : None
};
/// XORs `buffer` against `key`, cycling the key when it is shorter than the
/// buffer (repeating-key XOR); the output has the buffer's length.
fn fixed_xor(buffer: &[u8], key: &[u8]) -> Vec<u8> {
    buffer
        .iter()
        .zip(key.iter().cycle())
        .map(|(byte, key_byte)| byte ^ key_byte)
        .collect()
}
/// Scores how English-like `input` is by summing per-character weights
/// following letter frequency (ETAOIN SHRDLU); unknown characters score 0.
fn score_str(input: &str) -> i32 {
    //ETAOINSHRDLU
    let mut score = 0;
    for c in input.chars() {
        // ASCII lowercasing merges the 'E' | 'e' style arm pairs of the
        // original table; non-ASCII characters pass through unchanged.
        score += match c.to_ascii_lowercase() {
            'e' => 13,
            't' => 12,
            'a' => 11,
            'o' => 10,
            'i' => 9,
            'n' => 8,
            's' => 7,
            'h' => 6,
            'r' => 5,
            'd' => 4,
            'l' => 3,
            'u' => 2,
            ' ' => 1,
            _ => 0,
        };
    }
    score
}
/// Tries every single-byte XOR key in 0..127 and returns the
/// `num_results` highest-scoring `(score, key)` candidates, best first.
/// Candidates whose XOR output is not valid UTF-8 are discarded.
///
/// NOTE(review): keys that tie on score overwrite each other in the
/// BTreeMap, so at most one key per distinct score survives — confirm this
/// is acceptable for the challenges that take only the top candidate.
/// Also note `0..127 as u8` binds as `0..(127u8)`, so key 127 itself is
/// never tried.
fn single_byte_cypher_xor(buffer: &[u8], num_results: usize) -> Vec<(i32, u8)> {
    let mut decrypt = BTreeMap::new();
    for i in 0..127 as u8 {
        let decrypted_buf = fixed_xor(buffer, &vec![i]);
        let decrypted_str_result = String::from_utf8(decrypted_buf);
        if decrypted_str_result.is_ok() {
            let decrypted_str = decrypted_str_result.unwrap();
            decrypt.insert(score_str(&decrypted_str), i);
        }
    }
    // BTreeMap iterates in ascending score order; reverse to get best first.
    decrypt.iter().rev().take(num_results).map(|(i, c)| (*i, *c)).collect()
}
/// Estimates the per-bit "randomness" of `buffer` at a candidate key size:
/// pairs up consecutive chunks (even-indexed with odd-indexed), averages
/// their Hamming distance, and normalizes by the chunk size. Lower values
/// suggest `chunk_size` matches the repeating-key length.
fn average_hamming_distance(buffer: &[u8], chunk_size: usize) -> f32 {
    // Split chunks into even- and odd-indexed halves so zipping them pairs
    // each chunk with its neighbor.
    let (even, odd) :
        (Vec<(usize, &[u8])>, Vec<(usize, &[u8])>) = buffer.chunks(chunk_size)
        .enumerate()
        .partition(|&(i, _)| {
            i % 2 == 0
        });
    // Skip the final pair when a trailing short chunk makes lengths differ;
    // hamming::distance requires equal-length slices.
    (even.iter().zip(odd.iter()).filter(|&(a, b)|{
        a.1.len() == b.1.len()
    }).fold(0, |acc, (a, b)| {
        acc + hamming::distance(a.1, b.1)
    }) as f32 / even.len() as f32) / chunk_size as f32
}
fn rank_keysizes(buffer: &[u8], (start, end): (usize, usize)) -> Vec<(usize)> {
let mut distances = BTreeMap::new();
for keysize in start..end {
let distance = (average_hamming_distance(buffer, keysize) * 1000 as f32) as i32;
distances.insert(distance, keysize);
}
distances.iter().map(|(key, val)| *val).collect()
}
/// Recovers a repeating-key XOR key of length `keysize`: transposes the
/// buffer into per-key-byte columns (padding short final chunks with 0) and
/// solves each column as a single-byte XOR cipher.
fn gen_key_with_keysize(buffer: &[u8], keysize: usize) -> Vec<u8> {
    let chunks = buffer.chunks(keysize).collect::<Vec<&[u8]>>();
    (0..keysize).map(|i| {
        // Column i: the i-th byte of every chunk (0 when the chunk is short).
        let column = chunks.iter().map(|v| {
            v.get(i).cloned().unwrap_or(0)
        }).collect::<Vec<u8>>();
        // Best single-byte key for this column. (The unused `enumerate`
        // index from the original was removed.)
        single_byte_cypher_xor(&column, 1)[0].1
    }).collect()
}
/// Decrypts AES-128-ECB data (no padding) with the given 16-byte key,
/// streaming through a 4 KiB scratch buffer.
fn aes_128_ecb_decrypt(encrypted_data: &[u8], key: &[u8]) -> Result<Vec<u8>, symmetriccipher::SymmetricCipherError> {
    let mut decryptor = aes::ecb_decryptor(
        aes::KeySize::KeySize128,
        key,
        blockmodes::NoPadding);
    let mut final_result = Vec::<u8>::new();
    let mut read_buffer = buffer::RefReadBuffer::new(encrypted_data);
    let mut buffer = [0; 4096];
    let mut write_buffer = buffer::RefWriteBuffer::new(&mut buffer);
    loop {
        // `?` replaces the deprecated `try!` macro; semantics are identical.
        let result = decryptor.decrypt(&mut read_buffer, &mut write_buffer, true)?;
        final_result.extend(write_buffer.take_read_buffer().take_remaining().iter().map(|&i| i));
        match result {
            // Underflow: input exhausted, decryption complete.
            BufferResult::BufferUnderflow => break,
            // Overflow: scratch buffer filled; it was drained above, continue.
            BufferResult::BufferOverflow => { }
        }
    }
    Ok(final_result)
}
/// Counts unordered pairs of identical `chunk_size`-byte chunks in `buffer`;
/// a nonzero count is the classic ECB-mode fingerprint.
fn get_num_matching_chunks(buffer: &[u8], chunk_size: usize) -> usize {
    let chunks: Vec<&[u8]> = buffer.chunks(chunk_size).collect();
    let mut matching = 0;
    // Compare every pair (i, j) with i < j — the same pairs the itertools
    // `combinations(2)` of the original produced.
    for i in 0..chunks.len() {
        for j in (i + 1)..chunks.len() {
            if chunks[i] == chunks[j] {
                matching += 1;
            }
        }
    }
    matching
}
// Set 1 / Challenge 1: convert hex to base64.
#[test]
fn challenge_1() {
    let expected = "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t";
    let hex_buffer = "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d".to_string().from_hex().unwrap();
    let base64_str = hex_buffer.to_base64(BASE64_CONFIG);
    assert_eq!(expected, base64_str);
}
// Set 1 / Challenge 2: fixed XOR of two equal-length hex buffers.
#[test]
fn challenge_2() {
    let expected = "746865206b696420646f6e277420706c6179";
    let buffer = "1c0111001f010100061a024b53535009181c".to_string().from_hex().unwrap();
    let key = "686974207468652062756c6c277320657965".to_string().from_hex().unwrap();
    let xor = fixed_xor(&buffer, &key);
    let xor_str = xor.to_hex();
    assert_eq!(expected, xor_str);
}
// Set 1 / Challenge 3: single-byte XOR — prints the top candidates for
// manual inspection rather than asserting a specific key.
#[test]
fn challenge_3() {
    let input = "1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736".to_string().from_hex().unwrap();
    let top_3 = single_byte_cypher_xor(&input, 3);
    for (score, result) in top_3 {
        println!("{} => {}", score, result as char);
    }
    assert!(true);
}
// Set 1 / Challenge 4: detect the one hex line encrypted with single-byte
// XOR — prints the best candidates for manual inspection.
#[test]
fn challenge_4() {
    let input = include_str!("../input/challenge_4.txt");
    let mut best = BTreeMap::new();
    for line in input.lines() {
        for (score, result) in single_byte_cypher_xor(&line.to_string().from_hex().unwrap(), 3) {
            best.insert(score, result as char);
        }
    }
    for (score, result) in best.iter().rev().take(5) {
        println!("{} => {}", score, result);
    }
    assert!(true);
}
// Set 1 / Challenge 5: repeating-key XOR with key "ICE".
#[test]
fn challenge_5() {
    let expected = "0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f";
    let input = "Burning 'em, if you ain't quick and nimble
I go crazy when I hear a cymbal";
    let key = "ICE";
    let encrypted = fixed_xor(&input.as_bytes(), &key.as_bytes());
    let encrypted_hex_str = encrypted.to_hex();
    assert_eq!(expected, encrypted_hex_str);
}
// Sanity check of the hamming crate against the known challenge-6 example.
#[test]
fn hamming_test() {
    assert_eq!(37, hamming::distance("this is a test".as_bytes(), "wokka wokka!!!".as_bytes()));
}
// Set 1 / Challenge 6: break repeating-key XOR — rank key sizes, recover the
// key, and print the decryption for manual inspection.
#[test]
fn challenge_6() {
    let input : String = include_str!("../input/challenge_6.txt").lines().collect();
    let as_bytes = input.from_base64().unwrap();
    let keysizes = rank_keysizes(&as_bytes, (2,40));
    let key = gen_key_with_keysize(&as_bytes, keysizes[0]);
    let decrypted = String::from_utf8(fixed_xor(&as_bytes, &key)).unwrap();
    println!("{}", decrypted);
    assert!(true);
}
// Set 1 / Challenge 7: AES-128-ECB decryption with a known key.
#[test]
fn challenge_7() {
    let input : String = include_str!("../input/challenge_7.txt").lines().collect();
    let as_bytes = input.from_base64().unwrap();
    let key = "YELLOW SUBMARINE".as_bytes();
    let decrypted_data = aes_128_ecb_decrypt(&as_bytes, &key).ok().unwrap();
    println!("{}", String::from_utf8(decrypted_data).unwrap());
    assert!(true);
}
// Set 1 / Challenge 8: detect ECB by looking for repeated 16-byte chunks.
#[test]
fn challenge_8() {
    let input = include_str!("../input/challenge_8.txt");
    let mut line_matches = Vec::new();
    for line in input.lines() {
        line_matches.push((get_num_matching_chunks(line.as_bytes(), 16), line));
    }
    for result in line_matches {
        if result.0 > 0 {
            println!("{} => {:?}", result.0, result.1);
        }
    }
    assert!(true);
}
// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::{self, ErrorKind};
use std::fs;
use std::path::{Path, PathBuf};
/// Returns the size of the file at `path` in bytes.
///
/// Propagates any `fs::metadata` error (e.g. the path does not exist).
pub fn get_file_size(path: &PathBuf) -> io::Result<u64> {
    // `?` replaces the deprecated `try!` macro; semantics are identical.
    let meta = fs::metadata(path)?;
    Ok(meta.len())
}
/// Returns true when `file` exists AND is a regular file (directories and
/// missing paths return false).
pub fn file_exists(file: &PathBuf) -> bool {
    // `&PathBuf` derefs to `&Path`, so the explicit `Path::new` wrapper of
    // the original is unnecessary.
    file.exists() && file.is_file()
}
pub fn delete_file_if_exist(file: &PathBuf) {
match fs::remove_file(file) {
Ok(_) => {}
Err(ref e) if e.kind() == ErrorKind::NotFound => {}
Err(e) => {
warn!("failed to delete file {}: {:?}", file.display(), e);
}
}
}
#[cfg(test)]
mod test {
    use std::io::Write;
    use std::fs::OpenOptions;
    use tempdir::TempDir;
    use super::*;
    #[test]
    fn test_get_file_size() {
        let tmp_dir = TempDir::new("").unwrap();
        let dir_path = tmp_dir.path().to_path_buf();
        // Ensure it works to get the size of an empty file.
        let empty_file = dir_path.join("empty_file");
        {
            // Scoped so the handle is closed before the size is queried.
            let _ = OpenOptions::new()
                .write(true)
                .create_new(true)
                .open(&empty_file)
                .unwrap();
        }
        assert_eq!(get_file_size(&empty_file).unwrap(), 0);
        // Ensure it works to get the size of an non-empty file.
        let non_empty_file = dir_path.join("non_empty_file");
        let size = 5;
        let v = vec![0; size];
        {
            let mut f = OpenOptions::new()
                .write(true)
                .create_new(true)
                .open(&non_empty_file)
                .unwrap();
            f.write_all(&v[..]).unwrap();
        }
        assert_eq!(get_file_size(&non_empty_file).unwrap(), size as u64);
        // Ensure it works for non-existent file.
        let non_existent_file = dir_path.join("non_existent_file");
        assert!(get_file_size(&non_existent_file).is_err());
    }
    #[test]
    fn test_file_exists() {
        let tmp_dir = TempDir::new("").unwrap();
        let dir_path = tmp_dir.path().to_path_buf();
        // A directory is not a regular file.
        assert_eq!(file_exists(&dir_path), false);
        let existent_file = dir_path.join("empty_file");
        {
            let _ = OpenOptions::new()
                .write(true)
                .create_new(true)
                .open(&existent_file)
                .unwrap();
        }
        assert_eq!(file_exists(&existent_file), true);
        let non_existent_file = dir_path.join("non_existent_file");
        assert_eq!(file_exists(&non_existent_file), false);
    }
    #[test]
    fn test_delete_file_if_exist() {
        let tmp_dir = TempDir::new("").unwrap();
        let dir_path = tmp_dir.path().to_path_buf();
        let existent_file = dir_path.join("empty_file");
        {
            let _ = OpenOptions::new()
                .write(true)
                .create_new(true)
                .open(&existent_file)
                .unwrap();
        }
        assert_eq!(file_exists(&existent_file), true);
        delete_file_if_exist(&existent_file);
        assert_eq!(file_exists(&existent_file), false);
        // Deleting a missing file must be a silent no-op, not a panic.
        let non_existent_file = dir_path.join("non_existent_file");
        delete_file_if_exist(&non_existent_file);
    }
}
|
extern crate proc_macro;
use std::collections::HashSet;
//use anyhow::{anyhow, bail, Result};
use layout::{get_struct_member_layout, StructMemberLayout};
use proc_macro::TokenStream;
use quote::quote;
use syn::DeriveInput;
use syn::{
parse_macro_input, Attribute, Data, Error, Field, Fields, GenericArgument, Ident, Lit,
NestedMeta, PathArguments, Result, Type, TypePath,
};
mod layout;
/// The scalar and small-vector types accepted as point-attribute fields in a
/// `#[derive(PointType)]` struct; mirrors
/// `pasture_core::layout::PointAttributeDataType`.
enum PasturePrimitiveType {
    U8,
    I8,
    U16,
    I16,
    U32,
    I32,
    U64,
    I64,
    F32,
    F64,
    Bool,
    Vec3u8,
    Vec3u16,
    Vec3f32,
    Vec3f64,
    Vec4u8,
}
impl PasturePrimitiveType {
    /// Minimum alignment of the type in bytes (for vectors: the alignment of
    /// the component scalar).
    fn min_alignment(&self) -> u64 {
        // Consistency fix: the Vec4u8 arms used an explicit `&Type::…`
        // reference pattern while every other arm relied on match ergonomics;
        // all arms now use the same pattern form.
        match self {
            PasturePrimitiveType::U8 => 1,
            PasturePrimitiveType::I8 => 1,
            PasturePrimitiveType::U16 => 2,
            PasturePrimitiveType::I16 => 2,
            PasturePrimitiveType::U32 => 4,
            PasturePrimitiveType::I32 => 4,
            PasturePrimitiveType::U64 => 8,
            PasturePrimitiveType::I64 => 8,
            PasturePrimitiveType::F32 => 4,
            PasturePrimitiveType::F64 => 8,
            PasturePrimitiveType::Bool => 1,
            PasturePrimitiveType::Vec3u8 => 1,
            PasturePrimitiveType::Vec3u16 => 2,
            PasturePrimitiveType::Vec3f32 => 4,
            PasturePrimitiveType::Vec3f64 => 8,
            PasturePrimitiveType::Vec4u8 => 1,
        }
    }
    /// Size of the type in bytes (for vectors: component size × lane count).
    fn size(&self) -> u64 {
        match self {
            PasturePrimitiveType::U8 => 1,
            PasturePrimitiveType::I8 => 1,
            PasturePrimitiveType::U16 => 2,
            PasturePrimitiveType::I16 => 2,
            PasturePrimitiveType::U32 => 4,
            PasturePrimitiveType::I32 => 4,
            PasturePrimitiveType::U64 => 8,
            PasturePrimitiveType::I64 => 8,
            PasturePrimitiveType::F32 => 4,
            PasturePrimitiveType::F64 => 8,
            PasturePrimitiveType::Bool => 1,
            PasturePrimitiveType::Vec3u8 => 3,
            PasturePrimitiveType::Vec3u16 => 6,
            PasturePrimitiveType::Vec3f32 => 12,
            PasturePrimitiveType::Vec3f64 => 24,
            PasturePrimitiveType::Vec4u8 => 4,
        }
    }
    /// Emits the `pasture_core` enum variant path naming this type, for use
    /// in generated code.
    fn as_token_stream(&self) -> quote::__private::TokenStream {
        match self {
            PasturePrimitiveType::U8 => quote! {pasture_core::layout::PointAttributeDataType::U8},
            PasturePrimitiveType::I8 => quote! {pasture_core::layout::PointAttributeDataType::I8},
            PasturePrimitiveType::U16 => quote! {pasture_core::layout::PointAttributeDataType::U16},
            PasturePrimitiveType::I16 => quote! {pasture_core::layout::PointAttributeDataType::I16},
            PasturePrimitiveType::U32 => quote! {pasture_core::layout::PointAttributeDataType::U32},
            PasturePrimitiveType::I32 => quote! {pasture_core::layout::PointAttributeDataType::I32},
            PasturePrimitiveType::U64 => quote! {pasture_core::layout::PointAttributeDataType::U64},
            PasturePrimitiveType::I64 => quote! {pasture_core::layout::PointAttributeDataType::I64},
            PasturePrimitiveType::F32 => quote! {pasture_core::layout::PointAttributeDataType::F32},
            PasturePrimitiveType::F64 => quote! {pasture_core::layout::PointAttributeDataType::F64},
            PasturePrimitiveType::Bool => {
                quote! {pasture_core::layout::PointAttributeDataType::Bool}
            }
            PasturePrimitiveType::Vec3u8 => {
                quote! {pasture_core::layout::PointAttributeDataType::Vec3u8}
            }
            PasturePrimitiveType::Vec3u16 => {
                quote! {pasture_core::layout::PointAttributeDataType::Vec3u16}
            }
            PasturePrimitiveType::Vec3f32 => {
                quote! {pasture_core::layout::PointAttributeDataType::Vec3f32}
            }
            PasturePrimitiveType::Vec3f64 => {
                quote! {pasture_core::layout::PointAttributeDataType::Vec3f64}
            }
            PasturePrimitiveType::Vec4u8 => {
                quote! {pasture_core::layout::PointAttributeDataType::Vec4u8}
            }
        }
    }
}
/// Maps a bare type identifier (`u8`, `f64`, `bool`, ...) to the matching
/// Pasture primitive type, or a spanned error for anything else.
fn get_primitive_type_for_ident_type(ident: &Ident) -> Result<PasturePrimitiveType> {
    let type_name = ident.to_string();
    match type_name.as_str() {
        "u8" => Ok(PasturePrimitiveType::U8),
        "u16" => Ok(PasturePrimitiveType::U16),
        "u32" => Ok(PasturePrimitiveType::U32),
        "u64" => Ok(PasturePrimitiveType::U64),
        "i8" => Ok(PasturePrimitiveType::I8),
        "i16" => Ok(PasturePrimitiveType::I16),
        "i32" => Ok(PasturePrimitiveType::I32),
        "i64" => Ok(PasturePrimitiveType::I64),
        "f32" => Ok(PasturePrimitiveType::F32),
        "f64" => Ok(PasturePrimitiveType::F64),
        "bool" => Ok(PasturePrimitiveType::Bool),
        // Attach the error to the offending identifier's span so the compiler
        // diagnostic points at the user's field type.
        _ => Err(Error::new_spanned(
            ident,
            format!("Type {} is no valid Pasture primitive type!", type_name),
        )),
    }
}
fn get_primitive_type_for_non_ident_type(type_path: &TypePath) -> Result<PasturePrimitiveType> {
// Path should have an ident (Vector3, Vector4, ...), as well as one generic argument
let valid_idents: HashSet<_> = ["Vector3", "Vector4"].iter().collect();
let path_segment = type_path
.path
.segments
.first()
.ok_or_else(|| Error::new_spanned(&type_path.path, "Invalid type"))?;
if !valid_idents.contains(&path_segment.ident.to_string().as_str()) {
return Err(Error::new_spanned(&path_segment.ident, "Invalid type"));
}
let path_arg = match &path_segment.arguments {
PathArguments::AngleBracketed(arg) => arg,
_ => return Err(Error::new_spanned(&path_segment.arguments, "Invalid type")),
};
let first_generic_arg = path_arg
.args
.first()
.ok_or_else(|| Error::new_spanned(path_arg, "Invalid type arguments"))?;
let type_arg = match first_generic_arg {
GenericArgument::Type(t) => t,
_ => return Err(Error::new_spanned(first_generic_arg, "Invalid type")),
};
let type_path = match type_arg {
Type::Path(p) => p,
_ => return Err(Error::new_spanned(type_arg, "Invalid type")),
};
match type_path.path.get_ident() {
Some(ident) => {
// Not ALL primitive types are supported as generic arguments for Vector3
let type_name = ident.to_string();
match path_segment.ident.to_string().as_str() {
"Vector3" => match type_name.as_str() {
"u8" => Ok(PasturePrimitiveType::Vec3u8),
"u16" => Ok(PasturePrimitiveType::Vec3u16),
"f32" => Ok(PasturePrimitiveType::Vec3f32),
"f64" => Ok(PasturePrimitiveType::Vec3f64),
_ => Err(Error::new_spanned(
ident,
format!("Vector3<{}> is no valid Pasture primitive type. Vector3 is supported, but only for generic argument(s) u8, u16, f32 or f64", type_name),
))
},
"Vector4" => match type_name.as_str() {
"u8" => Ok(PasturePrimitiveType::Vec4u8),
_ => Err(Error::new_spanned(
ident,
format!("Vector4<{}> is no valid Pasture primitive type. Vector4 is supported, but only for generic argument(s) u8", type_name),
))
},
_ => Err(Error::new_spanned(ident, format!("Invalid type"))),
}
}
None => Err(Error::new_spanned(&type_path.path, "Invalid type")),
}
}
/// Map the type of a struct field onto the corresponding Pasture primitive type.
/// Qualified types (`<T as Trait>::X`) are rejected up front; plain identifiers
/// and generic paths are handled by separate helpers.
fn type_path_to_primitive_type(type_path: &TypePath) -> Result<PasturePrimitiveType> {
    if type_path.qself.is_some() {
        return Err(Error::new_spanned(
            type_path,
            "Qualified types are illegal in a struct with #[derive(PointType)]",
        ));
    }
    match type_path.path.get_ident() {
        Some(ident) => get_primitive_type_for_ident_type(ident),
        None => get_primitive_type_for_non_ident_type(type_path),
    }
}
fn get_attribute_name_from_field(field: &Field) -> Result<String> {
if field.attrs.len() != 1 {
return Err(Error::new_spanned(
field,
"derive(PointType) requires exactly one #[pasture] attribute per member!",
));
}
let pasture_attribute = &field.attrs[0];
let meta = pasture_attribute.parse_meta()?;
// TODO Better explanation of the builtin Pasture attributes in this error message!
let malformed_field_error_msg = "#[pasture] attribute is malformed. Correct syntax is #[pasture(attribute = \"NAME\")] or #[pasture(BUILTIN_XXX)], where XXX matches any of the builtin attributes in Pasture.";
// For now, we expect that 'meta' is a Meta::List containing a single entry
// The entry should be a NameValue, corresponding to 'attribute = "NAME"', or a Path, corresponding to 'builtin_XXX', where XXX matches any of the basic
// builtin attributes in Pasture (such as INTENSITY, POSITION_3D etc.)
match &meta {
syn::Meta::List(list) => {
let first_list_entry = list
.nested
.first()
.ok_or_else(|| Error::new_spanned(list, malformed_field_error_msg))?;
let nested_meta = match first_list_entry {
NestedMeta::Meta(nested_meta) => nested_meta,
_ => return Err(Error::new_spanned(list, malformed_field_error_msg)),
};
match nested_meta {
syn::Meta::Path(path) => {
let ident = path
.get_ident()
.ok_or_else(|| Error::new_spanned(path, malformed_field_error_msg))?;
let ident_as_str = ident.to_string();
match ident_as_str.as_str() {
"BUILTIN_POSITION_3D" => Ok("Position3D".into()),
"BUILTIN_INTENSITY" => Ok("Intensity".into()),
"BUILTIN_RETURN_NUMBER" => Ok("ReturnNumber".into()),
"BUILTIN_NUMBER_OF_RETURNS" => Ok("NumberOfReturns".into()),
"BUILTIN_CLASSIFICATION_FLAGS" => Ok("ClassificationFlags".into()),
"BUILTIN_SCANNER_CHANNEL" => Ok("ScannerChannel".into()),
"BUILTIN_SCAN_DIRECTION_FLAG" => Ok("ScanDirectionFlag".into()),
"BUILTIN_EDGE_OF_FLIGHT_LINE" => Ok("EdgeOfFlightLine".into()),
"BUILTIN_CLASSIFICATION" => Ok("Classification".into()),
"BUILTIN_SCAN_ANGLE_RANK" => Ok("ScanAngleRank".into()),
"BUILTIN_SCAN_ANGLE" => Ok("ScanAngle".into()),
"BUILTIN_USER_DATA" => Ok("UserData".into()),
"BUILTIN_POINT_SOURCE_ID" => Ok("PointSourceID".into()),
"BUILTIN_COLOR_RGB" => Ok("ColorRGB".into()),
"BUILTIN_GPS_TIME" => Ok("GpsTime".into()),
"BUILTIN_NIR" => Ok("NIR".into()),
"BUILTIN_WAVE_PACKET_DESCRIPTOR_INDEX" => {
Ok("WavePacketDescriptorIndex".into())
}
"BUILTIN_WAVEFORM_DATA_OFFSET" => Ok("WaveformDataOffset".into()),
"BUILTIN_WAVEFORM_PACKET_SIZE" => Ok("WaveformPacketSize".into()),
"BUILTIN_RETURN_POINT_WAVEFORM_LOCATION" => {
Ok("ReturnPointWaveformLocation".into())
}
"BUILTIN_WAVEFORM_PARAMETERS" => Ok("WaveformParameters".into()),
"BUILTIN_POINT_ID" => Ok("PointID".into()),
"BUILTIN_NORMAL" => Ok("Normal".into()),
// TODO Other attributes
_ => {
return Err(Error::new_spanned(
ident,
format!("Unrecognized attribute name {}", ident_as_str),
))
}
}
}
syn::Meta::NameValue(name_value) => name_value
.path
.get_ident()
.and_then(|path| {
if path != "attribute" {
return None;
}
if let Lit::Str(ref attribute_name) = name_value.lit {
Some(attribute_name.value())
} else {
None
}
})
.ok_or_else(|| Error::new_spanned(name_value, malformed_field_error_msg)),
bad => Err(Error::new_spanned(bad, malformed_field_error_msg)),
}
}
bad => Err(Error::new_spanned(bad, malformed_field_error_msg)),
}
}
/// Describes a single field within a `PointType` struct. Contains the name of the field, the point attribute
/// that the field maps to, as well as the primitive type of the field
struct FieldLayoutDescription {
    /// Name of the point attribute this field maps to (e.g. "Position3D" or a custom name)
    pub attribute_name: String,
    /// The Pasture primitive type of the field
    pub primitive_type: PasturePrimitiveType,
}
fn get_field_layout_descriptions(fields: &Fields) -> Result<Vec<FieldLayoutDescription>> {
fields
.iter()
.map(|field| match field.ty {
Type::Path(ref type_path) => {
let primitive_type = type_path_to_primitive_type(type_path)?;
let attribute_name = get_attribute_name_from_field(field)?;
Ok(FieldLayoutDescription {
attribute_name,
primitive_type,
})
}
ref bad => Err(Error::new_spanned(
bad,
format!("Invalid type in PointType struct"),
)),
})
.collect::<Result<Vec<FieldLayoutDescription>>>()
}
/// Validate that the derive target is a struct and collect the layout
/// description of each of its fields.
fn field_parameters(data: &Data, ident: &Ident) -> Result<Vec<FieldLayoutDescription>> {
    // TODO Make sure that structs are #[repr(C)] - OR figure out the exact layout of the members in the struct. But #[repr(rust)] is allowed
    // to re-order the fields in the struct, which would (maybe?) break the Layout. Then again, if we correctly determine offsets and sizes of
    // fields, the order might not be important anymore?! It's really quite tricky to get this right and will need a lot of tests
    // We can use this maybe: https://doc.rust-lang.org/std/alloc/struct.Layout.html
    //
    //let member_layout = get_struct_member_layout(type_attributes, struct_data)?;
    match data {
        Data::Struct(struct_data) => get_field_layout_descriptions(&struct_data.fields),
        _ => Err(Error::new_spanned(
            ident,
            "#[derive(PointType)] is only valid for structs",
        )),
    }
}
/// Compute the byte offset of every field plus the overall alignment of the
/// `PointType` struct, honoring its `#[repr(C)]` / `#[repr(packed)]` layout.
///
/// Returns `(offsets, max_alignment)` where `offsets[i]` is the byte offset of
/// `fields[i]` within the struct.
fn calculate_offsets_and_alignment(
    fields: &[FieldLayoutDescription],
    data: &Data,
    ident: &Ident,
    type_attributes: &[Attribute],
) -> Result<(Vec<u64>, u64)> {
    let struct_data = match data {
        Data::Struct(struct_data) => struct_data,
        _ => {
            return Err(Error::new_spanned(
                ident,
                "#[derive(PointType)] is only valid for structs",
            ))
        }
    };
    let struct_layout = get_struct_member_layout(type_attributes, struct_data)?;
    let mut current_offset = 0;
    let mut max_alignment = 1;
    let mut offsets = vec![];
    for field in fields {
        // `packed(N)` caps each field's alignment at N, `repr(C)` uses the
        // field's natural minimum alignment. (Binding renamed from
        // `max_alignment` to avoid shadowing the accumulator above.)
        let min_alignment = match struct_layout {
            StructMemberLayout::C => field.primitive_type.min_alignment(),
            StructMemberLayout::Packed(packed_alignment) => {
                std::cmp::min(packed_alignment, field.primitive_type.min_alignment())
            }
        };
        max_alignment = std::cmp::max(min_alignment, max_alignment);
        // Round the running offset up to the next multiple of `min_alignment`
        let aligned_offset = ((current_offset + min_alignment - 1) / min_alignment) * min_alignment;
        offsets.push(aligned_offset);
        current_offset = aligned_offset + field.primitive_type.size();
    }
    Ok((offsets, max_alignment))
}
/// Custom `derive` macro that implements the [`PointType`](pasture_core::layout::PointType) trait for the type that it is applied to.
///
/// Any type that wants to implement `PointType` using this `derive` macro must fulfill the following requirements:
/// - It must be at least one of `#[repr(C)]` and `#[repr(packed)]`
/// - All its members may only be [Pasture primitive types](pasture_core::layout::PointAttributeDataType)
/// - Each member must contain an attribute `#[pasture(X)]`, where `X` is either one of the builtin attributes explained below, or `attribute = "name"` for a custom attribute named `name`
/// - No two members may share the same attribute name
///
/// # Builtin attributes
///
/// To associate a member of a custom `PointType` with one of the builtin point attributes in Pasture, use the `#[pasture(X)]` attribute, where `X` is one of:
///
/// - `BUILTIN_POSITION_3D` corresponding to the [POSITION_3D](pasture_core::layout::attributes::POSITION_3D) attribute
/// - `BUILTIN_INTENSITY` corresponding to the [INTENSITY](pasture_core::layout::attributes::INTENSITY) attribute
/// - `BUILTIN_RETURN_NUMBER` corresponding to the [RETURN_NUMBER](pasture_core::layout::attributes::RETURN_NUMBER) attribute
/// - `BUILTIN_NUMBER_OF_RETURNS` corresponding to the [NUMBER_OF_RETURNS](pasture_core::layout::attributes::NUMBER_OF_RETURNS) attribute
/// - `BUILTIN_CLASSIFICATION_FLAGS` corresponding to the [CLASSIFICATION_FLAGS](pasture_core::layout::attributes::CLASSIFICATION_FLAGS) attribute
/// - `BUILTIN_SCANNER_CHANNEL` corresponding to the [SCANNER_CHANNEL](pasture_core::layout::attributes::SCANNER_CHANNEL) attribute
/// - `BUILTIN_SCAN_DIRECTION_FLAG` corresponding to the [SCAN_DIRECTION_FLAG](pasture_core::layout::attributes::SCAN_DIRECTION_FLAG) attribute
/// - `BUILTIN_EDGE_OF_FLIGHT_LINE` corresponding to the [EDGE_OF_FLIGHT_LINE](pasture_core::layout::attributes::EDGE_OF_FLIGHT_LINE) attribute
/// - `BUILTIN_CLASSIFICATION` corresponding to the [CLASSIFICATION](pasture_core::layout::attributes::CLASSIFICATION) attribute
/// - `BUILTIN_SCAN_ANGLE_RANK` corresponding to the [SCAN_ANGLE_RANK](pasture_core::layout::attributes::SCAN_ANGLE_RANK) attribute
/// - `BUILTIN_SCAN_ANGLE` corresponding to the [SCAN_ANGLE](pasture_core::layout::attributes::SCAN_ANGLE) attribute
/// - `BUILTIN_USER_DATA` corresponding to the [USER_DATA](pasture_core::layout::attributes::USER_DATA) attribute
/// - `BUILTIN_POINT_SOURCE_ID` corresponding to the [POINT_SOURCE_ID](pasture_core::layout::attributes::POINT_SOURCE_ID) attribute
/// - `BUILTIN_COLOR_RGB` corresponding to the [COLOR_RGB](pasture_core::layout::attributes::COLOR_RGB) attribute
/// - `BUILTIN_GPS_TIME` corresponding to the [GPS_TIME](pasture_core::layout::attributes::GPS_TIME) attribute
/// - `BUILTIN_NIR` corresponding to the [NIR](pasture_core::layout::attributes::NIR) attribute
/// - `BUILTIN_WAVE_PACKET_DESCRIPTOR_INDEX` corresponding to the [WAVE_PACKET_DESCRIPTOR_INDEX](pasture_core::layout::attributes::WAVE_PACKET_DESCRIPTOR_INDEX) attribute
/// - `BUILTIN_WAVEFORM_DATA_OFFSET` corresponding to the [WAVEFORM_DATA_OFFSET](pasture_core::layout::attributes::WAVEFORM_DATA_OFFSET) attribute
/// - `BUILTIN_WAVEFORM_PACKET_SIZE` corresponding to the [WAVEFORM_PACKET_SIZE](pasture_core::layout::attributes::WAVEFORM_PACKET_SIZE) attribute
/// - `BUILTIN_RETURN_POINT_WAVEFORM_LOCATION` corresponding to the [RETURN_POINT_WAVEFORM_LOCATION](pasture_core::layout::attributes::RETURN_POINT_WAVEFORM_LOCATION) attribute
/// - `BUILTIN_WAVEFORM_PARAMETERS` corresponding to the [WAVEFORM_PARAMETERS](pasture_core::layout::attributes::WAVEFORM_PARAMETERS) attribute
/// - `BUILTIN_POINT_ID` corresponding to the [POINT_ID](pasture_core::layout::attributes::POINT_ID) attribute
/// - `BUILTIN_NORMAL` corresponding to the [NORMAL](pasture_core::layout::attributes::NORMAL) attribute
///
/// # Custom attributes
///
/// To associate a member of a custom `PointType` with a point attribute with custom `name`, use the `#[pasture(attribute = "name")]` attribute
#[proc_macro_derive(PointType, attributes(pasture))]
pub fn derive_point_type(item: TokenStream) -> TokenStream {
    let input = parse_macro_input!(item as DeriveInput);
    // What we want:
    // - Ensure that the current type is a struct and not an enum
    // - Get all members of the struct
    // - Ensure that all members are one of the accepted primitive types that pasture-core defines
    // - Ensure that each member has an appropriate attribute on it for the name of the attribute
    // - Get the name, size and offset of each member, in order
    // - Generate an impl PointType for the struct, where we build the layout using the types, names, sizes and offsets
    if !input.generics.params.is_empty() {
        return Error::new_spanned(input, "derive(PointType) is not valid for generic types")
            .to_compile_error()
            .into();
    }
    let name = &input.ident;
    let fields = match field_parameters(&input.data, name) {
        Ok(inner) => inner,
        Err(why) => {
            return why.to_compile_error().into();
        }
    };
    let (offsets, type_alignment) =
        match calculate_offsets_and_alignment(&fields, &input.data, name, input.attrs.as_slice()) {
            Ok(inner) => inner,
            Err(why) => {
                return why.to_compile_error().into();
            }
        };
    // One `custom(...).at_offset_in_type(...)` definition per struct field, in order
    let attribute_descriptions = fields.iter().zip(offsets.iter()).map(|(field, offset)| {
        let attribute_name = &field.attribute_name;
        let primitive_type = &field.primitive_type.as_token_stream();
        quote! {
            pasture_core::layout::PointAttributeDefinition::custom(#attribute_name, #primitive_type).at_offset_in_type(#offset)
        }
    });
    let gen = quote! {
        impl pasture_core::layout::PointType for #name {
            fn layout() -> pasture_core::layout::PointLayout {
                pasture_core::layout::PointLayout::from_members_and_alignment(&[
                    #(#attribute_descriptions ,)*
                ], #type_alignment)
            }
        }
    };
    gen.into()
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::{bail, ensure, Result};
use libra_crypto::HashValue;
use libra_types::{account_address::AccountAddress, transaction::TransactionArgument};
use sgtypes::{channel_transaction::ChannelOp, htlc::HtlcPayment};
pub(crate) mod actor_timer;
pub mod coerce_derive;
pub(crate) mod contract;
/// check if the `op` is a htlc transfer
pub fn is_htlc_transfer(op: &ChannelOp) -> bool {
    // Only `ChannelScript.send_payment` on the default (framework) address counts.
    let (module_address, module_name, function_name) = match op {
        ChannelOp::Action {
            module_address,
            module_name,
            function_name,
        } => (module_address, module_name, function_name),
        _ => return false,
    };
    *module_address == AccountAddress::default()
        && module_name.as_str() == "ChannelScript"
        && function_name.as_str() == "send_payment"
}
/// check if the `op` is a htlc receive
/// (doc fixed: the original comment was copy-pasted from `is_htlc_transfer`)
pub fn is_htlc_receive(op: &ChannelOp) -> bool {
    match op {
        ChannelOp::Action {
            module_address,
            module_name,
            function_name,
        } => {
            module_address == &AccountAddress::default()
                && module_name.as_str() == "ChannelScript"
                && function_name.as_str() == "receive_payment"
        }
        _ => false,
    }
}
/// get hash lock, amount and timeout from `args` which should be the args of `ChannelScript.send_payment`
pub fn parse_htlc_payment(args: &[TransactionArgument]) -> Result<HtlcPayment> {
    ensure!(args.len() == 4, "send_payment should have 4 args");
    // args[0] is not read here — presumably the payee address; verify against
    // the ChannelScript.send_payment signature.
    let amount = match &args[1] {
        TransactionArgument::U64(a) => *a,
        // Message fixed: args[1] is the 2nd arg in the 1-based numbering used below
        _ => bail!("2nd arg of send_payment should be u64"),
    };
    let hash_lock = match &args[2] {
        TransactionArgument::ByteArray(d) => HashValue::from_slice(d.as_bytes())?,
        _ => bail!("3rd arg of send_payment should be byte array"),
    };
    let timeout = match &args[3] {
        TransactionArgument::U64(a) => *a,
        _ => bail!("4th arg of send_payment should be u64"),
    };
    Ok(HtlcPayment::new(hash_lock, amount, timeout))
}
/// get preimage value from `args` which should be the args of `ChannelScript.receive_payment`
pub fn parse_htlc_preimage(args: &[TransactionArgument]) -> Result<HashValue> {
    ensure!(args.len() == 1, "receive_payment should have 1 arg");
    match &args[0] {
        TransactionArgument::ByteArray(d) => HashValue::from_slice(d.as_bytes()),
        // Message fixed: args[0] is the 1st (and only) arg, not "the 2th"
        _ => bail!("the 1st arg of receive_payment should be byte array"),
    }
}
|
/// Print the given value to stdout with a short label.
fn print_u32(n: u32) {
    let message = format!("The number is {}", n);
    println!("{}", message);
}
/// Return the product of two unsigned integers.
fn product(x: u32, y: u32) -> u32 {
    x * y
}
/// Increment the referenced value in place.
fn inc(n: &mut u32) {
    *n += 1;
}
fn main() {
    // Demonstrate the three helpers: plain print, product, in-place increment.
    let first = 10;
    let mut second = 15;
    print_u32(first);
    print_u32(product(first, 5));
    inc(&mut second);
    print_u32(second);
}
|
//! A light-emitting material.
use crate::{
hittable::{HitRecord, UVCoord},
material::Scatter,
ray::Ray,
texture::Texture,
vec3,
vec3::Vec3,
};
use rand::prelude::*;
/// A light-emitting material. Can hold any texture. Will not reflect rays.
/// A light-emitting material. Can hold any texture. Will not reflect rays.
/// Emission is only returned for front-face hits (see `DiffuseLight::emitted`).
#[derive(Debug, Clone)]
pub struct DiffuseLight {
    /// The emitting texture.
    pub emit: Texture,
}
impl DiffuseLight {
/// Create a new diffuse light.
pub fn new(emit: Texture) -> Self {
Self { emit }
}
pub fn scatter<R: Rng + ?Sized>(
&self,
_rng: &mut R,
_ray: &Ray,
_rec: &HitRecord,
) -> Option<Scatter> {
None
}
pub fn emitted(&self, rec: &HitRecord, uv_coord: UVCoord, point: &Vec3) -> Vec3 {
if rec.front_face {
self.emit.0(uv_coord, point)
} else {
vec3!()
}
}
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::Result;
use starcoin_crypto::HashValue;
pub use starcoin_state_tree::StateNodeStore;
use starcoin_types::{
access_path::AccessPath, account_address::AccountAddress, account_state::AccountState,
};
mod chain_state;
pub mod mock;
pub use chain_state::{
AccountStateReader, ChainState, ChainStateReader, ChainStateWriter, StateProof, StateWithProof,
};
pub use starcoin_vm_types::state_view::StateView;
/// A chain-state reader whose view can be re-pointed at a different state root.
pub trait ChainStateService: ChainStateReader {
    /// Use new state_root for loading chain state.
    fn change_root(&mut self, state_root: HashValue);
}
/// Async access to chain state; implementors are cloneable handles usable
/// across threads.
#[async_trait::async_trait]
pub trait ChainStateAsyncService: Clone + std::marker::Unpin + Send + Sync {
    /// Fetch the raw value stored at `access_path`, if any.
    async fn get(self, access_path: AccessPath) -> Result<Option<Vec<u8>>>;
    /// Fetch the value at `access_path` together with its state proof.
    async fn get_with_proof(self, access_path: AccessPath) -> Result<StateWithProof>;
    /// Fetch the account state for `address`, if present.
    async fn get_account_state(self, address: AccountAddress) -> Result<Option<AccountState>>;
    /// Current state root hash.
    async fn state_root(self) -> Result<HashValue>;
}
|
use soldier::Soldier;
use utilities::Position;
extern crate crossterm;
use std::io::{stdout, Write};
use self::crossterm::{
execute,
style,
Color,
Goto,
PrintStyledFont,
Show
};
/// A group of soldiers that can be drawn to the terminal.
pub struct Squad {
    /// The soldiers that make up this squad.
    pub members: Vec<Soldier>
}
impl Squad {
pub fn new() -> Squad {
return Squad {
members: vec![
Soldier::new(String::from("Jinpachi"), Position { x: 2, y: 2 }),
Soldier::new(String::from("Heihachi"), Position { x: 2, y: 3 }),
Soldier::new(String::from("Kazumi"), Position { x: 2, y: 4 }),
Soldier::new(String::from("Kazuya"), Position { x: 3, y: 2 }),
Soldier::new(String::from("Jun"), Position { x: 3, y: 3 }),
Soldier::new(String::from("Jin"), Position { x: 3, y: 4 }),
]
}
}
pub fn draw(&self) {
for soldier in &self.members {
execute!(stdout(),
Goto(soldier.pos.x, soldier.pos.y),
PrintStyledFont(style("@").with(Color::White)),
Show
).unwrap();
}
}
} |
use std::io::{Error, BufReader, Read};
use crate::types::*;
/// Result type used throughout the decoder.
pub type DecodeResult<T> = Result<T, DecodeError>;
/// Errors that can occur while decoding.
pub enum DecodeError {
    /// The underlying reader failed.
    Io(Error),
    /// Malformed input (e.g. an unrecognized value-type byte).
    Error,
}
impl From<Error> for DecodeError {
    /// Wrap an I/O error so `?` can be used directly on reader operations.
    fn from(e: Error) -> Self {
        DecodeError::Io(e)
    }
}
/// Streaming decoder over an arbitrary byte source.
pub struct Decoder<R: Read> {
    // The underlying byte source.
    reader: R,
}
impl<R: Read> Decoder<R> {
pub fn new(reader: R) -> Self {
Self { reader: reader }
}
pub fn byte(&mut self) -> DecodeResult<u8> {
let mut buf = [0];
self.reader.read_exact(&mut buf)?;
Ok(buf[0])
}
pub fn u16(&mut self) -> DecodeResult<u16> {
let lo = self.byte()?;
let hi = self.byte()?;
Ok((hi as u16) << 8 | (lo as u16))
}
pub fn u32(&mut self) -> DecodeResult<u32> {
let lo = self.u16()?;
let hi = self.u16()?;
Ok((hi as u32) << 16 | (lo as u32))
}
// decode leb 128 unsigned int
// https://en.wikipedia.org/wiki/LEB128
pub fn varunint32(&mut self) -> DecodeResult<u32> {
let mut result = 0;
let mut shift = 0;
loop {
let byte = self.byte()?;
result |= ((byte & 0x7f) as u32) << shift;
if byte & 0x80 == 0 {
return Ok(result);
}
shift += 7;
}
}
// https://webassembly.github.io/spec/core/binary/conventions.html#binary-vec
pub fn vec<T, F>(&mut self, read_function: F) -> DecodeResult<Vec<T>>
where
F: Fn(&mut Decoder<R>) -> DecodeResult<T>,
{
let length = self.varunint32()?;
let mut vec = Vec::with_capacity(length as usize);
for _ in 0..length {
vec.push(read_function(self)?);
}
Ok(vec)
}
pub fn index(&mut self) -> DecodeResult<Index> {
self.varunint32()
}
pub fn valtype(&mut self) -> DecodeResult<Values> {
decoder_valtype(self.byte()?)
}
pub fn blocktype(&mut self) -> DecodeResult<Vec<Values>> {
let vec = match self.byte()? {
0x40 => vec![],
byte => vec![decoder_valtype(byte)?]
};
Ok(vec)
}
pub fn decode<T, F>(&mut self, function: F) -> DecodeResult<T>
where
F: Fn(&mut Decoder<R>) -> DecodeResult<T>,
{
function(self)
}
}
fn decoder_valtype(value: u8) -> DecodeResult<Values> {
match value {
0x7F => Ok(Values::Int(Int::I32)),
0x7E => Ok(Values::Int(Int::I64)),
0x7D => Ok(Values::Float(Float::F32)),
0x7C => Ok(Values::Float(Float::F64)),
_ => Err(DecodeError::Error),
}
}
|
use std::ffi::CStr;
use std::mem;
use std::ops::Range;
use std::os::raw::c_void;
use std::ptr;
use anyhow::Result;
use libc;
use ffi;
use dev;
use errors::{AsResult, ErrorKind::OsError};
use ether;
use malloc;
use mbuf;
use memory::SocketId;
use mempool;
use utils::AsRaw;
/// Ethernet port identifier.
pub type PortId = u16;
/// Queue identifier within a port.
pub type QueueId = u16;
/// A structure used to retrieve link-level information of an Ethernet port.
pub struct EthLink {
    /// Link speed (in Mbps, per the `link()` docs below).
    pub speed: u32,
    /// Duplex flag — derived from the raw `link_duplex()` bit; presumably
    /// true means full duplex, confirm against DPDK.
    pub duplex: bool,
    /// Whether auto-negotiation is active (raw `link_autoneg()` bit).
    pub autoneg: bool,
    /// Whether the link is up (raw `link_status()` bit).
    pub up: bool,
}
/// High-level operations on a DPDK Ethernet port, implemented for `PortId`.
pub trait EthDevice {
    /// Return the port identifier of this device.
    fn portid(&self) -> PortId;
    /// Configure an Ethernet device.
    ///
    /// This function must be invoked first before any other function in the Ethernet API.
    /// This function can also be re-invoked when a device is in the stopped state.
    ///
    fn configure(&self, nb_rx_queue: QueueId, nb_tx_queue: QueueId, conf: &EthConf) -> Result<&Self>;
    /// Retrieve the contextual information of an Ethernet device.
    fn info(&self) -> RawEthDeviceInfo;
    /// Retrieve the general I/O statistics of an Ethernet device.
    fn stats(&self) -> Result<RawEthDeviceStats>;
    /// Reset the general I/O statistics of an Ethernet device.
    fn reset_stats(&self) -> &Self;
    /// Retrieve the Ethernet address of an Ethernet device.
    fn mac_addr(&self) -> ether::EtherAddr;
    /// Set the default MAC address.
    fn set_mac_addr(&self, addr: &[u8; ether::ETHER_ADDR_LEN]) -> Result<&Self>;
    /// Return the NUMA socket to which an Ethernet device is connected
    fn socket_id(&self) -> SocketId;
    /// Check if port_id of device is attached
    fn is_valid(&self) -> bool;
    /// Allocate and set up a receive queue for an Ethernet device.
    ///
    /// The function allocates a contiguous block of memory for *nb_rx_desc*
    /// receive descriptors from a memory zone associated with *socket_id*
    /// and initializes each receive descriptor with a network buffer allocated
    /// from the memory pool *mb_pool*.
    fn rx_queue_setup(
        &self,
        rx_queue_id: QueueId,
        nb_rx_desc: u16,
        rx_conf: Option<ffi::rte_eth_rxconf>,
        mb_pool: &mut mempool::MemoryPool,
    ) -> Result<&Self>;
    /// Allocate and set up a transmit queue for an Ethernet device.
    fn tx_queue_setup(
        &self,
        tx_queue_id: QueueId,
        nb_tx_desc: u16,
        tx_conf: Option<ffi::rte_eth_txconf>,
    ) -> Result<&Self>;
    /// Enable receipt in promiscuous mode for an Ethernet device.
    fn promiscuous_enable(&self) -> &Self;
    /// Disable receipt in promiscuous mode for an Ethernet device.
    fn promiscuous_disable(&self) -> &Self;
    /// Return the value of promiscuous mode for an Ethernet device.
    fn is_promiscuous_enabled(&self) -> Result<bool>;
    /// Retrieve the MTU of an Ethernet device.
    fn mtu(&self) -> Result<u16>;
    /// Change the MTU of an Ethernet device.
    fn set_mtu(&self, mtu: u16) -> Result<&Self>;
    /// Enable/Disable hardware filtering by an Ethernet device
    /// of received VLAN packets tagged with a given VLAN Tag Identifier.
    fn set_vlan_filter(&self, vlan_id: u16, on: bool) -> Result<&Self>;
    /// Retrieve the Ethernet device link status
    #[inline]
    fn is_up(&self) -> bool {
        self.link().up
    }
    /// Retrieve the status (ON/OFF), the speed (in Mbps) and
    /// the mode (HALF-DUPLEX or FULL-DUPLEX) of the physical link of an Ethernet device.
    ///
    /// This call may block for up to 9 seconds while the link status settles.
    ///
    fn link(&self) -> EthLink;
    /// Retrieve the status (ON/OFF), the speed (in Mbps) and
    /// the mode (HALF-DUPLEX or FULL-DUPLEX) of the physical link of an Ethernet device.
    ///
    /// It is a no-wait version of rte_eth_link_get().
    ///
    fn link_nowait(&self) -> EthLink;
    /// Link up an Ethernet device.
    fn set_link_up(&self) -> Result<&Self>;
    /// Link down an Ethernet device.
    fn set_link_down(&self) -> Result<&Self>;
    /// Allocate mbuf from mempool, setup the DMA physical address
    /// and then start RX for specified queue of a port. It is used
    /// when rx_deferred_start flag of the specified queue is true.
    fn rx_queue_start(&self, rx_queue_id: QueueId) -> Result<&Self>;
    /// Stop specified RX queue of a port
    fn rx_queue_stop(&self, rx_queue_id: QueueId) -> Result<&Self>;
    /// Start TX for specified queue of a port.
    /// It is used when tx_deferred_start flag of the specified queue is true.
    fn tx_queue_start(&self, tx_queue_id: QueueId) -> Result<&Self>;
    /// Stop specified TX queue of a port
    fn tx_queue_stop(&self, tx_queue_id: QueueId) -> Result<&Self>;
    /// Start an Ethernet device.
    fn start(&self) -> Result<&Self>;
    /// Stop an Ethernet device.
    fn stop(&self) -> &Self;
    /// Close a stopped Ethernet device. The device cannot be restarted!
    fn close(&self) -> &Self;
    /// Retrieve a burst of input packets from a receive queue of an Ethernet device.
    fn rx_burst(&self, queue_id: QueueId, rx_pkts: &mut [Option<mbuf::MBuf>]) -> usize;
    /// Send a burst of output packets on a transmit queue of an Ethernet device.
    // NOTE(review): despite the `rx_pkts` name, these are the packets to transmit.
    fn tx_burst<T: AsRaw<Raw = mbuf::RawMBuf>>(&self, queue_id: QueueId, rx_pkts: &mut [T]) -> usize;
    /// Read VLAN Offload configuration from an Ethernet device
    fn vlan_offload(&self) -> Result<EthVlanOffloadMode>;
    /// Set VLAN offload configuration on an Ethernet device
    fn set_vlan_offload(&self, mode: EthVlanOffloadMode) -> Result<&Self>;
}
/// Get the total number of Ethernet devices that have been successfully initialized
/// by the matching Ethernet driver during the PCI probing phase.
///
/// All devices whose port identifier is in the range [0, rte::ethdev::count() - 1]
/// can be operated on by network applications immediately after invoking rte_eal_init().
/// If the application unplugs a port using hotplug function,
/// The enabled port numbers may be noncontiguous.
/// In the case, the applications need to manage enabled port by themselves.
pub fn count() -> u16 {
    // SAFETY: FFI call with no pointer arguments; assumes the EAL has been
    // initialized first (see doc comment above) — TODO confirm.
    unsafe { ffi::rte_eth_dev_count_avail() }
}
/// Range of port identifiers `0..count()`.
// NOTE(review): assumes port ids are contiguous; see the hotplug caveat
// documented on `count()` above.
pub fn devices() -> Range<PortId> {
    0..count()
}
impl EthDevice for PortId {
fn portid(&self) -> PortId {
*self
}
fn configure(&self, nb_rx_queue: QueueId, nb_tx_queue: QueueId, conf: &EthConf) -> Result<&Self> {
rte_check!(unsafe {
ffi::rte_eth_dev_configure(*self,
nb_rx_queue,
nb_tx_queue,
RawEthConf::from(conf).as_raw())
}; ok => { self })
}
fn info(&self) -> RawEthDeviceInfo {
let mut info: RawEthDeviceInfo = Default::default();
unsafe {
ffi::rte_eth_dev_info_get(*self, &mut info);
}
info
}
fn stats(&self) -> Result<RawEthDeviceStats> {
let mut stats: RawEthDeviceStats = Default::default();
rte_check!(unsafe {
ffi::rte_eth_stats_get(*self, &mut stats)
}; ok => { stats })
}
fn reset_stats(&self) -> &Self {
unsafe { ffi::rte_eth_stats_reset(*self) };
self
}
fn mac_addr(&self) -> ether::EtherAddr {
unsafe {
let mut addr: ffi::rte_ether_addr = mem::zeroed();
ffi::rte_eth_macaddr_get(*self, &mut addr);
ether::EtherAddr::from(addr.addr_bytes)
}
}
fn set_mac_addr(&self, addr: &[u8; ether::ETHER_ADDR_LEN]) -> Result<&Self> {
rte_check!(unsafe {
ffi::rte_eth_dev_default_mac_addr_set(*self, addr.as_ptr() as * mut _)
}; ok => { self })
}
fn socket_id(&self) -> SocketId {
unsafe { ffi::rte_eth_dev_socket_id(*self) }
}
fn is_valid(&self) -> bool {
unsafe { ffi::rte_eth_dev_is_valid_port(*self) != 0 }
}
fn rx_queue_setup(
&self,
rx_queue_id: QueueId,
nb_rx_desc: u16,
rx_conf: Option<ffi::rte_eth_rxconf>,
mb_pool: &mut mempool::MemoryPool,
) -> Result<&Self> {
rte_check!(unsafe {
ffi::rte_eth_rx_queue_setup(*self,
rx_queue_id,
nb_rx_desc,
self.socket_id() as u32,
rx_conf.as_ref().map(|conf| conf as *const _).unwrap_or(ptr::null()),
mb_pool.as_raw_mut())
}; ok => { self })
}
fn tx_queue_setup(
&self,
tx_queue_id: QueueId,
nb_tx_desc: u16,
tx_conf: Option<ffi::rte_eth_txconf>,
) -> Result<&Self> {
rte_check!(unsafe {
ffi::rte_eth_tx_queue_setup(*self,
tx_queue_id,
nb_tx_desc,
self.socket_id() as u32,
tx_conf.as_ref().map(|conf| conf as *const _).unwrap_or(ptr::null()))
}; ok => { self })
}
fn promiscuous_enable(&self) -> &Self {
unsafe { ffi::rte_eth_promiscuous_enable(*self) };
self
}
fn promiscuous_disable(&self) -> &Self {
unsafe { ffi::rte_eth_promiscuous_disable(*self) };
self
}
fn is_promiscuous_enabled(&self) -> Result<bool> {
let ret = unsafe { ffi::rte_eth_promiscuous_get(*self) };
rte_check!(ret; ok => { ret != 0 })
}
fn mtu(&self) -> Result<u16> {
let mut mtu: u16 = 0;
rte_check!(unsafe { ffi::rte_eth_dev_get_mtu(*self, &mut mtu)}; ok => { mtu })
}
fn set_mtu(&self, mtu: u16) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_set_mtu(*self, mtu) }; ok => { self })
}
fn set_vlan_filter(&self, vlan_id: u16, on: bool) -> Result<&Self> {
rte_check!(unsafe {
ffi::rte_eth_dev_vlan_filter(*self, vlan_id, bool_value!(on) as i32)
}; ok => { self })
}
fn link(&self) -> EthLink {
let mut link = rte_sys::rte_eth_link::default();
unsafe {
ffi::rte_eth_link_get(*self, &mut link as *mut _);
}
EthLink {
speed: link.link_speed,
duplex: link.link_duplex() != 0,
autoneg: link.link_autoneg() != 0,
up: link.link_status() != 0,
}
}
fn link_nowait(&self) -> EthLink {
let mut link = rte_sys::rte_eth_link::default();
unsafe {
ffi::rte_eth_link_get_nowait(*self, &mut link as *mut _);
}
EthLink {
speed: link.link_speed,
duplex: link.link_duplex() != 0,
autoneg: link.link_autoneg() != 0,
up: link.link_status() != 0,
}
}
fn set_link_up(&self) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_set_link_up(*self) }; ok => { self })
}
fn set_link_down(&self) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_set_link_down(*self) }; ok => { self })
}
fn rx_queue_start(&self, rx_queue_id: QueueId) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_rx_queue_start(*self, rx_queue_id) }; ok => { self })
}
fn rx_queue_stop(&self, rx_queue_id: QueueId) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_rx_queue_stop(*self, rx_queue_id) }; ok => { self })
}
fn tx_queue_start(&self, tx_queue_id: QueueId) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_tx_queue_start(*self, tx_queue_id) }; ok => { self })
}
fn tx_queue_stop(&self, tx_queue_id: QueueId) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_tx_queue_stop(*self, tx_queue_id) }; ok => { self })
}
fn start(&self) -> Result<&Self> {
rte_check!(unsafe { ffi::rte_eth_dev_start(*self) }; ok => { self })
}
fn stop(&self) -> &Self {
unsafe { ffi::rte_eth_dev_stop(*self) };
self
}
fn close(&self) -> &Self {
unsafe { ffi::rte_eth_dev_close(*self) };
self
}
fn rx_burst(&self, queue_id: QueueId, rx_pkts: &mut [Option<mbuf::MBuf>]) -> usize {
unsafe {
ffi::_rte_eth_rx_burst(*self, queue_id, rx_pkts.as_mut_ptr() as *mut _, rx_pkts.len() as u16) as usize
}
}
fn tx_burst<T: AsRaw<Raw = mbuf::RawMBuf>>(&self, queue_id: QueueId, rx_pkts: &mut [T]) -> usize {
unsafe {
if rx_pkts.is_empty() {
ffi::_rte_eth_tx_burst(*self, queue_id, ptr::null_mut(), 0) as usize
} else {
ffi::_rte_eth_tx_burst(*self, queue_id, rx_pkts.as_mut_ptr() as *mut _, rx_pkts.len() as u16) as usize
}
}
}
fn vlan_offload(&self) -> Result<EthVlanOffloadMode> {
let mode = unsafe { ffi::rte_eth_dev_get_vlan_offload(*self) };
rte_check!(mode; ok => { EthVlanOffloadMode::from_bits_truncate(mode) })
}
/// Apply a VLAN offload configuration to the device.
fn set_vlan_offload(&self, mode: EthVlanOffloadMode) -> Result<&Self> {
    rte_check!(unsafe {
        ffi::rte_eth_dev_set_vlan_offload(*self, mode.bits)
    }; ok => { self })
}
}
/// Read-only accessors over a device-information structure.
pub trait EthDeviceInfo {
    /// Device Driver name.
    fn driver_name(&self) -> &str;
    /// Backing generic device, if one is attached.
    fn dev(&self) -> Option<dev::Device>;
}
/// Raw FFI device-information structure.
pub type RawEthDeviceInfo = ffi::rte_eth_dev_info;
impl EthDeviceInfo for RawEthDeviceInfo {
    #[inline]
    fn driver_name(&self) -> &str {
        // NOTE(review): unwrap panics if the driver name is not valid UTF-8;
        // this assumes DPDK driver names are always ASCII — confirm.
        unsafe { CStr::from_ptr(self.driver_name).to_str().unwrap() }
    }
    #[inline]
    fn dev(&self) -> Option<dev::Device> {
        // A null device pointer means no backing generic device.
        if self.device.is_null() {
            None
        } else {
            Some(self.device.into())
        }
    }
}
/// Marker trait for device statistics structures.
pub trait EthDeviceStats {}
/// Raw FFI statistics structure.
pub type RawEthDeviceStats = ffi::rte_eth_stats;
impl EthDeviceStats for RawEthDeviceStats {}
bitflags! {
    /// Definitions used for VMDQ pool rx mode setting
    pub struct EthVmdqRxMode : u16 {
        /// accept untagged packets.
        const ETH_VMDQ_ACCEPT_UNTAG = 0x0001;
        /// accept packets in multicast table.
        const ETH_VMDQ_ACCEPT_HASH_MC = 0x0002;
        /// accept packets in unicast table.
        const ETH_VMDQ_ACCEPT_HASH_UC = 0x0004;
        /// accept broadcast packets.
        const ETH_VMDQ_ACCEPT_BROADCAST = 0x0008;
        /// multicast promiscuous.
        const ETH_VMDQ_ACCEPT_MULTICAST = 0x0010;
    }
}
/// A set of values to identify what method is to be used to route packets to multiple queues.
pub type EthRxMultiQueueMode = ffi::rte_eth_rx_mq_mode::Type;
bitflags! {
    /// Definitions used for VLAN Offload functionality
    pub struct EthVlanOffloadMode: i32 {
        /// VLAN Strip On/Off
        const ETH_VLAN_STRIP_OFFLOAD = 0x0001;
        /// VLAN Filter On/Off
        const ETH_VLAN_FILTER_OFFLOAD = 0x0002;
        /// VLAN Extend On/Off
        const ETH_VLAN_EXTEND_OFFLOAD = 0x0004;
        /// VLAN Strip setting mask (same bit as the offload flag)
        const ETH_VLAN_STRIP_MASK = 0x0001;
        /// VLAN Filter setting mask
        const ETH_VLAN_FILTER_MASK = 0x0002;
        /// VLAN Extend setting mask
        const ETH_VLAN_EXTEND_MASK = 0x0004;
        /// VLAN ID is in lower 12 bits
        const ETH_VLAN_ID_MAX = 0x0FFF;
    }
}
/// A set of values to identify what method is to be used to transmit
/// packets using multi-TCs.
pub type EthTxMultiQueueMode = ffi::rte_eth_tx_mq_mode::Type;
/// The RSS offload types are defined based on flow types which are defined
/// in rte_eth_ctrl.h. Different NIC hardwares may support different RSS offload
/// types. The supported flow types or RSS offload types can be queried by
/// rte_eth_dev_info_get().
bitflags! {
    pub struct RssHashFunc: u64 {
        const ETH_RSS_UNKNOWN = 0;
        // Per-flow-type bits: each bit index comes from the matching
        // RTE_ETH_FLOW_* value in the generated bindings.
        const ETH_RSS_IPV4 = 1 << ffi::RTE_ETH_FLOW_IPV4;
        const ETH_RSS_FRAG_IPV4 = 1 << ffi::RTE_ETH_FLOW_FRAG_IPV4;
        const ETH_RSS_NONFRAG_IPV4_TCP = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV4_TCP;
        const ETH_RSS_NONFRAG_IPV4_UDP = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV4_UDP;
        const ETH_RSS_NONFRAG_IPV4_SCTP = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV4_SCTP;
        const ETH_RSS_NONFRAG_IPV4_OTHER = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV4_OTHER;
        const ETH_RSS_IPV6 = 1 << ffi::RTE_ETH_FLOW_IPV6;
        const ETH_RSS_FRAG_IPV6 = 1 << ffi::RTE_ETH_FLOW_FRAG_IPV6;
        const ETH_RSS_NONFRAG_IPV6_TCP = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV6_TCP;
        const ETH_RSS_NONFRAG_IPV6_UDP = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV6_UDP;
        const ETH_RSS_NONFRAG_IPV6_SCTP = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV6_SCTP;
        const ETH_RSS_NONFRAG_IPV6_OTHER = 1 << ffi::RTE_ETH_FLOW_NONFRAG_IPV6_OTHER;
        const ETH_RSS_L2_PAYLOAD = 1 << ffi::RTE_ETH_FLOW_L2_PAYLOAD;
        const ETH_RSS_IPV6_EX = 1 << ffi::RTE_ETH_FLOW_IPV6_EX;
        const ETH_RSS_IPV6_TCP_EX = 1 << ffi::RTE_ETH_FLOW_IPV6_TCP_EX;
        const ETH_RSS_IPV6_UDP_EX = 1 << ffi::RTE_ETH_FLOW_IPV6_UDP_EX;
        const ETH_RSS_PORT = 1 << ffi::RTE_ETH_FLOW_PORT;
        const ETH_RSS_VXLAN = 1 << ffi::RTE_ETH_FLOW_VXLAN;
        const ETH_RSS_GENEVE = 1 << ffi::RTE_ETH_FLOW_GENEVE;
        const ETH_RSS_NVGRE = 1 << ffi::RTE_ETH_FLOW_NVGRE;
        const ETH_RSS_GTPU = 1 << ffi::RTE_ETH_FLOW_GTPU;
        // NOTE(review): this uses RTE_ETH_FLOW_MAX as the bit index —
        // confirm this matches the DPDK definition of ETH_RSS_ETH.
        const ETH_RSS_ETH = 1 << ffi::RTE_ETH_FLOW_MAX;
        // Bits with no corresponding RTE_ETH_FLOW_* value.
        const ETH_RSS_S_VLAN = 1 << 25;
        const ETH_RSS_C_VLAN = 1 << 26;
        const ETH_RSS_ESP = 1 << 27;
        const ETH_RSS_AH = 1 << 28;
        const ETH_RSS_L2TPV3 = 1 << 29;
        const ETH_RSS_PFCP = 1 << 30;
        const ETH_RSS_PPPOE = 1 << 31;
        const ETH_RSS_ECPRI = 1 << 32;
        // Source/destination-only selectors occupy the high bits.
        const ETH_RSS_L3_SRC_ONLY = 1 << 63;
        const ETH_RSS_L3_DST_ONLY = 1 << 62;
        const ETH_RSS_L4_SRC_ONLY = 1 << 61;
        const ETH_RSS_L4_DST_ONLY = 1 << 60;
        const ETH_RSS_L2_SRC_ONLY = 1 << 59;
        const ETH_RSS_L2_DST_ONLY = 1 << 58;
        // IPv6 prefix-length selectors.
        const RTE_ETH_RSS_L3_PRE32 = 1 << 57;
        const RTE_ETH_RSS_L3_PRE40 = 1 << 56;
        const RTE_ETH_RSS_L3_PRE48 = 1 << 55;
        const RTE_ETH_RSS_L3_PRE56 = 1 << 54;
        const RTE_ETH_RSS_L3_PRE64 = 1 << 53;
        const RTE_ETH_RSS_L3_PRE96 = 1 << 52;
        // Encapsulation level: a 2-bit field at bits 50..52.
        const ETH_RSS_LEVEL_PMD_DEFAULT = 0 << 50;
        const ETH_RSS_LEVEL_OUTERMOST = 1 << 50;
        const ETH_RSS_LEVEL_INNERMOST = 2 << 50;
        const ETH_RSS_LEVEL_MASK = 3 << 50;
        // Convenience combinations of flow type + prefix length.
        const ETH_RSS_IPV6_PRE32 =
            Self::ETH_RSS_IPV6.bits |
            Self::RTE_ETH_RSS_L3_PRE32.bits;
        const ETH_RSS_IPV6_PRE40 =
            Self::ETH_RSS_IPV6.bits |
            Self::RTE_ETH_RSS_L3_PRE40.bits;
        const ETH_RSS_IPV6_PRE48 =
            Self::ETH_RSS_IPV6.bits |
            Self::RTE_ETH_RSS_L3_PRE48.bits;
        const ETH_RSS_IPV6_PRE56 =
            Self::ETH_RSS_IPV6.bits |
            Self::RTE_ETH_RSS_L3_PRE56.bits;
        const ETH_RSS_IPV6_PRE64 =
            Self::ETH_RSS_IPV6.bits |
            Self::RTE_ETH_RSS_L3_PRE64.bits;
        const ETH_RSS_IPV6_PRE96 =
            Self::ETH_RSS_IPV6.bits |
            Self::RTE_ETH_RSS_L3_PRE96.bits;
        const ETH_RSS_IPV6_PRE32_UDP =
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::RTE_ETH_RSS_L3_PRE32.bits;
        const ETH_RSS_IPV6_PRE40_UDP =
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::RTE_ETH_RSS_L3_PRE40.bits;
        const ETH_RSS_IPV6_PRE48_UDP =
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::RTE_ETH_RSS_L3_PRE48.bits;
        const ETH_RSS_IPV6_PRE56_UDP =
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::RTE_ETH_RSS_L3_PRE56.bits;
        const ETH_RSS_IPV6_PRE64_UDP =
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::RTE_ETH_RSS_L3_PRE64.bits;
        const ETH_RSS_IPV6_PRE96_UDP =
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::RTE_ETH_RSS_L3_PRE96.bits;
        const ETH_RSS_IPV6_PRE32_TCP =
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::RTE_ETH_RSS_L3_PRE32.bits;
        const ETH_RSS_IPV6_PRE40_TCP =
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::RTE_ETH_RSS_L3_PRE40.bits;
        const ETH_RSS_IPV6_PRE48_TCP =
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::RTE_ETH_RSS_L3_PRE48.bits;
        const ETH_RSS_IPV6_PRE56_TCP =
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::RTE_ETH_RSS_L3_PRE56.bits;
        const ETH_RSS_IPV6_PRE64_TCP =
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::RTE_ETH_RSS_L3_PRE64.bits;
        const ETH_RSS_IPV6_PRE96_TCP =
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::RTE_ETH_RSS_L3_PRE96.bits;
        const ETH_RSS_IPV6_PRE32_SCTP =
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::RTE_ETH_RSS_L3_PRE32.bits;
        const ETH_RSS_IPV6_PRE40_SCTP =
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::RTE_ETH_RSS_L3_PRE40.bits;
        const ETH_RSS_IPV6_PRE48_SCTP =
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::RTE_ETH_RSS_L3_PRE48.bits;
        const ETH_RSS_IPV6_PRE56_SCTP =
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::RTE_ETH_RSS_L3_PRE56.bits;
        const ETH_RSS_IPV6_PRE64_SCTP =
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::RTE_ETH_RSS_L3_PRE64.bits;
        const ETH_RSS_IPV6_PRE96_SCTP =
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::RTE_ETH_RSS_L3_PRE96.bits;
        // Protocol-family aggregates.
        const ETH_RSS_IP =
            Self::ETH_RSS_IPV4.bits |
            Self::ETH_RSS_FRAG_IPV4.bits |
            Self::ETH_RSS_NONFRAG_IPV4_OTHER.bits |
            Self::ETH_RSS_IPV6.bits |
            Self::ETH_RSS_FRAG_IPV6.bits |
            Self::ETH_RSS_NONFRAG_IPV6_OTHER.bits |
            Self::ETH_RSS_IPV6_EX.bits;
        const ETH_RSS_UDP =
            Self::ETH_RSS_NONFRAG_IPV4_UDP.bits |
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::ETH_RSS_IPV6_UDP_EX.bits;
        const ETH_RSS_TCP =
            Self::ETH_RSS_NONFRAG_IPV4_TCP.bits |
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::ETH_RSS_IPV6_TCP_EX.bits;
        const ETH_RSS_SCTP =
            Self::ETH_RSS_NONFRAG_IPV4_SCTP.bits |
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits;
        const ETH_RSS_TUNNEL =
            Self::ETH_RSS_VXLAN.bits |
            Self::ETH_RSS_GENEVE.bits |
            Self::ETH_RSS_NVGRE.bits;
        const ETH_RSS_VLAN =
            Self::ETH_RSS_S_VLAN.bits |
            Self::ETH_RSS_C_VLAN.bits;
        /// Mask of valid RSS hash protocols
        const ETH_RSS_PROTO_MASK =
            Self::ETH_RSS_IPV4.bits |
            Self::ETH_RSS_FRAG_IPV4.bits |
            Self::ETH_RSS_NONFRAG_IPV4_TCP.bits |
            Self::ETH_RSS_NONFRAG_IPV4_UDP.bits |
            Self::ETH_RSS_NONFRAG_IPV4_SCTP.bits |
            Self::ETH_RSS_NONFRAG_IPV4_OTHER.bits |
            Self::ETH_RSS_IPV6.bits |
            Self::ETH_RSS_FRAG_IPV6.bits |
            Self::ETH_RSS_NONFRAG_IPV6_TCP.bits |
            Self::ETH_RSS_NONFRAG_IPV6_UDP.bits |
            Self::ETH_RSS_NONFRAG_IPV6_SCTP.bits |
            Self::ETH_RSS_NONFRAG_IPV6_OTHER.bits |
            Self::ETH_RSS_L2_PAYLOAD.bits |
            Self::ETH_RSS_IPV6_EX.bits |
            Self::ETH_RSS_IPV6_TCP_EX.bits |
            Self::ETH_RSS_IPV6_UDP_EX.bits |
            Self::ETH_RSS_PORT.bits |
            Self::ETH_RSS_VXLAN.bits |
            Self::ETH_RSS_GENEVE.bits |
            Self::ETH_RSS_NVGRE.bits;
    }
}
/// RSS configuration: an optional 40-byte hash key plus the set of
/// protocols to hash on.
pub struct EthRssConf {
    // `None` lets the driver use its default hash key.
    pub key: Option<[u8; 40]>,
    // Which flow types participate in RSS hashing.
    pub hash: RssHashFunc,
}
impl Default for EthRssConf {
fn default() -> Self {
unsafe { mem::zeroed() }
}
}
/// Advanced RX configuration; each member mirrors one arm of the
/// `rte_eth_conf.rx_adv_conf` union.
#[derive(Default)]
pub struct RxAdvConf {
    /// Port RSS configuration
    pub rss_conf: Option<EthRssConf>,
    // VMDQ+DCB RX configuration.
    pub vmdq_dcb_conf: Option<ffi::rte_eth_vmdq_dcb_conf>,
    // DCB RX configuration.
    pub dcb_rx_conf: Option<ffi::rte_eth_dcb_rx_conf>,
    // VMDQ RX configuration.
    pub vmdq_rx_conf: Option<ffi::rte_eth_vmdq_rx_conf>,
}
/// Advanced TX configuration; currently uninhabited (no variants defined).
pub enum TxAdvConf {}
/// Device supported speeds bitmap flags
bitflags! {
    pub struct LinkSpeed: u32 {
        /// Autonegotiate (all speeds)
        const ETH_LINK_SPEED_AUTONEG = 0 << 0;
        /// Disable autoneg (fixed speed)
        const ETH_LINK_SPEED_FIXED = 1 << 0;
        /// 10 Mbps half-duplex
        const ETH_LINK_SPEED_10M_HD = 1 << 1;
        /// 10 Mbps full-duplex
        const ETH_LINK_SPEED_10M = 1 << 2;
        /// 100 Mbps half-duplex
        const ETH_LINK_SPEED_100M_HD = 1 << 3;
        /// 100 Mbps full-duplex
        const ETH_LINK_SPEED_100M = 1 << 4;
        const ETH_LINK_SPEED_1G = 1 << 5;
        const ETH_LINK_SPEED_2_5G = 1 << 6;
        const ETH_LINK_SPEED_5G = 1 << 7;
        const ETH_LINK_SPEED_10G = 1 << 8;
        const ETH_LINK_SPEED_20G = 1 << 9;
        const ETH_LINK_SPEED_25G = 1 << 10;
        const ETH_LINK_SPEED_40G = 1 << 11;
        const ETH_LINK_SPEED_50G = 1 << 12;
        const ETH_LINK_SPEED_56G = 1 << 13;
        const ETH_LINK_SPEED_100G = 1 << 14;
    }
}
impl Default for LinkSpeed {
    /// Default to autonegotiation (empty bitmap, value 0).
    fn default() -> Self {
        LinkSpeed::ETH_LINK_SPEED_AUTONEG
    }
}
/// Raw FFI RX / TX port mode structures.
pub type EthRxMode = ffi::rte_eth_rxmode;
pub type EthTxMode = ffi::rte_eth_txmode;
/// Safe, owned counterpart of `ffi::rte_eth_conf`; converted to the raw
/// struct via `From<&EthConf> for RawEthConf`.
#[derive(Default)]
pub struct EthConf {
    /// bitmap of ETH_LINK_SPEED_XXX of speeds to be used.
    ///
    /// ETH_LINK_SPEED_FIXED disables link autonegotiation, and a unique speed shall be set.
    /// Otherwise, the bitmap defines the set of speeds to be advertised.
    /// If the special value ETH_LINK_SPEED_AUTONEG (0) is used,
    /// all speeds supported are advertised.
    pub link_speeds: LinkSpeed,
    /// Port RX configuration.
    pub rxmode: Option<EthRxMode>,
    /// Port TX configuration.
    pub txmode: Option<EthTxMode>,
    /// Loopback operation mode.
    ///
    /// By default the value is 0, meaning the loopback mode is disabled.
    /// Read the datasheet of given ethernet controller for details.
    /// The possible values of this field are defined in implementation of each driver.
    pub lpbk_mode: u32,
    /// Port RX filtering configuration (union).
    pub rx_adv_conf: Option<RxAdvConf>,
    /// Port TX DCB configuration (union).
    pub tx_adv_conf: Option<TxAdvConf>,
    /// Currently, Priority Flow Control (PFC) is supported;
    /// if DCB with PFC is needed, the variable must be set to ETH_DCB_PFC_SUPPORT.
    pub dcb_capability_en: u32,
    /// Flow director configuration.
    pub fdir_conf: Option<ffi::rte_fdir_conf>,
    /// Interrupt mode configuration.
    pub intr_conf: Option<ffi::rte_intr_conf>,
}
/// Const pointer to the raw FFI configuration struct.
pub type RawEthConfPtr = *const ffi::rte_eth_conf;
/// Owned wrapper around the raw FFI configuration struct.
pub struct RawEthConf(ffi::rte_eth_conf);
impl RawEthConf {
    /// Borrow the inner FFI struct as a raw pointer for C calls.
    fn as_raw(&self) -> RawEthConfPtr {
        &self.0
    }
}
impl<'a> From<&'a EthConf> for RawEthConf {
    /// Translate the safe configuration into the raw FFI struct.
    ///
    /// NOTE(review): only `rxmode`, `txmode` and `rx_adv_conf.rss_conf` are
    /// copied over; `link_speeds`, `lpbk_mode`, the VMDQ/DCB members,
    /// `fdir_conf` and `intr_conf` are currently ignored — confirm whether
    /// that is intentional.
    fn from(c: &EthConf) -> Self {
        let mut conf: ffi::rte_eth_conf = Default::default();
        if let Some(ref rxmode) = c.rxmode {
            conf.rxmode = *rxmode
        }
        if let Some(ref txmode) = c.txmode {
            conf.txmode = *txmode
        }
        if let Some(ref adv_conf) = c.rx_adv_conf {
            if let Some(ref rss_conf) = adv_conf.rss_conf {
                // A missing key becomes a null pointer with length 0, which
                // tells the driver to use its default hash key.
                let (rss_key, rss_key_len) = rss_conf
                    .key
                    .map_or_else(|| (ptr::null(), 0), |key| (key.as_ptr(), key.len() as u8));
                conf.rx_adv_conf.rss_conf.rss_key = rss_key as *mut _;
                conf.rx_adv_conf.rss_conf.rss_key_len = rss_key_len;
                conf.rx_adv_conf.rss_conf.rss_hf = rss_conf.hash.bits;
            }
        }
        RawEthConf(conf)
    }
}
/// Calculate the size of the tx buffer.
///
/// The buffer layout is a `rte_eth_dev_tx_buffer` header immediately
/// followed by `size` inline mbuf-pointer slots.
pub fn rte_eth_tx_buffer_size(size: usize) -> usize {
    let header_bytes = mem::size_of::<ffi::rte_eth_dev_tx_buffer>();
    let slot_bytes = mem::size_of::<*mut ffi::rte_mbuf>();
    header_bytes + slot_bytes * size
}
/// Raw FFI tx-buffer type, its mutable-pointer alias, and the signature of
/// a user callback invoked with the packets that could not be sent.
pub type RawTxBuffer = ffi::rte_eth_dev_tx_buffer;
pub type RawTxBufferPtr = *mut ffi::rte_eth_dev_tx_buffer;
pub type TxBufferErrorCallback<T> = fn(unsent: *mut *mut ffi::rte_mbuf, count: u16, userdata: &T);
/// Operations on a DPDK tx buffer.
pub trait TxBuffer {
    /// Release the buffer's memory.
    fn free(&mut self);
    /// Configure a callback for buffered packets which cannot be sent
    fn set_err_callback<T>(
        &mut self,
        callback: Option<TxBufferErrorCallback<T>>,
        userdata: Option<&T>,
    ) -> Result<&mut Self>;
    /// Silently dropping unsent buffered packets.
    fn drop_err_packets(&mut self) -> Result<&mut Self>;
    /// Tracking unsent buffered packets.
    fn count_err_packets(&mut self) -> Result<&mut Self>;
}
/// Initialize default values for buffered transmitting
///
/// Allocates a zeroed tx buffer (header plus `size` mbuf slots) on the
/// given NUMA socket, then initializes it via `rte_eth_tx_buffer_init`.
/// Fails with `ENOMEM` if the allocation itself fails.
pub fn alloc_buffer(size: usize, socket_id: i32) -> Result<RawTxBufferPtr> {
    unsafe {
        malloc::zmalloc_socket("tx_buffer", rte_eth_tx_buffer_size(size), 0, socket_id)
            .ok_or(OsError(libc::ENOMEM))
            .map(|p| p.as_ptr() as *mut _)
            .and_then(|p| ffi::rte_eth_tx_buffer_init(p, size as u16).as_result().map(|_| p))
    }
}
impl TxBuffer for RawTxBuffer {
    /// Release the buffer previously allocated with `alloc_buffer`.
    fn free(&mut self) {
        malloc::free(self as RawTxBufferPtr as *mut c_void);
    }
    /// Configure a callback for buffered packets which cannot be sent
    ///
    /// NOTE(review): the `mem::transmute` calls assume `Option<fn(...)>` and
    /// `Option<&T>` are layout-compatible with the C function/data pointers
    /// expected by the binding — confirm against the generated ffi types.
    fn set_err_callback<T>(
        &mut self,
        callback: Option<TxBufferErrorCallback<T>>,
        userdata: Option<&T>,
    ) -> Result<&mut Self> {
        rte_check!(unsafe {
            ffi::rte_eth_tx_buffer_set_err_callback(self,
                mem::transmute(callback),
                mem::transmute(userdata))
        }; ok => { self })
    }
    /// Silently dropping unsent buffered packets.
    fn drop_err_packets(&mut self) -> Result<&mut Self> {
        rte_check!(unsafe {
            ffi::rte_eth_tx_buffer_set_err_callback(self,
                Some(ffi::rte_eth_tx_buffer_drop_callback),
                ptr::null_mut())
        }; ok => { self })
    }
    /// Tracking unsent buffered packets.
    fn count_err_packets(&mut self) -> Result<&mut Self> {
        rte_check!(unsafe {
            ffi::rte_eth_tx_buffer_set_err_callback(self,
                Some(ffi::rte_eth_tx_buffer_count_callback),
                ptr::null_mut())
        }; ok => { self })
    }
}
|
/// A target.
///
/// # Semantics
///
/// Used to link to internal objects that can't be assigned affiliated keywords. E.g. list
/// items.
///
/// See fuzzy [`Link`]s.
///
/// # Syntax
///
/// ```text
/// <<TARGET>>
/// ```
///
/// `TARGET` can contain any character except `<`, `>` and newline. It can't start or end with
/// a whitespace character. It will not be parsed.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Target {
    /// The raw target text between the `<<` and `>>` delimiters.
    pub target: String,
}
|
use std::sync::Arc;
use gristmill::asset::Resources;
use gristmill::game::{Game, Window, run_game};
use gristmill_gui::{*, quad::Quad, text::{Text, Align}, button::ButtonClass, event::{GuiActionEvent, GuiActionEventRef}, container::*, layout::*, layout_builder::*, listener};
use gristmill::renderer::{RenderLoader, RenderContext, RenderAssetList, pass::{RenderPass, RenderPass3D2D}};
use gristmill::color::Color;
use gristmill::geometry2d::*;
use gristmill::input::{InputSystem, InputActions, CursorAction, ActionState};
use gristmill_examples::basic_geo_renderer::BasicGeoRenderer;
use gristmill_gui::renderer::GuiRenderer;
// -------------------------------------------------------------------------------------------------
// The scene is a unit 3D part plus the GUI layer, matching RenderPass3D2D.
type Scene = ((), Gui);
/// Input state for the demo: a single "primary" cursor action.
#[derive(Default)]
struct GuiGameInput {
    primary: CursorAction,
}
impl InputActions for GuiGameInput {
    /// Reset per-frame state once a frame has been processed.
    fn end_frame(&mut self) {
        self.primary.end_frame();
    }
    /// Route a named binding to the matching action; only "primary" is bound.
    fn set_action_state(&mut self, target: &str, state: ActionState) {
        if target == "primary" {
            self.primary.set_state(state);
        }
    }
}
impl GuiInputActions for GuiGameInput {
    // Expose the primary cursor action to the GUI system.
    fn primary(&self) -> &CursorAction { &self.primary }
}
/// Top-level game state: the render scene, input, and the demo player
/// plus the window that displays/edits it.
struct GuiGame {
    scene: Scene,
    input: GuiGameInput,
    player: Player,
    player_window: PlayerWindow,
}
/// RPG-style stat block; `unspent` points can be allocated to the three
/// named stats via the window's +/- buttons.
struct PlayerStats {
    unspent: u32,
    strength: u32,
    dexterity: u32,
    intelligence: u32,
}
impl PlayerStats {
    /// Mutable access to a stat by row index (0 = strength, 1 = dexterity,
    /// 2 = intelligence). Panics on any other index — callers pass indices
    /// they embedded in GUI events, so out-of-range is a programming error.
    fn get_mut(&mut self, index: usize) -> &mut u32 {
        match index {
            0 => &mut self.strength,
            1 => &mut self.dexterity,
            2 => &mut self.intelligence,
            _ => panic!("invalid index"),
        }
    }
}
/// Demo player model shown in the window.
struct Player {
    name: String,
    level: u32,
    stats: PlayerStats,
    //perks: Vec<PlayerPerk>,
}
impl Player {
    /// Hard-coded sample player used by this example.
    fn new() -> Player {
        Player {
            name: "Test Name".to_string(),
            level: 45,
            stats: PlayerStats { unspent: 3, strength: 1, dexterity: 1, intelligence: 1 },
        }
    }
}
/// Handles to the GUI nodes/values that make up the player window, kept so
/// the window can be refreshed when the model changes.
struct PlayerWindow {
    root: GuiNode,
    name_text: WidgetNode<Text>,
    level_text: WidgetNode<Text>,
    stat_unspent: GuiValue<u32>,
    // One value per stat row: strength, dexterity, intelligence.
    stats: [GuiValue<u32>; 3],
}
impl PlayerWindow {
    // TODO inflate from file
    const PADDING: i32 = 8;
    /// Build the header: player portrait on the left, name and level text
    /// to its right. Returns the two text widgets so they can be updated.
    fn build_top(gui: &mut Gui, parent: &BoxLayout, player_image: GuiTexture) -> (WidgetNode<Text>, WidgetNode<Text>) {
        let image_size = player_image.size().unwrap_or_default();
        let container = parent.add(gui, BoxSize::Exact(image_size.height));
        gui.add_widget(container, Layout::offset_parent(Rect::from_size(image_size)), Quad::new_texture(player_image));
        // Name text: anchored to the top, to the right of the portrait.
        let mut layout = Layout::new_size(Size::new(128, 20));
        layout.set_anchor(Side::Top, Anchor::parent(0));
        layout.set_anchor(Side::Left, Anchor::previous_sibling_opposite(PlayerWindow::PADDING));
        layout.set_anchor(Side::Right, Anchor::parent(0));
        let mut text = Text::new_empty();
        text.set_font(font::Font::default(), 20.0);
        let name_text = gui.add_widget(container, layout, text);
        // Level text: directly below the name, aligned with it.
        let mut layout = Layout::new_size(Size::new(128, 16));
        layout.set_anchor(Side::Top, Anchor::previous_sibling_opposite(0));
        layout.set_anchor(Side::Left, Anchor::previous_sibling(0));
        layout.set_anchor(Side::Right, Anchor::parent(0));
        let level_text = gui.add_widget(container, layout, Text::new_empty());
        (name_text, level_text)
    }
    /// Add one row to the stats table: label, value text, and (optionally)
    /// +/- buttons wired to `stat_add`/`stat_sub` events carrying `index`.
    /// Returns the `GuiValue` driving the row's value text.
    fn build_stat_row(gui: &mut Gui, container: GuiNode, stat: String, buttons: Option<(usize, &mut GuiValue<u32>, &ButtonClass, &ButtonClass)>) -> GuiValue<u32> {
        let mut text = Text::new(stat);
        text.set_alignment(Align::Start, Align::Middle);
        gui.add_widget(container, Layout::default(), text);
        let mut text = Text::new("0".to_string());
        text.set_alignment(Align::Middle, Align::Middle);
        let value_text = gui.add_widget(container, Layout::default(), text);
        let mut stat_value = GuiValue::new();
        stat_value.add_listener(listener::ConvertString(listener::SetText(value_text)));
        if let Some((index, stat_unspent, add_button, sub_button)) = buttons {
            // "+" is enabled only while there are unspent points…
            let add = add_button.instance_builder()
                .with_press_event(GuiActionEvent::NamedIndex("stat_add".to_string(), index))
                .build(gui, container);
            stat_unspent.add_listener(listener::Compare(listener::Comparison::NotEqual, 0, listener::EnableButton(add)));
            // …and "-" only while this stat is above zero.
            let sub = sub_button.instance_builder()
                .with_press_event(GuiActionEvent::NamedIndex("stat_sub".to_string(), index))
                .build(gui, container);
            stat_value.add_listener(listener::Compare(listener::Comparison::NotEqual, 0, listener::EnableButton(sub)));
        }
        else {
            // Keep the table column count consistent for button-less rows.
            gui.add(container, Layout::default());
            gui.add(container, Layout::default());
        }
        stat_value
    }
    /// Assemble the whole window: framed root, header, separator, stats
    /// table on the left and a perk grid on the right.
    fn build(gui: &mut Gui, textures: &RenderAssetList<GuiTexture>) -> PlayerWindow {
        let mut base_button = ButtonClass::new();
        base_button.set_texture(textures.get("button"));
        let base_button = Arc::new(base_button);
        let layout = Layout::center_parent(Size::new(384, 256));
        let root = gui.add_widget(gui.root(), layout, Quad::new_texture(textures.get("frame"))).into();
        gui.set_event_handler(root);
        let root_layout = BoxLayout::new(root, BoxDirection::Vertical, Padding::new(PlayerWindow::PADDING));
        let (name_text, level_text) = PlayerWindow::build_top(gui, &root_layout, textures.get("player"));
        // 1px horizontal separator between header and body.
        root_layout.add_widget(gui, BoxSize::Exact(1), Quad::new_color(gristmill::color::black()));
        let bottom = root_layout.add(gui, BoxSize::Remaining);
        let bottom_layout = SplitLayout::new(bottom, BoxDirection::Horizontal, Padding::new_inside(PlayerWindow::PADDING * 2));
        let left_container = bottom_layout.add(gui);
        let right_container = bottom_layout.add(gui);
        // 1px vertical separator between the two halves.
        bottom_layout.add_center_widget(gui, 1, Quad::new_color(gristmill::color::black()));
        gui.set_container(left_container, TableContainer::new(&[0, 24, 16, 16], 16, Padding::new_inside(PlayerWindow::PADDING), Some(1)));
        let mut add_button = ButtonClass::new_inherit(base_button.clone());
        add_button.set_icon(textures.get("add"));
        let mut sub_button = ButtonClass::new_inherit(base_button.clone());
        sub_button.set_icon(textures.get("sub"));
        // First row shows the remaining points and has no buttons; the stat
        // rows register listeners on `stat_unspent` so their "+" buttons
        // track the remaining-points value.
        let mut stat_unspent = PlayerWindow::build_stat_row(gui, left_container, "Remaining".to_string(), None);
        let stats = [
            PlayerWindow::build_stat_row(gui, left_container, "Strength".to_string(), Some((0, &mut stat_unspent, &add_button, &sub_button))),
            PlayerWindow::build_stat_row(gui, left_container, "Dexterity".to_string(), Some((1, &mut stat_unspent, &add_button, &sub_button))),
            PlayerWindow::build_stat_row(gui, left_container, "Intelligence".to_string(), Some((2, &mut stat_unspent, &add_button, &sub_button))),
        ];
        let perk_texture = textures.get("perk");
        let perk_texture_size = perk_texture.size().unwrap_or_default();
        gui.set_container(right_container, FlowContainer::new(Padding::new_inside(PlayerWindow::PADDING)));
        // Placeholder perk icons.
        for _i in 0..10 {
            gui.add_widget(right_container, Layout::new_size(perk_texture_size), Quad::new_texture(perk_texture.clone()));
        }
        add_button.instance_builder()
            .with_layout(Layout::new_size(perk_texture_size))
            .build(gui, right_container);
        PlayerWindow { root, name_text, level_text, stat_unspent, stats }
    }
    /// Populate the window from the player model.
    fn show(&mut self, gui: &mut Gui, player: &Player) {
        gui.get_mut(self.name_text).unwrap().set_text(player.name.clone());
        gui.get_mut(self.level_text).unwrap().set_text(format!("Level: {}", player.level));
        self.update_stats(gui, player);
    }
    /// Drain queued GUI events, applying stat +/- clicks to the model, then
    /// refresh the displayed values if anything changed.
    fn update(&mut self, gui: &mut Gui, player: &mut Player) {
        let mut stats_changed = false;
        gui.get_events(self.root).unwrap().dispatch_queue(|event| {
            match event.as_ref() {
                GuiActionEventRef::NamedIndex("stat_add", index) => {
                    if player.stats.unspent > 0 {
                        player.stats.unspent -= 1;
                        *player.stats.get_mut(index) += 1;
                        stats_changed = true;
                    }
                },
                GuiActionEventRef::NamedIndex("stat_sub", index) => {
                    let stat = player.stats.get_mut(index);
                    if *stat > 0 {
                        *stat -= 1;
                        player.stats.unspent += 1;
                        stats_changed = true;
                    }
                },
                _ => (),
            }
        });
        if stats_changed {
            self.update_stats(gui, player);
        }
    }
    /// Push the model's stat values into the bound GuiValues (which in turn
    /// update the row texts and button enablement via their listeners).
    fn update_stats(&mut self, gui: &mut Gui, player: &Player) {
        self.stat_unspent.set(gui, player.stats.unspent);
        self.stats[0].set(gui, player.stats.strength);
        self.stats[1].set(gui, player.stats.dexterity);
        self.stats[2].set(gui, player.stats.intelligence);
    }
}
impl Game for GuiGame {
    // Combined 3D (unused here) + 2D GUI render pass.
    type RenderPass = RenderPass3D2D<BasicGeoRenderer, GuiRenderer>;
    /// Load GUI textures, build the player window and show the sample player.
    fn load(mut resources: Resources, loader: &mut RenderLoader) -> (Self, Self::RenderPass) {
        let mut render_pass = Self::RenderPass::with_clear_color(loader, Color::new(0.0, 0.8, 0.8, 1.0));
        let textures = render_pass.scene_render1(loader).load_assets(resources.get("gui_textures"));
        let mut gui = Gui::new();
        let mut player_window = PlayerWindow::build(&mut gui, &textures);
        let player = Player::new();
        player_window.show(&mut gui, &player);
        (GuiGame {
            scene: ((), gui),
            input: GuiGameInput::default(),
            player,
            player_window,
        }, render_pass)
    }
    /// Per-frame logic: feed input to the GUI, then let the window process
    /// its queued events. Returning `true` keeps the game running.
    fn update(&mut self, _window: &Window, input_system: &mut InputSystem, _delta: f64) -> bool {
        input_system.dispatch_queue(&mut self.input);
        let gui = &mut self.scene.1;
        gui.process_input(&self.input);
        self.player_window.update(gui, &mut self.player);
        true
    }
    fn render(&mut self, _loader: &mut RenderLoader, context: &mut RenderContext, render_pass: &mut Self::RenderPass) {
        render_pass.render(context, &mut self.scene);
    }
}
/// Entry point: load fonts into the resource set and start the game loop.
fn main() {
    let mut resources = Resources::new();
    gristmill_gui::font::load_fonts(&mut resources);
    run_game::<GuiGame>(resources);
}
|
use std::{ fs };
/// AoC 2020 day 6, part 2-style union count: for each blank-line-separated
/// group, count the distinct answer characters given by anyone in the
/// group, and print the sum over all groups.
///
/// Changes from the original: the per-group dedup used `Vec::contains`
/// inside a char loop (O(n·m)); a `HashSet` does the same union in O(n).
/// The trailing `fold(0, |acc, curr| acc + curr)` is the definition of
/// `.sum()`.
pub fn main() -> Option<bool> {
    use std::collections::HashSet;
    // Read the puzzle input; failure to read it is unrecoverable here.
    let file_contents = match fs::read_to_string(
        "./inputs/2020-12-06-aoc-01-input.txt"
    ) {
        Ok(c) => c,
        Err(e) => panic!("{:?}", e)
    };
    let total_sum: usize = file_contents
        .split("\n\n")
        .map(|block| {
            // Union of all answer characters in the group; newlines between
            // the group's lines are excluded.
            block
                .chars()
                .filter(|&c| c != '\n')
                .collect::<HashSet<char>>()
                .len()
        })
        .sum();
    println!("The total sum is: {}", total_sum);
    Some(true)
}
|
use aoc19::data_path;
use itertools::Itertools;
use std::fs::rename;
/// Drop the second underscore-separated segment of a file name,
/// e.g. `"in_01_01.data"` -> `"in_01.data"`.
///
/// Rewritten to use the standard library's `[&str]::join` instead of
/// `itertools::Itertools::join` — a plain string join does not need a
/// third-party crate. Also replaces the `filter_map` returning
/// `Some`/`None` with the clearer `filter` + `map` pair.
fn new_name(name: &str) -> String {
    let kept: Vec<&str> = name
        .split('_')
        .enumerate()
        .filter(|&(ix, _)| ix != 1)
        .map(|(_, s)| s)
        .collect();
    kept.join("_")
}
/// One-shot migration: rename every file under each day directory in
/// `data_path()` by dropping the second underscore-separated segment of
/// its name (see `new_name`).
fn main() {
    for day in data_path().read_dir().unwrap() {
        let day = day.unwrap().path();
        for files in day.read_dir().unwrap() {
            let file = files.unwrap().path();
            let mut new_path = day.clone();
            let name = file.file_name().unwrap();
            let name = new_name(name.to_str().unwrap());
            new_path.push(name);
            // NOTE(review): renaming entries while iterating `read_dir` is
            // platform-dependent behavior; fine for a one-shot pass, but
            // verify before re-running on a live directory.
            rename(file, new_path).unwrap();
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// `new_name` must drop exactly the middle (second) segment.
    #[test]
    fn test_rename() {
        assert_eq!("in_01.data", &new_name("in_01_01.data"));
        assert_eq!("in_01.data", &new_name("in_09_01.data"));
    }
}
|
use crate::init;
use std::env;
use structopt::StructOpt;
/// Command-line options for the `init` subcommand.
#[derive(StructOpt, Debug)]
pub struct InitOpt {
    /// Agree to all prompts. Useful for non-interactive uses
    #[structopt(long = "force-yes", short = "y")]
    force_yes: bool,
}
/// Run project initialisation in the current working directory.
pub fn init(opt: InitOpt) -> anyhow::Result<()> {
    let current_directory = env::current_dir()?;
    init::init(current_directory, opt.force_yes)
}
// Constructor used only by integration tests, which cannot go through
// the structopt parser.
#[cfg(feature = "integration_tests")]
impl InitOpt {
    pub fn new(force_yes: bool) -> Self {
        InitOpt { force_yes }
    }
}
|
use automerge_backend;
use automerge_frontend;
use automerge_protocol;
use pyo3::prelude::*;
use pyo3::wrap_pyfunction;
/// Build a new automerge backend seeded with two local changes: a `docId`
/// key holding `doc_id`, and a `textArea` text value holding
/// `default_text`.
///
/// Cleanups vs. the original: the trailing `return doc;` becomes a tail
/// expression, and the dead `.0;` field accesses on the discarded
/// `apply_local_change` results are removed (the tuple was never used).
fn base_document(doc_id: &str, default_text: &str) -> automerge_backend::Backend {
    let mut doc = automerge_backend::Backend::init();
    let mut frontend = automerge_frontend::Frontend::new();

    // First change: record the document id under the "docId" key.
    let set_doc_id = automerge_frontend::LocalChange::set(
        automerge_frontend::Path::root().key("docId"),
        automerge_frontend::Value::Primitive(automerge_protocol::ScalarValue::Str(
            doc_id.to_string(),
        )),
    );
    let change_request = frontend
        .change::<_, automerge_frontend::InvalidChangeRequest>(
            Some("set root object".into()),
            |front| {
                front.add_change(set_doc_id)?;
                Ok(())
            },
        )
        .unwrap();
    doc.apply_local_change(change_request.unwrap()).unwrap();

    // Second change: initialise the shared text area.
    let set_text = automerge_frontend::LocalChange::set(
        automerge_frontend::Path::root().key("textArea"),
        automerge_frontend::Value::Text(default_text.chars().collect()),
    );
    let change_request = frontend
        .change::<_, automerge_frontend::InvalidChangeRequest>(Some("".into()), |front| {
            front.add_change(set_text)?;
            Ok(())
        })
        .unwrap();
    doc.apply_local_change(change_request.unwrap()).unwrap();

    doc
}
/// Python entry point: create a document and return its serialized bytes.
///
/// The original `doc.save().and_then(|data| Ok(data))` was an identity
/// `and_then`; `save().unwrap()` is equivalent. The explicit `return` is
/// replaced by a tail expression.
#[pyfunction]
fn new_document(doc_id: &str, text: &str) -> std::vec::Vec<u8> {
    base_document(doc_id, text).save().unwrap()
}
// TODO: Rename this into "apply_change", as it applies only *one* change
/// Python entry point: load a serialized document, apply one serialized
/// change, and return the updated document's bytes.
///
/// All the `.and_then(|x| Ok(x))` chains were identity operations and have
/// been dropped; each fallible step now just `unwrap`s directly.
#[pyfunction]
fn apply_changes(doc: std::vec::Vec<u8>, changes_bytes: std::vec::Vec<u8>) -> std::vec::Vec<u8> {
    let mut doc = automerge_backend::Backend::load(doc).unwrap();
    let change = automerge_backend::Change::from_bytes(changes_bytes).unwrap();
    doc.apply_changes(vec![change]).unwrap();
    doc.save().unwrap()
}
/// Python entry point: load a serialized document and return the raw bytes
/// of every change since the beginning of history.
///
/// The identity `.and_then(|back| Ok(back))` is removed, and the manual
/// push loop is replaced by the equivalent `map`/`collect`.
#[pyfunction]
fn get_all_changes(doc: std::vec::Vec<u8>) -> std::vec::Vec<std::vec::Vec<u8>> {
    let doc = automerge_backend::Backend::load(doc).unwrap();
    doc.get_changes(&[])
        .iter()
        .map(|c| c.bytes.clone())
        .collect()
}
/// Register the document functions on the given Python module.
pub fn init_submodule(module: &PyModule) -> PyResult<()> {
    module.add_function(wrap_pyfunction!(new_document, module)?)?;
    module.add_function(wrap_pyfunction!(apply_changes, module)?)?;
    module.add_function(wrap_pyfunction!(get_all_changes, module)?)?;
    Ok(())
}
/*
* Critical path: new_document, get_all_changes.
* TODO apply_changes
*/
#[test]
fn test_new_document() {
    // Instanciating an automerge frontend and generating a patch of changes, and checking the document changed.
    let doc = new_document("test_doc_id", "Test content");
    let changes = get_all_changes(doc);
    // There must be two changes : one to set the doc id, one to set the content.
    assert_eq!( changes.len(), 2 );
}
|
use crate::config::dfinity::Config;
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use anyhow::{anyhow, bail};
use clap::Clap;
use serde_json::value::Value;
/// Configures project options for your currently-selected project.
#[derive(Clap)]
pub struct ConfigOpts {
    /// Specifies the name of the configuration option to set or read.
    /// Use the period delineated path to specify the option to set or read.
    /// If this is not mentioned, outputs the whole configuration.
    config_path: String,
    /// Specifies the new value to set.
    /// If you don't specify a value, the command displays the current value of the option from the configuration file.
    value: Option<String>,
    /// Specifies the format of the output. By default, the output format is JSON.
    #[clap(long, default_value("json"), possible_values(&["json", "text"]))]
    format: String,
}
/// Read or write one value in the project configuration, addressed by a
/// dot-separated path that is translated into a JSON pointer.
///
/// Cleanups vs. the original: `replace(".", "/")` used a one-character
/// string pattern (clippy `single_char_pattern`) — a `char` pattern is
/// equivalent and cheaper — and a dead intermediate `&str` binding of the
/// path was removed. Behavior is unchanged.
pub fn exec(env: &dyn Environment, opts: ConfigOpts) -> DfxResult {
    // Cannot use the `env` variable as we need a mutable copy.
    let mut config: Config = env.get_config_or_anyhow()?.as_ref().clone();
    let format = opts.format.as_str();
    // We replace `.` with `/` so the user can use `path.value.field` instead of
    // being forced to use `path/value/field`. Since none of our keys have
    // slashes or tildes in them, JSON-pointer escaping is not a problem.
    let mut config_path = opts.config_path.replace('.', "/");
    // JSON pointers must be absolute, so ensure a leading `/`.
    if !config_path.starts_with('/') {
        config_path.insert(0, '/');
    }
    // The document root is addressed by the *empty* pointer, not "/".
    if config_path == "/" {
        config_path.clear()
    }
    if let Some(arg_value) = opts.value {
        // Try to parse the new value as JSON; if that fails, treat it as a
        // plain string.
        let value =
            serde_json::from_str::<Value>(&arg_value).unwrap_or_else(|_| Value::String(arg_value));
        *config
            .get_mut_json()
            .pointer_mut(config_path.as_str())
            .ok_or_else(|| anyhow!("Config path does not exist at '{}'.", config_path))? = value;
        config.save()
    } else if let Some(value) = config.get_json().pointer(config_path.as_str()) {
        match format {
            "text" => println!("{}", value),
            "json" => println!("{}", serde_json::to_string_pretty(value)?),
            _ => {}
        }
        Ok(())
    } else {
        bail!("Config path does not exist at '{}'.", config_path)
    }
}
|
// Message-type submodules exposed by this module.
pub mod encoded_message;
pub mod message;
pub mod packetmessage;
|
use crate::Result;
use clap::{App, ArgMatches};
use oauth2::{ClientId, ClientSecret, RedirectUrl};
use ronor::Sonos;
use rustyline::Editor;
/// Subcommand name.
pub const NAME: &str = "init";
/// Build the clap definition for this subcommand.
pub fn build() -> App<'static, 'static> {
    App::new(NAME).about("Initialise sonos integration configuration")
}
/// Interactively collect the Sonos integration credentials from stdin and
/// store them in the configuration.
pub fn run(sonos: &mut Sonos, _matches: &ArgMatches) -> Result<()> {
    println!("1. Go to https://integration.sonos.com/ and create a developer account.");
    println!("   NOTE that your existing Sonos user account does not work.");
    println!();
    println!("2. Create a new control integration and enter the information below.");
    println!();
    // Read the three values line-by-line with rustyline editing support.
    let mut console = Editor::<()>::new();
    let client_id = ClientId::new(console.readline("Client identifier: ")?);
    let client_secret = ClientSecret::new(console.readline("Client secret: ")?);
    let redirect_url =
        RedirectUrl::new(console.readline("Redirection URL: ")?)?;
    sonos.set_integration_config(client_id, client_secret, redirect_url)?;
    println!();
    println!("OK, ready to go.");
    println!("Now run 'ronor login' to authorize access to your Sonos user account.");
    Ok(())
}
|
use crate::{
remote_ptr::{RemotePtr, Void},
wait_status::WaitStatus,
};
use libc::pid_t;
use std::ffi::{OsStr, OsString};
/// Payload of a task event, one variant per event kind.
#[derive(Clone)]
pub enum TraceTaskEventVariant {
    /// DIFF NOTE: We DONT have a `None` variant here, unlike rr.
    ///
    /// Created by clone(2), fork(2), vfork(2) syscalls
    Clone(TraceTaskEventClone),
    /// Recorded when a task performs a successful execve.
    Exec(TraceTaskEventExec),
    /// Recorded when a task exits.
    Exit(TraceTaskEventExit),
}
/// Discriminant-only mirror of `TraceTaskEventVariant`, for comparing event
/// kinds without touching the payload.
#[derive(Copy, Clone, Eq, PartialEq)]
pub enum TraceTaskEventType {
    Clone,
    Exec,
    Exit,
}
impl TraceTaskEvent {
    /// Borrow the `Clone` payload.
    ///
    /// Panics if this event is not a clone event.
    pub fn clone_variant(&self) -> &TraceTaskEventClone {
        if let TraceTaskEventVariant::Clone(v) = &self.variant {
            v
        } else {
            panic!("Not a TraceTaskEventTypeClone")
        }
    }

    /// Borrow the `Exec` payload.
    ///
    /// Panics if this event is not an exec event.
    pub fn exec_variant(&self) -> &TraceTaskEventExec {
        if let TraceTaskEventVariant::Exec(v) = &self.variant {
            v
        } else {
            panic!("Not a TraceTaskEventTypeExec")
        }
    }

    /// Borrow the `Exit` payload.
    ///
    /// Panics if this event is not an exit event.
    pub fn exit_variant(&self) -> &TraceTaskEventExit {
        if let TraceTaskEventVariant::Exit(v) = &self.variant {
            v
        } else {
            panic!("Not a TraceTaskEventTypeExit")
        }
    }
}
/// Data recorded for a clone/fork/vfork event.
#[derive(Clone)]
pub struct TraceTaskEventClone {
    // Tid of the parent (cloning) task
    pub(super) parent_tid_: pid_t,
    // Tid of the new task in its own pid namespace (per field name — confirm at the recording site)
    pub(super) own_ns_tid_: pid_t,
    // Raw clone(2) flags
    pub(super) clone_flags_: i32,
}
impl TraceTaskEventClone {
    /// Tid of the parent (cloning) task.
    pub fn parent_tid(&self) -> pid_t {
        self.parent_tid_
    }

    /// Tid of the new task in its own pid namespace.
    pub fn own_ns_tid(&self) -> pid_t {
        self.own_ns_tid_
    }

    /// Raw clone(2) flags.
    pub fn clone_flags(&self) -> i32 {
        self.clone_flags_
    }
}
/// Data recorded for an exec event.
#[derive(Clone)]
pub struct TraceTaskEventExec {
    // Path of the executable
    pub(super) file_name_: OsString,
    // Command-line arguments of the exec'd program
    pub(super) cmd_line_: Vec<OsString>,
    // Base address of the executable image (per field name — confirm with the writer)
    pub(super) exe_base_: RemotePtr<Void>,
}
impl TraceTaskEventExec {
    /// Path of the executable.
    pub fn file_name(&self) -> &OsStr {
        self.file_name_.as_os_str()
    }

    /// Command line of the exec'd program.
    pub fn cmd_line(&self) -> &[OsString] {
        self.cmd_line_.as_slice()
    }

    /// Base address of the executable image.
    pub fn exe_base(&self) -> RemotePtr<Void> {
        self.exe_base_
    }
}
/// Data recorded when a traced task exits.
#[derive(Clone)]
pub struct TraceTaskEventExit {
    // Final wait status of the exiting task
    pub(super) exit_status_: WaitStatus,
}
impl TraceTaskEventExit {
    /// Wait status the task exited with.
    pub fn exit_status(&self) -> WaitStatus {
        self.exit_status_
    }
}
/// A task-lifecycle event (clone/exec/exit) recorded in the trace,
/// tagged with the tid of the task it happened to.
pub struct TraceTaskEvent {
    // The event payload
    pub(super) variant: TraceTaskEventVariant,
    // Tid of the affected task
    pub(super) tid_: pid_t,
}
impl TraceTaskEvent {
    /// Tid of the task this event belongs to.
    pub fn tid(&self) -> pid_t {
        self.tid_
    }

    /// Borrow the event payload.
    pub fn event_variant(&self) -> &TraceTaskEventVariant {
        &self.variant
    }

    /// Discriminant-only view of the payload.
    pub fn event_type(&self) -> TraceTaskEventType {
        match &self.variant {
            TraceTaskEventVariant::Clone(_) => TraceTaskEventType::Clone,
            TraceTaskEventVariant::Exec(_) => TraceTaskEventType::Exec,
            TraceTaskEventVariant::Exit(_) => TraceTaskEventType::Exit,
        }
    }
}
|
// Copyright 2019. The Tari Project
// SPDX-License-Identifier: BSD-3-Clause
//! Extended commitments are commitments that have more than one blinding factor.
use alloc::vec::Vec;
use core::{borrow::Borrow, iter::once};
use curve25519_dalek::{
ristretto::{CompressedRistretto, RistrettoPoint},
scalar::Scalar,
traits::{Identity, MultiscalarMul},
};
#[cfg(feature = "precomputed_tables")]
use crate::ristretto::pedersen::scalar_mul_with_pre_computation_tables;
use crate::{
alloc::string::ToString,
commitment::{
ExtendedHomomorphicCommitmentFactory,
ExtensionDegree,
HomomorphicCommitment,
HomomorphicCommitmentFactory,
},
errors::CommitmentError,
ristretto::{
constants::{ristretto_nums_points, RISTRETTO_NUMS_POINTS_COMPRESSED},
pedersen::{
ristretto_pedersen_h,
ristretto_pedersen_h_compressed,
PedersenCommitment,
RISTRETTO_PEDERSEN_G,
RISTRETTO_PEDERSEN_G_COMPRESSED,
},
RistrettoPublicKey,
RistrettoSecretKey,
},
};
/// Generates extended Pederson commitments `sum(k_i.G_i) + v.H` using the provided base
/// [RistrettoPoints](curve25519_dalek::ristretto::RistrettoPoints).
/// Notes:
/// - Homomorphism with public key only holds for extended commitments with `ExtensionDegree::DefaultPedersen`
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ExtendedPedersenCommitmentFactory {
    /// Base for the committed value
    pub(crate) h_base: RistrettoPoint,
    /// Compressed base for the committed value
    pub(crate) h_base_compressed: CompressedRistretto,
    /// Base for the blinding factor vector (length equals `extension_degree as usize`,
    /// see `new_with_extension_degree`)
    pub(crate) g_base_vec: Vec<RistrettoPoint>,
    /// Compressed base for the blinding factor vector
    pub(crate) g_base_compressed_vec: Vec<CompressedRistretto>,
    /// Blinding factor extension degree
    pub(crate) extension_degree: ExtensionDegree,
}
impl ExtendedPedersenCommitmentFactory {
    /// Create a new Extended Pedersen Ristretto Commitment factory for the required extension degree using
    /// pre-calculated compressed constants - we only hold references to the static generator points.
    ///
    /// # Errors
    /// Returns `CommitmentError::CommitmentExtensionDegree` when there are not enough Ristretto NUMS
    /// points available for the requested extension degree.
    pub fn new_with_extension_degree(extension_degree: ExtensionDegree) -> Result<Self, CommitmentError> {
        if extension_degree as usize > ristretto_nums_points().len() ||
            extension_degree as usize > RISTRETTO_NUMS_POINTS_COMPRESSED.len()
        {
            return Err(CommitmentError::CommitmentExtensionDegree {
                reason: "Not enough Ristretto NUMS points to construct the extended commitment factory".to_string(),
            });
        }
        // Base 0 is the standard Pedersen G; bases 1..degree come from the NUMS points.
        let g_base_vec = once(&RISTRETTO_PEDERSEN_G)
            .chain(ristretto_nums_points()[1..extension_degree as usize].iter())
            .copied()
            .collect();
        let g_base_compressed_vec = once(&RISTRETTO_PEDERSEN_G_COMPRESSED)
            .chain(RISTRETTO_NUMS_POINTS_COMPRESSED[1..extension_degree as usize].iter())
            .copied()
            .collect();
        Ok(Self {
            h_base: *ristretto_pedersen_h(),
            h_base_compressed: *ristretto_pedersen_h_compressed(),
            g_base_vec,
            g_base_compressed_vec,
            extension_degree,
        })
    }

    /// Compute `v.H + sum(k_i.G_i)` over the first `blinding_factors.len()` blinding bases.
    /// Callers must already have validated `1 <= blinding_factors.len() <= extension_degree`.
    fn multiscalar_commit(&self, value: &Scalar, blinding_factors: &[Scalar]) -> RistrettoPoint {
        let scalars = once(value).chain(blinding_factors);
        let g_base_head = self.g_base_vec.iter().take(blinding_factors.len());
        let points = once(&self.h_base).chain(g_base_head);
        RistrettoPoint::multiscalar_mul(scalars, points)
    }

    /// Creates a Pedersen commitment using the value scalar and a blinding factor vector.
    ///
    /// # Errors
    /// Returns `CommitmentError::CommitmentExtensionDegree` when the blinding vector is empty or
    /// longer than this factory's extension degree.
    pub fn commit_scalars(
        &self,
        value: &Scalar,
        blinding_factors: &[Scalar],
    ) -> Result<RistrettoPoint, CommitmentError>
    where
        for<'a> &'a Scalar: Borrow<Scalar>,
    {
        if blinding_factors.is_empty() || blinding_factors.len() > self.extension_degree as usize {
            Err(CommitmentError::CommitmentExtensionDegree {
                reason: "blinding vector".to_string(),
            })
        } else if blinding_factors.len() == 1 &&
            (self.g_base_vec[0], self.h_base) == (RISTRETTO_PEDERSEN_G, *ristretto_pedersen_h())
        {
            // Single blinding factor on the default Pedersen bases: use the
            // precomputed-table fast path when the feature is enabled.
            #[cfg(feature = "precomputed_tables")]
            {
                Ok(scalar_mul_with_pre_computation_tables(&blinding_factors[0], value))
            }
            #[cfg(not(feature = "precomputed_tables"))]
            {
                Ok(self.multiscalar_commit(value, blinding_factors))
            }
        } else {
            Ok(self.multiscalar_commit(value, blinding_factors))
        }
    }
}
impl Default for ExtendedPedersenCommitmentFactory {
    /// The default Extended Pedersen Ristretto Commitment factory is of extension degree Zero; this corresponds to
    /// the default non extended Pedersen Ristretto Commitment factory.
    fn default() -> Self {
        let factory = Self::new_with_extension_degree(ExtensionDegree::DefaultPedersen);
        factory.expect("Ristretto default base points not defined!")
    }
}
impl HomomorphicCommitmentFactory for ExtendedPedersenCommitmentFactory {
    type P = RistrettoPublicKey;

    /// Commit to `v` with a single blinding factor `k` on the default bases.
    fn commit(&self, k: &RistrettoSecretKey, v: &RistrettoSecretKey) -> PedersenCommitment {
        let point = self
            .commit_scalars(&v.0, &[k.0])
            .expect("Default commitments will never fail");
        HomomorphicCommitment(RistrettoPublicKey::new_from_pk(point))
    }

    /// A commitment to zero with a zero blinding factor (the identity point).
    fn zero(&self) -> PedersenCommitment {
        let identity = RistrettoPoint::identity();
        HomomorphicCommitment(RistrettoPublicKey::new_from_pk(identity))
    }

    /// Check that `commitment` opens to `(k, v)` by recommitting and comparing.
    fn open(&self, k: &RistrettoSecretKey, v: &RistrettoSecretKey, commitment: &PedersenCommitment) -> bool {
        &self.commit(k, v) == commitment
    }

    /// Commit to an integer value with blinding factor `k`.
    fn commit_value(&self, k: &RistrettoSecretKey, value: u64) -> PedersenCommitment {
        self.commit(k, &RistrettoSecretKey::from(value))
    }

    /// Check that `commitment` opens to `(k, v)` for an integer value.
    fn open_value(&self, k: &RistrettoSecretKey, v: u64, commitment: &PedersenCommitment) -> bool {
        self.open(k, &RistrettoSecretKey::from(v), commitment)
    }
}
impl ExtendedHomomorphicCommitmentFactory for ExtendedPedersenCommitmentFactory {
    type P = RistrettoPublicKey;

    /// Commit to `v` with a vector of blinding factors `k_vec`.
    fn commit_extended(
        &self,
        k_vec: &[RistrettoSecretKey],
        v: &RistrettoSecretKey,
    ) -> Result<PedersenCommitment, CommitmentError> {
        let scalars: Vec<Scalar> = k_vec.iter().map(|k| k.0).collect();
        let point = self.commit_scalars(&v.0, &scalars)?;
        Ok(HomomorphicCommitment(RistrettoPublicKey::new_from_pk(point)))
    }

    /// A commitment to zero with a zero blinding vector (the identity point).
    fn zero_extended(&self) -> PedersenCommitment {
        let identity = RistrettoPoint::identity();
        HomomorphicCommitment(RistrettoPublicKey::new_from_pk(identity))
    }

    /// Check that `commitment` opens to `(k_vec, v)` by recommitting and comparing.
    fn open_extended(
        &self,
        k_vec: &[RistrettoSecretKey],
        v: &RistrettoSecretKey,
        commitment: &PedersenCommitment,
    ) -> Result<bool, CommitmentError> {
        // Any commit failure is re-wrapped as a CommitmentExtensionDegree error.
        match self.commit_extended(k_vec, v) {
            Ok(c_test) => Ok(commitment == &c_test),
            Err(e) => Err(CommitmentError::CommitmentExtensionDegree { reason: e.to_string() }),
        }
    }

    /// Commit to an integer value with a vector of blinding factors.
    fn commit_value_extended(
        &self,
        k_vec: &[RistrettoSecretKey],
        value: u64,
    ) -> Result<PedersenCommitment, CommitmentError> {
        self.commit_extended(k_vec, &RistrettoSecretKey::from(value))
    }

    /// Check that `commitment` opens to `(k_vec, v)` for an integer value.
    fn open_value_extended(
        &self,
        k_vec: &[RistrettoSecretKey],
        v: u64,
        commitment: &PedersenCommitment,
    ) -> Result<bool, CommitmentError> {
        self.open_extended(k_vec, &RistrettoSecretKey::from(v), commitment)
    }
}
#[cfg(test)]
mod test {
use alloc::vec::Vec;
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use curve25519_dalek::{ristretto::RistrettoPoint, scalar::Scalar, traits::MultiscalarMul};
use rand::rngs::ThreadRng;
use crate::{
commitment::{
ExtendedHomomorphicCommitmentFactory,
ExtensionDegree,
HomomorphicCommitment,
HomomorphicCommitmentFactory,
},
keys::{PublicKey, SecretKey},
ristretto::{
constants::ristretto_nums_points,
pedersen::{
commitment_factory::PedersenCommitmentFactory,
extended_commitment_factory::ExtendedPedersenCommitmentFactory,
ristretto_pedersen_h,
RISTRETTO_PEDERSEN_G,
},
RistrettoPublicKey,
RistrettoSecretKey,
},
};
// All extension degrees exercised by the tests below.
static EXTENSION_DEGREE: [ExtensionDegree; 6] = [
    ExtensionDegree::DefaultPedersen,
    ExtensionDegree::AddOneBasePoint,
    ExtensionDegree::AddTwoBasePoints,
    ExtensionDegree::AddThreeBasePoints,
    ExtensionDegree::AddFourBasePoints,
    ExtensionDegree::AddFiveBasePoints,
];
/// The default factory must use the standard Pedersen bases and equal the
/// degree-`DefaultPedersen` factory.
#[test]
fn check_default_base() {
    let factory = ExtendedPedersenCommitmentFactory::default();
    let from_degree =
        ExtendedPedersenCommitmentFactory::new_with_extension_degree(ExtensionDegree::DefaultPedersen).unwrap();
    assert_eq!(RISTRETTO_PEDERSEN_G, factory.g_base_vec[0]);
    assert_eq!(*ristretto_pedersen_h(), factory.h_base);
    assert_eq!(factory, from_degree);
}
/// Default bases for PedersenCommitmentFactory and all extension degrees of ExtendedPedersenCommitmentFactory must
/// be equal
#[test]
fn check_extended_bases_between_factories() {
    let singular = PedersenCommitmentFactory::default();
    for extension_degree in EXTENSION_DEGREE {
        let extended = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
        assert_eq!(extension_degree, extended.extension_degree);
        assert_eq!(extended.g_base_vec[0], singular.G);
        assert_eq!(extended.h_base, singular.H);
    }
}
#[test]
/// Verify that the identity point is equal to a commitment to zero with a zero blinding factor vector on the base
/// points
fn check_zero_both_traits() {
    for extension_degree in EXTENSION_DEGREE {
        let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
        // One scalar per base point: H first, then all blinding bases.
        let zero_values = vec![Scalar::ZERO; extension_degree as usize + 1];
        let mut points = vec![factory.h_base];
        points.extend(factory.g_base_vec.iter().copied());
        let c = RistrettoPoint::multiscalar_mul(&zero_values, &points);
        let expected = HomomorphicCommitment(RistrettoPublicKey::new_from_pk(c));
        // HomomorphicCommitmentFactory
        assert_eq!(expected, factory.zero());
        // ExtendedHomomorphicCommitmentFactory
        assert_eq!(expected, factory.zero_extended());
    }
}
/// Simple test for open for each extension degree:
/// - Generate random sets of scalars and calculate the Pedersen commitment for them.
/// - Check that the commitment = sum(k_i.G_i) + v.H, and that `open` returns `true` for `open(k_i, v)`
#[test]
#[allow(non_snake_case)]
fn check_open_both_traits() {
    let H = *ristretto_pedersen_h();
    let mut rng = rand::thread_rng();
    for extension_degree in EXTENSION_DEGREE {
        let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
        for _ in 0..25 {
            let v = RistrettoSecretKey::random(&mut rng);
            // BUG FIX: `vec![expr; n]` clones ONE random value into every slot, so all
            // blinding factors were identical; draw an independent key per base instead.
            let k_vec: Vec<RistrettoSecretKey> = (0..extension_degree as usize)
                .map(|_| RistrettoSecretKey::random(&mut rng))
                .collect();
            let c_extended = factory.commit_extended(&k_vec, &v).unwrap();
            let mut c_calc: RistrettoPoint = v.0 * H + k_vec[0].0 * RISTRETTO_PEDERSEN_G;
            #[allow(clippy::needless_range_loop)]
            for i in 1..(extension_degree as usize) {
                c_calc += k_vec[i].0 * ristretto_nums_points()[i];
            }
            assert_eq!(RistrettoPoint::from(c_extended.as_public_key()), c_calc);
            // ExtendedHomomorphicCommitmentFactory
            // - Default open
            assert!(factory.open_extended(&k_vec, &v, &c_extended).unwrap());
            // - A different value doesn't open the commitment
            assert!(!factory.open_extended(&k_vec, &(&v + &v), &c_extended).unwrap());
            // - A different blinding factor doesn't open the commitment
            let mut not_k = k_vec.clone();
            not_k[0] = &not_k[0] + v.clone();
            assert!(!factory.open_extended(&not_k, &v, &c_extended).unwrap());
            // HomomorphicCommitmentFactory vs. ExtendedHomomorphicCommitmentFactory
            if extension_degree == ExtensionDegree::DefaultPedersen {
                let c = factory.commit(&k_vec[0], &v);
                assert_eq!(c, c_extended);
                // - Default open
                assert!(factory.open(&k_vec[0], &v, &c));
                // - A different value doesn't open the commitment
                assert!(!factory.open(&k_vec[0], &(&v + &v), &c));
                // - A different blinding factor doesn't open the commitment
                assert!(!factory.open(&not_k[0], &v, &c));
            }
        }
    }
}
/// Test for random sets of scalars that the homomorphic property holds. i.e.
/// $$
/// C = C1 + C2 = sum((k1_i+k2_i).G_i) + (v1+v2).H
/// $$
/// and
/// `open(k1_i+k2_i, v1+v2)` is true for _C_
#[test]
fn check_homomorphism_both_traits() {
    let mut rng = rand::thread_rng();
    for extension_degree in EXTENSION_DEGREE {
        for _ in 0..25 {
            let v1 = RistrettoSecretKey::random(&mut rng);
            let v2 = RistrettoSecretKey::random(&mut rng);
            let v_sum = &v1 + &v2;
            // BUG FIX: `vec![expr; n]` clones ONE random value into every slot; draw an
            // independent key per base point so the vectors are genuinely random.
            let k1_vec: Vec<RistrettoSecretKey> = (0..extension_degree as usize)
                .map(|_| RistrettoSecretKey::random(&mut rng))
                .collect();
            let k2_vec: Vec<RistrettoSecretKey> = (0..extension_degree as usize)
                .map(|_| RistrettoSecretKey::random(&mut rng))
                .collect();
            let mut k_sum_i = Vec::with_capacity(extension_degree as usize);
            for i in 0..extension_degree as usize {
                k_sum_i.push(&k1_vec[i] + &k2_vec[i]);
            }
            let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
            // ExtendedHomomorphicCommitmentFactory
            let c1_extended = factory.commit_extended(&k1_vec, &v1).unwrap();
            let c2_extended = factory.commit_extended(&k2_vec, &v2).unwrap();
            let c_sum_extended = &c1_extended + &c2_extended;
            let c_sum2_extended = factory.commit_extended(&k_sum_i, &v_sum).unwrap();
            assert!(factory.open_extended(&k1_vec, &v1, &c1_extended).unwrap());
            assert!(factory.open_extended(&k2_vec, &v2, &c2_extended).unwrap());
            assert_eq!(c_sum_extended, c_sum2_extended);
            assert!(factory.open_extended(&k_sum_i, &v_sum, &c_sum_extended).unwrap());
            // HomomorphicCommitmentFactory vs. ExtendedHomomorphicCommitmentFactory
            if extension_degree == ExtensionDegree::DefaultPedersen {
                let c1 = factory.commit(&k1_vec[0], &v1);
                assert_eq!(c1, c1_extended);
                let c2 = factory.commit(&k2_vec[0], &v2);
                assert_eq!(c2, c2_extended);
                let c_sum = &c1 + &c2;
                assert_eq!(c_sum, c_sum_extended);
                let c_sum2 = factory.commit(&k_sum_i[0], &v_sum);
                assert_eq!(c_sum2, c_sum2_extended);
                assert!(factory.open(&k1_vec[0], &v1, &c1));
                assert!(factory.open(&k2_vec[0], &v2, &c2));
                assert_eq!(c_sum, c_sum2);
                assert!(factory.open(&k_sum_i[0], &v_sum, &c_sum));
            }
        }
    }
}
/// Test addition of a public key to a homomorphic commitment.
/// $$
/// C = C_1 + P = (v_1.H + k_1.G) + k_2.G = v_1.H + (k_1 + k_2).G
/// $$
/// and
/// `open(k1+k2, v1)` is true for _C_
#[test]
fn check_homomorphism_with_public_key_singular() {
    let mut rng = rand::thread_rng();
    // Left-hand side: commit, then add a random public key.
    let v1 = RistrettoSecretKey::random(&mut rng);
    let k1 = RistrettoSecretKey::random(&mut rng);
    let factory = ExtendedPedersenCommitmentFactory::default();
    let c1 = factory.commit(&k1, &v1);
    let (k2, k2_pub) = RistrettoPublicKey::random_keypair(&mut rng);
    let lhs = &c1 + &k2_pub;
    // Right-hand side: commit directly with the summed blinding factor.
    let k_sum = &k1 + &k2;
    let rhs = factory.commit(&k_sum, &v1);
    // Test
    assert_eq!(lhs, rhs);
    assert!(factory.open(&k_sum, &v1, &rhs));
}
/// Draw random scalars until a non-zero one is produced.
fn scalar_random_not_zero(rng: &mut ThreadRng) -> Scalar {
    loop {
        let candidate = Scalar::random(rng);
        if candidate != Scalar::ZERO {
            break candidate;
        }
    }
}
// Try to create an extended 'RistrettoPublicKey'
fn random_keypair_extended(
    factory: &ExtendedPedersenCommitmentFactory,
    extension_degree: ExtensionDegree,
    rng: &mut ThreadRng,
) -> (RistrettoSecretKey, RistrettoPublicKey) {
    // One non-zero scalar, zero-padded (Scalar::default) up to the extension degree.
    let mut k_vec = vec![scalar_random_not_zero(rng)];
    k_vec.resize(extension_degree as usize, Scalar::default());
    let secret = RistrettoSecretKey(k_vec[0]);
    let public = RistrettoPublicKey::new_from_pk(RistrettoPoint::multiscalar_mul(k_vec, &factory.g_base_vec));
    (secret, public)
}
/// Test addition of a public key to a homomorphic commitment for extended commitments
/// with`ExtensionDegree::DefaultPedersen`. $$
/// C = C_1 + P = (v1.H + sum(k1_i.G_i)) + k2.G_0 = v1.H + (k2 + sum(k1_i))).G
/// $$
/// and
/// `open(k1+k2, v1)` is true for _C_
/// Note: Homomorphism with public key only holds for extended commitments with`ExtensionDegree::DefaultPedersen`
#[test]
fn check_homomorphism_with_public_key_extended() {
    let mut rng = rand::thread_rng();
    for extension_degree in EXTENSION_DEGREE {
        // Left-hand side
        let v1 = RistrettoSecretKey::random(&mut rng);
        // BUG FIX: `vec![expr; n]` clones ONE random key into every slot; draw an
        // independent key per base point instead.
        let k1_vec: Vec<RistrettoSecretKey> = (0..extension_degree as usize)
            .map(|_| RistrettoSecretKey::random(&mut rng))
            .collect();
        let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
        let c1 = factory.commit_extended(&k1_vec, &v1).unwrap();
        let mut k2_vec = Vec::with_capacity(extension_degree as usize);
        let mut k2_pub_vec = Vec::with_capacity(extension_degree as usize);
        for _i in 0..extension_degree as usize {
            let (k2, k2_pub) = random_keypair_extended(&factory, extension_degree, &mut rng);
            k2_vec.push(k2);
            k2_pub_vec.push(k2_pub);
        }
        let mut c_sum = c1.0;
        for k2_pub in &k2_pub_vec {
            c_sum = c_sum + k2_pub.clone();
        }
        // Right-hand side
        let mut k_sum_vec = Vec::with_capacity(extension_degree as usize);
        for i in 0..extension_degree as usize {
            k_sum_vec.push(&k1_vec[i] + &k2_vec[i]);
        }
        let c2 = factory.commit_extended(&k_sum_vec, &v1).unwrap();
        // Test
        assert!(factory.open_extended(&k_sum_vec, &v1, &c2).unwrap());
        match extension_degree {
            ExtensionDegree::DefaultPedersen => {
                assert_eq!(c_sum, c2.0);
            },
            _ => {
                // With extra blinding bases the public-key shortcut no longer holds.
                assert_ne!(c_sum, c2.0);
            },
        }
    }
}
/// Test addition of individual homomorphic commitments to be equal to a single vector homomorphic commitment.
/// $$
/// sum(C_j) = sum((v.H + k.G)_j) = sum(v_j).H + sum(k_j).G
/// $$
/// and
/// `open(sum(k_j), sum(v_j))` is true for `sum(C_j)`
#[test]
fn sum_commitment_vector_singular() {
    let mut rng = rand::thread_rng();
    let zero = RistrettoSecretKey::default();
    let commitment_factory = ExtendedPedersenCommitmentFactory::default();
    // Start from a commitment to (0, 0) and fold in 100 random commitments.
    let mut v_sum = RistrettoSecretKey::default();
    let mut k_sum = RistrettoSecretKey::default();
    let mut c_sum = commitment_factory.commit(&zero, &zero);
    let mut commitments = Vec::with_capacity(100);
    for _ in 0..100 {
        let v = RistrettoSecretKey::random(&mut rng);
        let k = RistrettoSecretKey::random(&mut rng);
        let c = commitment_factory.commit(&k, &v);
        v_sum = &v_sum + &v;
        k_sum = &k_sum + &k;
        c_sum = &c_sum + &c;
        commitments.push(c);
    }
    assert!(commitment_factory.open(&k_sum, &v_sum, &c_sum));
    assert_eq!(c_sum, commitments.iter().sum());
}
/// Test addition of individual homomorphic commitments to be equal to a single vector homomorphic commitment for
/// extended commitments.
/// $$
/// sum(C_j) = sum((v.H + sum(k_i.G_i))_j) = sum(v_j).H + sum(sum(k_i.G_i)_j)
/// $$
/// and
/// `open(sum(sum(k_i)_j), sum(v_j))` is true for `sum(C_j)`
#[test]
fn sum_commitment_vector_extended() {
    let mut rng = rand::thread_rng();
    let v_zero = RistrettoSecretKey::default();
    let k_zero = vec![RistrettoSecretKey::default(); ExtensionDegree::AddFiveBasePoints as usize];
    for extension_degree in EXTENSION_DEGREE {
        let mut v_sum = RistrettoSecretKey::default();
        let mut k_sum_vec = vec![RistrettoSecretKey::default(); extension_degree as usize];
        let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
        let mut c_sum = factory
            .commit_extended(&k_zero[0..extension_degree as usize], &v_zero)
            .unwrap();
        let mut commitments = Vec::with_capacity(25);
        for _ in 0..25 {
            let v = RistrettoSecretKey::random(&mut rng);
            v_sum = &v_sum + &v;
            // BUG FIX: `vec![expr; n]` clones ONE random key into every slot; draw an
            // independent key per base point instead.
            let k_vec: Vec<RistrettoSecretKey> = (0..extension_degree as usize)
                .map(|_| RistrettoSecretKey::random(&mut rng))
                .collect();
            for i in 0..extension_degree as usize {
                k_sum_vec[i] = &k_sum_vec[i] + &k_vec[i];
            }
            let c = factory.commit_extended(&k_vec, &v).unwrap();
            c_sum = &c_sum + &c;
            commitments.push(c);
        }
        assert!(factory.open_extended(&k_sum_vec, &v_sum, &c_sum).unwrap());
        assert_eq!(c_sum, commitments.iter().sum());
    }
}
#[cfg(feature = "serde")]
mod test_serialize {
    use tari_utilities::message_format::MessageFormat;

    use super::*;
    use crate::ristretto::pedersen::PedersenCommitment;

    /// Round-trip a singular commitment through Base64 and MessagePack.
    #[test]
    fn serialize_deserialize_singular() {
        let mut rng = rand::thread_rng();
        let factory = ExtendedPedersenCommitmentFactory::default();
        let k = RistrettoSecretKey::random(&mut rng);
        let c = factory.commit_value(&k, 420);
        // Base64
        let ser_c = c.to_base64().unwrap();
        let c2 = PedersenCommitment::from_base64(&ser_c).unwrap();
        assert!(factory.open_value(&k, 420, &c2));
        // MessagePack
        let ser_c = c.to_binary().unwrap();
        let c2 = PedersenCommitment::from_binary(&ser_c).unwrap();
        assert!(factory.open_value(&k, 420, &c2));
        // Invalid Base64
        assert!(PedersenCommitment::from_base64("bad@ser$").is_err());
    }

    /// Round-trip extended commitments for every extension degree.
    #[test]
    fn serialize_deserialize_extended() {
        let mut rng = rand::thread_rng();
        for extension_degree in EXTENSION_DEGREE {
            let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
            // BUG FIX: `vec![expr; n]` clones ONE random key into every slot; draw an
            // independent key per base point instead.
            let k_vec: Vec<RistrettoSecretKey> = (0..extension_degree as usize)
                .map(|_| RistrettoSecretKey::random(&mut rng))
                .collect();
            let c = factory.commit_value_extended(&k_vec, 420).unwrap();
            // Base64
            let ser_c = c.to_base64().unwrap();
            let c2 = PedersenCommitment::from_base64(&ser_c).unwrap();
            assert!(factory.open_value_extended(&k_vec, 420, &c2).unwrap());
            // MessagePack
            let ser_c = c.to_binary().unwrap();
            let c2 = PedersenCommitment::from_binary(&ser_c).unwrap();
            assert!(factory.open_value_extended(&k_vec, 420, &c2).unwrap());
            // Invalid Base64
            assert!(PedersenCommitment::from_base64("bad@ser$").is_err());
        }
    }
}
// Pins the exact Debug / Hash / Ord behaviour of a singular commitment.
#[test]
#[allow(clippy::redundant_clone)]
fn derived_methods_singular() {
    let factory = ExtendedPedersenCommitmentFactory::default();
    let k = RistrettoSecretKey::from(1024);
    let value = 2048;
    let c1 = factory.commit_value(&k, value);
    // Test 'Debug' implementation
    assert_eq!(
        format!("{c1:?}"),
        "HomomorphicCommitment(601cdc5c97e94bb16ae56f75430f8ab3ef4703c7d89ca9592e8acadc81629f0e)"
    );
    // Test 'Clone' implementation
    let c2 = c1.clone();
    assert_eq!(c1, c2);
    // Test hash implementation
    // NOTE(review): DefaultHasher's algorithm is not guaranteed stable across Rust
    // releases, so this pinned digest may break on a toolchain upgrade — confirm.
    let mut hasher = DefaultHasher::new();
    c1.hash(&mut hasher);
    let result = format!("{:x}", hasher.finish());
    assert_eq!(&result, "699d38210741194e");
    // Test 'Ord' and 'PartialOrd' implementations
    // Commit to values around `value` until both orderings have been observed.
    let mut values = (value - 100..value).collect::<Vec<_>>();
    values.extend((value + 1..value + 101).collect::<Vec<_>>());
    let (mut tested_less_than, mut tested_greater_than) = (false, false);
    for val in values {
        let c3 = factory.commit_value(&k, val);
        assert_ne!(c2, c3);
        assert_ne!(c2.cmp(&c3), c3.cmp(&c2));
        if c2 > c3 {
            assert!(c3 < c2);
            assert!(matches!(c2.cmp(&c3), std::cmp::Ordering::Greater));
            assert!(matches!(c3.cmp(&c2), std::cmp::Ordering::Less));
            tested_less_than = true;
        }
        if c2 < c3 {
            assert!(c3 > c2);
            assert!(matches!(c2.cmp(&c3), std::cmp::Ordering::Less));
            assert!(matches!(c3.cmp(&c2), std::cmp::Ordering::Greater));
            tested_greater_than = true;
        }
        // Stop as soon as both directions have been exercised.
        if tested_less_than && tested_greater_than {
            break;
        }
    }
    assert!(
        tested_less_than && tested_greater_than,
        "Try extending the range of values to compare"
    );
}
// Pins the exact Debug / Hash / Ord behaviour of extended commitments for
// every extension degree. Note the blinding keys are deliberately all equal
// (vec! clone) so the pinned constants stay deterministic.
#[test]
fn derived_methods_extended() {
    for extension_degree in EXTENSION_DEGREE {
        let factory = ExtendedPedersenCommitmentFactory::new_with_extension_degree(extension_degree).unwrap();
        let k_vec = vec![RistrettoSecretKey::from(1024); extension_degree as usize];
        let value = 2048;
        let c1 = factory.commit_value_extended(&k_vec, value).unwrap();
        // Test 'Clone` implementation
        let c2 = c1.clone();
        assert_eq!(c1, c2);
        // Test 'Debug' and hashing implementations
        // NOTE(review): DefaultHasher's algorithm is not guaranteed stable across Rust
        // releases, so these pinned digests may break on a toolchain upgrade — confirm.
        let mut hasher = DefaultHasher::new();
        c1.hash(&mut hasher);
        match extension_degree {
            ExtensionDegree::DefaultPedersen => {
                assert_eq!(
                    format!("{c1:?}"),
                    "HomomorphicCommitment(601cdc5c97e94bb16ae56f75430f8ab3ef4703c7d89ca9592e8acadc81629f0e)"
                );
                let result = format!("{:x}", hasher.finish());
                assert_eq!(&result, "699d38210741194e");
            },
            ExtensionDegree::AddOneBasePoint => {
                assert_eq!(
                    format!("{c1:?}"),
                    "HomomorphicCommitment(f0019440ae20b39ba55a88f27ebd7ca56857251beca1047a3b195dc93642d829)"
                );
                let result = format!("{:x}", hasher.finish());
                assert_eq!(&result, "fb68d75431b3a0b0");
            },
            ExtensionDegree::AddTwoBasePoints => {
                assert_eq!(
                    format!("{c1:?}"),
                    "HomomorphicCommitment(b09789e597115f592491009f18ef4ec13ba7018a77e9df1729f1e2611b237a06)"
                );
                let result = format!("{:x}", hasher.finish());
                assert_eq!(&result, "61dd716dc29a5fc5");
            },
            ExtensionDegree::AddThreeBasePoints => {
                assert_eq!(
                    format!("{c1:?}"),
                    "HomomorphicCommitment(f8356cbea349191683f84818ab5203e48b04fef42f812ddf7d9b92df966c8473)"
                );
                let result = format!("{:x}", hasher.finish());
                assert_eq!(&result, "49e988f621628ebc");
            },
            ExtensionDegree::AddFourBasePoints => {
                assert_eq!(
                    format!("{c1:?}"),
                    "HomomorphicCommitment(1e113af7e33ac15b328e298239f3796e5061a0863d1a69e297ee8d81ee6e1f22)"
                );
                let result = format!("{:x}", hasher.finish());
                assert_eq!(&result, "aff1b9967c7bffe7");
            },
            ExtensionDegree::AddFiveBasePoints => {
                assert_eq!(
                    format!("{c1:?}"),
                    "HomomorphicCommitment(126844ee6889dd065ccc0c47e16ea23697f72e6ecce70f5e3fef320d843c332e)"
                );
                let result = format!("{:x}", hasher.finish());
                assert_eq!(&result, "e27df20b2dd195ee");
            },
        }
        // Test 'Ord' and 'PartialOrd' implementations
        // Commit to values around `value` until both orderings have been observed.
        let mut values = (value - 100..value).collect::<Vec<_>>();
        values.extend((value + 1..value + 101).collect::<Vec<_>>());
        let (mut tested_less_than, mut tested_greater_than) = (false, false);
        for val in values {
            let c3 = factory.commit_value_extended(&k_vec, val).unwrap();
            assert_ne!(c2, c3);
            assert_ne!(c2.cmp(&c3), c3.cmp(&c2));
            if c2 > c3 {
                assert!(c3 < c2);
                assert!(matches!(c2.cmp(&c3), std::cmp::Ordering::Greater));
                assert!(matches!(c3.cmp(&c2), std::cmp::Ordering::Less));
                tested_less_than = true;
            }
            if c2 < c3 {
                assert!(c3 > c2);
                assert!(matches!(c2.cmp(&c3), std::cmp::Ordering::Less));
                assert!(matches!(c3.cmp(&c2), std::cmp::Ordering::Greater));
                tested_greater_than = true;
            }
            // Stop as soon as both directions have been exercised.
            if tested_less_than && tested_greater_than {
                break;
            }
        }
        assert!(
            tested_less_than && tested_greater_than,
            "Try extending the range of values to compare"
        );
    }
}
}
|
pub mod connection_details_page;
pub mod initial_page;
pub mod inspect_page;
pub mod notifications_page;
pub mod overview_page;
pub mod settings_language_page;
pub mod settings_notifications_page;
pub mod settings_style_page;
pub mod types;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.