text stringlengths 8 4.13M |
|---|
#![no_std]
#![feature(start)]
#![feature(asm)]
#![no_main]
use core::panic::PanicInfo;
#[no_mangle]
pub extern "C" fn _start() {
    // Program entry point (no libc startup in a #![no_main] binary).
    // NOTE(review): syscall numbers here target a custom kernel reached via
    // `int 80h` — confirm 0 against that kernel's syscall table.
    syscall(0, 0, 0);
    main();
}
#[panic_handler]
pub fn panic(_: &PanicInfo) -> ! {
    // NOTE(review): presumably syscall 1 asks the kernel to terminate the
    // task — confirm against the kernel's syscall table. rax is saved and
    // restored around the trap.
    unsafe { asm!("push rax", "mov rax, 1", "int 80h", "pop rax") }
    // Push a null return address and `ret` to it; only reached if the
    // syscall above ever returns.
    unsafe { asm!("push 0", "ret") }
    // Diverge so the `!` return type is satisfied in all cases.
    loop {}
}
#[inline(never)]
#[allow(clippy::empty_loop)]
fn main() {
    // Exercise a handful of kernel services. NOTE(review): the meaning of
    // each syscall number (0, 1, 2, 5 = fork?, 6, 20 = debug?) comes from
    // the custom kernel — confirm against its syscall table.
    syscall(0, 0, 0);
    syscall(0, 0, 0);
    syscall(1, 0, 0);
    syscall(2, 0, 0);
    let fork = syscall(5, 0, 0);
    // Debugs the pid of child
    syscall(20, fork as u64, 0);
    // Conventionally fork returns 0 in the child.
    if fork == 0 {
        syscall(6, 0, 0);
    }
    // Never return; there is nothing to return to.
    loop {}
}
/// Issue a kernel syscall through `int 80h`.
///
/// `nb` is the syscall number (rax); `arg0`/`arg1` travel in rdi/rsi.
/// Returns the value the kernel leaves in rax.
extern "C" fn syscall(nb: u64, arg0: u64, arg1: u64) -> usize {
    let res;
    unsafe {
        // BUGFIX: the original used `in(reg)` operands and `mov`-ed them
        // into rax/rdi/rsi by hand. The register allocator is free to pick
        // rax/rdi/rsi for those inputs, so an early `mov` could clobber a
        // not-yet-read input. Pin each operand to the register the kernel
        // expects instead.
        asm!(
            "int 80h",
            in("rax") nb,
            in("rdi") arg0,
            in("rsi") arg1,
            lateout("rax") res,
        )
    };
    res
}
|
use std::{convert::TryInto, path::PathBuf};
use anyhow::{bail, Result};
use mongodb::{
bson::{doc, Bson, Document},
Client,
Collection,
Database,
};
use serde_json::Value;
use crate::{
bench::{drop_database, Benchmark, COLL_NAME, DATABASE_NAME},
fs::read_to_string,
};
/// State for the `find_one` micro-benchmark: one collection pre-seeded
/// with `num_iter` documents (see `Benchmark::setup`).
pub struct FindOneBenchmark {
    // Handle to the benchmark database (dropped in `teardown`).
    db: Database,
    // Number of documents inserted and of `find_one` calls per task run.
    num_iter: usize,
    // The seeded collection queried by `do_task`.
    coll: Collection<Document>,
    // Connection string, kept so `teardown` can reconnect for the drop.
    uri: String,
}
/// Specifies the options to a `FindOneBenchmark::setup` operation.
pub struct Options {
    /// Number of documents to insert and later query.
    pub num_iter: usize,
    /// Path to the JSON file holding the template document.
    pub path: PathBuf,
    /// MongoDB connection string.
    pub uri: String,
}
#[async_trait::async_trait]
impl Benchmark for FindOneBenchmark {
    type Options = Options;

    /// Drops any stale benchmark database, then inserts `num_iter` copies
    /// of the JSON test document keyed `_id = 0..num_iter`.
    async fn setup(options: Self::Options) -> Result<Self> {
        let client = Client::with_uri_str(&options.uri).await?;
        let db = client.database(&DATABASE_NAME);
        drop_database(options.uri.as_str(), DATABASE_NAME.as_str()).await?;
        let num_iter = options.num_iter;
        // `serde_json::from_str` only reads the input; the original
        // needlessly made the buffer mutable and passed `&mut file`.
        let file = read_to_string(&options.path).await?;
        let json: Value = serde_json::from_str(&file)?;
        let mut doc = match json.try_into()? {
            Bson::Document(doc) => doc,
            _ => bail!("invalid json test file"),
        };
        let coll = db.collection(&COLL_NAME);
        for i in 0..num_iter {
            // Overwrite `_id` each round so every insert is a new document.
            doc.insert("_id", i as i32);
            coll.insert_one(doc.clone(), None).await?;
        }
        Ok(FindOneBenchmark {
            db,
            num_iter,
            coll,
            uri: options.uri,
        })
    }

    /// Issues one `find_one` by `_id` for each seeded document.
    async fn do_task(&self) -> Result<()> {
        for i in 0..self.num_iter {
            self.coll
                .find_one(Some(doc! { "_id": i as i32 }), None)
                .await?;
        }
        Ok(())
    }

    /// Removes the benchmark database created by `setup`.
    async fn teardown(&self) -> Result<()> {
        drop_database(self.uri.as_str(), self.db.name()).await?;
        Ok(())
    }
}
|
pub struct PhysicsSystem {
}
impl System for PhysicsSystem {
fn execute(&self, &mut World);
}
|
use log::trace;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum State {
    // Block is free and none of its children are in use.
    Vacant,
    // Block has been split; look at its children for the actual state.
    Split,
    // Block is allocated; the payload is its reference count (starts at 0,
    // see `BuddyAllocator::alloc`).
    Occupied(usize),
}
/// Buddy allocator over a memory page of `size` bytes, tracked as a full
/// binary tree of block states stored level-by-level in `buddies`.
pub struct BuddyAllocator {
    // Total managed size in bytes.
    size: usize,
    // Smallest allocatable block: `size >> max_order` bytes.
    unit: usize,
    // Depth of the buddy tree (number of split levels).
    max_order: u32,
    // Heap-style array of `2^(max_order+1) - 1` node states; level `r`
    // (reversed order) starts at index `2^r - 1`.
    buddies: Vec<State>,
}
impl BuddyAllocator {
/// Create an allocator managing `size` bytes whose smallest block is
/// `size >> max_order` bytes.
///
/// # Panics
/// Panics when `size >> max_order` is zero, i.e. `max_order` is too large
/// for the managed size.
pub fn new(size: usize, max_order: u32) -> BuddyAllocator {
    let unit = size >> max_order;
    assert!(unit != 0, "too much order");
    // A full binary tree with `max_order + 1` levels has
    // `2^(max_order + 1) - 1` nodes.
    let nelem = (1 << (max_order + 1)) - 1;
    BuddyAllocator {
        // Field-init shorthand and `vec!` replace the redundant
        // `field: field` pairs and the map-over-range of the original.
        size,
        unit,
        max_order,
        buddies: vec![State::Vacant; nelem],
    }
}
/// True when nothing is allocated: the root block is still `Vacant`.
pub fn is_unused(&self) -> bool {
    matches!(self.get_state(0, 0), State::Vacant)
}
#[inline]
// Read the state of block `i` on level `rorder` (reversed order number:
// 0 is the root level, `max_order` the unit level).
fn get_state(&self, i: usize, rorder: u32) -> State {
    // Level `rorder` holds `2^rorder` blocks...
    let len = 1 << rorder;
    debug_assert!(i < len);
    // ...and starts at index `2^rorder - 1` in the heap-style array.
    let base = len - 1;
    self.buddies[base + i]
}
#[inline]
// Write the state of block `i` on level `rorder`; mirror of `get_state`.
fn set_state(&mut self, i: usize, rorder: u32, state: State) {
    let len = 1 << rorder;
    debug_assert!(i < len);
    // Start index of level `rorder` in the heap-style array.
    let base = len - 1;
    self.buddies[base + i] = state;
}
/// Allocate a piece of memory in the managed memory page. Returns the
/// address (offset from the beginning of the memory page) to the allocated
/// memory.
///
/// WARN: It should be noted that the reference count is initially set to 0.
/// The user code MUST `refer` to the base address of the allocated memory
/// otherwise the mechanism will break.
pub fn alloc(&mut self, alloc_size: usize, align: usize) -> Option<usize> {
    const NBIT_USIZE: u32 = (std::mem::size_of::<usize>() * 8) as u32;
    // Reject invalid sizes right away.
    if alloc_size == 0 || alloc_size > self.size { return None }
    // BUGFIX: the original asserted `!align.is_power_of_two()`, which fires
    // exactly for the supported power-of-two alignments. The invariant is
    // that `align` IS a power of two (as the message itself says).
    debug_assert!(align.is_power_of_two(),
        "doesn't support non pow-of-2 alignment");
    // Step of each iteration.
    let step = (align + self.unit - 1) / self.unit;
    // The number of units needed to contain the allocation.
    let nunit = (alloc_size + self.unit - 1) / self.unit;
    // The length of integer needed to describe the allocation size. With
    // this number of bits the user can address any unit within the
    // allocation. This is also the order number of the allocation.
    let nbit_addr = NBIT_USIZE - (nunit - 1).leading_zeros();
    // Calculate the reversed order number, which is:
    let rorder = self.max_order - nbit_addr;
    // Number of blocks in the order.
    let nblock = 1 << rorder;
    // Base index to the order.
    let order_base = nblock - 1;
    // Index to the current working block within the order.
    let mut i = 0;
    while i < nblock {
        // The order where the topmost vacant block in the working block's
        // parent chain is located.
        let mut top_vacant_rorder = 0;
        if self.buddies[order_base + i] != State::Vacant {
            // The block is not vacant or has been split in to sub-blocks.
            // Don't care.
            i += step;
        } else if let Some(occupied_rorder) = (0..rorder).into_iter().rev()
            .filter_map(|cur_rorder| {
                let cur_order = self.max_order - cur_rorder;
                match self.get_state(i >> cur_order, cur_rorder) {
                    State::Occupied(_) => Some(cur_rorder),
                    State::Vacant => { top_vacant_rorder = cur_rorder; None },
                    _ => None,
                }
            })
            .next() {
            // Bubble up and check if any of the parent block has already
            // been occupied. If so, skip the entire parent block.
            let delta_rorder = rorder - occupied_rorder;
            i >>= delta_rorder;
            i += step;
            i <<= delta_rorder;
        } else {
            // Found a vacant block. Occupy the block and split higher level
            // blocks.
            self.buddies[order_base + i] = State::Occupied(0);
            for cur_rorder in top_vacant_rorder..rorder {
                let cur_order = self.max_order - cur_rorder;
                self.set_state(i >> cur_order, cur_rorder, State::Split);
            }
            let offset = i * (self.unit << nbit_addr);
            trace!("allocated {} bytes at offset {:#x}", alloc_size, offset);
            return Some(offset);
        }
    }
    None
}
// Walk the parent chain of the block containing `addr` from the largest
// plausible allocation downwards and return `(block index, rorder)` of the
// `Occupied` node, or `None` when the address is unaddressable or free.
fn get_addr_occupied_location(&self, addr: usize) -> Option<(usize, u32)> {
    // Reject unaddressable locations right away.
    if addr >= self.size || addr % self.unit != 0 { return None }
    let unit_idx = addr / self.unit;
    // Order number and reversed order number will be updated with actual
    // values. An allocation at this address can be at most of order
    // `trailing_zeros(unit_idx)` (alignment bound), capped at `max_order`.
    let nbit_addr = unit_idx.trailing_zeros().min(self.max_order);
    let mut rorder = self.max_order - nbit_addr;
    // Try getting the actual allocation order number.
    for cur_order in (0..=nbit_addr).into_iter().rev() {
        let i = unit_idx >> cur_order;
        rorder = self.max_order - cur_order;
        match self.get_state(i, rorder) {
            // This address hasn't been allocated yet. Cast no side-effect.
            State::Vacant => return None,
            // Discovered the actual order the memory has been allocated.
            State::Occupied(_) => return Some((i, rorder)),
            _ => {},
        }
    }
    None
}
// Release a piece of allocated memory located at `addr`. If the memory
// address has not been allocated yet, the method works as an no-op. `true`
// is returned when the memory needs to be freed since the reference count
// is reduced to 0.
pub fn free(&mut self, addr: usize) -> Option<bool> {
    let (i, rorder) = self.get_addr_occupied_location(addr)?;
    match self.get_state(i, rorder) {
        // Reference count drops to 0 (counts start at 0 — see `alloc`), so
        // no referrer remains: release the block and merge vacant buddies.
        //
        // BUGFIX: this release arm must precede the generic binding arm.
        // In the original, `State::Occupied(ref_count)` came first and
        // matched every occupied block, leaving `State::Occupied(1)`
        // unreachable and underflowing `ref_count - 1` for count 0.
        State::Occupied(ref_count) if ref_count <= 1 => {
            self.set_state(i, rorder, State::Vacant);
            let unit_idx = addr / self.unit;
            for cur_rorder in 0..rorder {
                // We only have to check the neighbors' state because the
                // states of parent-blocks are managed by us. Here we toggle
                // the LSB to switch to the other bisection.
                let cur_child_rorder = cur_rorder + 1;
                let cur_child_order = self.max_order - cur_child_rorder;
                let child_i = (unit_idx >> cur_child_order) ^ 1;
                // BUGFIX: `get_state` takes a *reversed* order number; the
                // original passed `cur_child_order` and probed the wrong
                // tree level.
                let cur_state = self.get_state(child_i, cur_child_rorder);
                if cur_state != State::Vacant { break }
                let cur_order = self.max_order - cur_rorder;
                let i = unit_idx >> cur_order;
                trace!("freed memory at offset {:#x}", addr);
                self.set_state(i, cur_rorder, State::Vacant);
            }
            Some(true)
        },
        // Still referenced elsewhere: just decrease the reference count.
        State::Occupied(ref_count) => {
            self.set_state(i, rorder, State::Occupied(ref_count - 1));
            trace!("decreased reference counting at offset {:#x}", addr);
            Some(false)
        },
        // `get_addr_occupied_location` only ever yields `Occupied` nodes.
        _ => unreachable!(),
    }
}
// Increase the reference count at `addr`. This method works as an no-op if
// the address hasn't been allocated yet.
pub fn refer(&mut self, addr: usize) -> Option<()> {
    let (i, rorder) = self.get_addr_occupied_location(addr)?;
    match self.get_state(i, rorder) {
        State::Occupied(ref_count) => {
            self.set_state(i, rorder, State::Occupied(ref_count + 1));
            trace!("increased reference counting at offset {:#x}", addr);
            Some(())
        },
        // `get_addr_occupied_location` only ever yields `Occupied` nodes.
        _ => unreachable!(),
    }
}
}
|
// The model that follows the db schema.
use crate::schema::producers;
use getset::{CopyGetters, Setters};
// Kind of producer; stored as its i16 discriminant (see
// `Producer::prod_type`).
#[repr(i16)]
#[derive(Serialize, Deserialize)]
pub enum ProducerType {
    GENERIC = 1,
    PLANT = 2,
    COFFEE = 3,
    MEAT = 4,
    MILL = 5
}
// Diesel model mapped onto the `producers` table; getset generates public
// setters for the private fields.
#[table_name="producers"]
#[derive(Setters, CopyGetters, Serialize, Deserialize, AsChangeset, Queryable, Insertable)]
#[getset(set = "pub")]
pub struct Producer {
    pub id: uuid::Uuid,
    pub name: String,
    // Free-form address object; schema defined by the JSON column.
    address: serde_json::Value,
    // NOTE(review): presumably ids of markets this producer sells at —
    // confirm against the schema.
    markets: Option<Vec<uuid::Uuid>>,
    email: Option<String>,
    phone: Option<String>,
    website: Option<String>,
    description: Option<String>,
    // Discriminant of `ProducerType`, stored raw as i16.
    prod_type: Option<i16>
}
//! At the time of writing the only way to ensure some cleanup code is
//! run is by implementing Drop. Drop has some limitations however: you
//! can't pass extra arguments or report errors from it. Even the
//! standard library faces this problem:
//!
//! ```ignore
//! impl Drop for FileDesc {
//! fn drop(&mut self) {
//! // Note that errors are ignored when closing a file descriptor. The
//! // reason for this is that if an error occurs we don't actually know if
//! // the file descriptor was closed or not, and if we retried (for
//! // something like EINTR), we might close another valid file descriptor
//! // opened after we closed ours.
//! let _ = unsafe { libc::close(self.fd) };
//! }
//! }
//! ```
//!
//! Something called linear types would help, see [this
//! thread](https://users.rust-lang.org/t/prevent-drop-at-compile-time/20508)
//! for more information, but that is not available as of September
//! 2018\. For now the `prevent_drop!` macro allows you to detect if a
//! value would be dropped at compile time if and only if the compiler
//! optimization elides the drop function call. This means that you have
//! to enable optimizations for it to work. Unfortunately, if the
//! compiler is unable to prove a drop call can be elided when in theory
//! it could be, `prevent_drop` will report a false positive. Try to
//! restructure your code so you can take ownership of the values that
//! you are dropping. Alternatively, consider falling back to a run-time
//! check.
//!
//! ## Example
//!
//! This does not compile because `r` is dropped.
//!
//! ```compile_error
//! #[macro_use]
//! extern crate prevent_drop;
//!
//! struct Resource;
//!
//! prevent_drop!(Resource, prevent_drop_Resource);
//!
//! fn main() {
//! let r = Resource;
//! // `r` is dropped.
//! }
//! ```
//!
//! A more elaborate example. Note that this function __must__ consume
//! the value, otherwise you cannot prevent it from dropping. You might
//! need to put your struct in an `Option` to achieve this.
//!
//! ```
//! #[macro_use]
//! extern crate prevent_drop;
//!
//! struct Resource;
//! struct Context;
//! struct Error;
//!
//! impl Resource {
//! fn drop(self, context: &Context) -> Error {
//! let zelf = ::std::mem::ManuallyDrop::new(self);
//! // Perform cleanup.
//! // Return error.
//! Error
//! }
//! }
//!
//! prevent_drop!(Resource, prevent_drop_Resource);
//!
//! fn main() {
//! let c = Context;
//! let r = Resource;
//! r.drop(&c);
//! }
//! ```
//!
//! ## Configuration
//!
//! By default, `prevent_drop` only works when optimizations are
//! enabled. The macro relies on optimizations to remove the drop
//! function if it isn't called. To enable optimizations for debug
//! builds and tests you can use the following.
//!
//! ```ignore
//! [profile.test]
//! opt-level = 1
//! ```
//!
//! If that is not aggressive enough, you can try enabling maximum
//! optimization as well as thin link time optimization.
//!
//! ```ignore
//! [profile.test]
//! opt-level = 3
//! lto = "thin"
//! incremental = false
//! ```
//!
//! Alternatively, you can enable either the `abort` or the `panic`
//! feature. Like the names suggest this will make `prevent_drop!` use
//! `prevent_drop_abort!` or `prevent_drop_panic!` respectively. To set
//! the strategy to panic for example, edit your `Cargo.toml` like this:
//!
//! ```ignore
//! [dependencies.prevent_drop]
//! version = "..."
//! features = ["panic"] # or "abort"
//! ```
//!
//! Review the documentation for the different prevent_drop strategies
//! for advice on when to use which one.
#![doc(html_root_url = "https://docs.rs/prevent_drop")]
#![deny(missing_docs)]
#![cfg_attr(test, deny(warnings))]
/// Implement Drop for a type that will not compile if it
/// gets called.
///
/// This is the default strategy. It declares an `extern` function that
/// should not have an implementation, causing the linker to emit an
/// error. Either `std::mem::ManuallyDrop` or `std::mem::forget` can be
/// used to prevent values from being dropped. Optimization is required
/// to elide the drop calls.
///
/// Since this is a compile-time check you cannot and need not test your
/// code for potential drops as it will not compile.
#[macro_export]
macro_rules! prevent_drop_link {
    ($T:ty, $label:ident) => {
        // Deliberately left without a definition anywhere: if a drop call
        // survives optimization, linking fails on this undefined symbol.
        extern "C" {
            fn $label();
        }
        impl Drop for $T {
            #[inline]
            fn drop(&mut self) {
                unsafe { $label() };
            }
        }
    };
}
/// Implement Drop for a type that will abort if it gets called.
///
/// The abort strategy simply aborts the process. It is very user
/// unfriendly, because it doesn't report a proper error message and it
/// doesn't unwind like panic, but it is easier to spot in intermediate
/// code or the binary. You can use it on a type if you guarantee that
/// it will never be dropped but the compiler is unable to deduce this.
///
/// Since this is a run-time check you need to have proper tests to
/// discover all potential drops.
#[macro_export]
macro_rules! prevent_drop_abort {
    ($T:ty, $label:ident) => {
        // `no_mangle` + `inline(never)` keep the symbol visible so the
        // abort site is easy to locate in the binary.
        #[inline(never)]
        #[no_mangle]
        #[allow(non_snake_case, private_no_mangle_fns)]
        pub fn $label() {
            ::std::process::abort();
        }
        impl Drop for $T {
            #[inline]
            fn drop(&mut self) {
                $label();
            }
        }
    };
}
/// Implement Drop for a type that will panic if it gets called.
///
/// The panic strategy panics with a customizable error message only if
/// the thread is not already panicking. The reason for this is that
/// usually the original panic is more informative. If we are already
/// panicking, leaking some resources is not as important.
///
/// Since this is a run-time check you need to have proper tests to
/// discover all potential drops.
#[macro_export]
macro_rules! prevent_drop_panic {
    // Two-argument form: build a default message from the type name.
    ($T:ty, $label:ident) => {
        prevent_drop_panic!(
            $T,
            $label,
            concat!(
                "Forgot to explicitly drop an instance of ",
                stringify!($T),
                "."
            )
        );
    };
    ($T:ty, $label:ident, $msg:expr) => {
        // Keep the symbol visible so drop sites are easy to spot.
        #[inline(never)]
        #[no_mangle]
        #[allow(non_snake_case, private_no_mangle_fns)]
        pub fn $label() {
            // Idiom fix: `!panicking()` instead of `panicking() == false`.
            // Skip the panic while already unwinding — the original panic
            // is more informative, and a double panic would abort.
            if !::std::thread::panicking() {
                panic!($msg);
            }
        }
        impl Drop for $T {
            #[inline]
            fn drop(&mut self) {
                $label();
            }
        }
    };
}
/// Implement Drop for a type so that instances of it cannot be dropped.
///
/// Variant selected when neither strategy feature is enabled and
/// optimizations are on (`opt_level_gt_0`): delegates to the link-error
/// strategy.
#[cfg(all(not(feature = "abort"), not(feature = "panic"), opt_level_gt_0))]
#[macro_export]
macro_rules! prevent_drop {
    ($T:ty, $label:ident) => {
        prevent_drop_link!($T, $label);
    };
    // The message argument only matters for the panic strategy; ignore it.
    ($T:ty, $label:ident, $msg:expr) => {
        prevent_drop!($T, $label);
    };
}
#[cfg(all(not(feature = "abort"), not(feature = "panic"), not(opt_level_gt_0)))]
#[macro_export]
/// Implement Drop for a type so that instances of it cannot
/// be dropped.
///
/// By default, this macro redirects to `prevent_drop_link`. If the
/// `abort` feature is enabled it will redirect to `prevent_drop_abort`.
/// If the `panic` feature is enabled it will redirect to
/// `prevent_drop_panic`.
macro_rules! prevent_drop {
    // Without optimizations the link strategy cannot work (drop calls are
    // never elided), so fail the build with an actionable message.
    ($T:ty, $label:ident) => {
        compile_error!("The `prevent_drop!` macro requires you to enable optimizations or to enable either the `abort` or the `panic` feature.");
    };
    ($T:ty, $label:ident, $msg:expr) => {
        prevent_drop!($T, $label);
    };
}
/// Implement Drop for a type so that instances of it cannot be dropped.
///
/// Variant selected by the `abort` feature: delegates to the abort
/// strategy.
#[cfg(all(feature = "abort", not(feature = "panic")))]
#[macro_export]
macro_rules! prevent_drop {
    ($T:ty, $label:ident) => {
        prevent_drop_abort!($T, $label);
    };
    // The message argument only matters for the panic strategy; ignore it.
    ($T:ty, $label:ident, $msg:expr) => {
        prevent_drop!($T, $label);
    };
}
/// Implement Drop for a type so that instances of it cannot be dropped.
///
/// Variant selected by the `panic` feature: delegates to the panic
/// strategy, forwarding the optional custom message.
#[cfg(all(not(feature = "abort"), feature = "panic"))]
#[macro_export]
macro_rules! prevent_drop {
    ($T:ty, $label:ident) => {
        prevent_drop_panic!($T, $label);
    };
    ($T:ty, $label:ident, $msg:expr) => {
        prevent_drop_panic!($T, $label, $msg);
    };
}
// The strategies are mutually exclusive; enabling both is a user error.
#[cfg(all(feature = "abort", feature = "panic"))]
compile_error!("You cannot use both the abort and the panic strategies at the same time. Choose one or the other.");
#[cfg(test)]
mod tests {
    struct Resource;
    struct Context;
    struct Error;
    impl Resource {
        // Explicit consuming drop: wraps `self` in `ManuallyDrop` so the
        // `Drop` impl generated by `prevent_drop!` never runs.
        fn drop(self, _context: &Context) -> Error {
            let _self = ::std::mem::ManuallyDrop::new(self);
            Error
        }
    }
    prevent_drop!(Resource, prevent_drop_Resource);
    #[test]
    fn test() {
        let c = Context;
        let r = Resource;
        r.drop(&c);
    }
    #[derive(Debug)]
    struct PanicStrategy;
    prevent_drop_panic!(PanicStrategy, forget_to_explicitly_drop_an_instance_of_PanicStrategy);
    #[test]
    #[should_panic(expected = "Forgot to explicitly drop an instance of PanicStrategy.")]
    fn prevent_drop_panic_panics() {
        let x = PanicStrategy;
        ::std::mem::drop(x);
    }
    #[test]
    #[should_panic(expected = "Something else happened that I need to know about!")]
    #[allow(unreachable_code, unused_variables)]
    fn prevent_drop_panic_does_not_panic_while_panicking() {
        // The drop of `x` during unwinding must NOT raise a second panic;
        // the expected message is the one from the explicit `panic!`.
        let x = PanicStrategy;
        panic!("Something else happened that I need to know about!");
        ::std::mem::drop(x);
    }
    #[test]
    fn prevent_drop_panic_does_not_panic_if_value_is_dropped() {
        // `ManuallyDrop` suppresses the generated `Drop`, so no panic.
        let _ = ::std::mem::ManuallyDrop::new(PanicStrategy);
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// Host : Object representing a host.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Host {
    /// Host aliases collected by Datadog.
    #[serde(rename = "aliases", skip_serializing_if = "Option::is_none")]
    pub aliases: Option<Vec<String>>,
    /// The Datadog integrations reporting metrics for the host.
    #[serde(rename = "apps", skip_serializing_if = "Option::is_none")]
    pub apps: Option<Vec<String>>,
    /// AWS name of your host.
    #[serde(rename = "aws_name", skip_serializing_if = "Option::is_none")]
    pub aws_name: Option<String>,
    /// The host name.
    #[serde(rename = "host_name", skip_serializing_if = "Option::is_none")]
    pub host_name: Option<String>,
    /// The host ID.
    #[serde(rename = "id", skip_serializing_if = "Option::is_none")]
    pub id: Option<i64>,
    /// If a host is muted or unmuted.
    #[serde(rename = "is_muted", skip_serializing_if = "Option::is_none")]
    pub is_muted: Option<bool>,
    /// Last time the host reported a metric data point.
    #[serde(rename = "last_reported_time", skip_serializing_if = "Option::is_none")]
    pub last_reported_time: Option<i64>,
    /// Host metadata; see `HostMeta`. Boxed to keep this struct small.
    #[serde(rename = "meta", skip_serializing_if = "Option::is_none")]
    pub meta: Option<Box<crate::models::HostMeta>>,
    /// Host metrics; see `HostMetrics`. Boxed to keep this struct small.
    #[serde(rename = "metrics", skip_serializing_if = "Option::is_none")]
    pub metrics: Option<Box<crate::models::HostMetrics>>,
    /// Timeout of the mute applied to your host.
    #[serde(rename = "mute_timeout", skip_serializing_if = "Option::is_none")]
    pub mute_timeout: Option<i64>,
    /// The host name.
    #[serde(rename = "name", skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Source or cloud provider associated with your host.
    #[serde(rename = "sources", skip_serializing_if = "Option::is_none")]
    pub sources: Option<Vec<String>>,
    /// List of tags for each source (AWS, Datadog Agent, Chef..).
    #[serde(rename = "tags_by_source", skip_serializing_if = "Option::is_none")]
    pub tags_by_source: Option<::std::collections::HashMap<String, Vec<String>>>,
    /// Displays UP when the expected metrics are received and displays `???` if no metrics are received.
    #[serde(rename = "up", skip_serializing_if = "Option::is_none")]
    pub up: Option<bool>,
}
impl Host {
    /// Object representing a host.
    ///
    /// Returns a `Host` with every optional field unset; fields are filled
    /// in by deserialization or by the caller.
    pub fn new() -> Host {
        Host {
            aliases: None,
            apps: None,
            aws_name: None,
            host_name: None,
            id: None,
            is_muted: None,
            last_reported_time: None,
            meta: None,
            metrics: None,
            mute_timeout: None,
            name: None,
            sources: None,
            tags_by_source: None,
            up: None,
        }
    }
}
|
use amethyst::ecs::{Component, NullStorage};
/// Marker component tagging the player-controlled entity; carries no data,
/// hence the zero-sized `NullStorage`.
#[derive(Debug, Default)]
pub struct Player;
impl Component for Player {
    type Storage = NullStorage<Self>;
}
|
use std::env;
use std::fs;
use std::path::PathBuf;
// Build-script exercising many `cc` crate features in sequence; the test
// module links against the libraries compiled here.
fn main() {
    let out = PathBuf::from(env::var_os("OUT_DIR").unwrap());
    // Start from a clean OUT_DIR so stale artifacts never leak between runs.
    fs::remove_dir_all(&out).unwrap();
    fs::create_dir(&out).unwrap();
    // Flags: one supported, one intentionally bogus (must be skipped), plus
    // valueless and valued defines.
    cc::Build::new()
        .file("src/foo.c")
        .flag_if_supported("-Wall")
        .flag_if_supported("-Wfoo-bar-this-flag-does-not-exist")
        .define("FOO", None)
        .define("BAR", "1")
        .compile("foo");
    // Multiple sources plus an include directory into one archive.
    cc::Build::new()
        .file("src/bar1.c")
        .file("src/bar2.c")
        .include("src/include")
        .compile("bar");
    // Pick the per-architecture assembly file: `.asm` for MSVC, `.S`
    // (preprocessed GAS) elsewhere.
    let target = std::env::var("TARGET").unwrap();
    let file = target.split("-").next().unwrap();
    let file = format!(
        "src/{}.{}",
        file,
        if target.contains("msvc") { "asm" } else { "S" }
    );
    cc::Build::new().file(file).compile("asm");
    // C++ compilation path.
    cc::Build::new()
        .file("src/baz.cpp")
        .cpp(true)
        .compile("baz");
    if target.contains("windows") {
        cc::Build::new().file("src/windows.c").compile("windows");
    }
    // Test that the `windows_registry` module will set PATH by looking for
    // nmake which runs vanilla cl, and then also test it after we remove all
    // the relevant env vars from our own process.
    if target.contains("msvc") {
        let out = out.join("tmp");
        fs::create_dir(&out).unwrap();
        println!("nmake 1");
        let status = cc::windows_registry::find(&target, "nmake.exe")
            .unwrap()
            .env_remove("MAKEFLAGS")
            .arg("/fsrc/NMakefile")
            .env("OUT_DIR", &out)
            .status()
            .unwrap();
        assert!(status.success());
        fs::remove_dir_all(&out).unwrap();
        fs::create_dir(&out).unwrap();
        // Second run must succeed purely from registry discovery, with the
        // toolchain env vars wiped from this process.
        env::remove_var("PATH");
        env::remove_var("VCINSTALLDIR");
        env::remove_var("INCLUDE");
        env::remove_var("LIB");
        println!("nmake 2");
        let status = cc::windows_registry::find(&target, "nmake.exe")
            .unwrap()
            .env_remove("MAKEFLAGS")
            .arg("/fsrc/NMakefile")
            .env("OUT_DIR", &out)
            .status()
            .unwrap();
        assert!(status.success());
        println!("cargo:rustc-link-lib=msvc");
        println!("cargo:rustc-link-search={}", out.display());
    }
    // This tests whether we can build a library but not link it to the main
    // crate. The test module will do its own linking.
    cc::Build::new()
        .cargo_metadata(false)
        .file("src/opt_linkage.c")
        .compile("OptLinkage");
    // Preprocess-only path: the expanded source must contain the literal.
    let out = cc::Build::new().file("src/expand.c").expand();
    let out = String::from_utf8(out).unwrap();
    assert!(out.contains("hello world"));
}
|
extern crate colored;
use colored::*;
/// Demo of block expressions and the three loop forms, printing through
/// `show_xy`.
fn main() {
    // A block is an expression; its last (semicolon-free) line is its value.
    let x = {
        let y = five();
        y + (y * 2) // no semicolon dawg
    };
    let mut y = 1;
    println!("let's explore show_xy in a weird while loop:");
    while y <= 9 {
        show_xy(x, y);
        y += 1;
    }
    println!("\nhow about a for loop over an array:");
    let a = [10, 20, 25, 30, 50, 75];
    for n in a.iter() {
        show_xy(*n, n * n);
    }
    println!("\nlet's redo our first while as a for over a range:");
    // BUGFIX: `1..9` stops at 8, but the while loop above runs through 9;
    // an inclusive range makes the two loops actually equivalent.
    for y in 1..=9 {
        show_xy(x, y);
    }
}
// Returns the seed constant used by `main`'s arithmetic demo.
fn five() -> i32 {
    5
}
/// True exactly when `n` is the lucky number 7.
fn is_lucky(n: i32) -> bool {
    matches!(n, 7)
}
// Print `x` plainly, then delegate to `show_y` for the colorized `y`.
fn show_xy(x: i32, y: i32) {
    println!("the value of 'x' is {}", x);
    show_y(y);
}
// Print `y`, colored green when it is the lucky number.
fn show_y(y: i32) {
    // if is an expression 😎
    let message = "the value of 'y' is";
    // Both branches must yield the same type, hence `.normal()` on the
    // non-lucky path to match the `ColoredString` of `.green()`.
    let (message, y) = if is_lucky(y) {
        (message.green(), y.to_string().green())
    } else {
        (message.normal(), y.to_string().normal())
    };
    println!("{} {}", message, y);
}
|
use std::io;
use std::sync::mpsc;
use std::thread;
use termion::event::Key;
use termion::input::TermRead;
use std::time::Duration;
/// Message delivered to the UI loop: either user input or a timer tick.
pub enum Event<I> {
    Input(I),
    Tick,
}
/// Owns the receiving end of the event channel fed by the input and tick
/// threads spawned in `Events::new`.
pub struct Events {
    rx: mpsc::Receiver<Event<Key>>,
}
// TODO should either the sender or receiver use tokio's version?
impl Events {
    /// Spawn the input-reading and tick-producing threads and return the
    /// shared receiver. Threads exit on their own once the receiver (and
    /// thus the channel) is dropped, because `send` then fails.
    pub fn new() -> Events {
        let tick_rate = Duration::from_millis(250);
        let (tx, rx) = mpsc::channel();
        let _input_handle = {
            let tx = tx.clone();
            thread::spawn(move || {
                let stdin = io::stdin();
                // Forward every successfully-read key; stop when the
                // receiving side has gone away.
                for key in stdin.keys().flatten() {
                    if tx.send(Event::Input(key)).is_err() {
                        return;
                    }
                }
            })
        };
        let _tick_handle = {
            thread::spawn(move || {
                loop {
                    // TODO receive ticks
                    if tx.send(Event::Tick).is_err() {
                        break;
                    }
                    thread::sleep(tick_rate);
                }
            })
        };
        Events { rx }
    }
    /// Block until the next input or tick event arrives.
    pub fn next(&self) -> Result<Event<Key>, mpsc::RecvError> {
        self.rx.recv()
    }
}
|
use crate::node::{Clip, Fill, Real, Stroke, Transform, TransformMatrix};
/// A group node: optional paint overrides (stroke/fill/transparency) plus
/// a clip and a transform applied to its children.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct Group {
    pub id: Option<String>,
    pub transparency: Option<Real>,
    pub stroke: Option<Stroke>,
    pub fill: Option<Fill>,
    pub clip: Clip,
    pub transform: Transform,
}
impl Group {
    /// Node name of a group element.
    pub const NAME: &'static str = "group";

    /// Borrow the group's id, if one is set.
    pub fn id(&self) -> Option<&str> {
        // `as_deref` is the idiomatic shorthand for
        // `as_ref().map(|s| s.as_str())`.
        self.id.as_deref()
    }

    /// Recompute this group's global transform from the parent's global
    /// matrix, updating the clip's transform (when present) along the way.
    pub fn recalculate_transform(&mut self, parent_global: TransformMatrix) -> TransformMatrix {
        if let Some(transform) = self.clip.transform_mut() {
            transform.calculate_global(parent_global);
        }
        self.transform.calculate_global(parent_global)
    }

    /// True when the group overrides nothing: no stroke, no fill, and no
    /// transform of its own.
    pub fn empty_overrides(&self) -> bool {
        self.stroke.is_none() && self.fill.is_none() && self.transform.is_not_exist()
    }
}
|
use std::collections::HashMap;
use std::sync::mpsc::{channel, Sender, Receiver};
use std::thread::Builder;
use battle_state::BattleContext;
use login::AccountBox;
use net::{
OutPacket,
ServerSlot,
ServerSlotId,
SlotInMsg,
};
use sector_data::{SectorData, SectorId};
use sector_state::SectorState;
use vec::Vec2;
/// Server-side handle to one sector thread: the channels used to hand
/// accounts over and get them back, plus the sector's static data.
pub struct Sector {
    // Slot clients are transferred to when entering this sector.
    pub slot_id: ServerSlotId,
    // Hands an account over to the sector thread.
    pub to_sector: Sender<AccountBox>,
    // Receives accounts the sector thread sends back (e.g. jumping ships).
    pub from_sector: Receiver<AccountBox>,
    // Signalled by the sector thread once a handed-over account is accepted.
    pub ack: Receiver<()>,
    pub data: SectorData,
}
/// Top-level star-map service: owns its server slot and the handles to all
/// sector threads, keyed by sector id.
pub struct StarMapServer {
    slot: ServerSlot,
    sectors: HashMap<SectorId, Sector>,
}
impl StarMapServer {
/// Build the star map with two sectors, spawning one worker thread per
/// sector and wiring up the account-transfer and ack channels.
pub fn new(slot: ServerSlot) -> StarMapServer {
    let slot_id = slot.get_id();
    let mut sectors = HashMap::new();
    // Sector 0
    let (to_sector_sender, to_sector_receiver) = channel();
    let (from_sector_sender, from_sector_receiver) = channel();
    let (ack_sender, ack_receiver) = channel();
    let sector_slot = slot.create_slot();
    let sector_id = SectorId(0);
    sectors.insert(sector_id, Sector {
        slot_id: sector_slot.get_id(),
        to_sector: to_sector_sender,
        from_sector: from_sector_receiver,
        ack: ack_receiver,
        data: SectorData {
            id: sector_id,
            map_position: Vec2 { x: 50.0, y: 50.0 },
        },
    });
    Builder::new()
        .name(format!("sector_{}_thread", 0))
        .stack_size(8388608)
        .spawn(move || {
            let mut sector_state = SectorState::new(sector_slot, slot_id, BattleContext::new(vec!()), false);
            sector_state.run(from_sector_sender, to_sector_receiver, ack_sender, false);
        })
        // BUGFIX: `Builder::spawn` returns an io::Result that the original
        // silently discarded; a failed spawn would leave a dead sector.
        .expect("failed to spawn sector 0 thread");
    // Sector 1
    let (to_sector_sender, to_sector_receiver) = channel();
    let (from_sector_sender, from_sector_receiver) = channel();
    let (ack_sender, ack_receiver) = channel();
    let sector_slot = slot.create_slot();
    let sector_id = SectorId(1);
    sectors.insert(sector_id, Sector {
        slot_id: sector_slot.get_id(),
        to_sector: to_sector_sender,
        from_sector: from_sector_receiver,
        ack: ack_receiver,
        data: SectorData {
            id: sector_id,
            map_position: Vec2 { x: 100.0, y: 100.0 },
        },
    });
    Builder::new()
        .name(format!("sector_{}_thread", 1))
        .stack_size(8388608)
        .spawn(move || {
            let mut sector_state = SectorState::new(sector_slot, slot_id, BattleContext::new(vec!()), false);
            sector_state.run(from_sector_sender, to_sector_receiver, ack_sender, true);
        })
        .expect("failed to spawn sector 1 thread");
    StarMapServer {
        slot,
        sectors,
    }
}
/// Main service loop: handle slot messages, place newly logged-in accounts
/// into their sector, and forward ships jumping between sectors.
pub fn run(&mut self, account_receiver: Receiver<AccountBox>) {
    loop {
        if let Ok(slot_msg) = self.slot.try_receive() {
            match slot_msg {
                SlotInMsg::Joined(client_id) => {
                    println!("Client {} joined the star map", client_id);
                },
                // Packets are currently ignored on the star map.
                SlotInMsg::ReceivedPacket(_client_id, _packet) => {
                },
                _ => {},
            }
        }
        // A freshly logged-in account: send it the sector list, hand it to
        // its sector thread, then transfer the client connection.
        if let Ok(account) = account_receiver.try_recv() {
            let client_id = account.client_id.expect("This needs to have a client ID");
            let sector_data: Vec<SectorData> = self.sectors.iter().map(|(_, s)| s.data.clone()).collect();
            let mut sectors_packet = OutPacket::new();
            // BUGFIX: the original read `write(§or_data)` — a mojibake
            // of `&sector_data` (`&sect` collapsed into `§`).
            sectors_packet.write(&sector_data).ok().expect("Failed to write SectorData");
            self.slot.send(client_id, sectors_packet);
            let sector = &self.sectors[&account.sector];
            // Surface channel failures (a dead sector thread) instead of
            // silently dropping the account as the original did.
            sector.to_sector.send(account).expect("sector thread hung up");
            sector.ack.recv().expect("sector thread hung up before ack");
            self.slot.transfer_client(client_id, sector.slot_id);
        }
        // Send any jumping ships to their new sector
        for sector in self.sectors.values() {
            if let Ok(mut account) = sector.from_sector.try_recv() {
                let client_id = account.client_id.expect("This needs to have a client ID");
                let target_sector =
                    {
                        let ship = account.ship.as_mut().expect("Ship must exist");
                        ship.target_sector.take().expect("There must be a target sector")
                    };
                let sector = &self.sectors[&target_sector];
                sector.to_sector.send(account).expect("sector thread hung up");
                sector.ack.recv().expect("sector thread hung up before ack");
                self.slot.transfer_client(client_id, sector.slot_id);
            }
        }
    }
}
} |
use std::fmt::Formatter;
use std::string::FromUtf8Error;
/// `Error` represents custom errors in this crate.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum Error {
/// `None` represents for no error.
None,
/// `LogNotFound` when the specific log entry is not found
LogNotFound,
/// `PipelineReplicationNotSupported` can be returned by the transport to
/// signal that pipeline replication is not supported in general, and that
/// no error message should be produced.
PipeLineReplicationNotSupported,
/// `UnableToStoreLogs` unable to store logs within log store
UnableToStoreLogs(String),
/// `NotFound` when some else is not found, which is different from `LogNotFound`
NotFound,
/// `FromUtf8Error` when cannot convert UTF8 bytes to `String`
FromUtf8Error(FromUtf8Error),
/// local_id cannot be empty
EmptyLocalID,
/// heartbeat_timeout is too low
ShortHeartbeatTimeout,
/// election_timeout is too low
ShortElectionTimeout,
/// commit_timeout is too low
ShortCommitTimeout,
/// max_append_entries is too large
LargeMaxAppendEntries,
/// snapshot_interval is too short
ShortSnapshotInterval,
/// leader_lease_timeout is too short
ShortLeaderLeaseTimeout,
/// leader_lease_timeout cannot be larger than heartbeat timeout
LeaderLeaseTimeoutLargerThanHeartbeatTimeout,
/// election_timeout must be equal or greater than heartbeat timeout
ElectionTimeoutSmallerThanHeartbeatTimeout,
/// `ErrLeader` is returned when an operation can't be completed on a
/// leader node.
Leader,
/// `ErrNotLeader` is returned when an operation can't be completed on a
/// follower or candidate node.
NotLeader,
/// `ErrLeadershipLost` is returned when a leader fails to commit a log entry
/// because it's been deposed in the process.
LeadershipLost,
/// `AbortedByRestore` is returned when a leader fails to commit a log
/// entry because it's been superseded by a user snapshot restore.
AbortedByRestore,
/// `RaftShutdown` is returned when operations are requested against an
/// inactive Raft.
RaftShutdown,
/// `EnqueueTimeout` is returned when a command fails due to a timeout.
EnqueueTimeout,
/// `NothingNewToSnapshot` is returned when trying to create a snapshot
/// but there's nothing new committed to the `FSM` since we started.
NothingNewToSnapshot,
// /// `UnsupportedProtocol` is returned when an operation is attempted
// /// that's not supported by the current protocol version.
// #[error("operation not supported with current protocol version")]
// UnsupportedProtocol,
/// `CantBootstrap` is returned when attempt is made to bootstrap a
/// cluster that already has state present.
CantBootstrap,
/// `LeadershipTransferInProgress` is returned when the leader is rejecting
/// client requests because it is attempting to transfer leadership.
LeadershipTransferInProgress,
/// `DuplicateServerID` found duplicate server ID in configuration
DuplicateServerID(u64),
/// `DuplicateServerAddress` found duplicate server address in configuration
DuplicateServerAddress(String),
/// `NonVoter` need at least one voter in configuration
NonVoter,
/// `ConfigurationChanged` configuration changed since prev_index (latest is current_index)
ConfigurationChanged { current_index: u64, prev_index: u64 },
/// `FSMRestore` error
FSMRestore(String),
}
// `Error` provides its own `Display`; the blanket default methods of
// `std::error::Error` are sufficient, so the impl body is empty.
impl std::error::Error for Error {}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Error::None => write!(f, "no error"),
Error::LogNotFound => write!(f, "log not found"),
Error::PipeLineReplicationNotSupported => {
write!(f, "pipeline replication not supported")
}
Error::UnableToStoreLogs(s) => {
write!(f, "unable to store logs within log store, err: {}", *s)
}
Error::NotFound => write!(f, "not found"),
Error::EmptyLocalID => write!(f, "local_id cannot be empty"),
Error::ShortHeartbeatTimeout => write!(f, "heartbeat_timeout is too low"),
Error::ShortElectionTimeout => write!(f, "election_timeout is too low"),
Error::ShortCommitTimeout => write!(f, "commit_timeout is too low"),
Error::LargeMaxAppendEntries => write!(f, " max_append_entries is too large"),
Error::ShortSnapshotInterval => write!(f, "snapshot_interval is too short"),
Error::ShortLeaderLeaseTimeout => write!(f, "leader_lease_timeout is too short"),
Error::LeaderLeaseTimeoutLargerThanHeartbeatTimeout => write!(
f,
"leader_lease_timeout cannot be larger than heartbeat timeout"
),
Error::ElectionTimeoutSmallerThanHeartbeatTimeout => write!(
f,
"election_timeout must be equal or greater than heartbeat timeout"
),
Error::FromUtf8Error(e) => write!(f, "cannot convert UTF8 to String. From: {}", *e),
Error::Leader => write!(f, "node is the leader"),
Error::NotLeader => write!(f, "node is not the leader"),
Error::LeadershipLost => write!(f, "leadership lost while committing log"),
Error::AbortedByRestore => write!(f, "snapshot restored while committing log"),
Error::RaftShutdown => write!(f, "raft is already shutdown"),
Error::EnqueueTimeout => write!(f, "timed out enqueuing operation"),
Error::NothingNewToSnapshot => write!(f, "othing new to snapshot"),
Error::CantBootstrap => write!(f, "bootstrap only works on new clusters"),
Error::LeadershipTransferInProgress => write!(f, "leadership transfer in progress"),
Error::DuplicateServerID(id) => {
write!(f, "found duplicate server ID in configuration: {}", *id)
}
Error::DuplicateServerAddress(addr) => write!(
f,
"found duplicate server address in configuration: {}",
*addr
),
Error::NonVoter => write!(f, "need at least one voter in configuration"),
Error::ConfigurationChanged {
current_index,
prev_index,
} => write!(
f,
"configuration changed since {} (latest is {})",
*prev_index, *current_index
),
#[cfg(feature = "default")]
Error::FSMRestore(e) => write!(f, "FSM restore IO error. From: {}", *e),
}
}
}
impl From<FromUtf8Error> for Error {
fn from(e: FromUtf8Error) -> Self {
Self::FromUtf8Error(e)
}
}
|
use lib::Forecast;
mod lib;
/// Fetches the current weather for Dublin, IE from the OpenWeatherMap API.
///
/// `api` is the OpenWeatherMap API key, interpolated into the query string.
///
/// # Panics
///
/// Panics (with context) if the HTTP request fails or the response body
/// cannot be deserialized into a `Forecast`.
pub fn forecast(api: String) -> Forecast {
    // City and country are currently fixed to Dublin, IE.
    let url = format!(
        "http://api.openweathermap.org/data/2.5/weather?q={},{}&appid={}",
        "Dublin", "IE", api
    );
    let client = reqwest::blocking::Client::new();
    // `expect` instead of bare `unwrap` so a failure names the failing step;
    // the final expression replaces the redundant `return res;`.
    client
        .get(&url)
        .send()
        .expect("failed to reach the OpenWeatherMap API")
        .json::<Forecast>()
        .expect("could not deserialize OpenWeatherMap response into Forecast")
}
|
use crate::connection::Throughput as ThroughputEnum;
use crate::estimate::Estimate;
use crate::model::BenchmarkGroup;
use crate::report::{
compare_to_threshold, BenchmarkId, ComparisonResult, MeasurementData, Report, ReportContext,
};
use crate::value_formatter::ValueFormatter;
use anyhow::Result;
use serde_derive::Serialize;
use serde_json::json;
use std::io::{stdout, Write};
/// A serializable message emitted on stdout; `reason()` supplies the tag that
/// lets consumers dispatch on the message type.
trait Message: serde::ser::Serialize {
    fn reason() -> &'static str;
}
/// A point estimate with its lower/upper confidence bounds, all expressed in
/// `unit`.
#[derive(Serialize)]
struct ConfidenceInterval {
    estimate: f64,
    lower_bound: f64,
    upper_bound: f64,
    unit: String,
}
impl ConfidenceInterval {
    /// Builds an interval from an `Estimate`, scaling all three values into a
    /// machine-friendly unit chosen by `value_formatter`.
    fn from_estimate(estimate: &Estimate, value_formatter: &ValueFormatter) -> ConfidenceInterval {
        let mut values = [
            estimate.point_estimate,
            estimate.confidence_interval.lower_bound,
            estimate.confidence_interval.upper_bound,
        ];
        // Scales the three values in place and reports the unit they now carry.
        let unit = value_formatter.scale_for_machines(&mut values);
        ConfidenceInterval {
            estimate: values[0],
            lower_bound: values[1],
            upper_bound: values[2],
            unit,
        }
    }

    /// Builds a relative (percentage) interval; no unit scaling is applied.
    fn from_percent(estimate: &Estimate) -> ConfidenceInterval {
        let interval = &estimate.confidence_interval;
        ConfidenceInterval {
            estimate: estimate.point_estimate,
            lower_bound: interval.lower_bound,
            upper_bound: interval.upper_bound,
            unit: String::from("%"),
        }
    }
}
/// Serializable throughput: how many `unit`s each iteration processes.
#[derive(Serialize)]
struct Throughput {
    per_iteration: u64,
    unit: String,
}
impl From<&ThroughputEnum> for Throughput {
    /// Converts the internal throughput enum into its serializable form.
    fn from(other: &ThroughputEnum) -> Self {
        // Split the enum into its count and the unit label it represents.
        let (per_iteration, unit) = match other {
            ThroughputEnum::Bytes(count) => (*count, "bytes"),
            ThroughputEnum::Elements(count) => (*count, "elements"),
        };
        Throughput {
            per_iteration,
            unit: unit.to_owned(),
        }
    }
}
/// Direction of a performance change relative to the saved baseline.
#[derive(Serialize)]
enum ChangeType {
    NoChange,
    Improved,
    Regressed,
}
/// Relative change versus the baseline: mean/median intervals (in percent)
/// plus the overall classification.
#[derive(Serialize)]
struct ChangeDetails {
    mean: ConfidenceInterval,
    median: ConfidenceInterval,
    change: ChangeType,
}
/// Payload of a `benchmark-complete` message: everything measured for one
/// finished benchmark.
#[derive(Serialize)]
struct BenchmarkComplete {
    id: String,
    report_directory: String,
    // Per-sample iteration counts, parallel to `measured_values`.
    iteration_count: Vec<u64>,
    // Measured values per sample, expressed in `unit`.
    measured_values: Vec<f64>,
    unit: String,
    throughput: Vec<Throughput>,
    typical: ConfidenceInterval,
    mean: ConfidenceInterval,
    median: ConfidenceInterval,
    median_abs_dev: ConfidenceInterval,
    // `None` when no slope estimate was computed for this run.
    slope: Option<ConfidenceInterval>,
    // `None` when there is no previous run to compare against.
    change: Option<ChangeDetails>,
}
impl Message for BenchmarkComplete {
    // Tag consumers use to recognize a finished-benchmark message.
    fn reason() -> &'static str {
        "benchmark-complete"
    }
}
/// Payload of a `group-complete` message: the group's name, its benchmark
/// titles, and where its reports were written.
#[derive(Serialize)]
struct BenchmarkGroupComplete {
    group_name: String,
    benchmarks: Vec<String>,
    report_directory: String,
}
impl Message for BenchmarkGroupComplete {
    // Tag consumers use to recognize a finished-group message.
    fn reason() -> &'static str {
        "group-complete"
    }
}
/// `Report` implementation that emits one JSON object per event on stdout.
pub struct JsonMessageReport;
impl JsonMessageReport {
    /// Serializes `message`, splices its `reason` tag in as the first JSON
    /// field, and writes the result to stdout as a single line. Serialization
    /// or I/O failures are logged rather than propagated.
    fn send_message<M: Message>(&self, message: M) {
        fn do_send<M: Message>(message: M) -> Result<()> {
            // Format the message to string
            let message_text = serde_json::to_string(&message)?;
            // The splice below assumes the serialized form opens with '{'
            // (serde renders these derive structs as JSON objects).
            assert!(message_text.starts_with('{'));
            let reason = json!(M::reason());
            // Concatenate that into the message
            writeln!(stdout(), "{{\"reason\":{},{}", reason, &message_text[1..])?;
            Ok(())
        }
        if let Err(e) = do_send(message) {
            error!("Unexpected error writing JSON message: {:?}", e)
        }
    }
}
impl Report for JsonMessageReport {
    /// Emits a `benchmark-complete` message for a finished measurement: raw
    /// samples, throughput, the main estimates, and (when a baseline exists)
    /// a comparison against the previous run.
    fn measurement_complete(
        &self,
        id: &BenchmarkId,
        context: &ReportContext,
        measurements: &MeasurementData<'_>,
        formatter: &ValueFormatter,
    ) {
        // Scale the raw sample times in place; `unit` names the scaled unit.
        let mut measured_values = measurements.sample_times().to_vec();
        let unit = formatter.scale_for_machines(&mut measured_values);
        let iteration_count: Vec<u64> = measurements
            .iter_counts()
            .iter()
            .map(|count| *count as u64)
            .collect();
        let message = BenchmarkComplete {
            id: id.as_title().to_owned(),
            report_directory: path!(&context.output_directory, id.as_directory_name())
                .display()
                .to_string(),
            iteration_count,
            measured_values,
            unit,
            throughput: measurements
                .throughput
                .iter()
                .map(Throughput::from)
                .collect(),
            typical: ConfidenceInterval::from_estimate(
                measurements.absolute_estimates.typical(),
                formatter,
            ),
            mean: ConfidenceInterval::from_estimate(
                &measurements.absolute_estimates.mean,
                formatter,
            ),
            median: ConfidenceInterval::from_estimate(
                &measurements.absolute_estimates.median,
                formatter,
            ),
            median_abs_dev: ConfidenceInterval::from_estimate(
                &measurements.absolute_estimates.median_abs_dev,
                formatter,
            ),
            // The slope estimate is optional, so map it through when present.
            slope: measurements
                .absolute_estimates
                .slope
                .as_ref()
                .map(|slope| ConfidenceInterval::from_estimate(slope, formatter)),
            change: measurements.comparison.as_ref().map(|comparison| {
                // A change counts as significant only when the p-value clears
                // the configured significance threshold...
                let different_mean = comparison.p_value < comparison.significance_threshold;
                let mean_est = &comparison.relative_estimates.mean;
                let change = if !different_mean {
                    ChangeType::NoChange
                } else {
                    // ...and even then it may fall inside the noise threshold.
                    let comparison = compare_to_threshold(&mean_est, comparison.noise_threshold);
                    match comparison {
                        ComparisonResult::Improved => ChangeType::Improved,
                        ComparisonResult::Regressed => ChangeType::Regressed,
                        ComparisonResult::NonSignificant => ChangeType::NoChange,
                    }
                };
                ChangeDetails {
                    mean: ConfidenceInterval::from_percent(&comparison.relative_estimates.mean),
                    median: ConfidenceInterval::from_percent(&comparison.relative_estimates.median),
                    change,
                }
            }),
        };
        self.send_message(message);
    }

    /// Emits a `group-complete` message naming every benchmark in the group
    /// and the directory its reports were written to.
    fn summarize(
        &self,
        context: &ReportContext,
        group_id: &str,
        benchmark_group: &BenchmarkGroup,
        _formatter: &ValueFormatter,
    ) {
        let message = BenchmarkGroupComplete {
            group_name: group_id.to_owned(),
            benchmarks: benchmark_group
                .benchmarks
                .keys()
                .map(|id| id.as_title().to_owned())
                .collect(),
            report_directory: path!(
                &context.output_directory,
                BenchmarkId::new(group_id.to_owned(), None, None, None).as_directory_name()
            )
            .display()
            .to_string(),
        };
        self.send_message(message);
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[doc(hidden)]
pub use crate::sp_runtime::traits::ValidateUnsigned;
#[doc(hidden)]
pub use crate::sp_runtime::transaction_validity::{
TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction,
};
/// Implement `ValidateUnsigned` for `Runtime`.
/// All given modules need to implement `ValidateUnsigned`.
///
/// # Example
///
/// ```
/// # mod timestamp {
/// #     pub struct Module;
/// #
/// #     impl frame_support::unsigned::ValidateUnsigned for Module {
/// #         type Call = Call;
/// #
/// #         fn validate_unsigned(_source: frame_support::unsigned::TransactionSource, _call: &Self::Call)
/// -> frame_support::unsigned::TransactionValidity {
/// #             unimplemented!();
/// #         }
/// #     }
/// #
/// #     pub enum Call {
/// #     }
/// # }
/// #
/// # pub type Timestamp = timestamp::Module;
/// #
/// #
/// # pub enum Call {
/// #     Timestamp(timestamp::Call),
/// # }
/// # #[allow(unused)]
/// pub struct Runtime;
///
/// frame_support::impl_outer_validate_unsigned! {
///     impl ValidateUnsigned for Runtime {
///         Timestamp
///     }
/// }
/// ```
#[macro_export]
macro_rules! impl_outer_validate_unsigned {
    (
        impl ValidateUnsigned for $runtime:ident {
            $( $module:ident )*
        }
    ) => {
        impl $crate::unsigned::ValidateUnsigned for $runtime {
            type Call = Call;
            // Delegates pre-dispatch checks to the module owning the call.
            fn pre_dispatch(call: &Self::Call) -> Result<(), $crate::unsigned::TransactionValidityError> {
                #[allow(unreachable_patterns)]
                match call {
                    $( Call::$module(inner_call) => $module::pre_dispatch(inner_call), )*
                    // pre-dispatch should not stop inherent extrinsics, validation should prevent
                    // including arbitrary (non-inherent) extrinsics to blocks.
                    _ => Ok(()),
                }
            }
            fn validate_unsigned(
                #[allow(unused_variables)]
                source: $crate::unsigned::TransactionSource,
                call: &Self::Call,
            ) -> $crate::unsigned::TransactionValidity {
                #[allow(unreachable_patterns)]
                match call {
                    $( Call::$module(inner_call) => $module::validate_unsigned(source, inner_call), )*
                    // Calls with no registered validator are rejected outright.
                    _ => $crate::unsigned::UnknownTransaction::NoUnsignedValidator.into(),
                }
            }
        }
    };
}
// Compile-only check: the macro must expand for an empty module list against
// an empty outer `Call` enum.
#[cfg(test)]
mod test_empty_call {
    pub enum Call {}
    #[allow(unused)]
    pub struct Runtime;
    impl_outer_validate_unsigned! {
        impl ValidateUnsigned for Runtime {
        }
    }
}
// Compile-only checks: the macro must expand both when every `Call` variant
// has a listed validator and when some variants are left unlisted (falling
// through to the catch-all arms).
#[cfg(test)]
mod test_partial_and_full_call {
    pub mod timestamp {
        pub struct Module;
        impl super::super::ValidateUnsigned for Module {
            type Call = Call;
            fn validate_unsigned(
                _source: super::super::TransactionSource,
                _call: &Self::Call,
            ) -> super::super::TransactionValidity {
                unimplemented!();
            }
        }
        pub enum Call {
            Foo,
        }
    }
    // Every variant of `Call` has a validator listed in the macro.
    mod test_full_unsigned {
        pub type Timestamp = super::timestamp::Module;
        pub enum Call {
            Timestamp(super::timestamp::Call),
        }
        pub struct Runtime;
        impl_outer_validate_unsigned! {
            impl ValidateUnsigned for Runtime {
                Timestamp
            }
        }
        #[test]
        fn used() {
            let _ = Call::Timestamp(super::timestamp::Call::Foo);
            let _ = Runtime;
        }
    }
    // `Call` has a variant with no validator listed; the catch-all arms apply.
    mod test_not_full_unsigned {
        pub enum Call {
            Timestamp(super::timestamp::Call),
        }
        pub struct Runtime;
        impl_outer_validate_unsigned! {
            impl ValidateUnsigned for Runtime {
            }
        }
        #[test]
        fn used() {
            let _ = Call::Timestamp(super::timestamp::Call::Foo);
            let _ = Runtime;
        }
    }
}
|
extern crate bela_sys;
use bela_sys::{BelaContext, BelaInitSettings};
use std::convert::TryInto;
use std::{mem, slice};
use std::{thread, time};
pub mod error;
/// Direction of a digital pin, used by `Context::pin_mode` and
/// `Context::pin_mode_once`.
// NOTE(review): variant names keep Arduino-style SCREAMING case; renaming to
// CamelCase would break existing callers, so they are left as-is.
pub enum DigitalDirection {
    INPUT,
    OUTPUT,
}
/// Bela board variants. Discriminants mirror the `bela_sys::BelaHw_*`
/// constants so values round-trip through the C API unchanged.
#[repr(C)]
pub enum BelaHw {
    NoHw = bela_sys::BelaHw_BelaHw_NoHw as isize,
    Bela = bela_sys::BelaHw_BelaHw_Bela as isize,
    BelaMini = bela_sys::BelaHw_BelaHw_BelaMini as isize,
    Salt = bela_sys::BelaHw_BelaHw_Salt as isize,
    CtagFace = bela_sys::BelaHw_BelaHw_CtagFace as isize,
    CtagBeast = bela_sys::BelaHw_BelaHw_CtagBeast as isize,
    CtagFaceBela = bela_sys::BelaHw_BelaHw_CtagFaceBela as isize,
    CtagBeastBela = bela_sys::BelaHw_BelaHw_CtagBeastBela as isize,
}
impl BelaHw {
    /// Maps a raw `bela_sys` board constant to its `BelaHw` variant, or
    /// `None` when the value names no known board.
    fn from_i32(v: i32) -> Option<BelaHw> {
        Some(match v {
            bela_sys::BelaHw_BelaHw_NoHw => BelaHw::NoHw,
            bela_sys::BelaHw_BelaHw_Bela => BelaHw::Bela,
            bela_sys::BelaHw_BelaHw_BelaMini => BelaHw::BelaMini,
            bela_sys::BelaHw_BelaHw_Salt => BelaHw::Salt,
            bela_sys::BelaHw_BelaHw_CtagFace => BelaHw::CtagFace,
            bela_sys::BelaHw_BelaHw_CtagBeast => BelaHw::CtagBeast,
            bela_sys::BelaHw_BelaHw_CtagFaceBela => BelaHw::CtagFaceBela,
            bela_sys::BelaHw_BelaHw_CtagBeastBela => BelaHw::CtagBeastBela,
            _ => return None,
        })
    }
}
/// The `Bela` struct is essentially built to ensure that the type parameter
/// `<T>` is consistent across all invocations of the setup, render, and cleanup
/// functions. This is because `<T>` is the `UserData` of the original Bela
/// library -- we want to ensure that the `UserData` we are initializing with
/// is the exact same as the one we are attempting to access with each function.
///
/// TODO: Bela needs to also wrap the various setup, render, and cleanup
/// functions and keep them in the same struct.
///
/// Called when audio is initialized.
///
/// ```rust
/// pub type SetupFn = FnOnce(&mut Context, T) -> bool;
/// ```
///
/// Called on every frame.
///
/// ```rust
/// pub type RenderFn = Fn(&mut Context, T);
/// ```
///
/// Called when audio is stopped.
///
/// ```rust
/// pub type CleanupFn = FnOnce(&mut Context, T) -> bool;
/// ```
pub struct Bela<T> {
    // Set once `Bela_initAudio` succeeds; `start_audio` refuses to run
    // before that.
    initialized: bool,
    // The user data handed to the C callbacks through an opaque pointer.
    user_data: T,
}
/// C-compatible render callback handed to the Bela driver; recovers the typed
/// `UserData` from the opaque pointer and forwards to its render function.
extern "C" fn render_trampoline<'a, T>(
    context: *mut BelaContext,
    user_data: *mut std::os::raw::c_void,
) where
    T: UserData<'a> + 'a,
{
    let mut context = Context::new(context);
    // SAFETY-NOTE(review): assumes `user_data` is the `*mut T` registered in
    // `init_audio` and stays valid for the audio run — confirm at call site.
    let user_data = unsafe { &mut *(user_data as *mut T) };
    user_data.render_fn(&mut context);
}
/// C-compatible setup callback; forwards to the typed `UserData`'s setup
/// function and collapses its `Result` into the bool the C API expects.
extern "C" fn setup_trampoline<'a, T>(
    context: *mut BelaContext,
    user_data: *mut std::os::raw::c_void,
) -> bool
where
    T: UserData<'a> + 'a,
{
    let mut context = Context::new(context);
    // SAFETY-NOTE(review): assumes `user_data` is the `*mut T` registered in
    // `init_audio` — confirm at call site.
    let user_data = unsafe { &mut *(user_data as *mut T) };
    user_data.setup_fn(&mut context).is_ok()
}
/// C-compatible cleanup callback; forwards to the typed `UserData`'s cleanup
/// function.
extern "C" fn cleanup_trampoline<'a, T>(
    context: *mut BelaContext,
    user_data: *mut std::os::raw::c_void,
) where
    T: UserData<'a> + 'a,
{
    let mut context = Context::new(context);
    // SAFETY-NOTE(review): assumes `user_data` is the `*mut T` registered in
    // `init_audio` — confirm at call site.
    let user_data = unsafe { &mut *(user_data as *mut T) };
    user_data.cleanup_fn(&mut context);
}
/// Opaque handle to an auxiliary task registered with the Bela driver.
pub struct CreatedTask(bela_sys::AuxiliaryTask);
impl<'a, T: UserData<'a> + 'a> Bela<T> {
    /// Wraps `user_data` in a not-yet-initialized `Bela` instance.
    pub fn new(user_data: T) -> Self {
        Bela {
            initialized: false,
            user_data,
        }
    }

    /// Initializes and starts audio, then blocks — polling the stop flag and
    /// sleeping 1000 ns between polls — until the driver requests a stop,
    /// after which audio is stopped and cleaned up.
    pub fn run(&mut self, settings: &mut InitSettings) -> Result<(), error::Error> {
        self.init_audio(settings)?;
        self.start_audio()?;
        while !self.should_stop() {
            thread::sleep(time::Duration::new(0, 1000));
        }
        self.stop_audio();
        self.cleanup_audio();
        Ok(())
    }

    /// Replaces the render callback stored on the wrapped user data.
    pub fn set_render<F: 'a>(&mut self, func: &'a mut F)
    where
        F: FnMut(&mut Context, T::Data),
        for<'r, 's> F: FnMut(&'r mut Context, &'s mut T::Data),
    {
        self.user_data.set_render_fn(func);
    }

    /// Replaces the setup callback stored on the wrapped user data.
    pub fn set_setup<F: 'a>(&mut self, func: &'a mut F)
    where
        F: FnMut(&mut Context, T::Data) -> bool,
        for<'r, 's> F: FnMut(&'r mut Context, &'s mut T::Data) -> Result<(), error::Error>,
    {
        self.user_data.set_setup_fn(Some(func));
    }

    /// Replaces the cleanup callback stored on the wrapped user data.
    pub fn set_cleanup<F: 'a>(&mut self, func: &'a mut F)
    where
        F: FnMut(&mut Context, T::Data),
        for<'r, 's> F: FnMut(&'r mut Context, &'s mut T::Data),
    {
        self.user_data.set_cleanup_fn(Some(func));
    }

    /// Registers the typed trampolines and `self.user_data` with the driver
    /// and calls `Bela_initAudio`; marks `self` initialized on success.
    pub fn init_audio(&mut self, settings: &mut InitSettings) -> Result<(), error::Error> {
        settings.settings.setup = Some(setup_trampoline::<T>);
        settings.settings.render = Some(render_trampoline::<T>);
        settings.settings.cleanup = Some(cleanup_trampoline::<T>);
        let out = unsafe {
            bela_sys::Bela_initAudio(
                settings.settings_ptr(),
                // Passed back to the trampolines as the opaque user pointer.
                &mut self.user_data as *mut _ as *mut _,
            )
        };
        match out {
            // Zero is the driver's success code.
            0 => {
                self.initialized = true;
                Ok(())
            }
            _ => Err(error::Error::Init),
        }
    }

    /// Starts the audio run; fails unless `init_audio` succeeded first.
    pub fn start_audio(&self) -> Result<(), error::Error> {
        if !self.initialized {
            return Err(error::Error::Start);
        }
        let out = unsafe { bela_sys::Bela_startAudio() };
        match out {
            0 => Ok(()),
            _ => Err(error::Error::Start),
        }
    }

    /// Returns true once the Bela driver has requested a stop.
    pub fn should_stop(&self) -> bool {
        unsafe { bela_sys::Bela_stopRequested() != 0 }
    }

    /// Create an auxiliary task that runs on a lower-priority thread
    /// `name` must be globally unique across all Xenomai processes!
    pub fn create_auxiliary_task<Auxiliary>(
        task: Box<Auxiliary>,
        priority: i32,
        name: &std::ffi::CStr,
    ) -> CreatedTask
    where
        Auxiliary: FnMut() + Send + 'static,
    {
        // TODO: Bela API does not currently offer an API to stop and unregister a task,
        // so we can only leak the task. Otherwise, we could `Box::into_raw` here, store the
        // raw pointer in `CreatedTask` and drop it after unregistering & joining the thread
        // using `Box::from_raw`.
        let task_ptr = Box::leak(task) as *mut _ as *mut _;
        // Recovers the leaked closure from the opaque pointer and invokes it.
        extern "C" fn auxiliary_task_trampoline<Auxiliary>(aux_ptr: *mut std::os::raw::c_void)
        where
            Auxiliary: FnMut() + Send + 'static,
        {
            let task_ptr = unsafe { &mut *(aux_ptr as *mut Auxiliary) };
            task_ptr();
        }
        let aux_task = unsafe {
            bela_sys::Bela_createAuxiliaryTask(
                Some(auxiliary_task_trampoline::<Auxiliary>),
                priority,
                name.as_ptr(),
                task_ptr,
            )
        };
        CreatedTask(aux_task)
    }

    /// Asks the driver to schedule a previously created auxiliary task.
    pub fn schedule_auxiliary_task(task: &CreatedTask) -> Result<(), error::Error> {
        let res = unsafe { bela_sys::Bela_scheduleAuxiliaryTask(task.0) };
        match res {
            0 => Ok(()),
            _ => Err(error::Error::Task),
        }
    }

    /// Stops the audio run via the driver.
    pub fn stop_audio(&self) {
        unsafe {
            bela_sys::Bela_stopAudio();
        }
    }

    /// Releases driver-side audio resources.
    pub fn cleanup_audio(&self) {
        unsafe {
            bela_sys::Bela_cleanupAudio();
        }
    }
}
/// Wraps `BelaContext`
// Holds only the raw pointer; all accessors dereference it in `unsafe` blocks.
pub struct Context {
    context: *mut BelaContext,
}
impl Context {
    /// Wraps a raw `BelaContext` pointer without taking ownership.
    pub fn new(context: *mut BelaContext) -> Context {
        Context { context }
    }
    /// Raw mutable pointer to the underlying `BelaContext`.
    pub fn context_mut_ptr(&mut self) -> *mut BelaContext {
        let ptr: *mut BelaContext = self.context;
        ptr
    }
    /// Raw const pointer to the underlying `BelaContext`.
    pub fn context_ptr(&self) -> *const BelaContext {
        let ptr: *mut BelaContext = self.context;
        ptr
    }
    /// Access the audio output slice
    ///
    /// Mutably borrows self so that (hopefully) we do not have multiple mutable
    /// pointers to the audio buffer available simultaneously.
    pub fn audio_out(&mut self) -> &mut [f32] {
        unsafe {
            let context = self.context_mut_ptr();
            let n_frames = (*context).audioFrames;
            let n_channels = (*context).audioOutChannels;
            let audio_out_ptr = (*context).audioOut;
            // Slice length is frames * channels samples.
            slice::from_raw_parts_mut(audio_out_ptr, (n_frames * n_channels) as usize)
        }
    }
    /// Access the audio input slice
    ///
    /// Immutably borrows self and returns an immutable buffer of audio in data.
    pub fn audio_in(&self) -> &[f32] {
        unsafe {
            let context = self.context_ptr();
            let n_frames = (*context).audioFrames;
            let n_channels = (*context).audioInChannels;
            let audio_in_ptr = (*context).audioIn;
            slice::from_raw_parts(audio_in_ptr, (n_frames * n_channels) as usize)
        }
    }
    /// Access the digital input/output slice immutably
    pub fn digital(&self) -> &[u32] {
        unsafe {
            let context = self.context_ptr();
            let n_frames = (*context).digitalFrames;
            let n_channels = (*context).digitalChannels;
            let digital_ptr = (*context).digital;
            slice::from_raw_parts(digital_ptr, (n_frames * n_channels) as usize)
        }
    }
    /// Access the digital input/output slice mutably
    ///
    /// Mutably borrows self so that (hopefully) we do not have multiple mutable
    /// pointers to the digital buffer available simultaneously.
    pub fn digital_mut(&mut self) -> &mut [u32] {
        unsafe {
            let context = self.context_ptr();
            let n_frames = (*context).digitalFrames;
            let n_channels = (*context).digitalChannels;
            let digital_ptr = (*context).digital;
            slice::from_raw_parts_mut(digital_ptr, (n_frames * n_channels) as usize)
        }
    }
    /// Access the analog output slice
    ///
    /// Mutably borrows self so that (hopefully) we do not have multiple mutable
    /// pointers to the analog buffer available simultaneously.
    pub fn analog_out(&mut self) -> &mut [f32] {
        unsafe {
            let context = self.context_ptr();
            let n_frames = (*context).analogFrames;
            let n_channels = (*context).analogOutChannels;
            let analog_out_ptr = (*context).analogOut;
            slice::from_raw_parts_mut(analog_out_ptr, (n_frames * n_channels) as usize)
        }
    }
    /// Access the analog input slice
    pub fn analog_in(&self) -> &[f32] {
        unsafe {
            let n_frames = (*self.context).analogFrames;
            let n_channels = (*self.context).analogInChannels;
            let analog_in_ptr = (*self.context).analogIn;
            slice::from_raw_parts(analog_in_ptr, (n_frames * n_channels) as usize)
        }
    }
    // --- Scalar accessors mirroring `BelaContext` fields one-to-one ---
    pub fn audio_frames(&self) -> usize {
        unsafe { (*self.context).audioFrames as usize }
    }
    pub fn audio_in_channels(&self) -> usize {
        unsafe { (*self.context).audioInChannels as usize }
    }
    pub fn audio_out_channels(&self) -> usize {
        unsafe { (*self.context).audioOutChannels as usize }
    }
    pub fn audio_sample_rate(&self) -> f32 {
        unsafe { (*self.context).audioSampleRate }
    }
    pub fn analog_frames(&self) -> usize {
        unsafe { (*self.context).analogFrames as usize }
    }
    pub fn analog_in_channels(&self) -> usize {
        unsafe { (*self.context).analogInChannels as usize }
    }
    pub fn analog_out_channels(&self) -> usize {
        unsafe { (*self.context).analogOutChannels as usize }
    }
    pub fn analog_sample_rate(&self) -> f32 {
        unsafe { (*self.context).analogSampleRate }
    }
    pub fn digital_frames(&self) -> usize {
        unsafe { (*self.context).digitalFrames as usize }
    }
    pub fn digital_channels(&self) -> usize {
        unsafe { (*self.context).digitalChannels as usize }
    }
    pub fn digital_sample_rate(&self) -> f32 {
        unsafe { (*self.context).digitalSampleRate }
    }
    pub fn audio_frames_elapsed(&self) -> usize {
        unsafe { (*self.context).audioFramesElapsed as usize }
    }
    pub fn multiplexer_channels(&self) -> usize {
        unsafe { (*self.context).multiplexerChannels as usize }
    }
    pub fn multiplexer_starting_channels(&self) -> usize {
        unsafe { (*self.context).multiplexerStartingChannel as usize }
    }
    /// Multiplexed analog input: `analogFrames * multiplexerChannels` samples.
    pub fn multiplexer_analog_in(&self) -> &[f32] {
        unsafe {
            let n_frames = (*self.context).analogFrames;
            let n_channels = (*self.context).multiplexerChannels;
            let analog_in_ptr = (*self.context).multiplexerAnalogIn;
            slice::from_raw_parts(analog_in_ptr, (n_frames * n_channels) as usize)
        }
    }
    // NOTE(review): despite the name, this reads the `audioExpanderEnabled`
    // field — confirm whether it should expose a multiplexer flag instead.
    pub fn multiplexer_enabled(&self) -> u32 {
        unsafe { (*self.context).audioExpanderEnabled }
    }
    pub fn flags(&self) -> u32 {
        unsafe { (*self.context).flags }
    }
    // Returns the value of a given digital input at the given frame number
    pub fn digital_read(&self, frame: usize, channel: usize) -> bool {
        let digital = self.digital();
        // Input levels are read from bit (channel + 16) of the packed word.
        (digital[frame] >> (channel + 16)) & 1 != 0
    }
    // Sets a given digital output channel to a value for the current frame and all subsequent frames
    pub fn digital_write(&mut self, frame: usize, channel: usize, value: bool) {
        let digital = self.digital_mut();
        for out in &mut digital[frame..] {
            if value {
                *out |= 1 << (channel + 16)
            } else {
                *out &= !(1 << (channel + 16));
            }
        }
    }
    // Sets a given digital output channel to a value for the current frame only
    pub fn digital_write_once(&mut self, frame: usize, channel: usize, value: bool) {
        let digital = self.digital_mut();
        if value {
            digital[frame] |= 1 << (channel + 16);
        } else {
            digital[frame] &= !(1 << (channel + 16));
        }
    }
    // Sets the direction of a digital pin for the current frame and all subsequent frames
    pub fn pin_mode(&mut self, frame: usize, channel: usize, mode: DigitalDirection) {
        let digital = self.digital_mut();
        for out in &mut digital[frame..] {
            // Direction flags live in the low bit for each channel:
            // set bit => INPUT, cleared bit => OUTPUT.
            match mode {
                DigitalDirection::INPUT => {
                    *out |= 1 << channel;
                }
                DigitalDirection::OUTPUT => {
                    *out &= !(1 << channel);
                }
            }
        }
    }
    // Sets the direction of a digital pin for the current frame only
    pub fn pin_mode_once(&mut self, frame: usize, channel: usize, mode: DigitalDirection) {
        let digital = self.digital_mut();
        match mode {
            DigitalDirection::INPUT => {
                digital[frame] |= 1 << channel;
            }
            DigitalDirection::OUTPUT => {
                digital[frame] &= !(1 << channel);
            }
        }
    }
}
/// Bridges user callbacks to the C trampolines: one invoke/replace pair per
/// Bela lifecycle stage (setup, render, cleanup).
pub trait UserData<'a> {
    /// The user's application state handed to every callback.
    type Data;
    fn render_fn(&mut self, context: &mut Context);
    fn set_render_fn(&mut self, render_fn: &'a mut dyn FnMut(&mut Context, &mut Self::Data));
    fn setup_fn(&mut self, context: &mut Context) -> Result<(), error::Error>;
    fn set_setup_fn(
        &mut self,
        setup_fn: Option<
            &'a mut dyn FnMut(&mut Context, &mut Self::Data) -> Result<(), error::Error>,
        >,
    );
    fn cleanup_fn(&mut self, context: &mut Context);
    fn set_cleanup_fn(
        &mut self,
        cleanup_fn: Option<&'a mut dyn FnMut(&mut Context, &mut Self::Data)>,
    );
}
/// Default `UserData` implementation bundling user state with borrowed
/// setup/render/cleanup callbacks.
pub struct AppData<'a, D: 'a> {
    pub data: D,
    // Render is mandatory; setup and cleanup are optional.
    render: &'a mut dyn FnMut(&mut Context, &mut D),
    setup: Option<&'a mut dyn FnMut(&mut Context, &mut D) -> Result<(), error::Error>>,
    cleanup: Option<&'a mut dyn FnMut(&mut Context, &mut D)>,
}
impl<'a, D> AppData<'a, D> {
    /// Bundles `data` with its callbacks; `setup` and `cleanup` may be `None`.
    pub fn new(
        data: D,
        render: &'a mut dyn FnMut(&mut Context, &mut D),
        setup: Option<&'a mut dyn FnMut(&mut Context, &mut D) -> Result<(), error::Error>>,
        cleanup: Option<&'a mut dyn FnMut(&mut Context, &mut D)>,
    ) -> AppData<'a, D> {
        AppData {
            data,
            render,
            setup,
            cleanup,
        }
    }
}
impl<'a, D> UserData<'a> for AppData<'a, D> {
    type Data = D;

    /// Invokes the stored render callback with the wrapped user data.
    fn render_fn(&mut self, context: &mut Context) {
        // Destructure so the callback and the data are borrowed disjointly.
        let AppData { render, data, .. } = self;
        render(context, data)
    }

    fn set_render_fn(&mut self, callback: &'a mut (dyn FnMut(&mut Context, &mut D) + 'a)) {
        self.render = callback;
    }

    /// Invokes the optional setup callback; trivially succeeds when unset.
    fn setup_fn(&mut self, context: &mut Context) -> Result<(), error::Error> {
        let AppData { setup, data, .. } = self;
        if let Some(callback) = setup {
            callback(context, data)
        } else {
            Ok(())
        }
    }

    fn set_setup_fn(
        &mut self,
        callback: Option<
            &'a mut (dyn FnMut(&mut Context, &mut D) -> Result<(), error::Error> + 'a),
        >,
    ) {
        self.setup = callback;
    }

    /// Invokes the optional cleanup callback if one was registered.
    fn cleanup_fn(&mut self, context: &mut Context) {
        let AppData { cleanup, data, .. } = self;
        if let Some(callback) = cleanup {
            callback(context, data);
        }
    }

    fn set_cleanup_fn(&mut self, callback: Option<&'a mut (dyn FnMut(&mut Context, &mut D) + 'a)>) {
        self.cleanup = callback;
    }
}
/// Safe wrapper for `BelaInitSettings`, which sets initial parameters for the
/// Bela system.
pub struct InitSettings {
    // Raw settings handed to `Bela_initAudio` through `settings_ptr`.
    settings: BelaInitSettings,
}
impl InitSettings {
    /// Raw pointer to the wrapped settings, for passing to `Bela_initAudio`.
    pub fn settings_ptr(&mut self) -> *mut BelaInitSettings {
        &mut self.settings
    }
    /// Get number of analog frames per period (buffer). Number of audio frames
    /// depends on relative sample rates of the two. By default, audio is twice
    /// the sample rate, so has twice the period size.
    pub fn period_size(&self) -> usize {
        self.settings.periodSize as usize
    }
    /// Set number of analog frames per period (buffer). Number of audio frames
    /// depends on relative sample rates of the two. By default, audio is twice
    /// the sample rate, so has twice the period size.
    pub fn set_period_size(&mut self, size: usize) {
        self.settings.periodSize = size.try_into().unwrap();
    }
    /// Get whether to use the analog input and output
    pub fn use_analog(&self) -> bool {
        self.settings.useAnalog != 0
    }
    /// Set whether to use the analog input and output
    pub fn set_use_analog(&mut self, use_analog: bool) {
        self.settings.useAnalog = use_analog as _;
    }
    /// Get whether to use the digital input and output
    pub fn use_digital(&self) -> bool {
        self.settings.useDigital != 0
    }
    /// Set whether to use the digital input and output
    pub fn set_use_digital(&mut self, use_digital: bool) {
        self.settings.useDigital = use_digital as _;
    }
    /// Get the number of analog input channels.
    pub fn num_analog_in_channels(&self) -> usize {
        self.settings.numAnalogInChannels as usize
    }
    /// Set the number of analog input channels.
    pub fn set_num_analog_in_channels(&mut self, num: usize) {
        self.settings.numAnalogInChannels = num.try_into().unwrap();
    }
    /// Get the number of analog output channels.
    pub fn num_analog_out_channels(&self) -> usize {
        self.settings.numAnalogOutChannels as usize
    }
    /// Set the number of analog output channels.
    pub fn set_num_analog_out_channels(&mut self, num: usize) {
        self.settings.numAnalogOutChannels = num.try_into().unwrap();
    }
    /// Get the number of digital channels.
    pub fn num_digital_channels(&self) -> usize {
        self.settings.numDigitalChannels as usize
    }
    /// Set the number of digital channels.
    pub fn set_num_digital_channels(&mut self, num: usize) {
        self.settings.numDigitalChannels = num.try_into().unwrap();
    }
    /// Get whether audio starts muted.
    pub fn begin_muted(&self) -> bool {
        self.settings.beginMuted != 0
    }
    /// Set whether audio starts muted.
    pub fn set_begin_muted(&mut self, val: bool) {
        self.settings.beginMuted = val as _;
    }
    /// Get the DAC output level.
    pub fn dac_level(&self) -> f32 {
        self.settings.dacLevel
    }
    /// Set the DAC output level.
    pub fn set_dac_level(&mut self, val: f32) {
        self.settings.dacLevel = val;
    }
    /// Get the ADC input level.
    pub fn adc_level(&self) -> f32 {
        self.settings.adcLevel
    }
    /// Set the ADC input level.
    pub fn set_adc_level(&mut self, val: f32) {
        self.settings.adcLevel = val;
    }
    /// Get the PGA gains (one value per input channel pair).
    pub fn pga_gain(&self) -> [f32; 2] {
        self.settings.pgaGain
    }
    /// Set the PGA gains.
    pub fn set_pga_gain(&mut self, val: [f32; 2]) {
        self.settings.pgaGain = val;
    }
    /// Get the headphone output level.
    pub fn headphone_level(&self) -> f32 {
        self.settings.headphoneLevel
    }
    /// Set the headphone output level.
    pub fn set_headphone_level(&mut self, val: f32) {
        self.settings.headphoneLevel = val;
    }
    /// Get the number of multiplexer channels.
    pub fn num_mux_channels(&self) -> usize {
        self.settings.numMuxChannels as usize
    }
    /// Set the number of multiplexer channels.
    pub fn set_num_mux_channels(&mut self, val: usize) {
        self.settings.numMuxChannels = val.try_into().unwrap();
    }
    /// Get the number of audio expander inputs.
    pub fn audio_expander_inputs(&self) -> usize {
        self.settings.audioExpanderInputs as usize
    }
    /// Set the number of audio expander inputs.
    pub fn set_audio_expander_inputs(&mut self, val: usize) {
        self.settings.audioExpanderInputs = val.try_into().unwrap();
    }
    /// Get the number of audio expander outputs.
    pub fn audio_expander_outputs(&self) -> usize {
        self.settings.audioExpanderOutputs as usize
    }
    /// Set the number of audio expander outputs.
    pub fn set_audio_expander_outputs(&mut self, val: usize) {
        self.settings.audioExpanderOutputs = val.try_into().unwrap();
    }
    /// Get which PRU (0 or 1) runs the I/O loop.
    pub fn pru_number(&self) -> usize {
        self.settings.pruNumber as usize
    }
    /// Set which PRU runs the I/O loop.
    pub fn set_pru_number(&mut self, val: usize) {
        self.settings.pruNumber = val.try_into().unwrap();
    }
    /// Get the PRU firmware filename (fixed-size, NUL-padded buffer).
    pub fn pru_filename(&self) -> [u8; 256] {
        self.settings.pruFilename
    }
    /// Set the PRU firmware filename (fixed-size, NUL-padded buffer).
    pub fn set_pru_filename(&mut self, val: [u8; 256]) {
        self.settings.pruFilename = val;
    }
    /// Get whether underrun detection is enabled.
    pub fn detect_underruns(&self) -> bool {
        self.settings.detectUnderruns != 0
    }
    /// Set whether underrun detection is enabled.
    pub fn set_detect_underruns(&mut self, val: bool) {
        self.settings.detectUnderruns = val as _;
    }
    /// Get whether verbose logging is enabled.
    pub fn verbose(&self) -> bool {
        self.settings.verbose != 0
    }
    /// Set whether verbose logging is enabled.
    pub fn set_verbose(&mut self, val: bool) {
        self.settings.verbose = val as _;
    }
    /// Get whether the onboard LED is driven.
    pub fn enable_led(&self) -> bool {
        self.settings.enableLED != 0
    }
    /// Set whether the onboard LED is driven.
    pub fn set_enable_led(&mut self, val: bool) {
        self.settings.enableLED = val as _;
    }
    /// Get the stop-button pin, or `None` when disabled (negative sentinel).
    pub fn stop_button_pin(&self) -> Option<i8> {
        match self.settings.stopButtonPin {
            0..=127 => Some(self.settings.stopButtonPin as _),
            _ => None,
        }
    }
    /// Set the stop-button pin; `None` (or a negative value) disables it by
    /// storing the -1 sentinel.
    pub fn set_stop_button_pin(&mut self, val: Option<i8>) {
        self.settings.stopButtonPin = match val {
            Some(v) if v >= 0 => v as _,
            _ => -1,
        };
    }
    /// Get whether high-performance mode is enabled.
    pub fn high_performance_mode(&self) -> bool {
        self.settings.highPerformanceMode != 0
    }
    /// Set whether high-performance mode is enabled.
    pub fn set_high_performance_mode(&mut self, val: bool) {
        self.settings.highPerformanceMode = val as _;
    }
    /// Get whether buffers are interleaved.
    pub fn interleave(&self) -> bool {
        self.settings.interleave != 0
    }
    /// Set whether buffers are interleaved.
    pub fn set_interleave(&mut self, val: bool) {
        self.settings.interleave = val as _;
    }
    /// Get whether analog outputs keep their last value after a run.
    pub fn analog_outputs_persist(&self) -> bool {
        self.settings.analogOutputsPersist != 0
    }
    /// Set whether analog outputs keep their last value after a run.
    pub fn set_analog_outputs_persist(&mut self, val: bool) {
        self.settings.analogOutputsPersist = val as _;
    }
    /// Get whether a uniform sample rate is forced across domains.
    pub fn uniform_sample_rate(&self) -> bool {
        self.settings.uniformSampleRate != 0
    }
    /// Set whether a uniform sample rate is forced across domains.
    pub fn set_uniform_sample_rate(&mut self, val: bool) {
        self.settings.uniformSampleRate = val as _;
    }
    /// Get the audio thread stack size in bytes.
    pub fn audio_thread_stack_size(&self) -> usize {
        self.settings.audioThreadStackSize as usize
    }
    /// Set the audio thread stack size in bytes.
    pub fn set_audio_thread_stack_size(&mut self, num: usize) {
        self.settings.audioThreadStackSize = num.try_into().unwrap();
    }
    /// Get the auxiliary task stack size in bytes.
    pub fn auxiliary_task_stack_size(&self) -> usize {
        self.settings.auxiliaryTaskStackSize as usize
    }
    /// Set the auxiliary task stack size in bytes.
    pub fn set_auxiliary_task_stack_size(&mut self, num: usize) {
        self.settings.auxiliaryTaskStackSize = num.try_into().unwrap();
    }
    /// Get the amp-mute pin, or `None` when disabled (negative sentinel).
    pub fn amp_mute_pin(&self) -> Option<i8> {
        match self.settings.ampMutePin {
            0..=127 => Some(self.settings.ampMutePin as _),
            _ => None,
        }
    }
    /// Set the amp-mute pin; `None` (or a negative value) disables it by
    /// storing the -1 sentinel.
    pub fn set_amp_mute_pin(&mut self, val: Option<i8>) {
        self.settings.ampMutePin = match val {
            Some(v) if v >= 0 => v as _,
            _ => -1,
        };
    }
    /// Get user selected board to work with (as opposed to detected hardware).
    pub fn board(&self) -> BelaHw {
        BelaHw::from_i32(self.settings.board).expect("unexpected board type")
    }
    /// Set user selected board to work with (as opposed to detected hardware).
    pub fn set_board(&mut self, board: BelaHw) {
        self.settings.board = board as _;
    }
}
impl Default for InitSettings {
    /// Returns settings pre-populated by the C library's own defaults.
    fn default() -> InitSettings {
        let settings = unsafe {
            // SAFETY(review): assumes `Bela_defaultSettings` fully initializes
            // the struct through the pointer before `assume_init` — confirm
            // against the bela_sys / C API contract.
            let mut settings = mem::MaybeUninit::<BelaInitSettings>::uninit();
            bela_sys::Bela_defaultSettings(settings.as_mut_ptr());
            settings.assume_init()
        };
        InitSettings { settings }
    }
}
|
use cosmwasm_std::{
Api, Binary, Env, Extern, HandleResponse, InitResponse,
MigrateResult, Querier, StdResult, Storage,
};
use cw20_base::contract::{handle as cw20_handle, init as cw20_init, query as cw20_query, migrate as cw20_migrate};
use cw20_base::msg::{HandleMsg, InitMsg, MigrateMsg, QueryMsg};
/// Contract instantiation — delegates straight to stock cw20-base `init`.
pub fn init<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    msg: InitMsg,
) -> StdResult<InitResponse> {
    cw20_init(deps, env, msg)
}
/// Message execution — delegates to cw20-base `handle`.
pub fn handle<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    msg: HandleMsg,
) -> StdResult<HandleResponse> {
    cw20_handle(deps, env, msg)
}
/// Read-only queries — delegates to cw20-base `query`.
pub fn query<S: Storage, A: Api, Q: Querier>(
    deps: &Extern<S, A, Q>,
    msg: QueryMsg,
) -> StdResult<Binary> {
    cw20_query(deps, msg)
}
/// Contract migration — delegates to cw20-base `migrate`.
pub fn migrate<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    msg: MigrateMsg,
) -> MigrateResult {
    cw20_migrate(deps, env, msg)
}
|
// Checks that absolute cargo-registry paths in rustc diagnostics are rewritten
// to the `$CARGO/<crate>` placeholder (dropping the `:line:col` suffix and the
// context lines that referenced it), while the rest of the message is kept.
test_normalize! {"
error[E0308]: mismatched types
  --> tests/compile-fail/surface_source_interval_badarg.rs:7:25
   |
 5 |       let mut df = hydroflow_syntax! {
   |  __________________-
 6 | |         // Should be a `Duration`.
 7 | |         source_interval(5) -> for_each(std::mem::drop);
   | |                         ^ expected `Duration`, found integer
 8 | |     };
   | |_____- arguments to this function are incorrect
   |
note: function defined here
  --> /home/runner/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-1.26.0/src/time/interval.rs:74:8
   |
74 | pub fn interval(period: Duration) -> Interval {
   |        ^^^^^^^^
" "
error[E0308]: mismatched types
  --> tests/compile-fail/surface_source_interval_badarg.rs:7:25
   |
 5 |       let mut df = hydroflow_syntax! {
   |  __________________-
 6 | |         // Should be a `Duration`.
 7 | |         source_interval(5) -> for_each(std::mem::drop);
   | |                         ^ expected `Duration`, found integer
 8 | |     };
   | |_____- arguments to this function are incorrect
   |
note: function defined here
  --> $CARGO/tokio-1.26.0/src/time/interval.rs
   |
   | pub fn interval(period: Duration) -> Interval {
   |        ^^^^^^^^
"}
|
use common::{Item, Result};
use std::sync::{Arc, Mutex};
/// Sink for `Item`s; implementors must be shareable across threads.
pub trait IOutput: Send + Sync + 'static {
    fn write(&mut self, item: Item) -> Result<()>;
}
/// Transparent wrapper around any `IOutput` implementation.
#[derive(Debug)]
pub struct Output<T: ?Sized + IOutput> {
    o: T,
}
// NOTE(review): these unsafe impls appear redundant — `IOutput` already
// requires `Send + Sync`, so `Output<T>` would be auto-`Send`/`Sync` without
// them. Kept byte-identical here; consider removing the `unsafe`.
unsafe impl<T: IOutput> Send for Output<T> {}
unsafe impl<T: IOutput> Sync for Output<T> {}
impl<T: IOutput> Output<T> {
    /// Wraps a concrete output implementation.
    pub fn new(o: T) -> Self {
        Self { o }
    }
}
impl<T: IOutput> IOutput for Output<T> {
    // delegate write IOutput.write
    fn write(&mut self, item: Item) -> Result<()> {
        self.o.write(item)
    }
}
/// Writes `line` to the shared output behind the mutex.
///
/// A failed (poisoned) lock is treated as best-effort: the write is skipped
/// and `Ok(())` is returned.
pub fn sync_via_output(line: &str, output: Arc<Mutex<dyn IOutput>>) -> Result<()> {
    match output.lock() {
        Ok(mut guard) => guard.write(Item::from(line)),
        Err(_) => Ok(()),
    }
}
/// Writes `line` directly to a concrete (non-shared) output.
pub fn via_output<T: IOutput>(line: &str, o: &mut T) -> Result<()> {
    o.write(Item::from(line))
}
/// Wraps an output in `Arc<Mutex<_>>` so it can be shared between threads.
pub fn new_sync_output<T: IOutput>(t: T) -> Arc<Mutex<T>> {
    Arc::new(Mutex::new(t))
}
/// Trivial output that prints each item to stdout (used by the tests below).
pub struct FakeOutput;
impl IOutput for FakeOutput {
    fn write(&mut self, item: Item) -> Result<()> {
        println!("{}", item.string());
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::thread;
    // Single-threaded path through `via_output`.
    #[test]
    fn it_works() {
        let output = &mut Output::new(FakeOutput);
        if let Err(e) = via_output(&r#"abc"#, output) {
            panic!("{}", e);
        }
    }
    // Exercises the Arc<Mutex<_>> path from two threads concurrently.
    #[test]
    fn it_works_with_multiple_threads() {
        let fake_output = Arc::new(Mutex::new(FakeOutput));
        let mut list = vec![];
        for _ in 0..2 {
            let output = fake_output.clone();
            list.push(thread::spawn(move || {
                if let Err(e) = sync_via_output(&r#"abc"#, output) {
                    panic!("{}", e);
                }
            }));
        }
        for j in list.into_iter() {
            j.join().unwrap()
        }
    }
}
|
use std::io::{Error as IOError, Write};
use std::sync::mpsc::Receiver;
use deco::{dprintln, dwrite, dwriteln};
use crate::dictionary::{Entry, Text};
use crate::errors::AppResultU;
use crate::pager::with_pager;
/// Drains lookup results from `rx` until the sender closes the channel,
/// printing each hit and a "not found" banner for misses.
pub fn main(rx: Receiver<Option<Vec<Entry>>>) -> AppResultU {
    for result in rx {
        match result {
            Some(entries) => print(entries)?,
            None => print_not_found(),
        }
    }
    Ok(())
}
/// Pretty-prints dictionary entries through the pager, coloring each text
/// fragment according to its kind via the `deco` formatting macros.
pub fn print(entries: Vec<Entry>) -> AppResultU {
    // Renders an entry key as black-on-yellow bold, with a newline.
    fn color_key<W: Write>(out: &mut W, key: &str) -> Result<(), IOError> {
        dwriteln!(out, [black on_yellow bold "{}" !] key)
    }
    // Renders one text fragment in the style assigned to its variant.
    fn color<W: Write>(out: &mut W, text: &Text) -> Result<(), IOError> {
        use self::Text::*;
        match text {
            Annot(s) => dwrite!(out, [yellow "{}" !] s),
            Class(s) => dwrite!(out, [blue "{}" !] s),
            Countability(c) => dwrite!(out, [yellow bold "{}" !] c),
            Definition(s) => dwrite!(out, [white bold "{}" !] s),
            Error(s) => dwrite!(out, [red bold "{}" !] s),
            Etymology(s) => dwrite!(out, [magenta bold "語源" ! " {}"] s),
            Example(s) => dwrite!(out, [green "{}" !] s),
            Information(s) => dwrite!(out, [cyan "{}" !] s),
            Note(s) => write!(out, "{}", s),
            Tag(s) => dwrite!(out, [red bold "{}" !] s),
            Word(s) => color_key(out, &s),
        }
    }
    with_pager(|out| {
        for entry in entries {
            color_key(out, &entry.key)?;
            for definition in &entry.definitions {
                for (index, text) in definition.content.iter().enumerate() {
                    // Space-separate fragments within one definition line.
                    if 0 < index {
                        write!(out, " ")?;
                    }
                    color(out, text)?;
                }
                writeln!(out)?;
            }
        }
        Ok(())
    })
}
/// Prints a highlighted "Not Found" banner to stdout.
pub fn print_not_found() {
    dprintln!([black on_red "{}" !] "Not Found");
}
|
use crate::data::GcpAccessToken;
use serenity::prelude::*;
use std::{sync::Arc, time::Duration};
/// Extracts the `Some` value from an `Option`, `continue`-ing the enclosing
/// loop when the value is `None`.
macro_rules! unwrap {
    ($e:expr) => {
        match $e {
            Some(e) => e,
            None => continue,
        }
    };
}
/// Spawns a background task that periodically renews the GCP access token.
///
/// Each cycle makes up to five renewal attempts (3 s apart); a failure
/// summary is logged only if all five attempts fail. The task then sleeps
/// 1000 s before the next cycle.
pub async fn renew_token(ctx: Arc<Context>) {
    // `ctx` is already owned here, so it can be moved into the task directly
    // (the previous `Arc::clone(&ctx)` was redundant).
    tokio::spawn(async move {
        loop {
            info!("Renewing access token");
            let data = ctx.data.read().await;
            let token = unwrap!(data.get::<GcpAccessToken>());
            let mut renewed = false;
            for _ in 0..5i32 {
                let mut token = token.lock().await;
                if token.renew_token().await.is_err() {
                    warn!("Renew token failed");
                    tokio::time::delay_for(Duration::from_secs(3)).await;
                } else {
                    info!("New Token is: {:?}", token.show());
                    renewed = true;
                    break;
                }
            }
            // Bug fix: this log line used to sit inside the retry loop, so it
            // fired after every single failed attempt; it is only meaningful
            // once all five retries have been exhausted.
            if !renewed {
                error!("Renew token failed for 5 times")
            }
            tokio::time::delay_for(Duration::from_secs(1000)).await;
        }
    });
}
|
#![no_std]
#![no_main]
#![feature(asm)]
#![feature(abi_x86_interrupt)]
#![feature(custom_test_frameworks)]
#![test_runner(test_runner)]
#![reexport_test_harness_main = "test_main"]
use blanc_os::test_runner;
use coop::{executor::Executor, Task};
use memory::{allocator, phys::PhysFrameAllocator};
use core::panic::PanicInfo;
use serial::{serial_print, serial_println};
use bootloader::{entry_point, BootInfo};
// Macro for pointing to where the entry point function is
entry_point!(main);
static HELLO_WORLD: &[u8] =
include_bytes!("../applications/hello_world/target/hello_world/debug/hello_world");
/// Kernel entry point: brings up the logger, core OS state, memory (paging,
/// physical frame allocator, heap), optionally runs the test harness, then
/// drives the cooperative task executor forever.
fn main(boot_info: &'static mut BootInfo) -> ! {
    let frame_buffer_info = boot_info.framebuffer.as_ref().unwrap().info();
    if let Some(frame_buffer) = boot_info.framebuffer.as_mut() {
        blanc_os::init_logger(frame_buffer.buffer_mut(), frame_buffer_info);
    }
    blanc_os::init();
    // SAFETY(review): assumes the bootloader-provided recursive page-table
    // index is valid for the current address space — confirm in `memory::init`.
    unsafe { memory::init(boot_info.recursive_index) };
    PhysFrameAllocator::init(&boot_info.memory_regions);
    allocator::init_heap().expect("Heap did not properly map");
    #[cfg(test)]
    test_main();
    let mut executor = Executor::new();
    executor.spawn(Task::new(coop::keyboard::print_keypresses()));
    executor.spawn(Task::new(coop::mouse::print_mouse()));
    executor.run();
}
// Loads the bundled hello_world ELF as a task and jumps into it.
#[test_case]
fn test_jump_to_elf() {
    use task::elf::align_bin;
    use task::task::Ring;
    use task::task::Task;
    let elf = task::task::Task::binary(Some("hello_world"), HELLO_WORLD, Some(Ring::Ring0), None);
    unsafe {
        // NOTE(review): `read_raw() ^ 2 ^ 11` XORs EFER with 9 (2 XOR 11 == 9).
        // Toggling a single EFER bit (e.g. NXE, bit 11) would be `^ (1 << 11)`;
        // confirm the intended bit before relying on this.
        x86_64::registers::control::Efer::write_raw(
            x86_64::registers::control::Efer::read_raw() ^ 2 ^ 11,
        );
        // Jump into the loaded ELF's entry point; control does not return here.
        asm!(
            "jmp {}",
            in(reg) elf.entry_point()
        );
    }
}
/// Forwards panics to the shared test panic handler (prints and halts).
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    blanc_os::test_panic_handler(info)
}
|
#[cfg(feature = "sled-storage")]
use gluesql::{parse, Glue, Payload::*, Query};
use sqlparser::ast::{OrderByExpr, Statement};
use std::rc::Rc;
use crate::applic_folder::compileargs::*;
use crate::applic_folder::get_storage::get_storage;
use crate::applic_folder::orderby::*;
use crate::applic_folder::show_select::*;
/***************************************************/
/// Parses `one_query` into SQL statements, executes each against the chosen
/// storage, and prints the outcome (selected rows are re-ordered client-side
/// using the statement's ORDER BY clause).
pub fn go_exec(how: Rc<HowToOpenStorage>, one_query: String, seen: &Seen) {
    let storage = get_storage(how);
    let mut glue = Glue::new(storage);
    // NOTE: both parse() and execute() are unwrapped — a malformed query or
    // execution error panics rather than being reported.
    for query in parse(&one_query).unwrap() {
        match &glue.execute(&query).unwrap() {
            Select { labels, rows } => {
                // Recover the ORDER BY clause from the parsed statement so the
                // rows can be sorted locally before display.
                let mut order_by: Vec<OrderByExpr> = Vec::new();
                match query {
                    Query(xxx) => {
                        match &xxx {
                            Statement::Query(yyy) => {
                                order_by = yyy.order_by.clone();
                            }
                            _ => {
                                eprintln!("???{}", xxx);
                            }
                        };
                    }
                }
                let rows = orderby(labels.to_vec(), rows.to_vec(), &order_by);
                show_select(
                    labels.to_vec(),
                    rows.to_vec(),
                    // Optionally echo the original SQL alongside the results.
                    if seen.printsqlstm {
                        Some(&one_query)
                    } else {
                        None
                    },
                )
            }
            Insert(n) => eprintln!("{} rows inserted", n),
            Delete(n) => eprintln!("{} rows deleted", n),
            Update(n) => eprintln!("{} rows update", n),
            Create => eprintln!("Table created"),
            DropTable => eprintln!("Table dropped"),
            _ => eprintln!("Not yet fom glue.execute"),
        }
    }
}
|
use bytes::Bytes;
/// Commands understood by the server, parsed from the wire protocol.
#[derive(Debug)]
pub enum Command {
    /* Client initiated commands.
     * These commands are initiated by clients
     * which connect to the server.*/
    /// "GET" command: fetch the value stored under `key`.
    Get {
        key: String,
        id: i64,
    },
    /// "SET" command: store `value` under `key`.
    Set {
        key: String,
        value: Bytes,
        id: i64,
    },
    /// "INFO" command: request server information.
    Info {
        id: i64,
    },
    /// "REPL_JOIN" command: ask to join replication at `addr:port`.
    ReplJoin {
        addr: String,
        port: u16,
        id: i64,
    },
    /* Node initiated commands.
     * These commands are initiated by other peer nodes
     * which connect to the server. */
    /// Replication liveness ping from a peer node.
    ReplPing {
        id: i64,
    },
    /// Catch-all for requests that could not be parsed; carries the reason.
    BadCommand {
        message: String,
    }
} |
use std::iter;
use crate::jit;
// Size of the "inc rax; jmp" prefix of an entry stub (3 + 5 bytes).
const JMP_SIZE: usize = 8;
/// Per-slot entry stub. When the stubs are chained, each stub's 5-byte jmp
/// skips its own trailing `xor eax, eax` and lands on the next stub's
/// `inc rax`, so rax ends up counting how many stubs were skipped.
const TRAMPOLINE_ENTRYPOINT: [u8; 10] = [
    0x48, 0xff, 0xc0, // inc rax
    0xe9, 0x02, 0x00, 0x00, 0x00, // jmp 0x7
    0x31, 0xc0, // xor eax, eax
];
//const TRAMPOLINE_END: [u8; 19] = [
//    0xe8, 0x00, 0x00, 0x00, 0x00, // call 0xf
//    0x5b, // pop rbx
//    0x48, 0x83, 0xc3, 0x17, // add ebx, 0x17
//    0x48, 0x8d, 0x04, 0xc3, // lea rax, [rbx+rax*8]
//    0x48, 0x8b, 0x00, // mov rax, QWORD PTR [rax]
//    0x48, 0xbb, // mov rbx, XXXXXXXXXXXXX
//];
const TRAMPOLINE_END_LEN: usize = 19;
/// Shared tail: `call`/`pop rbx` obtains the current instruction address,
/// advances it past this tail, indexes by rax to form the slot argument in
/// rdi, then loads the (appended) address of `call` into rbx for the final
/// indirect jump.
const TRAMPOLINE_END: [u8; TRAMPOLINE_END_LEN] = [
    0xe8, 0x00, 0x00, 0x00, 0x00, // call 0xf
    0x5b, // pop rbx
    0x48, 0x83, 0xc3, TRAMPOLINE_END_LEN as u8 + 5, // add ebx, 0x17
    0x48, 0x8d, 0x04, 0xc3, // lea rax, [rbx+rax*8]
    //0x48, 0x89, 0xDF, // mov rdi, rbx
    0x48, 0x89, 0xC7, // mov rdi, rax
    //0x48, 0x8b, 0x00, // mov rdi, QWORD PTR [rax]
    0x48, 0xbb, // mov rbx, XXXXXXXXXXXXX
];
// x86-64 pointer width, appended after TRAMPOLINE_END as the jump target.
const SIZE_OF_PTR: usize = 8;
// jmp rbx — transfers control to the address loaded by the tail.
const JMP_RBX: [u8; 2] = [0xff, 0xe3];
const SIZE_OF_TRAMPOLINE_END: usize = TRAMPOLINE_END.len() + SIZE_OF_PTR + JMP_RBX.len();
const NOP: u8 = 0x90;
/// Repeats the 10-byte entry stub once per trampoline slot, dropping the
/// leading `inc rax; jmp` (JMP_SIZE bytes) of the first copy so execution
/// falls straight into it.
fn repeat_entrypoint() -> impl Iterator<Item = u8> {
    iter::repeat(TRAMPOLINE_ENTRYPOINT.iter().cloned())
        .take(TRAMPOLINE_CAPACITY)
        .flatten()
        .skip(JMP_SIZE) // skip jmp on first iteration
}
// Bytes contributed by the first (truncated) stub.
const FIRST_ENTRYPOINT_SIZE: usize = TRAMPOLINE_ENTRYPOINT.len() - JMP_SIZE;
// Page bytes available for entry stubs once the shared tail is reserved.
const ENTRYPOINT_AREA_SIZE: usize = jit::PAGE_SIZE - SIZE_OF_TRAMPOLINE_END;
/// Number of trampoline slots that fit in one JIT code page.
pub const TRAMPOLINE_CAPACITY: usize = {
    ((ENTRYPOINT_AREA_SIZE - FIRST_ENTRYPOINT_SIZE) / TRAMPOLINE_ENTRYPOINT.len()) + 1
};
// Leading NOP padding so the stub chain ends exactly at the shared tail.
const NOP_COUNT: usize = jit::PAGE_SIZE - ((
    (TRAMPOLINE_CAPACITY * TRAMPOLINE_ENTRYPOINT.len()) - JMP_SIZE
) + SIZE_OF_TRAMPOLINE_END);
/// Target of every generated trampoline: invokes the stored closure.
/// The first C-ABI argument (presumably built in rdi by the trampoline tail)
/// is interpreted as a pointer to the leaked `Box<Box<dyn Fn()>>` slot.
pub extern "C" fn call(boxed_closure: &&&dyn Fn()) {
    boxed_closure();
}
/// Produces exactly one code page worth of bytes: NOP padding, the chained
/// entry stubs, the shared tail, the native-endian address of `call`, then
/// `jmp rbx` (the length invariant is asserted in `test_build_payload`).
fn generate_trampolines() -> impl Iterator<Item = u8> {
    let call_bytes = usize::to_ne_bytes(call as *const () as usize).iter().cloned().collect::<Vec<_>>();
    iter::repeat(NOP).take(NOP_COUNT)
        .chain(repeat_entrypoint())
        .chain(TRAMPOLINE_END.iter().cloned())
        .chain(call_bytes.into_iter())
        .chain(JMP_RBX.iter().cloned())
}
/// Owns the JIT code/data page pair holding the generated trampoline slots.
pub struct TrampolineSet {
    jit_mem: jit::JitCodeDataPagePair
}
impl<'a> TrampolineSet {
    /// Maximum number of trampoline slots per page.
    pub const CAPACITY: usize = TRAMPOLINE_CAPACITY;
    /// Allocates a fresh page pair and copies the trampoline bytes into it.
    pub fn new() -> Self {
        let mut jit_mem = jit::JitCodeDataPagePair::new();
        unsafe {
            // SAFETY(review): relies on unlock()/lock() toggling the page
            // between writable and executable around the copy — confirm in
            // `jit::JitCodeDataPagePair`.
            jit_mem.unlock();
            jit_mem.code_as_slice()
                .iter_mut()
                .zip(generate_trampolines())
                .for_each(|(code, trampoline)| {
                    *code = trampoline;
                });
            jit_mem.lock();
        }
        Self { jit_mem }
    }
    /// Returns a callable pointer to slot `index`'s entry stub.
    /// Panics when `index >= TRAMPOLINE_CAPACITY`.
    pub fn get_slot_fn(&mut self, index: usize) -> unsafe extern "C" fn() {
        if index >= TRAMPOLINE_CAPACITY {
            panic!("Index out of bounds of the max number")
        }
        // Slots are laid out back-to-front: higher indices start earlier in
        // the page (index 0 maps to the largest offset).
        let index = index + 1;
        let offset = ((TRAMPOLINE_CAPACITY - index) * TRAMPOLINE_ENTRYPOINT.len()) + NOP_COUNT;
        self.jit_mem.get_func_ptr(offset)
    }
    /// Stores the closure invoked by slot `index`. The box is leaked so the
    /// generated code can keep a stable raw pointer to it (the allocation is
    /// never reclaimed).
    pub fn set_slot_fn(&mut self, index: usize, closure: Box<Box<dyn Fn() + 'a>>) {
        unsafe {
            let x = self.jit_mem.data_as_mut_slice();
            x[index] = Box::leak(closure);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The generated byte stream must fill the code page exactly.
    #[test]
    fn test_build_payload() {
        assert_eq!(
            generate_trampolines().collect::<Vec<_>>().len(),
            jit::PAGE_SIZE
        )
    }
    // End-to-end: store a closure in slot 0 and invoke it via the trampoline.
    #[test]
    fn test_trampoline_set() {
        let mut tramp_set = TrampolineSet::new();
        let x = 10;
        tramp_set.set_slot_fn(0, Box::new(Box::new(|| { println!("test {}", x) })));
        unsafe {
            let func = tramp_set.get_slot_fn(0);
            dbg!(func);
            func();
        }
    }
}
// e9 02 00 00 00 jmp 0x7
// 31 c0 xor eax,eax
// 48 ff c0 inc rax
//
// e8 00 00 00 00 call 0xf
// 5b pop rbx
// 83 c3 09 add ebx,0x9
// 48 8d 04 c3 lea rax,[rbx+rax*8]
// 48 8b 00 mov rax,QWORD PTR [rax]
//
|
mod context;
mod device;
mod swapchain;
pub mod sys;
mod text;
pub use device::Device;
pub use swapchain::Swapchain;
|
//! Exposes utility constants, enums, classes and functions for easy use.
mod event_loop;
mod errors;
pub use self::event_loop::EventLoop;
pub use self::errors::AppError;
|
use serde::Serialize;
use time::PreciseTime;
/// An in-progress named timing; recorded into its `TimerTree` on drop.
pub struct OpenTimer<'a> {
    name: &'static str,
    timer_tree: &'a mut TimerTree,
    start: PreciseTime,
    depth: u32, // nesting level, 0 at the root
}
impl<'a> OpenTimer<'a> {
    /// Starts timing a new named subtask
    ///
    /// The timer is stopped automatically
    /// when the `OpenTimer` is dropped.
    pub fn open(&mut self, name: &'static str) -> OpenTimer {
        OpenTimer {
            name,
            timer_tree: self.timer_tree,
            start: PreciseTime::now(),
            depth: self.depth + 1,
        }
    }
}
impl<'a> Drop for OpenTimer<'a> {
    /// Records the elapsed time for this task into the tree. Because inner
    /// timers drop first, child timings precede their parent in `timings`.
    fn drop(&mut self) {
        self.timer_tree.timings.push(Timing {
            name: self.name,
            duration: self
                .start
                .to(PreciseTime::now())
                .num_microseconds()
                .unwrap(),
            depth: self.depth,
        });
    }
}
/// Timing recording
#[derive(Debug, Serialize)]
pub struct Timing {
    name: &'static str,
    duration: i64, // microseconds
    depth: u32,
}
/// Timer tree
#[derive(Debug, Serialize)]
pub struct TimerTree {
    timings: Vec<Timing>,
}
impl TimerTree {
    /// Returns the total time elapsed in microseconds
    ///
    /// Relies on the last recorded timing being the outermost one (parents
    /// drop after children). Panics if no timing has been recorded yet.
    pub fn total_time(&self) -> i64 {
        self.timings.last().unwrap().duration
    }
    /// Open a new named subtask
    pub fn open(&mut self, name: &'static str) -> OpenTimer {
        OpenTimer {
            name,
            timer_tree: self,
            start: PreciseTime::now(),
            depth: 0,
        }
    }
}
impl Default for TimerTree {
    /// An empty tree with no recorded timings.
    fn default() -> TimerTree {
        TimerTree {
            timings: Vec::new(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Nested scopes a > b > (c, d) should produce exactly four timings.
    #[test]
    fn test_timer() {
        let mut timer_tree = TimerTree::default();
        {
            let mut a = timer_tree.open("a");
            {
                let mut ab = a.open("b");
                {
                    let _abc = ab.open("c");
                }
                {
                    let _abd = ab.open("d");
                }
            }
        }
        assert_eq!(timer_tree.timings.len(), 4);
    }
}
|
use crate::server::ChatServer;
use actix::prelude::*;
use actix::Handler;
/// Actor message requesting the names of all chat rooms.
pub struct ListRooms;
impl actix::Message for ListRooms {
    type Result = Vec<String>;
}
impl Handler<ListRooms> for ChatServer {
    type Result = MessageResult<ListRooms>;

    /// Replies with the name of every room currently known to the server.
    fn handle(&mut self, _: ListRooms, _: &mut Context<Self>) -> Self::Result {
        let names: Vec<String> = self.rooms.keys().cloned().collect();
        MessageResult(names)
    }
}
|
/// A 4x4 2048 board packed into a u64: one nibble per cell holding the
/// tile's exponent (0 = empty, n = tile value 2^n).
#[derive(Clone, Copy, PartialEq, Hash)]
pub struct Board {
    pub board: u64,
}
impl Board {
    /// Transposes the 4x4 nibble grid using masked shifts.
    pub fn transpose(&self) -> Board {
        // https://github.com/nneonneo/2048-ai
        let x = self.board;
        let a1 = x & 0xF0F00F0FF0F00F0F;
        let a2 = x & 0x0000F0F00000F0F0;
        let a3 = x & 0x0F0F00000F0F0000;
        let a = a1 | (a2 << 12) | (a3 >> 12);
        let b1 = a & 0xFF00FF0000FF00FF;
        let b2 = a & 0x00FF00FF00000000;
        let b3 = a & 0x00000000FF00FF00;
        Board {
            board: b1 | (b2 >> 24) | (b3 << 24),
        }
    }
    /// Counts empty cells (zero nibbles) without iterating the 16 slots.
    pub fn count_empty(&self) -> usize {
        // https://github.com/nneonneo/2048-ai
        let mut x = self.board;
        if x == 0 {
            return 16;
        }
        x |= (x >> 2) & 0x3333333333333333;
        x |= x >> 1;
        x = !x & 0x1111111111111111;
        // At this point each nibble is:
        // 0 if the original nibble was non-zero
        // 1 if the original nibble was zero
        // Next sum them all
        x += x >> 32;
        x += x >> 16;
        x += x >> 8;
        x += x >> 4;
        // Since the board is non-empty, the answer is at most 15,
        // so overflow is not a problem.
        (x & 0xf) as usize
    }
}
/// Spreads a packed 16-bit row into column layout: nibble i of `row` lands
/// in bits 16*i .. 16*i+4 of the result (one nibble per 16-bit lane).
fn row_to_col(row: u16) -> u64 {
    let r = u64::from(row);
    let spread = r | (r << 12) | (r << 24) | (r << 36);
    spread & 0x000F_000F_000F_000F
}
/// Reverses the four nibbles of a row: [a b c d] becomes [d c b a].
fn reverse_row(row: u16) -> u16 {
    let n3_to_n0 = row >> 12;
    let n2_to_n1 = (row >> 4) & 0x00F0;
    let n1_to_n2 = (row << 4) & 0x0F00;
    let n0_to_n3 = row << 12;
    n3_to_n0 | n2_to_n1 | n1_to_n2 | n0_to_n3
}
/// Precomputed move tables: for each possible 16-bit row, the XOR delta that
/// applying gravity produces, in row layout (gravity1/2) and column layout
/// (gravity3/4, for the transposed axes).
pub struct BoardTable {
    gravity1: [u16; 0x10000],
    gravity2: [u16; 0x10000],
    gravity3: [u64; 0x10000],
    gravity4: [u64; 0x10000],
}
impl BoardTable {
    /// Builds all four 65536-entry tables by running the scalar `gravity4`
    /// helper on every possible row once.
    pub fn new() -> Self {
        let mut g1 = [0; 0x10000];
        let mut g2 = [0; 0x10000];
        let mut g3 = [0; 0x10000];
        let mut g4 = [0; 0x10000];
        for idx in 0..0x10000usize {
            let row = idx as u16;
            // Unpack the four nibbles of the row.
            let a = (row & 0xF) as u8;
            let b = ((row >> 4) & 0xF) as u8;
            let c = ((row >> 8) & 0xF) as u8;
            let d = (row >> 12) as u8;
            let (a, b, c, d) = gravity4(a, b, c, d);
            let result = (a as u16) | ((b as u16) << 4) | ((c as u16) << 8) | ((d as u16) << 12);
            // The reversed tables serve the opposite move direction.
            let rev_result = reverse_row(result);
            let rev_row = reverse_row(row);
            let rev_idx = rev_row as usize;
            // Stored as XOR deltas: apply with `row ^ table[row]`.
            g1[idx] = row ^ result;
            g2[rev_idx] = rev_row ^ rev_result;
            g3[idx] = row_to_col(row) ^ row_to_col(result);
            g4[rev_idx] = row_to_col(rev_row) ^ row_to_col(rev_result);
        }
        Self {
            gravity1: g1,
            gravity2: g2,
            gravity3: g3,
            gravity4: g4,
        }
    }
    // Applies the row-wise move by XORing each 16-bit row with its delta.
    fn gravity1(&self, board: Board) -> Board {
        let mut x = board.board;
        x ^= (self.gravity1[((x >> (0 * 16)) & 0xFFFF) as usize] as u64) << (0 * 16);
        x ^= (self.gravity1[((x >> (1 * 16)) & 0xFFFF) as usize] as u64) << (1 * 16);
        x ^= (self.gravity1[((x >> (2 * 16)) & 0xFFFF) as usize] as u64) << (2 * 16);
        x ^= (self.gravity1[((x >> (3 * 16)) & 0xFFFF) as usize] as u64) << (3 * 16);
        Board { board: x }
    }
    // Same as gravity1 but for the opposite direction (reversed table).
    fn gravity2(&self, board: Board) -> Board {
        let mut x = board.board;
        x ^= (self.gravity2[((x >> (0 * 16)) & 0xFFFF) as usize] as u64) << (0 * 16);
        x ^= (self.gravity2[((x >> (1 * 16)) & 0xFFFF) as usize] as u64) << (1 * 16);
        x ^= (self.gravity2[((x >> (2 * 16)) & 0xFFFF) as usize] as u64) << (2 * 16);
        x ^= (self.gravity2[((x >> (3 * 16)) & 0xFFFF) as usize] as u64) << (3 * 16);
        Board { board: x }
    }
    // Column-wise move: rows of the transpose index column-layout deltas.
    fn gravity3(&self, board: Board) -> Board {
        let mut x = board.board;
        let t = board.transpose().board;
        x ^= self.gravity3[((t >> (0 * 16)) & 0xFFFF) as usize] << (0 * 4);
        x ^= self.gravity3[((t >> (1 * 16)) & 0xFFFF) as usize] << (1 * 4);
        x ^= self.gravity3[((t >> (2 * 16)) & 0xFFFF) as usize] << (2 * 4);
        x ^= self.gravity3[((t >> (3 * 16)) & 0xFFFF) as usize] << (3 * 4);
        Board { board: x }
    }
    // Same as gravity3 but for the opposite direction (reversed table).
    fn gravity4(&self, board: Board) -> Board {
        let mut x = board.board;
        let t = board.transpose().board;
        x ^= self.gravity4[((t >> (0 * 16)) & 0xFFFF) as usize] << (0 * 4);
        x ^= self.gravity4[((t >> (1 * 16)) & 0xFFFF) as usize] << (1 * 4);
        x ^= self.gravity4[((t >> (2 * 16)) & 0xFFFF) as usize] << (2 * 4);
        x ^= self.gravity4[((t >> (3 * 16)) & 0xFFFF) as usize] << (3 * 4);
        Board { board: x }
    }
}
/// Collapses a pair of tile exponents toward index 0 (2048 "gravity").
/// 0 denotes an empty cell; merging two equal non-empty tiles bumps the
/// exponent by one.
fn gravity2(a: u8, b: u8) -> (u8, u8) {
    if a == 0 {
        return (b, 0);
    }
    if a == b {
        return (a + 1, 0);
    }
    (a, b)
}

/// Collapses three tile exponents toward index 0, merging across gaps.
fn gravity3(a: u8, b: u8, c: u8) -> (u8, u8, u8) {
    if a == 0 {
        let (x, y) = gravity2(b, c);
        return (x, y, 0);
    }
    if a == b {
        return (a + 1, c, 0);
    }
    if b == 0 && a == c {
        return (a + 1, 0, 0);
    }
    let (x, y) = gravity2(b, c);
    (a, x, y)
}

/// Collapses four tile exponents toward index 0, merging across gaps.
fn gravity4(a: u8, b: u8, c: u8, d: u8) -> (u8, u8, u8, u8) {
    if a == 0 {
        let (x, y, z) = gravity3(b, c, d);
        return (x, y, z, 0);
    }
    if a == b {
        let (x, y) = gravity2(c, d);
        return (a + 1, x, y, 0);
    }
    if b == 0 && a == c {
        return (a + 1, d, 0, 0);
    }
    if b == 0 && c == 0 && a == d {
        return (a + 1, 0, 0, 0);
    }
    let (x, y, z) = gravity3(b, c, d);
    (a, x, y, z)
}
/// Read-only view of a board's 16 tile exponents, packed one nibble each
/// into a `u64` (tile i occupies bits 4*i .. 4*i+4).
pub struct BoardTiles(u64);

impl BoardTiles {
    /// Iterates the 16 tile exponents in index order (low nibble first).
    pub fn iter(self) -> impl Iterator<Item = u8> {
        (0..16).map(move |i| (self.0 >> (4 * i)) as u8 & 0xf)
    }

    /// Unpacks all 16 tile exponents into an array, in index order.
    pub fn as_array(&self) -> [u8; 16] {
        let mut tiles = [0u8; 16];
        for (i, slot) in tiles.iter_mut().enumerate() {
            *slot = (self.0 >> (4 * i)) as u8 & 0xf;
        }
        tiles
    }
}
impl Board {
    /// An empty board (all 16 cells zero).
    pub fn new() -> Self {
        Self { board: 0 }
    }
    /// Packs 16 tile exponents (index order, low 4 bits of each) into a board.
    pub fn from_array(tiles: [u8; 16]) -> Self {
        Self {
            board: (tiles[0] as u64) << (0 * 4)
                | (tiles[1] as u64) << (1 * 4)
                | (tiles[2] as u64) << (2 * 4)
                | (tiles[3] as u64) << (3 * 4)
                | (tiles[4] as u64) << (4 * 4)
                | (tiles[5] as u64) << (5 * 4)
                | (tiles[6] as u64) << (6 * 4)
                | (tiles[7] as u64) << (7 * 4)
                | (tiles[8] as u64) << (8 * 4)
                | (tiles[9] as u64) << (9 * 4)
                | (tiles[10] as u64) << (10 * 4)
                | (tiles[11] as u64) << (11 * 4)
                | (tiles[12] as u64) << (12 * 4)
                | (tiles[13] as u64) << (13 * 4)
                | (tiles[14] as u64) << (14 * 4)
                | (tiles[15] as u64) << (15 * 4),
        }
    }
    /// Read-only view of the packed tiles.
    pub fn tiles(&self) -> BoardTiles {
        BoardTiles(self.board)
    }
    /// Iterates the boards reachable by the four moves, skipping moves that
    /// leave the board unchanged.
    pub fn iter_moves(self, t: &BoardTable) -> impl Iterator<Item = Board> {
        BoardMovesTable {
            board: self,
            boards: [
                t.gravity1(self),
                t.gravity2(self),
                t.gravity3(self),
                t.gravity4(self),
            ],
            i: 0,
        }
    }
    /// Iterates placement handles, one for every empty cell of this board.
    pub fn iter_growth(self) -> impl Iterator<Item = BoardGrowth> {
        self.tiles().iter().enumerate().filter_map(move |(i, v)| {
            if v != 0 {
                None
            } else {
                Some(BoardGrowth {
                    board: self,
                    i: i as u8,
                })
            }
        })
    }
}
/// Iterator over the four post-move boards, yielding only those that differ
/// from the starting position (a no-op move is not a legal 2048 move).
struct BoardMovesTable {
    board: Board,
    boards: [Board; 4],
    i: usize,
}
impl Iterator for BoardMovesTable {
    type Item = Board;
    fn next(&mut self) -> Option<Board> {
        // Skip moves that left the board unchanged.
        while self.i < self.boards.len() && self.boards[self.i] == self.board {
            self.i += 1;
        }
        if self.i == self.boards.len() {
            return None;
        }
        let res = Some(self.boards[self.i]);
        self.i += 1;
        res
    }
}
/// Handle to one cell of a board, produced by `iter_growth` for empty cells.
pub struct BoardGrowth {
    board: Board,
    i: u8,
}
impl BoardGrowth {
    /// Returns the board with exponent `v` placed in this cell.
    /// Uses OR, so it assumes the target nibble is currently zero (which
    /// `iter_growth` guarantees).
    pub fn set(&self, v: u8) -> Board {
        let mut b = self.board.board;
        b |= (v as u64) << (4 * self.i);
        Board { board: b }
    }
}
impl std::fmt::Display for Board {
    /// Renders the board as a 4x4 grid of tile values (2^exponent), with
    /// blanks for empty cells and a blank line between rows.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        for (i, v) in self.tiles().iter().enumerate() {
            if v == 0 {
                write!(fmt, "     ")?;
            } else {
                write!(fmt, "{:^5}", 1 << v)?;
            }
            // End of a row (except the last): separate rows visually.
            if i < 15 && i % 4 == 3 {
                write!(fmt, "\n\n")?;
            }
        }
        Ok(())
    }
}
impl std::fmt::Debug for Board {
    /// Prints a copy-pasteable constructor expression for this board.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(fmt, "Board::from_array({:?})", self.tiles().as_array())
    }
}
|
extern crate aoc2017;
use aoc2017::days::day20;
/// Runs both parts of AoC 2017 day 20 against the puzzle input file.
fn main() {
    let input = aoc2017::read("input/day20.txt").unwrap();
    // 10_000 iterations for both parts — presumably enough for the
    // simulation to converge; confirm against the day20 implementation.
    let value1 = day20::part1::parse(&input, 10_000);
    let value2 = day20::part2::parse(&input, 10_000);
    println!("Day 20 part 1 value: {}", value1);
    println!("Day 20 part 2 value: {}", value2);
}
|
use std::cmp::Ordering;
use std::collections::{BinaryHeap, HashMap, VecDeque};
use std::fs::File;
use std::io::{BufRead, BufReader};
lazy_static! {
    /// Maps each lowercase letter to a distinct bit ('a' -> bit 0, ...),
    /// so sets of keys/doors fit in a single u32 bitmask.
    static ref ALPHABET_BITS: HashMap<char, u32> = "abcdefghijklmnopqrstuvwxyz"
        .chars()
        .enumerate()
        .map(|(i, c)| (c, 1u32 << i))
        .collect();
}
/// What a single maze tile contains.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
enum TileContent {
    Wall,
    Empty,
    Me,
    Key(char),
    Door(char),
}

impl From<char> for TileContent {
    /// Parses one map character: `#` wall, `.` floor, `@` start; lowercase
    /// letters are keys and everything else is treated as a door.
    fn from(x: char) -> Self {
        match x {
            '#' => Self::Wall,
            '.' => Self::Empty,
            '@' => Self::Me,
            c => {
                // NOTE(review): any char >= 'a' counts as a key — assumes the
                // input only ever contains letters here.
                if c >= 'a' {
                    Self::Key(c)
                } else {
                    Self::Door(c)
                }
            }
        }
    }
}

impl TileContent {
    /// Whether this tile can be stepped onto; only walls block movement.
    fn is_visitable(self) -> bool {
        // Idiom: `matches!` replaces the two-arm bool match.
        !matches!(self, Self::Wall)
    }
}
/// The four orthogonal movement directions.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Direction {
    Up,
    Down,
    Left,
    Right,
}

/// Grid coordinate as (x, y).
#[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]
struct Position(usize, usize);

impl Position {
    /// The adjacent position one step in direction `d`. Coordinates are
    /// unsigned, so stepping Down at y == 0 or Left at x == 0 underflows
    /// (panics in debug builds), exactly as before.
    fn step(&self, d: Direction) -> Self {
        let Position(x, y) = *self;
        match d {
            Direction::Up => Position(x, y + 1),
            Direction::Down => Position(x, y - 1),
            Direction::Right => Position(x + 1, y),
            Direction::Left => Position(x - 1, y),
        }
    }

    /// Iterates the four orthogonal neighbours of this position, in the
    /// order Up, Down, Left, Right.
    fn neighbours(&self) -> impl Iterator<Item = Position> + '_ {
        let dirs: &[Direction] = &[
            Direction::Up,
            Direction::Down,
            Direction::Left,
            Direction::Right,
        ];
        dirs.iter().map(move |&d| self.step(d))
    }
}
/// BFS bookkeeping: a reachable position, its distance from the start, and
/// the bitmask of doors crossed on the way there.
#[derive(Clone, Debug, PartialEq, Eq)]
struct Tile {
    position: Position,
    distance: usize,
    doors: u32,
}
impl Tile {
    fn new(position: Position, distance: usize, doors: u32) -> Self {
        Self {
            position,
            distance,
            doors,
        }
    }
}
/// Dijkstra node for part one: current position plus the bitmask of
/// collected keys.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct SearchNode {
    position: Position,
    collected: u32,
}
impl SearchNode {
    /// Starting node: at `position` with no keys collected.
    fn new(position: Position) -> Self {
        Self {
            position,
            collected: 0,
        }
    }
}
/// A search node paired with the distance travelled to reach it.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct SearchState {
    node: SearchNode,
    distance: usize,
}
impl SearchState {
    fn new(node: SearchNode, distance: usize) -> Self {
        Self { node, distance }
    }
}
impl Ord for SearchState {
    // Reversed comparison so the std max-heap (`BinaryHeap`) pops the
    // smallest distance first.
    fn cmp(&self, other: &Self) -> Ordering {
        other.distance.cmp(&self.distance)
    }
}
impl PartialOrd for SearchState {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Dijkstra node for part two: one position per robot (four quadrants) plus
/// the shared bitmask of collected keys.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct MultiSearchNode {
    positions: [Position; 4],
    collected: u32,
}
impl MultiSearchNode {
    /// Starting node: robots at `positions` with no keys collected.
    fn new(positions: [Position; 4]) -> Self {
        Self {
            positions,
            collected: 0,
        }
    }
}
/// A multi-robot node paired with the total distance travelled so far.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
struct MultiState {
    node: MultiSearchNode,
    distance: usize,
}
impl MultiState {
    fn new(node: MultiSearchNode, distance: usize) -> Self {
        Self { node, distance }
    }
}
impl Ord for MultiState {
    // Reversed comparison so `BinaryHeap` behaves as a min-heap on distance.
    fn cmp(&self, other: &Self) -> Ordering {
        other.distance.cmp(&self.distance)
    }
}
impl PartialOrd for MultiState {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Solves AoC 2019 day 18: shortest path collecting every key in a maze of
/// doors, first with one explorer, then with four quadrant robots.
pub(crate) fn day18() {
    let input = File::open("data/day18.txt").expect("Failed to open input");
    let buffered = BufReader::new(input);
    // Parse the map into position -> tile, remembering the start tile.
    let mut map = HashMap::new();
    let mut me = None;
    for (y, line) in buffered.lines().enumerate() {
        for (x, c) in line.unwrap().chars().enumerate() {
            let position = Position(x, y);
            let content = TileContent::from(c);
            if content == TileContent::Me {
                me.replace(position);
            }
            map.insert(position, content);
        }
    }
    let me = me.unwrap();
    // Position of every key, and the bitmask with all key bits set (goal).
    let key_positions: HashMap<char, Position> = map
        .iter()
        .filter_map(|(&position, &content)| match content {
            TileContent::Key(c) => Some((c, position)),
            _ => None,
        })
        .collect();
    let all_keys = ALPHABET_BITS.values().fold(0, |acc, bit| acc | bit);
    // Part one: find shortest path to a state where we've collected all keys.
    // Dijkstra over (position, collected-keys) states; `distance_table`
    // memoizes per-position BFS results, `cache` prunes dominated states.
    let part_one = map.clone();
    let node = SearchNode::new(me);
    let mut distance_table = HashMap::new();
    let mut cache = HashMap::new();
    let mut best_distance = None;
    let state = SearchState::new(node, 0);
    let mut queue = BinaryHeap::new();
    queue.push(state);
    while let Some(state) = queue.pop() {
        if state.node.collected == all_keys {
            best_distance.replace(state.distance);
            break;
        }
        let entry = cache.entry(state.node).or_insert(std::usize::MAX);
        if state.distance >= *entry {
            continue;
        };
        *entry = state.distance;
        let distances = distance_table
            .entry(state.node.position)
            .or_insert_with(|| distances_to_keys(&part_one, state.node.position));
        for (key, (step, doors)) in distances {
            let key_bit = ALPHABET_BITS[key];
            // Skip keys we already hold or whose doors we cannot yet open.
            if (state.node.collected & key_bit) != 0 {
                continue;
            }
            if (*doors & !state.node.collected) != 0 {
                continue;
            }
            let mut new_state = state;
            new_state.node.collected |= key_bit;
            new_state.node.position = key_positions[key];
            new_state.distance += *step;
            queue.push(new_state);
        }
    }
    let best_distance = best_distance.unwrap();
    println!("Part one answer is: {}", best_distance);
    // Part two is much the same.
    // Wall off the centre and start four robots on the diagonals.
    let mut part_two = map;
    part_two.insert(me, TileContent::Wall);
    part_two.insert(me.step(Direction::Up), TileContent::Wall);
    part_two.insert(me.step(Direction::Down), TileContent::Wall);
    part_two.insert(me.step(Direction::Left), TileContent::Wall);
    part_two.insert(me.step(Direction::Right), TileContent::Wall);
    let starts = [
        me.step(Direction::Up).step(Direction::Left),
        me.step(Direction::Up).step(Direction::Right),
        me.step(Direction::Down).step(Direction::Left),
        me.step(Direction::Down).step(Direction::Right),
    ];
    let node = MultiSearchNode::new(starts);
    let mut distance_table = HashMap::new();
    let mut cache = HashMap::new();
    let mut best_distance = None;
    let state = MultiState::new(node, 0);
    let mut queue = BinaryHeap::new();
    queue.push(state);
    while let Some(state) = queue.pop() {
        if state.node.collected == all_keys {
            best_distance.replace(state.distance);
            break;
        }
        let entry = cache.entry(state.node).or_insert(std::usize::MAX);
        if state.distance >= *entry {
            continue;
        };
        *entry = state.distance;
        // Each transition moves exactly one robot to a reachable key.
        for (bot, position) in state.node.positions.iter().enumerate() {
            let distances = distance_table
                .entry(*position)
                .or_insert_with(|| distances_to_keys(&part_two, *position));
            for (key, (step, doors)) in distances {
                let key_bit = ALPHABET_BITS[key];
                if (state.node.collected & key_bit) != 0 {
                    continue;
                }
                if (*doors & !state.node.collected) != 0 {
                    continue;
                }
                let mut new_state = state;
                new_state.node.collected |= key_bit;
                new_state.node.positions[bot] = key_positions[key];
                new_state.distance += *step;
                queue.push(new_state);
            }
        }
    }
    let best_distance = best_distance.unwrap();
    println!("Part two answer is: {}", best_distance);
}
// Returns a hash map keyed by key, where the entries give the distance to that key and the doors
// that we must pass through to get there.
fn distances_to_keys(
    map: &HashMap<Position, TileContent>,
    start: Position,
) -> HashMap<char, (usize, u32)> {
    // Breadth-first search from `start`: the first visit to a tile is also
    // the shortest, so each position is recorded exactly once.
    let start_tile = Tile::new(start, 0, 0);
    let mut distances: HashMap<Position, Tile> = HashMap::new();
    let mut queue = VecDeque::new();
    queue.push_back(start_tile);
    while let Some(tile) = queue.pop_front() {
        for new_position in tile.position.neighbours() {
            let content = map.get(&new_position);
            if content.map_or(false, |p| p.is_visitable()) && !distances.contains_key(&new_position)
            {
                let new_distance = tile.distance + 1;
                // Accumulate the doors crossed along this path as a bitmask.
                let mut doors = tile.doors;
                if let Some(TileContent::Door(c)) = content {
                    doors |= ALPHABET_BITS[&c.to_ascii_lowercase()];
                }
                let new_tile = Tile::new(new_position, new_distance, doors);
                queue.push_back(new_tile.clone());
                distances.insert(new_position, new_tile);
            }
        }
    }
    // Keep only tiles that are keys; report (distance, doors-on-the-way).
    distances
        .into_iter()
        .filter_map(|(p, t)| match map[&p] {
            TileContent::Key(c) => Some((c, (t.distance, t.doors))),
            _ => None,
        })
        .collect()
}
|
/*
* Copyright Stalwart Labs Ltd. See the COPYING
* file at the top-level directory of this distribution.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
* https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
* <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
* option. This file may not be copied, modified, or distributed
* except according to those terms.
*/
use serde::Serialize;
use crate::{
core::query::{self, QueryObject},
Set,
};
use super::{Mailbox, QueryArguments, Role};
/// Mailbox query filter conditions (JMAP `Mailbox/query` filters).
#[derive(Serialize, Clone, Debug)]
#[serde(untagged)]
pub enum Filter {
    /// Match mailboxes with the given parent id (`None` = top level).
    ParentId {
        #[serde(rename = "parentId")]
        value: Option<String>,
    },
    /// Match mailboxes with this exact name.
    Name {
        #[serde(rename = "name")]
        value: String,
    },
    /// Match mailboxes with this role (`None` = no role).
    Role {
        #[serde(rename = "role")]
        value: Option<Role>,
    },
    /// Match mailboxes depending on whether they have any role.
    HasAnyRole {
        #[serde(rename = "hasAnyRole")]
        value: bool,
    },
    /// Match mailboxes depending on subscription state.
    IsSubscribed {
        #[serde(rename = "isSubscribed")]
        value: bool,
    },
}
/// Properties a mailbox query can sort on.
#[derive(Serialize, Debug, Clone)]
#[serde(tag = "property")]
pub enum Comparator {
    #[serde(rename = "name")]
    Name,
    #[serde(rename = "sortOrder")]
    SortOrder,
    #[serde(rename = "parentId")]
    ParentId,
}
impl Filter {
    /// Builds a `parentId` filter; `None` means "top-level" (JSON `null`).
    pub fn parent_id(value: Option<impl Into<String>>) -> Self {
        let value = value.map(Into::into);
        Filter::ParentId { value }
    }

    /// Builds a `name` filter for the given mailbox name.
    pub fn name(value: impl Into<String>) -> Self {
        Filter::Name {
            value: value.into(),
        }
    }

    /// Builds a `role` filter; `Role::None` is mapped to a `null` role value.
    pub fn role(value: Role) -> Self {
        let value = match value {
            Role::None => None,
            other => Some(other),
        };
        Filter::Role { value }
    }

    /// Builds a `hasAnyRole` filter.
    pub fn has_any_role(value: bool) -> Self {
        Filter::HasAnyRole { value }
    }

    /// Builds an `isSubscribed` filter.
    pub fn is_subscribed(value: bool) -> Self {
        Filter::IsSubscribed { value }
    }
}
impl Comparator {
    /// Comparator sorting results by mailbox name.
    pub fn name() -> query::Comparator<Comparator> {
        query::Comparator::new(Self::Name)
    }

    /// Comparator sorting results by the `sortOrder` property.
    pub fn sort_order() -> query::Comparator<Comparator> {
        query::Comparator::new(Self::SortOrder)
    }

    /// Comparator sorting results by parent mailbox id.
    pub fn parent_id() -> query::Comparator<Comparator> {
        query::Comparator::new(Self::ParentId)
    }
}
impl QueryArguments {
    /// Sets whether query results should be ordered as a mailbox tree.
    pub fn sort_as_tree(&mut self, value: bool) -> &mut Self {
        self.sort_as_tree = value;
        self
    }

    /// Sets whether the filter should be applied to the mailbox tree.
    pub fn filter_as_tree(&mut self, value: bool) -> &mut Self {
        self.filter_as_tree = value;
        self
    }
}
/// Wires the Mailbox set object into the generic query machinery:
/// `Mailbox/query` uses the filter, sort, and argument types defined above.
impl QueryObject for Mailbox<Set> {
    type QueryArguments = QueryArguments;
    type Filter = Filter;
    type Sort = Comparator;
}
|
use grid_printer::GridPrinter;
use grid_printer::style::{Fg, Bg, Sgr, StyleOpt};
use std::error::Error;
/// Demo: print a small 3x4 grid with a distinct style per column.
fn main() -> Result<(), Box<dyn Error>> {
    // Sample data to render.
    let grid = vec![
        vec![1, 2, 3, 4],
        vec![5, 6, 7, 8],
        vec![9, 10, 11, 12],
    ];
    let (rows, cols) = (grid.len(), grid[0].len());

    // Build a printer with per-column foreground/background/SGR styling.
    let printer = GridPrinter::builder(rows, cols)
        .col_style(0, StyleOpt::new().fg(Fg::Magenta))?
        .col_style(1, StyleOpt::new().fg(Fg::Black).bg(Bg::BrightYellow))?
        .col_style(2, StyleOpt::new().sgr(Sgr::StrikeThrough))?
        .col_style(3, StyleOpt::new().fg(Fg::Cyan))?
        .build();

    printer.print(&grid);
    Ok(())
}
|
use gl;
use graphics::texture::Texture;
use std::ops::Drop;
/// An off-screen OpenGL framebuffer with two color texture attachments
/// (main color + highlights) and a combined depth/stencil renderbuffer.
pub struct FrameBuffer {
    /// OpenGL framebuffer object name (0 until created in `new`).
    gl_handle: u32,
    /// Texture attached at COLOR_ATTACHMENT0.
    color_buffer: Texture,
    /// Texture attached at COLOR_ATTACHMENT1.
    highlights: Texture,
}
impl FrameBuffer {
    /// Creates a `width` x `height` framebuffer with two color texture
    /// attachments and a depth/stencil renderbuffer.
    ///
    /// Panics if the framebuffer is incomplete after setup.
    pub fn new(width: u32, height: u32) -> FrameBuffer {
        let mut frame_buffer = FrameBuffer {
            gl_handle: 0,
            color_buffer: Texture::new(width, height),
            highlights: Texture::new(width, height),
        };
        unsafe {
            gl::GenFramebuffers(1, &mut frame_buffer.gl_handle);
            gl::BindFramebuffer(gl::FRAMEBUFFER, frame_buffer.gl_handle);
            // Attach the two color textures (MRT slots 0 and 1).
            // NOTE(review): no glDrawBuffers call is made here; by default only
            // COLOR_ATTACHMENT0 receives fragment output — confirm callers set
            // the draw buffers before rendering into `highlights`.
            gl::FramebufferTexture2D(
                gl::FRAMEBUFFER,
                gl::COLOR_ATTACHMENT0,
                gl::TEXTURE_2D,
                frame_buffer.color_buffer.gl_handle,
                0,
            );
            gl::FramebufferTexture2D(
                gl::FRAMEBUFFER,
                gl::COLOR_ATTACHMENT1,
                gl::TEXTURE_2D,
                frame_buffer.highlights.gl_handle,
                0,
            );
            // Depth + stencil backing store for the framebuffer.
            let mut render_buffer_obj = 0u32;
            gl::GenRenderbuffers(1, &mut render_buffer_obj);
            gl::BindRenderbuffer(gl::RENDERBUFFER, render_buffer_obj);
            gl::RenderbufferStorage(
                gl::RENDERBUFFER,
                gl::DEPTH24_STENCIL8,
                width as i32,
                height as i32,
            );
            gl::BindRenderbuffer(gl::RENDERBUFFER, 0);
            gl::FramebufferRenderbuffer(
                gl::FRAMEBUFFER,
                gl::DEPTH_STENCIL_ATTACHMENT,
                gl::RENDERBUFFER,
                render_buffer_obj,
            );
            // NOTE(review): `render_buffer_obj` is not stored on the struct, so
            // the renderbuffer is never deleted (`Drop` only deletes the FBO);
            // each FrameBuffer leaks one renderbuffer.
            if gl::CheckFramebufferStatus(gl::FRAMEBUFFER) != gl::FRAMEBUFFER_COMPLETE {
                panic!("Framebuffer setup failed");
            }
            gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
        }
        frame_buffer
    }
    /// Makes this framebuffer the active render target.
    pub fn bind(&mut self) {
        unsafe {
            gl::BindFramebuffer(gl::FRAMEBUFFER, self.gl_handle);
        }
    }
    /// Restores the default framebuffer (0) as the render target.
    pub fn unbind(&self) {
        unsafe {
            gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
        }
    }
    /// Mutable access to the COLOR_ATTACHMENT0 texture.
    pub fn get_color_texture(&mut self) -> &mut Texture {
        &mut self.color_buffer
    }
    /// Mutable access to the COLOR_ATTACHMENT1 (highlights) texture.
    pub fn get_highlights_texture(&mut self) -> &mut Texture {
        &mut self.highlights
    }
}
impl Drop for FrameBuffer {
    /// Deletes the framebuffer object. Attached textures are owned by the
    /// `Texture` fields and cleaned up by their own destructors; note the
    /// depth/stencil renderbuffer created in `new` is not deleted here.
    fn drop(&mut self) {
        unsafe {
            gl::DeleteFramebuffers(1, &mut self.gl_handle);
        }
    }
}
|
// Copyright 2019 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Functions for building partial transactions to be passed
//! around during an interactive wallet exchange
use crate::blake2::blake2b::blake2b;
use crate::error::{Error, ErrorKind};
use crate::grin_core::core::amount_to_hr_string;
use crate::grin_core::core::committed::Committed;
use crate::grin_core::core::transaction::{
Input, KernelFeatures, Output, OutputFeatures, Transaction, TransactionBody, TxKernel,
Weighting,
};
use crate::grin_core::core::verifier_cache::LruVerifierCache;
use crate::grin_core::global;
use crate::grin_core::libtx::{aggsig, build, proof::ProofBuild, secp_ser, tx_fee};
use crate::grin_core::map_vec;
use crate::grin_keychain::{BlindSum, BlindingFactor, Keychain, SwitchCommitmentType};
use crate::grin_util::secp::key::{PublicKey, SecretKey};
use crate::grin_util::secp::pedersen::Commitment;
use crate::grin_util::secp::Signature;
use crate::grin_util::{self, secp, RwLock};
use crate::Context;
use serde::ser::{Serialize, Serializer};
use serde_json;
use std::fmt;
use std::sync::Arc;
use uuid::Uuid;
use crate::slate_versions::v2::SlateV2;
use crate::slate_versions::v2::SlateV2ParseTTL;
use crate::slate_versions::v3::{
CoinbaseV3, InputV3, OutputV3, ParticipantDataV3, PaymentInfoV3, SlateV3, TransactionBodyV3,
TransactionV3, TxKernelV3, VersionCompatInfoV3,
};
// use crate::slate_versions::{CURRENT_SLATE_VERSION, GRIN_BLOCK_HEADER_VERSION};
use crate::grin_core::core::{Inputs, NRDRelativeHeight, OutputIdentifier};
use crate::proof::proofaddress;
use crate::proof::proofaddress::ProvableAddress;
use crate::types::CbData;
use crate::{SlateVersion, Slatepacker, CURRENT_SLATE_VERSION};
use ed25519_dalek::SecretKey as DalekSecretKey;
use rand::rngs::mock::StepRng;
use rand::thread_rng;
/// Payment-proof data carried in a slate: sender and receiver proof
/// addresses plus the receiver's signature once it has been produced.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct PaymentInfo {
    /// Sender's provable address (serialized as a string).
    #[serde(
        serialize_with = "proofaddress::as_string",
        deserialize_with = "proofaddress::proof_address_from_string"
    )]
    pub sender_address: ProvableAddress,
    /// Receiver's provable address (serialized as a string).
    #[serde(
        serialize_with = "proofaddress::as_string",
        deserialize_with = "proofaddress::proof_address_from_string"
    )]
    pub receiver_address: ProvableAddress,
    /// Receiver's payment-proof signature; `None` until the receiver signs.
    pub receiver_signature: Option<String>,
}
/// Public data for each participant in the slate
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub struct ParticipantData {
    /// Id of participant in the transaction. (For now, 0=sender, 1=rec)
    #[serde(with = "secp_ser::string_or_u64")]
    pub id: u64,
    /// Public key corresponding to private blinding factor
    #[serde(with = "secp_ser::pubkey_serde")]
    pub public_blind_excess: PublicKey,
    /// Public key corresponding to private nonce
    #[serde(with = "secp_ser::pubkey_serde")]
    pub public_nonce: PublicKey,
    /// Public partial signature; `None` until this participant completes round 2
    #[serde(with = "secp_ser::option_sig_serde")]
    pub part_sig: Option<Signature>,
    /// A message for other participants
    pub message: Option<String>,
    /// Signature, created with private key corresponding to 'public_blind_excess'
    #[serde(with = "secp_ser::option_sig_serde")]
    pub message_sig: Option<Signature>,
}
impl ParticipantData {
    /// Returns whether this participant has completed both rounds.
    ///
    /// Round 1 must already be done before this struct exists (inputs,
    /// outputs, public nonce and public blinding contribution added);
    /// round 2 additionally fills in the partial signature, so the
    /// presence of `part_sig` is the completion marker.
    pub fn is_complete(&self) -> bool {
        matches!(self.part_sig, Some(_))
    }
}
/// Public message data (for serialising and storage)
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ParticipantMessageData {
    /// id of the particpant in the tx
    #[serde(with = "secp_ser::string_or_u64")]
    pub id: u64,
    /// Participant's public blind excess key
    #[serde(with = "secp_ser::pubkey_serde")]
    pub public_key: PublicKey,
    /// Optional message text attached by the participant
    pub message: Option<String>,
    /// Signature over the message, if a message was provided
    #[serde(with = "secp_ser::option_sig_serde")]
    pub message_sig: Option<Signature>,
}
impl ParticipantMessageData {
/// extract relevant message data from participant data
pub fn from_participant_data(p: &ParticipantData) -> ParticipantMessageData {
ParticipantMessageData {
id: p.id,
public_key: p.public_blind_excess,
message: p.message.clone(),
message_sig: p.message_sig,
}
}
}
impl fmt::Display for ParticipantMessageData {
    /// Human-readable dump of one participant's message record.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f)?;
        write!(f, "Participant ID {} ", self.id)?;
        let role = if self.id == 0 { "(Sender)" } else { "(Recipient)" };
        writeln!(f, "{}", role)?;
        writeln!(f, "---------------------")?;
        writeln!(
            f,
            "Public Key: {}",
            &grin_util::to_hex(&self.public_key.serialize_vec(true))
        )?;
        let message = self
            .message
            .clone()
            .unwrap_or_else(|| "None".to_owned());
        writeln!(f, "Message: {}", message)?;
        let message_sig = self
            .message_sig
            .map(|m| grin_util::to_hex(&m.to_raw_data()))
            .unwrap_or_else(|| "None".to_owned());
        writeln!(f, "Message Signature: {}", message_sig)
    }
}
/// A 'Slate' is passed around to all parties to build up all of the public
/// transaction data needed to create a finalized transaction. Callers can pass
/// the slate around by whatever means they choose, (but we can provide some
/// binary or JSON serialization helpers here).
#[derive(Deserialize, Debug, Clone)]
pub struct Slate {
    /// True is created from slatepack data.
    pub compact_slate: bool,
    /// Versioning info
    pub version_info: VersionCompatInfo,
    /// The number of participants intended to take part in this transaction
    pub num_participants: usize,
    /// Unique transaction ID, selected by sender
    pub id: Uuid,
    /// The core transaction data:
    /// inputs, outputs, kernels, kernel offset
    pub tx: Transaction,
    /// base amount (excluding fee)
    #[serde(with = "secp_ser::string_or_u64")]
    pub amount: u64,
    /// fee amount
    #[serde(with = "secp_ser::string_or_u64")]
    pub fee: u64,
    /// Block height for the transaction
    #[serde(with = "secp_ser::string_or_u64")]
    pub height: u64,
    /// Lock height
    #[serde(with = "secp_ser::string_or_u64")]
    pub lock_height: u64,
    /// TTL, the block height at which wallets
    /// should refuse to process the transaction and unlock all
    /// associated outputs
    #[serde(with = "secp_ser::opt_string_or_u64")]
    pub ttl_cutoff_height: Option<u64>,
    /// Participant data, each participant in the transaction will
    /// insert their public data here. For now, 0 is sender and 1
    /// is receiver, though this will change for multi-party
    pub participant_data: Vec<ParticipantData>,
    /// Payment Proof (defaults to `None` when absent in the JSON)
    #[serde(default = "default_payment_none")]
    pub payment_proof: Option<PaymentInfo>,
    /// Offset, needed when posting of transaction is deferred.
    /// Defaults to a zero blinding factor when absent in the JSON.
    #[serde(default = "zero_bf")]
    pub offset: BlindingFactor,
}
/// Serde default: zero blinding factor for slates that omit `offset`.
fn zero_bf() -> BlindingFactor {
    BlindingFactor::zero()
}
/// Serde default: no payment proof for slates that omit `payment_proof`.
fn default_payment_none() -> Option<PaymentInfo> {
    None
}
/// Versioning and compatibility info about this slate
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct VersionCompatInfo {
    /// The current version of the slate format
    pub version: u16,
    /// The grin block header version this slate is intended for
    pub block_header_version: u16,
}
/// Helper just to facilitate serialization of the collected
/// per-participant message records.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ParticipantMessages {
    /// included messages
    pub messages: Vec<ParticipantMessageData>,
}
impl Slate {
/// Attempt to find slate version
pub fn parse_slate_version(slate_json: &str) -> Result<u16, Error> {
let probe: SlateVersionProbe = serde_json::from_str(slate_json).map_err(|e| {
ErrorKind::SlateVersionParse(format!(
"Unable to find slate version at {}, {}",
slate_json, e
))
})?;
Ok(probe.version())
}
/// Check if this text slate is plain
pub fn deserialize_is_plain(slate_str: &str) -> bool {
slate_str.len() > 0 && slate_str.as_bytes()[0] == '{' as u8
}
    /// Receive an armored slatepack, decrypting it with the given key and
    /// upgrading the contained slate to the latest version internally.
    pub fn deserialize_upgrade_slatepack(
        slate_str: &str,
        dec_key: &DalekSecretKey,
    ) -> Result<Slatepacker, Error> {
        let sp = Slatepacker::decrypt_slatepack(slate_str.as_bytes(), dec_key)?;
        Ok(sp)
    }
/// Recieve a slate, upgrade it to the latest version internally
pub fn deserialize_upgrade_plain(slate_json: &str) -> Result<Slate, Error> {
let version = Slate::parse_slate_version(slate_json)?;
//I don't think we need to do this for coin_type and network_type, the slate containing these two
//fields has to be version 3. If receiver wallet doesn't supported them, they will be filtered out.
let ttl_cutoff_height = if version == 2 {
let parse_slate: Result<SlateV2ParseTTL, serde_json::error::Error> =
serde_json::from_str(slate_json);
if parse_slate.is_ok() {
parse_slate.unwrap().ttl_cutoff_height
} else {
None
}
} else {
None
};
let v3: SlateV3 = match version {
3 => serde_json::from_str(slate_json).map_err(|e| {
ErrorKind::SlateDeser(format!(
"Json to SlateV3 conversion failed for {}, {}",
slate_json, e
))
})?,
2 => {
let v2: SlateV2 = serde_json::from_str(slate_json).map_err(|e| {
ErrorKind::SlateDeser(format!(
"Json to SlateV2 conversion failed for {}, {}",
slate_json, e
))
})?;
let mut ret = SlateV3::from(v2);
ret.ttl_cutoff_height = ttl_cutoff_height;
ret
}
_ => return Err(ErrorKind::SlateVersion(version).into()),
};
Ok(v3.to_slate()?)
}
    /// Create a new slate
    /// slatepack also means 'compact slate'. Please note the slates are built a different way,
    /// so for the compact slates we have a different method of building it.
    pub fn blank(num_participants: usize, compact_slate: bool) -> Slate {
        // Treat 0 participants as the common 2-party case.
        let np = match num_participants {
            0 => 2,
            n => n,
        };
        let mut slate = Slate {
            compact_slate,
            num_participants: np, // assume 2 if not present
            id: Uuid::new_v4(),
            tx: Transaction::empty(),
            amount: 0,
            fee: 0,
            height: 0,
            lock_height: 0,
            ttl_cutoff_height: None,
            participant_data: vec![],
            version_info: VersionCompatInfo {
                version: CURRENT_SLATE_VERSION,
                block_header_version: 1, // GRIN_BLOCK_HEADER_VERSION,
            },
            payment_proof: None,
            offset: BlindingFactor::zero(),
        };
        // The transaction inputs type need need to be Commit and feature. So let's fix that now while it is empty.
        slate.tx.body.inputs = Inputs::FeaturesAndCommit(vec![]);
        slate
    }
/// Compare two slates for send: sended and responded. Just want to check if sender didn't mess with slate
pub fn compare_slates_send(send_slate: &Self, respond_slate: &Self) -> Result<(), Error> {
if send_slate.id != respond_slate.id {
return Err(ErrorKind::SlateValidation("uuid mismatch".to_string()).into());
}
if !send_slate.compact_slate {
if send_slate.amount != respond_slate.amount {
return Err(ErrorKind::SlateValidation("amount mismatch".to_string()).into());
}
if send_slate.fee != respond_slate.fee {
return Err(ErrorKind::SlateValidation("fee mismatch".to_string()).into());
}
// Checking transaction...
// Inputs must match excatly
if send_slate.tx.body.inputs != respond_slate.tx.body.inputs {
return Err(ErrorKind::SlateValidation("inputs mismatch".to_string()).into());
}
// Checking if participant data match each other
for pat_data in &send_slate.participant_data {
if !respond_slate.participant_data.contains(&pat_data) {
return Err(ErrorKind::SlateValidation(
"participant data mismatch".to_string(),
)
.into());
}
}
// Respond outputs must include send_slate's. Expected that some was added
for output in &send_slate.tx.body.outputs {
if !respond_slate.tx.body.outputs.contains(&output) {
return Err(ErrorKind::SlateValidation("outputs mismatch".to_string()).into());
}
}
// Kernels must match excatly
if send_slate.tx.body.kernels != respond_slate.tx.body.kernels {
return Err(ErrorKind::SlateValidation("kernels mismatch".to_string()).into());
}
}
if send_slate.lock_height != respond_slate.lock_height {
return Err(ErrorKind::SlateValidation("lock_height mismatch".to_string()).into());
}
if send_slate.height != respond_slate.height {
return Err(ErrorKind::SlateValidation("heigh mismatch".to_string()).into());
}
if send_slate.ttl_cutoff_height != respond_slate.ttl_cutoff_height {
return Err(ErrorKind::SlateValidation("ttl_cutoff mismatch".to_string()).into());
}
Ok(())
}
/// Compare two slates for invoice: sended and responded. Just want to check if sender didn't mess with slate
pub fn compare_slates_invoice(invoice_slate: &Self, respond_slate: &Self) -> Result<(), Error> {
if invoice_slate.id != respond_slate.id {
return Err(ErrorKind::SlateValidation("uuid mismatch".to_string()).into());
}
if invoice_slate.amount != respond_slate.amount {
return Err(ErrorKind::SlateValidation("amount mismatch".to_string()).into());
}
if invoice_slate.height != respond_slate.height {
return Err(ErrorKind::SlateValidation("heigh mismatch".to_string()).into());
}
if invoice_slate.ttl_cutoff_height != respond_slate.ttl_cutoff_height {
return Err(ErrorKind::SlateValidation("ttl_cutoff mismatch".to_string()).into());
}
assert!(invoice_slate.tx.body.inputs.is_empty());
// Respond outputs must include original ones. Expected that some was added
for output in &invoice_slate.tx.body.outputs {
if !respond_slate.tx.body.outputs.contains(&output) {
return Err(ErrorKind::SlateValidation("outputs mismatch".to_string()).into());
}
}
// Checking if participant data match each other
for pat_data in &invoice_slate.participant_data {
if !respond_slate.participant_data.contains(&pat_data) {
return Err(
ErrorKind::SlateValidation("participant data mismatch".to_string()).into(),
);
}
}
Ok(())
}
/// Calculate minimal plain Slate version. For exchange we want to keep the varsion as low as possible
/// because there are might be many non upgraded wallets and we want ot be friendly to them.
pub fn lowest_version(&self) -> SlateVersion {
if self.payment_proof.is_some() || self.ttl_cutoff_height.is_some() || self.compact_slate {
SlateVersion::V3
} else {
SlateVersion::V2
}
}
    /// Adds selected inputs and outputs to the slate's transaction
    /// Returns blinding factor
    pub fn add_transaction_elements<K, B>(
        &mut self,
        keychain: &K,
        builder: &B,
        elems: Vec<Box<build::Append<K, B>>>,
    ) -> Result<BlindingFactor, Error>
    where
        K: Keychain,
        B: ProofBuild,
    {
        // Keep the kernel in sync with current fee/lock_height before building.
        self.update_kernel();
        // Nothing to add: the blind contribution is zero.
        if elems.is_empty() {
            return Ok(BlindingFactor::zero());
        }
        let (tx, blind) = build::partial_transaction(self.tx.clone(), &elems, keychain, builder)?;
        self.tx = tx;
        Ok(blind)
    }
    /// Update the tx kernel based on kernel features derived from the current slate.
    /// The fee may change as we build a transaction and we need to
    /// update the tx kernel to reflect this during the tx building process.
    pub fn update_kernel(&mut self) {
        self.tx = self
            .tx
            .clone()
            .replace_kernel(TxKernel::with_features(self.kernel_features()));
    }
    /// Completes callers part of round 1, adding public key info
    /// to the slate
    pub fn fill_round_1<K>(
        &mut self,
        keychain: &K,
        sec_key: &mut SecretKey,
        sec_nonce: &SecretKey,
        participant_id: usize,
        message: Option<String>,
        use_test_rng: bool,
    ) -> Result<(), Error>
    where
        K: Keychain,
    {
        if !self.compact_slate {
            // Generating offset for backward compatibility. Offset ONLY for the TX,
            // the slate copy is kept the same.
            if self.tx.offset == BlindingFactor::zero() {
                self.generate_legacy_offset(keychain, sec_key, use_test_rng)?;
            }
        }
        self.add_participant_info(
            keychain.secp(),
            &sec_key,
            &sec_nonce,
            participant_id,
            None, // no partial signature yet in round 1
            message,
            use_test_rng,
        )?;
        Ok(())
    }
    /// Construct the appropriate kernel features based on our fee and lock_height.
    /// If lock_height is 0 then its a plain kernel, otherwise its a height locked kernel.
    pub fn kernel_features(&self) -> KernelFeatures {
        match self.lock_height {
            0 => KernelFeatures::Plain { fee: self.fee },
            _ => KernelFeatures::HeightLocked {
                fee: self.fee,
                lock_height: self.lock_height,
            },
        }
    }
// This is the msg that we will sign as part of the tx kernel.
// If lock_height is 0 then build a plain kernel, otherwise build a height locked kernel.
fn msg_to_sign(&self) -> Result<secp::Message, Error> {
let msg = self.kernel_features().kernel_sig_msg()?;
Ok(msg)
}
/// Completes caller's part of round 2, completing signatures
pub fn fill_round_2(
&mut self,
secp: &secp::Secp256k1,
sec_key: &SecretKey,
sec_nonce: &SecretKey,
participant_id: usize,
) -> Result<(), Error> {
// TODO: Note we're unable to verify fees in this instance because of the slatepacks
// Inputs are not transferred.
// Also with lock later feature, fees and inputs can be adjusted before finalizing by the send init party
// self.check_fees()?;
self.verify_part_sigs(secp)?;
let sig_part = aggsig::calculate_partial_sig(
secp,
sec_key,
sec_nonce,
&self.pub_nonce_sum()?,
Some(&self.pub_blind_sum()?),
&self.msg_to_sign()?,
)?;
for i in 0..self.num_participants {
if self.participant_data[i].id == participant_id as u64 {
self.participant_data[i].part_sig = Some(sig_part);
break;
}
}
Ok(())
}
    /// Creates the final signature, callable by either the sender or recipient
    /// (after phase 3: sender confirmation)
    pub fn finalize<K>(&mut self, keychain: &K) -> Result<(), Error>
    where
        K: Keychain,
    {
        let final_sig = self.finalize_signature(keychain.secp())?;
        self.finalize_transaction(keychain, &final_sig)
    }
/// Return the participant with the given id
pub fn participant_with_id(&self, id: usize) -> Option<ParticipantData> {
for p in self.participant_data.iter() {
if p.id as usize == id {
return Some(p.clone());
}
}
None
}
/// Return the sum of public nonces
fn pub_nonce_sum(&self) -> Result<PublicKey, Error> {
let pub_nonces: Vec<&PublicKey> = self
.participant_data
.iter()
.map(|p| &p.public_nonce)
.collect();
if pub_nonces.len() == 0 {
return Err(
ErrorKind::GenericError(format!("Participant nonces cannot be empty")).into(),
);
}
match PublicKey::from_combination(pub_nonces) {
Ok(k) => Ok(k),
Err(e) => Err(Error::from(e)),
}
}
/// Return the sum of public blinding factors
fn pub_blind_sum(&self) -> Result<PublicKey, Error> {
let pub_blinds: Vec<&PublicKey> = self
.participant_data
.iter()
.map(|p| &p.public_blind_excess)
.collect();
if pub_blinds.len() == 0 {
return Err(
ErrorKind::GenericError(format!("Participant Blind sums cannot be empty")).into(),
);
}
match PublicKey::from_combination(pub_blinds) {
Ok(k) => Ok(k),
Err(e) => Err(Error::from(e)),
}
}
/// Return vector of all partial sigs
fn part_sigs(&self) -> Vec<&Signature> {
self.participant_data
.iter()
.filter(|p| p.part_sig.is_some())
.map(|p| p.part_sig.as_ref().unwrap())
.collect()
}
    /// Adds participants public keys to the slate data
    /// and saves participant's transaction context
    /// sec_key can be overridden to replace the blinding
    /// factor (by whoever split the offset)
    pub fn add_participant_info(
        &mut self,
        secp: &secp::Secp256k1,
        sec_key: &SecretKey,
        sec_nonce: &SecretKey,
        id: usize,
        part_sig: Option<Signature>,
        message: Option<String>,
        use_test_rng: bool,
    ) -> Result<(), Error> {
        // Add our public key and nonce to the slate
        let pub_key = PublicKey::from_secret_key(secp, &sec_key)?;
        let pub_nonce = PublicKey::from_secret_key(secp, &sec_nonce)?;
        // Fixed nonce so tests produce deterministic message signatures.
        let test_message_nonce = SecretKey::from_slice(&[1; 32])?;
        let message_nonce = match use_test_rng {
            false => None,
            true => Some(&test_message_nonce),
        };
        // Sign the provided message (blake2b hash of the message bytes).
        let message_sig = {
            if let Some(m) = message.clone() {
                let hashed = blake2b(secp::constants::MESSAGE_SIZE, &[], &m.as_bytes()[..]);
                let m = secp::Message::from_slice(&hashed.as_bytes())?;
                let res = aggsig::sign_single(secp, &m, &sec_key, message_nonce, Some(&pub_key))?;
                Some(res)
            } else {
                None
            }
        };
        // The record might exist. In this case we should update it
        match self
            .participant_data
            .iter_mut()
            .find(|ref p| p.id == id as u64)
        {
            Some(pp) => {
                if pp.public_blind_excess == pub_key && pp.public_nonce == pub_nonce {
                    // Same keys: only overwrite sig fields when new values exist,
                    // so an update can't erase previously stored signatures.
                    if part_sig.is_some() {
                        pp.part_sig = part_sig;
                    }
                    if pp.message == message {
                        if message_sig.is_some() {
                            pp.message_sig = message_sig;
                        }
                    } else {
                        pp.message_sig = message_sig;
                    }
                } else {
                    // Keys changed: old sigs are invalid, replace unconditionally.
                    pp.part_sig = part_sig;
                    pp.message_sig = message_sig;
                }
                pp.public_blind_excess = pub_key;
                pp.public_nonce = pub_nonce;
                pp.message = message;
            }
            None => {
                // First time we see this participant id: append a new record.
                self.participant_data.push(ParticipantData {
                    id: id as u64,
                    public_blind_excess: pub_key,
                    public_nonce: pub_nonce,
                    part_sig: part_sig,
                    message: message,
                    message_sig: message_sig,
                });
            }
        }
        Ok(())
    }
/// helper to return all participant messages
pub fn participant_messages(&self) -> ParticipantMessages {
let mut ret = ParticipantMessages { messages: vec![] };
for ref m in self.participant_data.iter() {
ret.messages
.push(ParticipantMessageData::from_participant_data(m));
}
ret
}
    /// NOTE: Non compact workflow supporting. This code does generate the offset for NON slatepack case
    /// Slatepacks will override that!!!!
    /// Somebody involved needs to generate an offset with their private key
    /// For now, we'll have the transaction initiator be responsible for it
    /// Return offset private key for the participant to use later in the
    /// transaction
    fn generate_legacy_offset<K: Keychain>(
        &mut self,
        keychain: &K,
        sec_key: &mut SecretKey,
        use_test_rng: bool,
    ) -> Result<(), Error> {
        // Generate a random kernel offset here
        // and subtract it from the blind_sum so we create
        // the aggsig context with the "split" key
        self.tx.offset = match use_test_rng {
            false => BlindingFactor::from_secret_key(SecretKey::new(&mut thread_rng())),
            true => {
                // allow for consistent test results
                let mut test_rng = StepRng::new(1_234_567_890_u64, 1);
                BlindingFactor::from_secret_key(SecretKey::new(&mut test_rng))
            }
        };
        // sec_key becomes (original blind - offset), i.e. the "split" key.
        let blind_offset = keychain.blind_sum(
            &BlindSum::new()
                .add_blinding_factor(BlindingFactor::from_secret_key(sec_key.clone()))
                .sub_blinding_factor(self.tx.offset.clone()),
        )?;
        *sec_key = blind_offset.secret_key()?;
        Ok(())
    }
    /// Add our contribution to the offset based on the excess, inputs and outputs
    pub fn adjust_offset<K: Keychain>(
        &mut self,
        keychain: &K,
        context: &Context,
    ) -> Result<(), Error> {
        // Only compact slate flow.
        debug_assert!(self.compact_slate);
        // Start from current offset, remove our initial excess contribution...
        let mut sum = BlindSum::new()
            .add_blinding_factor(self.offset.clone())
            .sub_blinding_factor(BlindingFactor::from_secret_key(
                context.initial_sec_key.clone(),
            ));
        // ...subtract each input's derived blinding factor...
        for (id, _, amount) in &context.input_ids {
            sum = sum.sub_blinding_factor(BlindingFactor::from_secret_key(keychain.derive_key(
                *amount,
                id,
                SwitchCommitmentType::Regular,
            )?));
        }
        // ...and add each output's derived blinding factor.
        for (id, _, amount) in &context.output_ids {
            sum = sum.add_blinding_factor(BlindingFactor::from_secret_key(keychain.derive_key(
                *amount,
                id,
                SwitchCommitmentType::Regular,
            )?));
        }
        self.offset = keychain.blind_sum(&sum)?;
        Ok(())
    }
    /// Checks the fees in the transaction in the given slate are valid
    fn check_fees(&self) -> Result<(), Error> {
        // double check the fee amount included in the partial tx
        // we don't necessarily want to just trust the sender
        // we could just overwrite the fee here (but we won't) due to the sig
        let fee = tx_fee(
            self.tx.inputs().len(),
            self.tx.outputs().len(),
            self.tx.kernels().len(),
            None,
        );
        // The tx must carry at least the minimum computed fee.
        if fee > self.tx.fee() {
            return Err(
                ErrorKind::Fee(format!("Fee Dispute Error: {}, {}", self.tx.fee(), fee,)).into(),
            );
        }
        // Reject fees that exceed the total being transferred.
        // NOTE(review): `self.amount + self.fee` is unchecked u64 addition —
        // presumably values are far below overflow range, but confirm.
        if fee > self.amount + self.fee {
            let reason = format!(
                "Rejected the transfer because transaction fee ({}) exceeds received amount ({}).",
                amount_to_hr_string(fee, false),
                amount_to_hr_string(self.amount + self.fee, false)
            );
            info!("{}", reason);
            return Err(ErrorKind::Fee(reason).into());
        }
        Ok(())
    }
    /// Verifies all of the partial signatures in the Slate are valid
    fn verify_part_sigs(&self, secp: &secp::Secp256k1) -> Result<(), Error> {
        // collect public nonces
        for p in self.participant_data.iter() {
            // Only participants that completed round 2 have a partial sig to check.
            if p.is_complete() {
                debug_assert!(p.part_sig.is_some());
                aggsig::verify_partial_sig(
                    secp,
                    p.part_sig.as_ref().unwrap(),
                    &self.pub_nonce_sum()?,
                    &p.public_blind_excess,
                    Some(&self.pub_blind_sum()?),
                    &self.msg_to_sign()?,
                )?;
            }
        }
        Ok(())
    }
    /// Verifies any messages in the slate's participant data match their signatures
    pub fn verify_messages(&self) -> Result<(), Error> {
        let secp = secp::Secp256k1::with_caps(secp::ContextFlag::VerifyOnly);
        for p in self.participant_data.iter() {
            if let Some(msg) = &p.message {
                // The signature covers the blake2b hash of the message bytes.
                let hashed = blake2b(secp::constants::MESSAGE_SIZE, &[], &msg.as_bytes()[..]);
                let m = secp::Message::from_slice(&hashed.as_bytes())?;
                // A message without a signature is a validation failure.
                let signature = match p.message_sig {
                    None => {
                        error!("verify_messages - participant message doesn't have signature. Message: \"{}\"",
                        String::from_utf8_lossy(&msg.as_bytes()[..]));
                        return Err(ErrorKind::Signature(
                            "Optional participant messages doesn't have signature".to_owned(),
                        )
                        .into());
                    }
                    Some(s) => s,
                };
                if !aggsig::verify_single(
                    &secp,
                    &signature,
                    &m,
                    None,
                    &p.public_blind_excess,
                    Some(&p.public_blind_excess),
                    false,
                ) {
                    error!("verify_messages - participant message doesn't match signature. Message: \"{}\"",
                    String::from_utf8_lossy(&msg.as_bytes()[..]));
                    return Err(ErrorKind::Signature(
                        "Optional participant messages do not match signatures".to_owned(),
                    )
                    .into());
                } else {
                    info!(
                        "verify_messages - signature verified ok. Participant message: \"{}\"",
                        String::from_utf8_lossy(&msg.as_bytes()[..])
                    );
                }
            }
        }
        Ok(())
    }
    /// This should be callable by either the sender or receiver
    /// once phase 3 is done
    ///
    /// Receive Part 3 of interactive transactions from sender, Sender
    /// Confirmation Return Ok/Error
    /// -Receiver receives sS
    /// -Receiver verifies sender's sig, by verifying that
    /// kS * G + e *xS * G = sS* G
    /// -Receiver calculates final sig as s=(sS+sR, kS * G+kR * G)
    /// -Receiver puts into TX kernel:
    ///
    /// Signature S
    /// pubkey xR * G+xS * G
    /// fee (= M)PaymentInfoV3
    ///
    /// Returns completed transaction ready for posting to the chain
    pub fn finalize_signature(&mut self, secp: &secp::Secp256k1) -> Result<Signature, Error> {
        // Re-check every partial sig before aggregating.
        self.verify_part_sigs(secp)?;
        let part_sigs = self.part_sigs();
        let pub_nonce_sum = self.pub_nonce_sum()?;
        let final_pubkey = self.pub_blind_sum()?;
        // get the final signature
        let final_sig = aggsig::add_signatures(secp, part_sigs, &pub_nonce_sum)?;
        // Calculate the final public key (for our own sanity check)
        // Check our final sig verifies
        aggsig::verify_completed_sig(
            secp,
            &final_sig,
            &final_pubkey,
            Some(&final_pubkey),
            &self.msg_to_sign()?,
        )?;
        Ok(final_sig)
    }
    /// return the final excess
    pub fn calc_excess<K>(&self, keychain: Option<&K>) -> Result<Commitment, Error>
    where
        K: Keychain,
    {
        if self.compact_slate {
            // Compact flow: excess is the sum of participants' public blind excesses.
            let sum = self.pub_blind_sum()?;
            Ok(Commitment::from_pubkey(&sum)?)
        } else {
            // Legacy method: derive excess from the tx commitments and offset.
            let kernel_offset = &self.tx.offset;
            let tx = self.tx.clone();
            let overage = tx.fee() as i64;
            let tx_excess = tx.sum_commitments(overage)?;
            // subtract the kernel_excess (built from kernel_offset)
            let offset_excess = keychain
                .unwrap()
                .secp()
                .commit(0, kernel_offset.secret_key()?)?;
            Ok(secp::Secp256k1::commit_sum(
                vec![tx_excess],
                vec![offset_excess],
            )?)
        }
    }
    /// builds a final transaction after the aggregated sig exchange
    fn finalize_transaction<K>(
        &mut self,
        keychain: &K,
        final_sig: &secp::Signature,
    ) -> Result<(), Error>
    where
        K: Keychain,
    {
        self.check_fees()?;
        // build the final excess based on final tx and offset
        let final_excess = self.calc_excess(Some(keychain))?;
        debug!("Final Tx excess: {:?}", final_excess);
        let mut final_tx = self.tx.clone();
        // update the tx kernel to reflect the offset excess and sig
        assert_eq!(final_tx.kernels().len(), 1);
        final_tx.body.kernels[0].excess = final_excess.clone();
        final_tx.body.kernels[0].excess_sig = final_sig.clone();
        // confirm the kernel verifies successfully before proceeding
        debug!("Validating final transaction");
        trace!(
            "Final tx: {}",
            serde_json::to_string_pretty(&final_tx).unwrap()
        );
        final_tx.kernels()[0].verify()?;
        // confirm the overall transaction is valid (including the updated kernel)
        // accounting for tx weight limits
        let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
        final_tx.validate(Weighting::AsTransaction, verifier_cache)?;
        self.tx = final_tx;
        Ok(())
    }
}
/// Serialize the slate in the JSON shape matching its `version_info.version`:
/// convert to V3 first, and downgrade to V2 when requested.
impl Serialize for Slate {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        use serde::ser::Error;
        let v3 = SlateV3::from(self);
        match self.version_info.version {
            3 => v3.serialize(serializer),
            // left as a reminder
            2 => {
                let v2 = SlateV2::from(&v3);
                v2.serialize(serializer)
            }
            v => Err(S::Error::custom(format!("Unknown slate version {}", v))),
        }
    }
}
/// Minimal deserialization target used to sniff a slate's version before
/// fully parsing it: only the version-bearing fields are declared and both
/// are optional, so any slate JSON deserializes into it.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SlateVersionProbe {
    /// Bare legacy `version` field (slates that predate `version_info`).
    #[serde(default)]
    version: Option<u64>,
    /// Structured version block carried by newer slates.
    #[serde(default)]
    version_info: Option<VersionCompatInfo>,
}
impl SlateVersionProbe {
    /// Resolve the probed slate version: prefer the structured
    /// `version_info` block, fall back to the bare legacy `version` field
    /// (which implies v1), and default to v0 when neither is present.
    pub fn version(&self) -> u16 {
        if let Some(info) = &self.version_info {
            info.version
        } else if self.version.is_some() {
            1
        } else {
            0
        }
    }
}
// Coinbase data to versioned.
impl From<CbData> for CoinbaseV3 {
    /// Convert coinbase output + kernel into their V3 wire forms.
    fn from(cb: CbData) -> CoinbaseV3 {
        let output = OutputV3::from(&cb.output);
        let kernel = TxKernelV3::from(&cb.kernel);
        CoinbaseV3 {
            output,
            kernel,
            key_id: cb.key_id,
        }
    }
}
// Current slate version to versioned conversions
// Slate to versioned
impl From<Slate> for SlateV3 {
fn from(slate: Slate) -> SlateV3 {
let Slate {
compact_slate,
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
offset: tx_offset,
} = slate;
let participant_data = map_vec!(participant_data, |data| ParticipantDataV3::from(data));
let version_info = VersionCompatInfoV3::from(&version_info);
let payment_proof = match payment_proof {
Some(p) => Some(PaymentInfoV3::from(&p)),
None => None,
};
let mut tx = TransactionV3::from(tx);
if compact_slate {
// for compact the Slate offset is dominate
tx.offset = tx_offset;
}
SlateV3 {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
coin_type: Some("mwc".to_string()),
network_type: Some(global::get_network_name()),
participant_data,
version_info,
payment_proof,
compact_slate: if compact_slate { Some(true) } else { None },
}
}
}
impl From<&Slate> for SlateV3 {
fn from(slate: &Slate) -> SlateV3 {
let Slate {
compact_slate,
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
participant_data,
version_info,
payment_proof,
offset: tx_offset,
} = slate;
let num_participants = *num_participants;
let id = *id;
let mut tx = TransactionV3::from(tx);
let amount = *amount;
let fee = *fee;
let height = *height;
let lock_height = *lock_height;
let ttl_cutoff_height = *ttl_cutoff_height;
let participant_data = map_vec!(participant_data, |data| ParticipantDataV3::from(data));
let version_info = VersionCompatInfoV3::from(version_info);
let payment_proof = match payment_proof {
Some(p) => Some(PaymentInfoV3::from(p)),
None => None,
};
if *compact_slate {
// for compact the Slate offset is dominate
tx.offset = tx_offset.clone();
}
SlateV3 {
num_participants,
id,
tx,
amount,
fee,
height,
lock_height,
ttl_cutoff_height,
coin_type: Some("mwc".to_string()),
network_type: Some(global::get_network_name()),
participant_data,
version_info,
payment_proof,
compact_slate: if *compact_slate { Some(true) } else { None },
}
}
}
impl From<&ParticipantData> for ParticipantDataV3 {
    /// Field-for-field conversion; all fields except `message` are `Copy`.
    fn from(data: &ParticipantData) -> ParticipantDataV3 {
        ParticipantDataV3 {
            id: data.id,
            public_blind_excess: data.public_blind_excess,
            public_nonce: data.public_nonce,
            part_sig: data.part_sig,
            message: data.message.clone(),
            message_sig: data.message_sig,
        }
    }
}
impl From<&VersionCompatInfo> for VersionCompatInfoV3 {
    /// The V3 form additionally records `orig_version`, which is seeded
    /// from the current version.
    fn from(data: &VersionCompatInfo) -> VersionCompatInfoV3 {
        VersionCompatInfoV3 {
            version: data.version,
            orig_version: data.version,
            block_header_version: data.block_header_version,
        }
    }
}
impl From<&PaymentInfo> for PaymentInfoV3 {
    /// Clone the payment-proof addresses and signature into the V3 form.
    fn from(data: &PaymentInfo) -> PaymentInfoV3 {
        PaymentInfoV3 {
            sender_address: data.sender_address.clone(),
            receiver_address: data.receiver_address.clone(),
            receiver_signature: data.receiver_signature.clone(),
        }
    }
}
impl From<Transaction> for TransactionV3 {
    /// Consume a transaction, re-encoding its body in the V3 layout.
    fn from(tx: Transaction) -> TransactionV3 {
        TransactionV3 {
            offset: tx.offset,
            body: TransactionBodyV3::from(&tx.body),
        }
    }
}
impl From<&Transaction> for TransactionV3 {
    /// Borrowing variant: clones the offset, re-encodes the body.
    fn from(tx: &Transaction) -> TransactionV3 {
        TransactionV3 {
            offset: tx.offset.clone(),
            body: TransactionBodyV3::from(&tx.body),
        }
    }
}
impl From<&TransactionBody> for TransactionBodyV3 {
fn from(body: &TransactionBody) -> TransactionBodyV3 {
let TransactionBody {
inputs,
outputs,
kernels,
} = body;
let inputs = match inputs {
Inputs::CommitOnly(commits) => {
error!("Transaction Body has type Inputs::CommitOnly, some data is lost");
map_vec!(commits, |c| InputV3 {
features: OutputFeatures::Plain,
commit: c.commitment(),
})
}
Inputs::FeaturesAndCommit(inputs) => {
map_vec!(inputs, |inp| InputV3 {
features: inp.features,
commit: inp.commit,
})
}
};
let outputs = map_vec!(outputs, |out| OutputV3::from(out));
let kernels = map_vec!(kernels, |kern| TxKernelV3::from(kern));
TransactionBodyV3 {
inputs,
outputs,
kernels,
}
}
}
impl From<&Input> for InputV3 {
    /// Direct field copy; both fields are `Copy`.
    fn from(input: &Input) -> InputV3 {
        InputV3 {
            features: input.features,
            commit: input.commit,
        }
    }
}
impl From<&Output> for OutputV3 {
    /// Flatten the nested `OutputIdentifier` into the V3 layout.
    fn from(output: &Output) -> OutputV3 {
        OutputV3 {
            features: output.identifier.features,
            commit: output.identifier.commit,
            proof: output.proof,
        }
    }
}
impl From<&TxKernel> for TxKernelV3 {
    /// Flatten the kernel's feature enum into the V3 form's separate
    /// `features` / `fee` / `lock_height` fields.
    ///
    /// NOTE(review): the NRD variant's `relative_height` payload is
    /// discarded and `lock_height` is written as 0 (see the error! below),
    /// so this conversion is lossy for NRD kernels.
    fn from(kernel: &TxKernel) -> TxKernelV3 {
        let (features, fee, lock_height) = match kernel.features {
            KernelFeatures::Plain { fee } => (CompatKernelFeatures::Plain, fee, 0),
            // Coinbase kernels carry no fee and no lock.
            KernelFeatures::Coinbase => (CompatKernelFeatures::Coinbase, 0, 0),
            KernelFeatures::HeightLocked { fee, lock_height } => {
                (CompatKernelFeatures::HeightLocked, fee, lock_height)
            }
            KernelFeatures::NoRecentDuplicate {
                fee,
                relative_height: _,
            } => {
                error!("NRD kernel not supported well. Wrong height. Fix me");
                (CompatKernelFeatures::NoRecentDuplicate, fee, 0)
            }
        };
        TxKernelV3 {
            features,
            fee,
            lock_height,
            excess: kernel.excess,
            excess_sig: kernel.excess_sig,
        }
    }
}
impl From<&ParticipantDataV3> for ParticipantData {
    /// Inverse of the `ParticipantData` → V3 conversion; field-for-field.
    fn from(data: &ParticipantDataV3) -> ParticipantData {
        ParticipantData {
            id: data.id,
            public_blind_excess: data.public_blind_excess,
            public_nonce: data.public_nonce,
            part_sig: data.part_sig,
            message: data.message.clone(),
            message_sig: data.message_sig,
        }
    }
}
impl From<&VersionCompatInfoV3> for VersionCompatInfo {
    /// Drops the V3-only `orig_version` field.
    fn from(data: &VersionCompatInfoV3) -> VersionCompatInfo {
        VersionCompatInfo {
            version: data.version,
            block_header_version: data.block_header_version,
        }
    }
}
impl From<&PaymentInfoV3> for PaymentInfo {
    /// Clone the payment-proof fields back out of the V3 form.
    fn from(data: &PaymentInfoV3) -> PaymentInfo {
        PaymentInfo {
            sender_address: data.sender_address.clone(),
            receiver_address: data.receiver_address.clone(),
            receiver_signature: data.receiver_signature.clone(),
        }
    }
}
impl From<TransactionV3> for Transaction {
    /// Consume a V3 transaction, rebuilding the current-format body.
    fn from(tx: TransactionV3) -> Transaction {
        Transaction {
            offset: tx.offset,
            body: TransactionBody::from(&tx.body),
        }
    }
}
impl From<&TransactionBodyV3> for TransactionBody {
fn from(body: &TransactionBodyV3) -> TransactionBody {
let TransactionBodyV3 {
inputs,
outputs,
kernels,
} = body;
let inputs = map_vec!(inputs, |inp| Input::from(inp));
let outputs = map_vec!(outputs, |out| Output::from(out));
let kernels = map_vec!(kernels, |kern| TxKernel::from(kern));
TransactionBody {
inputs: Inputs::FeaturesAndCommit(inputs),
outputs,
kernels,
}
}
}
impl From<&InputV3> for Input {
    /// Direct field copy; both fields are `Copy`.
    fn from(input: &InputV3) -> Input {
        Input {
            features: input.features,
            commit: input.commit,
        }
    }
}
impl From<&OutputV3> for Output {
    /// Re-nest the flat V3 fields into the `OutputIdentifier` layout.
    fn from(output: &OutputV3) -> Output {
        Output {
            identifier: OutputIdentifier {
                features: output.features,
                commit: output.commit,
            },
            proof: output.proof,
        }
    }
}
impl From<&TxKernelV3> for TxKernel {
    /// Rebuild the kernel feature enum from the V3 form's flattened fields.
    ///
    /// NOTE(review): `NRDRelativeHeight::new(lock_height).unwrap()` panics
    /// when `lock_height` is not a valid relative height — and the forward
    /// conversion writes 0 for NRD kernels, so a round-tripped NRD kernel
    /// likely hits this. TODO confirm and return an error or clamp.
    fn from(kernel: &TxKernelV3) -> TxKernel {
        let (fee, lock_height) = (kernel.fee, kernel.lock_height);
        let features = match kernel.features {
            CompatKernelFeatures::Plain => KernelFeatures::Plain { fee },
            CompatKernelFeatures::Coinbase => KernelFeatures::Coinbase,
            CompatKernelFeatures::HeightLocked => KernelFeatures::HeightLocked { fee, lock_height },
            CompatKernelFeatures::NoRecentDuplicate => KernelFeatures::NoRecentDuplicate {
                fee,
                relative_height: NRDRelativeHeight::new(lock_height).unwrap(),
            },
        };
        TxKernel {
            features,
            excess: kernel.excess,
            excess_sig: kernel.excess_sig,
        }
    }
}
/// Kernel feature discriminant used by the versioned slate formats, where
/// the `fee` / `lock_height` payloads live in separate fields rather than
/// inside the enum variants.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum CompatKernelFeatures {
    Plain,
    Coinbase,
    HeightLocked,
    NoRecentDuplicate,
}
|
use itertools::Itertools;
use std::env::args;
use std::result::Result;
/// Which half of the boarding-pass code to decode: the first seven
/// characters (F/B) select the row, the last three (L/R) the column.
#[derive(PartialEq)]
enum Seat {
    ROW,    // decoded from pass[..7], range 0..=127
    COLUMN, // decoded from pass[7..10], range 0..=7
}
/// One boarding pass: the raw code plus the seat position decoded from it.
#[derive(Debug, Clone)]
struct BoardingPass {
    /// Raw 10-character code, e.g. "FBFBBFFRLR".
    pass: String,
    /// Decoded row, 0..=127 (filled by `generate_seat_row`).
    row: usize,
    /// Decoded column, 0..=7 (filled by `generate_seat_column`).
    column: usize,
    /// row * 8 + column (filled by `create_seat_id`).
    seat_id: usize,
}
impl BoardingPass {
    /// Wrap a raw pass string; the seat fields start at 0 and are filled
    /// in by the `generate_*` / `create_seat_id` calls.
    pub fn new(new_pass: String) -> BoardingPass {
        BoardingPass {
            pass: new_pass,
            row: 0,
            column: 0,
            seat_id: 0,
        }
    }
    /// Decode the row (0..=127) from the first 7 characters
    /// (F = lower half, B = upper half).
    pub fn generate_seat_row(&mut self) -> usize {
        self.find_seat(Seat::ROW, 0, 127)
    }
    /// Decode the column (0..=7) from characters 7..10
    /// (L = lower half, R = upper half).
    pub fn generate_seat_column(&mut self) -> usize {
        self.find_seat(Seat::COLUMN, 0, 7)
    }
    /// Seat id as defined by the puzzle: row * 8 + column.
    pub fn create_seat_id(&mut self) {
        self.seat_id = (self.row * 8) + self.column;
    }
    /// Binary-space-partition the inclusive range [input_low, input_high],
    /// halving it once per character of the relevant slice of the code.
    /// Returns the converged value, or 0 if the range never converged
    /// (e.g. a malformed or too-short pass string), matching the previous
    /// behaviour.
    ///
    /// Fix: uses exact integer midpoints instead of the previous f32
    /// arithmetic (`(high - low) / 2.0` + `floor()`), which was lossy in
    /// principle and needlessly indirect.
    fn find_seat(&mut self, seat: Seat, input_low: usize, input_high: usize) -> usize {
        let mut low = input_low;
        let mut high = input_high;
        let code: &str = if seat == Seat::ROW {
            &self.pass[..7]
        } else {
            &self.pass[7..10]
        };
        for step in code.chars() {
            // Integer division == floor((high - low) / 2.0) for usize.
            let half = (high - low) / 2;
            match step {
                'B' | 'R' => low = high - half, // keep the upper half
                'F' | 'L' => high = low + half, // keep the lower half
                _ => println!("No match"),
            }
        }
        // Once low == high the range stays converged (half == 0), so the
        // final check is equivalent to the old "first convergence" flag.
        if low == high {
            low
        } else {
            0
        }
    }
}
/// Parse one boarding pass per line, decode row/column/seat id for each,
/// and return the passes sorted ascending by seat id.
///
/// Takes `&str` (backward compatible with `&String` call sites via deref
/// coercion); builds the vector directly with `collect` instead of the
/// previous push-and-count-via-`map(.. 1).sum()` trick, and sorts with
/// `sort_by_key` since `usize` has a total order.
fn read_boarding_passes_and_sort(content: &str) -> Result<Vec<BoardingPass>, &'static str> {
    let mut passes: Vec<BoardingPass> = content
        .lines()
        .map(|line| {
            let mut new_pass = BoardingPass::new(String::from(line));
            new_pass.row = new_pass.generate_seat_row();
            new_pass.column = new_pass.generate_seat_column();
            new_pass.create_seat_id();
            new_pass
        })
        .collect();
    passes.sort_by_key(|p| p.seat_id);
    println!("Count: {}", passes.len());
    Ok(passes)
}
/// Puzzle part 1: the highest seat id on any pass.
/// Accepts a slice instead of `&Vec` (call sites coerce automatically).
/// Panics on an empty list, as before.
fn part1(passes: &[BoardingPass]) -> usize {
    passes.iter().map(|a| a.seat_id).max().unwrap()
}
/// Puzzle part 2: our seat is the single missing id between two occupied
/// seats. Very front/back rows are excluded because seats there may be
/// legitimately absent. Returns 0 when no gap is found.
/// Accepts a slice instead of `&Vec` (call sites coerce automatically);
/// keeps the original last-match semantics of the scan.
fn part2(passes: &[BoardingPass]) -> usize {
    let mut missing_id: usize = 0;
    // `passes` is sorted by seat id, so adjacent pairs with a gap of
    // exactly 2 bracket a single missing seat.
    for (pass1, pass2) in passes
        .iter()
        .filter(|a| (a.row > 0 && a.row < 127))
        .tuple_windows()
    {
        if (pass2.seat_id - pass1.seat_id) == 2 {
            missing_id = pass1.seat_id + 1;
        }
    }
    missing_id
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked example from the puzzle: "FBFBBFFRLR" decodes to row 44,
    // column 5, seat id 357.
    const TEST_INPUT: &str = r#"FBFBBFFRLR"#;
    #[test]
    fn testcase1() {
        let content = String::from(TEST_INPUT);
        let passes = read_boarding_passes_and_sort(&content).unwrap();
        let count = part1(&passes);
        assert_eq!(count, 357);
    }
}
/// Entry point: reads the input file named by the first CLI argument and
/// prints both puzzle answers.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let filename = args().nth(1).ok_or("I need a filename")?;
    let content = std::fs::read_to_string(&filename)?;
    // Propagate failures via `?` instead of panicking with unwrap();
    // the &'static str error converts into Box<dyn Error>.
    let boarding_passes = read_boarding_passes_and_sort(&content)?;
    let part1_answer = part1(&boarding_passes);
    println!("Part1 Answer: {}", part1_answer);
    let part2_answer = part2(&boarding_passes);
    println!("Part2 Answer: {}", part2_answer);
    Ok(())
}
|
use std::sync::Arc;
use anyhow::Context;
use axum::extract::{Extension, Path, Query};
use hyper::{Body, Request, Response};
use percent_encoding::{percent_encode, NON_ALPHANUMERIC};
use serde_derive::Deserialize;
use serde_json::Value as JsonValue;
use svc_utils::extractors::AccountIdExtractor;
use tracing::error;
use url::Url;
use uuid::Uuid;
use super::FEATURE_POLICY;
use crate::app::error::ErrorExt;
use crate::app::error::ErrorKind as AppErrorKind;
use crate::app::http::Json;
use crate::app::AppContext;
use crate::app::{authz::AuthzObject, metrics::AuthorizeMetrics};
use crate::db::class::AsClassType;
pub type AppError = crate::app::error::Error;
pub type AppResult = Result<Response<Body>, AppError>;
/// Liveness probe endpoint; always responds 200 with body "Ok".
pub async fn healthz() -> &'static str {
    "Ok"
}
/// POST handler: forward an arbitrary event payload into the event room of
/// class `id`, after authorizing the caller for "update" on the classroom.
///
/// Responds 201 with an empty JSON object on success.
pub async fn create_event(
    Extension(ctx): Extension<Arc<dyn AppContext>>,
    Path(id): Path<Uuid>,
    AccountIdExtractor(account_id): AccountIdExtractor,
    Json(mut payload): Json<JsonValue>,
) -> AppResult {
    let class = find_class(ctx.as_ref(), id)
        .await
        .error(AppErrorKind::ClassNotFound)?;
    // Authorization object is the classroom itself.
    let object = AuthzObject::new(&["classrooms", &class.id().to_string()]).into();
    ctx.authz()
        .authorize(
            class.audience().to_owned(),
            account_id.clone(),
            object,
            "update".into(),
        )
        .await
        .measure()?;
    // Force the event into the class's event room, overriding whatever
    // room_id the client may have sent.
    payload["room_id"] = serde_json::to_value(class.event_room_id()).unwrap();
    let result = ctx.event_client().create_event(payload).await;
    // Log failures with the classroom id before mapping to an API error.
    if let Err(e) = &result {
        error!(
            classroom_id = ?id,
            "Failed to create event in event room, err = {:?}", e
        );
    }
    result
        .context("Failed to create event")
        .error(AppErrorKind::InvalidPayload)?;
    let response = Response::builder()
        .status(201)
        .body(Body::from("{}"))
        .unwrap();
    Ok(response)
}
/// Load a class by id, failing when no such class exists.
pub async fn find_class(
    state: &dyn AppContext,
    id: Uuid,
) -> anyhow::Result<crate::db::class::Object> {
    // Scope the connection so it is released before we return the result.
    let class = {
        let mut conn = state.get_conn().await?;
        let found = crate::db::class::ReadQuery::by_id(id)
            .execute(&mut conn)
            .await?;
        found.ok_or_else(|| anyhow!("Failed to find class"))?
    };
    Ok(class)
}
/// Load a class by (audience, scope), failing when none matches.
async fn find_class_by_scope(
    state: &dyn AppContext,
    audience: &str,
    scope: &str,
) -> anyhow::Result<crate::db::class::Object> {
    // Scope the connection so it is released before we return the result.
    let class = {
        let mut conn = state.get_conn().await?;
        let found = crate::db::class::ReadQuery::by_scope(audience, scope)
            .execute(&mut conn)
            .await?;
        found.ok_or_else(|| anyhow!("Failed to find class by scope"))?
    };
    Ok(class)
}
/// Query-string parameters accepted by `redirect_to_frontend`.
#[derive(Deserialize)]
pub struct RedirQuery {
    /// Scope whose frontend URL should be looked up.
    pub scope: String,
}
/// Redirect (307) the caller to the tenant frontend serving `query.scope`,
/// falling back to the default frontend URL when the lookup fails for any
/// reason. The original query string is preserved and a percent-encoded
/// `backurl` parameter pointing back at this service is appended.
pub async fn redirect_to_frontend(
    ctx: Extension<Arc<dyn AppContext>>,
    Path((tenant, app)): Path<(String, String)>,
    Query(query): Query<RedirQuery>,
    request: Request<Body>,
) -> AppResult {
    let conn = ctx.get_conn().await;
    // Best-effort lookup of a scope-specific frontend URL; any failure
    // (no connection, query error, missing row, unparseable URL) falls
    // through to None and thus to the default URL below.
    let base_url = match conn {
        Err(e) => {
            error!("Failed to acquire conn: {:?}", e);
            None
        }
        Ok(mut conn) => {
            let fe =
                crate::db::frontend::FrontendByScopeQuery::new(query.scope.clone(), app.clone())
                    .execute(&mut conn)
                    .await;
            match fe {
                Err(e) => {
                    error!("Failed to find frontend: {:?}", e);
                    None
                }
                Ok(Some(frontend)) => {
                    let u = Url::parse(&frontend.url);
                    u.ok()
                }
                Ok(None) => None,
            }
        }
    };
    let mut url = base_url.unwrap_or_else(|| ctx.build_default_frontend_url_new(&tenant, &app));
    // Carry the caller's query string over to the frontend URL.
    url.set_query(request.uri().query());
    // Add dispatcher base URL as `backurl` get parameter.
    let back_url = crate::app::api::build_back_url(&request)?.to_string();
    // Percent-encode it since it's being passed as a get parameter.
    let urlencoded_back_url =
        percent_encode(back_url.as_str().as_bytes(), NON_ALPHANUMERIC).to_string();
    url.query_pairs_mut()
        .append_pair("backurl", &urlencoded_back_url);
    let url = url.to_string();
    let response = Response::builder()
        .status(307)
        .header("Location", &url)
        .header("Feature-Policy", FEATURE_POLICY)
        .body(Body::empty())
        .unwrap();
    Ok(response)
}
/// Load a class of kind `T` by id, failing when no such class exists.
async fn find<T: AsClassType>(
    state: &dyn AppContext,
    id: Uuid,
) -> anyhow::Result<crate::db::class::Object> {
    // Scope the connection so it is released before we return the result.
    let class = {
        let mut conn = state.get_conn().await?;
        let found = crate::db::class::GenericReadQuery::<T>::by_id(id)
            .execute(&mut conn)
            .await?;
        found.ok_or_else(|| anyhow!("Failed to find {}", T::as_str()))?
    };
    Ok(class)
}
/// Load a class of kind `T` by (audience, scope), failing when none
/// matches.
async fn find_by_scope<T: AsClassType>(
    state: &dyn AppContext,
    audience: &str,
    scope: &str,
) -> anyhow::Result<crate::db::class::Object> {
    // Scope the connection so it is released before we return the result.
    let class = {
        let mut conn = state.get_conn().await?;
        let found = crate::db::class::GenericReadQuery::<T>::by_scope(audience, scope)
            .execute(&mut conn)
            .await?;
        found.ok_or_else(|| anyhow!("Failed to find {} by scope", T::as_str()))?
    };
    Ok(class)
}
pub mod account;
pub mod authz;
pub mod class;
pub mod minigroup;
pub mod p2p;
#[cfg(test)]
mod tests;
pub mod webinar;
|
// svd2rust-style generated register API: reader/writer wrappers for the
// HSECR register plus per-field reader/writer type aliases (parameterized
// by register spec, field width and bit offset).
#[doc = "Register `HSECR` reader"]
pub type R = crate::R<HSECR_SPEC>;
#[doc = "Register `HSECR` writer"]
pub type W = crate::W<HSECR_SPEC>;
#[doc = "Field `UNLOCKED` reader - Register lock system"]
pub type UNLOCKED_R = crate::BitReader;
#[doc = "Field `UNLOCKED` writer - Register lock system"]
pub type UNLOCKED_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HSES` reader - HSE Sense amplifier threshold"]
pub type HSES_R = crate::BitReader;
#[doc = "Field `HSES` writer - HSE Sense amplifier threshold"]
pub type HSES_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HSEGMC` reader - HSE current control"]
pub type HSEGMC_R = crate::FieldReader;
#[doc = "Field `HSEGMC` writer - HSE current control"]
pub type HSEGMC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `HSETUNE` reader - HSE capacitor tuning"]
pub type HSETUNE_R = crate::FieldReader;
// Field read accessors: pure bit extraction from the cached register value
// in `self.bits`; no hardware access happens here.
impl R {
    #[doc = "Bit 0 - Register lock system"]
    #[inline(always)]
    pub fn unlocked(&self) -> UNLOCKED_R {
        UNLOCKED_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 3 - HSE Sense amplifier threshold"]
    #[inline(always)]
    pub fn hses(&self) -> HSES_R {
        HSES_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 4:6 - HSE current control"]
    #[inline(always)]
    pub fn hsegmc(&self) -> HSEGMC_R {
        HSEGMC_R::new(((self.bits >> 4) & 7) as u8)
    }
    #[doc = "Bits 8:13 - HSE capacitor tuning"]
    #[inline(always)]
    pub fn hsetune(&self) -> HSETUNE_R {
        HSETUNE_R::new(((self.bits >> 8) & 0x3f) as u8)
    }
}
// Field write accessors.
// NOTE(review): `HSETUNE` has a reader but no writer accessor here —
// presumably the SVD restricts writes to that field; confirm against the
// reference manual. Raw writes remain possible through `bits()`.
impl W {
    #[doc = "Bit 0 - Register lock system"]
    #[inline(always)]
    #[must_use]
    pub fn unlocked(&mut self) -> UNLOCKED_W<HSECR_SPEC, 0> {
        UNLOCKED_W::new(self)
    }
    #[doc = "Bit 3 - HSE Sense amplifier threshold"]
    #[inline(always)]
    #[must_use]
    pub fn hses(&mut self) -> HSES_W<HSECR_SPEC, 3> {
        HSES_W::new(self)
    }
    #[doc = "Bits 4:6 - HSE current control"]
    #[inline(always)]
    #[must_use]
    pub fn hsegmc(&mut self) -> HSEGMC_W<HSECR_SPEC, 4> {
        HSEGMC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Clock HSE register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hsecr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`hsecr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HSECR_SPEC;
impl crate::RegisterSpec for HSECR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`hsecr::R`](R) reader structure"]
impl crate::Readable for HSECR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`hsecr::W`](W) writer structure"]
impl crate::Writable for HSECR_SPEC {
    // No fields are declared as write-1-to-clear / write-1-to-set.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets HSECR to value 0x30"]
impl crate::Resettable for HSECR_SPEC {
    const RESET_VALUE: Self::Ux = 0x30;
}
|
use common::Result;
use event::obj::Dispatch;
use serde::{Deserialize, Serialize};
use std::convert::AsRef;
use std::{
collections::HashMap,
sync::{Arc, Mutex},
};
use strum::AsRefStr;
/// Database change notification kinds; the strum serializations double as
/// the event keys passed to `Dispatch::dispatch`.
#[derive(AsRefStr, Debug)]
pub enum Event {
    #[strum(serialize = "Add")]
    Add,
    #[strum(serialize = "Delete")]
    Delete,
    #[strum(serialize = "Update")]
    Update,
}
/// Access a value's underlying `Pod`, if it has one.
pub trait GetPod {
    fn get(&self) -> Option<&Pod>;
}
/// Metadata and collection state for one pod's log stream.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Pod {
    // on this the uuid path is unique identifier
    pub uuid: String,
    /// Read offset into the log source, advanced as data is consumed.
    pub offset: usize,
    pub namespace: String,
    pub pod_name: String,
    pub container_name: String,
    /// Whether collected logs should be uploaded.
    pub upload: bool,
    /// Filter expression applied to the stream (semantics defined by the
    /// consumer — not visible here).
    pub filter: String,
    /// Output target for the stream (semantics defined by the consumer).
    pub output: String,
}
impl Default for Pod {
    /// An empty record: all strings blank, offset 0, uploads disabled.
    fn default() -> Pod {
        Pod {
            uuid: String::new(),
            offset: 0,
            namespace: String::new(),
            pod_name: String::new(),
            container_name: String::new(),
            upload: false,
            filter: String::new(),
            output: String::new(),
        }
    }
}
impl GetPod for Pod {
    // A Pod trivially yields itself.
    fn get(&self) -> Option<&Pod> {
        Some(self)
    }
}
/// Convenience alias for a list of pods.
type PodList = Vec<Pod>;
/// Newtype wrapper that knows how to marshal a pod list to JSON.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct PodListMarshaller(PodList);
impl PodListMarshaller {
    /// Serialize the wrapped pod list to JSON, yielding an empty string
    /// when serialization fails.
    pub fn to_json(&self) -> String {
        serde_json::to_string(&self.0).unwrap_or_default()
    }
}
/// In-memory pod registry that broadcasts every mutation through an event
/// dispatcher.
pub struct Database {
    // pod key is the pod path uuid
    pods: HashMap<String, Pod>,
    // pod op registry and handle events
    event_handler: Dispatch<Pod>,
}
impl Database {
    /// Create a registry around an already-configured event dispatcher.
    pub fn new(event_handler: Dispatch<Pod>) -> Self {
        Self {
            pods: HashMap::new(),
            event_handler,
        }
    }
    /// Snapshot of every pod, wrapped for JSON marshalling.
    pub fn all(&self) -> PodListMarshaller {
        // values().cloned() replaces the old iter().map(|(_, v)| v.clone()).
        PodListMarshaller(self.pods.values().cloned().collect::<Vec<Pod>>())
    }
    /// Look up a pod by its uuid (path) key.
    pub fn get(&self, uuid: String) -> Option<&Pod> {
        self.pods.get(&uuid)
    }
    /// Advance the read offset of the pod identified by `uuid`.
    ///
    /// Still panics on an unknown uuid (as before), but now with a
    /// descriptive message instead of a bare unwrap.
    pub fn incr_offset_by_uuid(&mut self, uuid: String, incr_size: usize) {
        self.pods
            .get_mut(&uuid)
            .expect("incr_offset_by_uuid: unknown pod uuid")
            .offset += incr_size
    }
    /// All (uuid, pod) pairs belonging to `namespace`/`pod`; non-matching
    /// entries are represented as `None` (interface preserved for callers
    /// that index into the full-length result).
    pub fn get_by_namespace_pod(
        &self,
        namespace: String,
        pod: String,
    ) -> Vec<Option<(String, Pod)>> {
        self.pods
            .iter()
            .map(|(k, v)| {
                if v.namespace == namespace && v.pod_name == pod {
                    Some((k.clone(), v.clone()))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
    }
    /// Insert (or replace) a pod, notifying `Add` subscribers first.
    pub fn put(&mut self, pod: Pod) -> Result<()> {
        self.event_handler
            .dispatch(Event::Add.as_ref().to_string(), pod.clone());
        self.pods.insert(pod.uuid.clone(), pod);
        Ok(())
    }
    /// Remove every pod in `namespace`/`pod`, dispatching a `Delete` event
    /// per removed entry.
    pub fn delete_by_namespace_pod(&mut self, namespace: String, pod: String) -> Result<()> {
        // Dispatch directly from a filtered iterator instead of first
        // collecting a Vec<Option<..>> and skipping the Nones.
        for v in self
            .pods
            .values()
            .filter(|v| v.namespace == namespace && v.pod_name == pod)
        {
            self.event_handler
                .dispatch(Event::Delete.as_ref().to_string(), v.clone());
        }
        self.pods
            .retain(|_, v| !(v.namespace == namespace && v.pod_name == pod));
        Ok(())
    }
    /// Remove a single pod by uuid, dispatching `Delete` when it existed.
    pub fn delete(&mut self, uuid: String) -> Result<()> {
        // if-let replaces the old match with an empty catch-all arm.
        if let Some(pod) = self.pods.get(&uuid) {
            self.event_handler
                .dispatch(Event::Delete.as_ref().to_string(), pod.clone());
            self.pods.remove(&uuid);
        }
        Ok(())
    }
    /// Replace the pod stored under `uuid`, dispatching `Update` first.
    pub fn update(&mut self, uuid: String, pod: Pod) -> Result<()> {
        self.event_handler
            .dispatch(Event::Update.as_ref().to_string(), pod.clone());
        self.pods.insert(uuid, pod);
        Ok(())
    }
}
/// Wrap a database for shared, mutex-guarded use across threads.
pub fn new_sync_database(db: Database) -> Arc<Mutex<Database>> {
    Arc::new(Mutex::new(db))
}
#[cfg(test)]
mod tests {
    use crate::Event;
    // The strum serializations must match the variant names exactly, since
    // they are used as dispatch keys.
    #[test]
    fn event_it_works() {
        assert_eq!(Event::Add.as_ref(), "Add");
        assert_eq!(Event::Delete.as_ref(), "Delete");
        assert_eq!(Event::Update.as_ref(), "Update");
    }
}
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE(review): the comparisons below are intentionally "wrong" — this
// reads like a Clippy UI-test fixture for the `cmp_owned` lint (allocating
// an owned value only to compare it with a borrowed one). If a companion
// .stderr file pins line numbers, added comment lines would desync it;
// confirm before keeping these.
#[warn(clippy::cmp_owned)]
#[allow(clippy::unnecessary_operation)]
fn main() {
    fn with_to_string(x: &str) {
        x != "foo".to_string();
        "foo".to_string() != x;
    }
    let x = "oh";
    with_to_string(x);
    x != "foo".to_owned();
    x != String::from("foo");
    42.to_string() == "42";
    Foo.to_owned() == Foo;
    "abc".chars().filter(|c| c.to_owned() != 'X');
    // Already-idiomatic form, presumably present as the lint's "good" case.
    "abc".chars().filter(|c| *c != 'X');
    let x = &Baz;
    let y = &Baz;
    y.to_owned() == *x;
    let x = &&Baz;
    let y = &Baz;
    y.to_owned() == **x;
}
// Fixture type whose `to_owned` deliberately returns a *different* type
// (`Bar`), exercising the lint against non-Clone-like `ToOwned` impls.
struct Foo;
impl PartialEq for Foo {
    fn eq(&self, other: &Self) -> bool {
        // Compares via the `PartialEq<Foo> for Bar` impl below.
        self.to_owned() == *other
    }
}
impl ToOwned for Foo {
    type Owned = Bar;
    fn to_owned(&self) -> Bar {
        Bar
    }
}
// Owned counterpart of `Foo`; compares equal to any `Foo` by construction.
#[derive(PartialEq)]
struct Bar;
impl PartialEq<Foo> for Bar {
    fn eq(&self, _: &Foo) -> bool {
        true
    }
}
impl std::borrow::Borrow<Foo> for Bar {
    // Borrow is required for a coherent Foo/Bar ToOwned pair; a static
    // zero-sized Foo suffices since both are unit structs.
    fn borrow(&self) -> &Foo {
        static FOO: Foo = Foo;
        &FOO
    }
}
// Self-owned fixture: `to_owned` yields another `Baz`, letting the test
// compare `y.to_owned()` against dereferenced borrows.
#[derive(PartialEq)]
struct Baz;
impl ToOwned for Baz {
    type Owned = Baz;
    fn to_owned(&self) -> Baz {
        Baz
    }
}
|
//! This module contains the various end point definitions for stellar's horizon
//! API server.
use error::Result;
use serde::de::DeserializeOwned;
use http;
pub mod account;
pub mod asset;
pub mod ledger;
pub mod payment;
pub mod transaction;
mod records;
pub use self::records::Records;
/// Represents the body of a request to an EndPoint.
/// Currently only the empty case exists — presumably to be extended once
/// an endpoint needs a payload.
#[derive(Debug)]
pub enum Body {
    /// Declares that the endpoint does not have a body.
    None,
}
/// Declares the definition of a stellar endpoint and the return type.
pub trait EndPoint {
    /// The deserializable type that is expected to come back from the stellar server.
    type Response: DeserializeOwned;
    /// Converts the implementing struct into an http request addressed to
    /// `host`. The request body is generally empty (`Body::None`).
    fn into_request(self, host: &str) -> Result<http::Request<Body>>;
}
/// The order to return results in.
/// See `Order::to_param` for the query-string form.
#[derive(Debug)]
pub enum Order {
    /// Order the results ascending
    Asc,
    /// Order the results descending
    Desc,
}
impl Order {
    /// The query-string value for this ordering ("asc" / "desc").
    pub(crate) fn to_param(&self) -> String {
        let param = match self {
            Order::Asc => "asc",
            Order::Desc => "desc",
        };
        param.to_string()
    }
}
|
use std::collections::HashMap;
use game::{State, Action, Reward};
use game::Action::*;
use gpi::Alg;
use util::VaryingStepSizer;
/// Maps a (state, action) pair to its estimated long-term reward.
type ValueFn = HashMap<(State, Action), Reward>;
/// Tabular Monte Carlo control: records each (state, action) pair once per
/// episode and moves its estimate toward the episode's total reward.
pub struct MonteCarlo {
    // Action-value table (Q-function).
    value_fn: ValueFn,
    // Per-pair step sizes (presumably decaying with visit count — see
    // util::VaryingStepSizer; TODO confirm).
    step_sizer: VaryingStepSizer,
    // Total reward accumulated during the current episode.
    reward_this_episode: Reward,
    // Pairs seen this episode; the bool is only a presence marker.
    visited_this_episode: HashMap<(State, Action), bool>,
}
impl MonteCarlo {
pub fn new() -> Self {
MonteCarlo {
value_fn: HashMap::new(),
step_sizer: VaryingStepSizer::new(),
reward_this_episode: 0.0,
visited_this_episode: HashMap::new(),
}
}
}
impl Alg for MonteCarlo {
    /// Greedy action: whichever of Hit/Stick has the larger estimate; ties
    /// (and unseen pairs) go to Stick, matching the strict `>` comparison.
    fn choose_best_action(&self, state: State) -> Action {
        let hit = *self.value_fn.get(&(state, Hit)).unwrap_or(&0.0);
        let stick = *self.value_fn.get(&(state, Stick)).unwrap_or(&0.0);
        if hit > stick { Hit } else { Stick }
    }
    /// Current value estimate for a state/action pair (0 if never seen).
    fn get_expected_reward(&self, state: State, action: Action) -> Reward {
        *self.value_fn.get(&(state, action)).unwrap_or(&0.0)
    }
    /// Reset the per-episode accumulators.
    fn on_episode_begin(&mut self) {
        self.reward_this_episode = 0.0;
        // clear() instead of drain(): we discard the entries, so there is
        // no need to construct (and drop) an iterator over them.
        self.visited_this_episode.clear();
    }
    fn on_episode_step(&mut self, state: State, action: Action,
                       reward: Reward, _next_state: State,
                       _next_action: Option<Action>) -> Option<Action> {
        // We only care about the *first* time a state/action pair
        // was visited in an episode.
        self.visited_this_episode.entry((state, action)).or_insert(true);
        self.reward_this_episode += reward;
        None
    }
    /// Update every pair visited this episode: move its estimate toward the
    /// episode's total reward by the pair's step size.
    fn on_episode_end(&mut self) {
        for &(state, action) in self.visited_this_episode.keys() {
            let old_value = *self.value_fn.get(&(state, action))
                .unwrap_or(&0.0);
            let step_size = self.step_sizer.update(state, action);
            let new_value = old_value + step_size *
                (self.reward_this_episode - old_value);
            self.value_fn.insert((state, action), new_value);
        }
    }
}
|
/// AoC 2017 day 2 part 1: the spreadsheet checksum — the sum over all rows
/// of (largest value - smallest value).
///
/// Fix: rows containing no numbers (e.g. a blank line) now contribute 0
/// instead of panicking on `max()/min().unwrap()` of an empty iterator.
/// Malformed tokens still panic, as before.
pub fn parse(data: &str) -> usize {
    data.lines()
        .map(|line| {
            let row: Vec<usize> = line
                .split_whitespace()
                .map(|chr| chr.parse::<usize>().unwrap())
                .collect();
            match (row.iter().max(), row.iter().min()) {
                (Some(max), Some(min)) => max - min,
                // Empty row: nothing to measure.
                _ => 0,
            }
        })
        .sum()
}
#[cfg(test)]
mod tests {
    use super::parse;
    #[test]
    fn day02_part1_test1() {
        // Row differences are 8, 4 and 6, so the checksum is 18.
        let spreadsheet = "5 1 9 5\n7 5 3\n2 4 6 8";
        assert_eq!(18, parse(spreadsheet));
    }
}
|
use async_trait::async_trait;
use crate::event::EventHandler;
use crate::result::Result;
/// Something that can register an event handler with an event source.
/// Resolves to a bool — presumably whether the subscription was
/// established; the exact meaning is not visible here, TODO confirm.
#[async_trait]
pub trait EventSubscriber {
    async fn subscribe(
        &self,
        handler: Box<dyn EventHandler>, // TODO: use generics.
    ) -> Result<bool>;
}
|
use crate::{
regressor::{
sgd::SGD,
adagrad::AdaGrad,
rmsprop::RMSProp,
adam::Adam,
utils::*,
}
};
/// Shared hyper-parameters for the gradient-descent regressors
/// (SGD / AdaGrad / RMSProp / Adam).
#[derive(Copy, Clone, Debug)]
pub struct Config {
    /// The maximum number of passes over the training data.
    pub iterations: usize,
    /// Constant that multiplies the regularization term.
    pub alpha: f64,
    /// The penalty (aka regularization term) to be used.
    pub penalty: Penalty,
    /// The stopping criterion.
    /// Training will stop when `error > best_error - tolerance`.
    pub tolerance: f64,
    /// Whether or not the training data should be shuffled after each epoch.
    pub shuffle: bool,
    /// Should an important information be printed.
    pub verbose: bool,
    /// Number of iterations with no improvement to wait before early stopping.
    pub stumble: usize,
    /// The conservation factor (running-average decay; presumably used by
    /// RMSProp — TODO confirm). Promoted from `//` to `///` so it shows in
    /// rustdoc.
    pub gamma: f64,
    /// The conservation factor for gradient (first-moment decay).
    pub beta_m: f64,
    /// The conservation factor for eta (second-moment decay).
    pub beta_v: f64,
    /// The initial learning rate.
    pub eta: f64,
    /// A small value to avoid division by zero.
    pub epsilon: f64,
}
impl Config {
    // Chainable setters; each `builder_field!` presumably expands to a
    // `fn <name>(mut self, value) -> Self` (project macro — see utils).
    builder_field!(iterations, usize);
    builder_field!(alpha, f64);
    builder_field!(penalty, Penalty);
    builder_field!(tolerance, f64);
    builder_field!(shuffle, bool);
    builder_field!(verbose, bool);
    builder_field!(stumble, usize);
    builder_field!(gamma, f64);
    builder_field!(beta_m, f64);
    builder_field!(beta_v, f64);
    builder_field!(eta, f64);
    builder_field!(epsilon, f64);
    // NOTE(review): the to_* names below are non-snake-case; renaming would
    // break existing callers — consider `#[allow(non_snake_case)]` plus
    // snake_case aliases in a future breaking release.
    /// Fit a linear model with Stochastic Gradient Descent.
    pub fn to_SGD(self) -> SGD { SGD::new(self) }
    /// Fit a linear model with Adaptive Gradient Descent.
    pub fn to_AdaGrad(self) -> AdaGrad { AdaGrad::new(self) }
    /// Fit a linear model with AdaGrad with root mean square propagation.
    pub fn to_RMSProp(self) -> RMSProp { RMSProp::new(self) }
    /// Fit a linear model with Adaptive Moment Estimation.
    pub fn to_Adam(self) -> Adam { Adam::new(self) }
}
impl Default for Config {
    /// Conventional starting hyper-parameters: L2 regularization, 1000
    /// epochs, shuffling on, quiet output.
    fn default() -> Self {
        Self {
            iterations: 1000,
            alpha: 1e-4,
            penalty: Penalty::L2,
            tolerance: 1e-3,
            shuffle: true,
            verbose: false,
            stumble: 6,
            gamma: 0.9,
            beta_m: 0.9,
            beta_v: 0.9,
            eta: 1e-2,
            epsilon: 1e-8,
        }
    }
}
|
//! blsic
/// Isolate lowest set bit and complement
pub trait Blsic {
    /// Clears least significant bit and sets all other bits.
    ///
    /// If there is no set bit in `self`, it sets all the bits.
    ///
    /// # Instructions
    ///
    /// - [`BLSIC`](http://support.amd.com/TechDocs/24594.pdf):
    ///   - Description: Isolate lowest set bit and complement.
    ///   - Architecture: x86.
    ///   - Instruction set: TBM.
    ///   - Registers: 32/64 bit.
    ///
    /// # Example
    ///
    /// ```
    /// # use bitintr::*;
    /// assert_eq!(0b0101_0100u8.blsic(), 0b1111_1011u8);
    /// assert_eq!(0b0000_0000u8.blsic(), 0b1111_1111u8);
    /// ```
    fn blsic(self) -> Self;
}
// Software fallback for BLSIC: for x != 0, `x - 1` flips the lowest set
// bit and every bit below it, so `!x | (x - 1)` yields all bits set except
// the lowest set bit of x. For x == 0 the subtraction wraps and the result
// is all ones, matching the documented contract.
macro_rules! impl_blsic {
    ($id:ident) => {
        impl Blsic for $id {
            #[inline]
            fn blsic(self) -> Self {
                !self | (self.wrapping_sub(1))
            }
        }
    };
}
// Blanket-apply to all fixed-width integer types (project helper macro).
impl_all!(impl_blsic: u8, u16, u32, u64, i8, i16, i32, i64);
|
use Pixel;
// Named web (X11/CSS) colors as 8-bit RGB `Pixel` constants, in alphabetical
// order. GRAY/GREY (and similar) pairs are deliberate aliases.
pub static ALICE_BLUE: Pixel = Pixel {r: 240, g: 248, b: 255 };
pub static ANTIQUE_WHITE: Pixel = Pixel {r: 250, g: 235, b: 215 };
pub static AQUA: Pixel = Pixel {r: 0, g: 255, b: 255 };
pub static AQUAMARINE: Pixel = Pixel {r: 127, g: 255, b: 212 };
pub static AZURE: Pixel = Pixel {r: 240, g: 255, b: 255 };
pub static BEIGE: Pixel = Pixel {r: 245, g: 245, b: 220 };
pub static BISQUE: Pixel = Pixel {r: 255, g: 228, b: 196 };
pub static BLACK: Pixel = Pixel {r: 0, g: 0, b: 0 };
pub static BLANCHED_ALMOND: Pixel = Pixel {r: 255, g: 235, b: 205 };
pub static BLUE: Pixel = Pixel {r: 0, g: 0, b: 255 };
pub static BLUE_VIOLET: Pixel = Pixel {r: 138, g: 43, b: 226 };
pub static BROWN: Pixel = Pixel {r: 165, g: 42, b: 42 };
pub static BURLYWOOD: Pixel = Pixel {r: 222, g: 184, b: 135 };
pub static CADET_BLUE: Pixel = Pixel {r: 95, g: 158, b: 160 };
pub static CHARTREUSE: Pixel = Pixel {r: 127, g: 255, b: 0 };
pub static CHOCOLATE: Pixel = Pixel {r: 210, g: 105, b: 30 };
pub static CORAL: Pixel = Pixel {r: 255, g: 127, b: 80 };
pub static CORNFLOWER_BLUE: Pixel = Pixel {r: 100, g: 149, b: 237 };
pub static CORNSILK: Pixel = Pixel {r: 255, g: 248, b: 220 };
pub static CRIMSON: Pixel = Pixel {r: 220, g: 20, b: 60 };
pub static CYAN: Pixel = Pixel {r: 0, g: 255, b: 255 };
pub static DARK_BLUE: Pixel = Pixel {r: 0, g: 0, b: 139 };
pub static DARK_CYAN: Pixel = Pixel {r: 0, g: 139, b: 139 };
pub static DARK_GOLDENROD: Pixel = Pixel {r: 184, g: 134, b: 11 };
pub static DARK_GRAY: Pixel = Pixel {r: 169, g: 169, b: 169 };
pub static DARK_GREEN: Pixel = Pixel {r: 0, g: 100, b: 0 };
pub static DARK_GREY: Pixel = Pixel {r: 169, g: 169, b: 169 };
pub static DARK_KHAKI: Pixel = Pixel {r: 189, g: 183, b: 107 };
pub static DARK_MAGENTA: Pixel = Pixel {r: 139, g: 0, b: 139 };
pub static DARK_OLIVE_GREEN: Pixel = Pixel {r: 85, g: 107, b: 47 };
pub static DARK_ORANGE: Pixel = Pixel {r: 255, g: 140, b: 0 };
pub static DARK_ORCHID: Pixel = Pixel {r: 153, g: 50, b: 204 };
pub static DARK_RED: Pixel = Pixel {r: 139, g: 0, b: 0 };
pub static DARK_SALMON: Pixel = Pixel {r: 233, g: 150, b: 122 };
pub static DARK_SEAGREEN: Pixel = Pixel {r: 143, g: 188, b: 143 };
pub static DARK_SLATE_BLUE: Pixel = Pixel {r: 72, g: 61, b: 139 };
pub static DARK_SLATE_GRAY: Pixel = Pixel {r: 47, g: 79, b: 79 };
pub static DARK_SLATE_GREY: Pixel = Pixel {r: 47, g: 79, b: 79 };
pub static DARK_TURQUOISE: Pixel = Pixel {r: 0, g: 206, b: 209 };
pub static DARK_VIOLET: Pixel = Pixel {r: 148, g: 0, b: 211 };
pub static DEEP_PINK: Pixel = Pixel {r: 255, g: 20, b: 147 };
pub static DEEP_SKYBLUE: Pixel = Pixel {r: 0, g: 191, b: 255 };
pub static DIM_GRAY: Pixel = Pixel {r: 105, g: 105, b: 105 };
pub static DIM_GREY: Pixel = Pixel {r: 105, g: 105, b: 105 };
pub static DODGER_BLUE: Pixel = Pixel {r: 30, g: 144, b: 255 };
pub static FIREBRICK: Pixel = Pixel {r: 178, g: 34, b: 34 };
pub static FLORAL_WHITE: Pixel = Pixel {r: 255, g: 250, b: 240 };
pub static FOREST_GREEN: Pixel = Pixel {r: 34, g: 139, b: 34 };
pub static FUCHSIA: Pixel = Pixel {r: 255, g: 0, b: 255 };
pub static GAINSBORO: Pixel = Pixel {r: 220, g: 220, b: 220 };
pub static GHOST_WHITE: Pixel = Pixel {r: 248, g: 248, b: 255 };
pub static GOLD: Pixel = Pixel {r: 255, g: 215, b: 0 };
pub static GOLDENROD: Pixel = Pixel {r: 218, g: 165, b: 32 };
pub static GRAY: Pixel = Pixel {r: 128, g: 128, b: 128 };
pub static GREY: Pixel = Pixel {r: 128, g: 128, b: 128 };
pub static GREEN: Pixel = Pixel {r: 0, g: 128, b: 0 };
pub static GREEN_YELLOW: Pixel = Pixel {r: 173, g: 255, b: 47 };
pub static HONEYDEW: Pixel = Pixel {r: 240, g: 255, b: 240 };
pub static HOT_PINK: Pixel = Pixel {r: 255, g: 105, b: 180 };
pub static INDIAN_RED: Pixel = Pixel {r: 205, g: 92, b: 92 };
pub static INDIGO: Pixel = Pixel {r: 75, g: 0, b: 130 };
pub static IVORY: Pixel = Pixel {r: 255, g: 255, b: 240 };
pub static KHAKI: Pixel = Pixel {r: 240, g: 230, b: 140 };
pub static LAVENDER: Pixel = Pixel {r: 230, g: 230, b: 250 };
pub static LAVENDERBLUSH: Pixel = Pixel {r: 255, g: 240, b: 245 };
pub static LAWN_GREEN: Pixel = Pixel {r: 124, g: 252, b: 0 };
pub static LEMON_CHIFFON: Pixel = Pixel {r: 255, g: 250, b: 205 };
pub static LIGHT_BLUE: Pixel = Pixel {r: 173, g: 216, b: 230 };
pub static LIGHT_CORAL: Pixel = Pixel {r: 240, g: 128, b: 128 };
pub static LIGHT_CYAN: Pixel = Pixel {r: 224, g: 255, b: 255 };
pub static LIGHT_GOLDENROD_YELLOW: Pixel = Pixel {r: 250, g: 250, b: 210 };
pub static LIGHT_GRAY: Pixel = Pixel {r: 211, g: 211, b: 211 };
pub static LIGHT_GREEN: Pixel = Pixel {r: 144, g: 238, b: 144 };
pub static LIGHT_GREY: Pixel = Pixel {r: 211, g: 211, b: 211 };
pub static LIGHT_PINK: Pixel = Pixel {r: 255, g: 182, b: 193 };
pub static LIGHT_SALMON: Pixel = Pixel {r: 255, g: 160, b: 122 };
pub static LIGHT_SEAGREEN: Pixel = Pixel {r: 32, g: 178, b: 170 };
pub static LIGHT_SKYBLUE: Pixel = Pixel {r: 135, g: 206, b: 250 };
pub static LIGHT_SLATE_GRAY: Pixel = Pixel {r: 119, g: 136, b: 153 };
pub static LIGHT_SLATE_GREY: Pixel = Pixel {r: 119, g: 136, b: 153 };
pub static LIGHT_STEEL_BLUE: Pixel = Pixel {r: 176, g: 196, b: 222 };
pub static LIGHT_YELLOW: Pixel = Pixel {r: 255, g: 255, b: 224 };
pub static LIME: Pixel = Pixel {r: 0, g: 255, b: 0 };
pub static LIME_GREEN: Pixel = Pixel {r: 50, g: 205, b: 50 };
pub static LINEN: Pixel = Pixel {r: 250, g: 240, b: 230 };
pub static MAGENTA: Pixel = Pixel {r: 255, g: 0, b: 255 };
pub static MAROON: Pixel = Pixel {r: 128, g: 0, b: 0 };
pub static MEDIUM_AQUAMARINE: Pixel = Pixel {r: 102, g: 205, b: 170 };
pub static MEDIUM_BLUE: Pixel = Pixel {r: 0, g: 0, b: 205 };
pub static MEDIUM_ORCHID: Pixel = Pixel {r: 186, g: 85, b: 211 };
pub static MEDIUM_PURPLE: Pixel = Pixel {r: 147, g: 112, b: 219 };
pub static MEDIUM_SEAGREEN: Pixel = Pixel {r: 60, g: 179, b: 113 };
pub static MEDIUM_SLATE_BLUE: Pixel = Pixel {r: 123, g: 104, b: 238 };
pub static MEDIUM_SPRING_GREEN: Pixel = Pixel {r: 0, g: 250, b: 154 };
pub static MEDIUM_TURQUOISE: Pixel = Pixel {r: 72, g: 209, b: 204 };
pub static MEDIUM_VIOLETRED: Pixel = Pixel {r: 199, g: 21, b: 133 };
pub static MIDNIGHT_BLUE: Pixel = Pixel {r: 25, g: 25, b: 112 };
pub static MINT_CREAM: Pixel = Pixel {r: 245, g: 255, b: 250 };
pub static MISTY_ROSE: Pixel = Pixel {r: 255, g: 228, b: 225 };
pub static MOCCASIN: Pixel = Pixel {r: 255, g: 228, b: 181 };
pub static NAVAJO_WHITE: Pixel = Pixel {r: 255, g: 222, b: 173 };
pub static NAVY: Pixel = Pixel {r: 0, g: 0, b: 128 };
pub static OLD_LACE: Pixel = Pixel {r: 253, g: 245, b: 230 };
pub static OLIVE: Pixel = Pixel {r: 128, g: 128, b: 0 };
pub static OLIVE_DRAB: Pixel = Pixel {r: 107, g: 142, b: 35 };
pub static ORANGE: Pixel = Pixel {r: 255, g: 165, b: 0 };
pub static ORANGE_RED: Pixel = Pixel {r: 255, g: 69, b: 0 };
pub static ORCHID: Pixel = Pixel {r: 218, g: 112, b: 214 };
pub static PALE_GOLDENROD: Pixel = Pixel {r: 238, g: 232, b: 170 };
pub static PALE_GREEN: Pixel = Pixel {r: 152, g: 251, b: 152 };
pub static PALE_TURQUOISE: Pixel = Pixel {r: 175, g: 238, b: 238 };
pub static PALE_VIOLETRED: Pixel = Pixel {r: 219, g: 112, b: 147 };
pub static PAPAYAWHIP: Pixel = Pixel {r: 255, g: 239, b: 213 };
pub static PEACHPUFF: Pixel = Pixel {r: 255, g: 218, b: 185 };
pub static PERU: Pixel = Pixel {r: 205, g: 133, b: 63 };
pub static PINK: Pixel = Pixel {r: 255, g: 192, b: 203 };
pub static PLUM: Pixel = Pixel {r: 221, g: 160, b: 221 };
pub static POWDER_BLUE: Pixel = Pixel {r: 176, g: 224, b: 230 };
pub static PURPLE: Pixel = Pixel {r: 128, g: 0, b: 128 };
pub static RED: Pixel = Pixel {r: 255, g: 0, b: 0 };
pub static ROSY_BROWN: Pixel = Pixel {r: 188, g: 143, b: 143 };
pub static ROYAL_BLUE: Pixel = Pixel {r: 65, g: 105, b: 225 };
pub static SADDLE_BROWN: Pixel = Pixel {r: 139, g: 69, b: 19 };
pub static SALMON: Pixel = Pixel {r: 250, g: 128, b: 114 };
pub static SANDY_BROWN: Pixel = Pixel {r: 244, g: 164, b: 96 };
pub static SEAGREEN: Pixel = Pixel {r: 46, g: 139, b: 87 };
pub static SEASHELL: Pixel = Pixel {r: 255, g: 245, b: 238 };
pub static SIENNA: Pixel = Pixel {r: 160, g: 82, b: 45 };
pub static SILVER: Pixel = Pixel {r: 192, g: 192, b: 192 };
pub static SKYBLUE: Pixel = Pixel {r: 135, g: 206, b: 235 };
pub static SLATE_BLUE: Pixel = Pixel {r: 106, g: 90, b: 205 };
pub static SLATE_GRAY: Pixel = Pixel {r: 112, g: 128, b: 144 };
pub static SLATE_GREY: Pixel = Pixel {r: 112, g: 128, b: 144 };
pub static SNOW: Pixel = Pixel {r: 255, g: 250, b: 250 };
pub static SPRING_GREEN: Pixel = Pixel {r: 0, g: 255, b: 127 };
pub static STEEL_BLUE: Pixel = Pixel {r: 70, g: 130, b: 180 };
pub static TAN: Pixel = Pixel {r: 210, g: 180, b: 140 };
pub static TEAL: Pixel = Pixel {r: 0, g: 128, b: 128 };
pub static THISTLE: Pixel = Pixel {r: 216, g: 191, b: 216 };
pub static TOMATO: Pixel = Pixel {r: 255, g: 99, b: 71 };
pub static TURQUOISE: Pixel = Pixel {r: 64, g: 224, b: 208 };
pub static VIOLET: Pixel = Pixel {r: 238, g: 130, b: 238 };
pub static WHEAT: Pixel = Pixel {r: 245, g: 222, b: 179 };
pub static WHITE: Pixel = Pixel {r: 255, g: 255, b: 255 };
pub static WHITE_SMOKE: Pixel = Pixel {r: 245, g: 245, b: 245 };
pub static YELLOW: Pixel = Pixel {r: 255, g: 255, b: 0 };
pub static YELLOW_GREEN: Pixel = Pixel {r: 154, g: 205, b: 50 };
|
use core::cmp::min;
use crate::TinyMT32;
// Constants taken from the reference tinymt32.h implementation.
// Mersenne exponent: the generator's period is 2^TINYMT32_MEXP - 1.
const TINYMT32_MEXP: usize = 127;
// Shift amounts used by the state-transition and tempering functions.
const TINYMT32_SH0: u32 = 1;
const TINYMT32_SH1: u32 = 10;
const TINYMT32_SH8: u32 = 8;
// Mask dropping the top bit of status[0]: the state is 127 bits wide.
const TINYMT32_MASK: u32 = 0x7fff_ffff_u32;
// 2^-24: scales 24 random bits into [0, 1).
const TINYMT32_MUL: f64 = 1.0f64 / 16_777_216.0_f64;
// Minimum number of scramble rounds performed while seeding.
const MIN_LOOP: usize = 8;
// Number of initial states discarded after seeding.
const PRE_LOOP: usize = 8;
impl TinyMT32 {
    /// Creates a generator from a raw state vector and the parameter set
    /// (`mat1`, `mat2`, `tmat`) that selects the generator's characteristic
    /// polynomial.
    pub fn new(status: [u32; 4], mat1: u32, mat2: u32, tmat: u32) -> TinyMT32 {
        TinyMT32 { status, mat1, mat2, tmat }
    }
}
/// First mixing function used while seeding (`init` / `init_by_array`):
/// folds the high bits into the low bits, then multiplies (wrapping).
fn ini_func1(x: u32) -> u32 {
    let folded = x ^ (x >> 27);
    folded.wrapping_mul(1_664_525_u32)
}
/// Second mixing function used while seeding (`init_by_array` final pass):
/// same fold as `ini_func1`, but with a different multiplier.
fn ini_func2(x: u32) -> u32 {
    let folded = x ^ (x >> 27);
    folded.wrapping_mul(1_566_083_941_u32)
}
/// Certifies the 2^127-1 period: an (effectively) all-zero state would be a
/// fixed point of the transition function, so it is replaced with the ASCII
/// codes of "TINY".
/// @param random tinymt state vector.
fn period_certification(random: &mut TinyMT32) {
    let degenerate = random.status[0] & TINYMT32_MASK == 0
        && random.status[1] == 0
        && random.status[2] == 0
        && random.status[3] == 0;
    if degenerate {
        random.status = [
            u32::from(b'T'),
            u32::from(b'I'),
            u32::from(b'N'),
            u32::from(b'Y'),
        ];
    }
}
/// This function initializes the internal state array with a 32-bit unsigned integer seed.
/// @param random tinymt state vector.
/// @param seed a 32-bit unsigned integer used as a seed.
pub fn tinymt32_init(random: &mut TinyMT32, seed: u32) {
    random.status[0] = seed;
    random.status[1] = random.mat1;
    random.status[2] = random.mat2;
    random.status[3] = random.tmat;
    // Knuth-style scramble; 1812433253 is the same multiplier MT19937 uses
    // for its seeding recurrence.
    for i in 1..MIN_LOOP {
        random.status[i & 3] ^= (i as u32).wrapping_add(
            1_812_433_253_u32
                .wrapping_mul(random.status[(i - 1) & 3] ^ (random.status[(i - 1) & 3] >> 30)),
        );
    }
    // Make sure the state is not the degenerate all-zero fixed point.
    period_certification(random);
    // Discard the first outputs so a poor seed does not leak into the stream.
    for _ in 0..PRE_LOOP {
        tinymt32_next_state(random);
    }
}
/// This function initializes the internal state array, with an array of 32-bit unsigned integers used as seeds
/// @param init_key the array of 32-bit integers, used as a seed.
/// @param key_length the length of init_key.
pub fn tinymt32_init_by_array(random: &mut TinyMT32, init_key: &[u32]) {
    let key_length: usize = init_key.len();
    let lag: usize = 1;
    let mid: usize = 1;
    let size: usize = 4;
    let st: &mut [u32; 4] = &mut random.status;
    st[0] = 0;
    st[1] = random.mat1;
    st[2] = random.mat2;
    st[3] = random.tmat;
    let mut count: usize = if key_length + 1 > MIN_LOOP { key_length + 1 } else { MIN_LOOP };
    let mut r: u32 = ini_func1(st[0] ^ st[mid % size] ^ st[(size - 1) % size]);
    st[mid % size] = st[mid % size].wrapping_add(r);
    // BUG FIX: the accumulations below previously used plain `+=`/`-=`, which
    // panic on overflow in debug builds. The reference C code relies on
    // uint32_t wrap-around (and `ini_func1` routinely yields values near
    // u32::MAX), so wrapping arithmetic is required throughout.
    r = r.wrapping_add(key_length as u32);
    st[(mid + lag) % size] = st[(mid + lag) % size].wrapping_add(r);
    st[0] = r;
    count -= 1;
    let mut i: usize = 1;
    let boundary = min(count, key_length);
    // Phase 1: mix every key word into the state.
    for key in init_key.iter().take(boundary) {
        r = ini_func1(st[i % size] ^ st[(i + mid) % size] ^ st[(i + size - 1) % size]);
        st[(i + mid) % size] = st[(i + mid) % size].wrapping_add(r);
        r = r.wrapping_add(*key).wrapping_add(i as u32);
        st[(i + mid + lag) % size] = st[(i + mid + lag) % size].wrapping_add(r);
        st[i % size] = r;
        i = (i + 1) % size;
    }
    // Phase 2: keep stirring (without key input) until `count` rounds ran.
    for _ in min(count, key_length)..count {
        r = ini_func1(st[i % size] ^ st[(i + mid) % size] ^ st[(i + size - 1) % size]);
        st[(i + mid) % size] = st[(i + mid) % size].wrapping_add(r);
        r = r.wrapping_add(i as u32);
        st[(i + mid + lag) % size] = st[(i + mid + lag) % size].wrapping_add(r);
        st[i % size] = r;
        i = (i + 1) % size;
    }
    // Phase 3: a final pass over the state with the second mixing function.
    for _ in 0..size {
        r = ini_func2(
            st[i % size].wrapping_add(st[(i + mid) % size]).wrapping_add(st[(i + size - 1) % size]),
        );
        st[(i + mid) % size] ^= r;
        r = r.wrapping_sub(i as u32);
        st[(i + mid + lag) % size] ^= r;
        st[i % size] = r;
        i = (i + 1) % size;
    }
    period_certification(random);
    for _ in 0..PRE_LOOP {
        tinymt32_next_state(random);
    }
}
/// This function always returns 127
/// @return always 127
#[inline]
pub fn tinymt32_get_mexp(_: &TinyMT32) -> usize {
    TINYMT32_MEXP
}
/// This function changes internal state of tinymt32. Users should not call this function directly.
/// Implements the TinyMT 127-bit linear state transition; `mat1`/`mat2` are
/// conditionally XORed in depending on the low bit of `y`.
/// @param random tinymt internal status
#[inline]
pub fn tinymt32_next_state(random: &mut TinyMT32) {
    let mut y: u32 = random.status[3];
    let mut x: u32 = (random.status[0] & TINYMT32_MASK) ^ random.status[1] ^ random.status[2];
    x ^= x << TINYMT32_SH0;
    y ^= (y >> TINYMT32_SH0) ^ x;
    random.status[0] = random.status[1];
    random.status[1] = random.status[2];
    random.status[2] = x ^ (y << TINYMT32_SH1);
    random.status[3] = y;
    // `(-((y & 1) as i32) as u32)` is all-ones when y is odd, zero otherwise.
    random.status[1] ^= (-((y & 1) as i32) as u32) & random.mat1;
    random.status[2] ^= (-((y & 1) as i32) as u32) & random.mat2;
}
/// This function outputs 32-bit unsigned integer from internal state. Users should not call this function directly.
/// Tempering: improves equidistribution of the raw state output; does not
/// advance the state.
/// @param random tinymt internal status
/// @return 32-bit unsigned pseudorandom number
#[inline]
pub fn tinymt32_temper(random: &mut TinyMT32) -> u32 {
    let mut t0: u32 = random.status[3];
    // defined(LINEARITY_CHECK)
    // t1 = random->status[0]^ (random->status[2] >> TINYMT32_SH8);
    let t1: u32 = random.status[0].wrapping_add(random.status[2] >> TINYMT32_SH8);
    t0 ^= t1;
    // `&` binds tighter than `^` in Rust, so this is t0 ^ (mask & tmat).
    t0 ^ (-((t1 & 1) as i32) as u32) & random.tmat
}
/// This function outputs floating point number from internal state. Users should not call this function directly.
/// @param random tinymt internal status
/// @return floating point number r (1.0 <= r < 2.0)
#[inline]
pub fn tinymt32_temper_conv(random: &mut TinyMT32) -> f32 {
    let t1: u32 = random.status[0].wrapping_add(random.status[2] >> TINYMT32_SH8);
    let mixed: u32 = random.status[3] ^ t1;
    // `(t1 & 1).wrapping_neg()` is all-ones when t1 is odd, zero otherwise.
    // Graft 23 random mantissa bits onto the exponent pattern 0x3f80_0000 to
    // build an IEEE-754 float in [1.0, 2.0).
    let bits: u32 = ((mixed ^ ((t1 & 1).wrapping_neg() & random.tmat)) >> 9) | 0x3f80_0000_u32;
    f32::from_bits(bits)
}
/// This function outputs floating point number from internal state. Users should not call this function directly.
/// @return floating point number r (1.0 < r < 2.0)
#[inline]
pub fn tinymt32_temper_conv_open(random: &mut TinyMT32) -> f32 {
    let t1: u32 = random.status[0].wrapping_add(random.status[2] >> TINYMT32_SH8);
    let mixed: u32 = random.status[3] ^ t1;
    // Like `tinymt32_temper_conv`, but the low mantissa bit is forced to 1
    // (pattern 0x3f80_0001), which keeps the result strictly above 1.0.
    let bits: u32 = ((mixed ^ ((t1 & 1).wrapping_neg() & random.tmat)) >> 9) | 0x3f80_0001_u32;
    f32::from_bits(bits)
}
/// This function outputs 32-bit unsigned integer from internal state.
/// @return 32-bit unsigned integer r (0 <= r < 2^32)
#[inline]
pub fn tinymt32_generate_uint32(random: &mut TinyMT32) -> u32 {
    tinymt32_next_state(random);
    tinymt32_temper(random)
}
/// This function outputs floating point number from internal state. This function is implemented using multiplying by (1 / 2^24). floating point multiplication is faster than using union trick in my Intel CPU.
/// @return floating point number r (0.0 <= r < 1.0)
#[inline]
pub fn tinymt32_generate_float(random: &mut TinyMT32) -> f32 {
    tinymt32_next_state(random);
    // Keep 24 bits and scale by 2^-24.
    ((tinymt32_temper(random) >> 8) as f64 * TINYMT32_MUL) as f32
}
/// This function outputs floating point number from internal state. This function is implemented using union trick.
/// @return floating point number r (1.0 <= r < 2.0)
#[inline]
pub fn tinymt32_generate_float12(random: &mut TinyMT32) -> f32 {
    tinymt32_next_state(random);
    tinymt32_temper_conv(random)
}
/// This function outputs floating point number from internal state.
/// This function is implemented using union trick.
/// @return floating point number r (0.0 <= r < 1.0)
#[inline]
pub fn tinymt32_generate_float01(random: &mut TinyMT32) -> f32 {
    tinymt32_next_state(random);
    tinymt32_temper_conv(random) - 1.0f32
}
/// This function outputs floating point number from internal state. This function may return 1.0 and never returns 0.0.
/// @return floating point number r (0.0 < r <= 1.0)
#[inline]
pub fn tinymt32_generate_float_oc(random: &mut TinyMT32) -> f32 {
    tinymt32_next_state(random);
    // NOTE(review): `tinymt32_generate_float` advances the state again, so
    // this function consumes two state updates per call. This mirrors the
    // reference C implementation, but confirm it is intentional.
    1.0f32 - tinymt32_generate_float(random)
}
/// This function outputs floating point number from internal state. This function returns neither 0.0 nor 1.0.
/// @return floating point number r (0.0 < r < 1.0)
#[inline]
pub fn tinymt32_generate_float_oo(random: &mut TinyMT32) -> f32 {
    tinymt32_next_state(random);
    tinymt32_temper_conv_open(random) - 1.0f32
}
/// This function outputs double precision floating point number from internal state. The returned value has 32-bit precision. In other words, this function makes one double precision floating point number from one 32-bit unsigned integer.
/// @return floating point number r (0.0 <= r < 1.0)
#[inline]
pub fn tinymt32_generate_32double(random: &mut TinyMT32) -> f64 {
    tinymt32_next_state(random);
    // Scale a full 32-bit word by 2^-32.
    tinymt32_temper(random) as f64 * (1.0f64 / 4_294_967_296.0_f64)
}
|
//! Config parameters for building kernel
/// Length of the message buffer in pages
pub const MSG_BUF_LEN: usize = 1;
/// Maximum number of logical cpu's supported
pub const MAX_CPUS: usize = 16;
/// How long between interrupts sent by the timer
pub const TIMER_PERIOD: Duration = Duration::from_millis(40);
/// amount of time that elapses before we will switch to a new thread
pub const SCHED_TIME: Duration = Duration::from_millis(10);
// don't tweak the parameters below
use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use core::time::Duration;
use crate::mem::PAGE_SIZE;
use crate::arch::x64::cpuid;
/// Message buffer size in bytes (MSG_BUF_LEN pages).
pub const MSG_BUF_SIZE: usize = MSG_BUF_LEN * PAGE_SIZE;
/// SCHED_TIME expressed in nanoseconds, for timer arithmetic.
pub const SCHED_TIME_NANOS: u64 = SCHED_TIME.as_nanos() as u64;
// dynamic config parameters set by kernel
// Whether the APIC is used for interrupt delivery; written once during
// `init()` and read with Acquire/Release ordering thereafter.
static USE_APIC: AtomicBool = AtomicBool::new(true);
pub fn use_apic() -> bool {
    USE_APIC.load(Ordering::Acquire)
}
pub fn set_use_apic(val: bool) {
    USE_APIC.store(val, Ordering::Release);
}
// Number of logical CPUs detected; defaults to 1 until set by the kernel.
static CPU_COUNT: AtomicUsize = AtomicUsize::new(1);
pub fn cpu_count() -> usize {
    CPU_COUNT.load(Ordering::Acquire)
}
pub fn set_cpu_count(val: usize) {
    CPU_COUNT.store(val, Ordering::Release);
}
// Initializes the dynamic parameters from CPUID probing.
pub fn init() {
    set_use_apic(cpuid::has_apic());
}
|
use std::fmt::Display;
// Like generic type parameters, generic lifetime parameters are declared in
// the angle brackets between the function name and the parameter list. The
// annotation ties the returned reference's lifetime to the shorter of the
// two inputs' lifetimes.
/// Returns the longer of the two string slices; ties go to `y`.
fn longest<'a>(x: &'a str, y: &'a str) -> &'a str {
    if y.len() >= x.len() {
        y
    } else {
        x
    }
}
/// Runs all of the lifetime-annotation demos below in order.
pub fn lifetime_complex_test() {
    println!(
        "{}",
        "------------lifetime_complex_test start-------------------"
    );
    // Lifetime annotations in function signatures.
    lifetime_grammar_one();
    lifetime_grammar_two();
    // Lifetime annotations in struct definitions.
    lifetime_grammar_three();
    // Lifetime elision rules.
    lifetime_grammar_four();
    lifetime_grammar_five();
}
/// Demonstrates calling `longest` where both inputs live long enough; the
/// commented-out variant shows the borrow-checker error when they don't.
fn lifetime_grammar_one() {
    println!(
        "{}",
        "------------lifetime_grammar_one start-------------------"
    );
    let string1 = String::from("long string is long");
    {
        let string2 = String::from("xyz");
        // `result` borrows from the shorter-lived of the two inputs, so it is
        // only usable inside this inner scope.
        let result = longest(string1.as_str(), string2.as_str());
        println!("The longest string is {}", result);
    }
    /* This will cause: borrowed value does not live long enough
    let string1 = String::from("long string is long");
    let result;
    {
        let string2 = String::from("xyz");
        //longest return a refer, but it does not live long enough
        result = longest(string1.as_str(), string2.as_str());
    }
    println!("The longest string is {}", result);
    */
}
/// Demonstrates mixing references with different actual lifetimes: the
/// returned reference is constrained to the shorter one (`s2`'s scope).
fn lifetime_grammar_two() {
    println!(
        "{}",
        "------------lifetime_grammar_two start-------------------"
    );
    let string1 = String::from("long string is long");
    let string2 = String::from("xyz");
    let s1;
    {
        s1 = string1.as_str();
        let s2 = string2.as_str();
        //The lifetime of s1,s2 not equal
        let result = longest(s1, s2);
        println!("The longest string is {}", result);
    }
    // `s1` borrows `string1`, which is still alive, so this is fine.
    println!("s1 string is {}", s1);
    // It will cause error
    // println!("s2 string is {}",s2);
}
// A struct that holds references must declare a lifetime for every reference
// field. This annotation means an `ImportantExcerpt` instance cannot outlive
// the reference stored in its `part` field.
struct ImportantExcerpt<'a> {
    part: &'a str,
}
/// Demonstrates a struct (`ImportantExcerpt`) that borrows string data.
fn lifetime_grammar_three() {
    println!(
        "{}",
        "------------lifetime_grammar_three start-------------------"
    );
    let novel = String::from("Call me Ishmael. Some years ago...");
    let novelpart = novel.as_str();
    // `_i` borrows from `novel`, so it may not outlive it.
    let _i = ImportantExcerpt { part: novelpart };
    println!("{}", _i.part)
}
/// Documents Rust's lifetime-elision rules (no runnable demo; see comment).
fn lifetime_grammar_four() {
    println!(
        "{}",
        // BUG FIX: this banner previously said "lifetime_grammar_two",
        // a copy-paste error that made the demo output misleading.
        "------------lifetime_grammar_four start-------------------"
    );
    /*
    Rule 1: each parameter that is a reference gets its own lifetime
    parameter. In other words, a function with one reference parameter has
    one lifetime parameter: fn foo<'a>(x: &'a i32); a function with two
    reference parameters has two distinct lifetime parameters:
    fn foo<'a, 'b>(x: &'a i32, y: &'b i32); and so on.
    Rule 2: if there is exactly one input lifetime parameter, it is assigned
    to all output lifetime parameters: fn foo<'a>(x: &'a i32) -> &'a i32.
    */
}
/// Demonstrates combining a lifetime parameter with a generic type parameter
/// (`longest_with_an_announcement`).
fn lifetime_grammar_five() {
    println!(
        "{}",
        "------------lifetime_grammar_five start-------------------"
    );
    let string1 = String::from("long string is long");
    let string2 = String::from("xyz");
    let string3 = longest_with_an_announcement(string1.as_str(), string2.as_str(), 50);
    println!("string3 value is {}", string3);
}
/// Prints `ann` as an announcement, then returns the longer of `x` and `y`
/// (ties go to `y`); the returned slice borrows from the winning input.
fn longest_with_an_announcement<'a, T: Display>(x: &'a str, y: &'a str, ann: T) -> &'a str {
    println!("Announcement! {}", ann);
    if y.len() >= x.len() {
        y
    } else {
        x
    }
}
|
use std::io;
/// Reads one line from standard input, including the trailing newline.
/// Returns an empty string at end of input.
fn get_line() -> String {
    let mut buf = String::new();
    io::stdin().read_line(&mut buf).unwrap();
    buf
}
/// Advent-of-Code-style box checksum: counts IDs containing a letter exactly
/// twice and IDs containing a letter exactly three times, then prints the
/// product of those two counts. Reads IDs line-by-line from stdin until EOF.
fn main() {
    let mut line = get_line();
    let mut twos = 0;
    let mut threes = 0;
    // `get_line` returns "" only at end of input.
    while !line.is_empty() {
        let mut letters = [0u32; 26];
        for c in line.chars() {
            // BUG FIX: the old check was only `c as usize >= 97`, so any
            // character above 'z' (e.g. '{' or non-ASCII) indexed past the
            // end of `letters` and panicked. Restrict to a-z exactly.
            if c.is_ascii_lowercase() {
                letters[c as usize - 97] += 1;
            }
        }
        if letters.contains(&2) {
            twos += 1;
        }
        if letters.contains(&3) {
            threes += 1;
        }
        line = get_line();
    }
    println!("{}", twos * threes);
}
|
mod blockchain;
extern crate durian;
use ethereum_types::{U256, Address};
use blockchain::Blockchain;
use durian::stateless_vm::StatelessVM;
use durian::transaction::Transaction;
use std::fs::File;
use std::io::Read;
use std::sync::Arc;
/// Loads the compiled ERC-20 wasm contract, fires a deployment transaction
/// from "alice" to "bob" through the stateless VM, and commits the result.
fn main() {
    let mut bc = Blockchain::new();
    let file_path = "./compiled-contract/pwasm_erc20_token.wasm";
    let mut file = match File::open(file_path) {
        Ok(file) => file,
        // BUG FIX: `panic!(err.to_string())` passes a non-literal String as
        // the format string, which is deprecated and a hard error in the
        // 2021 edition. Format explicitly; the message is unchanged.
        Err(err) => panic!("{}", err),
    };
    let mut code = Vec::new();
    if let Err(err) = file.read_to_end(&mut code) {
        panic!("{}", err);
    }
    let tx = Transaction::new(
        bc.get_address("alice"),
        bc.get_address("bob"),
        U256::from(10000),
        Some(Arc::new(code)),
        None,
    );
    let vm = StatelessVM::new();
    // The result is currently unused, but `fire` must still run (and panic on
    // failure) before committing below.
    let _res = match vm.fire(tx, &bc) {
        Ok(res) => res,
        Err(_err) => panic!("error"),
    };
    bc.commit();
}
|
#[macro_use]
extern crate clap;
extern crate osm_xml as osm;
mod schemes;
mod utils;
use clap::{App, Arg, ArgMatches, SubCommand};
use std::error::Error;
use std::fmt;
use schemes::chase::matrix::SE;
use utils::prf::PRF;
use utils::prp::PRP;
use std::path::Path;
use std::process;
use std::fs::File;
// File extensions and default base names for the files the tool reads/writes.
// NOTE(review): `&'static str` on a `const` is redundant (`&str` suffices);
// kept as-is to avoid churn.
const EXTENSION_JSON: &'static str = "json";
const EXTENSION_OSM: &'static str = "osm";
const OSM_FILE: &'static str = "campus-garching";
const KEY_FILE: &'static str = "se-key";
const SYSTEM_FILE: &'static str = "se-system";
const CT_FILE: &'static str = "se-ciphertext";
const PT_FILE: &'static str = "se-object";
// PEM-style armoring markers for serialized keys and ciphertexts.
const KEY_BEGIN: &'static str = "-----BEGIN SE KEY-----\n";
const KEY_END: &'static str = "\n-----END SE KEY-----";
const CT_BEGIN: &'static str = "-----BEGIN SE CIPHERTEXT-----\n";
const CT_END: &'static str = "\n-----END SE CIPHERTEXT-----";
const DOT: &'static str = ".";
// Application commands
// Subcommand names and their positional-argument identifiers, as registered
// with clap in `main`.
const CMD_SETUP: &'static str = "setup";
const CMD_SETUP_ARG_1: &'static str = "file";
const CMD_SETUP_ARG_2: &'static str = "name";
const CMD_KEYGEN: &'static str = "keygen";
const CMD_KEYGEN_ARG_1: &'static str = "file";
const CMD_ENCRYPT: &'static str = "encrypt";
const CMD_ENCRYPT_ARG_1: &'static str = "file";
const CMD_ENCRYPT_ARG_2: &'static str = "output";
const CMD_ENCRYPT_ARG_3: &'static str = "key";
const CMD_DECRYPT: &'static str = "decrypt";
const CMD_DECRYPT_ARG_1: &'static str = "object";
const CMD_DECRYPT_ARG_2: &'static str = "key";
const CMD_TOKEN: &'static str = "token";
const CMD_TOKEN_ARG_1: &'static str = "type";
const CMD_TOKEN_ARG_2: &'static str = "name";
const CMD_TOKEN_ARG_3: &'static str = "key";
const CMD_LOOKUP: &'static str = "lookup";
const CMD_LOOKUP_ARG_1: &'static str = "token";
/// Application-level error type carrying a human-readable message.
#[derive(Debug)]
struct RustSEError {
    details: String,
}
impl RustSEError {
    /// Builds an error from a message string.
    fn new(msg: &str) -> RustSEError {
        RustSEError { details: msg.to_string() }
    }
}
impl fmt::Display for RustSEError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Error: {}", self.details)
    }
}
impl Error for RustSEError {
    // NOTE(review): `Error::description` is deprecated in favor of `Display`;
    // kept because removing it would change what `description()` returns to
    // any existing caller.
    fn description(&self) -> &str {
        &self.details
    }
}
// CLI entry point: builds the clap argument parser for the searchable-
// encryption tool, parses argv, and dispatches to the nested `run` handler.
fn main() {
    // Kind of OSM object a token can target; `arg_enum!` also generates
    // `variants()` and `FromStr` for clap validation.
    arg_enum! {
        #[derive(Debug)]
        enum ObjectType {
            Way,
            Node,
        }
    }
    // Default file names
    let _key_default = [KEY_FILE, DOT, EXTENSION_JSON].concat();
    let _system_default = [SYSTEM_FILE, DOT, EXTENSION_JSON].concat();
    let _pt_default = [PT_FILE, DOT, EXTENSION_JSON].concat();
    let _input_default = [OSM_FILE, DOT, EXTENSION_OSM].concat();
    let _output_default = [CT_FILE, DOT, EXTENSION_JSON].concat();
    let _abe_app = App::new(crate_name!())
        .version(crate_version!())
        .author(crate_authors!("\n"))
        .about(crate_description!())
        .subcommand(
            // Keygen
            SubCommand::with_name(CMD_SETUP)
                .about(
                    "sets up a new se scheme and generates the corresponding system key.",
                )
                .arg(
                    Arg::with_name(CMD_SETUP_ARG_1)
                        .required(true)
                        .takes_value(true)
                        .default_value(&_system_default)
                        .help("the system key file."),
                )
                .arg(
                    Arg::with_name(CMD_SETUP_ARG_2)
                        .required(true)
                        .takes_value(true)
                        .help("the system key file."),
                ),
        )
        .subcommand(
            // Keygen
            SubCommand::with_name(CMD_KEYGEN)
                .about("generates a new key.")
                .arg(
                    Arg::with_name(CMD_KEYGEN_ARG_1)
                        .required(false)
                        .takes_value(true)
                        .default_value(&_key_default)
                        .help("the secret key file."),
                ),
        )
        .subcommand(
            // Encrypt
            SubCommand::with_name(CMD_ENCRYPT)
                .about("encrypts an open street map in osm/xml format..")
                .arg(
                    Arg::with_name(CMD_ENCRYPT_ARG_1)
                        .required(true)
                        .takes_value(true)
                        .default_value(&_input_default)
                        .help("the osm file to use."),
                )
                .arg(
                    // NOTE(review): the "output" argument defaults to the
                    // *input* file name and has an empty help string —
                    // `_output_default` was probably intended; confirm.
                    Arg::with_name(CMD_ENCRYPT_ARG_2)
                        .required(true)
                        .takes_value(true)
                        .default_value(&_input_default)
                        .help("."),
                )
                .arg(
                    Arg::with_name(CMD_ENCRYPT_ARG_3)
                        .required(false)
                        .takes_value(true)
                        .default_value(&_key_default)
                        .help("the key used to encrypt."),
                ),
        )
        .subcommand(
            // Decrypt
            SubCommand::with_name(CMD_DECRYPT)
                .about("Decrypts an object (either Edge or Vertex).")
                .arg(
                    Arg::with_name(CMD_DECRYPT_ARG_1)
                        .required(true)
                        .takes_value(true)
                        .default_value(&_pt_default)
                        .help("the json file to export the decrypted object to."),
                )
                .arg(
                    // NOTE(review): this reuses CMD_ENCRYPT_ARG_2 ("output"),
                    // but the default and help text suggest CMD_DECRYPT_ARG_2
                    // ("key") was intended — confirm before changing the CLI.
                    Arg::with_name(CMD_ENCRYPT_ARG_2)
                        .required(false)
                        .takes_value(true)
                        .default_value(&_key_default)
                        .help("the key used to encrypt."),
                ),
        )
        .subcommand(
            // Token
            SubCommand::with_name(CMD_TOKEN)
                .about("Generates a lookup token (either Edge or Vertex).")
                .arg(
                    Arg::with_name(CMD_TOKEN_ARG_1)
                        .required(true)
                        .takes_value(true)
                        .possible_values(&ObjectType::variants())
                        .help("Type of token to generate."),
                )
                .arg(
                    Arg::with_name(CMD_TOKEN_ARG_2)
                        .required(true)
                        .takes_value(true)
                        .help("the value to search for."),
                )
                .arg(
                    Arg::with_name(CMD_TOKEN_ARG_3)
                        .required(false)
                        .takes_value(true)
                        .default_value(&_key_default)
                        .help("the key used to encrypt."),
                ),
        )
        .subcommand(
            // Lookup
            SubCommand::with_name(CMD_LOOKUP)
                .about(
                    "Looks up the results of a token based search (either Edge or Vertex).",
                )
                .arg(
                    Arg::with_name(CMD_LOOKUP_ARG_1)
                        .required(true)
                        .takes_value(true)
                        .help("The token to use for the search."),
                ),
        )
        .get_matches();
    if let Err(e) = run(_abe_app) {
        println!("Application error: {}", e);
        process::exit(1);
    }
// Routes the parsed subcommand to its handler; an unknown/absent subcommand
// is a successful no-op.
fn run(matches: ArgMatches) -> Result<(), RustSEError> {
    match matches.subcommand() {
        (CMD_SETUP, Some(matches)) => run_setup(matches),
        (CMD_KEYGEN, Some(matches)) => run_keygen(matches),
        (CMD_ENCRYPT, Some(matches)) => run_encrypt(matches),
        (CMD_DECRYPT, Some(matches)) => run_decrypt(matches),
        (CMD_TOKEN, Some(matches)) => {
            // The token type was validated by clap against ObjectType's
            // variants, so the unwrap cannot fail here.
            let _token = value_t!(matches.value_of(CMD_TOKEN_ARG_1), ObjectType).unwrap();
            run_token(matches, _token)
        }
        (CMD_LOOKUP, Some(matches)) => run_lookup(matches),
        _ => Ok(()),
    }
}
// Handles the `setup` subcommand: resolves the system-key file name and the
// scheme name, then constructs the SE scheme.
// NOTE(review): `_key_file` is computed but never used below, and the `name`
// fallback builds a key-file name ("se-key.json") — both look unfinished.
fn run_setup(arguments: &ArgMatches) -> Result<(), RustSEError> {
    let mut _key_file = String::from("");
    let mut _name = String::from("");
    match arguments.value_of(CMD_SETUP_ARG_1) {
        None => {
            _key_file.push_str(&KEY_FILE);
            _key_file.push_str(&DOT);
            _key_file.push_str(&EXTENSION_JSON);
        }
        Some(_file) => _key_file = _file.to_string(),
    }
    match arguments.value_of(CMD_SETUP_ARG_2) {
        None => {
            _name.push_str(&KEY_FILE);
            _name.push_str(&DOT);
            _name.push_str(&EXTENSION_JSON);
        }
        Some(_file) => _name = _file.to_string(),
    }
    let _se = SE::new(_name);
    Ok(())
}
// Handles the `keygen` subcommand: generates a fresh key and prints it.
// NOTE(review): `_key_file` is resolved but the key is never written to it —
// presumably persistence is still to be implemented.
fn run_keygen(arguments: &ArgMatches) -> Result<(), RustSEError> {
    let mut _key_file = String::from("");
    match arguments.value_of(CMD_KEYGEN_ARG_1) {
        None => {
            _key_file.push_str(&KEY_FILE);
            _key_file.push_str(&DOT);
            _key_file.push_str(&EXTENSION_JSON);
        }
        Some(_file) => _key_file = _file.to_string(),
    }
    let _key = SE::keygen().unwrap();
    println!("Your key is:\n{:?}", _key);
    Ok(())
}
// Handles the `encrypt` subcommand: resolves the input OSM file, output name,
// and key path from the CLI, parses the OSM document, and prints statistics.
// (Actual encryption is not implemented yet.)
fn run_encrypt(arguments: &ArgMatches) -> Result<(), RustSEError> {
    let mut _file = String::from("");
    let mut _name = String::from("");
    let mut _key = String::from("");
    match arguments.value_of(CMD_ENCRYPT_ARG_1) {
        None => {
            // NOTE(review): unreachable in practice (clap supplies a default),
            // and the fallback builds a *key* file name, not an osm path.
            _file.push_str(&KEY_FILE);
            _file.push_str(&DOT);
            _file.push_str(&EXTENSION_JSON);
        }
        Some(_files) => _file = _files.to_string(),
    }
    match arguments.value_of(CMD_ENCRYPT_ARG_2) {
        None => {
            _name.push_str(&OSM_FILE);
        }
        Some(name) => _name = name.to_string(),
    }
    // BUG FIX: this match previously appended the default key path onto
    // `_file` (the osm input path!) in the None arm, and assigned
    // `_name = _key.to_string()` (an empty string, clobbering the output
    // name) in the Some arm. Build/assign the key path into `_key`.
    match arguments.value_of(CMD_ENCRYPT_ARG_3) {
        None => {
            _key.push_str(&KEY_FILE);
            _key.push_str(&DOT);
            _key.push_str(&EXTENSION_JSON);
        }
        Some(key) => _key = key.to_string(),
    }
    let f = File::open(&_file).unwrap();
    let doc = osm::OSM::parse(f).unwrap();
    let rel_info = relation_reference_statistics(&doc);
    let way_info = way_reference_statistics(&doc);
    let poly_count = doc.ways.values().fold(0, |acc, way| {
        if way.is_polygon() {
            return acc + 1;
        }
        acc
    });
    // Underscored: only used by the commented-out println below.
    let _highway_count = doc.ways.values().fold(0, |acc, way| {
        if way.is_highway() {
            return acc + 1;
        }
        acc
    });
    println!("Node count {}", doc.nodes.len());
    println!("Polygon count {}", poly_count);
    println!("Relation count {}", doc.relations.len());
    println!("Tag count {}", tag_count(&doc));
    //println!("Highway count {}", _highway_count);
    println!(
        "Way reference count: {}, invalid references: {}",
        way_info.0,
        way_info.1
    );
    println!(
        "Relation reference count: {}, resolved: {}, unresolved: {}",
        rel_info.0,
        rel_info.1,
        rel_info.2
    );
    Ok(())
}
// TODO: decryption is not implemented yet; accepts the args and returns Ok.
fn run_decrypt(arguments: &ArgMatches) -> Result<(), RustSEError> {
    Ok(())
}
// TODO: token-based lookup is not implemented yet.
fn run_lookup(arguments: &ArgMatches) -> Result<(), RustSEError> {
    Ok(())
}
// TODO: token generation is not implemented yet; `_type` selects Way or Node.
fn run_token(arguments: &ArgMatches, _type: ObjectType) -> Result<(), RustSEError> {
    Ok(())
}
}
// Walks every member of every relation and returns
// (total references, resolved references, unresolved references).
fn relation_reference_statistics(doc: &osm::OSM) -> (usize, usize, usize) {
    doc.relations
        .values()
        .flat_map(|relation| relation.members.iter())
        .fold((0, 0, 0), |acc, member| {
            // All member kinds carry an element reference; extract it.
            let el_ref = match *member {
                osm::Member::Node(ref el_ref, _) => el_ref,
                osm::Member::Way(ref el_ref, _) => el_ref,
                osm::Member::Relation(ref el_ref, _) => el_ref,
            };
            match doc.resolve_reference(&el_ref) {
                osm::Reference::Unresolved => (acc.0 + 1, acc.1, acc.2 + 1),
                osm::Reference::Node(_) |
                osm::Reference::Way(_) |
                osm::Reference::Relation(_) => (acc.0 + 1, acc.1 + 1, acc.2),
            }
        })
}
fn way_reference_statistics(doc: &osm::OSM) -> (usize, usize) {
doc.ways.values().flat_map(|way| way.nodes.iter()).fold(
(0, 0),
|acc,
node| {
match doc.resolve_reference(&node) {
osm::Reference::Node(_) => (acc.0 + 1, acc.1),
osm::Reference::Unresolved |
osm::Reference::Way(_) |
osm::Reference::Relation(_) => (acc.0, acc.1 + 1),
}
},
)
}
/// Total number of tags across all nodes, ways and relations.
///
/// Idiom fix: the original re-implemented summation with
/// `fold(0, |acc, c| acc + c)`; `Iterator::sum` expresses the same thing.
fn tag_count(doc: &osm::OSM) -> usize {
    let node_tag_count: usize = doc.nodes.values().map(|node| node.tags.len()).sum();
    let way_tag_count: usize = doc.ways.values().map(|way| way.tags.len()).sum();
    let relation_tag_count: usize = doc
        .relations
        .values()
        .map(|relation| relation.tags.len())
        .sum();
    node_tag_count + way_tag_count + relation_tag_count
}
|
use dynamic_pool::DynamicPool;
pub use dynamic_pool::{DynamicPoolItem, DynamicReset};
use thiserror::Error;
/// Errors returned by a sized pool.
#[derive(Error, Debug)]
pub enum SizedPoolError {
    /// The requested size maps to a power-of-two bucket beyond the number
    /// of sub-pools this pool was constructed with.
    #[error("the given size exceeds the maximum allowed size of the pool")]
    SizeExceedMaxSize,
}
/// A resource that can be allocated at a given byte size and report it back.
pub trait SizedAllocatable {
    /// Allocates a new resource of `size` bytes.
    fn new(size: usize) -> Self;
    /// The size this resource was allocated with.
    fn size(&self) -> usize;
}
/// A pool of reusable resources bucketed into power-of-two size classes.
pub struct SizedPool<T: SizedAllocatable + DynamicReset> {
    /// each entry represents an allocation queue of 2**n bytes block
    sub_pools: Vec<DynamicPool<T>>,
}
impl<T: SizedAllocatable + DynamicReset> SizedPool<T> {
    /// Builds `pool_size_power_of_two` sub-pools; sub-pool `n` hands out
    /// resources of `2**n` bytes. `cap` is the initial capacity of each
    /// sub-pool and `max_pool_size` its maximum number of retained items.
    pub fn new(cap: usize, pool_size_power_of_two: u32, max_pool_size: usize) -> Self {
        let sub_pools = (0..pool_size_power_of_two)
            .map(|pool_power| {
                DynamicPool::new(cap, max_pool_size, move || T::new(2_usize.pow(pool_power)))
            })
            .collect();
        Self { sub_pools }
    }
    /// Index of the sub-pool serving `size`-byte requests: the exponent of
    /// the next power of two at or above `size`.
    fn get_subpool_location(&self, size: usize) -> usize {
        size.next_power_of_two().trailing_zeros() as usize
    }
    fn get_subpool(&self, size: usize) -> Result<&DynamicPool<T>, SizedPoolError> {
        let index = self.get_subpool_location(size);
        self.sub_pools
            .get(index)
            .ok_or(SizedPoolError::SizeExceedMaxSize)
    }
    /// Takes an item large enough for `size` bytes; when the matching
    /// sub-pool is empty a fresh resource is allocated instead.
    pub fn try_pull(&self, size: usize) -> Result<DynamicPoolItem<T>, SizedPoolError> {
        let pool = self.get_subpool(size)?;
        if let Some(item) = pool.try_take() {
            Ok(item)
        } else {
            tracing::debug!("not enough items in pool, allocating");
            Ok(pool.take())
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// Minimal `SizedAllocatable` implementation that just records its size.
    #[derive(Debug)]
    struct TestItem {
        size: usize,
    }
    impl SizedAllocatable for TestItem {
        fn new(size: usize) -> Self {
            Self { size }
        }
        fn size(&self) -> usize {
            self.size
        }
    }
    impl DynamicReset for TestItem {
        fn reset(&mut self) {}
    }
    /// Pulling more items (2048) than `max_pool_size` (1024) must still
    /// succeed: exhausted sub-pools fall back to fresh allocations.
    #[test]
    fn test_allocate() {
        let pool: SizedPool<TestItem> = SizedPool::new(0, 40, 1024);
        let mut items = Vec::new();
        for _ in 0..2048 {
            items.push(pool.try_pull(10).unwrap());
        }
    }
}
|
use std::env::{remove_var, set_var, var};
use crate::{bson::Document, client::auth::aws::test_utils::*, test::DEFAULT_URI, Client};
use super::TestClient;
/// Smoke test: a simple find against an AWS-auth deployment should
/// authenticate successfully.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn auth_aws() {
    let client = TestClient::new().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
}
// The TestClient performs operations upon creation that trigger authentication, so the credential
// caching tests use a regular client instead to avoid that noise.
/// Returns a plain client pointed at `DEFAULT_URI`; no operations are run on
/// construction, so no credential is fetched until the first command.
async fn get_client() -> Client {
    Client::with_uri_str(DEFAULT_URI.clone()).await.unwrap()
}
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn credential_caching() {
    // This test should only be run when authenticating using AWS endpoints.
    if var("SKIP_CREDENTIAL_CACHING_TESTS").is_ok() {
        return;
    }
    // A successful operation should populate the credential cache.
    clear_cached_credential().await;
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
    assert!(cached_credential().await.is_some());
    // An expired cached credential should be refreshed (newer expiration),
    // not reused as-is.
    let now = bson::DateTime::now();
    set_cached_expiration(now).await;
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
    assert!(cached_credential().await.is_some());
    assert!(cached_expiration().await > now);
    // A poisoned credential must fail auth and be evicted from the cache;
    // the subsequent retry should then fetch a fresh one and succeed.
    poison_cached_credential().await;
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    match coll.find_one(None, None).await {
        Ok(_) => panic!(
            "find one should have failed with authentication error due to poisoned cached \
             credential"
        ),
        Err(error) => assert!(error.is_auth_error()),
    }
    assert!(cached_credential().await.is_none());
    coll.find_one(None, None).await.unwrap();
    assert!(cached_credential().await.is_some());
}
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn credential_caching_environment_vars() {
    // This test should only be run when authenticating using AWS endpoints.
    if var("SKIP_CREDENTIAL_CACHING_TESTS").is_ok() {
        return;
    }
    // Populate the cache via a normal authenticated operation.
    clear_cached_credential().await;
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
    assert!(cached_credential().await.is_some());
    // Copy the cached credential into environment variables: credentials
    // sourced from the environment should NOT be written to the cache.
    set_var("AWS_ACCESS_KEY_ID", cached_access_key_id().await);
    set_var("AWS_SECRET_ACCESS_KEY", cached_secret_access_key().await);
    if let Some(session_token) = cached_session_token().await {
        set_var("AWS_SESSION_TOKEN", session_token);
    }
    clear_cached_credential().await;
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
    assert!(cached_credential().await.is_none());
    // Bad env credentials with an empty cache must fail authentication.
    set_var("AWS_ACCESS_KEY_ID", "bad");
    set_var("AWS_SECRET_ACCESS_KEY", "bad");
    set_var("AWS_SESSION_TOKEN", "bad");
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    match coll.find_one(None, None).await {
        Ok(_) => panic!(
            "find one should have failed with authentication error due to poisoned environment \
             variables"
        ),
        Err(error) => assert!(error.is_auth_error()),
    }
    remove_var("AWS_ACCESS_KEY_ID");
    remove_var("AWS_SECRET_ACCESS_KEY");
    remove_var("AWS_SESSION_TOKEN");
    // Re-populate the cache with the env vars removed...
    clear_cached_credential().await;
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
    assert!(cached_credential().await.is_some());
    // ...then bad env vars must be ignored in favor of the cached credential.
    set_var("AWS_ACCESS_KEY_ID", "bad");
    set_var("AWS_SECRET_ACCESS_KEY", "bad");
    set_var("AWS_SESSION_TOKEN", "bad");
    let client = get_client().await;
    let coll = client.database("aws").collection::<Document>("somecoll");
    coll.find_one(None, None).await.unwrap();
    remove_var("AWS_ACCESS_KEY_ID");
    remove_var("AWS_SECRET_ACCESS_KEY");
    remove_var("AWS_SESSION_TOKEN");
}
|
use std::cmp::Ordering;
use std::rc::Rc;
use std::collections::HashMap;
/// Width and height of the square world grid.
const WORLD_SIZE : usize = 10;
/// A 2-D grid coordinate (signed so that out-of-bounds steps can be
/// represented and rejected).
#[derive(PartialEq, Debug, Eq, Hash, Clone, Copy)]
struct Point {
    x: i32,
    y: i32
}
/// Shorthand constructor for `Point`.
fn pt(x: i32, y: i32) -> Point {
    Point { x, y }
}
impl Point {
    /// Component-wise sum of two points.
    fn add(&self, p: Point) -> Point {
        Point {
            x: self.x + p.x,
            y: self.y + p.y,
        }
    }
}
/// The game world: a fixed-size grid plus the coordinates of blocking entities.
struct World {
    entities : Vec<Point>
}
impl World {
    /// True when `p` lies inside the grid and no entity occupies it.
    fn is_clean(&self, p: &Point) -> bool {
        let in_bounds = p.x > -1
            && p.y > -1
            && (p.x as usize) < WORLD_SIZE
            && (p.y as usize) < WORLD_SIZE;
        in_bounds && !self.entities.contains(p)
    }
}
/// An immutable singly-linked cons list; `Rc` lets path prefixes be shared
/// between branches of the search.
#[derive(Debug)]
enum List<A> {
    Cons(A, Rc<List<A>>),
    Nil
}
use List::*;
/// A candidate move (relative offset) together with its traversal cost.
struct StepCost {
    step : Point,
    cost : f64
}
/// Path-search state over a borrowed world.
struct Finder<'a> {
    world : &'a World,
    // Best cost found so far for each visited cell (0.0 means unvisited).
    marks : HashMap<Point, f64>,
    // The eight possible moves with their costs.
    step_costs : Vec<StepCost>
}
/// A path is a cons list of visited points (most recent first).
type Path = List<Point>;
/// A complete path from origin to destination plus its total cost.
#[derive(Debug)]
struct SearchResult {
    path : Rc<Path>,
    cost : f64
}
impl<'a> Finder<'a> {
    fn new(world : &World) -> Finder {
        Finder {
            world : &world,
            marks : HashMap::new(),
            step_costs : get_step_costs()
        }
    }
    /// Runs a fresh search from `orig` to `dest`, clearing cost marks from
    /// any previous search first.
    fn find(&mut self, orig : Point, dest : Point) -> Option<SearchResult> {
        self.marks = HashMap::new();
        self.best_dist(orig, dest, Rc::new(Cons(orig, Rc::new(Nil))), 0.0)
    }
    /// Exhaustive depth-first search with cost pruning: a cell is only
    /// re-entered when it is reached more cheaply than its recorded mark.
    /// Returns the cheapest path found from `p` to `dest`, or `None`.
    fn best_dist(&mut self, p : Point, dest : Point, path : Rc<Path>, acc_cost : f64) -> Option<SearchResult> {
        if p == dest {
            Some(SearchResult{ path: path.clone(), cost: acc_cost})
        } else {
            // create a block to release ownership of the closure to use self later
            let next_steps = {
                // Boilerplate to avoid borrowing self
                let world = &self.world;
                let marks = &mut self.marks;
                let step_costs = &self.step_costs;
                step_costs
                    .iter()
                    .filter_map(|step_cost : &StepCost| {
                        let next = p.add(step_cost.step);
                        // 0.0 doubles as "unvisited"; real costs are > 0.
                        let oldcost = *marks.get(&next).unwrap_or(&0.0);
                        let cost = acc_cost + step_cost.cost;
                        if world.is_clean(&next)
                            && (oldcost == 0.0 || oldcost > cost) {
                            marks.insert(next, cost);
                            Some(next)
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<Point>>() // HACK?? can't return iterator because references are no more valid outside of the block
            };
            let search_results = next_steps
                .iter()
                .filter_map(|&next| {
                    let newpath = Rc::new(Cons(next, path.clone()));
                    let newcost = *self.marks.get(&next).unwrap();
                    self.best_dist(next, dest, newpath, newcost)
                });
            // Keep the cheapest of the candidate results.
            search_results.fold(None, |acc, search_result| {
                match acc {
                    Some(best) =>
                        if best.cost.partial_cmp(&search_result.cost) == Some(Ordering::Less) {
                            Some(best)
                        } else {
                            Some(search_result)
                        },
                    None => Some(search_result)
                }
            })
        }
    }
}
/// The eight possible moves: orthogonal steps cost 1.0, diagonal steps cost
/// an approximation of sqrt(2).
fn get_step_costs() -> Vec<StepCost> {
    let orthogonal = [pt(1, 0), pt(0, 1), pt(0, -1), pt(-1, 0)];
    let diagonal = [pt(1, 1), pt(1, -1), pt(-1, 1), pt(-1, -1)];
    let mut costs: Vec<StepCost> = orthogonal
        .iter()
        .map(|&step| StepCost { step, cost: 1.0 })
        .collect();
    costs.extend(diagonal.iter().map(|&step| StepCost { step, cost: 1.414213 }));
    costs
}
fn main() {
    // A vertical wall of obstacles at x = 7, y = 3..=8.
    let mut entities = Vec::new();
    for y in 3..9 {
        entities.push(pt(7, y));
    }
    let world = World { entities };
    let orig = pt(3, 3);
    let dest = pt(8, 6);
    let mut finder = Finder::new(&world);
    let sol = finder.find(orig, dest);
    println!("sol = {:?}", sol);
}
|
use core::fmt;
use core::marker::PhantomData;
use conquer_pointer::{MarkedNonNull, MarkedPtr, Null};
use crate::traits::Reclaim;
use crate::{Maybe, Protected, Shared};
/********** impl Clone ****************************************************************************/
impl<T, R, const N: usize> Clone for Protected<'_, T, R, N> {
    #[inline]
    fn clone(&self) -> Self {
        Self { inner: self.inner, _marker: PhantomData }
    }
}
/********** impl Copy *****************************************************************************/
impl<T, R, const N: usize> Copy for Protected<'_, T, R, N> {}
/********** impl inherent (const) *****************************************************************/
impl<T, R, const N: usize> Protected<'_, T, R, N> {
    /// Creates a new `null` pointer.
    #[inline]
    pub const fn null() -> Self {
        Self { inner: MarkedPtr::null(), _marker: PhantomData }
    }
    /// Casts to a `Protected` over a different type `U`, keeping the raw
    /// pointer value and tag bits.
    ///
    /// # Safety
    /// Presumably the caller must guarantee the pointee is valid as a `U` —
    /// TODO confirm the crate's intended contract.
    #[inline]
    pub const unsafe fn cast<'a, U>(self) -> Protected<'a, U, R, N> {
        Protected { inner: self.inner.cast(), _marker: PhantomData }
    }
}
/********** impl inherent *************************************************************************/
impl<'g, T, R: Reclaim<T>, const N: usize> Protected<'g, T, R, N> {
    impl_from_ptr_for_nullable!();
    impl_from_non_null!();
    /// Returns `true` if the protected pointer is null.
    #[inline]
    pub fn is_null(self) -> bool {
        self.inner.is_null()
    }
    impl_common!();
    /// Converts into a [`Shared`] when non-null; otherwise yields the
    /// pointer's tag value.
    #[inline]
    pub fn shared(self) -> Maybe<Shared<'g, T, R, N>> {
        match MarkedNonNull::new(self.inner) {
            Ok(inner) => Maybe::Some(Shared { inner, _marker: PhantomData }),
            Err(Null(tag)) => Maybe::Null(tag),
        }
    }
    /// Converts into a [`Shared`] without a null check.
    ///
    /// # Safety
    /// The pointer must not be null.
    #[inline]
    pub unsafe fn shared_unchecked(self) -> Shared<'g, T, R, N> {
        Shared { inner: MarkedNonNull::new_unchecked(self.inner), _marker: PhantomData }
    }
    /// # Safety
    /// The pointer must be null or valid for the `'g` lifetime.
    #[inline]
    pub unsafe fn as_ref(self) -> Option<&'g T> {
        self.inner.as_ref()
    }
    /// Splits into an optional reference and the tag bits.
    ///
    /// # Safety
    /// Same requirements as [`Protected::as_ref`].
    #[inline]
    pub unsafe fn decompose_ref(self) -> (Option<&'g T>, usize) {
        self.inner.decompose_ref()
    }
    /// # Safety
    /// The pointer must be non-null and valid for the `'g` lifetime.
    #[inline]
    pub unsafe fn deref(self) -> &'g T {
        &*self.inner.decompose_ptr()
    }
}
/********** impl Debug ****************************************************************************/
// Formatting and default impls are generated by the crate's helper macros.
impl<T, R, const N: usize> fmt::Debug for Protected<'_, T, R, N> {
    impl_fmt_debug!(Protected);
}
/********** impl Default **************************************************************************/
// Defaults to the null pointer (see `default_null!`).
impl<T, R, const N: usize> Default for Protected<'_, T, R, N> {
    default_null!();
}
/********** impl Pointer **************************************************************************/
impl<T, R, const N: usize> fmt::Pointer for Protected<'_, T, R, N> {
    impl_fmt_pointer!();
}
|
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use cosmwasm_std::{
HumanAddr,
};
//use cosmwasm_std::{CanonicalAddr, Storage};
//use cosmwasm_storage::{singleton, singleton_read, ReadonlySingleton, Singleton};
/// Running state of a single vote.
#[derive(Serialize, Deserialize, Clone, Debug, JsonSchema)]
pub struct Tally {
    /// Number of votes in favor
    pub yes: u64,
    /// Number of votes against
    pub no: u64,
    /// List of addresses of voters
    pub voters: HashSet<Vec<u8>>, // FIXME I would have liked to make it a HashMap but for some reason I couldn't make it work yet
    /// Time of beginning of vote
    pub init_timestamp: u64,
    /// Time of end of vote
    pub end_timestamp: u64,
    /// Defines whether current tally state shall be private until completed
    pub early_results_allowed: bool,
    /// Completion status; if true, the tally can be queried
    pub is_completed: bool,
}
/// A single voter's ballot.
#[derive(Serialize, Deserialize, Clone, Debug, JsonSchema)]
pub struct Ballot {
    /// Whether this voter has already voted
    pub has_voted: bool,
    /// Time of vote
    pub timestamp: u64,
    /// The vote itself (`None` until cast)
    pub vote: Option<bool>,
    /// Optional delegate, to allow liquid democracy
    pub delegate: Option<HumanAddr>,
    /// Vote value (can be increased through transferred votes)
    pub vote_value: u64,
}
|
use math::round::half_up;
/// An L*a*b* color triple: `[L, a, b]`.
pub type LabColor = [f64; 3];
/// Component-wise difference `color - other`.
pub fn sub(color: LabColor, other: LabColor) -> LabColor {
    let mut out = [0.0; 3];
    for (i, slot) in out.iter_mut().enumerate() {
        *slot = color[i] - other[i];
    }
    out
}
// Illuminant and reference angle for output values: D65 2°
/// Converts an 8-bit-per-channel RGB color to CIE L*a*b*.
/// Intermediate values are rounded half-up to 4 decimals at fixed points,
/// so the exact rounding order is load-bearing — do not refactor it.
pub fn rgb_2_lab(color: [f64; 3]) -> LabColor {
    let mut rgb = [0.0; 3];
    // Linearise each sRGB channel (inverse gamma companding).
    for i in 0..color.len() {
        rgb[i] = rgb_stab(color[i]);
    }
    let mut xyz = [0.0; 3];
    // RGB -> XYZ matrix, normalised by the D65 white point (95.047 / 100 / 108.883).
    xyz[0] = xyz_stab(half_up(rgb[0] * 0.4124 + rgb[1] * 0.3576 + rgb[2] * 0.1805, 4) / 95.047);
    xyz[1] = xyz_stab(half_up(rgb[0] * 0.2126 + rgb[1] * 0.7152 + rgb[2] * 0.0722, 4) / 100.0);
    xyz[2] = xyz_stab(half_up(rgb[0] * 0.0193 + rgb[1] * 0.1192 + rgb[2] * 0.9504, 4) / 108.883);
    // XYZ -> Lab, rounded to 4 decimals per component.
    [
        half_up((116.0 * xyz[1]) - 16.0, 4),
        half_up(500.0 * (xyz[0] - xyz[1]), 4),
        half_up(200.0 * (xyz[1] - xyz[2]), 4),
    ]
}
/// XYZ companding step of the Lab conversion: cube root above the CIE
/// threshold 0.008856, linear approximation at or below it.
fn xyz_stab(c: f64) -> f64 {
    if c <= 0.008856 {
        (7.787 * c) + (16.0 / 116.0)
    } else {
        c.powf(0.3333333333333333)
    }
}
/// Inverse sRGB gamma: maps an 8-bit channel value to linear light scaled
/// to [0, 100].
fn rgb_stab(c: f64) -> f64 {
    let d = c / 255.0;
    if d <= 0.04045 {
        d / 12.92 * 100.0
    } else {
        ((d + 0.055) / 1.055).powf(2.4) * 100.0
    }
}
|
#[cfg(target_arch = "x86")]
use core::arch::x86::*;
#[cfg(target_arch = "x86_64")]
use core::arch::x86_64::*;
// 参考:
// https://www.intel.cn/content/dam/www/public/us/en/documents/white-papers/carry-less-multiplication-instruction-in-gcm-mode-paper.pdf
/// GHASH universal hash over GF(2^128) (the MAC component of AES-GCM),
/// implemented with x86 PCLMULQDQ carry-less multiplication.
#[derive(Clone)]
pub struct GHash {
    // Hash subkey H, loaded byte-reversed into little-endian lane order.
    key: __m128i,
    // Running tag accumulator.
    buf: __m128i,
}
impl GHash {
    pub const KEY_LEN: usize = 16;
    pub const BLOCK_LEN: usize = 16;
    pub const TAG_LEN: usize = 16;
    /// Creates a GHASH instance from the 16-byte hash subkey.
    #[inline(always)]
    pub fn new(key: &[u8; Self::KEY_LEN]) -> Self {
        unsafe { Self::new_simd(key) }
    }
    // NOTE(review): `new` calls this unconditionally, so callers presumably
    // guarantee SSE2 + PCLMULQDQ support — confirm a runtime CPU check exists.
    #[target_feature(enable = "sse2,pclmulqdq")]
    unsafe fn new_simd(key: &[u8; Self::KEY_LEN]) -> Self {
        let key = key.clone();
        let tag = _mm_setzero_si128();
        // Byte-reversal mask: GHASH is specified big-endian, x86 lanes are
        // little-endian.
        let vm = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0);
        let key = _mm_shuffle_epi8(_mm_loadu_si128(key.as_ptr() as *const __m128i), vm);
        Self { key, buf: tag }
    }
    // Performing Ghash Using Algorithms 1 and 5 (C)
    /// Absorbs one 16-byte block: buf = (buf ^ block) * H in GF(2^128).
    #[inline]
    unsafe fn gf_mul(&mut self, x: &[u8]) {
        let a = self.key;
        let vm = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0);
        let mut b = _mm_loadu_si128(x.as_ptr() as *const __m128i);
        b = _mm_shuffle_epi8(b, vm);
        // Fold the new block into the accumulator before multiplying.
        b = _mm_xor_si128(b, self.buf);
        let mut tmp2: __m128i = core::mem::zeroed();
        let mut tmp3: __m128i = core::mem::zeroed();
        let mut tmp4: __m128i = core::mem::zeroed();
        let mut tmp5: __m128i = core::mem::zeroed();
        let mut tmp6: __m128i = core::mem::zeroed();
        let mut tmp7: __m128i = core::mem::zeroed();
        let mut tmp8: __m128i = core::mem::zeroed();
        let mut tmp9: __m128i = core::mem::zeroed();
        // 128x128 -> 256-bit carry-less multiply (four CLMULs, schoolbook).
        tmp3 = _mm_clmulepi64_si128(a, b, 0x00);
        tmp4 = _mm_clmulepi64_si128(a, b, 0x10);
        tmp5 = _mm_clmulepi64_si128(a, b, 0x01);
        tmp6 = _mm_clmulepi64_si128(a, b, 0x11);
        tmp4 = _mm_xor_si128(tmp4, tmp5);
        tmp5 = _mm_slli_si128(tmp4, 8);
        tmp4 = _mm_srli_si128(tmp4, 8);
        // tmp3 / tmp6 now hold the low / high 128-bit halves of the product.
        tmp3 = _mm_xor_si128(tmp3, tmp5);
        tmp6 = _mm_xor_si128(tmp6, tmp4);
        // Shift the whole 256-bit product left by one bit.
        tmp7 = _mm_srli_epi32(tmp3, 31);
        tmp8 = _mm_srli_epi32(tmp6, 31);
        tmp3 = _mm_slli_epi32(tmp3, 1);
        tmp6 = _mm_slli_epi32(tmp6, 1);
        tmp9 = _mm_srli_si128(tmp7, 12);
        tmp8 = _mm_slli_si128(tmp8, 4);
        tmp7 = _mm_slli_si128(tmp7, 4);
        tmp3 = _mm_or_si128(tmp3, tmp7);
        tmp6 = _mm_or_si128(tmp6, tmp8);
        tmp6 = _mm_or_si128(tmp6, tmp9);
        // Reduce modulo the GCM polynomial x^128 + x^7 + x^2 + x + 1.
        tmp7 = _mm_slli_epi32(tmp3, 31);
        tmp8 = _mm_slli_epi32(tmp3, 30);
        tmp9 = _mm_slli_epi32(tmp3, 25);
        tmp7 = _mm_xor_si128(tmp7, tmp8);
        tmp7 = _mm_xor_si128(tmp7, tmp9);
        tmp8 = _mm_srli_si128(tmp7, 4);
        tmp7 = _mm_slli_si128(tmp7, 12);
        tmp3 = _mm_xor_si128(tmp3, tmp7);
        tmp2 = _mm_srli_epi32(tmp3, 1);
        tmp4 = _mm_srli_epi32(tmp3, 2);
        tmp5 = _mm_srli_epi32(tmp3, 7);
        tmp2 = _mm_xor_si128(tmp2, tmp4);
        tmp2 = _mm_xor_si128(tmp2, tmp5);
        tmp2 = _mm_xor_si128(tmp2, tmp8);
        tmp3 = _mm_xor_si128(tmp3, tmp2);
        tmp6 = _mm_xor_si128(tmp6, tmp3);
        // Store the reduced result back into the accumulator.
        _mm_storeu_si128(&mut self.buf as _, tmp6);
    }
    /// Absorbs `m`, processing full 16-byte blocks; a trailing partial
    /// block is zero-padded.
    #[inline(always)]
    pub fn update(&mut self, m: &[u8]) {
        unsafe { self.update_simd(m) }
    }
    #[target_feature(enable = "sse2,pclmulqdq")]
    unsafe fn update_simd(&mut self, m: &[u8]) {
        let mlen = m.len();
        if mlen == 0 {
            return ();
        }
        let n = mlen / Self::BLOCK_LEN;
        for i in 0..n {
            let chunk = &m[i * Self::BLOCK_LEN..i * Self::BLOCK_LEN + Self::BLOCK_LEN];
            self.gf_mul(chunk);
        }
        // Zero-pad and absorb any trailing partial block.
        if mlen % Self::BLOCK_LEN != 0 {
            let rem = &m[n * Self::BLOCK_LEN..];
            let rlen = rem.len();
            let mut last_block = [0u8; Self::BLOCK_LEN];
            last_block[..rlen].copy_from_slice(rem);
            self.gf_mul(&last_block);
        }
    }
    /// Consumes the state and returns the 16-byte tag.
    #[inline(always)]
    pub fn finalize(self) -> [u8; Self::TAG_LEN] {
        unsafe { self.finalize_simd() }
    }
    #[target_feature(enable = "sse2,pclmulqdq")]
    unsafe fn finalize_simd(self) -> [u8; Self::TAG_LEN] {
        let mut out = [0u8; Self::TAG_LEN];
        // Undo the little-endian lane order before emitting the tag.
        let vm = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0);
        _mm_storeu_si128(
            out.as_mut_ptr() as *mut __m128i,
            _mm_shuffle_epi8(self.buf, vm),
        );
        out
    }
}
|
/// Returns `true` when `s` consists of correctly matched and nested
/// brackets: `()`, `[]`, `{}`.
///
/// Bug fix: the original linked-position implementation let its counter go
/// negative and re-matched already-consumed openers, so an input such as
/// `"())("` was incorrectly accepted. A plain stack cannot exhibit that
/// failure mode.
///
/// # Panics
/// Panics when `s` contains any character other than the six brackets
/// (same contract as the original implementation).
pub fn is_valid(s: String) -> bool {
    // An odd-length string can never pair up completely.
    if s.len() % 2 == 1 {
        return false
    }
    let mut stack: Vec<char> = Vec::with_capacity(s.len() / 2);
    for c in s.chars() {
        match c {
            '{' | '[' | '(' => stack.push(c),
            '}' | ']' | ')' => {
                let expected = match c {
                    ')' => '(',
                    ']' => '[',
                    _ => '{',
                };
                // Pop returns None on an unmatched closer.
                if stack.pop() != Some(expected) {
                    return false
                }
            },
            _ => panic!("Didn't expect {}!", c),
        }
    }
    // Leftover openers mean the string is unbalanced.
    stack.is_empty()
}
/// Exercises matched, mismatched, interleaved and unclosed inputs.
/// Fix: the first four assertions were commented out (dead test code);
/// they all hold, so they are re-enabled here.
#[test]
fn test_is_valid() {
    assert!(is_valid("()".to_string()));
    assert!(is_valid("()[]{}".to_string()));
    assert!(!is_valid("(]".to_string()));
    assert!(!is_valid("([)]".to_string()));
    assert!(!is_valid("((".to_string()));
}
//! A component that keeps track of the current route string and can modify its wrapped children via props
//! to indicate the route.
use crate::agent::{bridge::RouteAgentBridge, RouteRequest};
use crate::route_info::RouteInfo;
use crate::router_component::YewRouterState;
use std::fmt::{Debug, Error as FmtError, Formatter};
use yew::virtual_dom::VNode;
use yew::{
ChildrenWithProps, Component, ComponentLink, Html, Properties, Renderable, ShouldRender,
};
/// A trait allowing user-defined components to have their props rewritten by a parent `RouteInjector` when the route changes.
pub trait RouteInjectable<T: for<'de> YewRouterState<'de>>: Properties {
    /// Changes the props based on a route.
    ///
    /// Called by `RouteInjector`'s `view` for every child on each render,
    /// before the child is re-created with the (possibly mutated) props.
    ///
    /// # Example
    /// ```
    /// use yew_router::components::route_injector::RouteInjectable;
    /// use yew_router::State;
    ///# use yew_router::{RouteInfo, Matcher};
    ///# use yew::{Children, Component, ComponentLink, Properties};
    ///
    ///# struct ListElement;
    ///# impl Component for ListElement {
    ///# type Message = ();type Properties = ();
    ///# fn create(props: Self::Properties,link: ComponentLink<Self>) -> Self {unimplemented!()}
    ///# fn update(&mut self,msg: Self::Message) -> bool {unimplemented!()}
    ///# }
    ///
    /// ##[derive(Properties)]
    /// struct ListElementProps {
    /// is_active: bool,
    /// children: Children<ListElement>,
    /// ##[props(required)]
    /// matcher: Matcher
    /// }
    /// impl RouteInjectable<State> for ListElementProps {
    /// fn inject_route(&mut self, route_info: &RouteInfo) {
    /// self.is_active = self.matcher.match_route_string(&route_info.route).is_some();
    /// }
    /// }
    /// ```
    fn inject_route(&mut self, route_info: &RouteInfo<T>);
}
/// A component that wraps child components and can tell them what the route is via props.
///
/// # Example
/// ```
/// use yew_router::matcher::{Matcher, MatcherProvider};
/// # use yew::{Component, ComponentLink, Renderable, Html, Properties, html, Classes, Children};
/// use yew_router::{RouteInjector, State};
/// use yew_router::components::route_injector::RouteInjectable;
/// # use yew_router::{RouteInfo, route};
/// pub struct ListElement {
/// props: ListElementProps
/// }
/// #[derive(Properties)]
/// pub struct ListElementProps {
/// is_active: bool,
/// children: Children<ListElement>,
/// #[props(required)]
/// matcher: Matcher
/// }
/// impl Component for ListElement {
///# type Message = ();
/// type Properties = ListElementProps;
/// // ...
///#
///# fn create(props: Self::Properties,link: ComponentLink<Self>) -> Self {
///# unimplemented!()
///# }
///# fn update(&mut self,msg: Self::Message) -> bool {
///# unimplemented!()
///# }
/// }
/// impl Renderable<ListElement> for ListElement {
/// fn view(&self) -> Html<ListElement> {
/// let mut classes = Classes::new();
/// if self.props.is_active {
/// classes.push("active");
/// }
/// html!{
/// <li class=classes>
/// {self.props.children.iter().collect::<Html<ListElement>>()}
/// </li>
/// }
/// }
/// }
/// impl RouteInjectable<State> for ListElementProps {
/// fn inject_route(&mut self, route_info: &RouteInfo) {
/// self.is_active = self.matcher.match_route_string(&route_info.route).is_some();
/// }
/// }
///# pub struct Comp;
///# impl Component for Comp {type Message = ();type Properties = ();
///# fn create(props: Self::Properties,link: ComponentLink<Self>) -> Self {unimplemented!()}
///# fn update(&mut self,msg: Self::Message) -> bool {unimplemented!()}
///# }
///
/// fn view() -> Html<Comp> {
/// html! {
/// <ul>
/// <RouteInjector<ListElement>>
/// <ListElement matcher = route!("/hi")> {"Hi"} </ListElement>
/// <ListElement matcher = route!("/goodbye")> {"Goodbye"} </ListElement>
/// </RouteInjector>
/// </ul>
/// }
/// }
///
///
/// ```
///
#[derive(Debug)]
pub struct RouteInjector<T, C>
where
    T: for<'de> YewRouterState<'de>,
    C: Component + Renderable<C>,
    <C as Component>::Properties: RouteInjectable<T>,
{
    /// Bridge to the route agent; delivers route-change notifications.
    router_bridge: RouteAgentBridge<T>,
    /// Most recent route from the agent; `None` until the first update.
    route_info: Option<RouteInfo<T>>,
    props: Props<T, C>,
}
/// Properties for `RouteInjector`.
#[derive(Properties)]
pub struct Props<T: for<'de> YewRouterState<'de>, C: Component + Renderable<C>>
where
    <C as Component>::Properties: RouteInjectable<T>,
{
    /// Child components whose props get rewritten on route changes.
    children: ChildrenWithProps<C, RouteInjector<T, C>>,
}
// `ChildrenWithProps` cannot derive `Debug`, so render a fixed placeholder.
impl<T, C> Debug for Props<T, C>
where
    T: for<'de> YewRouterState<'de>,
    C: Component + Renderable<C>,
    <C as Component>::Properties: RouteInjectable<T>,
{
    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
        f.debug_struct("Props")
            .field(
                "children",
                // Fixed: the placeholder previously named a stale type
                // ("ActiveWrapper") and was missing its closing `>`.
                &"ChildrenWithProps<_, RouteInjector<_, _>>".to_owned(),
            )
            .finish()
    }
}
/// Message type for `RouteInjector`.
#[derive(Debug)]
pub enum Msg<T: for<'de> YewRouterState<'de>> {
    /// The route agent reported a new current route.
    RouteUpdated(RouteInfo<T>),
}
impl<T, C> Component for RouteInjector<T, C>
where
    T: for<'de> YewRouterState<'de>,
    C: Component + Renderable<C>,
    <C as Component>::Properties: RouteInjectable<T>,
{
    type Message = Msg<T>;
    type Properties = Props<T, C>;
    fn create(props: Self::Properties, mut link: ComponentLink<Self>) -> Self {
        // Forward route-change notifications from the agent as messages.
        let callback = link.send_back(|route_info| Msg::RouteUpdated(route_info));
        RouteInjector {
            router_bridge: RouteAgentBridge::new(callback),
            route_info: None,
            props,
        }
    }
    // Ask the agent for the current route once mounted; no re-render is
    // needed until the reply arrives as a `RouteUpdated` message.
    fn mounted(&mut self) -> ShouldRender {
        self.router_bridge.send(RouteRequest::GetCurrentRoute);
        false
    }
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::RouteUpdated(route_info) => self.route_info = Some(route_info),
        }
        true
    }
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.props = props;
        true
    }
}
impl<T, C> Renderable<RouteInjector<T, C>> for RouteInjector<T, C>
where
    T: for<'de> YewRouterState<'de>,
    C: Component + Renderable<C>,
    <C as Component>::Properties: RouteInjectable<T>,
{
    /// Renders every child, letting each rewrite its own props from the
    /// current route first (when a route is known).
    fn view(&self) -> Html<Self> {
        self.props
            .children
            .iter()
            .map(|mut child| {
                if let Some(route_info) = &self.route_info {
                    // Allow the children to change their props based on the route.
                    child.props.inject_route(&route_info)
                }
                // TODO, is this necessary to render children from an iter over children?
                crate::router_component::render::create_component_with_scope::<C, Self>(
                    child.props,
                    child.scope,
                )
            })
            .collect::<VNode<Self>>()
    }
}
|
use ckb_fixed_hash::H256;
use lazy_static::lazy_static;
/// A 32-byte message digest to be signed or verified.
pub type Message = H256;
lazy_static! {
    /// Shared secp256k1 context; created once because construction is costly.
    pub static ref SECP256K1: secp256k1::Secp256k1<secp256k1::All> = secp256k1::Secp256k1::new();
}
mod error;
mod generator;
mod privkey;
mod pubkey;
mod signature;
pub use self::error::Error;
pub use self::generator::Generator;
pub use self::privkey::Privkey;
pub use self::pubkey::Pubkey;
pub use self::signature::Signature;
#[cfg(test)]
mod tests {
    use super::*;
    use rand::{self, Rng};
    /// Fills a fresh `Message` with random bytes.
    fn random_message() -> Message {
        let mut message = Message::default();
        let mut rng = rand::thread_rng();
        rng.fill(message.as_mut());
        message
    }
    /// The private key of a generated pair must derive the matching pubkey.
    #[test]
    fn test_gen_keypair() {
        let (privkey, pubkey) = Generator::random_keypair();
        assert_eq!(privkey.pubkey().expect("pubkey"), pubkey);
    }
    /// A recoverable signature must verify against its own pubkey.
    #[test]
    fn test_sign_verify() {
        let (privkey, pubkey) = Generator::random_keypair();
        let message = random_message();
        let signature = privkey.sign_recoverable(&message).unwrap();
        assert!(signature.is_valid());
        assert!(pubkey.verify(&message, &signature).is_ok());
    }
    /// The pubkey must be recoverable from the signature and message alone.
    #[test]
    fn test_recover() {
        let (privkey, pubkey) = Generator::random_keypair();
        let message = random_message();
        let signature = privkey.sign_recoverable(&message).unwrap();
        assert_eq!(pubkey, signature.recover(&message).unwrap());
    }
    /// Round-trips the compressed pubkey (33 bytes) and the recoverable
    /// signature (65 bytes) through their serialized forms.
    #[test]
    fn test_serialize() {
        let (privkey, pubkey) = Generator::random_keypair();
        let ser_pubkey = privkey.pubkey().expect("pubkey").serialize();
        assert_eq!(ser_pubkey.len(), 33);
        let deser_pubkey = Pubkey::from_slice(&ser_pubkey).expect("deserialize pubkey");
        assert_eq!(deser_pubkey, pubkey);
        let msg = random_message();
        let signature = privkey.sign_recoverable(&msg).expect("sign");
        let ser_signature = signature.serialize();
        assert_eq!(ser_signature.len(), 65);
        let deser_signature = Signature::from_slice(&ser_signature).expect("deserialize");
        assert!(deser_signature.is_valid());
        assert_eq!(ser_signature, deser_signature.serialize());
    }
    /// `zeroize` must leave the private key equal to all-zero bytes.
    #[test]
    fn privkey_zeroize() {
        let (mut privkey, _) = Generator::random_keypair();
        privkey.zeroize();
        assert!(privkey == Privkey::from_slice([0u8; 32].as_ref()));
    }
}
|
use std::net::SocketAddr;
use std::net::{IpAddr, Ipv4Addr};
use actix_web::{middleware, web, App, HttpRequest, HttpServer, HttpResponse, delete, get, head, options, patch, post, put};
/// Starts the HTTP server on 127.0.0.1:8000 and blocks until it shuts down.
#[actix_web::main]
pub async fn start_server() -> std::io::Result<()> {
    let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8000);
    // Fixed user-facing typo: "Runnning" -> "Running".
    println!("Running HTTP Server on http://{}", addr);
    // std::env::set_var("RUST_LOG", "actix_web=info");
    // env_logger::init();
    HttpServer::new(|| {
        App::new()
            .wrap(middleware::Logger::default())
            .service(index)
            .service(handle_ping)
    })
    .bind(&addr)?
    .run()
    .await
}
/// GET / — static welcome message.
#[get("/")]
async fn index() -> HttpResponse {
    HttpResponse::Ok().body("Welcome to iota-p2p-poc!")
}
/// GET /ping — liveness probe; always answers "pong".
#[get("/ping")]
async fn handle_ping() -> HttpResponse {
    HttpResponse::Ok().body("pong")
}
use crate::backend::db::DbPool;
use crate::backend::result::{JsResult, Result};
use crate::backend::schema::*;
use crate::common::models::*;
use actix_web::http::StatusCode;
use actix_web::web::{Data, Json, Path, ServiceConfig};
use actix_web::{get, post, put, Error, HttpRequest, HttpResponse, Responder};
use diesel::prelude::*;
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Zero-sized responder that immediately yields `204 No Content`.
struct Empty;
impl Future for Empty {
    type Output = std::result::Result<HttpResponse, Error>;
    // Always ready: resolves at the first poll with an empty 204 response.
    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
        Poll::Ready(Ok(HttpResponse::new(StatusCode::NO_CONTENT)))
    }
}
impl Responder for Empty {
    type Error = Error;
    // The responder is its own (trivially complete) future.
    type Future = Empty;
    fn respond_to(self, _req: &HttpRequest) -> Self::Future {
        Empty
    }
}
/// GET /api/events/statuses — lists all event statuses.
#[get("/api/events/statuses")]
async fn get_event_statuses(pool: Data<DbPool>) -> JsResult<Vec<EventStatus>> {
    use self::event_statuses::dsl::*;
    let conn = pool.get()?;
    let results = event_statuses.load(&*conn)?;
    Ok(Json(results))
}
/// GET /api/events — lists all events.
#[get("/api/events")]
async fn get_events(pool: Data<DbPool>) -> JsResult<Vec<Event>> {
    use self::events::dsl::*;
    let conn = pool.get()?;
    let results = events.load(&*conn)?;
    Ok(Json(results))
}
/// GET /api/events/{pk} — fetches a single event by id.
#[get("/api/events/{pk}")]
async fn get_event(pool: Data<DbPool>, pk: Path<i32>) -> JsResult<Event> {
    use self::events::dsl::*;
    let conn = pool.get()?;
    let result = events.filter(id.eq(*pk)).get_result(&*conn)?;
    Ok(Json(result))
}
/// GET /api/festivals — lists all festivals.
#[get("/api/festivals")]
async fn get_festivals(pool: Data<DbPool>) -> JsResult<Vec<Festival>> {
    use self::festivals::dsl::*;
    let conn = pool.get()?;
    let results = festivals.load(&*conn)?;
    Ok(Json(results))
}
/// GET /api/festivals/{pk} — fetches a single festival by id.
#[get("/api/festivals/{pk}")]
async fn get_festival(pool: Data<DbPool>, pk: Path<i32>) -> JsResult<Festival> {
    use self::festivals::dsl::*;
    let conn = pool.get()?;
    let result = festivals.filter(id.eq(*pk)).get_result(&*conn)?;
    Ok(Json(result))
}
/// POST /api/festivals — inserts a new festival; responds 204 on success.
#[post("/api/festivals")]
async fn post_festival(pool: Data<DbPool>, festival: Json<NewFestival>) -> Result<Empty> {
    let conn = pool.get()?;
    diesel::insert_into(festivals::table)
        .values(&*festival)
        .execute(&*conn)?;
    Ok(Empty)
}
/// POST /api/events — inserts a new event; responds 204 on success.
///
/// Fixed: the route was registered as "/api/festivals", colliding with
/// `post_festival` even though the handler inserts into `events`.
#[post("/api/events")]
async fn post_event(pool: Data<DbPool>, event: Json<NewEvent>) -> Result<Empty> {
    let conn = pool.get()?;
    diesel::insert_into(events::table)
        .values(&*event)
        .execute(&*conn)?;
    Ok(Empty)
}
/// PUT /api/events/{pk} — updates an existing event; responds 204 on success.
#[put("/api/events/{pk}")]
async fn put_event(pool: Data<DbPool>, pk: Path<i32>, event: Json<NewEvent>) -> Result<Empty> {
    use self::events::dsl::*;
    let conn = pool.get()?;
    diesel::update(events.filter(id.eq(*pk)))
        .set(&*event)
        .execute(&*conn)?;
    Ok(Empty)
}
/// PUT /api/festivals/{pk} — updates an existing festival; responds 204 on success.
#[put("/api/festivals/{pk}")]
async fn put_festival(
    pool: Data<DbPool>,
    pk: Path<i32>,
    festival: Json<NewFestival>,
) -> Result<Empty> {
    use self::festivals::dsl::*;
    let conn = pool.get()?;
    diesel::update(festivals.filter(id.eq(*pk)))
        .set(&*festival)
        .execute(&*conn)?;
    Ok(Empty)
}
/// Registers every API handler on the actix service configuration.
pub fn setup(cfg: &mut ServiceConfig) {
    cfg.service(get_event_statuses)
        .service(get_events)
        .service(get_festivals)
        .service(get_event)
        .service(get_festival)
        .service(post_festival)
        .service(post_event)
        .service(put_festival)
        .service(put_event);
}
|
pub extern crate rusty_sword;
use rusty_sword::*;
/// Entry point: spawns render and sound threads, then runs the game loop on
/// the main thread until the player quits or dies.
fn main() {
    // To avoid lock contention for this group of objects, we'll follow the rule:
    // - You must have a lock on floor before trying to lock anything else
    // - You must unlock all other locks before (or when) floor gets unlocked
    let floor = Arc::new(Mutex::new(Floor::new(60, 30)));
    let player = Arc::new(Mutex::new(Player::new(Coord::new(30, 15))));
    // Coordinates the render thread must redraw.
    let dirty_coords = Arc::new(Mutex::new(Vec::<Coord>::new()));
    let monsters = Arc::new(Mutex::new(Vec::<Monster>::new()));
    // To avoid lock contention, we'll follow the rule:
    // - stop should be locked only when no other objects are locked
    let stop = Arc::new(Mutex::new(false));
    // Render Thread: owns clones of every shared handle and redraws dirty cells.
    let render_thread = {
        let stop = stop.clone();
        let floor = floor.clone();
        let player = player.clone();
        let dirty_coords = dirty_coords.clone();
        let monsters = monsters.clone();
        spawn(move || { render_loop(stop, floor, player, dirty_coords, monsters) } )
    };
    // Sound Thread: receives sound-effect names ("monster_dies", etc.) over a channel.
    let (sound_tx, sound_rx) = mpsc::channel::<&str>();
    let sound_thread = {
        let stop = stop.clone();
        spawn(move || { sound_loop(stop, sound_rx) } )
    };
    // Game Loop state: non-blocking stdin, RNG for spawns, and frame timing.
    let mut quit = false;
    let mut astdin = async_stdin();
    let mut rng = rand::thread_rng();
    let mut spawn_timer = Timer::from_millis(1000);
    let mut last_instant = Instant::now();
    loop {
        // ~100 fps cap; keeps the loop from spinning.
        sleep(Duration::from_millis(10));
        if quit {
            // Small grace period so the last frame/sound can land, then signal
            // the worker threads to stop (stop is locked with nothing else held).
            sleep(Duration::from_millis(50));
            *stop.lock().unwrap() = true;
            break;
        }
        // Lock floor first! (per the ordering rule above)
        let floor = floor.lock().unwrap();
        let mut player = player.lock().unwrap();
        let mut dirty_coords = dirty_coords.lock().unwrap();
        let mut monsters = monsters.lock().unwrap();
        let current_instant = Instant::now();
        let delta = current_instant - last_instant;
        // Player moves? Drain all pending input bytes this frame.
        let mut player_moved = false;
        let mut bytebuf : [u8; 1] = [0];
        while let Ok(amount) = astdin.read(&mut bytebuf) {
            if amount == 1 {
                match bytebuf[0] {
                    // 27 is the ESC byte; 'q' also quits.
                    27|b'q' => {
                        quit = true;
                    },
                    _ => {
                        if let Some(direction) = byte_to_direction(bytebuf[0]) {
                            player_moved = player.travel(direction, &floor, &mut dirty_coords);
                        }
                    },
                }
            } else {
                break;
            }
        }
        // Update monster timers
        for monster in monsters.iter_mut() {
            monster.move_timer.update(delta);
        }
        // Monsters move? Only when the player stood still this frame.
        if !player_moved {
            for monster in monsters.iter_mut() {
                monster.try_travel(player.coord, &mut dirty_coords);
            }
        }
        // Did a monster die? Any monster standing on the sword is removed.
        let num_monsters = monsters.len();
        monsters.retain(|monster| monster.coord != player.sword_coord);
        let num_killed = num_monsters - monsters.len();
        if num_killed > 0 {
            player.score += num_killed as u64;
            sound_tx.send("monster_dies").unwrap();
        }
        // Spawn a new monster! Random 1-5s interval, anywhere not on the player.
        spawn_timer.update(delta);
        if spawn_timer.ready {
            spawn_timer = Timer::from_millis(sample(&mut rng, 1000..5000, 1)[0]);
            let to_coord = Coord::new(
                sample(&mut rng, 1..59, 1)[0],
                sample(&mut rng, 1..29, 1)[0],
            );
            if to_coord != player.coord {
                monsters.push(Monster::new(to_coord, &mut rng));
                sound_tx.send("monster_spawns").unwrap();
            }
        }
        // Did the player die? A monster sharing the player's cell ends the game.
        if monsters.iter().any(|monster| monster.coord == player.coord) {
            quit = true;
            sound_tx.send("player_dies").unwrap();
        }
        last_instant = current_instant;
    }
    // Wait for other threads to stop before exiting
    render_thread.join().unwrap();
    println!("Thanks for playing!");
    sound_thread.join().unwrap();
}
|
use std::io::Write;
/// The `Hash` trait specifies the common interface for hash functions.
///
/// The `write` method from the `Write` supertrait adds more data to the
/// running hash. It never returns an error.
pub trait Hash: Write {
    /// Returns the hash's underlying block size, in bytes.
    /// The `write` method must be able to accept any amount
    /// of data, but it may operate more efficiently if all writes
    /// are a multiple of the block size.
    fn block_size() -> usize;
    /// Resets the hash to its initial state.
    fn reset(&mut self);
    /// Returns the number of bytes `sum` will return.
    fn size() -> usize;
    /// Returns the resulting digest. It does not change the underlying hash state.
    fn sum(&mut self) -> Vec<u8>;
}
|
use std::fmt;
use strum_macros::{EnumIter, EnumString};
/// ISO 3166-1 alpha-3 style three-letter country codes.
///
/// `None` is a placeholder for "no country". The variants between the
/// ENUM START/END markers appear to be generated — keep the markers so the
/// generator can re-emit them. `EnumString` gives `FromStr` (parse "USA"
/// into `Alpha3::USA`); `EnumIter` gives iteration over all variants.
#[derive(Debug, PartialEq, EnumString, EnumIter)]
pub enum Alpha3 {
    None,
    // ENUM START
    AFG,
    AGO,
    ALB,
    AND,
    ANT,
    ARE,
    ARG,
    ARM,
    AUT,
    AZE,
    BDI,
    BEL,
    BEN,
    BFA,
    BGD,
    BGR,
    BIH,
    BLR,
    BLZ,
    BOL,
    BRA,
    BRN,
    BTN,
    BWA,
    CAF,
    CAN,
    CHE,
    CHL,
    CHN,
    CIV,
    CMR,
    COD,
    COG,
    COL,
    CRI,
    CUB,
    CZE,
    DEU,
    DJI,
    DNK,
    DOM,
    DZA,
    ECU,
    EGY,
    ERI,
    ESH,
    ESP,
    EST,
    ETH,
    FIN,
    FRA,
    GAB,
    GBR,
    GEO,
    GHA,
    GIB,
    GIN,
    GMB,
    GNB,
    GNQ,
    GRC,
    GTM,
    GUF,
    GUY,
    HND,
    HRV,
    HTI,
    HUN,
    IDN,
    IND,
    IRL,
    IRN,
    IRQ,
    ISR,
    ITA,
    JOR,
    KAZ,
    KEN,
    KGZ,
    KHM,
    KOR,
    KWT,
    LAO,
    LBN,
    LBR,
    LBY,
    LIE,
    LSO,
    LTU,
    LUX,
    LVA,
    MAF,
    MAR,
    MCO,
    MDA,
    MEX,
    MKD,
    MLI,
    MMR,
    MNE,
    MNG,
    MOZ,
    MRT,
    MWI,
    MYS,
    NAM,
    NER,
    NGA,
    NIC,
    NLD,
    NOR,
    NPL,
    OMN,
    PAK,
    PAN,
    PER,
    PNG,
    POL,
    PRK,
    PRT,
    PRY,
    PSE,
    QAT,
    ROU,
    RUS,
    RWA,
    SAU,
    SCG,
    SDN,
    SEN,
    SLE,
    SLV,
    SMR,
    SOM,
    SRB,
    SUR,
    SVK,
    SVN,
    SWE,
    SWZ,
    SXM,
    SYR,
    TCD,
    TGO,
    THA,
    TJK,
    TKM,
    TLS,
    TUN,
    TUR,
    TZA,
    UGA,
    UKR,
    URY,
    USA,
    UZB,
    VAT,
    VEN,
    VNM,
    YEM,
    ZAF,
    ZMB,
    ZWE,
    // ENUM END
}
impl Alpha3 {
    /// Returns `true` when this is the `None` placeholder variant
    /// (i.e. no country code is set).
    pub fn is_none(&self) -> bool {
        // `matches!` replaces the hand-written true/false match.
        matches!(self, Alpha3::None)
    }
}
impl fmt::Display for Alpha3 {
    /// Displays the code using its derived `Debug` name, e.g. "USA".
    /// Note: `write!` with `{:?}` deliberately starts a fresh format spec,
    /// so outer width/padding flags are not forwarded to the variant name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
|
use apllodb_server::Record;
/// Renders a record for command-line output. Consumes the record.
pub(super) trait RecordCliDisplay {
    /// Returns a single-line, tab-separated textual form of the record.
    fn cli_display(self) -> String;
}
impl RecordCliDisplay for Record {
    /// Consumes the record and renders every (name, value) pair as
    /// `name: value` followed by a tab character.
    fn cli_display(self) -> String {
        self.into_name_values()
            .into_iter()
            .map(|(name, value)| format!("{}: {}\t", name, value))
            .collect()
    }
}
|
mod termion;
pub use self::termion::TermionKeyboard;
/// Abstraction over a keyboard input source (implemented by `TermionKeyboard`).
pub trait Keyboard {
    /// Returns the next pending keystroke, or `None` when no key is available.
    fn get_next_keystroke(&mut self) -> Option<KeyStroke>;
}
/// A single decoded key press.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum KeyStroke {
    Char(char),
    KeyF(u8),
    Alt(char),
    KeyUp,
    KeyDown,
    KeyLeft,
    KeyRight,
    KeyPreviousPage,
    KeyNextPage,
    KeyEscape,
    KeyBackSpace,
    KeyDelete,
    KeySpace,
}
impl KeyStroke {
    /// Parses a human-readable key description into a `KeyStroke`.
    ///
    /// A description consisting of exactly one character maps to `Char`;
    /// angle-bracketed names (e.g. "<esc>") map to the special keys.
    /// Returns `None` for anything unrecognized.
    pub fn from_description(description: &str) -> Option<Self> {
        // Count characters, not bytes: the old `len() == 1` check measured
        // UTF-8 bytes and wrongly rejected single multi-byte characters
        // such as 'é'.
        let mut chars = description.chars();
        if let (Some(c), None) = (chars.next(), chars.next()) {
            return Some(KeyStroke::Char(c));
        }
        match description {
            "<f1>" => Some(KeyStroke::KeyF(1)),
            "<key_up>" => Some(KeyStroke::KeyUp),
            "<key_down>" => Some(KeyStroke::KeyDown),
            "<key_left>" => Some(KeyStroke::KeyLeft),
            "<key_right>" => Some(KeyStroke::KeyRight),
            "<page_up>" => Some(KeyStroke::KeyPreviousPage),
            "<page_down>" => Some(KeyStroke::KeyNextPage),
            "<backspace>" => Some(KeyStroke::KeyBackSpace),
            "<del>" => Some(KeyStroke::KeyDelete),
            "<space>" => Some(KeyStroke::KeySpace),
            "<esc>" => Some(KeyStroke::KeyEscape),
            _ => None,
        }
    }
}
|
//use std::fmt;
use futures::{Async, Future, Poll};
/// This is created by the `FutureInspector::inspect_err` method.
///
/// Wraps an inner future and runs a callback exactly once if the inner
/// future resolves to an error.
#[derive(Debug)]
#[must_use = "futures do nothing unless polled"]
pub struct InspectErr<A, F>
where
    A: Future,
{
    // The wrapped future being polled through.
    future: A,
    // Error callback; `Option` so it can be `take()`n and invoked at most once.
    f: Option<F>,
}
impl<A, F> InspectErr<A, F>
where
    A: Future,
    for<'r> F: FnOnce(&'r A::Error) -> (),
{
    /// Wraps `future`, storing the callback until an error is observed.
    fn new(future: A, f: F) -> Self {
        Self { future, f: Some(f) }
    }
}
impl<A, F> Future for InspectErr<A, F>
where
    A: Future,
    F: FnOnce(&A::Error),
{
    type Item = A::Item;
    type Error = A::Error;
    /// Polls the inner future; success and not-ready pass straight through.
    /// On error, the callback is invoked (once) and the error is re-thrown.
    fn poll(&mut self) -> Poll<A::Item, A::Error> {
        match self.future.poll() {
            Ok(Async::NotReady) => Ok(Async::NotReady),
            Ok(Async::Ready(e)) => Ok(Async::Ready(e)),
            Err(e) => {
                // Compile-time switch: the callback runs in debug builds, or in
                // any build where the "debug-only" feature is NOT enabled.
                if cfg!(any(debug_assertions, not(feature = "debug-only"))) {
                    // `take()` enforces single invocation; polling a completed
                    // futures-0.1 future again is a caller bug, hence `expect`.
                    (self.f.take().expect("cannot poll InspectErr twice"))(&e);
                }
                Err(e)
            }
        }
    }
}
/// Do something with the error of a future, passing it on.
/// This combinator was implemented in the futures-0.2 series (which is now
/// dead), but never made it back to the futures-0.1.x series.
#[allow(clippy::module_name_repetitions)]
pub trait FutureInspector<I, E>: Future<Item = I, Error = E> {
    /// Do something with the error of a future, passing it on.
    ///
    /// When using futures, you'll often chain several of them together.
    /// While working on such code, you might want to check out what's happening to the errors
    /// at various parts in the pipeline. To do that, insert a call to `inspect_err()`.
    fn inspect_err<F>(self, f: F) -> InspectErr<Self, F>
    where
        for<'r> F: FnOnce(&'r Self::Error) -> (),
        Self: Sized,
    {
        // `assert_future` only pins the Item/Error types at compile time.
        assert_future::<Self::Item, Self::Error, _>(InspectErr::new(self, f))
    }
    // fn debug(self) -> future::Inspect<Self, _>;
}
// Blanket impl: every futures-0.1 future gets `inspect_err` for free.
impl<I, E, T> FutureInspector<I, E> for T where T: Future<Item = I, Error = E> {}
/// Identity helper whose only job is to make the compiler check that `f`
/// is a `Future` with the expected `Item`/`Error` types.
fn assert_future<A, B, F>(f: F) -> F
where
    F: Future<Item = A, Error = B>,
{
    f
}
|
use crate::parser::{ Parser, gen::Compiler };
use crate::common::Closure;
use crate::vm::VM;
use std::io::prelude::*;
use std::fs::File;
/// Compiles the source file at `name` into a top-level closure named "main".
///
/// Returns `Err` with a human-readable message when the file cannot be
/// opened or read, or when parsing/compilation fails. (Previously I/O
/// failures panicked via `expect` even though the signature is fallible.)
pub fn compile_file(name: String) -> Result<Closure, String> {
    let mut file = File::open(name.clone())
        .map_err(|e| format!("could not open file: {}", e))?;
    let mut str = String::new();
    file.read_to_string(&mut str)
        .map_err(|e| format!("failure to read file: {}", e))?;
    // An empty source file still yields a valid, empty "main" closure.
    if str.is_empty() {
        let mut c = Closure::new(name);
        c.name = "main".into();
        return Ok(c);
    }
    let mut parser = Parser::new(str.into(), name.clone());
    parser.parse()?;
    let mut compiler = Compiler::new(name.clone());
    compiler.compile(parser.nodes)?;
    compiler.closure.name = "main".into();
    Ok(compiler.closure)
}
/// Compiles the file at `name` and immediately runs the resulting closure
/// in a fresh VM, propagating any compile or runtime error.
pub fn do_file(name: String) -> Result<(), String> {
    let main_closure = compile_file(name)?;
    let mut machine = VM::new(main_closure);
    machine.run()
}
#[allow(unused)]
pub fn do_string(src: String) -> Result<(), String> {
let parser = Parser::new(src, "buf".into());
let mut compiler = Compiler::new("buf".into());
compiler.compile(parser.nodes)?;
let mut vm = VM::new(compiler.closure.clone());
vm.run()
} |
use serde::Serialize;
use crate::domain::author::Author;
use crate::domain::category::Category;
use crate::domain::collection::Collection;
use crate::domain::interaction::Review;
use crate::domain::publication::{Image, Page, Publication, Statistics};
use crate::domain::reader::Reader;
/// Serializable view of a publication's interaction statistics.
#[derive(Serialize)]
pub struct StatisticsDto {
    pub views: u32,
    pub unique_views: u32,
    pub readings: u32,
    pub likes: u32,
    pub reviews: u32,
    pub stars: f32,
}
impl From<&Statistics> for StatisticsDto {
    /// Copies every counter out of the domain `Statistics` object.
    fn from(statistics: &Statistics) -> Self {
        StatisticsDto {
            views: statistics.views(),
            unique_views: statistics.unique_views(),
            readings: statistics.readings(),
            likes: statistics.likes(),
            reviews: statistics.reviews(),
            stars: statistics.stars(),
        }
    }
}
/// Serializable view of an author. Optional fields are populated on demand
/// via the builder-style methods below.
#[derive(Serialize)]
pub struct AuthorDto {
    pub id: String,
    pub username: String,
    pub name: String,
    pub lastname: String,
    pub publications: Option<Vec<PublicationDto>>,
    pub publication_count: Option<usize>,
    pub collection_count: Option<usize>,
}
impl From<&Author> for AuthorDto {
    /// Builds the base DTO; all optional fields start as `None`.
    fn from(author: &Author) -> Self {
        AuthorDto {
            id: author.base().id().to_string(),
            username: author.username().to_string(),
            name: author.name().to_string(),
            lastname: author.lastname().to_string(),
            publications: None,
            publication_count: None,
            collection_count: None,
        }
    }
}
impl AuthorDto {
    /// Attaches the author's publications.
    pub fn publications(mut self, publications: Vec<PublicationDto>) -> Self {
        self.publications = Some(publications);
        self
    }
    /// Attaches the number of publications.
    pub fn publication_count(mut self, count: usize) -> Self {
        self.publication_count = Some(count);
        self
    }
    /// Attaches the number of collections.
    pub fn collection_count(mut self, count: usize) -> Self {
        self.collection_count = Some(count);
        self
    }
}
/// Serializable view of a category; publications are attached on demand.
#[derive(Serialize)]
pub struct CategoryDto {
    pub id: String,
    pub name: String,
    pub publications: Option<Vec<PublicationDto>>,
}
impl From<&Category> for CategoryDto {
    /// Builds the base DTO; `publications` starts as `None`.
    fn from(category: &Category) -> Self {
        CategoryDto {
            id: category.base().id().to_string(),
            name: category.name().to_string(),
            publications: None,
        }
    }
}
impl CategoryDto {
    /// Attaches the category's publications.
    pub fn publications(mut self, publications: Vec<PublicationDto>) -> Self {
        self.publications = Some(publications);
        self
    }
}
/// Serializable view of an image: just its URL.
#[derive(Serialize)]
pub struct ImageDto {
    pub url: String,
}
impl From<&Image> for ImageDto {
    fn from(image: &Image) -> Self {
        ImageDto {
            url: image.url().to_string(),
        }
    }
}
/// Serializable view of one publication page with its images.
#[derive(Serialize)]
pub struct PageDto {
    pub number: u32,
    pub images: Vec<ImageDto>,
}
impl From<&Page> for PageDto {
    fn from(page: &Page) -> Self {
        PageDto {
            number: page.number(),
            // Point-free `map(ImageDto::from)` replaces the redundant
            // closure `|image| ImageDto::from(image)` (clippy::redundant_closure).
            images: page.images().iter().map(ImageDto::from).collect(),
        }
    }
}
/// Serializable view of a publication. Optional relations (author, category,
/// pages, status) are attached on demand via the builder methods below.
#[derive(Serialize)]
pub struct PublicationDto {
    pub id: String,
    pub author_id: Option<String>,
    pub author: Option<AuthorDto>,
    pub name: String,
    pub synopsis: String,
    pub category_id: Option<String>,
    pub category: Option<CategoryDto>,
    pub tags: Vec<String>,
    pub statistics: StatisticsDto,
    pub pages: Option<Vec<PageDto>>,
    pub status: Option<String>,
}
impl From<&Publication> for PublicationDto {
    /// Builds the base DTO from the domain object; optional relations start
    /// as `None` and are filled by the builder methods.
    fn from(publication: &Publication) -> Self {
        PublicationDto {
            id: publication.base().id().to_string(),
            author_id: None,
            author: None,
            name: publication.header().name().to_string(),
            synopsis: publication.header().synopsis().to_string(),
            category_id: None,
            category: None,
            tags: publication
                .header()
                .tags()
                .iter()
                .map(|tag| tag.name().to_string())
                .collect(),
            statistics: StatisticsDto::from(publication.statistics()),
            pages: None,
            status: None,
        }
    }
}
impl PublicationDto {
    /// Attaches the author's id taken from the publication.
    pub fn author_id(mut self, publication: &Publication) -> Self {
        self.author_id = Some(publication.author_id().to_string());
        self
    }
    /// Attaches a full author DTO.
    pub fn author(mut self, author: AuthorDto) -> Self {
        self.author = Some(author);
        self
    }
    /// Attaches the category id taken from the publication header.
    pub fn category_id(mut self, publication: &Publication) -> Self {
        self.category_id = Some(publication.header().category_id().to_string());
        self
    }
    /// Attaches a full category DTO.
    pub fn category(mut self, category: CategoryDto) -> Self {
        self.category = Some(category);
        self
    }
    /// Attaches the publication's pages.
    pub fn pages(mut self, publication: &Publication) -> Self {
        // Point-free `map(PageDto::from)` replaces the redundant closure
        // (clippy::redundant_closure).
        self.pages = Some(publication.pages().iter().map(PageDto::from).collect());
        self
    }
    /// Attaches the current status name from the status history.
    pub fn status(mut self, publication: &Publication) -> Self {
        self.status = Some(publication.status_history().current().status().to_string());
        self
    }
}
/// Serializable view of a collection. Optional relations are attached on
/// demand via the builder methods below.
#[derive(Serialize)]
pub struct CollectionDto {
    pub id: String,
    pub author_id: Option<String>,
    pub author: Option<AuthorDto>,
    pub name: String,
    pub synopsis: String,
    pub category_id: Option<String>,
    pub category: Option<CategoryDto>,
    pub tags: Vec<String>,
    pub publication_count: Option<usize>,
    pub publications: Option<Vec<PublicationDto>>,
}
impl From<&Collection> for CollectionDto {
    /// Builds the base DTO; optional relations start as `None`.
    fn from(collection: &Collection) -> Self {
        CollectionDto {
            id: collection.base().id().to_string(),
            author_id: None,
            author: None,
            name: collection.header().name().to_string(),
            synopsis: collection.header().synopsis().to_string(),
            category_id: None,
            category: None,
            tags: collection
                .header()
                .tags()
                .iter()
                .map(|tag| tag.name().to_string())
                .collect(),
            publication_count: None,
            publications: None,
        }
    }
}
impl CollectionDto {
    /// Attaches the author's id taken from the collection.
    pub fn author_id(mut self, collection: &Collection) -> Self {
        self.author_id = Some(collection.author_id().to_string());
        self
    }
    /// Attaches a full author DTO.
    pub fn author(mut self, author: AuthorDto) -> Self {
        self.author = Some(author);
        self
    }
    /// Attaches the category id taken from the collection header.
    pub fn category_id(mut self, collection: &Collection) -> Self {
        self.category_id = Some(collection.header().category_id().to_string());
        self
    }
    /// Attaches a full category DTO.
    pub fn category(mut self, category: CategoryDto) -> Self {
        self.category = Some(category);
        self
    }
    /// Attaches the number of publications.
    pub fn publication_count(mut self, count: usize) -> Self {
        self.publication_count = Some(count);
        self
    }
    /// Attaches the collection's publications.
    pub fn publications(mut self, publications: Vec<PublicationDto>) -> Self {
        self.publications = Some(publications);
        self
    }
}
/// Serializable view of a review; the reader is attached on demand.
#[derive(Serialize)]
pub struct ReviewDto {
    pub reader_id: Option<String>,
    pub reader: Option<ReaderDto>,
    pub publication_id: String,
    pub stars: u8,
    pub comment: String,
}
impl From<&Review> for ReviewDto {
    /// Builds the base DTO; reader fields start as `None`.
    fn from(review: &Review) -> Self {
        ReviewDto {
            reader_id: None,
            reader: None,
            publication_id: review.base().publication_id().to_string(),
            stars: review.stars().value(),
            comment: review.comment().to_string(),
        }
    }
}
impl ReviewDto {
    /// Attaches the reviewer's id taken from the review's key.
    pub fn reader_id(mut self, review: &Review) -> Self {
        self.reader_id = Some(review.base().reader_id().to_string());
        self
    }
    /// Attaches a full reader DTO. (The parameter was previously misnamed
    /// `review` although its type is `ReaderDto`; renaming a parameter is
    /// caller-transparent in Rust.)
    pub fn reader(mut self, reader: ReaderDto) -> Self {
        self.reader = Some(reader);
        self
    }
}
/// Serializable view of a reader account.
#[derive(Serialize)]
pub struct ReaderDto {
    pub id: String,
    pub username: String,
    pub name: String,
    pub lastname: String,
    pub subscribed: bool,
}
impl From<&Reader> for ReaderDto {
    fn from(reader: &Reader) -> Self {
        ReaderDto {
            id: reader.base().id().to_string(),
            username: reader.username().to_string(),
            name: reader.name().to_string(),
            lastname: reader.lastname().to_string(),
            subscribed: reader.is_subscribed(),
        }
    }
}
|
use std::cmp;
/// One directed edge in the residual graph.
#[derive(Clone, Copy)]
struct Edge {
    // Head vertex of the edge.
    to: usize,
    // Remaining (residual) capacity.
    cap: usize,
    // Index of the paired reverse edge inside `graph[to]`.
    rev: usize,
}
/// Maximum flow via the Ford–Fulkerson method (DFS augmenting paths).
pub struct Maxflow {
    // Adjacency list of residual edges.
    graph: Vec<Vec<Edge>>,
    // Per-augmentation visited marks.
    used: Vec<bool>,
    // Number of vertices.
    max_v: usize,
}
impl Maxflow {
    /// Creates an empty flow network on `max_v` vertices (ids `0..max_v`).
    pub fn new(max_v: usize) -> Self {
        Maxflow {
            graph: vec![vec![]; max_v],
            used: vec![false; max_v],
            // Field-init shorthand (was `max_v: max_v`).
            max_v,
        }
    }
    /// Adds a directed edge `from -> to` with capacity `cap`, plus the
    /// zero-capacity reverse edge needed by the residual graph.
    /// Note: assumes `from != to` (a self-loop would compute `rev` wrongly).
    pub fn add_edge(&mut self, from: usize, to: usize, cap: usize) {
        let rev = self.graph[to].len();
        self.graph[from].push(Edge { to, cap, rev });
        let rev = self.graph[from].len() - 1;
        self.graph[to].push(Edge {
            to: from,
            cap: 0,
            rev,
        });
    }
    /// DFS for an augmenting path from `v` to `t` with bottleneck `f`;
    /// returns the amount pushed (0 when no augmenting path exists).
    fn dfs(&mut self, v: usize, t: usize, f: usize) -> usize {
        if v == t {
            return f;
        }
        self.used[v] = true;
        for i in 0..self.graph[v].len() {
            if !self.used[self.graph[v][i].to] && self.graph[v][i].cap > 0 {
                let d = self.dfs(self.graph[v][i].to, t, cmp::min(f, self.graph[v][i].cap));
                if d > 0 {
                    // Push `d` units forward and give them back to the reverse edge.
                    self.graph[v][i].cap -= d;
                    let to = self.graph[v][i].to;
                    let rev = self.graph[v][i].rev;
                    self.graph[to][rev].cap += d;
                    return d;
                }
            }
        }
        0
    }
    /// Clears the visited marks in place (reuses the allocation instead of
    /// rebuilding the Vec every augmentation round).
    fn clear(&mut self) {
        for u in self.used.iter_mut() {
            *u = false;
        }
    }
    /// Computes the maximum flow from source `s` to sink `t`.
    pub fn execute(&mut self, s: usize, t: usize) -> usize {
        let mut flow = 0;
        let inf = std::usize::MAX;
        loop {
            self.clear();
            let f = self.dfs(s, t, inf);
            if f == 0 {
                return flow;
            }
            flow += f;
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// Classic 4-vertex example: max flow from 0 to 3 is 3
    /// (2 along 0->1 and 1 along 0->2, rerouted through 1->2 as needed).
    #[test]
    fn test_maxflow() {
        let v = 4;
        let edges = vec![(0, 1, 2), (0, 2, 1), (1, 2, 1), (1, 3, 1), (2, 3, 2)];
        let mut mf = Maxflow::new(v);
        for &e in &edges {
            mf.add_edge(e.0, e.1, e.2);
        }
        let result = mf.execute(0, v - 1);
        assert_eq!(result, 3);
    }
}
|
use crate::things::thing::Thing;
/// An orbiting third-person camera.
pub struct Camera {
    pub x: f32,
    pub y: f32,
    pub z: f32,
    // rx/ry are rotation angles in radians; from the math below rx appears
    // to be pitch (vertical) and ry yaw (horizontal) — TODO confirm.
    pub rx: f32,
    pub ry: f32,
    // Orbit distance from the target.
    pub radius: f32,
}
impl Camera {
    pub fn new(x: f32, y: f32, z: f32, rx: f32, ry: f32, radius: f32) -> Self {
        Camera { x, y, z, rx, ry, radius }
    }
    /// Repositions the camera on a sphere of `radius` around `target`,
    /// using the current rx/ry angles; the vertical position is additionally
    /// offset by `target.height`.
    pub fn update_orbit(&mut self, target: &Thing) {
        let sin_x = self.rx.sin();
        let cos_x = self.rx.cos();
        let sin_y = self.ry.sin();
        let cos_y = self.ry.cos();
        self.x = target.position.x - self.radius * cos_x * sin_y;
        self.y = target.position.y + self.radius * sin_x + target.height;
        self.z = target.position.z + self.radius * cos_x * cos_y;
    }
}
|
use actix::prelude::*;
use actix_web::{dev::Body, http::StatusCode, web::HttpResponse, ResponseError};
use diesel::prelude::*;
use failure_derive::Fail;
use lazy_static::lazy_static;
use uaparser::{Parser as _, UserAgentParser};
use crate::db::models::Log;
use crate::db::DbExecutor;
use crate::logs::LogEntry;
lazy_static! {
    // Parser for User-Agent strings, built once from the bundled regex file.
    static ref UA_PARSER: UserAgentParser =
        UserAgentParser::from_bytes(include_bytes!("../../ua_regexes.yaml"))
            .expect("Invalid ua_regexes.yaml file");
}
/// Actor message: fetch all login log entries from the database.
pub struct GetLogs;
/// Errors produced while handling `GetLogs`.
#[derive(Debug, Fail)]
pub enum Error {
    #[fail(display = "Database error: {}", _0)]
    DbError(#[cause] diesel::result::Error),
}
impl ResponseError for Error {
    /// Maps every database failure to a plain 500 response.
    fn error_response(&self) -> HttpResponse<Body> {
        match self {
            Error::DbError(_) => HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR),
        }
    }
}
impl From<diesel::result::Error> for Error {
    fn from(f: diesel::result::Error) -> Self {
        Error::DbError(f)
    }
}
impl Message for GetLogs {
    type Result = Result<Vec<LogEntry>, Error>;
}
impl Handler<GetLogs> for DbExecutor {
    type Result = Result<Vec<LogEntry>, Error>;
    /// Loads every `Log` row and enriches it with OS/browser names parsed
    /// from the stored User-Agent string.
    fn handle(&mut self, _msg: GetLogs, _: &mut Self::Context) -> Self::Result {
        use super::schema::logs::dsl::logs;
        let vlog = logs.load::<Log>(&self.0)?;
        Ok(vlog
            .into_iter()
            .map(|log| {
                let ua = UA_PARSER.parse(&log.user_agent);
                // e.g. "Windows 10" — the major version may be absent, in
                // which case the default empty string is used.
                let os = format!("{} {}", ua.os.family, ua.os.major.unwrap_or_default());
                let browser = format!(
                    "{} {}",
                    ua.user_agent.family,
                    ua.user_agent.major.unwrap_or_default()
                );
                LogEntry {
                    id: log.id,
                    login: log.login,
                    logging_time: log.logging_time,
                    logging_succession: log.logging_succession,
                    ip_addr: log.ip_addr,
                    os,
                    browser,
                }
            })
            .collect())
    }
}
|
use session::Session;
use stream;
use util::FileId;
use byteorder::{BigEndian, WriteBytesExt};
use std::io::Write;
// Size of one requested data chunk: 128 KiB.
const CHUNK_SIZE: usize = 0x20000;
/// What a `Handler` wants to happen next after a stream event.
pub enum Response<H> {
    // Wait(H),
    Continue(H),
    // Restart the stream at the given byte offset.
    Seek(H, usize),
    Close,
}
/// Consumer of audio-file stream events (headers, data, eof, errors).
pub trait Handler : Sized + Send + 'static {
    fn on_header(self, header_id: u8, header_data: &[u8], session: &Session) -> Response<Self>;
    fn on_data(self, offset: usize, data: &[u8], session: &Session) -> Response<Self>;
    fn on_eof(self, session: &Session) -> Response<Self>;
    fn on_error(self, session: &Session);
}
/// Stream adapter that fetches a file chunk by chunk, tracking the current
/// byte offset and forwarding events to the wrapped handler.
pub struct AudioFile<H: Handler> {
    handler: H,
    file_id: FileId,
    offset: usize,
}
impl <H: Handler> AudioFile<H> {
    /// Starts streaming `file_id` from byte `offset`; the created
    /// `AudioFile` registers itself on the session's stream channel and
    /// forwards events to `handler`. Note this is fire-and-forget: the
    /// constructed value is owned by the session, so nothing is returned.
    pub fn new(file_id: FileId, offset: usize, handler: H, session: &Session) {
        // Field-init shorthand replaces the redundant `field: field` form.
        let handler = AudioFile {
            handler,
            file_id,
            offset,
        };
        session.stream(Box::new(handler));
    }
}
impl <H: Handler> stream::Handler for AudioFile<H> {
    /// Channel opened: send the chunk request packet for the current offset.
    fn on_create(self, channel_id: stream::ChannelId, session: &Session) -> stream::Response<Self> {
        debug!("Got channel {}", channel_id);
        // Request packet layout (NOTE(review): field meanings inferred from
        // sizes only — confirm against the protocol spec): channel id,
        // two flag bytes, u16 + 2×u32 of fixed values, a 0x9C40/0x20000
        // pair, the 20-byte file id, then the requested range. Offsets are
        // sent divided by 4, i.e. in 4-byte-word units.
        let mut data: Vec<u8> = Vec::new();
        data.write_u16::<BigEndian>(channel_id).unwrap();
        data.write_u8(0).unwrap();
        data.write_u8(1).unwrap();
        data.write_u16::<BigEndian>(0x0000).unwrap();
        data.write_u32::<BigEndian>(0x00000000).unwrap();
        data.write_u32::<BigEndian>(0x00009C40).unwrap();
        data.write_u32::<BigEndian>(0x00020000).unwrap();
        data.write(&self.file_id.0).unwrap();
        data.write_u32::<BigEndian>(self.offset as u32 / 4).unwrap();
        data.write_u32::<BigEndian>((self.offset + CHUNK_SIZE) as u32 / 4).unwrap();
        session.send_packet(0x8, &data).unwrap();
        stream::Response::Continue(self)
    }
    /// Header received: delegate to the wrapped handler; a `Seek` answer
    /// restarts the stream at the new offset via `Spawn`.
    fn on_header(mut self, header_id: u8, header_data: &[u8], session: &Session) -> stream::Response<Self> {
        match self.handler.on_header(header_id, header_data, session) {
            Response::Continue(handler) => {
                self.handler = handler;
                stream::Response::Continue(self)
            }
            Response::Seek(handler, offset) => {
                self.handler = handler;
                self.offset = offset;
                stream::Response::Spawn(self)
            }
            Response::Close => stream::Response::Close,
        }
    }
    /// Data received: forward it with the current offset, then advance the
    /// offset by the number of bytes consumed.
    fn on_data(mut self, data: &[u8], session: &Session) -> stream::Response<Self> {
        match self.handler.on_data(self.offset, data, session) {
            Response::Continue(handler) => {
                self.handler = handler;
                self.offset += data.len();
                stream::Response::Continue(self)
            }
            Response::Seek(handler, offset) => {
                println!("seek request {}", offset);
                self.handler = handler;
                self.offset = offset;
                stream::Response::Spawn(self)
            }
            Response::Close => stream::Response::Close,
        }
    }
    fn on_close(self, _session: &Session) -> stream::Response<Self> {
        // End of chunk, request a new one
        stream::Response::Spawn(self)
    }
    /// Stream error: the server signals errors past the last chunk, so this
    /// is routed to the handler's `on_eof` rather than `on_error`.
    fn on_error(mut self, session: &Session) -> stream::Response<Self> {
        println!("on_error");
        match self.handler.on_eof(session) {
            Response::Continue(_) => stream::Response::Close,
            Response::Seek(handler, offset) => {
                println!("seek request {}", offset);
                self.handler = handler;
                self.offset = offset;
                stream::Response::Spawn(self)
            }
            Response::Close => stream::Response::Close,
        }
    }
    // Boxed trampolines so `AudioFile` can be used as a trait object.
    fn box_on_create(self: Box<Self>, channel_id: stream::ChannelId, session: &Session) -> stream::Response<Box<stream::Handler>> {
        self.on_create(channel_id, session).boxed()
    }
    fn box_on_header(self: Box<Self>, header_id: u8, header_data: &[u8], session: &Session) -> stream::Response<Box<stream::Handler>> {
        self.on_header(header_id, header_data, session).boxed()
    }
    fn box_on_data(self: Box<Self>, data: &[u8], session: &Session) -> stream::Response<Box<stream::Handler>> {
        self.on_data(data, session).boxed()
    }
    fn box_on_error(self: Box<Self>, session: &Session) -> stream::Response<Box<stream::Handler>> {
        self.on_error(session).boxed()
    }
    fn box_on_close(self: Box<Self>, session: &Session) -> stream::Response<Box<stream::Handler>> {
        self.on_close(session).boxed()
    }
}
|
use crate::participants::viewer_folder::viewer::Viewer;
use actix::prelude::*;
/// Actor message registering a viewer's address under a name; produces no reply.
#[derive(Message)]
#[rtype(result = "()")]
pub struct RegisterAddressGetInfo {
    pub name: String,
    pub addr: Addr<Viewer>,
}
|
//! Tests auto-converted from "sass-spec/spec/non_conformant/mixin/content"
#[allow(unused)]
use super::rsass;
mod arguments;
// From "sass-spec/spec/non_conformant/mixin/content/before_if.hrx"
// Auto-generated spec test: an empty `@content` include directly before a
// declaration must not swallow the declaration (sass/dart-sass#482).
#[test]
fn before_if() {
    assert_eq!(
        rsass(
            "// Regression test for sass/dart-sass#482.\
            \n@mixin outer {\
            \n  a {@content}\
            \n}\
            \n\
            \n@mixin inner {\
            \n  @content;\
            \n}\
            \n\
            \n@include outer {\
            \n  @include inner {}\
            \n  x: y;\
            \n}"
        )
        .unwrap(),
        "a {\
        \n  x: y;\
        \n}\
        \n"
    );
}
// From "sass-spec/spec/non_conformant/mixin/content/recursive.hrx"
// Auto-generated spec test: a mixin with @content may be included again
// inside its own content block.
#[test]
fn recursive() {
    assert_eq!(
        rsass(
            "@mixin span($i) {\
            \n  x: y;\
            \n  @content;\
            \n}\
            \n\
            \n.a {\
            \n  @include span(5) {\
            \n    .inner { @include span(2); }\
            \n  }\
            \n}\
            \n"
        )
        .unwrap(),
        ".a {\
        \n  x: y;\
        \n}\
        \n.a .inner {\
        \n  x: y;\
        \n}\
        \n"
    );
}
|
use crate::db::models::User;
use crate::db::repository::{PostgrSQLUserRepository, UserRepository};
use crate::errors::AuthError;
use crate::utils;
/// Public entry point for the login flow.
/// Builds the production PostgreSQL repository and delegates to `_login`;
/// see `_login` for the actual authentication logic.
///
pub fn login(username: &str, passwd: &str) -> Result<User, AuthError> {
    let repository = PostgrSQLUserRepository {};
    _login(username, passwd, &repository)
}
/// User login
///
/// # Arguments
///
/// * `username` - the username of the user trying to login
///
/// * `passwd` - the password of the user trying to login
///
/// * `repository` - the user repository to interact with
///
/// Returns the authenticated `User`, or `AuthError::LoginError` for both
/// unknown users and wrong passwords (indistinguishable on purpose).
fn _login(
    username: &str,
    passwd: &str,
    repository: &dyn UserRepository,
) -> Result<User, AuthError> {
    // Get the user from the database. A single `match` replaces the old
    // `if let Err(_)` + `unwrap()` pair, removing the panic path entirely.
    let u = match repository.get_user(username) {
        Ok(u) => u,
        Err(_) => {
            // To avoid timing attacks, perform an argon2 hash to "waste"
            // roughly the time a real password verification would take.
            utils::hash(passwd);
            return Err(AuthError::LoginError);
        }
    };
    // Check the password against the stored hash.
    if utils::verify_hash(&u.password, passwd) {
        Ok(u)
    } else {
        Err(AuthError::LoginError)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::db::repository::MockPostgrSQLUserRepository;
    use crate::errors::DBError;
    /// Unknown users must surface the generic login error.
    #[test]
    fn test_login_with_unknown_user() {
        let mut mock = MockPostgrSQLUserRepository::new();
        mock.expect_get_user()
            .returning(|_| Err(DBError::UserNotFound));
        let res = _login("whoamI", "password", &mock);
        assert_eq!(Err(AuthError::LoginError), res);
    }
    /// A wrong password yields the same generic login error.
    #[test]
    fn test_login_with_known_user_but_wrong_password() {
        let mut mock = MockPostgrSQLUserRepository::new();
        let passwd = utils::hash("password");
        mock.expect_get_user()
            .returning(move |_| Ok(User::new("bob", &passwd, "Teacher")));
        let res = _login("bob", "wrong", &mock);
        assert_eq!(Err(AuthError::LoginError), res);
    }
    /// Happy path for a student account.
    #[test]
    fn test_login_with_known_student() {
        let mut mock = MockPostgrSQLUserRepository::new();
        let passwd = utils::hash("password");
        mock.expect_get_user()
            .returning(move |_| Ok(User::new("doran", &passwd, "Student")));
        let res = _login("doran", "password", &mock);
        assert_ne!(Err(AuthError::LoginError), res);
        let u = res.unwrap();
        assert_eq!("doran", u.username);
        assert_eq!("Student", u.role);
        assert!(utils::verify_hash(&u.password, "password"));
    }
    /// Happy path for a teacher account.
    #[test]
    fn test_login_with_known_teacher() {
        let mut mock = MockPostgrSQLUserRepository::new();
        let passwd = utils::hash("password");
        mock.expect_get_user()
            .returning(move |_| Ok(User::new("alexandre", &passwd, "Teacher")));
        let res = _login("alexandre", "password", &mock);
        assert_ne!(Err(AuthError::LoginError), res);
        let u = res.unwrap();
        assert_eq!("alexandre", u.username);
        assert_eq!("Teacher", u.role);
        assert!(utils::verify_hash(&u.password, "password"));
    }
}
|
use async_std;
use crate::heartbeat::Timestamp;
use crate::locks::*;
use std;
use std::collections::{HashMap, HashSet};
use std::fs::{File, OpenOptions};
// Global, lazily-initialized shared state for the membership/heartbeat node.
// Every value is wrapped in RwLockOption so it can be installed after startup.
lazy_static! {
    pub static ref DEBUG: RwLockOption<bool> = RwLockOption::new();
    pub static ref LOG_FILE: RwLockOption<File> = RwLockOption::new();
    pub static ref UDP_SOCKET: RwLockOption<std::net::UdpSocket> = RwLockOption::new();
    pub static ref IS_JOINED: RwLockOption<bool> = RwLockOption::new();
    pub static ref MEMBERSHIP_LIST: RwLockOption<Vec<String>> = RwLockOption::new();
    pub static ref SUCCESSOR_LIST: RwLockOption<Vec<String>> = RwLockOption::new();
    pub static ref PREDECESSOR_LIST: RwLockOption<Vec<String>> = RwLockOption::new();
    // Last heartbeat timestamp seen per predecessor id.
    pub static ref PREDECESSOR_TIMESTAMPS: RwLockOption<HashMap<String, Timestamp>> = RwLockOption::new();
    pub static ref MY_IP_ADDR: RwLockOption<String> = RwLockOption::new();
    pub static ref MY_ID: RwLockOption<String> = RwLockOption::new();
    pub static ref TCP_ADDR: RwLockOption<String> = RwLockOption::new();
    pub static ref SERVER_SOCKET: RwLockOption<async_std::net::TcpListener> = RwLockOption::new();
    pub static ref UDP_TO_TCP_MAP: RwLockOption<HashMap<String, String>> = RwLockOption::new();
    // NOTE(review): presumably file name -> set of owner node ids; confirm.
    pub static ref ALL_FILE_OWNERS: RwLockOption<HashMap<String, HashSet<String>>> = RwLockOption::new();
}
|
use crossbeam::atomic::AtomicCell;
use std::cell::RefCell;
use std::rc::Rc;
// use parking_lot::RefCell
/// Tracks per-column widths for a table, keeping header widths and row
/// widths separately; the effective width of a column is the larger of the two.
#[derive(Default)]
pub struct ColumnWidthsVec {
    widths_hdr: Rc<RefCell<Vec<f32>>>,
    widths: Rc<RefCell<Vec<f32>>>,
}
impl ColumnWidthsVec {
    /// Returns the element-wise maximum of header and row widths,
    /// truncated to the shorter of the two lists.
    pub fn get(&self) -> Vec<f32> {
        let hdr = self.widths_hdr.borrow();
        let rows = self.widths.borrow();
        hdr.iter()
            .zip(rows.iter())
            .map(|(h, r)| h.max(*r))
            .collect()
    }
    /// Replaces the stored header widths with a copy of `widths`.
    pub fn set_hdr(&self, widths: &[f32]) {
        *self.widths_hdr.borrow_mut() = widths.to_vec();
    }
    /// Replaces the stored row widths with a copy of `widths`.
    pub fn set(&self, widths: &[f32]) {
        *self.widths.borrow_mut() = widths.to_vec();
    }
}
/// Fixed-arity, thread-safe variant of `ColumnWidthsVec` backed by
/// lock-free `AtomicCell`s.
pub struct ColumnWidths<const N: usize> {
    widths_hdr: AtomicCell<[f32; N]>,
    widths: AtomicCell<[f32; N]>,
}
impl<const N: usize> ColumnWidths<N> {
    /// Returns the element-wise maximum of header and row widths.
    pub fn get(&self) -> [f32; N] {
        let mut ws = [0.0f32; N];
        let prev_hdr = self.widths_hdr.load();
        let prev = self.widths.load();
        let prevs = prev_hdr.iter().zip(prev).map(|(h, r)| h.max(r));
        for (w, prev) in ws.iter_mut().zip(prevs) {
            *w = prev
        }
        ws
    }
    /// Overwrites the first `widths.len()` header entries (extra entries
    /// beyond N are ignored; missing ones keep their previous value).
    pub fn set_hdr(&self, widths: &[f32]) {
        let mut ws = self.widths_hdr.load();
        for (ix, w) in ws.iter_mut().enumerate() {
            if let Some(new_w) = widths.get(ix).copied() {
                *w = new_w;
            }
        }
        self.widths_hdr.store(ws);
    }
    /// Same as `set_hdr`, but for the row widths.
    pub fn set(&self, widths: &[f32]) {
        let mut ws = self.widths.load();
        for (ix, w) in ws.iter_mut().enumerate() {
            if let Some(new_w) = widths.get(ix).copied() {
                *w = new_w;
            }
        }
        self.widths.store(ws);
    }
}
impl<const N: usize> std::default::Default for ColumnWidths<N> {
    /// All widths start at zero.
    fn default() -> Self {
        let arr = [0.0; N];
        Self {
            widths_hdr: arr.into(),
            widths: arr.into(),
        }
    }
}
/// Adds a right-aligned label at least `width` wide; returns the label's
/// natural (unpadded) width together with the UI response, so the caller
/// can record the widest value seen per column.
fn add_label_width(
    ui: &mut egui::Ui,
    width: f32,
    text: &str,
) -> (f32, egui::Response) {
    let label = egui::Label::new(text);
    // Lay the text out first to learn its natural size.
    let galley = label.layout(ui);
    let size = galley.size();
    let real_width = size.x;
    let resp = ui
        .with_layout(egui::Layout::right_to_left(), |ui| {
            ui.set_min_width(width.max(real_width));
            ui.add(label)
        })
        .response;
    (real_width, resp)
}
/// Renders one grid row of labels (optionally separated), makes the whole
/// row clickable/hoverable under `id`, and returns the measured natural
/// width of each cell so the caller can persist column widths.
///
/// # Panics
/// Panics when `fields` is empty.
pub fn grid_row_label(
    ui: &mut egui::Ui,
    id: egui::Id,
    fields: &[&str],
    with_separator: bool,
    prev_widths: Option<&[f32]>,
) -> egui::InnerResponse<Vec<f32>> {
    assert!(!fields.is_empty());
    let mut row: Option<egui::Response> = None;
    let cols = fields.len();
    // Start from the caller-provided widths (if any) and zero-pad so there is
    // exactly one width per field; `resize` replaces the manual push loop,
    // and `unwrap_or_default` avoids allocating a throwaway Vec eagerly.
    let mut prev_widths = prev_widths.map(<[f32]>::to_vec).unwrap_or_default();
    if prev_widths.len() < cols {
        prev_widths.resize(cols, 0.0);
    }
    let mut widths = vec![0.0f32; cols];
    for (ix, (field, width)) in fields.iter().zip(prev_widths).enumerate() {
        if with_separator {
            // Separators go between cells, i.e. before every cell but the first.
            if let Some(r) = row.as_mut() {
                *r = r.union(ui.separator());
            }
        };
        let (w, resp) = add_label_width(ui, width, field);
        widths[ix] = w;
        // Accumulate one response covering the whole row.
        if let Some(r) = row.as_mut() {
            *r = r.union(resp);
        } else {
            row = Some(resp);
        }
    }
    let row = ui.interact(
        row.unwrap().rect,
        id,
        egui::Sense::click().union(egui::Sense::hover()),
    );
    let visuals = ui.style().interact_selectable(&row, false);
    ui.end_row();
    if row.hovered() {
        // Outline the hovered row.
        let rect = row.rect.expand(visuals.expansion);
        ui.painter().rect_stroke(rect, 0.0, visuals.bg_stroke);
    }
    egui::InnerResponse {
        inner: widths,
        response: row,
    }
}
/// Renders "Top"/"Bottom" buttons and reports which (if either) was clicked
/// this frame as the alignment to scroll toward.
pub fn add_scroll_buttons(ui: &mut egui::Ui) -> Option<egui::Align> {
    ui.horizontal(|ui| {
        let mut r = None;
        if ui.button("Top").clicked() {
            r = Some(egui::Align::TOP);
        }
        if ui.button("Bottom").clicked() {
            r = Some(egui::Align::BOTTOM);
        }
        r
    })
    .inner
}
/// Builds a vertical scroll area, pre-scrolled to the top or past the last of
/// `num_rows` body-text rows when `scroll_align` requests it.
pub fn scrolled_area(
    ui: &mut egui::Ui,
    num_rows: usize,
    scroll_align: Option<egui::Align>,
) -> egui::ScrollArea {
    let row_height = ui.fonts()[egui::TextStyle::Body].row_height();
    let spacing = ui.style().spacing.item_spacing.y;
    let area = egui::ScrollArea::vertical();
    match scroll_align {
        Some(align) => {
            // Only `Align::Max` scrolls to the bottom; every other alignment
            // resets the offset to the top.
            let offset = match align {
                egui::Align::Max => (row_height + spacing) * (num_rows + 1) as f32,
                _ => 0.0,
            };
            area.scroll_offset(offset)
        }
        None => area,
    }
}
|
/// Minimal payload type used to demonstrate borrow lifetimes.
struct FOO {
    /// Example data field.
    f: i32,
}
/// Holds a shared borrow of a `FOO`; the referent must outlive `'a`.
struct Link<'a> {
    link: &'a FOO,
}
/// Repoints `x` at `y`; `y` must live at least as long as the link's `'a`.
fn store_foo<'a>(x: &mut Link<'a>, y: &'a FOO) {
    x.link = y;
}
fn main() {
    let a = FOO { f: 30 };
    // `b` must outlive the final use of `x.link`. The original declared `b`
    // inside an inner block that ended before the `println!`, so the program
    // failed to compile with E0597 ("`b` does not live long enough").
    let b = FOO { f: 12 };
    let x = &mut Link { link: &a };
    store_foo(x, &b);
    println!("{}", x.link.f);
}
|
use bevy::prelude::*;
use crate::{
collider::{Collider,BallHitEvent},
game_data::LevelFinishedEvent
};
/// Component marking an entity that can be destroyed by ball hits.
pub struct Destroyable {
    /// Remaining hit points; the entity is despawned once this reaches zero.
    pub hp: u16,
}
/// Base brick width in world units (scaled by `size_mult` at spawn time).
pub const BRICK_SIZE_X: f32 = 150.;
/// Base brick height in world units (scaled by `size_mult` at spawn time).
const BRICK_SIZE_Y: f32 = 70.;
/// Spawns one destroyable brick sprite at `(x, y)` with `hp` hit points.
/// `size_mult` scales the base `BRICK_SIZE_X`/`BRICK_SIZE_Y` footprint.
pub fn spawn_brick (commands: &mut Commands, material: Handle<ColorMaterial>, x: f32, y: f32, hp: u16, size_mult: f32) {
    commands
        .spawn(SpriteComponents {
            material,
            sprite: Sprite::new(Vec2::new(BRICK_SIZE_X * size_mult, BRICK_SIZE_Y * size_mult)),
            transform: Transform::from_translation(
                // z = 0: bricks sit on the default 2-D plane.
                Vec3::new(x, y, 0.)
            ),
            ..Default::default()
        })
        .with(Collider::Destroyable)
        .with(Destroyable { hp });
}
pub fn handle_destroyable_hit (
mut commands: Commands,
mut reader: Local<EventReader<BallHitEvent>>,
ball_hit_events: Res<Events<BallHitEvent>>,
mut level_finished_events: ResMut<Events<LevelFinishedEvent>>,
mut destroyable_q: Query<&mut Destroyable>
) {
if let Some(BallHitEvent::Destroyable(entity)) = reader.iter(&ball_hit_events).next() {
// This should be always true
let mut destroyable = destroyable_q.get_mut(*entity).unwrap();
destroyable.hp -= 1;
if destroyable.hp == 0 {
commands.despawn(*entity);
// We calculate if there are any destroyables left, if not we won the level
// We subtract 1 because the last entity despawn will execure after this query
let destroyables_left = destroyable_q.iter_mut().count() - 1;
println!("Destroyables left: {}", destroyables_left);
if destroyables_left == 0 {
level_finished_events.send(LevelFinishedEvent::Won);
}
}
}
} |
/**
<summary>
All defined in MSDN: [Window Constants (Windows)]/[Window Styles]
The following are the window styles. After the window has been created,
these styles cannot be modified, except as noted.
</summary>
<applies-to>Desktop apps only</applies-to>
<requirements>
<minimum-client>Windows 2000 Professional</minimum-client>
<minimum-server>Windows 2000 Server</minimum-server>
<header>Winuser.h (include Windows.h)</header>
</requirements>
**/
use super::super::prelude::WindowStyle;
/**
The window is an overlapped window.
An overlapped window has a title bar and a border. Same as the **WS_TILED** style.
**/
pub static OverLapped : WindowStyle = 0x00000000;
/**
The window is a pop-up window. This style cannot be used with the **WS_CHILD** style.
**/
pub static Popup : WindowStyle = 0x80000000;
/**
The window is a child window. A window with this style cannot have a menu bar.
This style cannot be used with the **WS_POPUP** style.
**/
pub static Child : WindowStyle = 0x40000000;
/**
The window is initially minimized. Same as the **WS_ICONIC** style.
**/
pub static Minimize : WindowStyle = 0x20000000;
/**
The window is initially visible.
This style can be turned on and off
by using the [!MSDN=ShowWindow] or [!MSDN=SetWindowPos] function.
**/
pub static Visible : WindowStyle = 0x10000000;
/**
The window is initially disabled. A disabled window cannot receive input from the user.
To change this after a window has been created, use the [!MSDN=EnableWindow] function.
**/
pub static Disabled : WindowStyle = 0x08000000;
/**
Clips child windows relative to each other;
that is, when a particular child window receives a [!MSDN=WM_PAINT] message,
the **WS_CLIPSIBLINGS** style clips all other overlapping child windows out of
the region of the child window to be updated.
If **WS_CLIPSIBLINGS** is not specified and child windows overlap, it is possible,
when drawing within the client area of a child window,
to draw within the client area of a neighboring child window.
**/
pub static ClipSiblings : WindowStyle = 0x04000000;
/**
Excludes the area occupied by child windows when drawing occurs within the parent window.
This style is used when creating the parent window.
**/
pub static ClipChildren : WindowStyle = 0x02000000;
/**
The window is initially maximized.
**/
pub static Maximize : WindowStyle = 0x01000000;
/**
The window has a title bar (includes the **WS_BORDER** style).
Caption = Border | DlgFrame
**/
pub static Caption : WindowStyle = 0x00C00000;
/**
The window has a thin-line border.
**/
pub static Border : WindowStyle = 0x00800000;
/**
The window has a border of a style typically used with dialog boxes.
A window with this style cannot have a title bar.
**/
pub static DlgFrame : WindowStyle = 0x00400000;
/**
The window has a vertical scroll bar.
**/
pub static VScroll : WindowStyle = 0x00200000;
/**
The window has a horizontal scroll bar.
**/
pub static HScroll : WindowStyle = 0x00100000;
/**
The window has a window menu on its title bar.
The **WS_CAPTION** style must also be specified.
**/
pub static SysMenu : WindowStyle = 0x00080000;
/**
The window has a sizing border. Same as the **WS_SIZEBOX** style.
**/
pub static ThickFrame : WindowStyle = 0x00040000;
/**
The window is the first control of a group of controls.
The group consists of this first control and all controls defined after it,
up to the next control with the **WS_GROUP** style.
The first control in each group usually has the **WS_TABSTOP** style
so that the user can move from group to group.
The user can subsequently change the keyboard focus from one control in the group
to the next control in the group by using the direction keys.
You can turn this style on and off to change dialog box navigation.
To change this style after a window has been created, use the [!MSDN=SetWindowLong] function.

NOTE: intentionally shares the numeric value 0x00020000 with `MinimizeBox`,
mirroring the Windows headers where **WS_GROUP** == **WS_MINIMIZEBOX**.
**/
pub static Group : WindowStyle = 0x00020000;
/**
The window is a control that can receive the keyboard focus when the user presses the TAB key.
Pressing the TAB key changes the keyboard focus to the next control with the **WS_TABSTOP** style.
You can turn this style on and off to change dialog box navigation.
To change this style after a window has been created, use the [!MSDN=SetWindowLong] function.
For user-created windows and modeless dialogs to work with tab stops,
alter the message loop to call the [!MSDN=IsDialogMessage] function.

NOTE: intentionally shares the numeric value 0x00010000 with `MaximizeBox`,
mirroring the Windows headers where **WS_TABSTOP** == **WS_MAXIMIZEBOX**.
**/
pub static TabStop : WindowStyle = 0x00010000;
/**
The window has a minimize button. Cannot be combined with the **WS_EX_CONTEXTHELP** style.
The **WS_SYSMENU** style must also be specified.
**/
pub static MinimizeBox : WindowStyle = 0x00020000;
/**
The window has a maximize button. Cannot be combined with the **WS_EX_CONTEXTHELP** style.
The **WS_SYSMENU** style must also be specified.
**/
pub static MaximizeBox : WindowStyle = 0x00010000;
/**
The window is an overlapped window.
An overlapped window has a title bar and a border. Same as the **WS_OVERLAPPED** style.
**/
pub static Tiled : WindowStyle = 0x00000000;
/**
The window is initially minimized. Same as the **WS_MINIMIZE** style.
**/
pub static IconIC : WindowStyle = 0x20000000;
/**
The window has a sizing border. Same as the **WS_THICKFRAME** style.
**/
pub static SizeBox : WindowStyle = 0x00040000;
/**
The window is an overlapped window. Same as the **WS_OVERLAPPEDWINDOW** style.

Combination: OverLapped | Caption | SysMenu | ThickFrame | MinimizeBox | MaximizeBox.
(Spelled with literals because a `static` initializer cannot read another `static`.)
**/
pub static TiledWindow : WindowStyle = (0x00000000u32 | 0x00C00000u32 | 0x00080000u32 |
    0x00040000u32 | 0x00020000u32 | 0x00010000u32 );
/**
The window is an overlapped window. Same as the **WS_TILEDWINDOW** style.

Combination: OverLapped | Caption | SysMenu | ThickFrame | MinimizeBox | MaximizeBox.
**/
pub static OverLappedWindow : WindowStyle = (0x00000000u32 | 0x00C00000u32 | 0x00080000u32 |
    0x00040000u32 | 0x00020000u32 | 0x00010000u32 );
/**
The window is a pop-up window.
The **WS_CAPTION** and **WS_POPUPWINDOW** styles must be combined to make the window menu visible.

Combination: Popup | Border | SysMenu.
**/
pub static PopupWindow : WindowStyle = (0x80000000u32 | 0x00800000u32 | 0x00080000u32);
/**
Same as the **WS_CHILD** style.
**/
pub static ChildWindow : WindowStyle = 0x40000000;
use std::f32;
/// Demonstrates the shape types by printing area/perimeter figures for a
/// rectangle, a square, a circle and a triangle.
fn main() {
    // Input dimensions for each shape under test.
    let rect_height = 12;
    let rect_width = 20;
    let square_size = 10;
    let circle_radius = 15.0;
    let triangle_sides = (10.0, 6.0, 7.0);

    let rect = Rectangle {
        width: rect_width,
        height: rect_height,
    };
    println!("Area of rectangle of height - {} and width - {} is {}", rect_height, rect_width, rect.area());
    println!("Perimeter of rectangle of height - {} and width - {} is {}", rect_height, rect_width, rect.perimeter());

    let sq = Rectangle::square(square_size);
    println!("Area of square of side - {} is {}", square_size, sq.area());
    println!("Perimeter of square of side - {} is {}", square_size, sq.perimeter());

    let circle = Circle { radius: circle_radius };
    println!("Area of circle of radius - {} is {:.3}", circle_radius, circle.area());
    println!("Circumference of circle of radius - {} is {:.3}", circle_radius, circle.circumference());

    let (side1, side2, side3) = triangle_sides;
    let triangle = Triangle { side1, side2, side3 };
    println!("Area of triangle of sides - {}, {}, {} is {:.3}", triangle_sides.0, triangle_sides.1, triangle_sides.2,
             triangle.area());
}
/// A rectangle with integer side lengths.
struct Rectangle {
    width: u32,
    height: u32,
}
impl Rectangle {
    /// Area: width × height.
    fn area(&self) -> u32 {
        self.height * self.width
    }
    /// Perimeter: sum of all four side lengths.
    fn perimeter(&self) -> u32 {
        (self.width + self.height) * 2
    }
    /// A square is a rectangle whose sides are equal.
    fn square(size: u32) -> Rectangle {
        Rectangle {
            width: size,
            height: size,
        }
    }
}
/// A circle described by its radius.
struct Circle {
    radius: f32,
}
impl Circle {
    /// Area: π·r².
    fn area(&self) -> f32 {
        let r = self.radius;
        f32::consts::PI * r * r
    }
    /// Circumference: 2·π·r.
    fn circumference(&self) -> f32 {
        2.0 * f32::consts::PI * self.radius
    }
}
/// A triangle described by its three side lengths.
struct Triangle {
    side1: f32,
    side2: f32,
    side3: f32,
}
impl Triangle {
    /// Heron's formula: area = sqrt(s·(s−a)·(s−b)·(s−c)) with s = (a+b+c)/2.
    ///
    /// The intermediates are computed in f64 (the return type) instead of f32,
    /// so precision is not discarded before the square root, and `max(0.0)`
    /// keeps degenerate/thin triangles — where rounding can drive the product
    /// slightly negative — from producing NaN.
    fn area(&self) -> f64 {
        let (a, b, c) = (self.side1 as f64, self.side2 as f64, self.side3 as f64);
        let s = (a + b + c) / 2.0;
        (s * (s - a) * (s - b) * (s - c)).max(0.0).sqrt()
    }
}
|
//! This module contains all the logic dealing with images:
//! Parsing the config file data, shuffling pixels of big images,
//! ordering pixels of small images and sorting the signatures to the end.
use crate::ascii_art::DEFAULT_IMAGES;
use crate::ascii_art::IMAGE_KNOWN_SIGNATURES;
use crate::dictionary::ConfigParseError;
use crate::game::Game;
use crate::LIVES;
use rand::seq::SliceRandom;
use rand::thread_rng;
use serde_derive::Deserialize;
use std::cmp::{Ord, Ordering};
use std::fmt;
/// Default game mode. Can be changed in the configuration file.
const DEFAULT_REWARDING_SCHEME: RewardingScheme = RewardingScheme::UnhideWhenGuessedChar;
/// Threshold deciding from how many characters on an image is considered to be "big".
/// Big images are disclosed with another algorithm (shuffled instead of sorted).
/// This is just big enough that the gallows image stays small.
const BIG_IMAGE: usize = 60; // sort algorithm <-> random algorithm
/// A game mode defining how the ASCII-art image will be disclosed progressively.
#[derive(Clone, Debug, PartialEq)]
pub enum RewardingScheme {
    /// Game mode that is used together with the traditional gallows image (the gallows image
    /// is not built in, but can be added in the configuration file). The image is disclosed
    /// piecemeal after each wrong guess.
    UnhideWhenLostLife,
    /// Default game mode. The image is disclosed piecemeal after each right guess.
    UnhideWhenGuessedChar,
}
/// One character of the ASCII art image.
#[derive(Eq, PartialEq, Debug, Copy, Clone)] // `Ord` is implemented manually below.
pub struct ImChar {
    /// `(x, y)` position of the character on the canvas.
    pub point: (u8, u8),
    /// The character itself.
    pub code: char,
}

/// Render just the character, ignoring its position.
impl fmt::Display for ImChar {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "{}", self.code)
    }
}

/// Delegate the comparison to the total order defined by `Ord`.
impl PartialOrd for ImChar {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// `Ord` enables us to `v.sort()` the image characters.
impl Ord for ImChar {
    /// Pixels close to the upper-left corner compare as "small".
    fn cmp(&self, other: &Self) -> Ordering {
        // Squared Euclidean distance from the origin is the sort key.
        fn weight(ic: &ImChar) -> isize {
            let (x, y) = ic.point;
            (x as isize) * (x as isize) + (y as isize) * (y as isize)
        }
        weight(self).cmp(&weight(other))
    }
}
#[derive(Clone, Debug, PartialEq)]
/// An ASCII-art image.
pub struct Image {
    /// All pixels of the image; their order determines the disclosure order.
    pub ichars: Vec<ImChar>,
    /// `(width, height)` of the canvas in characters.
    pub dimension: (u8, u8),
    /// How many leading `ichars` are rendered by `Display`.
    pub visible_points: usize,
    /// Game mode controlling when pixels are disclosed.
    pub rewarding_scheme: RewardingScheme,
}
/// Format an image.
impl fmt::Display for Image {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let x_max = self.dimension.0 as usize;
        let y_max = self.dimension.1 as usize;
        // Row-major buffer: `x_max` columns plus one trailing '\n' per row.
        let mut i = vec![' '; ((x_max + 1) * y_max) as usize];
        for y in 0..y_max {
            i[((x_max + 1) * y + x_max) as usize] = '\n';
        }
        // Plot only the first `visible_points` pixels; the rest stay hidden.
        for ic in self.ichars.iter().take(self.visible_points) {
            let &ImChar {
                point: (x, y),
                code,
            } = ic;
            i[(x as usize + y as usize * (x_max + 1))] = code;
        }
        write!(f, "{}", i.into_iter().collect::<String>())
    }
}
impl Image {
    /// Returns a random built-in image.
    pub fn new() -> Result<Self, ConfigParseError> {
        let mut rng = thread_rng();
        Self::from_yaml((DEFAULT_IMAGES).choose(&mut rng).unwrap())
    }
    /// Constructor reading image data from YAML configuration files.
    ///
    /// The optional `traditional` flag selects `UnhideWhenLostLife`
    /// (gallows-style) when true; absent or false falls back to the default
    /// rewarding scheme.
    pub fn from_yaml(input: &str) -> Result<Self, ConfigParseError> {
        #[derive(Debug, PartialEq, Deserialize)]
        pub struct RawImage {
            image: Option<String>,
            traditional: Option<bool>,
        }
        // Strip an optional UTF-8 byte-order mark before parsing.
        let input = input.trim_start_matches('\u{feff}');
        let raw: RawImage = serde_yaml::from_str(input)?;
        let (image, rewarding_scheme) = match raw {
            RawImage { image: None, .. } => return Err(ConfigParseError::NoImageData),
            RawImage {
                image: Some(i),
                traditional: None,
            } => (i, DEFAULT_REWARDING_SCHEME),
            RawImage {
                image: Some(i),
                traditional: Some(r),
            } => (
                i,
                if r {
                    RewardingScheme::UnhideWhenLostLife
                } else {
                    RewardingScheme::UnhideWhenGuessedChar
                },
            ),
        };
        Self::from(&image, rewarding_scheme)
    }
    #[inline]
    /// This constructor takes a pure ASCII, non-escaped, multiline image string.
    ///
    /// Known signatures are separated from the picture, the remaining pixels
    /// are sorted (small images) or shuffled (big images), and the signature
    /// pixels are appended last so they are disclosed at the very end.
    /// Returns `ConfigParseError::NoImageData` if no pixel remains.
    pub fn from(image: &str, rewarding_scheme: RewardingScheme) -> Result<Self, ConfigParseError> {
        let mut ascii: Vec<ImChar> = Vec::new();
        let mut signature: Vec<ImChar> = Vec::new();
        // Create a string of `' '` with length of longest `IMAGE_KNOWN_SIGNATURES`.
        let mut spaces = String::new();
        let longest = IMAGE_KNOWN_SIGNATURES
            .iter()
            .map(|s| s.len())
            .max()
            .unwrap();
        for _ in 0..longest {
            spaces.push(' ');
        }
        for (y, line) in image.lines().enumerate() {
            // Blank out every known signature so positions of the remaining
            // picture characters are unchanged.
            let mut ascii_line = line.to_owned();
            for sig in IMAGE_KNOWN_SIGNATURES {
                // `spaces` has the same length than `sig`.
                let short_spaces = &spaces[..sig.len()];
                debug_assert_eq!(sig.len(), short_spaces.len());
                ascii_line = ascii_line.replace(sig, short_spaces);
            }
            debug_assert_eq!(line.len(), ascii_line.len());
            // Generate `ImChar` pixel from `ascii_line`.
            let mut ii: Vec<_> = ascii_line
                .char_indices()
                // consider only chars != ' '
                .filter(|&(_, c)| c != ' ')
                // save in ImChar object
                .map(|(x, c)| ImChar {
                    point: ((x) as u8, y as u8),
                    code: c,
                })
                .collect();
            ascii.append(&mut ii);
            // Check what we have changed and generate
            // `signature_line` (the characters that were blanked out above).
            let mut signature_line = String::new();
            for (l, a) in line.chars().zip(ascii_line.chars()) {
                if l == a {
                    // Nothing changed here.
                    signature_line.push(' ');
                } else {
                    signature_line.push(l);
                }
            }
            debug_assert_eq!(signature_line.chars().count(), ascii_line.chars().count());
            // Generate `ImChar` pixel from `signature_line`.
            let mut ii: Vec<_> = signature_line
                .char_indices()
                // consider only chars != ' '
                .filter(|&(_, c)| c != ' ')
                // save in ImChar object
                .map(|(x, c)| ImChar {
                    point: ((x) as u8, y as u8),
                    code: c,
                })
                .collect();
            signature.append(&mut ii);
        }
        // Order or shuffle pixel in `ascii`
        if ascii.len() <= BIG_IMAGE {
            ascii.sort(); // Sort algorithm, see "impl Ord for ImageChar"
        } else {
            let mut rng = thread_rng();
            (&mut ascii).shuffle(&mut rng); // points appear randomly.
        }
        // Append `signatures` at the end of `ascii`.
        ascii.append(&mut signature);
        // Find the dimensions of the whole.
        let dimension = if !ascii.is_empty() {
            let mut x_max = 0;
            let mut y_max = 0;
            for i in &ascii {
                let &ImChar { point: (x, y), .. } = i;
                if x > x_max {
                    x_max = x
                };
                if y > y_max {
                    y_max = y
                };
            }
            // We know that there is at least one char.
            (x_max + 1, y_max + 1)
        } else {
            (0, 0)
        };
        // Find the number of pixels.
        let visible_points = ascii.len();
        if ascii.is_empty() {
            Err(ConfigParseError::NoImageData)
        } else {
            Ok(Self {
                ichars: ascii,
                dimension,
                visible_points,
                rewarding_scheme,
            })
        }
    }
    /// Discloses parts of the image according to the course of the play.
    pub fn update(&mut self, game: &Game) {
        match self.rewarding_scheme {
            RewardingScheme::UnhideWhenGuessedChar => {
                // Progress follows how many secret characters are still hidden.
                if game.lifes != 0 {
                    self.hide((game.secret.hidden_chars(), game.secret.chars_to_guess()));
                }
            }
            RewardingScheme::UnhideWhenLostLife => {
                // Progress follows the number of lives lost.
                self.hide((game.lifes as usize, LIVES as usize));
            }
        };
    }
    /// Sets how much of the image will be disclosed next time the image is rendered.
    fn hide(&mut self, fraction: (usize, usize)) {
        let l = self.ichars.len();
        // Maps the remaining fraction `n/d` linearly onto [l/6, l]: even at the
        // start of the game (n == d) a sixth of the pixels is already visible.
        let as_points = |(n, d)| (5 * l * (d - n) as usize / d as usize + l) / 6;
        // silently ignore division by zero
        if fraction.1 > 0 {
            self.visible_points = as_points(fraction);
        };
    }
}
// *******************************
#[cfg(test)]
mod tests {
    use super::DEFAULT_REWARDING_SCHEME;
    use super::{ImChar, Image};
    use crate::dictionary::ConfigParseError;
    // Round-trip: build an image from a raw ASCII string and compare the
    // rendered `Display` output.
    #[test]
    fn test_image_from() {
        let config: &str = r#"
 >o)
(_> <o)
   (_>
"#;
        let expected: &str = " \n>o) \n(_> <o)\n (_>\n";
        let image = Image::from(
            &config,
            crate::image::RewardingScheme::UnhideWhenGuessedChar,
        )
        .unwrap();
        assert!(image.visible_points > 0);
        assert_eq!(format!("{}", image), expected);
    }
    // Non-YAML input must surface a `NotInYamlFormat` parse error.
    #[test]
    fn test_image_yaml_error() {
        let config: &str = "this is no image";
        let image = Image::from_yaml(&config).unwrap_err();
        //println!("{:?}",image);
        assert!(matches!(image, ConfigParseError::NotInYamlFormat(_)));
    }
    /// Test image parsing of configuration file data
    #[test]
    fn test_image_parser_syntax() {
        let config: &str = r#"image: |1
 ab
 c e
 df"#;
        let image = Image::from_yaml(&config);
        //println!("{:?}",image);
        // Note: pixels are sorted by distance from the upper-left corner.
        let expected = Ok(Image {
            ichars: [
                ImChar {
                    point: (0, 0),
                    code: 'a',
                },
                ImChar {
                    point: (1, 0),
                    code: 'b',
                },
                ImChar {
                    point: (0, 1),
                    code: 'c',
                },
                ImChar {
                    point: (0, 2),
                    code: 'd',
                },
                ImChar {
                    point: (2, 1),
                    code: 'e',
                },
                ImChar {
                    point: (1, 2),
                    code: 'f',
                },
            ]
            .to_vec(),
            dimension: (3, 3),
            visible_points: 6,
            rewarding_scheme: DEFAULT_REWARDING_SCHEME,
        });
        assert_eq!(image, expected);
    }
    /// Is non image data ignored?
    #[test]
    fn test_image_parser_syntax_ignore() {
        let config: &str = r#"image: |1
 ab
 c
# Comment"#;
        let image = Image::from_yaml(&config).unwrap();
        //println!("{:?}",image);
        let expected = Image {
            ichars: [
                ImChar {
                    point: (0, 0),
                    code: 'a',
                },
                ImChar {
                    point: (1, 0),
                    code: 'b',
                },
                ImChar {
                    point: (0, 1),
                    code: 'c',
                },
            ]
            .to_vec(),
            dimension: (2, 2),
            visible_points: 3,
            rewarding_scheme: DEFAULT_REWARDING_SCHEME,
        };
        assert_eq!(image, expected);
    }
    // Round-trip through the YAML constructor and the `Display` renderer.
    #[test]
    fn test_image_from_yaml() {
        let config: &str = r#"image: |1
 >o)
 (_> <o)
    (_>
"#;
        let expected: &str = ">o) \n(_> <o)\n (_>\n";
        let image = Image::from_yaml(&config).unwrap();
        assert!(image.visible_points > 0);
        assert_eq!(format!("{}", image), expected);
    }
    // Checks `hide()` maps the remaining fraction onto [len/6, len].
    #[test]
    fn test_yaml_image_parser_disclose() {
        //
        // Test yaml.
        let config: &str = "image: |1\n abdef\n c";
        let mut image = Image::from_yaml(&config).unwrap();
        //println!("{:?}",image);
        let expected = Image {
            ichars: [
                ImChar {
                    point: (0, 0),
                    code: 'a',
                },
                ImChar {
                    point: (1, 0),
                    code: 'b',
                },
                ImChar {
                    point: (0, 1),
                    code: 'c',
                },
                ImChar {
                    point: (2, 0),
                    code: 'd',
                },
                ImChar {
                    point: (3, 0),
                    code: 'e',
                },
                ImChar {
                    point: (4, 0),
                    code: 'f',
                },
            ]
            .to_vec(),
            dimension: (5, 2),
            visible_points: 6,
            rewarding_scheme: DEFAULT_REWARDING_SCHEME,
        };
        assert_eq!(image, expected);
        image.hide((6, 6));
        assert_eq!(image.visible_points, 1);
        image.hide((2, 6));
        assert_eq!(image.visible_points, 4);
        image.hide((0, 6));
        assert_eq!(image.visible_points, 6);
    }
    // Signature characters must be appended after all picture characters so
    // they are disclosed last.
    #[test]
    fn disclose_signature_last() {
        let image_str = "image: |1\n jensB\n AlisC";
        let image = Image::from_yaml(&image_str).unwrap();
        //println!("{:?}",image);
        let expected = Image {
            ichars: [
                // These are regular image chars.
                ImChar {
                    point: (0, 1),
                    code: 'A',
                },
                ImChar {
                    point: (4, 0),
                    code: 'B',
                },
                ImChar {
                    point: (4, 1),
                    code: 'C',
                },
                // These chars are signature chars.
                ImChar {
                    point: (0, 0),
                    code: 'j',
                },
                ImChar {
                    point: (1, 0),
                    code: 'e',
                },
                ImChar {
                    point: (2, 0),
                    code: 'n',
                },
                ImChar {
                    point: (3, 0),
                    code: 's',
                },
                ImChar {
                    point: (1, 1),
                    code: 'l',
                },
                ImChar {
                    point: (2, 1),
                    code: 'i',
                },
                ImChar {
                    point: (3, 1),
                    code: 's',
                },
            ]
            .to_vec(),
            dimension: (5, 2),
            visible_points: 10,
            rewarding_scheme: DEFAULT_REWARDING_SCHEME,
        };
        assert_eq!(image, expected);
    }
}
|
use packed_simd::f32x8;
use ndarray::{ArrayView,ArrayViewMut,Ix2};
use crate::my_ndarray;
use crate::vectorisation;
use std::slice::{from_raw_parts, from_raw_parts_mut};
#[cfg(test)]
use crate::naive_sequential;
#[cfg(test)]
use ndarray::{linalg,Array};
#[cfg(test)]
use rand::Rng;
/// Computes `into[i] += a * b[i]` using 8-lane SIMD for the bulk of the slice
/// and a scalar loop for the `len % 8` tail.
///
/// The unused `_*width`/`_*height` parameters keep the signature compatible
/// with the kernel interface expected by `vectorisation::multiply_add`.
///
/// NOTE(review): assumes `into.len() >= b.len()`; only the first `b.len()`
/// elements of `into` are updated — TODO confirm against callers.
fn multiply_add_packed_sim(
    // The original bound this as `mut into`, but the binding itself is never
    // reassigned — only the referenced slice is mutated — so `mut` was noise.
    into: &mut [f32],
    a: f32,
    b: &[f32],
    _awidth: usize,
    _aheight: usize,
    _bwidth: usize,
    _bheight: usize,
    _intowidth: usize,
    _intoheight: usize,
) {
    // Broadcast the scalar once instead of per iteration.
    let achunk = f32x8::splat(a);
    b.chunks_exact(8)
        .zip(into.chunks_exact_mut(8))
        .for_each(|(x, y)| {
            let chunkx = f32x8::from_slice_unaligned(x);
            let chunky = f32x8::from_slice_unaligned(y);
            // fused multiply-add: a*b + into
            let res = chunkx.mul_add(achunk, chunky);
            res.write_to_slice_unaligned(y);
        });
    // Scalar fallback for the final `len % 8` elements.
    let len = b.len();
    let calc_len = len - len % 8;
    b[calc_len..len]
        .iter()
        .zip(into[calc_len..len].iter_mut())
        .for_each(|(x, y)| *y = a * x + *y);
}
/// Multiplies `a` × `b` into `output` by handing raw slices of the three
/// views to the SIMD kernel via `vectorisation::multiply_add`.
///
/// # Panics
/// Panics if the shapes are not conformant (`a` is m×k, `b` is k×n,
/// `output` is m×n).
pub fn mult_faster_from_ndarray(a: ArrayView<f32,Ix2> ,b: ArrayView<f32,Ix2>,output: &mut ArrayViewMut<f32,Ix2>) {
    let (raw_ptr_a, len_a) = my_ndarray::view_ptr(a);
    let stridesa = a.strides();
    let (raw_ptr_b, len_b) = my_ndarray::view_ptr(b);
    let stridesb = b.strides();
    let raw_ptr_r = output.as_mut_ptr();
    let dimr = output.shape();
    let dima = a.shape();
    let dimb = b.shape();
    // Shape checks: a is (m × k), b is (k × n), output is (m × n).
    assert_eq!(dima[0],dimr[0]);
    assert_eq!(dima[1],dimb[0]);
    assert_eq!(dimb[1],dimr[1]);
    let (row, col) = (dimr[0], dimr[1]);
    let strides = output.strides();
    // Index of one-past the last addressable element of `output`'s backing
    // memory. NOTE(review): assumes positive, row-major strides — confirm
    // this holds for every caller (e.g. views produced by slicing).
    let len_r = (row - 1) * strides[0] as usize + col;
    // SAFETY: the pointers and lengths come from the live views above, which
    // outlive these borrows; `len_*` must not exceed the views' allocations.
    let slicea = unsafe { from_raw_parts(raw_ptr_a, len_a) };
    let sliceb = unsafe { from_raw_parts(raw_ptr_b, len_b) };
    let mut slicer = unsafe { from_raw_parts_mut(raw_ptr_r, len_r) };
    vectorisation::multiply_add(
        &mut slicer,
        &slicea,
        &sliceb,
        dima[1],
        dima[0],
        dimb[1],
        dimb[0],
        dimr[1],
        dimr[0],
        stridesa[0] as usize,
        stridesb[0] as usize,
        strides[0] as usize,
        multiply_add_packed_sim,
    );
}
// Verifies the blocked multiplication against ndarray's reference
// `general_mat_mul` on random 1000×1000 inputs.
#[test]
fn test_mult_blocked() {
    let height = 1000;
    let width = 1000;
    let mut rng = rand::thread_rng();
    let random = rng.gen_range(0.0, 1.0);
    let an = Array::from_shape_fn((height, width), |(i, j)| {
        (((j + i * width) % 3) as f32) + random
    });
    let bn = Array::from_shape_fn((width, height), |(i, j)| {
        (((j + 7 + i * height) % 3) as f32) - random
    });
    let mut dn = Array::zeros((height, height));
    // 300×300 blocks; the kernel is applied per block pair.
    let (avec, bvec, rvec) = naive_sequential::cut_in_blocks(an.view(), bn.view(), dn.view_mut(),300,300);
    naive_sequential::mult_blocks(avec, bvec, rvec,|a,b,mut c| mult_faster_from_ndarray(a, b, &mut c));
    let mut verif = Array::zeros((height, height));
    linalg::general_mat_mul(1.0, &an, &bn, 1.0, &mut verif);
    assert_abs_diff_eq!(
        dn.as_slice().unwrap(),
        verif.as_slice().unwrap(),
        epsilon = 1e-1f32
    );
}
// Verifies the unblocked SIMD multiplication against ndarray's reference
// `general_mat_mul` on random 1000×1000 inputs.
#[test]
fn test_mult_faster() {
    let height = 1000;
    let width = 1000;
    let mut rng = rand::thread_rng();
    let random = rng.gen_range(0.0, 1.0);
    let an = Array::from_shape_fn((height, width), |(i, j)| {
        (((j + i * width) % 3) as f32) + random
    });
    let bn = Array::from_shape_fn((width, height), |(i, j)| {
        (((j + 7 + i * height) % 3) as f32) - random
    });
    let mut dn = Array::zeros((height, height));
    mult_faster_from_ndarray(an.view(), bn.view(), &mut dn.view_mut());
    let mut verif = Array::zeros((height, height));
    linalg::general_mat_mul(1.0, &an, &bn, 1.0, &mut verif);
    assert_abs_diff_eq!(
        dn.as_slice().unwrap(),
        verif.as_slice().unwrap(),
        epsilon = 1e-1f32
    );
}
|
// NOTE(review): this file appears to be generated by svd2rust (see the
// register-level doc below); prefer regenerating from the SVD over hand edits.
#[doc = "Register `PDCRC` reader"]
pub type R = crate::R<PDCRC_SPEC>;
#[doc = "Register `PDCRC` writer"]
pub type W = crate::W<PDCRC_SPEC>;
#[doc = "Field `PD0` reader - PD0"]
pub type PD0_R = crate::BitReader<PD0_A>;
#[doc = "PD0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PD0_A {
    #[doc = "0: Disable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    Disabled = 0,
    #[doc = "1: Enable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    Enabled = 1,
}
impl From<PD0_A> for bool {
    #[inline(always)]
    fn from(variant: PD0_A) -> Self {
        variant as u8 != 0
    }
}
impl PD0_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PD0_A {
        match self.bits {
            false => PD0_A::Disabled,
            true => PD0_A::Enabled,
        }
    }
    #[doc = "Disable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == PD0_A::Disabled
    }
    #[doc = "Enable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == PD0_A::Enabled
    }
}
#[doc = "Field `PD0` writer - PD0"]
pub type PD0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PD0_A>;
impl<'a, REG, const O: u8> PD0_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Disable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(PD0_A::Disabled)
    }
    #[doc = "Enable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(PD0_A::Enabled)
    }
}
// PD1..PD6 share PD0's reader/writer types: all low bits have identical
// enable/disable semantics.
#[doc = "Field `PD1` reader - PD1"]
pub use PD0_R as PD1_R;
#[doc = "Field `PD2` reader - PD2"]
pub use PD0_R as PD2_R;
#[doc = "Field `PD3` reader - PD3"]
pub use PD0_R as PD3_R;
#[doc = "Field `PD4` reader - PD4"]
pub use PD0_R as PD4_R;
#[doc = "Field `PD5` reader - PD5"]
pub use PD0_R as PD5_R;
#[doc = "Field `PD6` reader - PD6"]
pub use PD0_R as PD6_R;
#[doc = "Field `PD1` writer - PD1"]
pub use PD0_W as PD1_W;
#[doc = "Field `PD2` writer - PD2"]
pub use PD0_W as PD2_W;
#[doc = "Field `PD3` writer - PD3"]
pub use PD0_W as PD3_W;
#[doc = "Field `PD4` writer - PD4"]
pub use PD0_W as PD4_W;
#[doc = "Field `PD5` writer - PD5"]
pub use PD0_W as PD5_W;
#[doc = "Field `PD6` writer - PD6"]
pub use PD0_W as PD6_W;
#[doc = "Field `PD13` reader - PD13"]
pub type PD13_R = crate::BitReader<PD13_A>;
#[doc = "PD13\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PD13_A {
    #[doc = "0: Disable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    Disabled = 0,
    #[doc = "1: Enable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    Enabled = 1,
}
impl From<PD13_A> for bool {
    #[inline(always)]
    fn from(variant: PD13_A) -> Self {
        variant as u8 != 0
    }
}
impl PD13_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PD13_A {
        match self.bits {
            false => PD13_A::Disabled,
            true => PD13_A::Enabled,
        }
    }
    #[doc = "Disable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == PD13_A::Disabled
    }
    #[doc = "Enable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == PD13_A::Enabled
    }
}
#[doc = "Field `PD13` writer - PD13"]
pub type PD13_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PD13_A>;
impl<'a, REG, const O: u8> PD13_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Disable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(PD13_A::Disabled)
    }
    #[doc = "Enable the pull-down on PC\\[y\\]
when both APC bits are set in PWR control register 3 (PWR_CR3)"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(PD13_A::Enabled)
    }
}
#[doc = "Field `PD14` reader - PD14"]
pub use PD13_R as PD14_R;
#[doc = "Field `PD15` reader - Port PC\\[y\\]
pull-down (y=13 to 15)"]
pub use PD13_R as PD15_R;
#[doc = "Field `PD14` writer - PD14"]
pub use PD13_W as PD14_W;
#[doc = "Field `PD15` writer - Port PC\\[y\\]
pull-down (y=13 to 15)"]
pub use PD13_W as PD15_W;
// Per-bit accessors of the PDCRC read proxy; each extracts one flag from the
// raw 32-bit register value.
impl R {
    #[doc = "Bit 0 - PD0"]
    #[inline(always)]
    pub fn pd0(&self) -> PD0_R {
        PD0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - PD1"]
    #[inline(always)]
    pub fn pd1(&self) -> PD1_R {
        PD1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - PD2"]
    #[inline(always)]
    pub fn pd2(&self) -> PD2_R {
        PD2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - PD3"]
    #[inline(always)]
    pub fn pd3(&self) -> PD3_R {
        PD3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - PD4"]
    #[inline(always)]
    pub fn pd4(&self) -> PD4_R {
        PD4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - PD5"]
    #[inline(always)]
    pub fn pd5(&self) -> PD5_R {
        PD5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - PD6"]
    #[inline(always)]
    pub fn pd6(&self) -> PD6_R {
        PD6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 13 - PD13"]
    #[inline(always)]
    pub fn pd13(&self) -> PD13_R {
        PD13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - PD14"]
    #[inline(always)]
    pub fn pd14(&self) -> PD14_R {
        PD14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Port PC\\[y\\]
pull-down (y=13 to 15)"]
    #[inline(always)]
    pub fn pd15(&self) -> PD15_R {
        PD15_R::new(((self.bits >> 15) & 1) != 0)
    }
}
// Per-bit writer proxies of the PDCRC write proxy; the bit offset is encoded
// in the const generic parameter of each `PDx_W`.
impl W {
    #[doc = "Bit 0 - PD0"]
    #[inline(always)]
    #[must_use]
    pub fn pd0(&mut self) -> PD0_W<PDCRC_SPEC, 0> {
        PD0_W::new(self)
    }
    #[doc = "Bit 1 - PD1"]
    #[inline(always)]
    #[must_use]
    pub fn pd1(&mut self) -> PD1_W<PDCRC_SPEC, 1> {
        PD1_W::new(self)
    }
    #[doc = "Bit 2 - PD2"]
    #[inline(always)]
    #[must_use]
    pub fn pd2(&mut self) -> PD2_W<PDCRC_SPEC, 2> {
        PD2_W::new(self)
    }
    #[doc = "Bit 3 - PD3"]
    #[inline(always)]
    #[must_use]
    pub fn pd3(&mut self) -> PD3_W<PDCRC_SPEC, 3> {
        PD3_W::new(self)
    }
    #[doc = "Bit 4 - PD4"]
    #[inline(always)]
    #[must_use]
    pub fn pd4(&mut self) -> PD4_W<PDCRC_SPEC, 4> {
        PD4_W::new(self)
    }
    #[doc = "Bit 5 - PD5"]
    #[inline(always)]
    #[must_use]
    pub fn pd5(&mut self) -> PD5_W<PDCRC_SPEC, 5> {
        PD5_W::new(self)
    }
    #[doc = "Bit 6 - PD6"]
    #[inline(always)]
    #[must_use]
    pub fn pd6(&mut self) -> PD6_W<PDCRC_SPEC, 6> {
        PD6_W::new(self)
    }
    #[doc = "Bit 13 - PD13"]
    #[inline(always)]
    #[must_use]
    pub fn pd13(&mut self) -> PD13_W<PDCRC_SPEC, 13> {
        PD13_W::new(self)
    }
    #[doc = "Bit 14 - PD14"]
    #[inline(always)]
    #[must_use]
    pub fn pd14(&mut self) -> PD14_W<PDCRC_SPEC, 14> {
        PD14_W::new(self)
    }
    #[doc = "Bit 15 - Port PC\\[y\\]
pull-down (y=13 to 15)"]
    #[inline(always)]
    #[must_use]
    pub fn pd15(&mut self) -> PD15_W<PDCRC_SPEC, 15> {
        PD15_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Power Port C pull-down control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pdcrc::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pdcrc::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PDCRC_SPEC;
impl crate::RegisterSpec for PDCRC_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`pdcrc::R`](R) reader structure"]
impl crate::Readable for PDCRC_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pdcrc::W`](W) writer structure"]
impl crate::Writable for PDCRC_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PDCRC to value 0"]
impl crate::Resettable for PDCRC_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use rltk::{Point, RGB};
use specs::prelude::*;
use specs_derive::*;
use std::cmp::{max, min, Ordering};
use std::ops::{Add, Sub};
// ------------------------------------------------------------------------------------------------------------------ //
/// Clamps `v` to the inclusive interval [`l`, `u`].
///
/// Equivalent to `min(u, max(v, l))`; when `l > u` the upper bound wins.
pub fn range<T: std::cmp::Ord>(l: T, v: T, u: T) -> T {
    let at_least_lower = max(v, l);
    min(at_least_lower, u)
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Discrete 2D grid coordinate of an entity.
///
/// NOTE(review): the derived `PartialOrd` compares `(x, y)` lexicographically,
/// while the manual `Ord` impl below orders by distance from the origin.
/// `Ord` is required to be consistent with `PartialOrd` — confirm which
/// ordering is intended.
#[derive(Default, Debug, Component, Copy, Clone, PartialEq, PartialOrd)]
pub struct Position {
    pub x: i32,
    pub y: i32,
}
/// Manhattan (L1) distance between `p` and `q`.
pub fn manhattan_dist(p: &Position, q: &Position) -> i32 {
    // Fixes two defects in the original `(q.x - p.x) + (q.y - q.x)`:
    // the second term mixed up its operands (`q.y - q.x` instead of
    // `q.y - p.y`), and neither term took the absolute value, so deltas of
    // opposite sign cancelled and the "distance" could be negative.
    (q.x - p.x).abs() + (q.y - p.y).abs()
}
impl Position {
pub fn new(p: &Point) -> Position {
Position { x: p.x, y: p.y }
}
}
impl Add for Position {
type Output = Self;
fn add(self, other: Self) -> Self {
Self {
x: self.x + other.x,
y: self.y + other.y,
}
}
}
impl Sub for Position {
type Output = Self;
fn sub(self, other: Self) -> Self {
Self {
x: self.x - other.x,
y: self.y - other.y,
}
}
}
// Marker impl on top of the derived `PartialEq`; equality on plain `i32`
// fields is total, so the body is empty. Required by the `Ord` impl below.
impl Eq for Position {}
impl Ord for Position {
    /// Orders positions by their Manhattan distance from the origin.
    ///
    /// NOTE(review): this disagrees with the derived lexicographic
    /// `PartialOrd` on `Position`; `Ord` and `PartialOrd` must be
    /// consistent — confirm which ordering is intended.
    fn cmp(&self, other: &Self) -> Ordering {
        let origin = Position { x: 0, y: 0 };
        // `i32` is totally ordered, so compare directly instead of the old
        // `partial_cmp(..).unwrap_or(Ordering::Equal)` detour, which could
        // never actually hit the `unwrap_or` fallback.
        manhattan_dist(&origin, self).cmp(&manhattan_dist(&origin, other))
    }
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Hit points and melee attributes for an entity that can fight.
#[derive(Component, Debug)]
pub struct CombatStats {
    pub max_hp: i32,
    pub hp: i32,
    pub defense: i32,
    pub power: i32,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Intent component: the owning entity wants to melee-attack `target`.
#[derive(Component, Debug, Clone)]
pub struct WantsToMelee {
    pub target: Entity,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Damage queued against an entity; one vector entry per pending hit.
#[derive(Component, Debug)]
pub struct SufferDamage {
    pub amount: Vec<i32>,
}
impl SufferDamage {
    /// Queues `amount` points of damage against `victim`, appending to the
    /// victim's existing `SufferDamage` component or inserting a fresh one.
    pub fn new_damage(store: &mut WriteStorage<SufferDamage>, victim: Entity, amount: i32) {
        match store.get_mut(victim) {
            Some(existing) => existing.amount.push(amount),
            None => {
                let fresh = SufferDamage {
                    amount: vec![amount],
                };
                store.insert(victim, fresh).expect("unable to insert damage");
            }
        }
    }
}
// ------------------------------------------------------------------------------------------------------------------ //
/// How an entity is drawn: glyph, colors, and draw ordering.
#[derive(Component)]
pub struct Renderable {
    pub glyph: rltk::FontCharType,
    pub fg: RGB,
    pub bg: RGB,
    pub render_order: i32,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Field of view of an entity.
#[derive(Component)]
pub struct Viewshed {
    pub visible_tiles: Vec<rltk::Point>,
    pub range: i32,
    pub dirty: bool, // set when visible_tiles must be recomputed
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Display name of an entity.
#[derive(Component, Debug)]
pub struct Name {
    pub name: String,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Marker: the entity blocks movement through its tile.
#[derive(Component, Debug)]
pub struct BlocksTile {}
// ------------------------------------------------------------------------------------------------------------------ //
/// Marker: the player entity.
#[derive(Component, Debug)]
pub struct Player {}
// ------------------------------------------------------------------------------------------------------------------ //
/// Marker: a hostile NPC.
#[derive(Component, Debug)]
pub struct Monster {}
// ------------------------------------------------------------------------------------------------------------------ //
/// Marker: the entity can be picked up and carried.
#[derive(Component, Debug)]
pub struct Item {}
// ------------------------------------------------------------------------------------------------------------------ //
/// Marker: the item is destroyed when used.
#[derive(Component, Debug)]
pub struct Consumable {}
// ------------------------------------------------------------------------------------------------------------------ //
/// Using the item restores `amount` hit points.
#[derive(Component, Debug)]
pub struct ProvidesHealing {
    pub amount: i32,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// A healing potion restoring `heal_amount` hit points.
// NOTE(review): overlaps with `ProvidesHealing` — confirm whether both are
// still in use or one supersedes the other.
#[derive(Component, Debug)]
pub struct Potion {
    pub heal_amount: i32,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// The entity is carried in `owner`'s backpack.
#[derive(Component, Debug, Clone)]
pub struct InBackpack {
    pub owner: Entity,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Intent: `collected_by` wants to pick up `item`.
#[derive(Component, Debug, Clone)]
pub struct WantsToPickupItem {
    pub collected_by: Entity,
    pub item: Entity,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Intent: the owning entity wants to consume `item`.
#[derive(Component, Debug)]
pub struct WantsToConsumeItem {
    pub item: Entity,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// The item can be used at a distance of up to `range` tiles.
#[derive(Component, Debug)]
pub struct Ranged {
    pub range: i32,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Using the item deals `amount` damage to its target.
#[derive(Component, Debug)]
pub struct InflictsDamage {
    pub amount: i32,
}
// ------------------------------------------------------------------------------------------------------------------ //
/// Top-level game state machine driven by the main loop.
#[derive(PartialEq, Copy, Clone)]
pub enum RunState {
    AwaitingInput,
    PreRun,
    PlayerTurn,
    MonsterTurn,
    ShowInventory,
    // Targeting overlay for a ranged `item`, limited to `range` tiles.
    ShowTargeting { range: i32, item: Entity },
}
|
use std::fs::File;
use std::io::prelude::*;
/// Given an input string, return the floor that we ended up on.
///
/// `'('` goes up one floor, `')'` goes down one; any other character is
/// ignored.
fn calculate_final_position(input_value: String) -> i32 {
    input_value
        .chars()
        .map(|ch| match ch {
            '(' => 1,
            ')' => -1,
            _ => 0,
        })
        .sum()
}
/// Given an input string, calculate the first time that Santa enters the basement (-1)
/// If result is 0, he never went into the basement.
fn calculate_basement_entrance(input_value: String) -> usize {
    let mut floor = 0;
    for (idx, ch) in input_value.chars().enumerate() {
        match ch {
            '(' => floor += 1,
            ')' => floor -= 1,
            _ => {}
        }
        if floor == -1 {
            // 1-based position of the instruction that reached the basement.
            return idx + 1;
        }
    }
    0
}
/// Entry point: reads the puzzle input from `data.txt` and reports both the
/// final floor and the first position at which the basement is entered.
fn main() -> std::io::Result<()> {
    let mut contents = String::new();
    File::open("data.txt")?.read_to_string(&mut contents)?;
    let final_floor = calculate_final_position(contents.clone());
    println!("Santa ended up on the {} floor", final_floor);
    let basement_position = calculate_basement_entrance(contents.clone());
    println!("Santa ended up in the basement on the {} direction", basement_position);
    Ok(())
}
/// Unit tests for the floor-counting helpers.
// `#[cfg(test)]` was missing: without it the module (and its `use super::*`)
// is compiled into non-test builds, producing dead-code warnings.
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn test_0() {
        let expected_result = 0;
        let result = calculate_final_position(String::from("(())"));
        assert_eq!(result, expected_result);
        let result = calculate_final_position(String::from("()()"));
        assert_eq!(result, expected_result);
    }
    #[test]
    fn test_3() {
        let expected_result = 3;
        let result = calculate_final_position(String::from("((("));
        assert_eq!(result, expected_result);
        let result = calculate_final_position(String::from("(()(()("));
        assert_eq!(result, expected_result);
        let result = calculate_final_position(String::from("))((((("));
        assert_eq!(result, expected_result);
    }
    #[test]
    fn test_negative_1() {
        let expected_result = -1;
        let result = calculate_final_position(String::from("())"));
        assert_eq!(result, expected_result);
        let result = calculate_final_position(String::from("))("));
        assert_eq!(result, expected_result);
    }
    #[test]
    fn test_negative_3() {
        let expected_result = -3;
        let result = calculate_final_position(String::from(")))"));
        assert_eq!(result, expected_result);
        let result = calculate_final_position(String::from(")())())"));
        assert_eq!(result, expected_result);
    }
    #[test]
    fn test_basement_1() {
        let expected_result = 1;
        let result = calculate_basement_entrance(String::from(")"));
        assert_eq!(result, expected_result);
    }
    #[test]
    fn test_basement_5() {
        let expected_result = 5;
        let result = calculate_basement_entrance(String::from("()())"));
        assert_eq!(result, expected_result);
    }
}
|
/// LeetCode-style container for the `my_atoi` solution.
struct Solution {}
impl Solution {
    /// Parses a leading integer out of `s`, loosely following atoi rules:
    /// optional leading spaces, then an optional single sign, then digits.
    /// Parsing stops at the first character that cannot continue the number.
    /// On overflow the result saturates to `i32::MIN` / `i32::MAX`.
    pub fn my_atoi(s: String) -> i32 {
        // Magnitude accumulated so far (always non-negative).
        let mut num: i32 = 0;
        // `signed`: a '-' was seen. `sign_observed`: a sign or digit was
        // seen, after which spaces or a second sign terminate parsing.
        let (mut signed, mut sign_observed, mut overflow) = (false, false, false);
        for &c in s.as_bytes() {
            match c as char {
                '0'..='9' => {
                    let diff = c as i32 - i32::from(b'0');
                    // Checked arithmetic: the previous probe computed
                    // `num * 10` eagerly, which itself overflowed (and
                    // panics in debug builds) on large inputs.
                    match num.checked_mul(10).and_then(|v| v.checked_add(diff)) {
                        Some(v) => num = v,
                        None => {
                            println!("Overflow detected {}", signed);
                            overflow = true;
                            break;
                        }
                    }
                    sign_observed = true;
                }
                '-' => {
                    // A sign after digits (or a second sign) ends the
                    // number. Previously `signed` was set before this check,
                    // so "42-" was wrongly negated to -42.
                    if sign_observed {
                        break;
                    }
                    signed = true;
                    sign_observed = true;
                }
                '+' => {
                    if sign_observed {
                        break;
                    }
                    sign_observed = true;
                }
                // Spaces are skipped only before the sign/digits.
                ' ' => {
                    if sign_observed {
                        break;
                    }
                }
                // Any other character (including '.') terminates parsing.
                _ => break,
            };
        }
        if overflow {
            return if signed { i32::MIN } else { i32::MAX };
        }
        if signed {
            -num
        } else {
            num
        }
    }
}
/// Manual smoke test: a sign, digits, then a second sign — parsing stops at
/// the second sign, so this prints -42.
fn main() {
    let parsed = Solution::my_atoi("-42+43".to_string());
    println!("{}", parsed);
}
|
use crate::common::factories::RandomizedBuilder;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
/// Randomized builder for `IpAddr` test values, configurable by IP version
/// and by whether the generated address should be a multicast address.
#[derive(Debug, Clone)]
pub struct IpAddrBuilder {
    pub version: IpVersion,
    pub multicast: bool,
}
impl Default for IpAddrBuilder {
fn default() -> Self {
Self {
version: Default::default(),
multicast: false,
}
}
}
impl IpAddrBuilder {
    /// Returns the IPv4 loopback address, `127.0.0.1`.
    pub fn localhost() -> IpAddr {
        IpAddr::V4(Ipv4Addr::LOCALHOST)
    }
}
impl RandomizedBuilder for IpAddrBuilder {
    type Item = IpAddr;
    /// Builds a random address honoring `self.version` and `self.multicast`.
    fn build(self) -> Self::Item {
        use common::rand::{thread_rng, Rng};
        use fake::faker::internet::raw::*;
        use fake::locales::EN;
        use fake::Fake;
        match self.version {
            IpVersion::V4 => {
                if self.multicast {
                    // Random host in the 224.0.0.x multicast range.
                    let mut rng = thread_rng();
                    Self::Item::V4(Ipv4Addr::new(224, 0, 0, rng.gen_range(1, 254)))
                } else {
                    Self::Item::V4(IPv4(EN).fake())
                }
            }
            IpVersion::V6 => {
                if self.multicast {
                    // Fixed multicast address — no randomness is used here,
                    // so the previously unused `thread_rng()` binding (a
                    // dead-code warning) has been removed.
                    Self::Item::V6(Ipv6Addr::new(0xff00, 0, 0, 0, 0, 0, 0, 0x3))
                } else {
                    Self::Item::V6(IPv6(EN).fake())
                }
            }
        }
    }
}
/// IP protocol version selector for `IpAddrBuilder`.
#[derive(Debug, Clone)]
pub enum IpVersion {
    V4,
    V6,
}
// Builders produce IPv4 addresses unless V6 is requested explicitly.
impl Default for IpVersion {
    fn default() -> Self {
        Self::V4
    }
}
|
// Copyright (c) 2017 Brandon Thomas <bt@brand.io>
// Painting with Lasers
extern crate argparse;
extern crate beam;
extern crate camera_capture;
extern crate image;
extern crate lase;
extern crate piston;
extern crate piston_window;
extern crate router;
extern crate rscam;
extern crate texture;
mod arguments;
mod drawing;
mod error;
use arguments::Arguments;
use drawing::Canvas;
use drawing::ImagePosition;
use image::ConvertBuffer;
use image::ImageBuffer;
use image::Pixel;
use lase::tools::find_first_etherdream_dac;
use piston_window::Button;
use piston_window::PressEvent;
use piston_window::{PistonWindow, Texture, WindowSettings, TextureSettings, clear};
use rscam::Frame;
use std::sync::Arc;
use std::time::Instant;
type ImageFrame = image::ImageBuffer<image::Rgb<u8>, Frame>;
type ImageFrameRgba = ImageBuffer<image::Rgba<u8>, Vec<u8>>;
const THRESHOLD: u8 = 180;
const TRACKING_POINTS : i32 = 5; // Num of points to blank.
// Entry point: shares a `Canvas` between a laser-projection thread and the
// webcam-capture loop on the main thread.
fn main() {
    let args = Arguments::parse_args();
    // Shared canvas: the DAC thread reads points while the webcam loop adds them.
    let canvas = Arc::new(Canvas::new(TRACKING_POINTS as usize, &args));
    let canvas2 = canvas.clone();
    let mut dac = find_first_etherdream_dac().expect("Unable to find DAC");
    // Projection runs on its own thread; the DAC pulls points on demand and
    // `current_point` carries the cursor between callbacks.
    std::thread::spawn(move || {
        let mut current_point = 0;
        dac.play_function(move |num_points: u16| {
            let num_points = num_points as usize;
            let payload = canvas.get_points(current_point, num_points)
                .expect("Failure to get points!");
            current_point = payload.next_cursor;
            payload.points
        }).expect("Projection failed.");
    });
    // Webcam capture + optional GUI loop blocks the main thread.
    unused_webcam(canvas2, &args);
}
/// Thresholds `frame` into a black/white RGBA image: every channel value
/// above `THRESHOLD` saturates to 255, everything else drops to 0.
fn to_grayscale(frame: ImageFrame, args: &Arguments) -> ImageFrameRgba {
    let (width, height) = frame.dimensions();
    // NOTE(review): the output buffer is sized from the configured webcam
    // dimensions while the loop walks the actual frame's dimensions — if the
    // two ever differ, `put_pixel` would go out of bounds; confirm upstream.
    let mut new_image : ImageFrameRgba =
        ImageBuffer::new(args.webcam_width, args.webcam_height);
    for i in 0..width {
        for j in 0..height {
            // `to_rgba` already yields an owned pixel; the previous
            // intermediate `rgba.clone()` was a redundant copy.
            let mut pix2 = frame.get_pixel(i, j).to_rgba();
            pix2.apply(|pix: u8| {
                if pix > THRESHOLD {
                    255
                } else {
                    0
                }
            });
            new_image.put_pixel(i, j, pix2);
        }
    }
    new_image
}
/// Scans a frame for the first fully-saturated pixel and returns its
/// coordinates, or `None` if no pixel is saturated.
fn find_laser_position(frame: ImageFrameRgba) -> Option<ImagePosition> {
    // FIXME: This crudely finds the first pixel that has a saturated green channel
    // We need to find the centroid of the largest saturation cluster.
    let (width, height) = frame.dimensions();
    for i in 0..width {
        for j in 0..height {
            let pix = frame.get_pixel(i, j);
            // Callers pass frames thresholded by `to_grayscale`, where every
            // channel is either 0 or 255, so checking green alone suffices.
            let g = pix.data[1]; // green channel
            if g == 255 {
                return Some(ImagePosition { x: i, y: j } )
            }
        }
    }
    None
}
// Webcam capture loop: thresholds frames, records detected laser points on
// the shared canvas, and optionally shows a live preview window.
// NOTE(review): despite the name, this function IS used — it is the main
// capture path called from `main`; consider renaming.
fn unused_webcam(canvas: Arc<Canvas>, args: &Arguments) {
    // Frames flow from the capture thread to the GUI loop over this channel.
    let (sender, receiver) = std::sync::mpsc::channel();
    let mut tex: Option<Texture<_>> = None;
    let mut window: Option<PistonWindow> = None;
    if args.show_gui {
        window = Some(WindowSettings::new("Webcam capture",
            [args.webcam_width, args.webcam_height])
            .exit_on_esc(true)
            .build()
            .unwrap());
    }
    let args2 = (*args).clone();
    let canvas2 = canvas.clone();
    // Capture thread: grab frames, threshold, detect the laser dot, and
    // forward the processed frame for display.
    let imgthread = std::thread::spawn(move || {
        let cam = camera_capture::create(args2.webcam_index).unwrap()
            .fps(30.0)
            .unwrap()
            .resolution(args2.webcam_width, args2.webcam_height)
            .unwrap()
            .start()
            .unwrap();
        for frame in cam {
            let grayscale = to_grayscale(frame, &args2);
            let converted: ImageFrameRgba = grayscale.convert();
            let maybe_pos = find_laser_position(converted);
            if let Some(pos) = maybe_pos {
                println!("Found Point : {:?}", pos);
                canvas.add_point(pos, Instant::now())
                    .expect("Could not add points");
            }
            // Receiver gone means the GUI closed; stop capturing.
            if let Err(_) = sender.send(grayscale) {
                break;
            }
        }
    });
    if args.show_gui {
        for e in window.unwrap() {
            // Upload the newest frame into a texture (created lazily).
            if let Ok(frame) = receiver.try_recv() {
                if let Some(mut t) = tex {
                    t.update(&mut *e.encoder.borrow_mut(), &frame).unwrap();
                    tex = Some(t);
                } else {
                    tex = Texture::from_image(
                        &mut *e.factory.borrow_mut(),
                        &frame,
                        &TextureSettings::new()
                    ).ok();
                }
            }
            e.draw_2d(|c, g| {
                clear([1.0; 4], g);
                if let Some(ref t) = tex {
                    piston_window::image(t, c.transform, g);
                }
            });
            // Any key press clears the accumulated drawing.
            if let Some(button) = e.press_args() {
                println!("Pressed button: {:?}", button);
                canvas2.reset();
            }
        }
    }
    imgthread.join().unwrap();
}
|
#[macro_use]
extern crate explanation;
#[allow(unused_variables)]
extern crate rand;
extern crate timely;
extern crate graph_map;
extern crate differential_dataflow;
use std::cell::RefCell;
use std::io::BufRead;
use graph_map::GraphMMap;
use timely::dataflow::*;
use timely::dataflow::scopes::Child;
use timely::dataflow::operators::*;
use timely::progress::timestamp::RootTimestamp;
use timely::progress::nested::product::Product;
use differential_dataflow::Collection;
use differential_dataflow::operators::*;
use explanation::{Variable, MonotonicVariable};
/// Interactive min-label propagation dataflow that also derives
/// *explanations*: the minimal subsets of the `graph` and `label` inputs
/// needed to justify the queried outputs. Commands (`query`/`graph`/`label`,
/// each with a `+`/`-` sign) are read from stdin, one round per line.
fn main() {
    timely::execute_from_args(std::env::args(), move |root| {
        // BEGIN DATAFLOW CONSTRUCTION
        // Outer-most streaming scope; here inputs to the graph, labels, queries, etc may change.
        let (mut graph, mut label, mut query, probe) = root.scoped::<u32, _, _>(move |streaming| {
            // Construct inputs for graph data, label data, and queries made against the results.
            // NOTE: label data supplied separately as per other systems, which provide graph node
            // NOTE: data independently from the graph; otherwise we would compute and maintain it.
            let (graph_handle, graph) = streaming.new_input(); let graph = Collection::new(graph);
            let (label_handle, label) = streaming.new_input(); let label = Collection::new(label);
            let (query_handle, query) = streaming.new_input(); let query = Collection::new(query);
            // Iterative scope for rounds of input correction
            let (mut graph_must, mut label_must) = streaming.scoped::<u32,_,_>(move |correction| {
                // Bring each input into the scope.
                let graph = graph.enter(correction);
                let label = label.enter(correction);
                let query = query.enter(correction);
                // Each data input uses a MonotonicVariable to track its elements required to explain outputs.
                // These collections grow monotonically in each round of correction, limited by the full set.
                let mut graph_must = MonotonicVariable::new(correction);
                let mut label_must = MonotonicVariable::new(correction);
                // Scope for explanation derivation.
                let child_scope = RefCell::new(correction.new_subscope());
                let child_index = child_scope.borrow().index;
                // determine and return necessary members of `graph` and `label`.
                let (graph_need, label_need) = {
                    // wrap an explanation scope builder.
                    let mut explanation_scope = Child {
                        subgraph: &child_scope,
                        parent: correction.clone(),
                    };
                    // define variables for each input to the computation.
                    // the data source is from outside the correction loop,
                    // and the working source are the *_must collections.
                    let mut var_graph = Variable::new(graph.clone(), graph_must.stream.clone(), &mut explanation_scope);
                    let mut var_label = Variable::new(label.clone(), label_must.stream.clone(), &mut explanation_scope);
                    // transpose edges and concatenate, symmetrizing the graph.
                    let mut var_edges = var_graph.map_inverse(|(x,y)| (y,x), |(y,x)| (x,y))
                                                 .concat(&mut var_graph);
                    // actual computation loop; can you believe we do computation, too?
                    let mut final_labels = correction.scoped::<u32,_,_>(|inner| {
                        // BEGIN FEEDBACK SETUP
                        let (handle1, cycle1) = inner.loop_variable(u32::max_value(), 1); let cycle1 = Collection::new(cycle1);
                        let (handle2, cycle2) = inner.loop_variable(u32::max_value(), 1); let cycle2 = Collection::new(cycle2);
                        let mut var_inner = Variable::new(cycle1, cycle2, &mut explanation_scope);
                        // END FEEDBACK SETUP
                        // join edges with looped labels, then re-order to have dst as key
                        let mut var_transmit =
                        var_edges.enter(inner)
                                 .join_u(&mut var_inner)
                                 .map_inverse(|(x,(y,l))| (y,(l,x)), |(y,(l,x))| (x,(y,l)));
                        // bring in initial labels from outside, concat with proposals
                        let mut var_options =
                        var_label.enter_at(inner, |r| 256 * (((((r.0).0) as f64).ln() * 10.0) as u32))
                                 .map_inverse(|(x,l)| (x,(l,x)), |(x,(l,_))| (x,l))
                                 .concat(&mut var_transmit);
                        // group the labels by key, using min! macro
                        let mut var_min = min!(var_options, |(l,_d)| l, explanation_scope);
                        // BEGIN FEEDBACK CONNECT
                        var_min.stream.inner.connect_loop(handle1);
                        var_min.working.inner.connect_loop(handle2);
                        var_min.depends.add(
                            &var_inner.depends.stream
                                      .filter(|&(_,_,t,_)| t.inner > 0)
                                      .map(|(x,l,t,q)| (x,l,Product::new(t.outer, t.inner - 1),q))
                        );
                        // END FEEDBACK CONNECT
                        leave!(var_min, explanation_scope)
                    });
                    // introduce any query elements as initial dependences.
                    final_labels.depends.add(&query.enter(&explanation_scope));
                    // pop input requirements out of the explanation scope and return them.
                    (var_graph.depends.stream.leave(), var_label.depends.stream.leave())
                };
                // all explanation infrastructure in place; add to correct scope.
                correction.add_operator_with_index(child_scope.into_inner(), child_index);
                // intersect required edges and labels with existing edges and labels.
                graph_must.add(&graph_need.map(|(k,v,_t,_q)| ((k,v),())).semijoin(&graph).map(|((k,v),_)| (k,v)));
                label_must.add(&label_need.map(|(k,v,_t,_q)| ((k,v),())).semijoin(&label).map(|((k,v),_)| (k,v)));
                // merge the things we need, pop them out of the loop, and probe
                (graph_must.stream.leave(), label_must.stream.leave())
            });
            // print out what we require from each input.
            graph_must = graph_must.inspect(|x| println!("graph_must:\t{:?}", x));
            label_must = label_must.inspect(|x| println!("label_must:\t{:?}", x));
            // attach a probe, so that we can await completeness.
            let query_probe = graph_must.concat(&label_must).probe().0;
            (graph_handle, label_handle, query_handle, query_probe)
        });
        // END DATAFLOW CONSTRUCTION
        // BEGIN DATA LOADING
        // NOTE: This could be replaced with your favorite data format.
        if let Some(filename) = std::env::args().nth(1) {
            let edges = GraphMMap::new(&filename);
            // Each worker loads only the nodes it is responsible for.
            for node in 0..edges.nodes() {
                if node % root.peers() == root.index() {
                    if edges.edges(node).len() > 0 {
                        label.send(((node as u32, node as u32), 1));
                    }
                    for &edge in edges.edges(node) {
                        graph.send(((node as u32, edge as u32), 1));
                    }
                }
            }
        }
        // END DATA LOADING
        // close labels, advance graph and query inputs to the next epoch.
        graph.advance_to(1);
        label.advance_to(1);
        query.advance_to(1);
        // Drain the initial epoch, then time a second (presumably idle) pass.
        root.step_while(|| probe.lt(&query.time()));
        println!("");
        let timer = ::std::time::Instant::now();
        root.step_while(|| probe.lt(&query.time()));
        if root.index() == 0 { println!("initialization elapsed:\t{:?}", timer.elapsed()); }
        // One interactive round per stdin line: mutate an input, advance the
        // epoch, and step the workers until the round's results are complete.
        let mut round = 1;
        let input = std::io::stdin();
        for line in input.lock().lines().map(|x| x.unwrap()) {
            let mut elts = line[..].split_whitespace();
            if let Some(command) = elts.next() {
                if command == "query" {
                    if let Some(sign) = elts.next() {
                        let sign = if sign == "-" { -1i32 } else { 1 };
                        if let Some(source) = elts.next() {
                            if let Some(node) = source.parse::<u32>().ok() {
                                query.send(((
                                    node,
                                    0,
                                    Product::new(RootTimestamp::new(0), u32::max_value()),
                                    0 as u32
                                ),sign));
                            }
                        }
                    }
                }
                if command == "graph" {
                    if let Some(sign) = elts.next() {
                        let sign = if sign == "-" { -1i32 } else { 1 };
                        if let Some(source) = elts.next() {
                            if let Some(source) = source.parse::<u32>().ok() {
                                if let Some(target) = elts.next() {
                                    if let Some(target) = target.parse::<u32>().ok() {
                                        graph.send(((source, target),sign));
                                    }
                                }
                            }
                        }
                    }
                }
                if command == "label" {
                    if let Some(sign) = elts.next() {
                        let sign = if sign == "-" { -1i32 } else { 1 };
                        if let Some(source) = elts.next() {
                            if let Some(source) = source.parse::<u32>().ok() {
                                if let Some(target) = elts.next() {
                                    if let Some(target) = target.parse::<u32>().ok() {
                                        label.send(((source, target),sign));
                                    }
                                }
                            }
                        }
                    }
                }
                graph.advance_to(round + 1);
                label.advance_to(round + 1);
                query.advance_to(round + 1);
                let timer = ::std::time::Instant::now();
                root.step_while(|| probe.lt(&query.time()));
                if root.index() == 0 {
                    println!("round {:?} elapsed:\t{:?}", round, timer.elapsed());
                }
                round += 1;
            }
        }
    }).unwrap();
}
|
extern crate flint;
extern crate gmp;
#[macro_use]
extern crate hilbert_qexp;
extern crate serde;
extern crate serde_pickle;
pub mod parallel_wt;
pub mod mixed_wt;
pub mod structure;
|
/// Announces itself on stdout and returns the greeting `"hello!"`.
pub fn hellodep() -> String {
    println!("I'm in a procmacro as a dependency");
    "hello!".to_string()
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.