text stringlengths 8 4.13M |
|---|
use bindings::{
Windows::UI::Notifications::*,
Windows::Data::Xml::Dom::XmlDocument
};
use windows::Result;
#[allow(dead_code)]
/// Display duration of a toast; serialized to the `duration` attribute of
/// the `<toast>` element as "short" / "long" (see `ToastDuration::get_str`).
pub enum ToastDuration {
    Short, Long
}
#[allow(dead_code)]
/// Built-in Windows notification sounds. Every variant except `Silent` maps
/// to an `ms-winsoundevent:` URI (see `ToastAudio::get_str`); `Silent`
/// suppresses audio entirely via the `silent="true"` attribute.
pub enum ToastAudio {
    Default,
    IM,
    Mail,
    Reminder,
    SMS,
    LoopingAlarm,
    LoopingAlarm2,
    LoopingAlarm3,
    LoopingAlarm4,
    LoopingAlarm5,
    LoopingAlarm6,
    LoopingAlarm7,
    LoopingAlarm8,
    LoopingAlarm9,
    LoopingAlarm10,
    LoopingCall,
    LoopingCall2,
    LoopingCall3,
    LoopingCall4,
    LoopingCall5,
    LoopingCall6,
    LoopingCall7,
    LoopingCall8,
    LoopingCall9,
    LoopingCall10,
    Silent
}
/// Description of a Windows toast notification; the string fields are
/// inserted verbatim into the toast XML by `build_xml`.
pub struct Notification<'a> {
    /// Application id used to create the toast notifier.
    pub app_id: &'a str,
    /// First text line of the toast (omitted from the XML when empty).
    pub title: &'a str,
    /// Second text line (omitted when empty).
    pub message: &'a str,
    /// Source of an inline body image (omitted when empty).
    pub body_image: &'a str,
    /// Source of the `appLogoOverride` image (omitted when empty).
    pub icon: &'a str,
    /// `activationType` attribute; defaults to "protocol" when left empty.
    pub activation_type: &'a str,
    /// `launch` attribute handed back to the app on activation.
    pub activation_argument: &'a str,
    /// Sound to play with the toast.
    pub audio: ToastAudio,
    /// Whether the sound loops (ignored for `ToastAudio::Silent`).
    pub audio_loop: bool,
    /// Display duration ("short" / "long").
    pub duration: ToastDuration,
    /// Buttons rendered inside the `<actions>` element.
    pub actions: Vec<ToastAction<'a>>,
}
/// One `<action>` (button) of a toast; fields become the element's
/// `activationType`, `content` and `arguments` attributes respectively.
pub struct ToastAction<'a> {
    /// `activationType` attribute of the action.
    pub action_type: &'a str,
    /// Button caption (`content` attribute).
    pub label: &'a str,
    /// `arguments` attribute delivered on activation.
    pub arguments: &'a str,
}
impl<'a> Notification<'a> {
    /// Creates a notification pre-filled with placeholder defaults; callers
    /// overwrite the fields they care about before calling `push`.
    pub fn new() -> Notification<'a> {
        Notification {
            app_id: "TestApp",
            title: "Noti Test",
            message: "Message",
            body_image: "",
            icon: "",
            activation_type: "",
            activation_argument: "",
            audio: ToastAudio::Default,
            audio_loop: false,
            duration: ToastDuration::Short,
            actions: Vec::new(),
        }
    }
    /// Fills in defaults for fields the caller left empty.
    fn apply_default(&mut self) {
        if self.activation_type.is_empty() {
            self.activation_type = "protocol";
        }
    }
    /// Assembles the toast XML document from the configured fields.
    /// Attribute/child failures are deliberately ignored (`.ok()`), keeping
    /// the original best-effort behavior of the WinRT builder calls.
    fn build_xml(&self) -> XmlDocument {
        let doc = XmlDocument::new().unwrap();
        let root = doc.CreateElement("toast").unwrap();
        root.SetAttribute("activationType", self.activation_type).ok();
        root.SetAttribute("launch", self.activation_argument).ok();
        root.SetAttribute("duration", self.duration.get_str()).ok();
        // visual: a single ToastGeneric binding holding icon, texts and image
        let visual = doc.CreateElement("visual").unwrap();
        {
            let bind = doc.CreateElement("binding").unwrap();
            {
                bind.SetAttribute("template", "ToastGeneric").ok();
                if !self.icon.is_empty() {
                    let icon = doc.CreateElement("image").unwrap();
                    icon.SetAttribute("placement", "appLogoOverride").ok();
                    icon.SetAttribute("src", self.icon).ok();
                    bind.AppendChild(icon).ok();
                }
                if !self.title.is_empty() {
                    let text = doc.CreateElement("text").unwrap();
                    let cdata = doc.CreateCDataSection(self.title).unwrap();
                    text.AppendChild(cdata).ok();
                    bind.AppendChild(text).ok();
                }
                if !self.message.is_empty() {
                    let text = doc.CreateElement("text").unwrap();
                    let cdata = doc.CreateCDataSection(self.message).unwrap();
                    text.AppendChild(cdata).ok();
                    bind.AppendChild(text).ok();
                }
                if !self.body_image.is_empty() {
                    let body_image = doc.CreateElement("image").unwrap();
                    body_image.SetAttribute("src", self.body_image).ok();
                    bind.AppendChild(body_image).ok();
                }
            }
            visual.AppendChild(bind).ok();
        }
        root.AppendChild(visual).ok();
        // audio: either fully silent, or a sound source with optional looping
        let audio = doc.CreateElement("audio").unwrap();
        match &self.audio {
            ToastAudio::Silent => {
                audio.SetAttribute("silent", "true").ok();
            }
            _ => {
                audio.SetAttribute("src", self.audio.get_str()).ok();
                audio
                    .SetAttribute("loop", if self.audio_loop { "true" } else { "false" })
                    .ok();
            }
        }
        root.AppendChild(audio).ok();
        // actions: one <action> element per configured button
        if !self.actions.is_empty() {
            let actions = doc.CreateElement("actions").unwrap();
            for act in self.actions.iter() {
                let action = doc.CreateElement("action").unwrap();
                action.SetAttribute("activationType", act.action_type).ok();
                action.SetAttribute("content", act.label).ok();
                action.SetAttribute("arguments", act.arguments).ok();
                actions.AppendChild(action).ok();
            }
            root.AppendChild(actions).ok();
        }
        doc.AppendChild(root).ok();
        doc
    }
    /// Applies defaults, builds the XML and shows the toast via the
    /// notifier registered for `app_id`.
    pub fn push(&mut self) -> Result<()> {
        // Fix: was `&self.apply_default();` — the leading `&` borrowed the
        // unit return value for nothing and produced an unused-value warning.
        self.apply_default();
        let xml = self.build_xml();
        //println!("Xml={}", xml.GetXml().unwrap());
        let noti = ToastNotification::CreateToastNotification(xml).unwrap();
        ToastNotificationManager::CreateToastNotifierWithId(self.app_id)
            .unwrap()
            .Show(noti)
    }
}
impl<'a> ToastAction<'a> {
    /// Builds an action (toast button) from its raw attribute strings.
    pub fn new( action_type: &'a str, label: &'a str, arguments: &'a str )
        -> ToastAction<'a> {
        // Field-init shorthand: parameter names mirror the struct fields.
        ToastAction { action_type, label, arguments }
    }
}
impl ToastAudio {
    /// Returns the `ms-winsoundevent:` URI for this sound, or the literal
    /// "silent" for `Silent` (note: `build_xml` never uses the `Silent`
    /// string — it emits `silent="true"` instead).
    pub fn get_str(&self) -> &'static str {
        match self {
            ToastAudio::Default => "ms-winsoundevent:Notification.Default",
            ToastAudio::IM => "ms-winsoundevent:Notification.IM",
            ToastAudio::Mail => "ms-winsoundevent:Notification.Mail",
            ToastAudio::Reminder => "ms-winsoundevent:Notification.Reminder",
            ToastAudio::SMS => "ms-winsoundevent:Notification.SMS",
            ToastAudio::LoopingAlarm => "ms-winsoundevent:Notification.Looping.Alarm",
            ToastAudio::LoopingAlarm2 => "ms-winsoundevent:Notification.Looping.Alarm2",
            ToastAudio::LoopingAlarm3 => "ms-winsoundevent:Notification.Looping.Alarm3",
            ToastAudio::LoopingAlarm4 => "ms-winsoundevent:Notification.Looping.Alarm4",
            ToastAudio::LoopingAlarm5 => "ms-winsoundevent:Notification.Looping.Alarm5",
            ToastAudio::LoopingAlarm6 => "ms-winsoundevent:Notification.Looping.Alarm6",
            ToastAudio::LoopingAlarm7 => "ms-winsoundevent:Notification.Looping.Alarm7",
            ToastAudio::LoopingAlarm8 => "ms-winsoundevent:Notification.Looping.Alarm8",
            ToastAudio::LoopingAlarm9 => "ms-winsoundevent:Notification.Looping.Alarm9",
            ToastAudio::LoopingAlarm10 => "ms-winsoundevent:Notification.Looping.Alarm10",
            ToastAudio::LoopingCall => "ms-winsoundevent:Notification.Looping.Call",
            ToastAudio::LoopingCall2 => "ms-winsoundevent:Notification.Looping.Call2",
            ToastAudio::LoopingCall3 => "ms-winsoundevent:Notification.Looping.Call3",
            ToastAudio::LoopingCall4 => "ms-winsoundevent:Notification.Looping.Call4",
            ToastAudio::LoopingCall5 => "ms-winsoundevent:Notification.Looping.Call5",
            ToastAudio::LoopingCall6 => "ms-winsoundevent:Notification.Looping.Call6",
            ToastAudio::LoopingCall7 => "ms-winsoundevent:Notification.Looping.Call7",
            ToastAudio::LoopingCall8 => "ms-winsoundevent:Notification.Looping.Call8",
            ToastAudio::LoopingCall9 => "ms-winsoundevent:Notification.Looping.Call9",
            ToastAudio::LoopingCall10 => "ms-winsoundevent:Notification.Looping.Call10",
            ToastAudio::Silent => "silent",
        }
    }
}
impl ToastDuration {
    /// Returns the value for the toast's `duration` attribute.
    pub fn get_str(&self) -> &'static str {
        match self {
            ToastDuration::Long => "long",
            ToastDuration::Short => "short",
        }
    }
}
|
use std::env;
use std::fs::File;
use std::io::Read;
mod interpreter;
/// Loads the program source from the file named by the first CLI argument.
///
/// Panics when no argument is given or the file cannot be opened. A read
/// failure is printed to stdout and whatever was read so far is returned
/// (possibly an empty string).
fn get_prog() -> String {
    let filename = env::args()
        .nth(1)
        .unwrap_or_else(|| panic!("Please provide a file to load from"));
    let mut file = File::open(filename)
        .unwrap_or_else(|_| panic!("That file doesn't exist"));
    let mut prog = String::new();
    if let Err(e) = file.read_to_string(&mut prog) {
        println!("{}", e.to_string());
    }
    prog
}
/// Entry point: load the program text given on the command line and hand it
/// to the interpreter.
fn main() {
    let prog = get_prog();
    interpreter::interpret(&prog);
}
|
use P80::graph_converters::labeled;
use P84::*;
/// Builds the fixed example weighted graph (nodes 'a'..'h') in term form and
/// prints every minimal spanning tree together with its total edge weight.
pub fn main() {
    let g = labeled::from_term_form(
        &vec!['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'],
        &vec![
            ('a', 'b', 5),
            ('a', 'd', 3),
            ('b', 'c', 2),
            ('b', 'e', 4),
            ('c', 'e', 6),
            ('d', 'e', 7),
            ('d', 'f', 4),
            ('d', 'g', 3),
            ('e', 'h', 5),
            ('f', 'g', 4),
            ('g', 'h', 1),
        ],
    );
    let trees = minimal_spanning_trees(&g);
    for tree in trees {
        println!(
            "{:?} (weight={})",
            labeled::to_term_form(&tree),
            label_sum(&tree)
        );
    }
}
|
#[doc(hidden)]
#[macro_use]
pub mod test_utils;
pub mod const_utils;
#[doc(hidden)]
pub mod type_layout {
pub mod small_types;
pub mod tl_field_accessor_macro;
pub mod tl_field_macro;
pub mod tl_lifetimes_macro;
pub mod tl_multi_tl_macro;
pub mod tl_type_layout_index;
}
use core_extensions::StringExt;
/// The name mangling scheme of `abi_stable`.
///
/// Produces `_as.{kind}.{name}` and then escapes every non-alphanumeric
/// character to a two-character `_`-prefixed code, panicking on characters
/// without an assigned escape.
#[doc(hidden)]
pub fn mangle_ident<S>(kind: &str, name: S) -> String
where
    S: ::std::fmt::Display,
{
    let unmangled = format!("_as.{}.{}", kind, name);
    let mut mangled = String::with_capacity(unmangled.len() * 3 / 2);
    // Walk the string character by character: alphanumerics pass through
    // unchanged, everything else is replaced by its escape code.
    for c in unmangled.chars() {
        if c.is_alphanumeric() {
            mangled.push(c);
            continue;
        }
        mangled.push_str(match c {
            '.' => "_0",
            '_' => "_1",
            '-' => "_2",
            '<' => "_3",
            '>' => "_4",
            '(' => "_5",
            ')' => "_6",
            '[' => "_7",
            ']' => "_8",
            '{' => "_9",
            '}' => "_a",
            ' ' => "_b",
            ',' => "_c",
            ':' => "_d",
            ';' => "_e",
            '!' => "_f",
            '#' => "_g",
            '$' => "_h",
            '%' => "_i",
            '/' => "_j",
            '=' => "_k",
            '?' => "_l",
            '¿' => "_m",
            '¡' => "_o",
            '*' => "_p",
            '+' => "_q",
            '~' => "_r",
            '|' => "_s",
            '°' => "_t",
            '¬' => "_u",
            '\'' => "_x",
            '\"' => "_y",
            '`' => "_z",
            c => panic!("cannot currently mangle the '{}' character.", c),
        });
    }
    mangled
}
/// Gets the name of the static that contains the LibHeader of an abi_stable library.
///
/// This does not have a trailing `'\0'`,
/// you need to append it to pass the name to C APIs.
pub fn mangled_root_module_loader_name() -> String {
    // Delegates to the shared mangling scheme so the name stays consistent
    // with every other mangled symbol.
    mangle_ident("lib_header", "root module loader")
}
|
// FIXME: Make me pass! Diff budget: 25 lines.
use Duration::{MilliSeconds, Seconds, Minutes};
#[derive(Debug)]
/// A time span expressed in one of three units; `PartialEq` (implemented
/// below) compares values across units.
enum Duration {
    MilliSeconds(u64),
    Seconds(u32),
    Minutes(u16)
}
impl PartialEq for Duration {
    /// Two durations are equal when they denote the same span of time,
    /// regardless of unit.
    ///
    /// Fix: the original matched only three fixed variant orderings, so the
    /// relation was neither reflexive (`Seconds(5) != Seconds(5)`) nor
    /// symmetric (`Minutes(2) != Seconds(120)`). Normalizing every variant
    /// to milliseconds makes it a proper equivalence relation.
    fn eq(&self, other: &Duration) -> bool {
        // Helper: convert any variant to milliseconds (u64 holds the full
        // range: u16 minutes * 60_000 and u32 seconds * 1000 both fit).
        fn millis(d: &Duration) -> u64 {
            match *d {
                MilliSeconds(ms) => ms,
                Seconds(s) => s as u64 * 1000,
                Minutes(m) => m as u64 * 60 * 1000,
            }
        }
        millis(self) == millis(other)
    }
}
/// Exercises cross-unit equality of `Duration` (the FIXME exercise's
/// acceptance checks).
fn main() {
    assert_eq!(Seconds(120), Minutes(2));
    assert_eq!(Seconds(420), Minutes(7));
    assert_eq!(MilliSeconds(420000), Minutes(7));
    assert_eq!(MilliSeconds(43000), Seconds(43));
}
|
// =========
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use std::process::exit;
// 1_000_000_007 — the usual competitive-programming modulus. Not referenced
// anywhere in this file; kept from the contest template.
const MOD: usize = 1000000007;
/// Reads whitespace-separated tokens — from an explicit `source = expr`, or
/// from all of stdin — and binds them according to the given pattern,
/// e.g. `input! { n: usize, xs: [i64; n] }`.
macro_rules! input {
    (source = $s:expr, $($r:tt)*) => {
        let mut iter = $s.split_whitespace();
        input_inner!{iter, $($r)*}
    };
    ($($r:tt)*) => {
        // No explicit source: slurp all of stdin into one String first.
        let s = {
            use std::io::Read;
            let mut s = String::new();
            std::io::stdin().read_to_string(&mut s).unwrap();
            s
        };
        let mut iter = s.split_whitespace();
        input_inner!{iter, $($r)*}
    };
}
/// Recursive helper for `input!`: peels one `name: type` binding at a time.
macro_rules! input_inner {
    ($iter:expr) => {};
    ($iter:expr, ) => {};
    // $var: identifier to bind, $t: a single type spec to read
    ($iter:expr, $var:ident : $t:tt $($r:tt)*) => {
        let $var = read_value!($iter, $t);
        // recurse to handle the remaining bindings
        input_inner!{$iter $($r)*}
    };
}
/// Parses one value of the requested shape from the token iterator.
macro_rules! read_value {
    // Tuple: read each component type in order.
    ($iter:expr, ( $($t:tt),* )) => {
        ( $(read_value!($iter, $t)),* )
    };
    // Vec: read `$len` values of shape `$t`.
    ($iter:expr, [ $t:tt ; $len:expr ]) => {
        (0..$len).map(|_| read_value!($iter, $t)).collect::<Vec<_>>()
    };
    // A token read as individual characters.
    ($iter:expr, chars) => {
        read_value!($iter, String).chars().collect::<Vec<char>>()
    };
    // 1-origin index converted to 0-origin.
    ($iter:expr, usize1) => {
        read_value!($iter, usize) - 1
    };
    // Base case: a plain type, parsed from the next token.
    ($iter:expr, $t:ty) => {
        $iter.next().unwrap().parse::<$t>().expect("Parse error")
    };
}
// =========
// use std::cmp::Ordering;
// ======UnionFind======
/// Disjoint-set (union-find) with union by size and path compression.
///
/// `table[i]` stores the negated set size when `i` is a root, otherwise the
/// parent index. `number` tracks how many disjoint sets remain.
#[derive(Debug)]
struct UnionFind {
    table: Vec<i64>,
    number: usize,
}
impl UnionFind {
    /// Creates `n` singleton sets (every slot a root of size 1).
    fn new(n: usize) -> Self {
        UnionFind {
            // -1 == "root of a set of size 1". Replaces the original
            // zero-fill-then-overwrite loop with a direct fill.
            table: vec![-1; n],
            number: n,
        }
    }
}
impl UnionFind {
    /// Returns the root of `x`, compressing the path along the way.
    fn root(&mut self, x: usize) -> usize {
        // Negative entry => x is a root; otherwise follow (and shorten) the chain.
        if self.table[x] < 0 {
            x
        } else {
            let parent = self.table[x] as usize;
            let r = self.root(parent);
            self.table[x] = r as i64;
            r
        }
    }
    /// Do `a` and `b` belong to the same set?
    fn same(&mut self, a: usize, b: usize) -> bool {
        self.root(a) == self.root(b)
    }
    /// Merges the sets containing `a` and `b` (no-op when already merged).
    fn union(&mut self, a: usize, b: usize) {
        let a_root = self.root(a);
        let b_root = self.root(b);
        if a_root == b_root {
            return;
        }
        // Sizes are stored negated, so "greater" means the *smaller* set;
        // attach the smaller tree beneath the larger one.
        if self.table[a_root] > self.table[b_root] {
            self.table[b_root] += self.table[a_root];
            self.table[a_root] = b_root as i64;
        } else {
            self.table[a_root] += self.table[b_root];
            self.table[b_root] = a_root as i64;
        }
        self.number -= 1;
    }
    /// Size of the set containing `x`.
    fn size(&mut self, x: usize) -> usize {
        let ri = self.root(x);
        -self.table[ri] as usize
    }
    /// Number of disjoint sets remaining.
    fn count(&self) -> usize {
        self.number
    }
}
// ======Kruskal======
/// A weighted, directed-as-stored edge used by Kruskal's algorithm.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct Edge {
    from: usize,
    to: usize,
    cost: i64,
}
impl Ord for Edge {
    /// Orders primarily by cost, tie-breaking on the endpoints.
    ///
    /// Fix: comparing by cost alone violated `Ord`'s contract (`cmp` could
    /// return `Equal` for edges the derived `PartialEq` considers unequal)
    /// and, after a stable sort, identical duplicate edges could be
    /// separated by other equal-cost edges so the subsequent `dedup` in
    /// `Kruscal::build` missed them. A total order consistent with
    /// `PartialEq` makes duplicates sort adjacently.
    fn cmp(&self, other: &Edge) -> std::cmp::Ordering {
        self.cost
            .cmp(&other.cost)
            .then_with(|| self.from.cmp(&other.from))
            .then_with(|| self.to.cmp(&other.to))
    }
}
impl PartialOrd for Edge {
    fn partial_cmp(&self, other: &Edge) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
#[derive(Debug)]
/// Namespace-only struct for Kruskal's MST algorithm (spelling of the name
/// kept as-is because callers refer to `Kruscal::build`).
struct Kruscal {}
impl Kruscal {
    // build minimum spanning tree
    /// Builds a minimum spanning tree of a graph with `v` vertices.
    /// Returns the chosen edges and their total cost. `edges` is sorted
    /// ascending and deduplicated in place as a side effect.
    fn build(v: usize, edges: &mut Vec<Edge>) -> (Vec<Edge>, i64) {
        let mut uf = UnionFind::new(v);
        // sort ascending order
        edges.sort();
        // remove duplicated edge
        edges.dedup();
        let mut res_tree: Vec<Edge> = vec![];
        let mut res: i64 = 0;
        // till graph is connected: greedily keep every edge that joins two
        // different components
        for e in edges {
            if !uf.same(e.from, e.to) {
                uf.union(e.from, e.to);
                res_tree.push(*e);
                res += e.cost;
            }
        }
        (res_tree, res)
    }
}
/// Reads `v e` followed by `e` edges `(from, to, cost)` from stdin and
/// prints the total cost of a minimum spanning tree.
fn main() {
    input! {
        v: usize,
        e: usize,
        edata: [(usize,usize,i64);e]
    }
    // (equivalent iterator-based construction, kept for reference)
    // let mut edges: Vec<Edge> = edata
    // .iter()
    // .map(|(f, t, c)| Edge {
    // from: *f,
    // to: *t,
    // cost: *c,
    // })
    // .collect();
    let mut edges = vec![];
    for (f, t, c) in edata {
        edges.push(Edge {
            from: f,
            to: t,
            cost: c,
        });
    }
    let k = Kruscal::build(v, &mut edges);
    // k.0 is the tree itself; only the total cost is printed
    println!("{:?}", k.1);
}
|
/// Placeholder entry point; always returns an error until the command is
/// implemented.
pub fn run() -> Result<(), anyhow::Error> {
    anyhow::bail!("not yet implemented")
}
|
#![feature(
allocator_api,
anonymous_lifetime_in_impl_trait,
core_intrinsics,
fmt_internals,
iterator_try_collect,
let_chains,
nonzero_ops,
slice_ptr_get,
step_trait,
strict_provenance,
try_blocks
)]
use crate::heap::{DisplayWithSymbolTable, Struct, SymbolId, Tag};
use execution_controller::RunForever;
use fiber::{EndedReason, VmEnded};
use heap::{Function, Heap, InlineObject, SymbolTable};
use lir::Lir;
use std::borrow::Borrow;
use tracer::Tracer;
use tracing::{debug, error};
use vm::{Status, Vm};
mod builtin_functions;
pub mod channel;
pub mod execution_controller;
pub mod fiber;
pub mod heap;
pub mod lir;
pub mod mir_to_lir;
pub mod tracer;
mod utils;
pub mod vm;
impl<L: Borrow<Lir>, T: Tracer> Vm<L, T> {
    /// Runs the VM without a fuel limit until it can make no more progress,
    /// then tears it down and returns the ended state. Logs an error when
    /// the VM stalls waiting on channel operations instead of finishing.
    pub fn run_until_completion(mut self, tracer: &mut T) -> VmEnded {
        self.run(&mut RunForever, tracer);
        if let Status::WaitingForOperations = self.status() {
            error!("The module waits on channel operations. Perhaps, the code tried to read from a channel without sending a packet into it.");
            // TODO: Show stack traces of all fibers?
        }
        self.tear_down(tracer)
    }
}
impl VmEnded {
    /// Extracts the exported `main` function (together with the heap that
    /// owns it) from a finished run, or a human-readable error when the run
    /// panicked or the module doesn't export a valid main function.
    pub fn into_main_function(
        self,
        symbol_table: &SymbolTable,
    ) -> Result<(Heap, Function), String> {
        match self.reason {
            EndedReason::Finished(return_value) => {
                match return_value_into_main_function(symbol_table, return_value) {
                    Ok(main) => Ok((self.heap, main)),
                    Err(err) => Err(err.to_string()),
                }
            }
            EndedReason::Panicked(panic) => Err(format!(
                "The module panicked at {}: {}",
                panic.responsible, panic.reason,
            )),
        }
    }
}
/// Interprets a module's return value as its export struct and looks up the
/// `main` symbol, verifying that it is a function.
///
/// Panics if the return value is not a struct (the `try_into().unwrap()`
/// below); the two user-facing failure modes are returned as `Err` strings.
pub fn return_value_into_main_function(
    symbol_table: &SymbolTable,
    return_value: InlineObject,
) -> Result<Function, &'static str> {
    let exported_definitions: Struct = return_value.try_into().unwrap();
    debug!(
        "The module exports these definitions: {}",
        DisplayWithSymbolTable::to_string(&exported_definitions, symbol_table),
    );
    exported_definitions
        .get(Tag::create(SymbolId::MAIN))
        .ok_or("The module doesn't export a main function.")
        .and_then(|main| {
            main.try_into()
                .map_err(|_| "The exported main object is not a function.")
        })
}
|
extern crate oncemutex as om;
use std::sync::mpsc::{SyncSender, Receiver};
use std::io::Read;
pub mod core0;
/// Privilege level plus base address attached to every bus message.
#[derive(Default)]
pub struct Permission {
    privilege: u8,
    address: u32,
}
/// Envelope for data sent across a bus: the sender's `permission`, the
/// originating `bus` id, and the payload itself.
pub struct Com<T> {
    pub permission: Permission,
    pub bus: usize,
    pub data: T,
}
impl<T> Com<T> {
    /// Wraps a payload together with its permission and originating bus id.
    fn new(permission: Permission, bus: usize, data: T) -> Self {
        // Field-init shorthand: parameter names mirror the struct fields.
        Com { permission, bus, data }
    }
}
/// The emulated UARC synchronous bus
///
/// Bundles the sending halves of the four channels a core exposes; `W` is
/// the machine word type carried by `send`.
pub struct SenderBus<W> {
    /// Associated bus ID we must send to the receiver
    pub bus: usize,
    /// Send a stream to the target
    pub stream: SyncSender<Com<Box<Read>>>,
    /// Incept the target
    pub incept: SyncSender<Com<(Permission, Box<Read>)>>,
    /// Interrupt a target with a word
    pub send: SyncSender<Com<W>>,
    /// Kill the target
    pub kill: SyncSender<Com<()>>,
}
impl<W> SenderBus<W> {
    // NOTE(review): all four methods use `.ok().unwrap()` — the `.ok()`
    // drops the `SendError` payload (presumably so the unwrap works without
    // requiring the payload to be `Debug` — confirm); each panics if the
    // receiving core hung up.
    /// Send a channel to the target
    fn stream(&self, permission: Permission, data: Box<Read>) {
        self.stream.send(Com::new(permission, self.bus, data)).ok().unwrap();
    }
    /// Send a channel and a fresh set of permissions to incept a core
    fn incept(&self, permission: Permission, target_permission: Permission, instructions: Box<Read>) {
        self.incept.send(Com::new(permission, self.bus, (target_permission, instructions))).ok().unwrap();
    }
    /// Send a word to the target
    fn send(&self, permission: Permission, value: W) {
        self.send.send(Com::new(permission, self.bus, value)).ok().unwrap();
    }
    /// Send a kill signal to the target
    fn kill(&self, permission: Permission) {
        self.kill.send(Com::new(permission, self.bus, ())).ok().unwrap();
    }
}
/// Core is a trait that cores implement to allow Bus to be created connecting cores
pub trait Core<W> {
    /// Set the internal buses
    fn append_sender(&mut self, sender: SenderBus<W>);
    /// Acquire a bus located at a particular bus ID
    /// (method name keeps the original "aquire" spelling for callers)
    fn aquire_sender(&mut self) -> SenderBus<W>;
    /// Begins operation in the current thread
    /// Killing the core will not end the thread.
    fn begin(&mut self);
}
|
use once_cell::sync::Lazy;
use prometheus::{IntCounterVec, IntGauge};
/// Counter of txn status in tx_pool
// Labelled by "status"; registered lazily on first access.
pub static TXN_STATUS_COUNTERS: Lazy<IntCounterVec> = Lazy::new(|| {
    register_int_counter_vec!(
        "txpool_txn_stats",
        "Counters of how many txn's stats in tx pool",
        &["status"]
    )
    .unwrap()
});
/// Gauge of the current number of transactions in the txpool; registered
/// lazily on first access.
pub static TXPOOL_TXNS_GAUGE: Lazy<IntGauge> = Lazy::new(|| {
    register_int_gauge!("txpool_txn_nums", "Counter of how many txns in txpool").unwrap()
});
|
use input_i_scanner::InputIScanner;
use union_find::UnionFind;
/// Reads a graph (`n` vertices, `m` undirected 1-origin edges) and prints,
/// for each vertex index u (ascending), the number of connected components
/// among the vertices {u+1, …, n-1}: vertices are re-added from the highest
/// index down while a union-find merges edges whose endpoints are both
/// already present.
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());
    macro_rules! scan {
        (($($t: ty),+)) => {
            ($(scan!($t)),+)
        };
        ($t: ty) => {
            _i_i.scan::<$t>() as $t
        };
        (($($t: ty),+); $n: expr) => {
            std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>()
        };
        ($t: ty; $n: expr) => {
            std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>()
        };
    }
    let (n, m) = scan!((usize, usize));
    let edges = scan!((usize, usize); m);
    let mut g = vec![vec![]; n];
    for (a, b) in edges {
        // input is 1-origin; store a 0-origin adjacency list both ways
        g[a - 1].push(b - 1);
        g[b - 1].push(a - 1);
    }
    let mut uf = UnionFind::new(n);
    let mut con = 0usize;
    let mut ans = Vec::new();
    for u in (0..n).rev() {
        // record the component count BEFORE adding vertex u
        ans.push(con);
        // adding u starts as a new component…
        con += 1;
        for &v in &g[u] {
            if v > u {
                if !uf.same(u, v) {
                    // …unless an edge links it to an already-added vertex
                    uf.unite(u, v);
                    con -= 1;
                }
            }
        }
    }
    // ans was collected for u = n-1 .. 0; emit in ascending order of u
    ans.reverse();
    for ans in ans {
        println!("{}", ans);
    }
}
|
use std::collections::HashMap;
use std::hash::Hash;
/// Parses the embedded `input` file into a (row, col) -> risk-digit map and
/// prints both puzzle answers.
/// NOTE(review): structure looks like Advent of Code 2021 day 15 — confirm.
fn main() {
    let map: HashMap<(i16, i16), usize> = include_str!("input").lines().enumerate().flat_map(|(i, x)| {
        x.split("").filter(|x| !x.is_empty()).map(str::parse).enumerate().map(|(j, d)| ((i as i16, j as i16),d.unwrap())).collect::<Vec<_>>()
    }).collect();
    println!("Part one: {}", part_one(&map));
    println!("Part two: {}", part_two(&map));
}
/// Part one: cheapest path from (0,0) to (99,99) over the literal grid.
/// NOTE(review): hard-codes a 100x100 input — confirm against the data file.
fn part_one(map: &HashMap<(i16, i16), usize>) -> usize {
    get_lowest_score(
        (0,0),
        |node| *map.get(node).unwrap(),
        // 4-neighborhood, restricted to cells present in the grid
        |node| [
            (node.0 + 1, node.1), (node.0, node.1 - 1), (node.0, node.1 + 1), (node.0 - 1, node.1)
        ].into_iter().filter(|x| {
            map.get(x) != None
        }).collect(),
        (99,99))
}
/// Part two: same search over the grid tiled 5x5; each tile adds its
/// Manhattan tile-distance to the risk, wrapping 10 back to 1.
fn part_two(map: &HashMap<(i16, i16), usize>) -> usize {
    get_lowest_score(
        (0,0),
        |coor: &(i16, i16)| {
            // map the virtual 500x500 coordinate back onto the base tile
            let x = (coor.0)%100;
            let y = (coor.1)%100;
            let x_inc = ((coor.0)/100) as usize;
            let y_inc = ((coor.1)/100) as usize;
            if let Some(risk) = map.get(&(x,y)) {
                let mut new_risk = *risk + x_inc + y_inc;
                // risks wrap 9 -> 1 (never 0)
                if new_risk > 9 {
                    new_risk= (new_risk%10)+1
                }
                return new_risk;
            };
            // unreachable while neighbors are bounds-filtered below
            panic!("");
        },
        |node| [
            (node.0 + 1, node.1), (node.0, node.1 - 1), (node.0, node.1 + 1), (node.0 - 1, node.1)
        ].into_iter().filter(|node| {
            node.0 >= 0 && node.1 >= 0 && node.0 < 500 && node.1 < 500
        }).collect(),
        (499,499))
}
/// Dijkstra's algorithm over an implicit graph.
///
/// `get_node_score` returns the cost of *entering* a node and
/// `get_connected_nodes` enumerates its neighbours. Returns the minimal
/// total entry-cost of a path from `start_node` (whose own cost is not
/// counted) to `end_node`.
///
/// Fix: the source had been mangled by an HTML-entity round-trip — every
/// `&curr…` became `¤…` (`&curren;`), so `&current_node` /
/// `&current_node_score` were unparseable. Reconstructed from context.
///
/// Panics if `end_node` is unreachable (the frontier empties).
fn get_lowest_score<T, U, V>(start_node: T, get_node_score: U, get_connected_nodes: V, end_node: T) -> usize
where
    T: Clone + Eq + Hash + Copy,
    U: Fn(&T) -> usize,
    V: Fn(&T) -> Vec<T>,
{
    // Settled nodes -> final minimal score.
    let mut lowest_scores: HashMap<T, usize> = HashMap::new();
    // Frontier: tentative best scores for nodes not yet settled.
    let mut working_scores: HashMap<T, usize> = HashMap::new();
    working_scores.insert(start_node, 0);
    while !lowest_scores.contains_key(&end_node) {
        // Pick the unsettled node with the smallest tentative score.
        // (Linear scan, as in the original; a BinaryHeap would be faster.)
        let (&current_node, &current_node_score) = working_scores
            .iter()
            .min_by(|(_, a), (_, b)| a.cmp(b))
            .expect("end_node unreachable: frontier exhausted");
        lowest_scores.insert(current_node, current_node_score);
        working_scores.remove(&current_node);
        // Relax every edge leaving the freshly settled node.
        for neighbor in get_connected_nodes(&current_node)
            .into_iter()
            .filter(|node| !lowest_scores.contains_key(node))
        {
            let new_score = current_node_score + get_node_score(&neighbor);
            let entry = working_scores.entry(neighbor).or_insert(usize::MAX);
            if new_score < *entry {
                *entry = new_score;
            }
        }
    }
    lowest_scores[&end_node]
}
|
use ggez::graphics;
use ggez::{Context, GameResult};
use nalgebra as na;
use specs::{Join, World};
use assets::Assets;
use gamestate::BaseSprite;
use gamestate::Position;
pub const TILE_SIZE_PX: (f32, f32) = (10.0, 10.0);
/// Draws every entity that has both a `Position` and a `BaseSprite`,
/// converting grid coordinates to pixels via `TILE_SIZE_PX`.
pub fn render(ctx: &mut Context, world: &World, assets: &Assets) -> GameResult {
    let pos_s = world.read_storage::<Position>();
    let vis_s = world.read_storage::<BaseSprite>();
    // join() yields only entities that carry both components
    for (pos, vis) in (&pos_s, &vis_s).join() {
        graphics::draw(
            ctx,
            assets.fetch_drawable(vis.drawable),
            (
                na::Point2::new(
                    pos.x() as f32 * TILE_SIZE_PX.0,
                    pos.y() as f32 * TILE_SIZE_PX.1,
                ),
                vis.color,
            ),
        )?;
    }
    Ok(())
}
|
//! SSE4.2 (pcmpestri) accelerated substring search
//!
//! Using the two way substring search algorithm.
// wssm word size string matching<br>
// wslm word size lexicographical maximum suffix
//
#![allow(dead_code)]
extern crate unchecked_index;
extern crate memchr;
use std::cmp;
use std::iter::Zip;
use std::ptr;
use self::unchecked_index::get_unchecked;
use TwoWaySearcher;
/// Convenience shorthand: zip any two `IntoIterator`s, stopping at the
/// shorter of the two.
fn zip<I, J>(i: I, j: J) -> Zip<I::IntoIter, J::IntoIter>
    where I: IntoIterator,
          J: IntoIterator
{
    let left = i.into_iter();
    left.zip(j)
}
// `pcmpestri`/`pcmpestrm` imm8 aggregation-mode flags (Intel SSE4.2).
const EQUAL_ANY: u8 = 0b0000; // "equal any": character-set membership
const EQUAL_EACH: u8 = 0b1000; // "equal each": element-wise string compare
const EQUAL_ORDERED: u8 = 0b1100; // "equal ordered": substring search
/// `pcmpestri`
///
/// “Packed compare explicit length strings (return index)”
///
/// PCMPESTRI xmm1, xmm2/m128, imm8
///
/// Return value: least index for start of (partial) match, (16 if no match).
///
/// The needle is passed pre-loaded as two little-endian u64 halves
/// (`needle_1`, `needle_2`); the haystack is read from `text + offset`.
// NOTE(review): pre-RFC-2873 `asm!` syntax (llvm_asm era); this only builds
// on an old nightly toolchain.
#[inline(always)]
unsafe fn pcmpestri_16(text: *const u8, offset: usize, text_len: usize,
                       needle_1: u64, needle_2: u64, needle_len: usize) -> u32 {
    //debug_assert!(text_len + offset <= text.len()); // saturates at 16
    //debug_assert!(needle_len <= 16); // saturates at 16
    let res: u32;
    // 0xC = 12, Equal Ordered comparison
    //
    // movlhps xmm0, xmm1 Move low word of xmm1 to high word of xmm0
    asm!("movlhps $1, $2
          pcmpestri $1, [$3 + $4], $5"
         : // output operands
         "={ecx}"(res)
         : // input operands
         "x"(needle_1), // operand 1 = needle `x` = sse register
         "x"(needle_2), // operand 1 = needle
         "r"(text), // operand 2 pointer = haystack
         "r"(offset), // operand 2 offset
         "i"(EQUAL_ORDERED),
         "{rax}"(needle_len),// length of operand 1 = needle
         "{rdx}"(text_len) // length of operand 2 = haystack
         : // clobbers
         "cc"
         : "intel" // options
    );
    res
}
/// `pcmpestrm`
///
/// “Packed compare explicit length strings (return mask)”
///
/// PCMPESTRM xmm1, xmm2/m128, imm8
///
/// Return value: bitmask in the 16 lsb of the return value.
// NOTE(review): pre-RFC-2873 `asm!` syntax (llvm_asm era); this only builds
// on an old nightly toolchain.
#[inline(always)]
unsafe fn pcmpestrm_eq_each(text: *const u8, offset: usize, text_len: usize,
                            needle: *const u8, noffset: usize, needle_len: usize) -> u64 {
    // NOTE: text *must* be readable for 16 bytes
    // NOTE: needle *must* be readable for 16 bytes
    //debug_assert!(text_len + offset <= text.len()); // saturates at 16
    //debug_assert!(needle_len <= 16); // saturates at 16
    let res: u64;
    // 0x8 = Equal Each comparison (element-wise)
    //
    // movdqu loads the (possibly unaligned) needle into xmm0 first
    asm!("movdqu xmm0, [$1 + $2]
          pcmpestrm xmm0, [$3 + $4], $5"
         : // output operands
         "={xmm0}"(res)
         : // input operands
         "r"(needle), // operand 1 = needle
         "r"(noffset), // operand 1 = needle offset
         "r"(text), // operand 2 pointer = haystack
         "r"(offset), // operand 2 offset
         "i"(EQUAL_EACH),
         "{rax}"(needle_len),// length of operand 1 = needle
         "{rdx}"(text_len) // length of operand 2 = haystack
         : // clobbers
         "cc"
         : "intel" // options
    );
    res
}
/// Return critical position, period.
/// critical position is zero-based
///
/// Note: If the period is long, the correct period is not returned.
/// The approximation to a long period must be computed separately.
#[inline(never)]
fn crit_period(pat: &[u8]) -> (usize, usize) {
    let forward = TwoWaySearcher::maximal_suffix(pat, false);
    let reversed = TwoWaySearcher::maximal_suffix(pat, true);
    // Keep whichever maximal suffix starts later; ties favor the forward one.
    if forward.0 >= reversed.0 { forward } else { reversed }
}
/// Search for first possible match of `pat` -- might be just a byte
/// Return `(pos, length)` length of match
// Test-only convenience wrapper: loads the needle words and delegates to
// `first_start_of_match_inner`.
#[cfg(test)]
fn first_start_of_match(text: &[u8], pat: &[u8]) -> Option<(usize, usize)> {
    // not safe for text that is non aligned and ends at page boundary
    let patl = pat.len();
    assert!(patl <= 16);
    // load pat as a little endian word
    let (patw1, patw2) = pat128(pat);
    first_start_of_match_inner(text, pat, patw1, patw2)
}
/// Safe wrapper around pcmpestri to find first match of `pat` in `text`.
/// `p1`, `p2` are the first two words of `pat` and *must* match.
/// Length given by length of `pat`, only first 16 bytes considered.
///
/// Returns `(pos, mlen)` where `mlen` is the length of the (possibly
/// partial) match found at `pos`; callers re-verify partial matches.
fn first_start_of_match_inner(text: &[u8], pat: &[u8], p1: u64, p2: u64) -> Option<(usize, usize)> {
    // align the text pointer down to a 16-byte boundary so pcmpestri reads
    // never cross into an unmapped page
    let tp = text.as_ptr();
    let tp_align_offset = tp as usize & 0xF;
    let init_len;
    let tp_aligned;
    unsafe {
        if tp_align_offset != 0 {
            init_len = 16 - tp_align_offset;
            tp_aligned = tp.offset(-(tp_align_offset as isize));
        } else {
            init_len = 0;
            tp_aligned = tp;
        };
    }
    let patl = pat.len();
    debug_assert!(patl <= 16);
    let mut offset = 0;
    // search the unaligned prefix first, bytewise
    if init_len > 0 {
        for start in 0..cmp::min(init_len, text.len()) {
            if text[start] != pat[0] {
                continue;
            }
            let mut mlen = 1;
            for (&a, &b) in zip(&text[start + 1..], &pat[1..]) {
                if a != b {
                    mlen = 0;
                    break;
                }
                mlen += 1;
            }
            // NOTE(review): returns from the first position whose byte
            // matches pat[0], with mlen possibly 0 after a later mismatch;
            // callers treat mlen < pat.len() as "re-verify here" — confirm
            // this is the intended contract.
            return Some((start, mlen))
        }
        offset += 16;
    }
    // aligned main loop: 16 bytes at a time via pcmpestri
    while text.len() >= offset - tp_align_offset + patl {
        unsafe {
            let tlen = text.len() - (offset - tp_align_offset);
            let ret = pcmpestri_16(tp_aligned, offset, tlen, p1, p2, patl) as usize;
            if ret == 16 {
                offset += 16;
            } else {
                // a match starting near the end of the window may be partial
                let match_len = cmp::min(patl, 16 - ret);
                return Some((offset - tp_align_offset + ret, match_len));
            }
        }
    }
    None
}
/// safe to search unaligned for first start of match
///
/// unsafe because the end of text must not be close (within 16 bytes) of a page boundary
unsafe fn first_start_of_match_unaligned(text: &[u8], pat_len: usize, p1: u64, p2: u64) -> Option<(usize, usize)> {
    let tp = text.as_ptr();
    debug_assert!(pat_len <= 16);
    debug_assert!(pat_len <= text.len());
    let mut offset = 0;
    // scan 16-byte windows directly (no alignment fixup — see safety note)
    while text.len() - pat_len >= offset {
        let tlen = text.len() - offset;
        let ret = pcmpestri_16(tp, offset, tlen, p1, p2, pat_len) as usize;
        if ret == 16 {
            offset += 16;
        } else {
            // a match starting near the end of the window may be partial
            let match_len = cmp::min(pat_len, 16 - ret);
            return Some((offset + ret, match_len));
        }
    }
    None
}
/// Sanity checks for `first_start_of_match`, including an exhaustive sweep
/// over all windows (lengths 1..=16) of a longer text, cross-checked
/// against `str::find`.
#[test]
fn test_first_start_of_match() {
    let text = b"abc";
    let longer = "longer text and so on";
    assert_eq!(first_start_of_match(text, b"d"), None);
    assert_eq!(first_start_of_match(text, b"c"), Some((2, 1)));
    assert_eq!(first_start_of_match(text, b"abc"), Some((0, 3)));
    assert_eq!(first_start_of_match(text, b"T"), None);
    assert_eq!(first_start_of_match(text, b"\0text"), None);
    assert_eq!(first_start_of_match(text, b"\0"), None);
    // test all windows
    for wsz in 1..17 {
        for window in longer.as_bytes().windows(wsz) {
            let str_find = longer.find(::std::str::from_utf8(window).unwrap());
            assert!(str_find.is_some());
            let first_start = first_start_of_match(longer.as_bytes(), window);
            assert!(first_start.is_some());
            let (pos, len) = first_start.unwrap();
            // a full-length match must agree with str::find; a partial
            // match may only report an earlier candidate position
            assert!(len <= wsz);
            assert!(len == wsz && Some(pos) == str_find
                    || pos <= str_find.unwrap());
        }
    }
}
/// Finds a 2-byte pattern, returning the matched byte range `(start, end)`.
fn find_2byte_pat(text: &[u8], pat: &[u8]) -> Option<(usize, usize)> {
    debug_assert!(text.len() >= pat.len());
    debug_assert!(pat.len() == 2);
    // Search for the second byte of the pattern, not the first, better for
    // scripts where we have two-byte encoded codepoints (the first byte will
    // repeat much more often than the second).
    let mut off = 1;
    while let Some(i) = memchr::memchr(pat[1], &text[off..]) {
        // check whether the byte *before* the hit is the pattern's first byte
        match text.get(off + i - 1) {
            None => break,
            Some(&c) if c == pat[0] => return Some((off + i - 1, off + i + 1)),
            _ => off += i + 1,
        }
    }
    None
}
/// Simd text search optimized for short patterns (<= 8 bytes)
///
/// Scans the "safe" region (all but the last 16 bytes) with unaligned SIMD
/// windows, then finishes the tail with the page-boundary-safe variant.
fn find_short_pat(text: &[u8], pat: &[u8]) -> Option<usize> {
    debug_assert!(pat.len() <= 8);
    /*
    if pat.len() == 2 {
        return find_2byte_pat(text, pat);
    }
    */
    // pattern fits one u64 word; the second needle word is unused (0)
    let (r1, _) = pat128(pat);
    // safe part of text -- everything but the last 16 bytes
    let safetext = &text[..cmp::max(text.len(), 16) - 16];
    let mut pos = 0;
    'search: loop {
        if pos + pat.len() > safetext.len() {
            break;
        }
        // find the next occurence
        match unsafe { first_start_of_match_unaligned(&safetext[pos..], pat.len(), r1, 0) } {
            None => break, // no matches
            Some((mpos, mlen)) => {
                pos += mpos;
                if mlen < pat.len() {
                    // partial SIMD match: verify the rest bytewise
                    if pos > text.len() - pat.len() {
                        return None;
                    }
                    for (&a, &b) in zip(&text[pos + mlen..], &pat[mlen..]) {
                        if a != b {
                            pos += 1;
                            continue 'search;
                        }
                    }
                }
                return Some(pos);
            }
        }
    }
    // tail: same search but with the alignment-safe routine
    'tail: loop {
        if pos > text.len() - pat.len() {
            return None;
        }
        // find the next occurence
        match first_start_of_match_inner(&text[pos..], pat, r1, 0) {
            None => return None, // no matches
            Some((mpos, mlen)) => {
                pos += mpos;
                if mlen < pat.len() {
                    // partial match: verify the rest bytewise
                    if pos > text.len() - pat.len() {
                        return None;
                    }
                    for (&a, &b) in zip(&text[pos + mlen..], &pat[mlen..]) {
                        if a != b {
                            pos += 1;
                            continue 'tail;
                        }
                    }
                }
                return Some(pos);
            }
        }
    }
}
/// `find` finds the first occurrence of `pattern` in the `text`.
///
/// This is the SSE42 accelerated version.
///
/// Dispatch: empty pattern -> 0; 1 byte -> memchr; <= 6 bytes -> short-pat
/// SIMD scan; otherwise the two-way algorithm with SIMD-accelerated search
/// for the right half, split into a long-period (no memory) and a
/// short-period (memorized) case, each followed by a tail loop that is safe
/// near the end of the buffer.
pub fn find(text: &[u8], pattern: &[u8]) -> Option<usize> {
    let pat = pattern;
    if pat.len() == 0 {
        return Some(0);
    }
    if text.len() < pat.len() {
        return None;
    }
    if pat.len() == 1 {
        return memchr::memchr(pat[0], text);
    } else if pat.len() <= 6 {
        return find_short_pat(text, pat);
    }
    // real two way algorithm
    //
    // `memory` is the number of bytes of the left half that we already know
    let (crit_pos, mut period) = crit_period(pat);
    let mut memory;
    if &pat[..crit_pos] == &pat[period.. period + crit_pos] {
        memory = 0; // use memory
    } else {
        memory = !0; // !0 means memory is unused
        // approximation to the true period
        period = cmp::max(crit_pos, pat.len() - crit_pos) + 1;
    }
    //println!("pat: {:?}, crit={}, period={}", pat, crit_pos, period);
    let (left, right) = pat.split_at(crit_pos);
    // only the first 16 bytes of the right half fit in a SIMD needle
    let (right16, _right17) = right.split_at(cmp::min(16, right.len()));
    assert!(right.len() != 0);
    let (r1, r2) = pat128(right);
    // safe part of text -- everything but the last 16 bytes
    let safetext = &text[..cmp::max(text.len(), 16) - 16];
    let mut pos = 0;
    if memory == !0 {
        // Long period case -- no memory, period is an approximation
        'search: loop {
            if pos + pat.len() > safetext.len() {
                break;
            }
            // find the next occurence of the right half
            let start = crit_pos;
            match unsafe { first_start_of_match_unaligned(&safetext[pos + start..], right16.len(), r1, r2) } {
                None => break, // no matches
                Some((mpos, mlen)) => {
                    pos += mpos;
                    let mut pfxlen = mlen;
                    if pfxlen < right.len() {
                        // extend a partial SIMD match bytewise
                        pfxlen += shared_prefix(&text[pos + start + mlen..], &right[mlen..]);
                    }
                    if pfxlen != right.len() {
                        // partial match
                        // skip by the number of bytes matched
                        pos += pfxlen + 1;
                        continue 'search;
                    } else {
                        // matches right part
                    }
                }
            }
            // See if the left part of the needle matches
            // XXX: Original algorithm compares from right to left here
            if left != &text[pos..pos + left.len()] {
                pos += period;
                continue 'search;
            }
            return Some(pos);
        }
    } else {
        // Short period case -- use memory, true period
        'search_memory: loop {
            if pos + pat.len() > safetext.len() {
                break;
            }
            // find the next occurence of the right half
            //println!("memory trace pos={}, memory={}", pos, memory);
            let mut pfxlen = if memory == 0 {
                let start = crit_pos;
                match unsafe { first_start_of_match_unaligned(&safetext[pos + start..], right16.len(), r1, r2) } {
                    None => break, // no matches
                    Some((mpos, mlen)) => {
                        pos += mpos;
                        mlen
                    }
                }
            } else {
                // bytes remembered from the previous attempt are known to match
                memory - crit_pos
            };
            if pfxlen < right.len() {
                pfxlen += shared_prefix(&text[pos + crit_pos + pfxlen..], &right[pfxlen..]);
            }
            if pfxlen != right.len() {
                // partial match
                // skip by the number of bytes matched
                pos += pfxlen + 1;
                memory = 0;
                continue 'search_memory;
            } else {
                // matches right part
            }
            // See if the left part of the needle matches
            // XXX: Original algorithm compares from right to left here
            if memory <= left.len() && &left[memory..] != &text[pos + memory..pos + left.len()] {
                pos += period;
                memory = pat.len() - period;
                continue 'search_memory;
            }
            return Some(pos);
        }
    }
    // no memory used for final part
    'tail: loop {
        if pos > text.len() - pat.len() {
            return None;
        }
        // find the next occurence of the right half
        let start = crit_pos;
        match first_start_of_match_inner(&text[pos + start..], right16, r1, r2) {
            None => return None, // no matches
            Some((mpos, mlen)) => {
                pos += mpos;
                let mut pfxlen = mlen;
                if pfxlen < right.len() {
                    pfxlen += shared_prefix(&text[pos + start + mlen..], &right[mlen..]);
                }
                if pfxlen != right.len() {
                    // partial match
                    // skip by the number of bytes matched
                    pos += pfxlen + 1;
                    continue 'tail;
                } else {
                    // matches right part
                }
            }
        }
        // See if the left part of the needle matches
        // XXX: Original algorithm compares from right to left here
        if left != &text[pos..pos + left.len()] {
            pos += period;
            continue 'tail;
        }
        return Some(pos);
    }
}
#[test]
fn test_find() {
    // Trivial hit and miss on a tiny haystack.
    let text = b"abc";
    assert_eq!(find(text, b"d"), None);
    assert_eq!(find(text, b"c"), Some(2));
    let longer = "longer text and so on, a bit more";
    // Every substring of `longer` must be found at the same index that
    // `str::find` reports.
    for wsz in 1..longer.len() {
        for window in longer.as_bytes().windows(wsz) {
            let expected = longer.find(::std::str::from_utf8(window).unwrap());
            assert!(expected.is_some());
            assert_eq!(find(longer.as_bytes(), window), expected);
        }
    }
    // A needle longer than 16 bytes exercises the two-word pattern path.
    let pat = b"ger text and so on";
    assert!(pat.len() > 16);
    assert_eq!(Some(3), find(longer.as_bytes(), pat));
    // Short-period needle.
    let (text, needle) = ("cbabababcbabababab", "abababab");
    assert_eq!(text.find(needle), find(text.as_bytes(), needle.as_bytes()));
    // Memoized (true period) case -- this is tricky.
    let (text, needle) = ("cbababababababababababababababab", "abababab");
    assert_eq!(text.find(needle), find(text.as_bytes(), needle.as_bytes()));
}
/// Load the first 16 bytes of `pat` into two words, little endian
///
/// `p1` receives bytes 0..8, `p2` receives bytes 8..16; missing bytes
/// (when `pat.len() < 16`) are left as zero.
///
/// NOTE(review): the raw copy produces the host's native byte order, so
/// the "little endian" claim holds on LE targets (x86) only — confirm if
/// big-endian targets are ever in scope.
fn pat128(pat: &[u8]) -> (u64, u64) {
    // load pat as a little endian word
    let (mut p1, mut p2) = (0, 0);
    unsafe {
        let patl = pat.len();
        // SAFETY: copies min(8, patl) bytes, in-bounds for `pat`, into the
        // 8-byte `p1`.
        ptr::copy_nonoverlapping(&pat[0],
                                 &mut p1 as *mut _ as *mut _,
                                 cmp::min(8, patl));
        if patl > 8 {
            // SAFETY: copies bytes 8..min(16, patl), in-bounds, into `p2`.
            ptr::copy_nonoverlapping(&pat[8],
                                     &mut p2 as *mut _ as *mut _,
                                     cmp::min(16, patl) - 8);
        }
    }
    (p1, p2)
}
/// Find longest shared prefix, return its length
///
/// Alignment safe: works for any text, pat.
///
/// Compares at most `min(text.len(), pat.len())` bytes: all but the last
/// (up to) 16 bytes are compared 16 at a time, the tail byte-by-byte.
pub fn shared_prefix(text: &[u8], pat: &[u8]) -> usize {
    let tp = text.as_ptr();
    let tlen = text.len();
    let pp = pat.as_ptr();
    let plen = pat.len();
    // Only the first min(tlen, plen) bytes can possibly match.
    let len = cmp::min(tlen, plen);
    unsafe {
        // TODO: do non-aligned prefix manually too(?) aligned text or pat..
        // all but the end we can process with pcmpestrm
        let initial_part = len.saturating_sub(16);
        let mut prefix_len = 0;
        let mut offset = 0;
        while offset < initial_part {
            let initial_tail = initial_part - offset;
            // NOTE(review): assumes the helper returns a 16-bit mask with a
            // set bit per equal byte position — consistent with the 0xffff
            // checks below; confirm against its definition.
            let mask = pcmpestrm_eq_each(tp, offset, initial_tail, pp, offset, initial_tail);
            // find zero in the first 16 bits
            if mask != 0xffff {
                // The first cleared bit marks the first mismatching byte.
                let first_bit_set = (mask ^ 0xffff).trailing_zeros() as usize;
                prefix_len += first_bit_set;
                return prefix_len;
            } else {
                prefix_len += cmp::min(initial_tail, 16);
            }
            offset += 16;
        }
        // so one block left, the last (up to) 16 bytes
        // unchecked slicing .. we don't want panics in this function
        let text_suffix = get_unchecked(text, prefix_len..len);
        let pat_suffix = get_unchecked(pat, prefix_len..len);
        // Scalar comparison for the tail.
        for (&a, &b) in zip(text_suffix, pat_suffix) {
            if a != b {
                break;
            }
            prefix_len += 1;
        }
        prefix_len
    }
}
#[test]
fn test_prefixlen() {
    let text_long = b"0123456789abcdefeffect";
    let text_long2 = b"9123456789abcdefeffect";
    let text_long3 = b"0123456789abcdefgffect";
    // Identical inputs share their full length.
    assert_eq!(shared_prefix(text_long, text_long), text_long.len());
    // One input a strict prefix of the other.
    assert_eq!(shared_prefix(b"abcd", b"abc"), 3);
    // Divergence in the final byte.
    assert_eq!(shared_prefix(b"abcd", b"abcf"), 3);
    // Divergence at the very first byte.
    assert_eq!(0, shared_prefix(text_long, text_long2));
    assert_eq!(0, shared_prefix(text_long, &text_long[1..]));
    // Divergence exactly at the 16-byte block boundary.
    assert_eq!(16, shared_prefix(text_long, text_long3));
    // Equal suffixes at every starting offset (exercises all tail lengths).
    for i in 0..=text_long.len() {
        assert_eq!(text_long.len() - i, shared_prefix(&text_long[i..], &text_long[i..]));
    }
    // Long inputs diverging near the end, at every starting offset.
    let l1 = [7u8; 1024];
    let mut l2 = [7u8; 1024];
    let off = 1000;
    l2[off] = 0;
    for i in 0..off {
        assert_eq!(shared_prefix(&l1[i..], &l2[i..]), off - i);
    }
}
|
use crate::prelude::*;
use std::os::raw::c_void;
use std::ptr;
/// FFI mirror of the Vulkan `VkPhysicalDeviceMaintenance3PropertiesKHR`
/// struct — field names follow the C spelling, hence the non-snake-case.
pub struct VkPhysicalDeviceMaintenance3PropertiesKHR {
    // Structure-type tag identifying this struct in a pNext chain.
    pub sType: VkStructureType,
    // Extension chain pointer; `new` always initialises it to null.
    pub pNext: *const c_void,
    pub maxPerSetDescriptors: u32,
    pub maxMemoryAllocationSize: VkDeviceSize,
}
impl VkPhysicalDeviceMaintenance3PropertiesKHR {
    /// Builds the struct with the correct `sType` tag and a null `pNext`.
    pub fn new(max_per_set_descriptors: u32, max_memory_allocation_size: VkDeviceSize) -> Self {
        VkPhysicalDeviceMaintenance3PropertiesKHR {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
            pNext: ptr::null(),
            maxPerSetDescriptors: max_per_set_descriptors,
            maxMemoryAllocationSize: max_memory_allocation_size,
        }
    }
}
|
use {
super::{ChunkMaterial, ChunkRenderMesherData, ChunkRenderMesherWorker, WorldViewer},
rough::{
amethyst::{
assets::{AssetStorage, Handle, Loader},
core::Transform,
derive::SystemDesc,
ecs::prelude::*,
renderer::Mesh,
},
terrain::ChunkComponent,
},
};
/// World resources and component storages fetched for
/// `ChunkRenderMesherSystem` each frame.
#[derive(SystemData)]
pub struct ChunkRenderMesherSystemData<'a> {
    entities: Entities<'a>,
    // Optional resources: `run` returns early when either has not been
    // inserted into the world yet.
    chunk_mesher_worker: Option<Write<'a, ChunkRenderMesherWorker>>,
    chunk_material: Option<Read<'a, ChunkMaterial>>,
    asset_loader: ReadExpect<'a, Loader>,
    mesh_storage: Read<'a, AssetStorage<Mesh>>,
    world_viewers: ReadStorage<'a, WorldViewer>,
    transforms: ReadStorage<'a, Transform>,
    updater: Read<'a, LazyUpdate>,
    // NOTE(review): unused in `run`; presumably listed so the dispatcher
    // registers/sets up these storages — confirm before removing.
    _chunks: ReadStorage<'a, ChunkComponent>,
    _meshes: ReadStorage<'a, Handle<Mesh>>,
}
/// ECS system that drives chunk mesh generation via the
/// `ChunkRenderMesherWorker` resource.
#[derive(SystemDesc)]
pub struct ChunkRenderMesherSystem;
impl ChunkRenderMesherSystem {
    // Stable identifier for this system; presumably used when registering
    // it with the dispatcher — confirm at call sites.
    pub const NAME: &'static str = "chunk_render_mesher";
}
impl<'a> System<'a> for ChunkRenderMesherSystem {
    type SystemData = ChunkRenderMesherSystemData<'a>;
    /// Per-frame entry point: forwards the fetched resources to the mesher
    /// worker, or does nothing if the worker/material resources are absent.
    fn run(&mut self, system_data: Self::SystemData) {
        rough::timer!("run");
        // Destructure once; `..` discards the `_chunks`/`_meshes` storages.
        let ChunkRenderMesherSystemData {
            entities,
            chunk_mesher_worker,
            chunk_material,
            asset_loader,
            mesh_storage,
            world_viewers,
            transforms,
            updater,
            ..
        } = system_data;
        // Bail out silently until both optional resources exist.
        let mut chunk_mesher_worker = match chunk_mesher_worker {
            Some(t) => t,
            None => return,
        };
        let chunk_material = match chunk_material {
            Some(t) => t,
            None => return,
        };
        // Hand everything to the worker, which owns the actual meshing logic.
        chunk_mesher_worker.update_meshes(ChunkRenderMesherData {
            entities,
            chunk_material,
            mesh_storage,
            world_viewers,
            transforms,
            updater,
            asset_loader,
        });
    }
}
|
extern crate tokio;
use std::error::Error;
use std::sync::Arc;
use tokio::net::TcpListener;
use tokio::sync::Mutex;
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // Accept loop: bind once, then spawn one task per incoming connection.
    let listener = TcpListener::bind("localhost:6969").await?;
    // Shared state intended for the connection handlers.
    let state = Arc::new(Mutex::new(String::new()));
    loop {
        // `handle_ws_connection` does not take the stream or the state yet,
        // so both are intentionally unused for now; the `_` prefix silences
        // the unused-variable warnings without changing behavior.
        let (_stream, client_address) = listener.accept().await?;
        let _state = Arc::clone(&state);
        tokio::spawn(async move {
            println!("Accepted connection from {}", &client_address);
            if let Err(e) = handle_ws_connection().await {
                println!("an error occurred; error = {:?}", e);
            }
        });
    }
}
/// Per-connection handler. Currently a stub that succeeds immediately;
/// the actual websocket logic is yet to be implemented.
async fn handle_ws_connection() -> Result<(), Box<dyn Error>> {
    Ok(())
}
|
#![allow(non_snake_case)]
use super::mix;
use bulletproofs::r1cs::ConstraintSystem;
use error::SpacesuitError;
use std::iter::once;
use value::AllocatedValue;
/// Enforces that the outputs are either a merge of the inputs: `D = A + B && C = 0`,
/// or the outputs are equal to the inputs `C = A && D = B`. See spec for more details.
/// Works for `k` inputs and `k` outputs.
///
/// # Errors
/// Returns `InvalidR1CSConstruction` when the input/output counts differ
/// or the number of intermediates is not `k - 2` (for `k >= 2`).
pub fn fill_cs<CS: ConstraintSystem>(
    cs: &mut CS,
    inputs: Vec<AllocatedValue>,
    intermediates: Vec<AllocatedValue>,
    outputs: Vec<AllocatedValue>,
) -> Result<(), SpacesuitError> {
    // If there is only one input and output, just constrain the input
    // and output to be equal to each other.
    if inputs.len() == 1 && outputs.len() == 1 {
        let i = inputs[0];
        let o = outputs[0];
        cs.constrain(i.q - o.q);
        cs.constrain(i.a - o.a);
        cs.constrain(i.t - o.t);
        return Ok(());
    }
    // A k-mix needs exactly k outputs and k-2 intermediates.
    // `saturating_sub` avoids a usize-underflow panic (debug builds) when
    // both vectors are empty, turning that case into a proper error.
    if inputs.len() != outputs.len() || intermediates.len() != inputs.len().saturating_sub(2) {
        return Err(SpacesuitError::InvalidR1CSConstruction);
    }
    let first_input = inputs[0].clone();
    let last_output = outputs[outputs.len() - 1].clone();
    // For each 2-mix, constrain A, B, C, D:
    for (((A, B), C), D) in
        // A = (first_input||intermediates)[i]
        once(first_input).chain(intermediates.clone().into_iter())
        // B = inputs[i+1]
        .zip(inputs.into_iter().skip(1))
        // C = outputs[i]
        .zip(outputs.into_iter())
        // D = (intermediates||last_output)[i]
        .zip(intermediates.into_iter().chain(once(last_output)))
    {
        mix::fill_cs(cs, A, B, C, D)?
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use bulletproofs::r1cs::{ProverCS, Variable, VerifierCS};
    use bulletproofs::{BulletproofGens, PedersenGens};
    use curve25519_dalek::scalar::Scalar;
    use merlin::Transcript;
    use std::cmp::max;
    use value::Value;
    // Helper functions to make the tests easier to read:
    // fixed (asset, tag) pairs so only the quantity varies per test.
    fn yuan(q: u64) -> Value {
        Value {
            q,
            a: 888u64.into(),
            t: 999u64.into(),
        }
    }
    fn peso(q: u64) -> Value {
        Value {
            q,
            a: 666u64.into(),
            t: 777u64.into(),
        }
    }
    fn zero() -> Value {
        Value::zero()
    }
    /// End-to-end prove/verify runs of the k-mix gadget for k = 1..=4,
    /// covering no-merge, partial-merge, full-merge and failure cases.
    #[test]
    fn k_mix_gadget() {
        // k=1
        // no merge, same asset types
        assert!(k_mix_helper(vec![peso(6)], vec![], vec![peso(6)]).is_ok());
        // error when merging different asset types
        assert!(k_mix_helper(vec![peso(3)], vec![], vec![yuan(3)]).is_err());
        // k=2. More extensive k=2 tests are in the MixGadget tests
        // no merge, different asset types
        assert!(k_mix_helper(vec![peso(3), yuan(6)], vec![], vec![peso(3), yuan(6)],).is_ok());
        // merge, same asset types
        assert!(k_mix_helper(vec![peso(3), peso(6)], vec![], vec![peso(0), peso(9)],).is_ok());
        // error when merging different asset types
        assert!(k_mix_helper(vec![peso(3), yuan(3)], vec![], vec![peso(0), yuan(6)],).is_err());
        // k=3
        // no merge, same asset types
        assert!(
            k_mix_helper(
                vec![peso(3), peso(6), peso(6)],
                vec![peso(6)],
                vec![peso(3), peso(6), peso(6)],
            )
            .is_ok()
        );
        // no merge, different asset types
        assert!(
            k_mix_helper(
                vec![peso(3), yuan(6), peso(6)],
                vec![yuan(6)],
                vec![peso(3), yuan(6), peso(6)],
            )
            .is_ok()
        );
        // merge first two
        assert!(
            k_mix_helper(
                vec![peso(3), peso(6), yuan(1)],
                vec![peso(9)],
                vec![peso(0), peso(9), yuan(1)],
            )
            .is_ok()
        );
        // merge last two
        assert!(
            k_mix_helper(
                vec![yuan(1), peso(3), peso(6)],
                vec![peso(3)],
                vec![yuan(1), peso(0), peso(9)],
            )
            .is_ok()
        );
        // merge all, same asset types, zero value is different asset type
        assert!(
            k_mix_helper(
                vec![peso(3), peso(6), peso(1)],
                vec![peso(9)],
                vec![zero(), zero(), peso(10)],
            )
            .is_ok()
        );
        // incomplete merge, input sum does not equal output sum
        assert!(
            k_mix_helper(
                vec![peso(3), peso(6), peso(1)],
                vec![peso(9)],
                vec![zero(), zero(), peso(9)],
            )
            .is_err()
        );
        // error when merging with different asset types
        assert!(
            k_mix_helper(
                vec![peso(3), yuan(6), peso(1)],
                vec![peso(9)],
                vec![zero(), zero(), peso(10)],
            )
            .is_err()
        );
        // k=4
        // merge each of 2 asset types
        assert!(
            k_mix_helper(
                vec![peso(3), peso(6), yuan(1), yuan(2)],
                vec![peso(9), yuan(1)],
                vec![zero(), peso(9), zero(), yuan(3)],
            )
            .is_ok()
        );
        // merge all, same asset
        assert!(
            k_mix_helper(
                vec![peso(3), peso(2), peso(2), peso(1)],
                vec![peso(5), peso(7)],
                vec![zero(), zero(), zero(), peso(8)],
            )
            .is_ok()
        );
        // no merge, different assets
        assert!(
            k_mix_helper(
                vec![peso(3), yuan(2), peso(2), yuan(1)],
                vec![yuan(2), peso(2)],
                vec![peso(3), yuan(2), peso(2), yuan(1)],
            )
            .is_ok()
        );
        // error when merging, output sum not equal to input sum
        assert!(
            k_mix_helper(
                vec![peso(3), peso(2), peso(2), peso(1)],
                vec![peso(5), peso(7)],
                vec![zero(), zero(), zero(), peso(9)],
            )
            .is_err()
        );
    }
    /// Proves and verifies one k-mix instance: builds commitments for
    /// inputs ++ intermediates ++ outputs, runs `fill_cs` on both the
    /// prover and verifier sides, and returns the verification result.
    fn k_mix_helper(
        inputs: Vec<Value>,
        intermediates: Vec<Value>,
        outputs: Vec<Value>,
    ) -> Result<(), SpacesuitError> {
        // Common
        let pc_gens = PedersenGens::default();
        let bp_gens = BulletproofGens::new(128, 1);
        let k = inputs.len();
        let inter_count = intermediates.len();
        // Same shape check as `fill_cs`, with isize arithmetic so k < 2
        // cannot underflow.
        if k != outputs.len() || inter_count != max(k as isize - 2, 0) as usize {
            return Err(SpacesuitError::InvalidR1CSConstruction);
        }
        // Prover's scope
        let (proof, commitments) = {
            // Flatten inputs ++ intermediates ++ outputs into one scalar
            // vector of (q, a, t) triples.
            let mut values = inputs.clone();
            values.append(&mut intermediates.clone());
            values.append(&mut outputs.clone());
            let v: Vec<Scalar> = values.iter().fold(Vec::new(), |mut vec, value| {
                vec.push(value.q.into());
                vec.push(value.a);
                vec.push(value.t);
                vec
            });
            // Blinding factors drawn from the thread RNG (NOTE(review): the
            // old comment claimed they came from the transcript — they don't).
            let v_blinding: Vec<Scalar> = (0..v.len())
                .map(|_| Scalar::random(&mut rand::thread_rng()))
                .collect();
            let mut prover_transcript = Transcript::new(b"KMixTest");
            let (mut prover_cs, variables, commitments) = ProverCS::new(
                &bp_gens,
                &pc_gens,
                &mut prover_transcript,
                v.clone(),
                v_blinding.clone(),
            );
            // Prover adds constraints to the constraint system
            let (input_vals, inter_vals, output_vals) =
                organize_values(variables, &Some(values), k, inter_count);
            fill_cs(&mut prover_cs, input_vals, inter_vals, output_vals)?;
            let proof = prover_cs.prove()?;
            (proof, commitments)
        };
        // Verifier makes a `ConstraintSystem` instance representing a merge gadget
        let mut verifier_transcript = Transcript::new(b"KMixTest");
        let (mut verifier_cs, variables) =
            VerifierCS::new(&bp_gens, &pc_gens, &mut verifier_transcript, commitments);
        // Verifier adds constraints to the constraint system
        let (input_vals, inter_vals, output_vals) =
            organize_values(variables, &None, k, inter_count);
        assert!(fill_cs(&mut verifier_cs, input_vals, inter_vals, output_vals).is_ok());
        Ok(verifier_cs.verify(&proof)?)
    }
    /// Regroups a flat variable list (3 variables per value: q, a, t) into
    /// the (inputs, intermediates, outputs) triple expected by `fill_cs`.
    /// `assignments` is `Some` on the prover side, `None` for the verifier.
    fn organize_values(
        variables: Vec<Variable>,
        assignments: &Option<Vec<Value>>,
        k: usize,
        inter_count: usize,
    ) -> (
        Vec<AllocatedValue>,
        Vec<AllocatedValue>,
        Vec<AllocatedValue>,
    ) {
        let val_count = variables.len() / 3;
        let mut values = Vec::with_capacity(val_count);
        for i in 0..val_count {
            values.push(AllocatedValue {
                q: variables[i * 3],
                a: variables[i * 3 + 1],
                t: variables[i * 3 + 2],
                assignment: match assignments {
                    Some(ref a) => Some(a[i]),
                    None => None,
                },
            });
        }
        // Layout is inputs[0..k] ++ intermediates[k..k+ic] ++ outputs.
        let input_vals = values[0..k].to_vec();
        let inter_vals = values[k..k + inter_count].to_vec();
        let output_vals = values[k + inter_count..2 * k + inter_count].to_vec();
        (input_vals, inter_vals, output_vals)
    }
}
|
use composer::*;
/// Panics (with an assert-style message) unless `a` and `b` are equal to
/// within an absolute tolerance of 1e-5.
fn assert_float_eq(a: f32, b: f32) {
    const TOLERANCE: f32 = 1e-5;
    let difference = (a - b).abs();
    if difference > TOLERANCE {
        panic!(
            "assertion failed: `(left == right)`\n left: `{}`,\n right: `{}`",
            a, b
        );
    }
}
#[test]
fn test_note_length_to_float() {
    use generate::note_length_to_float;
    use parse::NoteLength::*;
    // Default length = 1/4; each Dot adds half of the previous term;
    // Length(2) contributes 1/2 and its Dot another 1/4.
    assert_float_eq(
        note_length_to_float(&[DefaultLength, Dot, Dot, Length(2), Dot], 1. / 4.),
        1. / 4. + 1. / 8. + 1. / 16. + 1. / 2. + 1. / 4.,
    );
}
/// 50%-duty square wave: +1 in the second half of each cycle, -1 in the
/// first half.
fn pulse(frequency: f32, position: f32) -> f32 {
    let phase = frequency * position % 1.0;
    // Keep `>= 0.5` (not an inverted `< 0.5`) so a NaN phase still maps
    // to -1.0 exactly as before.
    match phase >= 0.5 {
        true => 1.0,
        false => -1.0,
    }
}
#[test]
fn test_note() {
    use generate::note::Note;
    use generate::ToneKind::FnTone;
    // Note ringing over [1.0, 2.0) with a constant tone of 1.0.
    let note = Note::new(10.0, FnTone(|_, _| 1.0), 1.0, 0.0, 0.0, 1.0, 2.0);
    assert!(note.is_waiting(0.0));
    assert!(note.is_ringing(1.0));
    assert!(note.is_over(2.0));
    // Silent before the start and at/after the end.
    assert_float_eq(note.get_sample(0.5), 0.0);
    assert_float_eq(note.get_sample(2.0), 0.0);
    // Amplitude decays linearly from 1 to 0 across the note's duration.
    for i in 0..100 {
        let position = i as f32 / 100.0;
        assert_float_eq(note.get_sample(position + 1.0), 1.0 - position);
    }
    // 10 Hz pulse with no phase offset: low then high within each cycle.
    let note = Note::new(10.0, FnTone(pulse), 1.0, 1.0, 0.0, 0.0, 1.0);
    for i in 0..10 {
        let position = i as f32 / 10.0;
        assert_float_eq(note.get_sample(position + 0.025), -1.0);
        assert_float_eq(note.get_sample(position + 0.075), 1.0);
    }
    // Same pulse shifted by 0.05 (half a cycle): polarity inverts.
    let note = Note::new(10.0, FnTone(pulse), 1.0, 1.0, 0.05, 0.0, 1.0);
    for i in 0..10 {
        let position = i as f32 / 10.0;
        assert_float_eq(note.get_sample(position + 0.025), 1.0);
        assert_float_eq(note.get_sample(position + 0.075), -1.0);
    }
}
#[test]
fn test_note_queue() {
    use generate::note::{Note, NotesQueue};
    use generate::ToneKind::FnTone;
    // Notes start at 3.0, 1.0 and 2.0 respectively (second-to-last arg).
    let note_a = Note::new(10.0, FnTone(pulse), 0.8, 0.9, 0.0, 3.0, 5.0);
    let note_b = Note::new(20.0, FnTone(pulse), 1.0, 0.9, 0.0, 1.0, 6.0);
    let note_c = Note::new(30.0, FnTone(pulse), 0.9, 1.0, 0.0, 2.0, 4.0);
    let mut queue = NotesQueue::new(vec![note_a.clone(), note_b.clone(), note_c.clone()]);
    // Notes pop in start-time order, each exactly once, and only once the
    // queried time has reached their start.
    assert_eq!(queue.next_before(0.5), None);
    assert_eq!(queue.next_before(1.0), Some(note_b));
    assert_eq!(queue.next_before(1.0), None);
    assert_eq!(queue.next_before(2.0), Some(note_c));
    assert_eq!(queue.next_before(2.0), None);
    assert_eq!(queue.next_before(3.0), Some(note_a));
    assert_eq!(queue.next_before(3.0), None);
    assert_eq!(queue.next_before(10.0), None);
}
#[test]
fn test_tone() {
    use generate::ToneKind;
    use std::sync::Arc;
    // FnTone forwards both arguments to the closure.
    let fn_tone = ToneKind::FnTone(|a, b| a * b);
    assert_float_eq(fn_tone.sample(10.0, 20.0), 200.0);
    // PCMTone indexes into the sample buffer; expected values suggest the
    // index wraps around the buffer length — confirm against `sample`.
    let pcm_tone = ToneKind::PCMTone(Arc::new(vec![0.0, 1.0, 2.0, 3.0, 4.0]));
    assert_float_eq(pcm_tone.sample(0.2, 1.5), 1.0);
    assert_float_eq(pcm_tone.sample(0.2, 7.5), 2.0);
}
|
use lightning::chain::chaininterface::{ConfirmationTarget, FeeEstimator};
use tokio::runtime::Handle;
use crate::p2p::router::RemoteSenseiInfo;
/// Fee estimator that fetches fee rates over HTTP from a remote sensei
/// node instead of computing them locally.
pub struct RemoteFeeEstimator {
    // Host and auth token of the remote node.
    remote_sensei: RemoteSenseiInfo,
    // Runtime handle used to run the async HTTP calls from the sync
    // `FeeEstimator` trait method.
    tokio_handle: Handle,
}
impl RemoteFeeEstimator {
    /// Creates an estimator that queries the sensei node at `host`,
    /// authenticating every request with `token`.
    pub fn new(host: String, token: String, tokio_handle: Handle) -> Self {
        Self {
            remote_sensei: RemoteSenseiInfo { host, token },
            tokio_handle,
        }
    }
    fn fee_rate_normal_path(&self) -> String {
        format!("{}/v1/ldk/chain/fee-rate-normal", self.remote_sensei.host)
    }
    fn fee_rate_background_path(&self) -> String {
        format!(
            "{}/v1/ldk/chain/fee-rate-background",
            self.remote_sensei.host
        )
    }
    fn fee_rate_high_priority_path(&self) -> String {
        format!(
            "{}/v1/ldk/chain/fee-rate-high-priority",
            self.remote_sensei.host
        )
    }
    /// Fetches a fee rate from `url`, falling back to `default` on any
    /// network, body-read, or parse failure. Consolidates the logic that
    /// was previously duplicated verbatim across the three public getters.
    async fn fetch_fee_rate(&self, url: String, default: u32) -> u32 {
        let client = reqwest::Client::new();
        match client
            .get(url)
            .header("token", self.remote_sensei.token.clone())
            .send()
            .await
        {
            Ok(response) => match response.text().await {
                Ok(fee_rate_string) => fee_rate_string.parse().unwrap_or(default),
                Err(_) => default,
            },
            Err(_) => default,
        }
    }
    /// Fee rate for `ConfirmationTarget::Normal`; defaults to 2000.
    pub async fn get_fee_rate_normal(&self) -> u32 {
        self.fetch_fee_rate(self.fee_rate_normal_path(), 2000).await
    }
    /// Fee rate for `ConfirmationTarget::Background`; defaults to 253.
    pub async fn get_fee_rate_background(&self) -> u32 {
        self.fetch_fee_rate(self.fee_rate_background_path(), 253).await
    }
    /// Fee rate for `ConfirmationTarget::HighPriority`; defaults to 5000.
    pub async fn get_fee_rate_high_priority(&self) -> u32 {
        self.fetch_fee_rate(self.fee_rate_high_priority_path(), 5000)
            .await
    }
}
impl FeeEstimator for RemoteFeeEstimator {
    /// Synchronous trait entry point: bridges into async by telling tokio
    /// this thread may block (`block_in_place`) and then driving the
    /// appropriate per-target HTTP fetch to completion on our handle.
    fn get_est_sat_per_1000_weight(&self, confirmation_target: ConfirmationTarget) -> u32 {
        tokio::task::block_in_place(move || {
            self.tokio_handle.clone().block_on(async move {
                match confirmation_target {
                    ConfirmationTarget::Background => self.get_fee_rate_background().await,
                    ConfirmationTarget::Normal => self.get_fee_rate_normal().await,
                    ConfirmationTarget::HighPriority => self.get_fee_rate_high_priority().await,
                }
            })
        })
    }
}
|
// Machine-generated (svd2rust-style) accessors for the DCISC register:
// eight digital-comparator interrupt status/clear bits (DCINT0..DCINT7),
// one read proxy and one write proxy per bit. Do not hand-edit the
// generated patterns; regenerate from the SVD instead.
#[doc = "Reader of register DCISC"]
pub type R = crate::R<u32, super::DCISC>;
#[doc = "Writer for register DCISC"]
pub type W = crate::W<u32, super::DCISC>;
#[doc = "Register DCISC `reset()`'s with value 0"]
impl crate::ResetValue for super::DCISC {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DCINT0`"]
pub type DCINT0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT0`"]
pub struct DCINT0_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 0 only.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `DCINT1`"]
pub type DCINT1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT1`"]
pub struct DCINT1_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT1_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `DCINT2`"]
pub type DCINT2_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT2`"]
pub struct DCINT2_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT2_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `DCINT3`"]
pub type DCINT3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT3`"]
pub struct DCINT3_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT3_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `DCINT4`"]
pub type DCINT4_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT4`"]
pub struct DCINT4_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT4_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `DCINT5`"]
pub type DCINT5_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT5`"]
pub struct DCINT5_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT5_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `DCINT6`"]
pub type DCINT6_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT6`"]
pub struct DCINT6_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT6_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `DCINT7`"]
pub type DCINT7_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCINT7`"]
pub struct DCINT7_W<'a> {
    w: &'a mut W,
}
impl<'a> DCINT7_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// Field readers: extract each bit of the current register value.
impl R {
    #[doc = "Bit 0 - Digital Comparator 0 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint0(&self) -> DCINT0_R {
        DCINT0_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Digital Comparator 1 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint1(&self) -> DCINT1_R {
        DCINT1_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Digital Comparator 2 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint2(&self) -> DCINT2_R {
        DCINT2_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Digital Comparator 3 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint3(&self) -> DCINT3_R {
        DCINT3_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Digital Comparator 4 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint4(&self) -> DCINT4_R {
        DCINT4_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Digital Comparator 5 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint5(&self) -> DCINT5_R {
        DCINT5_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Digital Comparator 6 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint6(&self) -> DCINT6_R {
        DCINT6_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Digital Comparator 7 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint7(&self) -> DCINT7_R {
        DCINT7_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
// Field writers: each returns the write proxy for its bit.
impl W {
    #[doc = "Bit 0 - Digital Comparator 0 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint0(&mut self) -> DCINT0_W {
        DCINT0_W { w: self }
    }
    #[doc = "Bit 1 - Digital Comparator 1 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint1(&mut self) -> DCINT1_W {
        DCINT1_W { w: self }
    }
    #[doc = "Bit 2 - Digital Comparator 2 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint2(&mut self) -> DCINT2_W {
        DCINT2_W { w: self }
    }
    #[doc = "Bit 3 - Digital Comparator 3 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint3(&mut self) -> DCINT3_W {
        DCINT3_W { w: self }
    }
    #[doc = "Bit 4 - Digital Comparator 4 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint4(&mut self) -> DCINT4_W {
        DCINT4_W { w: self }
    }
    #[doc = "Bit 5 - Digital Comparator 5 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint5(&mut self) -> DCINT5_W {
        DCINT5_W { w: self }
    }
    #[doc = "Bit 6 - Digital Comparator 6 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint6(&mut self) -> DCINT6_W {
        DCINT6_W { w: self }
    }
    #[doc = "Bit 7 - Digital Comparator 7 Interrupt Status and Clear"]
    #[inline(always)]
    pub fn dcint7(&mut self) -> DCINT7_W {
        DCINT7_W { w: self }
    }
}
|
/// Given the array nums consisting of 2n elements in the form [x1,x2,...,xn,y1,y2,...,yn].
///
/// Return the array in the form [x1,y1,x2,y2,...,xn,yn].
///
///
/// Example 1:
///
/// Input: nums = [2,5,1,3,4,7], n = 3
/// Output: [2,3,5,4,1,7]
/// Explanation: Since x1=2, x2=5, x3=1, y1=3, y2=4, y3=7 then the answer is [2,3,5,4,1,7].
///
/// Example 2:
///
/// Input: nums = [1,2,3,4,4,3,2,1], n = 4
/// Output: [1,4,2,3,3,2,4,1]
///
/// Example 3:
///
/// Input: nums = [1,1,2,2], n = 2
/// Output: [1,2,1,2]
///
///
/// Constraints:
///
/// 1 <= n <= 500
/// nums.length == 2n
/// 1 <= nums[i] <= 10^3
pub fn shuffle(nums: Vec<i32>, n: i32) -> Vec<i32> {
    let n = n as usize;
    // Zip the x-half with the y-half and interleave each pair.
    // `[x, y]` (a stack array) replaces the original `vec![x, y]`, which
    // heap-allocated a Vec for every pair.
    nums[..n]
        .iter()
        .zip(nums[n..].iter())
        .flat_map(|(&x, &y)| [x, y])
        .collect()
}
#[cfg(test)]
mod shuffle_tests {
    use super::*;
    /// Covers Example 1 from the problem statement.
    #[test]
    fn shuffle_test_one() {
        // arrange
        let test = vec![2, 5, 1, 3, 4, 7];
        let test_n = 3;
        // act
        let result = shuffle(test, test_n);
        // assert
        assert_eq!(result, vec![2, 3, 5, 4, 1, 7]);
    }
}
|
use regex::{escape, Regex};
use errors::BuildError;
/// Compiles a route such as `/users/:id/files/*` into an anchored regex.
/// `:name` segments become named captures matching one segment; a lone
/// `*` segment captures the rest of the path and ends parsing.
pub fn parse(mut route: &str) -> Result<Regex, BuildError> {
    // Drop a single leading slash so splitting yields no empty head segment.
    if !route.is_empty() && route.as_bytes()[0] == b'/' {
        route = &route[1..];
    }
    let mut pattern = String::from("^/?");
    for (i, segment) in route.split('/').enumerate() {
        if i > 0 {
            pattern.push('/')
        }
        let bytes = segment.as_bytes();
        match bytes.first() {
            // `:name` — dynamic segment with a validated parameter name.
            Some(b':') => {
                let name = &segment[1..];
                if !is_valid_param_name(name) {
                    return Err(BuildError::InvalidParamName);
                }
                push_dynamic_segment(name, &mut pattern);
            }
            // A bare `*` swallows the remainder of the path.
            Some(b'*') if bytes.len() == 1 => {
                push_wildcard(&mut pattern);
                break;
            }
            // Anything else (including empty segments) is literal text.
            _ => push_static_segment(segment, &mut pattern),
        }
    }
    pattern.push('$');
    Ok(Regex::new(&pattern).unwrap())
}
#[inline]
/// A parameter name is valid when it is non-empty, starts with an ASCII
/// letter, and continues with ASCII letters or digits only.
fn is_valid_param_name(name: &str) -> bool {
    let mut chars = name.chars();
    match chars.next() {
        Some(first) if first.is_ascii_alphabetic() => {
            chars.all(|c| c.is_ascii_alphanumeric())
        }
        _ => false,
    }
}
#[inline]
/// Appends a named capture group matching exactly one path segment.
fn push_dynamic_segment(name: &str, pattern: &mut String) {
    pattern.push_str("(?P<");
    pattern.push_str(&escape(name));
    pattern.push_str(">[^/]+)");
}
#[inline]
/// Appends a capture group matching the remainder of the path.
/// `push_str` of the literal replaces the original argument-less
/// `format!("(.*)")`, which needlessly allocated a String.
fn push_wildcard(pattern: &mut String) {
    pattern.push_str("(.*)");
}
#[inline]
/// Appends a literal path segment, regex-escaped so characters such as
/// `.` or `+` in the route match themselves.
fn push_static_segment(name: &str, pattern: &mut String) {
    pattern.push_str(&escape(name));
}
|
use std::collections::HashMap;
use std::fs;
use serde::{de::DeserializeOwned, Deserialize};
use serde_json;
use tokio::sync::RwLock;
use super::character_data::CharacterData;
use super::map_data::MapData;
/// Request descriptor for `load_resource`: which file to load and which
/// directory layout (`ResourceType`) it lives under.
struct ResourceToLoad {
    pub name: String,
    pub resource_type: ResourceType,
}
impl ResourceToLoad {
    /// Convenience constructor taking a borrowed name.
    pub fn new(name: &str, resource_type: ResourceType) -> Self {
        Self {
            name: name.to_string(),
            resource_type,
        }
    }
}
/// Kind of resource being loaded; determines the on-disk path layout
/// (see `load_resource`).
enum ResourceType {
    ResourceList,
    Character,
    Map,
}
/// In-memory store of all maps and characters, keyed by name and loaded
/// eagerly at construction. `RwLock` allows concurrent async readers.
pub struct ResourceManager {
    maps: RwLock<HashMap<String, MapData>>,
    characters: RwLock<HashMap<String, CharacterData>>,
}
/// Schema of `resourceList.json`: the names of every map and character
/// to load at startup.
#[derive(Debug, Deserialize)]
struct ResourceList {
    pub maps: Vec<String>,
    pub characters: Vec<String>,
}
impl ResourceManager {
    /// Reads `resourceList.json` and eagerly loads every map and character
    /// it names.
    ///
    /// # Panics
    /// Panics (via `load_resource`) if any file is missing or fails to
    /// parse.
    pub fn new() -> Self {
        let resources = load_resource::<ResourceList>(ResourceToLoad::new(
            "resourceList",
            ResourceType::ResourceList,
        ));
        let maps = resources
            .maps
            .into_iter()
            .map(|map| {
                (
                    map.clone(),
                    load_resource::<MapData>(ResourceToLoad::new(&map, ResourceType::Map)),
                )
            })
            .collect::<HashMap<String, MapData>>();
        let characters = resources
            .characters
            .into_iter()
            .map(|character| {
                (
                    character.clone(),
                    load_resource::<CharacterData>(ResourceToLoad::new(
                        &character,
                        ResourceType::Character,
                    )),
                )
            })
            .collect::<HashMap<String, CharacterData>>();
        Self {
            maps: RwLock::new(maps),
            characters: RwLock::new(characters),
        }
    }
    /// Returns a clone of the named map, or `None` if it was not loaded.
    pub async fn get_map(&self, name: &str) -> Option<MapData> {
        // `.cloned()` replaces the manual Some/None match (clippy: manual_map).
        self.maps.read().await.get(name).cloned()
    }
    /// Returns a clone of the named character, or `None` if not loaded.
    #[allow(dead_code)]
    pub async fn get_character(&self, name: &str) -> Option<CharacterData> {
        self.characters.read().await.get(name).cloned()
    }
}
/// Reads and deserialises the JSON file backing `resource`, choosing the
/// directory layout from its `resource_type`.
///
/// # Panics
/// Panics with the resource name AND the underlying error if the file
/// cannot be read or does not deserialise into `T`. (The original
/// `|_|` closures discarded the cause, making failures hard to diagnose.)
fn load_resource<T: DeserializeOwned>(resource: ResourceToLoad) -> T {
    println!("Loading resource: {}", &resource.name);
    let path = match resource.resource_type {
        ResourceType::ResourceList => format!("./resources/{}.json", &resource.name),
        ResourceType::Character => format!(
            "./resources/characters/{}/{}.json",
            &resource.name, &resource.name
        ),
        ResourceType::Map => format!(
            "./resources/maps/{}/{}.json",
            &resource.name, &resource.name
        ),
    };
    let contents = fs::read_to_string(path)
        .unwrap_or_else(|e| panic!("FAILED TO READ FILE: {}: {}", &resource.name, e));
    serde_json::from_str::<T>(&contents)
        .unwrap_or_else(|e| panic!("FAILED TO PARSE JSON: {}: {}", &resource.name, e))
}
|
//! https://github.com/lumen/otp/tree/lumen/lib/eunit/src
use super::*;
// One compile test per Erlang module under lib/eunit/src; the macro
// presumably derives the source path from the identifier — see its
// definition in the parent module.
test_compiles_lumen_otp!(eunit);
test_compiles_lumen_otp!(eunit_autoexport);
test_compiles_lumen_otp!(eunit_data);
test_compiles_lumen_otp!(eunit_lib);
test_compiles_lumen_otp!(eunit_listener);
test_compiles_lumen_otp!(eunit_proc);
test_compiles_lumen_otp!(eunit_serial);
test_compiles_lumen_otp!(eunit_server);
test_compiles_lumen_otp!(eunit_striptests);
test_compiles_lumen_otp!(eunit_surefire);
test_compiles_lumen_otp!(eunit_test);
test_compiles_lumen_otp!(eunit_tests);
test_compiles_lumen_otp!(eunit_tty);
/// Include paths for these tests: the shared includes plus eunit's own
/// include and source directories.
fn includes() -> Vec<&'static str> {
    let mut include_paths = super::includes();
    include_paths.push("lib/eunit/include");
    include_paths.push("lib/eunit/src");
    include_paths
}
/// Path of the eunit sources relative to the shared OTP checkout root.
fn relative_directory_path() -> PathBuf {
    let base = super::relative_directory_path();
    base.join("eunit/src")
}
|
use glam::{vec2, Vec2};
use rand::prelude::*;
use super::Config;
/// A circle participating in the physics simulation.
#[derive(Clone, Copy)]
pub struct RigidCircle {
    /// Current position.
    pub pos: Vec2,
    /// Current velocity.
    pub vel: Vec2,
    /// Circle radius, taken from `Config::cell_radius` in `new_rand`.
    pub radius: f32,
    /// Initialized equal to `pos`; presumably the position written by the
    /// next update step (double-buffering) — TODO confirm against the solver.
    pub to_pos: Vec2,
    /// Initialized equal to `vel`; presumably the next-step velocity — TODO confirm.
    pub to_vel: Vec2,
}
/// A color stored as four `f32` components; `new_rand` fixes the fourth
/// component at 1.0 (presumably RGBA with opaque alpha).
#[derive(Clone, Copy)]
pub struct Color {
    // Four color components in [0, 1].
    pub val: [f32; 4],
}
impl RigidCircle {
    /// Creates a circle at a uniformly random position inside `config.bounds`
    /// with per-axis velocity sampled from `[-1, 1)` and the configured radius.
    pub fn new_rand(config: &Config) -> Self {
        // Bind the thread-local RNG once instead of re-fetching it per sample.
        let mut rng = thread_rng();
        let bounds = config.bounds;
        let pos = vec2(
            rng.gen_range(bounds.0.x..bounds.1.x),
            rng.gen_range(bounds.0.y..bounds.1.y),
        );
        let vel = vec2(rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0));
        Self {
            pos,
            // Next-step buffers start equal to the current state.
            to_pos: pos,
            vel,
            to_vel: vel,
            radius: config.cell_radius,
        }
    }
}
impl Color {
    /// Creates a random color: the first three components are sampled
    /// uniformly from `[0, 1)`, the fourth is fixed at 1.0.
    pub fn new_rand() -> Self {
        // Bind the thread-local RNG once instead of re-fetching it per sample.
        let mut rng = thread_rng();
        Self {
            val: [
                rng.gen_range(0.0..1.0),
                rng.gen_range(0.0..1.0),
                rng.gen_range(0.0..1.0),
                1.0,
            ],
        }
    }
}
|
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![cfg(not(tarpaulin_include))]
use crate::source::prelude::*;
use async_std::net::UdpSocket;
/// Configuration for the UDP onramp, deserialized from the onramp YAML.
#[derive(Deserialize, Debug, Clone)]
pub struct Config {
    /// The port to listen on.
    pub port: u16,
    /// The host/interface to bind to.
    pub host: String,
}
impl ConfigImpl for Config {}
/// Public UDP onramp; holds the parsed config until `start` builds the
/// internal source.
pub struct Udp {
    pub config: Config,
    // URL identifying this onramp instance within tremor.
    onramp_id: TremorUrl,
}
/// Internal source state for the UDP onramp.
struct Int {
    config: Config,
    // Bound lazily in `init`/`pull_event`; `None` until the socket is open.
    socket: Option<UdpSocket>,
    onramp_id: TremorUrl,
    // Template origin URI; host/port are stamped per received datagram.
    origin_uri: EventOriginUri,
}
impl std::fmt::Debug for Int {
    /// Fixed debug label for this source; instances carry no distinguishing
    /// state worth printing here.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("UDP")
    }
}
impl Int {
    /// Builds the internal source from a parsed config.
    ///
    /// The origin URI starts with an empty host and no port; those are
    /// filled in per datagram, while the path records the receive port.
    fn from_config(uid: u64, onramp_id: TremorUrl, config: &Config) -> Self {
        Self {
            config: config.clone(),
            socket: None,
            onramp_id,
            origin_uri: EventOriginUri {
                uid,
                scheme: "tremor-udp".to_string(),
                host: String::default(),
                port: None,
                path: vec![config.port.to_string()], // captures receive port
            },
        }
    }
}
impl onramp::Impl for Udp {
    /// Parses the YAML config into a `Udp` onramp; the config is mandatory.
    fn from_config(onramp_id: &TremorUrl, config: &Option<YamlValue>) -> Result<Box<dyn Onramp>> {
        match config {
            Some(raw) => {
                let config: Config = Config::new(raw)?;
                Ok(Box::new(Self {
                    config,
                    onramp_id: onramp_id.clone(),
                }))
            }
            None => Err("Missing config for udp onramp".into()),
        }
    }
}
#[async_trait::async_trait]
impl Source for Int {
    /// Pulls the next event: receives one datagram when the socket is bound;
    /// otherwise binds the socket and reports a state change instead.
    async fn pull_event(&mut self, _id: u64) -> Result<SourceReply> {
        // Maximum UDP payload size: one datagram always fits in one read.
        let mut buf = [0; 65535];
        if let Some(socket) = self.socket.as_mut() {
            match socket.recv_from(&mut buf).await {
                Ok((n, peer)) => {
                    let mut origin_uri = self.origin_uri.clone();
                    // TODO add a method in origin_uri for changes like this?
                    // Stamp the sender's address onto the per-event origin.
                    origin_uri.host = peer.ip().to_string();
                    origin_uri.port = Some(peer.port());
                    Ok(SourceReply::Data {
                        origin_uri,
                        // ALLOW: we get n from recv
                        data: buf[0..n].to_vec(),
                        meta: None,
                        codec_override: None,
                        stream: 0,
                    })
                }
                Err(e) => {
                    // WouldBlock is not fatal: report "no data yet" and retry.
                    if e.kind() == std::io::ErrorKind::WouldBlock {
                        Ok(SourceReply::Empty(1))
                    } else {
                        Err(e.into())
                    }
                }
            }
        } else {
            // Socket not bound yet (e.g. `init` was skipped or state reset):
            // bind here too. NOTE(review): duplicates the logic in `init`.
            let socket = UdpSocket::bind((self.config.host.as_str(), self.config.port)).await?;
            info!(
                "[UDP Onramp] listening on {}:{}",
                self.config.host, self.config.port
            );
            self.socket = Some(socket);
            Ok(SourceReply::StateChange(SourceState::Connected))
        }
    }
    /// Binds the UDP socket at startup and reports `Connected`.
    async fn init(&mut self) -> Result<SourceState> {
        let socket = UdpSocket::bind((self.config.host.as_str(), self.config.port)).await?;
        info!(
            "[UDP Onramp] listening on {}:{}",
            self.config.host, self.config.port
        );
        self.socket = Some(socket);
        Ok(SourceState::Connected)
    }
    /// URL identifying this onramp instance.
    fn id(&self) -> &TremorUrl {
        &self.onramp_id
    }
}
#[async_trait::async_trait]
impl Onramp for Udp {
    /// Builds the internal source and hands it to the source manager.
    async fn start(&mut self, config: OnrampConfig<'_>) -> Result<onramp::Addr> {
        SourceManager::start(
            Int::from_config(config.onramp_uid, self.onramp_id.clone(), &self.config),
            config,
        )
        .await
    }
    /// Datagrams are decoded as plain strings unless overridden.
    fn default_codec(&self) -> &str {
        "string"
    }
}
|
#![cfg(feature = "runtime-benchmarks")]
use crate::{BalanceOf, Call, Pallet, WRAPPED_BYTES_PREFIX, WRAPPED_BYTES_POSTFIX};
use cumulus_pallet_parachain_system::Pallet as RelayPallet;
use cumulus_primitives_core::{
relay_chain::{v1::HeadData, BlockNumber as RelayChainBlockNumber},
PersistedValidationData,
};
use cumulus_primitives_parachain_inherent::ParachainInherentData;
use ed25519_dalek::Signer;
use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite};
use frame_support::{
dispatch::UnfilteredDispatchable,
inherent::{InherentData, ProvideInherent},
traits::{Currency, Get, OnFinalize, OnInitialize},
};
use frame_system::RawOrigin;
use parity_scale_codec::Encode;
use sp_core::{
crypto::{AccountId32, UncheckedFrom},
ed25519,
};
use sp_runtime::{traits::One, MultiSignature};
use sp_std::vec;
use sp_std::vec::Vec;
use sp_trie::StorageProof;
// This is a fake proof that emulates a storage proof inserted as the validation data.
// We avoid using the sproof builder here because it generates an issue when compiling without std.
// Fake storage proof used as the relay-chain state in the mocked inherents below.
const MOCK_PROOF: [u8; 71] = [
    127, 1, 6, 222, 61, 138, 84, 210, 126, 68, 169, 213, 206, 24, 150, 24, 242, 45, 180, 180, 157,
    149, 50, 13, 144, 33, 153, 76, 133, 15, 37, 184, 227, 133, 144, 0, 0, 32, 0, 0, 0, 16, 0, 8, 0,
    0, 0, 0, 4, 0, 0, 0, 1, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 6, 0, 0, 0, 6, 0, 0, 0,
];
// Fake storage root; only valid together with `MOCK_PROOF` above.
const MOCK_PROOF_HASH: [u8; 32] = [
    216, 6, 227, 175, 180, 211, 98, 117, 202, 245, 206, 51, 21, 143, 100, 232, 96, 217, 14, 71,
    243, 146, 7, 202, 245, 129, 165, 70, 72, 184, 130, 141,
];
// These benchmarks only work with a Runtime that uses cumulus's RelayChainBlockNumberProvider.
// This will improve once https://github.com/PureStake/crowdloan-rewards/pull/44 lands
/// Benchmark-only config: the runtime must provide both this crate's pallet
/// and cumulus's parachain-system pallet. Blanket-implemented for any such runtime.
pub trait Config: crate::Config + cumulus_pallet_parachain_system::Config {}
impl<T: crate::Config + cumulus_pallet_parachain_system::Config> Config for T {}
/// Default balance used when funding benchmark accounts: the configured
/// minimum reward.
fn default_balance<T: Config>() -> BalanceOf<T> {
    T::MinimumReward::get()
}
/// Funds a specific account (e.g. the pallet's sovereign account) with
/// `default_balance + extra`.
/// NOTE(review): the imbalance returned by `issue` is dropped here — confirm
/// that is intended for benchmark setup.
fn fund_specific_account<T: Config>(pallet_account: T::AccountId, extra: BalanceOf<T>) {
    let default_balance = default_balance::<T>();
    let total = default_balance + extra;
    T::RewardCurrency::make_free_balance_be(&pallet_account, total);
    T::RewardCurrency::issue(total);
}
/// Creates a deterministic benchmark user (derived from `string` and `n`)
/// funded with `default_balance + extra`.
fn create_funded_user<T: Config>(
    string: &'static str,
    n: u32,
    extra: BalanceOf<T>,
) -> T::AccountId {
    const SEED: u32 = 0;
    let user: T::AccountId = account(string, n, SEED);
    let total = default_balance::<T>() + extra;
    T::RewardCurrency::make_free_balance_be(&user, total);
    T::RewardCurrency::issue(total);
    user
}
/// Builds the parachain-system inherent data for the given relay block
/// number, backed by the mocked storage proof and root above.
fn create_inherent_data<T: Config>(block_number: u32) -> InherentData {
    let validation_data = PersistedValidationData {
        relay_parent_number: block_number as RelayChainBlockNumber,
        relay_parent_storage_root: MOCK_PROOF_HASH.into(),
        max_pov_size: 1000u32,
        parent_head: HeadData(vec![1, 1, 1]),
    };
    let system_inherent_data = ParachainInherentData {
        validation_data,
        relay_chain_state: StorageProof::new(vec![MOCK_PROOF.to_vec()]),
        downward_messages: Default::default(),
        horizontal_messages: Default::default(),
    };
    let mut inherent_data = InherentData::default();
    inherent_data
        .put_data(
            cumulus_primitives_parachain_inherent::INHERENT_IDENTIFIER,
            &system_inherent_data,
        )
        .expect("failed to put VFP inherent");
    inherent_data
}
/// Creates `total_number` contributors with distinct relay-chain accounts
/// derived from `SEED - seed_offset - i`, each paired with a funded native
/// account and a contribution of 100.
fn create_contributors<T: Config>(
    total_number: u32,
    seed_offset: u32,
) -> Vec<(T::RelayChainAccountId, Option<T::AccountId>, BalanceOf<T>)> {
    let mut contribution_vec = Vec::with_capacity(total_number as usize);
    for i in 0..total_number {
        let seed = SEED - seed_offset - i;
        // 32-byte account id: big-endian seed in the first 4 bytes, zeroes after.
        let mut account: [u8; 32] = [0u8; 32];
        let seed_as_slice = seed.to_be_bytes();
        account[..seed_as_slice.len()].copy_from_slice(&seed_as_slice);
        let relay_chain_account: AccountId32 = account.into();
        let user = create_funded_user::<T>("user", seed, 0u32.into());
        let contribution: BalanceOf<T> = 100u32.into();
        contribution_vec.push((relay_chain_account.into(), Some(user), contribution));
    }
    contribution_vec
}
/// Inserts contributors in batches of at most `MaxInitContributors`, since
/// `initialize_reward_vec` bounds how many entries it accepts per call.
fn insert_contributors<T: Config>(
    contributors: Vec<(T::RelayChainAccountId, Option<T::AccountId>, BalanceOf<T>)>,
) -> Result<(), &'static str> {
    let batch = max_batch_contributors::<T>() as usize;
    // `chunks` yields full batches plus one final partial batch — the same
    // grouping the previous manual index/modulo loop produced.
    for chunk in contributors.chunks(batch) {
        Pallet::<T>::initialize_reward_vec(RawOrigin::Root.into(), chunk.to_vec())?;
    }
    Ok(())
}
/// Closes the initialization phase, fixing `end_vesting_block` as the block
/// at which vesting completes. (Previous doc comment was a copy-paste error.)
fn close_initialization<T: Config>(
    end_vesting_block: T::VestingBlockNumber,
) -> Result<(), &'static str> {
    Pallet::<T>::complete_initialization(RawOrigin::Root.into(), end_vesting_block)?;
    Ok(())
}
/// Derives a deterministic ed25519 keypair from `seed`, signs `payload`,
/// and returns the matching relay-chain account plus the signature.
fn create_sig<T: Config>(seed: u32, payload: Vec<u8>) -> (AccountId32, MultiSignature) {
    // Secret-key bytes: big-endian seed in the first 4 bytes, zeroes after.
    let mut seed_32: [u8; 32] = [0u8; 32];
    let seed_as_slice = seed.to_be_bytes();
    seed_32[..seed_as_slice.len()].copy_from_slice(&seed_as_slice);
    let secret = ed25519_dalek::SecretKey::from_bytes(&seed_32).unwrap();
    let public = ed25519_dalek::PublicKey::from(&secret);
    let pair = ed25519_dalek::Keypair { secret, public };
    let sig = pair.sign(&payload).to_bytes();
    let signature: MultiSignature = ed25519::Signature::from_raw(sig).into();
    let ed_public: ed25519::Public = ed25519::Public::unchecked_from(public.to_bytes());
    let account: AccountId32 = ed_public.into();
    // `signature` is already a `MultiSignature`; no further conversion needed.
    (account, signature)
}
/// Maximum number of contributors accepted per `initialize_reward_vec` call.
fn max_batch_contributors<T: Config>() -> u32 {
    T::MaxInitContributors::get()
}
// This is our current number of contributors
const MAX_ALREADY_USERS: u32 = 5799;
// Base seed; per-contributor seeds count down from here.
const SEED: u32 = 999999999;
// FRAME benchmark definitions. In each block the setup code runs first,
// `}: _(origin, args...)` invokes the extrinsic named after the block, and
// `verify { .. }` checks post-conditions.
benchmarks! {
    initialize_reward_vec {
        // Linear component: size of the newly inserted contributor batch.
        let x in 1..max_batch_contributors::<T>();
        let y = MAX_ALREADY_USERS;
        let total_pot = 100u32*(x+y);
        // We probably need to assume we have N contributors already in
        // Fund pallet account
        fund_specific_account::<T>(Pallet::<T>::account_id(), total_pot.into());
        // Create y contributors
        let contributors = create_contributors::<T>(y, 0);
        // Insert them
        insert_contributors::<T>(contributors)?;
        // This X new contributors are the ones we will count
        let new_contributors = create_contributors::<T>(x, y);
        // Seed `SEED` matches the first pre-inserted contributor's account.
        let verifier = create_funded_user::<T>("user", SEED, 0u32.into());
    }: _(RawOrigin::Root, new_contributors)
    verify {
        assert!(Pallet::<T>::accounts_payable(&verifier).is_some());
    }
    complete_initialization {
        // Fund pallet account
        let total_pot = 100u32;
        fund_specific_account::<T>(Pallet::<T>::account_id(), total_pot.into());
        // 1 contributor is enough
        let contributors = create_contributors::<T>(1, 0);
        // Insert them
        insert_contributors::<T>(contributors)?;
        // We need to create the first block inherent, to initialize the initRelayBlock
        let first_block_inherent = create_inherent_data::<T>(1u32);
        RelayPallet::<T>::on_initialize(T::BlockNumber::one());
        RelayPallet::<T>::create_inherent(&first_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(T::BlockNumber::one());
        Pallet::<T>::on_finalize(T::BlockNumber::one());
    }: _(RawOrigin::Root, 10u32.into())
    verify {
        assert!(Pallet::<T>::initialized());
    }
    claim {
        // Fund pallet account
        let total_pot = 100u32;
        fund_specific_account::<T>(Pallet::<T>::account_id(), total_pot.into());
        // The user that will make the call
        let caller: T::AccountId = create_funded_user::<T>("user", SEED, 100u32.into());
        // We verified there is no dependency of the number of contributors already inserted in claim
        // Create 1 contributor
        let contributors: Vec<(T::RelayChainAccountId, Option<T::AccountId>, BalanceOf<T>)> =
            vec![(AccountId32::from([1u8;32]).into(), Some(caller.clone()), total_pot.into())];
        // Insert them
        insert_contributors::<T>(contributors)?;
        // Close initialization
        close_initialization::<T>(10u32.into())?;
        // First inherent
        let first_block_inherent = create_inherent_data::<T>(1u32);
        RelayPallet::<T>::on_initialize(T::BlockNumber::one());
        RelayPallet::<T>::create_inherent(&first_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(T::BlockNumber::one());
        Pallet::<T>::on_finalize(T::BlockNumber::one());
        // Create 4th relay block, by now the user should have vested some amount
        RelayPallet::<T>::on_initialize(4u32.into());
        let last_block_inherent = create_inherent_data::<T>(4u32);
        RelayPallet::<T>::create_inherent(&last_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(4u32.into());
    }: _(RawOrigin::Signed(caller.clone()))
    verify {
        assert_eq!(Pallet::<T>::accounts_payable(&caller).unwrap().total_reward, (100u32.into()));
    }
    update_reward_address {
        // Fund pallet account
        let total_pot = 100u32;
        fund_specific_account::<T>(Pallet::<T>::account_id(), total_pot.into());
        // The user that will make the call
        let caller: T::AccountId = create_funded_user::<T>("user", SEED, 100u32.into());
        let relay_account: T::RelayChainAccountId = AccountId32::from([1u8;32]).into();
        // We verified there is no dependency of the number of contributors already inserted in update_reward_address
        // Create 1 contributor
        let contributors: Vec<(T::RelayChainAccountId, Option<T::AccountId>, BalanceOf<T>)> =
            vec![(relay_account.clone(), Some(caller.clone()), total_pot.into())];
        // Insert them
        insert_contributors::<T>(contributors)?;
        // Close initialization
        close_initialization::<T>(10u32.into())?;
        // First inherent
        let first_block_inherent = create_inherent_data::<T>(1u32);
        RelayPallet::<T>::on_initialize(T::BlockNumber::one());
        RelayPallet::<T>::create_inherent(&first_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(T::BlockNumber::one());
        Pallet::<T>::on_finalize(T::BlockNumber::one());
        // Let's advance the relay so that the vested amount get transferred
        RelayPallet::<T>::on_initialize(4u32.into());
        let last_block_inherent = create_inherent_data::<T>(4u32);
        RelayPallet::<T>::create_inherent(&last_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(4u32.into());
        // The new user
        let new_user = create_funded_user::<T>("user", SEED+1, 0u32.into());
    }: _(RawOrigin::Signed(caller.clone()), new_user.clone())
    verify {
        assert_eq!(Pallet::<T>::accounts_payable(&new_user).unwrap().total_reward, (100u32.into()));
        assert!(Pallet::<T>::claimed_relay_chain_ids(&relay_account).is_some());
    }
    associate_native_identity {
        // Fund pallet account
        let total_pot = 100u32;
        fund_specific_account::<T>(Pallet::<T>::account_id(), total_pot.into());
        // The caller that will associate the account
        let caller: T::AccountId = create_funded_user::<T>("user", SEED, 100u32.into());
        // Create a fake sig for such an account
        let (relay_account, signature) = create_sig::<T>(SEED, caller.clone().encode());
        // We verified there is no dependency of the number of contributors already inserted in associate_native_identity
        // Create 1 contributor
        let contributors: Vec<(T::RelayChainAccountId, Option<T::AccountId>, BalanceOf<T>)> =
            vec![(relay_account.clone().into(), None, total_pot.into())];
        // Insert them
        insert_contributors::<T>(contributors)?;
        // Close initialization
        close_initialization::<T>(10u32.into())?;
        // First inherent
        let first_block_inherent = create_inherent_data::<T>(1u32);
        RelayPallet::<T>::on_initialize(T::BlockNumber::one());
        RelayPallet::<T>::create_inherent(&first_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(T::BlockNumber::one());
        Pallet::<T>::on_finalize(T::BlockNumber::one());
    }: _(RawOrigin::Signed(caller.clone()), caller.clone(), relay_account.into(), signature)
    verify {
        assert_eq!(Pallet::<T>::accounts_payable(&caller).unwrap().total_reward, (100u32.into()));
    }
    change_association_with_relay_keys {
        // The weight will depend on the number of proofs provided
        // We need to parameterize this value
        // We leave this as the max batch length
        let x in 1..max_batch_contributors::<T>();
        // Fund pallet account
        let total_pot = 100u32*x;
        fund_specific_account::<T>(Pallet::<T>::account_id(), total_pot.into());
        // The first reward account that will associate the account
        let first_reward_account: T::AccountId = create_funded_user::<T>("user", SEED, 100u32.into());
        // The account to which we will update our reward account
        let second_reward_account: T::AccountId = create_funded_user::<T>("user", SEED-1, 100u32.into());
        let mut proofs: Vec<(T::RelayChainAccountId, MultiSignature)> = Vec::new();
        // Construct payload
        let mut payload = WRAPPED_BYTES_PREFIX.to_vec();
        payload.append(&mut second_reward_account.clone().encode());
        payload.append(&mut first_reward_account.clone().encode());
        payload.append(&mut WRAPPED_BYTES_POSTFIX.to_vec());
        // Create N sigs for N accounts
        for i in 0..x {
            let (relay_account, signature) = create_sig::<T>(SEED-i, payload.clone());
            proofs.push((relay_account.into(), signature));
        }
        // Create x contributors
        // All of them map to the same account
        let mut contributors: Vec<(T::RelayChainAccountId, Option<T::AccountId>, BalanceOf<T>)> = Vec::new();
        for (relay_account, _) in proofs.clone() {
            contributors.push((relay_account, Some(first_reward_account.clone()), 100u32.into()));
        }
        // Insert them
        insert_contributors::<T>(contributors.clone())?;
        // Close initialization
        close_initialization::<T>(10u32.into())?;
        // First inherent
        let first_block_inherent = create_inherent_data::<T>(1u32);
        RelayPallet::<T>::on_initialize(T::BlockNumber::one());
        RelayPallet::<T>::create_inherent(&first_block_inherent)
            .expect("got an inherent")
            .dispatch_bypass_filter(RawOrigin::None.into())
            .expect("dispatch succeeded");
        RelayPallet::<T>::on_finalize(T::BlockNumber::one());
        Pallet::<T>::on_finalize(T::BlockNumber::one());
    }: _(RawOrigin::Signed(first_reward_account.clone()), second_reward_account.clone(), first_reward_account.clone(), proofs)
    verify {
        assert!(Pallet::<T>::accounts_payable(&second_reward_account).is_some());
        assert_eq!(Pallet::<T>::accounts_payable(&second_reward_account).unwrap().total_reward, (100u32*x).into());
        assert!(Pallet::<T>::accounts_payable(&first_reward_account).is_none());
    }
}
#[cfg(test)]
mod tests {
    use crate::mock::Test;
    use sp_io::TestExternalities;
    /// Builds test externalities from the default genesis configuration of
    /// the mock runtime.
    pub fn new_test_ext() -> TestExternalities {
        TestExternalities::new(
            frame_system::GenesisConfig::default()
                .build_storage::<Test>()
                .unwrap(),
        )
    }
}
// Wire the benchmarks above into `cargo test` using the mock runtime.
impl_benchmark_test_suite!(
    Pallet,
    crate::benchmarks::tests::new_test_ext(),
    crate::mock::Test
);
|
mod bao;
use bao::{RandomAgent, HumanAgent, TrainingAgent, Direction, Game, Mode, Player};
use rustneat::{Environment, Organism, Population};
/// Plays 100_000 games between two random agents and prints how many games
/// each side won.
fn random_ai_game() {
    let mut wins = [0; 2];
    for _ in 0..100000 {
        let game = Game::new(
            Direction::CW,
            Mode::Easy,
            Player::new("Player 1", 0),
            Player::new("Player 2", 1),
        );
        let tag = game
            .play(&mut RandomAgent::default(), &mut RandomAgent::default())
            .winner
            .tag();
        wins[tag] += 1;
    }
    println!("First Player: {}", wins[0]);
    println!("Second Player: {}", wins[1]);
}
/// Plays one interactive game — human first player versus a random second
/// player — then announces the winner.
fn human_game() {
    let game = Game::new(
        Direction::CW,
        Mode::Easy,
        Player::new("Player 1", 0),
        Player::new("Player 2", 1),
    );
    let result = game.play(&mut HumanAgent::default(), &mut RandomAgent::default());
    println!("Winner: {}", result.winner.name());
}
/// NEAT fitness environment: one trial scores 1.0 when the organism (playing
/// second) beats a random opponent, otherwise 0.0.
struct GameEnvironment;
impl Environment for GameEnvironment {
    fn test(&self, organism: &mut Organism) -> f64 {
        let winner_tag = Game::new(
            Direction::CW,
            Mode::Easy,
            Player::new("Player 1", 0),
            Player::new("Player 2", 1),
        )
        .play(
            &mut RandomAgent::default(),
            &mut TrainingAgent::new(organism),
        )
        .winner
        .tag();
        // The trained agent plays as player index 1.
        if winner_tag == 1 {
            1.0
        } else {
            0.0
        }
    }
}
/// Evolves a population of 100 organisms until one exceeds fitness 75, then
/// prints that champion's genome.
fn train() {
    let mut population = Population::create_population(100);
    let mut environment = GameEnvironment;
    let mut champion: Option<Organism> = None;
    // Each pass evolves a full generation, evaluates it, logs every fitness,
    // and records the last organism that crosses the threshold.
    while champion.is_none() {
        population.evolve();
        population.evaluate_in(&mut environment);
        for organism in &population.get_organisms() {
            println!("Fitness: {}", organism.fitness);
            if organism.fitness > 75.0 {
                champion = Some(organism.clone());
            }
        }
    }
    // `champion` is guaranteed `Some` by the loop condition.
    println!("{:?}", champion.unwrap().genome);
}
/// Dispatches on the first CLI argument: `random`, `human`, or `train`.
/// Any other (or missing) argument does nothing.
fn main() {
    let param: String = std::env::args().skip(1).take(1).collect();
    match param.as_str() {
        "random" => random_ai_game(),
        "human" => human_game(),
        "train" => train(),
        _ => {}
    }
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Machine-generated WinRT binding (windows-rs) for the runtime class
// Windows.UI.Composition.Core.CompositorController; avoid hand-editing bodies.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct CompositorController(pub ::windows::core::IInspectable);
impl CompositorController {
    /// Activates a new instance through the class's activation factory.
    pub fn new() -> ::windows::core::Result<Self> {
        Self::IActivationFactory(|f| f.activate_instance::<Self>())
    }
    // Runs `callback` against a cached activation factory.
    // NOTE(review): relies on windows-rs's FactoryCache to make the
    // `static mut` access sound — confirm against the generator version.
    fn IActivationFactory<R, F: FnOnce(&::windows::core::IActivationFactory) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<CompositorController, ::windows::core::IActivationFactory> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
    #[cfg(feature = "Foundation")]
    pub fn Close(&self) -> ::windows::core::Result<()> {
        // `Close` lives on IClosable, so cast to that interface first.
        let this = &::windows::core::Interface::cast::<super::super::super::Foundation::IClosable>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this)).ok() }
    }
    pub fn Compositor(&self) -> ::windows::core::Result<super::Compositor> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::Compositor>(result__)
        }
    }
    pub fn Commit(&self) -> ::windows::core::Result<()> {
        let this = self;
        unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this)).ok() }
    }
    #[cfg(feature = "Foundation")]
    pub fn EnsurePreviousCommitCompletedAsync(&self) -> ::windows::core::Result<super::super::super::Foundation::IAsyncAction> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::Foundation::IAsyncAction>(result__)
        }
    }
    #[cfg(feature = "Foundation")]
    pub fn CommitNeeded<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::TypedEventHandler<CompositorController, ::windows::core::IInspectable>>>(&self, handler: Param0) -> ::windows::core::Result<super::super::super::Foundation::EventRegistrationToken> {
        let this = self;
        unsafe {
            let mut result__: super::super::super::Foundation::EventRegistrationToken = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), handler.into_param().abi(), &mut result__).from_abi::<super::super::super::Foundation::EventRegistrationToken>(result__)
        }
    }
    #[cfg(feature = "Foundation")]
    pub fn RemoveCommitNeeded<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::EventRegistrationToken>>(&self, token: Param0) -> ::windows::core::Result<()> {
        let this = self;
        unsafe { (::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), token.into_param().abi()).ok() }
    }
}
// Runtime metadata: WinRT type signature, default-interface IID, and the
// fully-qualified runtime class name.
unsafe impl ::windows::core::RuntimeType for CompositorController {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.UI.Composition.Core.CompositorController;{2d75f35a-70a7-4395-ba2d-cef0b18399f9})");
}
unsafe impl ::windows::core::Interface for CompositorController {
    type Vtable = ICompositorController_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x2d75f35a_70a7_4395_ba2d_cef0b18399f9);
}
impl ::windows::core::RuntimeName for CompositorController {
    const NAME: &'static str = "Windows.UI.Composition.Core.CompositorController";
}
// Generated conversions between the class and the core COM/WinRT wrappers
// (IUnknown / IInspectable), by value and by reference.
impl ::core::convert::From<CompositorController> for ::windows::core::IUnknown {
    fn from(value: CompositorController) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&CompositorController> for ::windows::core::IUnknown {
    fn from(value: &CompositorController) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for CompositorController {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a CompositorController {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<CompositorController> for ::windows::core::IInspectable {
    fn from(value: CompositorController) -> Self {
        value.0
    }
}
impl ::core::convert::From<&CompositorController> for ::windows::core::IInspectable {
    fn from(value: &CompositorController) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for CompositorController {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a CompositorController {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Feature-gated conversions to Foundation::IClosable (used by `Close`),
// plus thread-safety markers for the class wrapper.
#[cfg(feature = "Foundation")]
impl ::core::convert::TryFrom<CompositorController> for super::super::super::Foundation::IClosable {
    type Error = ::windows::core::Error;
    fn try_from(value: CompositorController) -> ::windows::core::Result<Self> {
        ::core::convert::TryFrom::try_from(&value)
    }
}
#[cfg(feature = "Foundation")]
impl ::core::convert::TryFrom<&CompositorController> for super::super::super::Foundation::IClosable {
    type Error = ::windows::core::Error;
    fn try_from(value: &CompositorController) -> ::windows::core::Result<Self> {
        ::windows::core::Interface::cast(value)
    }
}
#[cfg(feature = "Foundation")]
impl<'a> ::windows::core::IntoParam<'a, super::super::super::Foundation::IClosable> for CompositorController {
    fn into_param(self) -> ::windows::core::Param<'a, super::super::super::Foundation::IClosable> {
        ::windows::core::IntoParam::into_param(&self)
    }
}
#[cfg(feature = "Foundation")]
impl<'a> ::windows::core::IntoParam<'a, super::super::super::Foundation::IClosable> for &CompositorController {
    fn into_param(self) -> ::windows::core::Param<'a, super::super::super::Foundation::IClosable> {
        // A failed cast degrades to `Param::None` rather than panicking.
        ::core::convert::TryInto::<super::super::super::Foundation::IClosable>::try_into(self).map(::windows::core::Param::Owned).unwrap_or(::windows::core::Param::None)
    }
}
unsafe impl ::core::marker::Send for CompositorController {}
unsafe impl ::core::marker::Sync for CompositorController {}
// Hidden interface wrapper and raw vtable layout. The signatures of slots
// 0-5 match IUnknown (QueryInterface/AddRef/Release) and IInspectable
// (GetIids/GetRuntimeClassName/GetTrustLevel); class methods start at slot 6,
// matching the `.6`/`.7`/... indexes used above. Event-related slots collapse
// to `usize` placeholders when the "Foundation" feature is disabled.
#[repr(transparent)]
#[doc(hidden)]
pub struct ICompositorController(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for ICompositorController {
    type Vtable = ICompositorController_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x2d75f35a_70a7_4395_ba2d_cef0b18399f9);
}
#[repr(C)]
#[doc(hidden)]
pub struct ICompositorController_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, handler: ::windows::core::RawPtr, result__: *mut super::super::super::Foundation::EventRegistrationToken) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, token: super::super::super::Foundation::EventRegistrationToken) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
|
use amethyst::core::Transform;
use amethyst::derive::SystemDesc;
use amethyst::ecs::{Join, Read, System, SystemData, WriteStorage};
use amethyst::input::{InputHandler, StringBindings};
use crate::components::{Direction, Player, PlayerState};
/// Moves the player entity from the input axes and updates its animation
/// state (running direction or idling).
#[derive(SystemDesc)]
pub struct PlayerSystem;
impl<'s> System<'s> for PlayerSystem {
    type SystemData = (
        WriteStorage<'s, Transform>,
        WriteStorage<'s, Player>,
        Read<'s, InputHandler<StringBindings>>,
    );
    fn run(&mut self, (mut transforms, mut players, input): Self::SystemData) {
        for (player, transform) in (&mut players, &mut transforms).join() {
            // An unbound or centered axis contributes no movement. This
            // replaces the previous nested `if let` ladder, which processed
            // the horizontal axis identically in both of its branches.
            let move_vert = input.axis_value("player_updown").unwrap_or(0.0);
            let move_horiz = input.axis_value("player_leftright").unwrap_or(0.0);
            if move_vert != 0.0 && move_horiz != 0.0 {
                // Diagonal: speed/sqrt(2) per axis keeps the overall speed
                // equal to `player.speed`.
                let speed = (player.speed.powf(2.0) / 2.0).sqrt();
                transform.prepend_translation_y(speed * move_vert);
                transform.prepend_translation_x(speed * move_horiz);
                // Facing is chosen from the horizontal component only.
                player.state = if move_horiz > 0.0 {
                    PlayerState::Running(Direction::Right)
                } else {
                    PlayerState::Running(Direction::Left)
                };
            } else if move_vert != 0.0 {
                transform.prepend_translation_y(player.speed * move_vert);
                player.state = if move_vert > 0.0 {
                    PlayerState::Running(Direction::Up)
                } else {
                    PlayerState::Running(Direction::Down)
                };
            } else if move_horiz != 0.0 {
                transform.prepend_translation_x(player.speed * move_horiz);
                player.state = if move_horiz > 0.0 {
                    PlayerState::Running(Direction::Right)
                } else {
                    PlayerState::Running(Direction::Left)
                };
            } else {
                player.state = PlayerState::Idling;
            }
        }
    }
}
|
pub mod asset;
pub mod grpc_stub;
pub mod prelude;
|
#[doc(hidden)]
#[macro_export]
macro_rules! declare_tl_lifetime_types {(
    repr=$repr:ty,
    attrs=[ $($extra_attrs:meta),* $(,)* ]
) => (
    /// Which lifetime is being referenced by a field.
    /// Allows lifetimes to be renamed, so long as the "same" lifetime is being referenced.
    #[repr(transparent)]
    #[derive(Copy, Clone, PartialEq, Eq)]
    $(#[ $extra_attrs ])*
    pub struct LifetimeIndex{
        // Raw encoding: 0 = NONE, 1 = ANONYMOUS, 2 = STATIC,
        // 3.. = lifetime parameter (see START_OF_LIFETIMES).
        bits:$repr
    }
    impl LifetimeIndex {
        /// A sentinel value to represent the absence of a lifetime.
        ///
        /// Making this be all zeroes allows using `u32::leading_zeros`
        /// to calculate the length of LifetimeIndexArray .
        pub const NONE: Self = LifetimeIndex{bits:0};
        /// A lifetime parameter in a function pointer that is only used once,
        /// and does not appear in the return type.
        pub const ANONYMOUS: Self = LifetimeIndex{bits:1};
        /// A static lifetime.
        pub const STATIC: Self = LifetimeIndex{bits:2};
        // First bit pattern available for real lifetime parameters; the
        // values 0..=2 are taken by the sentinels above.
        const START_OF_LIFETIMES:$repr=3;
        /// The maximum number of lifetime parameters.
        // Every index must fit in a nibble (see from_u4/to_u4), hence 15.
        pub const MAX_LIFETIME_PARAM:$repr=15-Self::START_OF_LIFETIMES;
        /// Constructs a LifetimeIndex to the nth lifetime parameter of a type.
        // NOTE: intentionally UpperCamelCase, styled like a variant constructor.
        #[inline]
        pub const fn Param(index: $repr) -> LifetimeIndex {
            LifetimeIndex{
                bits:index + Self::START_OF_LIFETIMES,
            }
        }
        /// Gets which lifetime parameter this is.
        /// Returns None if it's not a lifetime parameter.
        #[inline]
        pub const fn to_param(self)->Option<$repr>{
            if self.bits < Self::START_OF_LIFETIMES {
                None
            }else{
                Some(self.bits-Self::START_OF_LIFETIMES)
            }
        }
        /// Constructs a `LifetimeIndex` from its representation.
        // Only the low 4 bits are meaningful; anything above is discarded.
        #[inline]
        pub const fn from_u4(bits: u8) -> Self {
            LifetimeIndex{
                bits:(bits & 0b1111)as _
            }
        }
        /// Converts a `LifetimeIndex` into its representation.
        #[inline]
        pub const fn to_u4(self) -> u8 {
            (self.bits & 0b1111)as _
        }
    }
    mod lifetime_index_impls{
        use super::*;
        use std::fmt::{self,Debug};
        impl Debug for LifetimeIndex{
            fn fmt(&self,f:&mut fmt::Formatter<'_>)->fmt::Result{
                // The first three arms are const (structural) patterns for the
                // sentinels; any other value is a lifetime parameter.
                match *self {
                    Self::NONE=>f.debug_struct("NONE").finish(),
                    Self::ANONYMOUS=>f.debug_struct("ANONYMOUS").finish(),
                    Self::STATIC=>f.debug_struct("STATIC").finish(),
                    Self{bits}=>
                        f.debug_tuple("Param")
                            .field(&(bits-Self::START_OF_LIFETIMES))
                            .finish(),
                }
            }
        }
    }
    /////////////////////////////////////////////////////
    /// A `LifetimeIndex::NONE` terminated array of 5 lifetime indices.
    ///
    /// This is represented as `4 bits x 5` inside a u32.
    #[repr(transparent)]
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    $(#[ $extra_attrs ])*
    pub struct LifetimeIndexArray {
        /// (4 bits per LifetimeIndex)*5
        bits: u32,
    }
    impl LifetimeIndexArray {
        /// An array with no lifetimes.
        pub const EMPTY: Self = Self { bits: 0 };
        /// Constructs this LifetimeIndexArray from an array.
        // Packs each index into its own nibble, element 0 in the low bits.
        #[inline]
        pub const fn from_array(array: [LifetimeIndex;5]) -> Self {
            let bits= array[0].bits as u32 | ((array[1].bits as u32) << 4)
                | ((array[2].bits as u32) << 8) | ((array[3].bits as u32) << 12)
                | ((array[4].bits as u32) << 16);
            Self {bits}
        }
        /// Converts this LifetimeIndexArray into an array.
        // Repacks the 5 stored indices into 3 pairs; the second slot of the
        // final pair is padded with `LifetimeIndex::NONE`.
        #[inline]
        pub const fn to_array(self) -> [LifetimeIndexPair; 3] {
            [
                LifetimeIndexPair::new(
                    LifetimeIndex{ bits: (self.bits & 0b1111)as $repr },
                    LifetimeIndex{ bits: ((self.bits >> 4) & 0b1111)as $repr },
                ),
                LifetimeIndexPair::new(
                    LifetimeIndex{ bits: ((self.bits >> 8) & 0b1111)as $repr },
                    LifetimeIndex{ bits: ((self.bits >> 12) & 0b1111)as $repr },
                ),
                LifetimeIndexPair::new(
                    LifetimeIndex{ bits: ((self.bits >> 16) & 0b1111)as $repr },
                    LifetimeIndex::NONE,
                )
            ]
        }
        /// Converts this array into its representation.
        // Masks to the low 20 bits (5 nibbles).
        #[inline]
        pub const fn to_u20(self) -> u32 {
            self.bits&0xF_FF_FF
        }
        /// Constructs this array from its representation.
        #[inline]
        pub const fn from_u20(bits: u32) -> Self {
            Self {
                bits:bits & 0xF_FF_FF
            }
        }
        /// Gets the length of this array.
        // A u32 holds 8 nibbles; subtracting the count of leading zero
        // nibbles yields the position of the last non-NONE entry + 1.
        // This relies on NONE being encoded as 0.
        #[inline]
        pub const fn len(mut self) -> usize{
            (8-(self.bits.leading_zeros() >> 2))as usize
        }
        /// Gets whether the array is empty.
        #[inline]
        pub const fn is_empty(self) -> bool{
            self.len() == 0
        }
    }
    /////////////////////////////////////////////////////
    /// Either a `LifetimeArray` or a range into a slice of `LifetimePair`s.
    #[repr(transparent)]
    #[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd)]
    $(#[ $extra_attrs ])*
    pub struct LifetimeRange{
        /// 21 bits total; bit 20 is the is-range flag:
        /// - flag set: bits 0..13 are the start index into a slice of
        ///   `LifetimeIndexPair`s, and bits 13..20 are the length (in pairs).
        /// - flag clear: bits 0..20 are an inline `LifetimeIndexArray`.
        bits:u32,
    }
    impl LifetimeRange{
        /// An empty `LifetimeRange`.
        pub const EMPTY: Self = Self { bits: 0 };
        const IS_RANGE_BIT: u32 = 1<<20;
        const RANGE_LEN_OFFSET: u32 = 13;
        const LEN_SR_MASK: u32 = 0b111_1111;
        const LEN_BIT_SIZE: u32 = 7;
        const MASK: u32 = 0x1F_FF_FF;
        const START_MASK: u32 = 0b1_1111_1111_1111;
        /// The amount of bits used to represent a LifetimeRange.
        pub const BIT_SIZE:u32=21;
        /// The maximum value for the start of a range.
        pub const MAX_START:usize=Self::START_MASK as usize;
        /// The maximum length of a range.
        pub const MAX_LEN:usize=Self::LEN_SR_MASK as usize;
        /// Constructs a LifetimeRange from a single lifetime parameter.
        pub const fn Param(index: $repr) -> Self {
            Self::from_array([
                LifetimeIndex::Param(index),
                LifetimeIndex::NONE,
                LifetimeIndex::NONE,
                LifetimeIndex::NONE,
                LifetimeIndex::NONE,
            ])
        }
        /// Constructs a LifetimeRange from an array of 5 lifetime indices.
        // The inline form: IS_RANGE_BIT stays clear.
        #[inline]
        pub const fn from_array(lia:[LifetimeIndex;5])->Self{
            Self{
                bits:LifetimeIndexArray::from_array(lia).to_u20()
            }
        }
        /// Constructs a LifetimeRange from a range.
        #[inline]
        pub const fn from_range(range:std::ops::Range<usize>)->Self{
            let len=range.end-range.start;
            Self{
                bits:
                    Self::IS_RANGE_BIT
                    |((range.start as u32)&Self::START_MASK)
                    |((len as u32 & Self::LEN_SR_MASK) << Self::RANGE_LEN_OFFSET)
            }
        }
        #[inline]
        const fn range_len(self)->usize{
            // The stored length counts `LifetimeIndexPair`s; doubling yields
            // the number of individual `LifetimeIndex`es spanned.
            ((self.bits >> Self::RANGE_LEN_OFFSET) & Self::LEN_SR_MASK) as usize * 2
        }
        /// The amount of lifetime indices this spans.
        #[inline]
        pub const fn len(self) -> usize {
            if self.is_range() {
                self.range_len()
            }else{
                LifetimeIndexArray::from_u20(self.bits).len()
            }
        }
        /// Whether this span of lifetimes is empty.
        #[inline]
        pub const fn is_empty(self) -> bool {
            self.len() == 0
        }
        /// Whether this is a range into a slice of `LifetimePair`s.
        #[inline]
        pub const fn is_range(self)->bool{
            (self.bits&Self::IS_RANGE_BIT)==Self::IS_RANGE_BIT
        }
        /// Converts this `LifetimeRange` into its representation
        #[inline]
        pub const fn to_u21(self) -> u32 {
            self.bits & Self::MASK
        }
        /// Constructs this `LifetimeRange` from its representation
        #[inline]
        pub const fn from_u21(bits: u32) -> Self {
            Self {
                bits: bits & Self::MASK,
            }
        }
    }
    ////////////////////////////////////////////////////////////////////////////////
    /// A pair of `LifetimeIndex`.
    #[repr(transparent)]
    #[derive(Copy, Clone, PartialEq, Eq)]
    $(#[ $extra_attrs ])*
    pub struct LifetimeIndexPair{
        // Low nibble: first index; high nibble: second index.
        bits:$repr,
    }
    /// The representation of `LifetimeIndexPair`.
    pub type LifetimeIndexPairRepr=u8;
    impl LifetimeIndexPair{
        /// A pair of `LifetimeIndex::NONE`
        pub const NONE:LifetimeIndexPair=
            LifetimeIndexPair::new( LifetimeIndex::NONE, LifetimeIndex::NONE );
        /// Constructs a `LifetimeIndexPair` from a pair of `LifetimeIndex`
        #[inline]
        pub const fn new(first:LifetimeIndex,second:LifetimeIndex)->Self{
            Self{
                bits:(first.to_u4() | (second.to_u4()<<4)) as _,
            }
        }
        /// Gets the first `LifetimeIndex` of this pair.
        #[inline]
        pub const fn first(self)->LifetimeIndex{
            LifetimeIndex::from_u4(self.bits as _)
        }
        /// Gets the second `LifetimeIndex` of this pair.
        #[inline]
        pub const fn second(self)->LifetimeIndex{
            LifetimeIndex::from_u4((self.bits>>4) as _)
        }
        /// Gets both `LifetimeIndex` in this `LifetimeIndexPair`.
        #[inline]
        pub const fn both(self)->(LifetimeIndex,LifetimeIndex){
            (self.first(),self.second())
        }
        /// Converts this `LifetimeIndexPair` into its representation.
        pub const fn to_u8(self)->u8{
            self.bits as _
        }
    }
    mod lifetime_index_pair_impls{
        use super::*;
        use std::fmt::{self,Debug};
        impl Debug for LifetimeIndexPair{
            fn fmt(&self,f:&mut fmt::Formatter<'_>)->fmt::Result{
                // Rendered as a two-element list, e.g. `[STATIC, Param(0)]`.
                f.debug_list()
                    .entry(&self.first())
                    .entry(&self.second())
                    .finish()
            }
        }
    }
)}
|
#![crate_name = "js"]
#![comment = "Javascript parsing, compilation and execution using LibJIT"]
#![license = "MIT"]
#![crate_type = "lib"]
#![doc(
html_favicon_url = "http://tombebbington.github.io/favicon.png"
)]
#![experimental]
#![feature(phase, macro_rules, globs)]
#![deny(non_uppercase_statics, missing_doc, unnecessary_parens, unrecognized_lint,
unreachable_code, unnecessary_allocation, unnecessary_typecast, unnecessary_allocation,
uppercase_variables, non_camel_case_types, unused_must_use)]
//! This is a library with separate modules for Javascript parsing, the Javascript
//! standard library, and Javascript execution through LibJIT
extern crate collections;
extern crate jit;
#[phase(plugin, link)]
extern crate log;
extern crate serialize;
extern crate time;
extern crate url;
/// The backend-defining traits and the Javascript standard library
pub mod front;
/// The default backend implemented on top of LibJIT
pub mod back;
/// Javascript parsing and syntax
pub mod syntax; |
// TODO: flat::tree::Libraries needs DuplicateLibraryError, but maybe that should
// go somewhere else
pub mod errors;
mod visitor;
use super::*;
use crate::flatten::{lookup, ResolverContext, UnresolvedScope};
use crate::lexer::Span;
use crate::raw;
use crate::raw::Spanned;
use crate::span;
use errors::{Error, RawName};
use std::collections::HashMap;
use std::str::FromStr;
impl Library {
    // TODO: return errors
    /// Resolves every file in `lib_cx` against the already-resolved `deps`,
    /// producing a fully resolved `Library`, or every accumulated error.
    pub fn from_files(lib_cx: ResolverContext, deps: &Libraries) -> Result<Library, Vec<Error>> {
        let ResolverContext {
            attributes,
            name,
            defined_names,
            files,
        } = lib_cx;
        let mut terms: TermScope = HashMap::new();
        let mut types: TypeScope = HashMap::new();
        let mut protocols: HashMap<String, Spanned<Protocol>> = HashMap::new();
        let mut services: HashMap<String, Spanned<Service>> = HashMap::new();
        let mut errors: Vec<Error> = Vec::new();
        for file in files {
            // TODO: can we continue here? note that on import duplicates, currently the old one
            // is overwritten in the new one. instead, it should just not exist?
            // NOTE(review): this `?` aborts on the first file with bad imports and
            // discards errors already collected from earlier files -- confirm intended.
            let mut imports = FileImports::from_imports(file.imports, &deps)?;
            let mut resolver = Resolver {
                imports: &mut imports,
                local_names: &defined_names,
                deps,
            };
            // Each declaration resolves to a (name, entry) pair inserted into the
            // scope matching its kind; `drop` just discards any previous entry.
            // NOTE(review): the Alias arm `push`es while every other arm `extend`s
            // -- presumably the resolve() error types differ (single Error vs a
            // collection); confirm the asymmetry is intentional.
            for decl in file.decls {
                match decl.value {
                    raw::Decl::Alias(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(types.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.push(errs),
                    },
                    raw::Decl::Const(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(terms.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Struct(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(types.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Bits(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(types.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Enum(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(types.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Table(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(types.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Union(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(types.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Protocol(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(protocols.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                    raw::Decl::Service(val) => match val.resolve(&mut resolver) {
                        Ok((name, entry)) => drop(services.insert(name, decl.span.wrap(entry))),
                        Err(errs) => errors.extend(errs),
                    },
                }
            }
            // Imports that were never exercised during resolution become errors.
            errors.extend(imports.get_unused_imports());
        }
        if errors.is_empty() {
            Ok(Library {
                attributes,
                name,
                terms,
                types,
                protocols,
                services,
            })
        } else {
            Err(errors)
        }
    }
    // used for testing
    /// Constructs an otherwise-default `Library` with the given name.
    pub fn empty(name: String) -> Library {
        let mut lib = Library::default();
        lib.name = name;
        lib
    }
    /// Looks up a top-level `name` in any scope, returning the span where it
    /// was declared.
    pub fn lookup(&self, name: &String) -> Option<Span> {
        // NOTE: Flattener ensures that each of these scopes are mutually exclusive
        if let Some(entry) = self.terms.get(name) {
            return Some(entry.span);
        }
        if let Some(entry) = self.types.get(name) {
            return Some(entry.span);
        }
        if let Some(entry) = self.protocols.get(name) {
            return Some(entry.span);
        }
        if let Some(entry) = self.services.get(name) {
            return Some(entry.span);
        }
        return None;
    }
    /// Looks up `member` inside the type `name`. Only bits and enums have
    /// addressable members; everything else yields `None`.
    pub fn lookup_nested(&self, name: &String, member: &String) -> Option<Span> {
        match self.types.get(name) {
            Some(entry) => {
                if let Type::Bits(bits) = &entry.value.1.value {
                    return bits.lookup(member);
                }
                if let Type::Enum(num) = &entry.value.1.value {
                    return num.lookup(member);
                }
                return None;
            }
            None => return None,
        }
    }
}
/// Resolves raw (parsed) names, terms, and types within a single file into
/// fully-qualified forms, tracking which imports are actually used.
pub struct Resolver<'a> {
    /// The current file's `using` declarations; imports get marked used here.
    pub imports: &'a mut FileImports,
    /// Names defined at the top level of the library being resolved.
    pub local_names: &'a UnresolvedScope,
    /// The already-resolved dependency libraries.
    pub deps: &'a Libraries,
}
impl<'a> Resolver<'a> {
pub fn new(
imports: &'a mut FileImports,
local_names: &'a UnresolvedScope,
deps: &'a Libraries,
) -> Self {
Resolver {
imports,
local_names,
deps,
}
}
pub fn resolve_term_boxed(
&mut self,
sp: Spanned<raw::Term>,
) -> Result<Spanned<Box<Term>>, Error> {
self.resolve_term(sp)
.map(|spanned| spanned.map(|term| Box::new(term)))
}
pub fn resolve_term(&mut self, spanned: Spanned<raw::Term>) -> Result<Spanned<Term>, Error> {
spanned.try_map(|term| match term {
raw::Term::Identifier(name) => Ok(Term::Identifier(self.resolve_name(name)?.value)),
raw::Term::Str(s) => Ok(Term::Str(s)),
raw::Term::Int(n) => Ok(Term::Int(n)),
raw::Term::Float(n) => Ok(Term::Float(n)),
raw::Term::True => Ok(Term::True),
raw::Term::False => Ok(Term::False),
})
}
pub fn resolve_type_boxed(
&mut self,
sp: Spanned<Box<raw::Type>>,
) -> Result<Spanned<Box<Type>>, Error> {
self.resolve_type(sp)
.map(|spanned| spanned.map(|ty| Box::new(ty)))
}
pub fn resolve_type(
&mut self,
spanned: Spanned<Box<raw::Type>>,
) -> Result<Spanned<Type>, Error> {
let outer_span = spanned.span;
spanned.try_map(|ty| {
if let Some(handle) = self.maybe_get_handle(&ty, outer_span)? {
return Ok(handle);
}
let name_span = ty.name.span;
// there's probably some way to use unwrap_or_else here
let target_type = match get_builtin_type(&ty.name) {
Some(val) => val,
None => Type::Identifier(self.resolve_name(ty.name)?.value),
};
let inner = if ty.layout.is_some() || ty.constraint.is_some() {
Type::TypeSubstitution(TypeSubstitution {
func: name_span.wrap(Box::new(target_type)),
// TODO: allow return possibly both errors
layout: ty
.layout
.map(|layout| self.resolve_type_boxed(layout))
.transpose()?,
constraint: ty
.constraint
.map(|constraint| self.resolve_term_boxed(constraint))
.transpose()?,
})
} else {
target_type
};
if ty.nullable {
// the span of the thing inside the ptr, is just the ptr's span
// without the ? at the end (i.e. subtract 1 from its end)
let inner_span = Span {
file: outer_span.file,
start: outer_span.start,
end: outer_span.end - 1,
};
Ok(Type::Ptr(inner_span.wrap(Box::new(inner))))
} else {
Ok(inner)
}
})
}
pub fn resolve_name(&mut self, name: raw::CompoundIdentifier) -> Result<Spanned<Name>, Error> {
let span = name.span;
let name = name.value;
match name.len() {
0 => panic!("cant have an empty name"),
1 => {
// this must be referring to a top level value in the local context
let name_str = name.into_iter().next().unwrap();
let name = Name {
library: self.lib_id(),
name: name_str.clone(),
member: None,
};
if self.local_names.contains_key(&name_str) {
Ok(span.wrap(name))
} else {
Err(Error::UndefinedLocal(span))
}
}
2 => {
// if there are two components a.b, this can refer to two things:
// decl b in library a (`dep_value`), or member b of decl a in the
// local library (`local_value`).
let local_value = RawName {
library: None,
name: name.first().unwrap().clone(),
member: Some(name.last().unwrap().clone()),
};
let dep_value = RawName {
library: Some(name.first().unwrap().clone()),
name: name.last().unwrap().clone(),
member: None,
};
self.resolve_names(span, local_value, dep_value)
}
_ => {
// if there are more than two components, this can't refer to a local value.
// it must either refer to a member or top level value in a dependency
let member_val = RawName {
library: Some(name[..name.len() - 2].join(".")),
name: name[name.len() - 2].clone(),
member: Some(name[name.len() - 1].clone()),
};
let toplevel_val = RawName {
library: Some(name[..name.len() - 1].join(".")),
name: name[name.len() - 1].clone(),
member: None,
};
self.resolve_names(span, member_val, toplevel_val)
}
}
}
fn resolve_names(
&mut self,
span: Span,
interp1: RawName,
interp2: RawName,
) -> Result<Spanned<Name>, Error> {
match (
self.resolve_raw_name(interp1),
self.resolve_raw_name(interp2),
) {
(Ok((span1, name1)), Ok((span2, name2))) => Err(Error::AmbiguousReference {
span,
interp1: span1.wrap(self.to_raw(name1)),
interp2: span2.wrap(self.to_raw(name2)),
}),
(Ok((_, name1)), Err(_)) => Ok(span.wrap(name1)),
(Err(_), Ok((_, name2))) => Ok(span.wrap(name2)),
(Err(name1), Err(name2)) => Err(Error::Undefined(span, name1, name2)),
}
}
fn resolve_raw_name(&mut self, name: RawName) -> Result<(Span, Name), RawName> {
match &name.library {
None => match lookup(&self.local_names, &name.name, &name.member) {
Some(span) => Ok((
span,
Name {
library: self.lib_id(),
name: name.name,
member: name.member,
},
)),
None => Err(name),
},
Some(ref lib_name) => {
let lib_name = self.imports.get_absolute(lib_name);
self.imports.mark_used(&lib_name);
match self.deps.lookup(&lib_name, &name.name, &name.member) {
Some(span) => Ok((
span,
Name {
// it must exist if lookup returned Some
library: self.deps.get_id(&lib_name).unwrap(),
name: name.name,
member: name.member,
},
)),
None => Err(RawName {
library: Some(lib_name),
name: name.name,
member: name.member,
}),
}
}
}
}
fn maybe_get_handle(
&mut self,
ty: &raw::Type,
outer_span: Span,
) -> Result<Option<Type>, Error> {
if ty.name.value.len() != 1 {
return Ok(None);
}
// TODO: the three cases only differ in maybe 3 lines, refactor out common stuff
match ty.name.value[0].as_str() {
"handle" => {
if let Some(_) = ty.constraint {
return Err(Error::ConstrainedHandle(ty.name.span));
}
let inner = match &ty.layout {
Some(subtype) => {
let subtype = as_handle_subtype(subtype)?;
HandleSubtype::from_str(subtype.value[0].as_str())
.map(|subtype| Type::Handle(Some(subtype)))
.map_err(|_| Error::InvalidHandleSubtype(subtype.span))
}
None => Ok(Type::Handle(None)),
}?;
if ty.nullable {
// the span of the thing inside the ptr, is just the ptr's span
// without the ? at the end (i.e. subtract 1 from its end)
let inner_span = Span {
file: outer_span.file,
start: outer_span.start,
end: outer_span.end - 1,
};
Ok(Some(Type::Ptr(inner_span.wrap(Box::new(inner)))))
} else {
Ok(Some(inner))
}
}
"client_end" => {
if let Some(_) = ty.constraint {
return Err(Error::ConstrainedHandle(ty.name.span));
}
let inner = match &ty.layout {
Some(subtype) => {
let subtype = as_handle_subtype(subtype)?;
Ok(Type::ClientEnd(self.resolve_name(subtype.clone())?.value))
}
None => Err(Error::MissingEndArg(outer_span)),
}?;
if ty.nullable {
// the span of the thing inside the ptr, is just the ptr's span
// without the ? at the end (i.e. subtract 1 from its end)
let inner_span = Span {
file: outer_span.file,
start: outer_span.start,
end: outer_span.end - 1,
};
Ok(Some(Type::Ptr(inner_span.wrap(Box::new(inner)))))
} else {
Ok(Some(inner))
}
}
"server_end" => {
if let Some(_) = ty.constraint {
return Err(Error::ConstrainedHandle(ty.name.span));
}
let inner = match &ty.layout {
Some(subtype) => {
let subtype = as_handle_subtype(subtype)?;
Ok(Type::ServerEnd(self.resolve_name(subtype.clone())?.value))
}
None => Err(Error::MissingEndArg(outer_span)),
}?;
if ty.nullable {
// the span of the thing inside the ptr, is just the ptr's span
// without the ? at the end (i.e. subtract 1 from its end)
let inner_span = Span {
file: outer_span.file,
start: outer_span.start,
end: outer_span.end - 1,
};
Ok(Some(Type::Ptr(inner_span.wrap(Box::new(inner)))))
} else {
Ok(Some(inner))
}
}
_ => Ok(None),
}
}
fn lib_id(&self) -> LibraryId {
self.deps.next_library_id()
}
// need to convert back to RawName when we want to display the Name in case of
// errors
fn to_raw(&self, name: Name) -> RawName {
RawName {
library: if name.library == self.lib_id() {
None
} else {
Some(self.deps.get_name(name.library))
},
name: name.name,
member: name.member,
}
}
}
// TODO: this needs many copies of each import string. we could do better at
// the expense of more searching (e.g. a Vector of absolute imports, and a map
// from alias to index in that owning vector)
#[derive(Debug)]
pub struct FileImports {
    /// A map from alias to the full library name.
    aliases: HashMap<String, String>,
    /// A map from full library name to whether this import is actually used.
    /// The span points at the `using` declaration (for unused-import errors).
    imports: HashMap<String, Spanned<bool>>,
}
// TODO: check that imports are in deps, and keep track of which imports are used.
// would be better to do this once we have a better idea of what Dependencies looks
// like. we could check that imports actually exist "lazily" (i.e. whenever they are
// used), and this would give the benefit that if it's not actually used we'd error with
// unused import instead of invalid import. but when we error, we want the import span
// and we only have that here, so FileImports must be responsible for it.
impl FileImports {
    // TODO: does it even make sense to return multiple errors here?
    // TODO: now that self.imports stores the spans, we could check the imports against
    // the Libraries separately, and make from_imports only take one parameter (which
    // would reduce test boilerplate)
    /// Validates a file's `using` declarations against `deps`, rejecting
    /// duplicate imports, alias/name conflicts, and absolute imports that
    /// name no known dependency. On success returns the alias and usage maps.
    pub fn from_imports(
        imports: Vec<Spanned<raw::Import>>,
        deps: &Libraries,
    ) -> Result<FileImports, Vec<Error>> {
        // map from import name (either absolute or alias) to a tuple of (its span,
        // and if it's an alias, the full name of the library it aliases). it's easier
        // to use a single hashmap when checking for duplicate imports or name conflicts.
        let mut import_map: HashMap<String, (Span, Option<String>)> = HashMap::new();
        let mut errors: Vec<Error> = Vec::new();
        // check for duplicate imports and name conflicts
        for import in imports {
            let absolute_name = import.value.name.value.join(".");
            // add the absolute import
            let span = import.value.name.span;
            if let Some((entry_span, entry)) =
                import_map.insert(absolute_name.clone(), (span, None))
            {
                // `insert` returned a previous entry: the name was already taken,
                // either by an alias (name conflict) or by the same absolute
                // import (plain duplicate).
                errors.push(match entry {
                    Some(name) => Error::ImportNameConflict {
                        name: name.clone(),
                        orig: entry_span,
                        dupe: span,
                    },
                    None => Error::DuplicateImport {
                        import: absolute_name.clone(),
                        orig: entry_span,
                        dupe: span,
                    },
                });
                continue;
            }
            // add the alias
            if let Some(alias) = import.value.alias {
                let span = alias.span;
                let name = alias.value;
                if let Some((entry_span, _)) =
                    import_map.insert(name.clone(), (span, Some(absolute_name.clone())))
                {
                    errors.push(Error::ImportNameConflict {
                        name: name,
                        orig: entry_span,
                        dupe: span,
                    })
                }
            }
        }
        // every absolute (non-alias) import must name a known dependency
        for (import, (span, maybe_full_name)) in &import_map {
            match maybe_full_name {
                None if !deps.contains_library(import) => {
                    errors.push(Error::DependencyNotFound(*span));
                }
                _ => (),
            }
        }
        if errors.is_empty() {
            // TODO: is it possible to partition without collecting?
            let (full_imports, aliases): (Vec<_>, Vec<_>) =
                import_map.into_iter().partition(|(_, v)| v.1.is_none());
            // Absolute imports start out unused (wrapped `false`); aliases map
            // to the absolute name they expand to.
            let full_imports = full_imports
                .into_iter()
                .map(|(k, v)| (k, v.0.wrap(false)))
                .collect::<HashMap<_, _>>();
            let aliases = aliases
                .into_iter()
                .map(|(k, v)| (k, v.1.unwrap()))
                .collect::<HashMap<_, _>>();
            Ok(FileImports {
                aliases,
                imports: full_imports,
            })
        } else {
            Err(errors)
        }
    }
    // TODO: unnecessary copying
    // TODO: this succeeds for imports that do not exist in a using statement,
    // and just returns a copy of the import back. whether this should return an
    // Error or not will depend on whether we care about distinguishing between
    // undefined because of an import not being imported, or because the variable doesn't
    // exist. the third possible kind of failure, an import not being found is
    // checked when resolving the imports (i.e. on construction of FileImports)
    /// Expands `import` through the alias table, or returns it unchanged.
    pub fn get_absolute(&self, import: &String) -> String {
        match self.aliases.get(import) {
            Some(name) => name.clone(),
            _ => import.clone(),
        }
    }
    /// Records that `absolute_import` was referenced somewhere in the file.
    pub fn mark_used(&mut self, absolute_import: &String) {
        if let Some(is_used) = self.imports.get_mut(absolute_import) {
            is_used.value = true;
        }
    }
    /// Returns one `UnusedImport` error per import never marked used.
    pub fn get_unused_imports(&self) -> Vec<Error> {
        self.imports
            .iter()
            .filter_map(|(_, is_used)| {
                if !is_used.value {
                    Some(Error::UnusedImport(is_used.span))
                } else {
                    None
                }
            })
            .collect()
    }
}
/// Interprets a layout argument as a handle subtype name: it must be a bare,
/// single-component identifier with no layout/constraint arguments and no `?`.
fn as_handle_subtype(ty: &Spanned<Box<raw::Type>>) -> Result<&raw::CompoundIdentifier, Error> {
    let span = ty.span;
    let inner = &ty.value;
    let is_bare_name = inner.constraint.is_none()
        && inner.layout.is_none()
        && !inner.nullable
        && inner.name.value.len() == 1;
    if is_bare_name {
        Ok(&inner.name)
    } else {
        Err(Error::InvalidHandleSubtype(span))
    }
}
/// Maps a single-component identifier onto the builtin type it names, or
/// `None` for dotted names and non-builtin identifiers.
pub fn get_builtin_type(var: &raw::CompoundIdentifier) -> Option<Type> {
    use PrimitiveSubtype::*;
    // Builtins are always a single bare identifier.
    let name = match var.value.as_slice() {
        [single] => single.as_str(),
        _ => return None,
    };
    match name {
        // TODO: there should be a way to use serde for this
        "bool" => Some(Type::Primitive(Bool)),
        "uint8" => Some(Type::Primitive(UInt8)),
        "uint16" => Some(Type::Primitive(UInt16)),
        "uint32" => Some(Type::Primitive(UInt32)),
        "uint64" => Some(Type::Primitive(UInt64)),
        "int8" => Some(Type::Primitive(Int8)),
        "int16" => Some(Type::Primitive(Int16)),
        "int32" => Some(Type::Primitive(Int32)),
        "int64" => Some(Type::Primitive(Int64)),
        "float32" => Some(Type::Primitive(Float32)),
        "float64" => Some(Type::Primitive(Float64)),
        "string" => Some(Type::Str(Str { bounds: None })),
        "vector" => Some(Type::Vector(Vector {
            element_type: None,
            bounds: None,
        })),
        "array" => Some(Type::Array(Array {
            element_type: None,
            size: None,
        })),
        "byte" => unimplemented!(),
        "bytes" => unimplemented!(),
        _ => None,
    }
}
// TODO: it doesn't really make sense to have a span for the rhs of a type decl
// (e.g. in struct Foo {...}). we could reconstruct a span starting at the open
// brace and ending at the close brace but since we currently never need to use
// this when displaying errors we just use a dummy span for now.
/// Wraps `value` in a placeholder span (file 0, offsets 0..0).
pub fn dummy_span<T>(value: T) -> Spanned<T> {
    span::spanned(span::FileId(0), 0, 0, value)
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::parser::parse;
    use crate::source_file::SourceFile;
    use crate::span::FileId;
    #[test]
    fn import_success() {
        let mut deps = Libraries::default();
        deps.add_library(Library::empty("foo".to_string())).unwrap();
        deps.add_library(Library::empty("bar".to_string())).unwrap();
        let contents = r#"library test;
using foo;
using bar;
"#;
        let src = SourceFile::new(FileId(0), "test.fidl".to_string(), contents.to_string());
        let file = parse(&src).unwrap();
        // Both imports name known, distinct libraries, so resolution succeeds.
        assert!(FileImports::from_imports(file.imports, &deps).is_ok());
    }
    #[test]
    fn import_dupe_no_alias() {
        let mut deps = Libraries::default();
        deps.add_library(Library::empty("foo".to_string())).unwrap();
        let contents = r#"library test;
using foo;
using foo;
"#;
        let src = SourceFile::new(FileId(0), "test.fidl".to_string(), contents.to_string());
        let file = parse(&src).unwrap();
        let errs = FileImports::from_imports(file.imports, &deps).unwrap_err();
        assert_eq!(errs.len(), 1);
        // Importing the same library twice (with no aliases) is a duplicate.
        match errs[0] {
            Error::DuplicateImport { .. } => (),
            _ => panic!("expected Error::DuplicateImport"),
        }
    }
    #[test]
    fn import_dupe_aliases() {
        let mut deps = Libraries::default();
        deps.add_library(Library::empty("foo".to_string())).unwrap();
        let contents = r#"library test;
using foo as bar;
using foo as baz;
"#;
        let src = SourceFile::new(FileId(0), "test.fidl".to_string(), contents.to_string());
        let file = parse(&src).unwrap();
        let errs = FileImports::from_imports(file.imports, &deps).unwrap_err();
        assert_eq!(errs.len(), 1);
        // Aliasing the same library twice still duplicates the absolute import.
        match errs[0] {
            Error::DuplicateImport { .. } => (),
            _ => panic!("expected Error::DuplicateImport"),
        }
    }
    #[test]
    fn import_conflict() {
        let mut deps = Libraries::default();
        deps.add_library(Library::empty("foo".to_string())).unwrap();
        deps.add_library(Library::empty("bar".to_string())).unwrap();
        let contents = r#"library test;
using foo;
using bar as foo;
"#;
        let src = SourceFile::new(FileId(0), "test.fidl".to_string(), contents.to_string());
        let file = parse(&src).unwrap();
        let errs = FileImports::from_imports(file.imports, &deps).unwrap_err();
        assert_eq!(errs.len(), 1);
        // An alias may not shadow an existing absolute import.
        match errs[0] {
            Error::ImportNameConflict { .. } => (),
            _ => panic!("expected Error::ImportNameConflict"),
        }
    }
}
|
/// The fizzbuzz word for `n`: multiples of 15 -> "fizzbuzz", of 3 -> "fizz",
/// of 5 -> "buzz", everything else -> "buzzfizz".
///
/// NOTE(review): `n % 1 == 0` holds for every integer, so the final `else`
/// branch (printing the number itself) is unreachable -- every value not a
/// multiple of 3 or 5 yields "buzzfizz". Preserved here exactly as the
/// original behaved; confirm whether that was intended.
fn fizzbuzz_word(n: u32) -> String {
    if n % 15 == 0 {
        "fizzbuzz".to_string()
    } else if n % 3 == 0 {
        "fizz".to_string()
    } else if n % 5 == 0 {
        "buzz".to_string()
    } else if n % 1 == 0 {
        "buzzfizz".to_string()
    } else {
        n.to_string()
    }
}

fn main() {
    // `n` will take the values: 1, 2, ..., 100 in each iteration.
    // The classification logic previously duplicated across both loops now
    // lives in `fizzbuzz_word`.
    for n in 1..101 {
        println!("{}", fizzbuzz_word(n));
    }
    // Second pass covers 102..=301 (note: 101 is skipped, as in the original).
    for i in 102..302 {
        println!("{}", fizzbuzz_word(i));
    }
}
|
use std::{fs, io};
use anyhow::Error;
use clap::{App, ArgMatches, SubCommand};
use varisat::checker::{Checker, CheckerError};
use varisat_lrat::WriteLrat;
use super::{banner, init_logging};
mod transcript;
/// Builds the CLI definition for the proof-checking subcommand.
///
/// NOTE(review): the subcommand is literally named "--check" (dashes
/// included); presumably the caller matches on that exact string -- confirm
/// before renaming.
pub fn check_args() -> App<'static, 'static> {
    SubCommand::with_name("--check")
        .arg_from_usage("[INPUT] 'The input file to use (stdin if omitted)'")
        .arg_from_usage("<proof-file> --proof=[FILE] 'The varisat proof file to check.'")
        .arg_from_usage("[lrat-file] --write-lrat=[FILE] 'Convert the proof to LRAT.'")
        .arg_from_usage(
            "[clrat-file] --write-clrat=[FILE] 'Convert the proof to compressed (binary) LRAT.'",
        )
}
/// Entry point of the proof-checking subcommand: reads a DIMACS CNF (from a
/// file or stdin), optionally writes LRAT/compressed-LRAT conversions, and
/// checks the given proof file.
///
/// Returns the intended process exit code: 0 when the proof verifies,
/// 1 when it does not; I/O failures propagate as `Err`.
pub fn check_main(matches: &ArgMatches) -> Result<i32, Error> {
    init_logging();
    banner();
    let mut checker = Checker::default();
    let stdin = io::stdin();
    // Declared before `file` so whichever reader is chosen outlives the
    // `&mut dyn io::Read` borrow taken in the match below.
    let mut locked_stdin;
    let mut opened_file;
    let file = match matches.value_of("INPUT") {
        Some(path) => {
            log::info!("Reading file '{}'", path);
            opened_file = fs::File::open(path)?;
            &mut opened_file as &mut dyn io::Read
        }
        None => {
            log::info!("Reading from stdin");
            locked_stdin = stdin.lock();
            &mut locked_stdin as &mut dyn io::Read
        }
    };
    let mut transcript = transcript::Transcript::default();
    checker.add_transcript(&mut transcript);
    // The processors are declared outside the `if` blocks so the checker can
    // keep borrowing them for the rest of the function.
    let mut lrat_processor;
    if let Some(lrat_path) = matches.value_of("lrat-file") {
        // `false` selects the textual (non-binary) LRAT format.
        lrat_processor = WriteLrat::new(fs::File::create(lrat_path)?, false);
        checker.add_processor(&mut lrat_processor);
    }
    let mut clrat_processor;
    if let Some(clrat_path) = matches.value_of("clrat-file") {
        // `true` selects the compressed (binary) LRAT format.
        clrat_processor = WriteLrat::new(fs::File::create(clrat_path)?, true);
        checker.add_processor(&mut clrat_processor);
    }
    checker.add_dimacs_cnf(file)?;
    // <proof-file> is a required argument, so unwrap cannot fail here.
    let path = matches.value_of("proof-file").unwrap();
    log::info!("Checking proof file '{}'", path);
    match checker.check_proof(fs::File::open(path)?) {
        Ok(()) => println!("s VERIFIED"),
        Err(err) => {
            log::error!("{}", err);
            if let CheckerError::CheckFailed { debug_step, .. } = err {
                if !debug_step.is_empty() {
                    log::error!("failed step was {}", debug_step)
                }
            }
            println!("s NOT VERIFIED");
            return Ok(1);
        }
    }
    Ok(0)
}
|
// NOTE: svd2rust-style generated register accessors for the FTSR2 register;
// hand edits here would normally be clobbered by regeneration.
#[doc = "Reader of register FTSR2"]
pub type R = crate::R<u32, super::FTSR2>;
#[doc = "Writer for register FTSR2"]
pub type W = crate::W<u32, super::FTSR2>;
#[doc = "Register FTSR2 `reset()`'s with value 0"]
impl crate::ResetValue for super::FTSR2 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // All falling-trigger enables default to off.
        0
    }
}
#[doc = "Falling trigger event configuration of line 32\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FT32_A {
    #[doc = "0: Falling edge trigger is disabled"]
    DISABLED = 0,
    #[doc = "1: Falling edge trigger is enabled"]
    ENABLED = 1,
}
impl From<FT32_A> for bool {
    #[inline(always)]
    fn from(variant: FT32_A) -> Self {
        // Single-bit field: DISABLED (0) -> false, ENABLED (1) -> true.
        variant as u8 != 0
    }
}
#[doc = "Reader of field `FT32`"]
pub type FT32_R = crate::R<bool, FT32_A>;
impl FT32_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> FT32_A {
        // Single-bit field: false <-> DISABLED, true <-> ENABLED.
        match self.bits {
            false => FT32_A::DISABLED,
            true => FT32_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == FT32_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == FT32_A::ENABLED
    }
}
/// Write proxy for field `FT32`
pub struct FT32_W<'a> {
    w: &'a mut W,
}
impl<'a> FT32_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FT32 occupies bit 0 of FTSR2: clear it, then set it from `value`.
        const OFFSET: u32 = 0;
        self.w.bits &= !(0x01 << OFFSET);
        self.w.bits |= (u32::from(value) & 0x01) << OFFSET;
        self.w
    }
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: FT32_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Falling edge trigger is disabled
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(FT32_A::DISABLED)
    }
    /// Falling edge trigger is enabled
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(FT32_A::ENABLED)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Falling trigger event configuration of line 33
pub type FT33_A = FT32_A;
/// Reader of field `FT33`
pub type FT33_R = crate::R<bool, FT32_A>;
/// Write proxy for field `FT33`
pub struct FT33_W<'a> {
    w: &'a mut W,
}
impl<'a> FT33_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FT33 occupies bit 1 of FTSR2.
        const OFFSET: u32 = 1;
        self.w.bits &= !(0x01 << OFFSET);
        self.w.bits |= (u32::from(value) & 0x01) << OFFSET;
        self.w
    }
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: FT33_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Falling edge trigger is disabled
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(FT32_A::DISABLED)
    }
    /// Falling edge trigger is enabled
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(FT32_A::ENABLED)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Falling trigger event configuration of line 40
pub type FT40_A = FT32_A;
/// Reader of field `FT40`
pub type FT40_R = crate::R<bool, FT32_A>;
/// Write proxy for field `FT40`
pub struct FT40_W<'a> {
    w: &'a mut W,
}
impl<'a> FT40_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FT40 occupies bit 8 of FTSR2 (bit positions are sparse in this register).
        const OFFSET: u32 = 8;
        self.w.bits &= !(0x01 << OFFSET);
        self.w.bits |= (u32::from(value) & 0x01) << OFFSET;
        self.w
    }
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: FT40_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Falling edge trigger is disabled
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(FT32_A::DISABLED)
    }
    /// Falling edge trigger is enabled
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(FT32_A::ENABLED)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
/// Falling trigger event configuration of line 41
pub type FT41_A = FT32_A;
/// Reader of field `FT41`
pub type FT41_R = crate::R<bool, FT32_A>;
/// Write proxy for field `FT41`
pub struct FT41_W<'a> {
    w: &'a mut W,
}
impl<'a> FT41_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FT41 occupies bit 9 of FTSR2.
        const OFFSET: u32 = 9;
        self.w.bits &= !(0x01 << OFFSET);
        self.w.bits |= (u32::from(value) & 0x01) << OFFSET;
        self.w
    }
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: FT41_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Falling edge trigger is disabled
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(FT32_A::DISABLED)
    }
    /// Falling edge trigger is enabled
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(FT32_A::ENABLED)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
impl R {
    // Read-side accessors. Note the field bit positions are sparse:
    // lines 32/33 map to bits 0/1, lines 40/41 to bits 8/9.
    #[doc = "Bit 0 - Falling trigger event configuration of line 32"]
    #[inline(always)]
    pub fn ft32(&self) -> FT32_R {
        FT32_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Falling trigger event configuration of line 33"]
    #[inline(always)]
    pub fn ft33(&self) -> FT33_R {
        FT33_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 8 - Falling trigger event configuration of line 40"]
    #[inline(always)]
    pub fn ft40(&self) -> FT40_R {
        FT40_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - Falling trigger event configuration of line 41"]
    #[inline(always)]
    pub fn ft41(&self) -> FT41_R {
        FT41_R::new(((self.bits >> 9) & 0x01) != 0)
    }
}
impl W {
    // Write-side accessors returning one-shot write proxies; each proxy's
    // `bit`/`variant`/`set_bit`/`clear_bit` mutates the shared `W` and returns it
    // for chaining.
    #[doc = "Bit 0 - Falling trigger event configuration of line 32"]
    #[inline(always)]
    pub fn ft32(&mut self) -> FT32_W {
        FT32_W { w: self }
    }
    #[doc = "Bit 1 - Falling trigger event configuration of line 33"]
    #[inline(always)]
    pub fn ft33(&mut self) -> FT33_W {
        FT33_W { w: self }
    }
    #[doc = "Bit 8 - Falling trigger event configuration of line 40"]
    #[inline(always)]
    pub fn ft40(&mut self) -> FT40_W {
        FT40_W { w: self }
    }
    #[doc = "Bit 9 - Falling trigger event configuration of line 41"]
    #[inline(always)]
    pub fn ft41(&mut self) -> FT41_W {
        FT41_W { w: self }
    }
}
|
use crate::rustc_lint::LintContext;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_macro_callsite;
use clippy_utils::sugg;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Block, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
    /// ### What it does
    /// Looks for blocks of expressions and fires if the last expression returns
    /// `()` but is not followed by a semicolon.
    ///
    /// ### Why is this bad?
    /// The semicolon might be optional but when extending the block with new
    /// code, it doesn't require a change in previous last line.
    ///
    /// ### Example
    /// ```rust
    /// fn main() {
    ///     println!("Hello world")
    /// }
    /// ```
    /// Use instead:
    /// ```rust
    /// fn main() {
    ///     println!("Hello world");
    /// }
    /// ```
    #[clippy::version = "1.52.0"]
    pub SEMICOLON_IF_NOTHING_RETURNED,
    pedantic,
    "add a semicolon if nothing is returned"
}
// Registers the lint pass type that implements the lint declared above.
declare_lint_pass!(SemicolonIfNothingReturned => [SEMICOLON_IF_NOTHING_RETURNED]);
impl LateLintPass<'_> for SemicolonIfNothingReturned {
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) {
if_chain! {
if !block.span.from_expansion();
if let Some(expr) = block.expr;
let t_expr = cx.typeck_results().expr_ty(expr);
if t_expr.is_unit();
if let snippet = snippet_with_macro_callsite(cx, expr.span, "}");
if !snippet.ends_with('}') && !snippet.ends_with(';');
if cx.sess().source_map().is_multiline(block.span);
then {
// filter out the desugared `for` loop
if let ExprKind::DropTemps(..) = &expr.kind {
return;
}
let sugg = sugg::Sugg::hir_with_macro_callsite(cx, expr, "..");
let suggestion = format!("{0};", sugg);
span_lint_and_sugg(
cx,
SEMICOLON_IF_NOTHING_RETURNED,
expr.span.source_callsite(),
"consider adding a `;` to the last statement for consistent formatting",
"add a `;` here",
suggestion,
Applicability::MaybeIncorrect,
);
}
}
}
}
|
use super::Part;
use crate::codec::{Decode, Encode};
use crate::{remote_type, RemoteObject, Vector3};
remote_type!(
    /// A reaction wheel. Obtained by calling `Part::reaction_wheel()`.
    object SpaceCenter.ReactionWheel {
        properties: {
            {
                Part {
                    /// Returns the part object for this reaction wheel.
                    ///
                    /// **Game Scenes**: All
                    get: part -> Part
                }
            }
            {
                Active {
                    /// Returns whether the reaction wheel is active.
                    ///
                    /// **Game Scenes**: All
                    get: is_active -> bool,
                    /// Sets whether the reaction wheel is active.
                    ///
                    /// **Game Scenes**: All
                    set: set_active(bool)
                }
            }
            {
                Broken {
                    /// Returns whether the reaction wheel is broken.
                    ///
                    /// **Game Scenes**: All
                    get: is_broken -> bool
                }
            }
            {
                AvailableTorque {
                    /// Returns the available torque, in Newton meters, that can be produced by this
                    /// reaction wheel, in the positive and negative pitch, roll and yaw axes of the vessel.
                    /// These axes correspond to the coordinate axes of the `Vessel::reference_frame()`.
                    /// Returns zero if the reaction wheel is inactive or broken.
                    ///
                    /// **Game Scenes**: All
                    get: available_torque -> (Vector3, Vector3)
                }
            }
            {
                MaxTorque {
                    /// Returns the maximum torque, in Newton meters, that can be produced by this
                    /// reaction wheel, when it is active, in the positive and negative pitch, roll
                    /// and yaw axes of the vessel. These axes correspond to the coordinate axes
                    /// of the `Vessel::reference_frame()`.
                    ///
                    /// **Game Scenes**: All
                    get: max_torque -> (Vector3, Vector3)
                }
            }
        }
    });
|
pub mod image;
pub(crate) mod bitmap; |
/// Implements `ecs::Component` for `$type_name`, wiring it to the crate's
/// default component manager and default message type.
#[macro_export]
macro_rules! derive_Component {
    ($type_name: ty) => {
        impl $crate::ecs::Component for $type_name {
            type Manager = $crate::component::DefaultManager<$type_name>;
            type Message = $crate::component::DefaultMessage<$type_name>;
        }
    }
}
/// Implements `singleton::Singleton` for `$type_name`, backed by a private
/// `static mut` raw pointer to a leaked `Box`.
///
/// NOTE(review): not thread-safe — `INSTANCE` is read and written without any
/// synchronization, and the `println!` calls look like leftover debug output.
#[macro_export]
macro_rules! derive_Singleton {
    ($type_name: ident) => {
        static mut INSTANCE: Option<*mut $type_name> = None;
        unsafe impl $crate::singleton::Singleton for $type_name {
            fn set_instance(instance: Self) {
                println!("setting instance");
                // Refuse to overwrite an already-created instance.
                if unsafe { INSTANCE.is_some() } {
                    panic!("Cannot create singleton instance");
                }
                // Leak the boxed instance into the static raw pointer.
                let instance = Box::new(instance);
                unsafe {
                    INSTANCE = Some(Box::into_raw(instance));
                }
                println!("done setting instance");
            }
            fn instance() -> &'static Self {
                unsafe {
                    match INSTANCE {
                        Some(instance) => &*instance,
                        None => panic!("No instance found"),
                    }
                }
            }
            unsafe fn destroy_instance() {
                if let Some(instance) = INSTANCE {
                    // Re-box the raw pointer so the instance is dropped here.
                    Box::from_raw(instance);
                    INSTANCE = None;
                }
            }
        }
    }
}
/// Prints `$message` at most once per call site.
///
/// Thread-safe: the guard flag is an `AtomicBool`, so concurrent callers race
/// benignly and exactly one of them prints (this resolves the old TODO about
/// using `std::sync::Once`; an atomic flag suffices for a print-once guard).
#[macro_export]
macro_rules! warn_once {
    ($message: expr) => {
        static HAS_WARNED: ::std::sync::atomic::AtomicBool =
            ::std::sync::atomic::AtomicBool::new(false);
        // `swap` returns the previous value, so only the first caller sees `false`.
        if !HAS_WARNED.swap(true, ::std::sync::atomic::Ordering::Relaxed) {
            println!($message);
        }
    }
}
|
//! `initialize` subcommand
use crate::application::app_config;
use crate::config::LightClientConfig;
use std::collections::HashMap;
use abscissa_core::status_err;
use abscissa_core::status_warn;
use abscissa_core::Command;
use abscissa_core::Options;
use abscissa_core::Runnable;
use tendermint::hash;
use tendermint::lite::Header;
use tendermint::Hash;
use tendermint_light_client::components::io::{AtHeight, Io, ProdIo};
use tendermint_light_client::operations::ProdHasher;
use tendermint_light_client::predicates::{ProdPredicates, VerificationPredicates};
use tendermint_light_client::store::sled::SledStore;
use tendermint_light_client::store::LightStore;
use tendermint_light_client::types::Status;
/// `initialize` subcommand
///
/// Initializes the light node with a subjectively trusted header, identified
/// by its height and (upper-case hex, SHA-256) header hash.
#[derive(Command, Debug, Default, Options)]
pub struct InitCmd {
    // Positional argument: the trusted height.
    #[options(
        free,
        help = "subjective height of the initial trusted state to initialize the node with"
    )]
    pub height: u64,
    // Positional argument: upper-case hex hash of the trusted header.
    #[options(
        free,
        help = "hash of the initial subjectively trusted header to initialize the node with"
    )]
    pub header_hash: String,
}
impl Runnable for InitCmd {
    /// Resolves the first configured light client, builds an I/O handle for its
    /// peer and initializes the node subjectively at the configured
    /// height/header hash.
    fn run(&self) {
        // NOTE(review): panics on a malformed hex hash from the CLI — consider
        // reporting a proper error instead.
        let subjective_header_hash =
            Hash::from_hex_upper(hash::Algorithm::Sha256, &self.header_hash).unwrap();
        let app_cfg = app_config();
        // Assumes at least one light client is configured; panics otherwise.
        let lc = app_cfg.light_clients.first().unwrap();
        let mut peer_map = HashMap::new();
        peer_map.insert(lc.peer_id, lc.address.clone());
        let io = ProdIo::new(peer_map, Some(app_cfg.rpc_config.request_timeout));
        // `lc` is already a `&LightClientConfig`; the extra `&lc` borrow was
        // needless (clippy::needless_borrow).
        initialize_subjectively(self.height, subjective_header_hash, lc, &io);
    }
}
// TODO(ismail): sth along these lines should live in the light-client crate / library
// instead of here.
// TODO(ismail): additionally here and everywhere else, we should return errors
// instead of std::process::exit because no destructors will be run.
/// Fetches the light block at `height` from the configured peer, performs basic
/// validation, checks its header hash against the subjectively trusted
/// `subjective_header_hash`, and stores it as the initial `Verified` state.
///
/// Exits the process on any failure (see the TODOs above).
fn initialize_subjectively(
    height: u64,
    subjective_header_hash: Hash,
    l_conf: &LightClientConfig,
    io: &ProdIo,
) {
    let db = sled::open(l_conf.db_path.clone()).unwrap_or_else(|e| {
        status_err!("could not open database: {}", e);
        std::process::exit(1);
    });
    let mut light_store = SledStore::new(db);
    // Warn — but proceed — if the store already holds a trusted/verified state.
    // (Previously this queried the store twice via `is_some()` + `unwrap()`;
    // bind the result once instead.)
    if let Some(lb) = light_store.latest_trusted_or_verified() {
        status_warn!(
            "already existing trusted or verified state of height {} in database: {:?}",
            lb.signed_header.header.height,
            l_conf.db_path
        );
    }
    let trusted_state = io
        .fetch_light_block(l_conf.peer_id, AtHeight::At(height))
        .unwrap_or_else(|e| {
            status_err!("could not retrieve trusted header: {}", e);
            std::process::exit(1);
        });
    let predicates = ProdPredicates;
    let hasher = ProdHasher;
    // Basic sanity check: the block's validator set must match its header.
    if let Err(err) = predicates.validator_sets_match(&trusted_state, &hasher) {
        status_err!("invalid light block: {}", err);
        std::process::exit(1);
    }
    // TODO(ismail): actually verify more predicates of light block before storing!?
    let got_header_hash = trusted_state.signed_header.header.hash();
    if got_header_hash != subjective_header_hash {
        status_err!(
            "received LightBlock's header hash: {} does not match the subjective hash: {}",
            got_header_hash,
            subjective_header_hash
        );
        std::process::exit(1);
    }
    // TODO(liamsi): it is unclear if this should be Trusted or only Verified
    // - update the spec first and then use library method instead of this:
    light_store.insert(trusted_state, Status::Verified);
}
|
//! UnixFS symlink support. UnixFS symlinks are UnixFS messages similar to single block files, but
//! the link name or target path is encoded in the UnixFS::Data field. This means that the target
//! path could be in any encoding, however it is always treated as an utf8 Unix path. Could be that
//! this is wrong.
use crate::pb::{FlatUnixFs, UnixFs, UnixFsType};
use alloc::borrow::Cow;
use quick_protobuf::{MessageWrite, Writer};
/// Appends a dag-pb block for a symlink to the given target_path. It is expected that the
/// `target_path` is valid relative unix path relative to the place in which this is used but
/// targets validity cannot really be judged.
pub fn serialize_symlink_block(target_path: &str, block_buffer: &mut Vec<u8>) {
    // should this fail or not? protobuf encoding cannot fail here, however we might create a too
    // large block but what's the limit?
    //
    // why not return a (Cid, Vec<u8>) like usually with cidv0? well...
    // Build a link-less UnixFS node whose Data field carries the target path bytes.
    let node = FlatUnixFs {
        links: Vec::new(),
        data: UnixFs {
            Type: UnixFsType::Symlink,
            Data: Some(Cow::Borrowed(target_path.as_bytes())),
            ..Default::default()
        },
    };
    let mut writer = Writer::new(block_buffer);
    // Writing a protobuf message into an in-memory Vec cannot fail.
    node.write_message(&mut writer).expect("unexpected failure");
}
#[cfg(test)]
mod tests {
    use super::serialize_symlink_block;
    use cid::Cid;
    use core::convert::TryFrom;
    use sha2::{Digest, Sha256};

    // Each test pins an exact CID, so any change to the serialization bytes
    // will be caught here.
    #[test]
    fn simple_symlink() {
        let mut buf = Vec::new();
        // this symlink just points to a "b" at the same level, used in `symlinks_in_trees` to
        // create the `foo_directory/a` which links to sibling `b` or the directory
        // `foo_directory/b`.
        serialize_symlink_block("b", &mut buf);
        // CIDv0 = sha2-256 multihash over the raw dag-pb block bytes.
        let mh = multihash::wrap(multihash::Code::Sha2_256, &Sha256::digest(&buf));
        let cid = Cid::new_v0(mh).expect("sha2_256 is the correct multihash for cidv0");
        assert_eq!(
            cid.to_string(),
            "QmfLJN6HLyREnWr7QQNmgmuNziUhcbwUopkHQ8gD3pMfp6"
        );
    }

    #[test]
    fn symlinks_in_trees_rooted() {
        use crate::dir::builder::BufferingTreeBuilder;
        let mut tree = BufferingTreeBuilder::default();
        tree.put_link(
            "foo_directory/b/car",
            Cid::try_from("QmNYVgoDXh3dqC1jjCuYqQ9w4XfiocehPZjEPiQiCVYv33").unwrap(),
            12,
        )
        .unwrap();
        tree.put_link(
            "foo_directory/a",
            Cid::try_from("QmfLJN6HLyREnWr7QQNmgmuNziUhcbwUopkHQ8gD3pMfp6").unwrap(),
            7,
        )
        .unwrap();
        // The last node produced by the builder is the root directory.
        let otn = tree.build().last().unwrap().unwrap();
        assert_eq!(
            otn.cid.to_string(),
            "QmZDVQHwjHwA4SyzEDtJLNxmZeJVK1W8BWFAHV61x2Rs19"
        );
    }

    #[test]
    fn symlinks_in_trees_wrapped() {
        use crate::dir::builder::{BufferingTreeBuilder, TreeOptions};
        // note regarding the root directory; now we can add the paths without the first component
        // `foo_directory` and still get the same result as in `symlinks_in_trees_rooted`.
        let mut opts = TreeOptions::default();
        opts.wrap_with_directory();
        let mut tree = BufferingTreeBuilder::new(opts);
        tree.put_link(
            "b/car",
            Cid::try_from("QmNYVgoDXh3dqC1jjCuYqQ9w4XfiocehPZjEPiQiCVYv33").unwrap(),
            12,
        )
        .unwrap();
        tree.put_link(
            "a",
            Cid::try_from("QmfLJN6HLyREnWr7QQNmgmuNziUhcbwUopkHQ8gD3pMfp6").unwrap(),
            7,
        )
        .unwrap();
        let otn = tree.build().last().unwrap().unwrap();
        assert_eq!(
            otn.cid.to_string(),
            "QmZDVQHwjHwA4SyzEDtJLNxmZeJVK1W8BWFAHV61x2Rs19"
        );
    }

    #[test]
    fn walking_symlink_containing_tree() {
        use crate::walk::{ContinuedWalk, Walker};
        use hex_literal::hex;
        use std::path::PathBuf;
        // while this case or similar should be repeated in the walker tests, the topic of symlinks
        // and how the target path names are handled (esp. on windows) is curious enough to warrant
        // duplicating these three cases here.
        let mut fake = crate::test_support::FakeBlockstore::default();
        // if `simple_symlink` and `symlinks_in_trees_*` passed, they would had created these
        // blocks, which we now take for granted.
        let tree_blocks: &[(&'static str, &'static [u8])] = &[
            ("QmZDVQHwjHwA4SyzEDtJLNxmZeJVK1W8BWFAHV61x2Rs19", &hex!("12290a221220fc7fac69ddb44e39686ecfd1ecc6c52ab653f4227e533ee74a2e238f8b2143d3120161180712290a221220b924ddb19181d159c29eec7c98ec506976a76d40241ccd203b226849ce6e0b72120162183d0a020801")),
            ("QmfLJN6HLyREnWr7QQNmgmuNziUhcbwUopkHQ8gD3pMfp6", &hex!("0a050804120162")),
            ("QmaoNjmCQ9774sR6H4DzgGPafXyuVVTCyBeXLaxueKYRLm", &hex!("122b0a2212200308c49252eb61966f802baf45074e074f3b3b90619766e0589c1445261a1a221203636172180c0a020801")),
            ("QmNYVgoDXh3dqC1jjCuYqQ9w4XfiocehPZjEPiQiCVYv33", &hex!("0a0a080212046361720a1804")),
        ];
        for (expected, bytes) in tree_blocks {
            assert_eq!(*expected, fake.insert_v0(bytes).to_string());
        }
        let mut walker = Walker::new(
            // note: this matches the `symlinks_in_trees` root cid (the last cid produced)
            Cid::try_from(tree_blocks[0].0).unwrap(),
            String::default(),
        );
        // Simplified record of what the walker visited, in order.
        #[derive(Debug, PartialEq, Eq)]
        enum Entry {
            Dir(PathBuf),
            Symlink(PathBuf, String),
            File(PathBuf),
        }
        let mut actual = Vec::new();
        while walker.should_continue() {
            let (next, _) = walker.pending_links();
            let next = fake.get_by_cid(next);
            match walker.next(next, &mut None).unwrap() {
                ContinuedWalk::File(_fs, _cid, path, _metadata, _total_size) => {
                    actual.push(Entry::File(path.into()));
                }
                ContinuedWalk::RootDirectory(_cid, path, _metadata)
                | ContinuedWalk::Directory(_cid, path, _metadata) => {
                    actual.push(Entry::Dir(path.into()));
                }
                ContinuedWalk::Bucket(..) => { /* ignore */ }
                ContinuedWalk::Symlink(link_name, _cid, path, _metadata) => {
                    actual.push(Entry::Symlink(
                        path.into(),
                        core::str::from_utf8(link_name).unwrap().to_owned(),
                    ));
                }
            };
        }
        // possibly noteworthy: compare these paths to the ones used when creating; there was
        // non-empty root component `foo_directory`, refer to `symlinks_in_trees_*` variants for
        // more.
        let expected = &[
            Entry::Dir(PathBuf::from("")),
            Entry::Symlink(PathBuf::from("a"), String::from("b")),
            Entry::Dir(PathBuf::from("b")),
            Entry::File({
                let mut p = PathBuf::from("b");
                p.push("car");
                p
            }),
        ];
        assert_eq!(expected, actual.as_slice());
    }
}
|
/// Entry point: prints a greeting (scaffolding for the trie exercise below).
fn main() {
    println!("Hello, world!");
}
/// A trie (prefix tree) mapping key sequences `&[K]` to values `V`.
#[derive(Debug)]
struct Trie<K: Ord + Eq + Copy, V> {
    root: Node<K, V>
}

/// One trie node. Children in `next` are kept sorted by `key` so they can be
/// binary-searched.
#[derive(Debug)]
struct Node<K: Ord + Eq + Copy, V> {
    key : Option<K>,      // `None` only for the root node
    value: Option<V>,     // `Some(..)` iff a key ends at this node
    next : Vec<Node<K, V>>,
}

impl<K: Ord + Eq + Copy, V> Trie<K, V> {
    /// Creates an empty trie.
    fn new() -> Self {
        Trie { root: Node { key: None, value: None, next: vec![] } }
    }

    /// Inserts `key => value` into the trie.
    ///
    /// Returns `false` (leaving the stored value untouched) if the key is
    /// already present, `true` otherwise.
    fn insert(&mut self, key: &[K], value: V) -> bool {
        // Inserts `new_node` into `vec`, keeping it sorted by key
        // (insertion-sort style), and returns the index it ended up at.
        fn _insert<K: Ord + Eq + Copy, V>(vec: &mut Vec<Node<K, V>>, new_node: Node<K, V>) -> usize {
            vec.push(new_node);
            let mut idx = vec.len() - 1;
            // Bubble the new element leftwards until the order is restored.
            while idx > 0 && vec[idx - 1].key > vec[idx].key {
                vec.swap(idx - 1, idx);
                idx -= 1;
            }
            idx
        }
        let mut node = &mut self.root;
        for k in key {
            // `k` is already `&K`; no extra borrow needed.
            if let Some(idx) = Self::search(&node.next, k) {
                // A child for `k` exists: descend into it.
                node = &mut node.next[idx];
            } else {
                // No child for `k`: insert a fresh node and descend into it.
                let new_node = Node { key: Some(*k), value: None, next: vec![] };
                let next_idx = _insert(&mut node.next, new_node);
                node = &mut node.next[next_idx];
            }
        }
        // Do not overwrite an existing value.
        if node.value.is_some() {
            return false;
        }
        node.value = Some(value);
        true
    }

    /// Looks up `key`, returning a reference to its value if present.
    fn find(&self, key: &[K]) -> Option<&V> {
        let mut node = &self.root;
        for k in key {
            // `?` bails out with `None` as soon as a path component is missing.
            node = &node.next[Self::search(&node.next, k)?];
        }
        node.value.as_ref()
    }

    /// Binary-searches the sorted `nodes` slice for `key`, returning its index.
    ///
    /// Every node in a `next` vector carries a `Some` key (only the root's key
    /// is `None`), so the `expect` below is an invariant, not a runtime case.
    fn search(nodes: &[Node<K, V>], key: &K) -> Option<usize> {
        nodes
            .binary_search_by(|node| {
                node.key.expect("non-root nodes always have a key").cmp(key)
            })
            .ok()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Basic insert/find round-trip plus a lookup miss.
    #[test]
    fn test_trie_1() {
        let mut trie: Trie<u8, String> = Trie::new();
        trie.insert(String::from("abc").as_bytes(), String::from("abc"));
        let s = String::from("abc");
        assert_eq!(Some(&s), trie.find(s.as_bytes()));
        let s = String::from("cba");
        assert_eq!(None, trie.find(s.as_bytes()));
    }

    // Multiple keys, both sharing and not sharing prefixes.
    #[test]
    fn test_trie_2() {
        let mut trie: Trie<u8, u32> = Trie::new();
        trie.insert(String::from("abc").as_bytes(), 0);
        trie.insert(String::from("abd").as_bytes(), 1);
        trie.insert(String::from("zyx").as_bytes(), 2);
        trie.insert(String::from("zwx").as_bytes(), 3);
        let s = String::from("abc");
        assert_eq!(Some(&0), trie.find(s.as_bytes()));
        let s = String::from("abd");
        assert_eq!(Some(&1), trie.find(s.as_bytes()));
        let s = String::from("zyx");
        assert_eq!(Some(&2), trie.find(s.as_bytes()));
        let s = String::from("zwx");
        assert_eq!(Some(&3), trie.find(s.as_bytes()));
    }

    // Multi-byte UTF-8 keys are handled byte-wise by the trie.
    #[test]
    fn test_trie_3() {
        let mut trie: Trie<u8, u32> = Trie::new();
        trie.insert(String::from("あいうえお").as_bytes(), 10);
        trie.insert(String::from("あいえうお").as_bytes(), 11);
        trie.insert(String::from("漢字").as_bytes() , 12);
        trie.insert(String::from("平仮名").as_bytes() , 13);
        trie.insert(String::from("片仮名").as_bytes() , 14);
        let s = String::from("あいうえお");
        assert_eq!(Some(&10), trie.find(s.as_bytes()));
        let s = String::from("あいえうお");
        assert_eq!(Some(&11), trie.find(s.as_bytes()));
        let s = String::from("漢字");
        assert_eq!(Some(&12), trie.find(s.as_bytes()));
        let s = String::from("平仮名");
        assert_eq!(Some(&13), trie.find(s.as_bytes()));
        let s = String::from("片仮名");
        assert_eq!(Some(&14), trie.find(s.as_bytes()));
    }
}
|
use std::error::Error;
use std::thread;
use std::time::Duration;
use rainbow_hat_rs::alphanum4::Alphanum4;
/// Scrolls "HELLO WORLD " across a 4-character alphanumeric display forever.
fn main() -> Result<(), Box<dyn Error>> {
    let sleep_time = 500; // milliseconds between scroll steps
    let msg = "HELLO WORLD ";
    let mut alphanum = Alphanum4::new()?;
    let mut start_index = 0;
    // Duplicate the message so a 4-character window can wrap past the end
    // without extra slice-bounds arithmetic.
    let mut msg2 = msg.to_string();
    msg2.push_str(&msg);
    loop {
        // NOTE(review): byte-indexed slicing — assumes the message is ASCII.
        let substring = &msg2[start_index..=(start_index + 3)];
        alphanum.print_str(substring, false);
        alphanum.show()?;
        thread::sleep(Duration::from_millis(sleep_time));
        start_index += 1;
        // Wrap back to the start after one full message length.
        if start_index == msg.len() {
            start_index = 0;
        }
    }
}
|
use crate::lws::WMessage;
use std::fmt;
use stream_cancel::Trigger;
use tokio::sync::mpsc::UnboundedSender;
/// A multiplexed proxy request slot.
pub struct Request {
    pub index: u16,                                   // slot index in the request table
    pub tag: u16,                                     // generation tag to detect stale slots
    pub request_tx: Option<UnboundedSender<WMessage>>, // channel for outbound messages, if wired
    pub trigger: Option<Trigger>,                     // cancels the associated stream when dropped
    pub write_out: u16,                               // count of writes sent out
}
impl Request {
    /// Creates an empty placeholder request occupying slot `idx`.
    pub fn new(idx: u16) -> Request {
        Request {
            tag: 0,
            request_tx: None,
            trigger: None,
            write_out: 0,
            index: idx,
        }
    }

    /// Creates a live request wired to a message sender and a cancellation
    /// trigger; index and tag start at zero.
    pub fn with(tx: UnboundedSender<WMessage>, trigger: Trigger) -> Request {
        Request {
            request_tx: Some(tx),
            trigger: Some(trigger),
            index: 0,
            tag: 0,
            write_out: 0,
        }
    }
}
impl fmt::Debug for Request {
    /// Formats as `Req { index: .., tag: .. }`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Fixed the "indx" typo and the missing space before the closing brace.
        write!(f, "Req {{ index: {}, tag: {} }}", self.index, self.tag)
    }
}
|
use std::collections::HashMap;
use chrono::Local;
use crate::{HTTP_VERSION};
use std::path::PathBuf;
/// An HTTP response: status line components, header map, and optional body.
pub struct Response<'a> {
    pub http_version: &'a str,               // e.g. "HTTP/1.1"
    pub status_code: u16,                    // numeric status, e.g. 200
    pub reason_phrase: &'a str,              // textual phrase matching the status
    pub headers: HashMap<&'a str, String>,   // header name -> value
    pub body: Option<Vec<u8>>,               // raw body bytes, if any
}
impl Response<'_> {
    /// Converts the response object to a byte vector.
    ///
    /// The vec contains the formatted HTTP response (status line, headers,
    /// blank line, then the raw body) which can be sent back to the client.
    pub fn to_bytes(&self) -> Vec<u8> {
        let status_line = format!(
            "{} {} {}",
            self.http_version, self.status_code, self.reason_phrase
        );
        let headers = self
            .headers
            .iter()
            .map(|(&key, value)| format!("{}: {}", key, value))
            .collect::<Vec<String>>()
            .join("\r\n");
        let mut response = format!("{}\r\n{}\r\n\r\n", status_line, headers)
            .as_bytes()
            .to_vec();
        // A missing body serializes as an empty body.
        let empty_body = &Vec::new();
        let body = self.body.as_ref().unwrap_or(empty_body);
        response.extend_from_slice(body);
        response
    }

    /// Gets a map of base response headers.
    ///
    /// `Content-Type` is guessed from `path`'s extension, falling back to
    /// `text/html`.
    pub fn get_headers<'a>(content_length: usize, path: &str) -> HashMap<&'a str, String> {
        let mut headers = HashMap::new();
        headers.insert("Server", "SimpleHTTP/0.1 Rust".to_string());
        headers.insert("Connection", "Keep-Alive".to_string());
        headers.insert("Date", format!("{}", Local::now().to_rfc2822()));
        headers.insert("Content-Length", format!("{}", content_length));
        headers.insert(
            "Content-Type",
            mime_guess::from_path(path)
                .first_or("text/html".parse().unwrap())
                .to_string(),
        );
        headers
    }

    /// Returns a successful HTTP response carrying the given 2xx status `code`.
    ///
    /// # Panics
    /// Panics if `code` is not in the 2xx success range.
    pub fn ok<'a>(code: u16, path: PathBuf, content: Vec<u8>) -> Response<'a> {
        // ok responses must carry a 2xx code; the old `code <= 300` check
        // wrongly admitted 300 (a redirect class code).
        assert!((200..300).contains(&code));
        Response {
            http_version: HTTP_VERSION,
            // Bug fix: the validated `code` used to be discarded and the status
            // hard-coded to 200 — honor the caller-supplied code instead.
            status_code: code,
            reason_phrase: Response::reason_phrase(code),
            headers: Response::get_headers(content.len(), path.to_str().unwrap()),
            body: Some(content),
        }
    }

    /// Returns an error response for the given error code.
    pub fn error<'a>(status_code: u16, details: Option<&str>) -> Response<'a> {
        let content = Response::get_error_html(status_code, details);
        Response {
            http_version: HTTP_VERSION,
            status_code,
            reason_phrase: Response::reason_phrase(status_code),
            // error path here doesn't matter as we just want to get html mimetype
            headers: Response::get_headers(content.len(), "error.html"),
            body: Some(content),
        }
    }

    /// Loads the HTML page for the given error code.
    fn get_error_html(code: u16, details: Option<&str>) -> Vec<u8> {
        // `\` line continuations swallow the following indentation, so the
        // rendered HTML is a single line regardless of source formatting.
        format!(
            "<!DOCTYPE html> \
            <html lang=\"en\"> \
            <head> \
            <meta charset=\"UTF-8\"> \
            <title>{code} | {reason_phrase}</title> \
            </head> \
            <body> \
            <h1>{reason_phrase}</h1> \
            <p>{details}</p> \
            </body> \
            </html>",
            code = code,
            reason_phrase = Response::reason_phrase(code),
            details = details.unwrap_or("")
        )
        .as_bytes()
        .to_vec()
    }

    /// Gets the reason phrase string for a status code.
    pub fn reason_phrase<'a>(code: u16) -> &'a str {
        match code {
            200 => "OK",
            // Added so `ok()` can report non-200 success codes meaningfully.
            201 => "CREATED",
            204 => "NO CONTENT",
            400 => "BAD REQUEST",
            404 => "NOT FOUND",
            500 => "INTERNAL SERVER ERROR",
            501 => "NOT IMPLEMENTED",
            _ => "UNKNOWN ERROR",
        }
    }
}
|
use lexical_core::Number;
use crate::database as db;
use deadpool_postgres::Pool;
use serde::{Serialize, Deserialize};
/// Accepts an invitation: requires a logged-in session (otherwise redirects to
/// the login page with a redirect back to this invite), resolves the invite's
/// group, joins it, and renders that group's channel view.
pub async fn accept_invite(invite_id: db::InviteID, session_id: db::SessionID, pool: Pool)
    -> Result<Box<dyn warp::Reply>, warp::Rejection>
{
    let user_id = match db::session_user_id(pool.clone(), &session_id).await? {
        Some(id) => id,
        // Not logged in: bounce through login and return to this invite.
        None => return Ok(Box::new(warp::redirect(
            format!("/login?redirect=/invite/{}", invite_id)
                .parse::<warp::http::Uri>().unwrap()
        )))
    };
    let group_id = match db::invitation_group_id(pool.clone(), invite_id).await? {
        Some(id) => id,
        // Unknown or expired invitation.
        None => return Ok(Box::new(warp::http::StatusCode::NOT_FOUND))
    };
    // This returns false if the user is already a member of the group but that
    // doesn't matter because either way, we should take the user to the group.
    db::join_group(pool.clone(), user_id, group_id).await?;
    // Channel 0 is presumably the group's default channel — TODO confirm.
    super::channel(group_id, 0, session_id, pool).await
}
/// JSON reply body for `create_invite`: the id of the new invitation.
#[derive(Serialize)]
struct Response {
    invite_id: db::InviteID
}

/// JSON request body for `create_invite`.
#[derive(Deserialize)]
pub struct CreateInviteRequest {
    group_id: db::GroupID
}

// Upper bound in bytes on an acceptable `create_invite` request body: the JSON
// skeleton (the quoted string below has the same length as `{"group_id":}`)
// plus the maximum number of decimal digits in a GroupID.
pub const CREATE_INVITE_LIMIT: u64 =
    ("{'group_id':}".len() + db::GroupID::FORMATTED_SIZE_DECIMAL) as u64;
/// Creates an invitation for a group: requires a logged-in session and group
/// membership, then returns the new invite id as JSON.
pub async fn create_invite(session_id: db::SessionID, request: CreateInviteRequest, pool: Pool)
    -> Result<Box<dyn warp::Reply>, warp::Rejection>
{
    let user_id = match db::session_user_id(pool.clone(), &session_id).await? {
        Some(id) => id,
        None => return Ok(Box::new(warp::http::StatusCode::UNAUTHORIZED))
    };
    // Only members may invite; report NOT_FOUND to avoid leaking group existence.
    if !db::group_member(pool.clone(), user_id, request.group_id).await? {
        return Ok(Box::new(warp::http::StatusCode::NOT_FOUND));
    }
    Ok(Box::new(warp::reply::json(&Response {
        invite_id: db::create_invitation(pool.clone(), request.group_id).await?
    })))
}
|
use crate::parse::image::Image;
use crate::parse::boolean_matrix::BooleanMatrix;
use crate::parse::target_mesh::TargetMesh;
mod boolean_matrix;
pub mod image;
mod target;
mod target_mesh;
use crate::make::scan_sheet_elements::{ALIGNER_OUTER_RADIUS};
use crate::make::scan_sheet_layout::ALIGNER_DISTANCE_FROM_CORNER;
const DARK_THRESHOLD: u8 = 110; // all pixels darker than this are target candidates

/// Result of a scan-sheet detection pass: the (x, y) centers of the detected
/// bar targets in the perspective-corrected image.
#[derive(Debug)]
pub struct BarsFound {
    pub bars: Vec<(f64, f64)>,
}
impl BarsFound {
    /// Detects bar targets in `input_image`: thresholds the image, locates the
    /// four corner aligners, perspective-transforms the sheet into a square of
    /// `new_image_height` pixels, then re-detects targets and returns their
    /// bar centers.
    ///
    /// NOTE(review): writes several debug images ("bruh.png", "debug.png",
    /// "transformed.png", "debug2.png") to the working directory on every
    /// call — consider gating them behind a debug flag. A leftover `dbg!();`
    /// call was removed.
    pub fn from_image(input_image: &Image) -> BarsFound {
        // Pixels darker than DARK_THRESHOLD become target candidates.
        let target_candidates = BooleanMatrix::from_image(&input_image, DARK_THRESHOLD);
        target_candidates.as_image().output_to_file("bruh.png");
        let mesh = TargetMesh::from_matrix(&target_candidates);
        let mut debug_image = input_image.clone();
        mesh.add_to_image(&mut debug_image);
        debug_image.output_to_file("debug.png");
        let mut aligner_centers = mesh.get_aligner_centers();
        // Where the aligners should land in the normalized unit square
        // (clockwise from the top-left corner).
        let d = ALIGNER_OUTER_RADIUS + ALIGNER_DISTANCE_FROM_CORNER;
        let mut destination_centers = [(d, d), (1.0 - d, d), (1.0 - d, 1.0 - d), (d, 1.0 - d)];
        let new_image_height = 500; // output resolution; any reasonable size works
        // Scale normalized coordinates into pixel coordinates.
        // NOTE(review): `base` is presumably the image width — confirm.
        for (x, y) in aligner_centers.iter_mut() {
            *x *= input_image.base as f64;
            *y *= input_image.height as f64;
        }
        for (x, y) in destination_centers.iter_mut() {
            *x *= new_image_height as f64;
            *y *= new_image_height as f64;
        }
        let transformed_image = input_image.perspective_transform(
            &aligner_centers,
            &destination_centers,
            new_image_height,
            new_image_height,
        );
        transformed_image.output_to_file("transformed.png");
        // Re-run detection on the rectified image for accurate bar centers.
        let transformed_image_matrix =
            BooleanMatrix::from_image(&transformed_image, DARK_THRESHOLD);
        let new_target_mesh = TargetMesh::from_matrix(&transformed_image_matrix);
        let mut output_image_2 = transformed_image.clone();
        new_target_mesh.add_to_image(&mut output_image_2);
        output_image_2.output_to_file("debug2.png");
        BarsFound { bars: new_target_mesh.get_bar_centers() }
    }
}
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Serializes a `CreateBatchInferenceJobInput` into a JSON request body.
pub fn serialize_operation_create_batch_inference_job(
    input: &crate::input::CreateBatchInferenceJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_batch_inference_job_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
/// Serializes a `CreateCampaignInput` into a JSON request body.
pub fn serialize_operation_create_campaign(
    input: &crate::input::CreateCampaignInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_campaign_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
/// Serializes a `CreateDatasetInput` into a JSON request body.
pub fn serialize_operation_create_dataset(
    input: &crate::input::CreateDatasetInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_dataset_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_dataset_export_job(
input: &crate::input::CreateDatasetExportJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_dataset_export_job_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_dataset_group(
input: &crate::input::CreateDatasetGroupInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_dataset_group_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_dataset_import_job(
input: &crate::input::CreateDatasetImportJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_dataset_import_job_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_event_tracker(
input: &crate::input::CreateEventTrackerInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_event_tracker_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_filter(
input: &crate::input::CreateFilterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_filter_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_schema(
input: &crate::input::CreateSchemaInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_schema_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_solution(
input: &crate::input::CreateSolutionInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_solution_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_solution_version(
input: &crate::input::CreateSolutionVersionInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_solution_version_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_campaign(
input: &crate::input::DeleteCampaignInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_campaign_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_dataset(
input: &crate::input::DeleteDatasetInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_dataset_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_dataset_group(
input: &crate::input::DeleteDatasetGroupInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_dataset_group_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_event_tracker(
input: &crate::input::DeleteEventTrackerInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_event_tracker_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_filter(
input: &crate::input::DeleteFilterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_filter_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_schema(
input: &crate::input::DeleteSchemaInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_schema_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_solution(
input: &crate::input::DeleteSolutionInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_solution_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_algorithm(
input: &crate::input::DescribeAlgorithmInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_algorithm_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_batch_inference_job(
input: &crate::input::DescribeBatchInferenceJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_batch_inference_job_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_campaign(
input: &crate::input::DescribeCampaignInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_campaign_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_dataset(
input: &crate::input::DescribeDatasetInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_dataset_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_dataset_export_job(
input: &crate::input::DescribeDatasetExportJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_dataset_export_job_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_dataset_group(
input: &crate::input::DescribeDatasetGroupInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_dataset_group_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_dataset_import_job(
input: &crate::input::DescribeDatasetImportJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_dataset_import_job_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_event_tracker(
input: &crate::input::DescribeEventTrackerInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_event_tracker_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_feature_transformation(
input: &crate::input::DescribeFeatureTransformationInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_feature_transformation_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_filter(
input: &crate::input::DescribeFilterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_filter_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_recipe(
input: &crate::input::DescribeRecipeInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_recipe_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_schema(
input: &crate::input::DescribeSchemaInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_schema_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_solution(
input: &crate::input::DescribeSolutionInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_solution_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_solution_version(
input: &crate::input::DescribeSolutionVersionInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_solution_version_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_solution_metrics(
input: &crate::input::GetSolutionMetricsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_get_solution_metrics_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_batch_inference_jobs(
input: &crate::input::ListBatchInferenceJobsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_batch_inference_jobs_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_campaigns(
input: &crate::input::ListCampaignsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_campaigns_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_dataset_export_jobs(
input: &crate::input::ListDatasetExportJobsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_dataset_export_jobs_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_dataset_groups(
input: &crate::input::ListDatasetGroupsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_dataset_groups_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_dataset_import_jobs(
input: &crate::input::ListDatasetImportJobsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_dataset_import_jobs_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_datasets(
input: &crate::input::ListDatasetsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_datasets_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_event_trackers(
input: &crate::input::ListEventTrackersInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_event_trackers_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_filters(
input: &crate::input::ListFiltersInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_filters_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_recipes(
input: &crate::input::ListRecipesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_recipes_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_schemas(
input: &crate::input::ListSchemasInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_schemas_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_solutions(
input: &crate::input::ListSolutionsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_solutions_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_solution_versions(
input: &crate::input::ListSolutionVersionsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_solution_versions_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_stop_solution_version_creation(
input: &crate::input::StopSolutionVersionCreationInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_stop_solution_version_creation_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_update_campaign(
input: &crate::input::UpdateCampaignInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_update_campaign_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
|
use sat::formula::{Lit, Var, LitMap};
use sat::formula::assignment::Assignment;
use sat::formula::clause::*;
/// One entry in a literal's watch list: the watched clause plus a "blocker"
/// literal from that clause. If the blocker is already satisfied the clause
/// cannot become falsified, so the clause itself need not be inspected.
#[derive(Clone, Copy, Debug)]
struct Watcher {
    pub cref : ClauseRef,
    pub blocker : Lit
}

/// The watch list for a single literal. `dirty` marks that lazily-unwatched
/// (possibly deleted) clauses may still be present and must be purged before
/// the line is used.
#[derive(Debug)]
struct WatchesLine {
    watchers : Vec<Watcher>,
    dirty : bool
}

/// Bookkeeping for two-watched-literal unit propagation: one watch line per
/// literal, plus a running count of propagated literals.
pub struct Watches {
    watches : LitMap<WatchesLine>,
    // Total number of literals dequeued by `propagate` (statistics).
    pub propagations : u64
}
impl Watches {
    /// Creates an empty watch store with a zeroed propagation counter.
    pub fn new() -> Watches {
        Watches { watches : LitMap::new()
                , propagations : 0
                }
    }

    /// Allocates empty watch lines for both polarities of `var`.
    pub fn initVar(&mut self, var : Var) {
        self.initLit(var.posLit());
        self.initLit(var.negLit());
    }

    // Inserts a fresh, clean watch line for one literal.
    fn initLit(&mut self, lit : Lit) {
        self.watches.insert(&lit, WatchesLine {
            watchers : Vec::new(),
            dirty : false,
        });
    }

    /// Removes the watch lines of both polarities of `var` if they are empty.
    pub fn tryClearVar(&mut self, var : Var) {
        self.tryClearLit(var.posLit());
        self.tryClearLit(var.negLit());
    }

    // Drops a literal's watch line only when it holds no watchers.
    fn tryClearLit(&mut self, lit : Lit) {
        if self.watches[&lit].watchers.is_empty() {
            self.watches.remove(&lit);
        }
    }

    /// Starts watching clause `cr`: each of the two head literals is watched
    /// under its negation, with the *other* head literal as blocker.
    pub fn watchClause(&mut self, c : &Clause, cr : ClauseRef) {
        let (c0, c1) = c.headPair();
        self.watches[&!c0].watchers.push(Watcher { cref : cr, blocker : c1 });
        self.watches[&!c1].watchers.push(Watcher { cref : cr, blocker : c0 });
    }

    /// Eagerly removes clause `cr` from both watch lines (O(len) scans).
    pub fn unwatchClauseStrict(&mut self, c : &Clause, cr : ClauseRef)
    {
        let (c0, c1) = c.headPair();
        self.watches[&!c0].watchers.retain(|w| w.cref != cr);
        self.watches[&!c1].watchers.retain(|w| w.cref != cr);
    }

    /// Lazily unwatches a clause: only marks both lines dirty; the dead
    /// watchers are purged on the next `propagate` over those lines.
    pub fn unwatchClauseLazy(&mut self, c : &Clause)
    {
        let (c0, c1) = c.headPair();
        self.watches[&!c0].dirty = true;
        self.watches[&!c1].dirty = true;
    }

    // Description:
    //   Propagates all enqueued facts. If a conflict arises, the conflicting clause is returned,
    //   otherwise CRef_Undef.
    //
    // Post-conditions:
    //   * the propagation queue is empty, even if there was a conflict.
    pub fn propagate(&mut self, ca : &mut ClauseAllocator, assigns : &mut Assignment) -> Option<ClauseRef> {
        while let Some(p) = assigns.dequeue() {
            self.propagations += 1;
            let false_lit = !p;
            {
                // Compact the line first if clauses were unwatched lazily.
                let ref mut line = self.watches[&p];
                if line.dirty {
                    line.watchers.retain(|w| { !ca.isDeleted(w.cref) });
                    line.dirty = false;
                }
            }
            // `i` scans the watcher list, `j` is the write cursor for watchers
            // that stay on this line (classic in-place filter).
            let mut i = 0;
            let mut j = 0;
            loop {
                let (cw, new_watch) = {
                    let ref mut p_watches = self.watches[&p].watchers;
                    if i >= p_watches.len() { break; }
                    let pwi = p_watches[i];
                    i += 1;
                    // Blocker already satisfied: clause can't be falsified now,
                    // keep the watcher unchanged and move on.
                    if assigns.isSat(pwi.blocker) {
                        p_watches[j] = pwi;
                        j += 1;
                        continue;
                    }
                    // Normalize so the false literal sits at index 1.
                    let c = ca.edit(pwi.cref);
                    if c.head() == false_lit {
                        c.swap(0, 1);
                    }
                    assert!(c[1] == false_lit);
                    // If 0th watch is true, then clause is already satisfied.
                    let cw = Watcher { cref : pwi.cref, blocker : c.head() };
                    if cw.blocker != pwi.blocker && assigns.isSat(cw.blocker) {
                        p_watches[j] = cw;
                        j += 1;
                        continue;
                    }
                    // Look for new watch:
                    (cw, c.pullLiteral(1, |lit| { !assigns.isUnsat(lit) }))
                };
                match new_watch {
                    Some(lit) => {
                        // Found a non-false literal: move the watcher to it.
                        self.watches[&!lit].watchers.push(cw);
                    }
                    // Did not find watch -- clause is unit under assignment:
                    None => {
                        let ref mut p_watches = self.watches[&p].watchers;
                        p_watches[j] = cw;
                        j += 1;
                        if assigns.isUnsat(cw.blocker) {
                            // Conflict: drain the queue (post-condition) and
                            // preserve the rest of this watch line.
                            assigns.dequeueAll();
                            // Copy the remaining watches:
                            while i < p_watches.len() {
                                p_watches[j] = p_watches[i];
                                j += 1;
                                i += 1;
                            }
                            p_watches.truncate(j);
                            return Some(cw.cref);
                        } else {
                            // Unit clause: enqueue the implied literal.
                            assigns.assignLit(cw.blocker, Some(cw.cref));
                        }
                    }
                }
            }
            // Shrink the line to the watchers we kept.
            self.watches[&p].watchers.truncate(j);
        }
        None
    }

    /// Garbage-collection hook: drops watchers of deleted clauses and rewrites
    /// every surviving clause reference into the new allocator.
    pub fn relocGC(&mut self, from : &mut ClauseAllocator, to : &mut ClauseAllocator) {
        for (_, line) in self.watches.iter_mut() {
            line.dirty = false;
            line.watchers.retain(|w| { !from.isDeleted(w.cref) });
            for w in line.watchers.iter_mut() {
                w.cref = from.relocTo(to, w.cref);
            }
        }
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::alloc::Layout;
use std::sync::Arc;
use common_exception::Result;
use common_expression::types::DataType;
use common_expression::DataSchemaRef;
use common_functions::aggregates::get_layout_offsets;
use common_functions::aggregates::AggregateFunctionRef;
use common_functions::aggregates::StateAddr;
use common_sql::IndexType;
use crate::pipelines::processors::transforms::group_by::Area;
/// Immutable parameters shared by the aggregation transforms.
pub struct AggregatorParams {
    pub input_schema: DataSchemaRef,
    // Column indexes used as the grouping key.
    pub group_columns: Vec<IndexType>,
    // Data types of the grouping key columns.
    pub group_data_types: Vec<DataType>,
    pub aggregate_functions: Vec<AggregateFunctionRef>,
    // Per-function argument column indexes, parallel to `aggregate_functions`.
    pub aggregate_functions_arguments: Vec<Vec<usize>>,
    // about function state memory layout
    // If there is no aggregate function, layout is None
    pub layout: Option<Layout>,
    // Byte offset of each function's state inside `layout`,
    // parallel to `aggregate_functions`.
    pub offsets_aggregate_states: Vec<usize>,
    // Limit is push down to AggregatorTransform
    pub limit: Option<usize>,
}
impl AggregatorParams {
    /// Builds the shared aggregation parameters.
    ///
    /// When `agg_funcs` is empty, `layout` stays `None` and
    /// `offsets_aggregate_states` stays empty — no state memory is needed.
    ///
    /// # Errors
    /// Propagates any error from `get_layout_offsets`.
    pub fn try_create(
        input_schema: DataSchemaRef,
        group_data_types: Vec<DataType>,
        group_columns: &[usize],
        agg_funcs: &[AggregateFunctionRef],
        agg_args: &[Vec<usize>],
        limit: Option<usize>,
    ) -> Result<Arc<AggregatorParams>> {
        // NOTE: the original allocated this Vec twice (once here and once
        // inside the branch); a single allocation is enough.
        let mut states_offsets: Vec<usize> = Vec::with_capacity(agg_funcs.len());
        let states_layout = if agg_funcs.is_empty() {
            None
        } else {
            Some(get_layout_offsets(agg_funcs, &mut states_offsets)?)
        };
        Ok(Arc::new(AggregatorParams {
            input_schema,
            group_columns: group_columns.to_vec(),
            group_data_types,
            aggregate_functions: agg_funcs.to_vec(),
            aggregate_functions_arguments: agg_args.to_vec(),
            layout: states_layout,
            offsets_aggregate_states: states_offsets,
            limit,
        }))
    }

    /// Allocates one block of aggregate-state memory from `area` and
    /// initializes every function's state slot at its precomputed offset.
    ///
    /// Returns the base address of the freshly initialized block.
    ///
    /// # Panics
    /// Panics if there are no aggregate functions (`layout` is `None`);
    /// callers must only invoke this when aggregation state exists.
    pub fn alloc_layout(&self, area: &mut Area) -> StateAddr {
        let layout = self
            .layout
            .expect("alloc_layout requires at least one aggregate function");
        let place = Into::<StateAddr>::into(area.alloc_layout(layout));
        // Each function's state lives at a fixed byte offset inside the block.
        for (func, &offset) in self
            .aggregate_functions
            .iter()
            .zip(self.offsets_aggregate_states.iter())
        {
            func.init_state(place.next(offset));
        }
        place
    }

    /// True if any aggregate function is a distinct combinator
    /// (its name contains "DistinctCombinator").
    pub fn has_distinct_combinator(&self) -> bool {
        self.aggregate_functions
            .iter()
            .any(|f| f.name().contains("DistinctCombinator"))
    }
}
|
use std::io::{
Cursor,
Read,
};
use std::marker::PhantomData;
use std::path::PathBuf;
use regex::Regex;
use sourcerenderer_core::platform::IO;
use sourcerenderer_core::Platform;
use crate::asset::asset_manager::{
AssetContainer,
AssetFile,
};
use crate::asset::loaders::csgo_loader::CSGOMapLoaderError::CSGONotFound;
use crate::asset::loaders::vpk_container::{
CSGO_PAK_NAME_PATTERN,
CSGO_PRIMARY_PAK_NAME_PATTERN,
};
// Matches CS:GO map file names such as "de_dust2.bsp".
pub(super) const CSGO_MAP_NAME_PATTERN: &str = r"(de|cs|dm|am|surf|aim)_[a-zA-Z0-9_-]+\.bsp";

/// Asset container backed by a CS:GO installation directory on disk.
pub struct CSGODirectoryContainer<P: Platform> {
    // Root directory of the CS:GO installation.
    path: String,
    map_name_regex: Regex,
    primary_pak_name_regex: Regex,
    pak_name_regex: Regex,
    // Ties the container to the platform's file type without storing one.
    _p: PhantomData<<P::IO as IO>::File>,
}
// SAFETY: the container itself only holds `String`s and compiled `Regex`es;
// the `PhantomData<<P::IO as IO>::File>` marker is what blocks the auto
// impls. NOTE(review): this asserts no real `File` is ever stored — confirm
// that remains true if fields are added.
unsafe impl<P: Platform> Send for CSGODirectoryContainer<P> {}
unsafe impl<P: Platform> Sync for CSGODirectoryContainer<P> {}

/// Errors raised while opening a CS:GO installation directory.
#[derive(Debug)]
pub enum CSGOMapLoaderError {
    // No CS:GO executable was found at the given path.
    CSGONotFound,
}
impl<P: Platform> CSGODirectoryContainer<P> {
    /// Builds a container rooted at `path`, first verifying that the
    /// directory really holds a CS:GO installation by probing for the
    /// platform-specific game executable.
    pub fn new(path: &str) -> Result<Self, CSGOMapLoaderError> {
        let mut executable = PathBuf::from(path);
        #[cfg(target_os = "windows")]
        executable.push("csgo.exe");
        #[cfg(target_os = "linux")]
        executable.push("csgo_linux64");
        if !<P::IO as IO>::external_asset_exists(executable) {
            return Err(CSGONotFound);
        }
        Ok(Self {
            path: path.to_owned(),
            map_name_regex: Regex::new(CSGO_MAP_NAME_PATTERN).unwrap(),
            primary_pak_name_regex: Regex::new(CSGO_PRIMARY_PAK_NAME_PATTERN).unwrap(),
            pak_name_regex: Regex::new(CSGO_PAK_NAME_PATTERN).unwrap(),
            _p: PhantomData,
        })
    }
}
impl<P: Platform> AssetContainer for CSGODirectoryContainer<P> {
    /// A path belongs to this container when it looks like a CS:GO map
    /// (`*.bsp`) or one of the VPK pak archives.
    fn contains(&self, path: &str) -> bool {
        [
            &self.map_name_regex,
            &self.primary_pak_name_regex,
            &self.pak_name_regex,
        ]
        .iter()
        .any(|re| re.is_match(path))
    }

    /// Resolves `path` inside the installation directory and reads the whole
    /// file into memory. Returns `None` for unrecognized names or I/O errors.
    fn load(&self, path: &str) -> Option<AssetFile> {
        // Pick the subdirectory and file extension from the kind of asset
        // the name matches; bail out for anything unrecognized.
        let (subdir, extension) = if self.map_name_regex.is_match(path) {
            (Some("maps"), ".bsp")
        } else if self.primary_pak_name_regex.is_match(path)
            || self.pak_name_regex.is_match(path)
        {
            (None, ".vpk")
        } else {
            return None;
        };

        let mut actual_path = PathBuf::from(&self.path);
        actual_path.push("csgo");
        if let Some(dir) = subdir {
            actual_path.push(dir);
        }
        let mut file_name = path.to_owned();
        if !file_name.ends_with(extension) {
            file_name.push_str(extension);
        }
        actual_path.push(file_name);

        let mut file = <P::IO as IO>::open_external_asset(&actual_path).ok()?;
        let mut buf = Vec::<u8>::new();
        file.read_to_end(&mut buf).ok()?;
        Some(AssetFile {
            path: path.to_string(),
            data: Cursor::new(buf.into_boxed_slice()),
        })
    }
}
|
use nodes::prelude::*;
/// A macro definition: its name, formal argument list, body, and the set of
/// invocation sites that reference it.
pub struct Definition {
    name: String,
    args: NodeListP,
    // body of the macro declaration
    body: Ptr<NodeList<NodeP>>,
    // referencing macro invocations
    references: RefCell<Vec<Weak<Node>>>,
    // Local environment built from the body's commands and parameters.
    env: LocalEnv
}
impl Node for Definition {
    // Children are the argument list and the body; the weak back-references
    // are deliberately not reported as children.
    fn childs(&self, out: &mut Vec<NodeP>) {
        out.push(self.args.clone().into());
        out.push(self.body.clone().into());
    }
    // Lays out the args and body under this definition's name.
    fn layout(&self, env: LayoutChain, w: &mut Writer) {
        w.with(&self.name,
            &mut |w| self.args.layout(env.clone() /* .link(self) */, w),
            &mut |w| self.body.layout(env.clone() /* .link(self) */, w)
        )
    }
    // Records an invocation site as a weak back-reference (no ownership cycle).
    fn add_ref(&self, source: &Rc<Node>) {
        self.references.borrow_mut().push(Rc::downgrade(source));
    }
    fn env(&self) -> Option<&LocalEnv> {
        Some(&self.env)
    }
}
impl Definition {
    /// Asynchronously builds a `Definition` from a parsed source parameter:
    /// first initializes the local environment from the body's commands and
    /// parameters, then constructs the argument node list and processes the
    /// body into child nodes.
    ///
    /// NOTE(review): pre-2018 style (`box` expression, futures-0.1
    /// `Box<Future>`); kept as-is to match the surrounding codebase.
    pub fn from_param(io: Io, env: GraphChain, p: source::Parameter)
     -> Box<Future<Item=Definition, Error=LoomError>>
    {
        let args = p.args;
        let name = p.name.to_string();
        let body = p.value;
        let childs = body.childs;
        box init_env(io.clone(), env, body.commands, body.parameters)
        .and_then(move |env| {
            // Arguments are turned into item nodes under the new environment.
            let arglist = Ptr::new(
                NodeList::from(&io,
                    args.into_iter()
                    .map(|n| item_node(&io, &env, n))
                )
            );
            process_body(io, env, childs)
            .and_then(move |(env, childs)| {
                Ok(Definition {
                    name: name,
                    args: arglist,
                    body: childs,
                    references: RefCell::new(vec![]),
                    env: env.take()
                })
            })
        })
    }

    /// The macro's name.
    pub fn name(&self) -> &str {
        &self.name
    }
}
|
use crate::LED_COUNT;
use nrf51822::GPIO;
/// A color represented in 24-bit GRB format (the same ordering the strip uses).
#[derive(Copy, Clone)]
#[repr(packed)]
pub struct Color {
    pub g: u8,
    pub r: u8,
    pub b: u8,
}

impl Color {
    /// All channels off.
    pub const fn black() -> Color {
        Color { g: 0, r: 0, b: 0 }
    }

    /// All channels at full intensity.
    pub const fn white() -> Color {
        Color { g: 255, r: 255, b: 255 }
    }

    /// Invert all components.
    pub fn invert(&mut self) {
        // For u8, bitwise NOT is exactly 255 - x.
        self.g = !self.g;
        self.r = !self.r;
        self.b = !self.b;
    }

    /// Subtract the given amount from each component, saturating at zero.
    pub fn decay(&mut self, g: u8, r: u8, b: u8) {
        let Color { g: cg, r: cr, b: cb } = *self;
        *self = Color {
            g: cg.saturating_sub(g),
            r: cr.saturating_sub(r),
            b: cb.saturating_sub(b),
        };
    }
}
/// A canvas abstracts painting over the strip.
///
/// This struct accumulates changes and only sends them to the strip when `flush` is called.
pub struct Canvas {
    // GPIO pin number driving the strip's data line.
    pin: usize,
    // One GRB color per LED; transmitted verbatim on `flush`.
    buf: [Color; LED_COUNT],
}
impl Canvas {
    /// Creates a canvas with every LED black, driving the strip on pin 1.
    pub const fn new() -> Self {
        Canvas {
            pin: 1,
            buf: [Color::black(); LED_COUNT],
        }
    }
    /// Flush this canvas to LED.
    ///
    /// Hands the raw GRB byte buffer to the external bit-banging routine,
    /// which drives the strip by writing the pin mask to the GPIO
    /// OUTCLR/OUTSET registers.
    pub fn flush(&self, gpio: &GPIO) {
        extern "C" {
            // Implemented in assembly (send_buffer.S); generates the LED
            // strip waveform for `length` bytes starting at `ptr`.
            fn send_buf_ws2818(
                unused: usize,
                mask: usize,
                clraddr: *const usize,
                setaddr: *const usize,
                ptr: *const u8,
                length: usize,
            );
        }
        // Three bytes (G, R, B) per LED.
        const BUF_LEN: usize = LED_COUNT * 3;
        let pin_mask = 1 << self.pin;
        // `Color` is repr(packed) with three u8 fields, so the buffer is a
        // contiguous run of GRB bytes.
        let buf_ptr = self.buf.as_ptr() as *const _;
        // Addresses of the bit-set/bit-clear registers: writing a mask to
        // OUTSET/OUTCLR sets or clears only the masked bits. They are device
        // specific and could be hardcoded, but reading them from the
        // peripheral struct is clearer.
        let gpio_clr_ptr = &gpio.outclr as *const _ as *const usize;
        let gpio_set_ptr = &gpio.outset as *const _ as *const usize;
        // SAFETY: `buf_ptr` points at `BUF_LEN` valid bytes owned by `self`,
        // and the register pointers come from the borrowed `gpio` peripheral.
        unsafe {
            send_buf_ws2818(
                0, // unused
                pin_mask,
                gpio_clr_ptr,
                gpio_set_ptr,
                buf_ptr,
                // HardFault is raised without -1 when reading R4 on send_buffer.S:85, LOL.
                // NOTE(review): i.e. the asm routine apparently expects
                // length - 1 — confirm the length contract in send_buffer.S.
                BUF_LEN - 1,
            );
        }
    }
    /// Set all LEDs to black.
    pub fn clear(&mut self) {
        self.buf.iter_mut().for_each(|v| *v = Color::black());
    }
    /// Invert all colors.
    pub fn invert(&mut self) {
        self.buf.iter_mut().for_each(|v| v.invert());
    }
    /// Get a mutable reference to the color that corresponds to the given position.
    /// Returns `None` when `idx` is out of range.
    pub fn at_mut(&mut self, idx: usize) -> Option<&mut Color> {
        self.buf.get_mut(idx)
    }
    /// Returns the backing buffer as a slice.
    pub fn as_slice_mut(&mut self) -> &mut [Color] {
        &mut self.buf
    }
    /// Set the LED by the given `idx` to the specified `color`.
    /// Panics if `idx >= LED_COUNT` (unchecked indexing).
    pub fn set_color(&mut self, idx: usize, color: Color) {
        self.buf[idx] = color;
    }
}
|
use utilities::prelude::*;
use crate::impl_vk_handle;
use crate::prelude::*;
use std::sync::Arc;
/// Builder for [`DescriptorPool`]; configure via the `set_*` methods, then
/// call `build`.
pub struct DescriptorPoolBuilder {
    // Layout the pool allocates sets for; must be set before `build`.
    layout: Option<Arc<DescriptorSetLayout>>,
    // Descriptor set count handed to `VkDescriptorPoolCreateInfo`.
    descriptor_count: u32,
    flags: VkDescriptorPoolCreateFlagBits,
}
impl DescriptorPoolBuilder {
    /// ORs additional create flags into the builder.
    pub fn set_flags(mut self, flags: impl Into<VkDescriptorPoolCreateFlagBits>) -> Self {
        self.flags |= flags.into();
        self
    }
    /// Sets how many descriptor sets the pool may hold.
    pub fn set_descriptor_set_count(mut self, count: u32) -> Self {
        self.descriptor_count = count;
        self
    }
    /// Sets the descriptor set layout (required).
    pub fn set_layout(mut self, layout: Arc<DescriptorSetLayout>) -> Self {
        self.layout = Some(layout);
        self
    }
    /// Creates the Vulkan descriptor pool.
    ///
    /// # Errors
    /// Fails when no layout was set or when pool creation fails; debug
    /// builds additionally report a missing layout or a zero
    /// `descriptor_count` eagerly via `create_error!`.
    pub fn build(self, device: Arc<Device>) -> VerboseResult<Arc<DescriptorPool>> {
        // Extra validation compiled into debug builds only.
        if cfg!(debug_assertions) {
            if self.layout.is_none() {
                create_error!("no layout set!");
            }
            if self.descriptor_count == 0 {
                create_error!("descriptor count must be greater than 0");
            }
        }
        let layout = self.layout.ok_or("descriptor set layout was not set!")?;
        // Pool sizes are derived from the layout's bindings.
        let descriptor_pool_ci =
            VkDescriptorPoolCreateInfo::new(self.flags, self.descriptor_count, layout.pool_sizes());
        let descriptor_pool = device.create_descriptor_pool(&descriptor_pool_ci)?;
        Ok(Arc::new(DescriptorPool {
            device,
            descriptor_pool,
            descriptor_set_layout: layout,
        }))
    }
}
/// A Vulkan descriptor pool together with the device and layout it was
/// created from; the raw handle is destroyed on drop.
#[derive(Debug)]
pub struct DescriptorPool {
    device: Arc<Device>,
    descriptor_pool: VkDescriptorPool,
    // Kept alive for the pool's whole lifetime.
    descriptor_set_layout: Arc<DescriptorSetLayout>,
}
impl DescriptorPool {
    /// Starts building a pool; defaults to a single set and the
    /// FREE_DESCRIPTOR_SET flag.
    pub fn builder() -> DescriptorPoolBuilder {
        DescriptorPoolBuilder {
            flags: VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.into(),
            descriptor_count: 1,
            layout: None,
        }
    }

    /// Returns all descriptor sets allocated from this pool back to it.
    pub fn reset(&self) -> VerboseResult<()> {
        self.device
            .reset_descriptor_pool(self.descriptor_pool, VK_DESCRIPTOR_POOL_RESET_NULL_BIT)
    }

    /// Begins building a descriptor set allocated from `descriptor_pool`.
    pub fn prepare_set(descriptor_pool: &Arc<DescriptorPool>) -> DescriptorSetBuilder {
        let device = descriptor_pool.device.clone();
        DescriptorSet::builder(device, descriptor_pool.clone())
    }
}
impl VulkanDevice for DescriptorPool {
    /// Device the pool was created on.
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}
// Presumably generates the `VkHandle<VkDescriptorPool>` impls that return
// `self.descriptor_pool` — confirm in the `impl_vk_handle!` macro definition.
impl_vk_handle!(DescriptorPool, VkDescriptorPool, descriptor_pool);
// The pool also exposes the handle of the *layout* it was built from, so APIs
// that need a `VkDescriptorSetLayout` can accept a pool (by value, reference,
// `Arc`, or `&Arc`) directly. All four impls delegate to the stored layout.
impl VkHandle<VkDescriptorSetLayout> for DescriptorPool {
    fn vk_handle(&self) -> VkDescriptorSetLayout {
        self.descriptor_set_layout.vk_handle()
    }
}
impl<'a> VkHandle<VkDescriptorSetLayout> for &'a DescriptorPool {
    fn vk_handle(&self) -> VkDescriptorSetLayout {
        self.descriptor_set_layout.vk_handle()
    }
}
impl VkHandle<VkDescriptorSetLayout> for Arc<DescriptorPool> {
    fn vk_handle(&self) -> VkDescriptorSetLayout {
        self.descriptor_set_layout.vk_handle()
    }
}
impl<'a> VkHandle<VkDescriptorSetLayout> for &'a Arc<DescriptorPool> {
    fn vk_handle(&self) -> VkDescriptorSetLayout {
        self.descriptor_set_layout.vk_handle()
    }
}
impl Drop for DescriptorPool {
    fn drop(&mut self) {
        // Destroy the raw Vulkan handle; the `Arc`'d device and layout are
        // released afterwards by their own drops.
        self.device.destroy_descriptor_pool(self.descriptor_pool);
    }
}
use crate::{ffi::*, handle_ffi_result};
/// C entry point: builds a `DescriptorPool` from raw `Arc` handles and returns
/// a raw pointer to it (null on failure, via `handle_ffi_result!`).
#[no_mangle]
pub extern "C" fn create_descriptor_pool(
    flags: VkDescriptorPoolCreateFlagBits,
    descriptor_count: u32,
    descriptor_set_layout: *const DescriptorSetLayout,
    device: *const Device,
) -> *const DescriptorPool {
    // NOTE(review): `Arc::from_raw` takes over one strong reference for each
    // pointer; if callers keep using their `device`/`layout` handles after this
    // call, both refcounts end up short by one. Confirm the FFI ownership
    // convention (compare `destroy_descriptor_pool`, which consumes on purpose).
    let device = unsafe { Arc::from_raw(device) };
    let layout = unsafe { Arc::from_raw(descriptor_set_layout) };
    let pool_res = DescriptorPool::builder()
        .set_flags(flags)
        .set_descriptor_set_count(descriptor_count)
        .set_layout(layout)
        .build(device);
    handle_ffi_result!(pool_res)
}
/// C entry point: resets the pool, returning `true` on success. On failure the
/// error is stored for retrieval via the last-error mechanism and `false` is
/// returned.
#[no_mangle]
pub extern "C" fn reset_descriptor_pool(descriptor_pool: *const DescriptorPool) -> bool {
    // Borrow the pool instead of re-taking ownership: the previous
    // `Arc::from_raw` + implicit drop decremented the strong count on every
    // call, so a reset could free the pool while the caller still held its
    // handle. Ownership is only transferred in `destroy_descriptor_pool`.
    // SAFETY: caller must pass a pointer previously returned by
    // `create_descriptor_pool` that has not been destroyed.
    let pool = unsafe { &*descriptor_pool };
    match pool.reset() {
        Ok(_) => true,
        Err(err) => {
            update_last_error(err);
            false
        }
    }
}
/// C entry point: consumes the caller's strong reference; the pool is
/// destroyed when the last reference goes away.
// `#[no_mangle]` was missing here (unlike the sibling FFI functions), so the
// symbol would have been mangled and unreachable from C.
#[no_mangle]
pub extern "C" fn destroy_descriptor_pool(descriptor_pool: *const DescriptorPool) {
    // SAFETY: the caller transfers ownership of one strong reference.
    let _pool = unsafe { Arc::from_raw(descriptor_pool) };
}
|
use dlal_component_base::{component, err, serde_json, Body, CmdResult, View};
use multiqueue2::{MPMCSender, MPMCUniReceiver};
/// Work items sent from the command thread to the audio thread.
#[derive(Debug)]
enum QueuedItem {
    /// A command to execute on the audio thread.
    Command {
        view: View,
        body: Box<serde_json::Value>,
        /// When true, no reply is sent back on the return queue.
        detach: bool,
    },
    /// Delay processing of subsequent items by this many samples.
    Wait(usize),
}
/// Queue pair connecting the command thread and the audio thread.
struct Queues {
    // Command thread -> audio thread (commands and waits).
    to_audio_send: MPMCSender<QueuedItem>,
    to_audio_recv: MPMCUniReceiver<QueuedItem>,
    // Audio thread -> command thread (command results; `None` means no result).
    fro_audio_send: MPMCSender<Box<Option<serde_json::Value>>>,
    fro_audio_recv: MPMCUniReceiver<Box<Option<serde_json::Value>>>,
}
impl Queues {
    /// Builds a fresh pair of bounded queues (commands in, replies out), with
    /// the receiving ends converted to single-consumer receivers.
    fn new(size: u64) -> Self {
        let (to_audio_send, to_audio_recv) = multiqueue2::mpmc_queue(size);
        let (fro_audio_send, fro_audio_recv) = multiqueue2::mpmc_queue(size);
        let to_audio_recv = to_audio_recv.into_single().unwrap();
        let fro_audio_recv = fro_audio_recv.into_single().unwrap();
        Self {
            to_audio_send,
            to_audio_recv,
            fro_audio_send,
            fro_audio_recv,
        }
    }
}
impl Default for Queues {
    fn default() -> Self {
        // Default capacity for both directions.
        const DEFAULT_SIZE: u64 = 128;
        Self::new(DEFAULT_SIZE)
    }
}
// Declares this crate's component: it accepts and emits commands, exposes the
// listed fields, and registers the "queue"/"wait"/"pause"/"resize" commands
// (dispatched to the `*_cmd` methods below). The exact generated items come
// from `dlal_component_base::component!` — confirm there.
component!(
    {"in": ["cmd"], "out": ["cmd"]},
    [
        "run_size",
        "uni",
        {"name": "field_helpers", "fields": ["last_error"], "kinds": ["r"]},
        {"name": "field_helpers", "fields": ["pause"], "kinds": ["rw"]},
    ],
    {
        queues: Queues,
        wait: usize,
        pause: bool,
        last_error: String,
    },
    {
        "queue": {"args": ["component", "command", "audio", "midi", "run", "body", "timeout_ms", "detach"]},
        "wait": {"args": ["samples"]},
        "pause": {"args": ["enable"]},
        "resize": {"args": ["size"]},
    },
);
impl ComponentTrait for Component {
    /// Audio-thread hook: once per run, drains queued commands unless paused
    /// or still waiting out a previously requested sample delay.
    fn run(&mut self) {
        if self.pause {
            return;
        }
        'outer: loop {
            // More than one run's worth of waiting left: consume this run.
            // NOTE(review): a wait in 1..=run_size is never decremented here,
            // so a small residue persists across runs — confirm intended.
            if self.wait > self.run_size {
                self.wait -= self.run_size;
                return;
            }
            // Drain queued items. A `Wait` item restarts the outer loop so the
            // updated wait amount is re-checked before further commands run.
            while let Ok(item) = self.queues.to_audio_recv.try_recv() {
                match item {
                    QueuedItem::Wait(wait) => {
                        self.wait += wait;
                        continue 'outer;
                    }
                    QueuedItem::Command { view, body, detach } => {
                        let result = view.command(&*body);
                        if !detach {
                            // The command thread is sleeping in `queue_cmd`
                            // and will pick this reply up.
                            self.queues
                                .fro_audio_send
                                .try_send(Box::new(result))
                                .expect("try_send failed");
                        } else if let Some(result) = result {
                            // Detached commands only surface errors through
                            // the readable `last_error` field.
                            if let Some(error) = result.get("error") {
                                self.last_error = error.as_str().unwrap_or(&error.to_string()).into();
                            }
                        }
                    }
                }
            }
            break;
        }
    }

    // No extra state to serialize beyond what the framework handles.
    fn to_json_cmd(&mut self, _body: serde_json::Value) -> CmdResult {
        Ok(None)
    }

    fn from_json_cmd(&mut self, _body: serde_json::Value) -> CmdResult {
        Ok(None)
    }
}
impl Component {
    /// `queue` command: forwards a command to the audio thread; unless
    /// detached, sleeps `timeout_ms` and then collects the reply.
    fn queue_cmd(&mut self, body: serde_json::Value) -> CmdResult {
        // Positional args per the registration above:
        // 5 = command body, 6 = timeout_ms, 7 = detach.
        let detach = body.arg(7)?;
        if let Err(e) = self
            .queues
            .to_audio_send
            .try_send(QueuedItem::Command {
                view: View::new(&body.at("args")?)?,
                body: Box::new(body.arg(5)?),
                detach,
            })
        {
            return Err(err!("try_send failed: {}", e).into());
        }
        if detach {
            return Ok(None);
        }
        // NOTE(review): always sleeps the full timeout even if the reply is
        // ready sooner, and errors if the audio thread needs longer — a
        // deadline-based poll loop would improve both; confirm acceptable.
        std::thread::sleep(std::time::Duration::from_millis(body.arg(6)?));
        Ok(*self.queues.fro_audio_recv.try_recv()?)
    }

    /// `wait` command: asks the audio thread to delay subsequent queued
    /// commands by the given number of samples.
    fn wait_cmd(&mut self, body: serde_json::Value) -> CmdResult {
        if let Err(e) = self
            .queues
            .to_audio_send
            .try_send(QueuedItem::Wait(body.arg(0)?))
        {
            return Err(err!("try_send failed: {}", e).into());
        }
        Ok(None)
    }

    /// `resize` command: replaces both queues with new ones of the given size.
    /// NOTE(review): any items still queued are dropped — confirm intended.
    fn resize_cmd(&mut self, body: serde_json::Value) -> CmdResult {
        self.queues = Queues::new(body.arg(0)?);
        Ok(None)
    }
}
|
use std::path::Path;
use actix_files::NamedFile;
use actix_web::{
middleware::Logger,
web::{self, Data},
App, HttpRequest, HttpServer, Responder,
};
use env_logger::Env;
use fintrack::utils::AuthProvider;
use fintrack::{services, Config, Db};
use true_layer::Client as TrueLayerClient;
/// Application entry point: loads configuration, connects to the database,
/// runs migrations, starts the background sync worker, and serves HTTP until
/// shutdown.
#[actix_web::main]
async fn main() -> anyhow::Result<()> {
    // Load `.env` if present; a missing file is fine.
    dotenv::dotenv().ok();
    // NOTE(review): `env_logger::from_env` is deprecated in newer env_logger
    // releases in favor of `Builder::from_env` — confirm the pinned version.
    env_logger::from_env(Env::default().default_filter_or("info")).init();
    let config = Config::from_env();
    let db = Db::connect(&config.db_url).await?;
    // Shared TrueLayer client: used by both the worker and request handlers.
    let true_layer = Data::new(TrueLayerClient::new(AuthProvider::new(db.clone())));
    fintrack::migrations::run(&db).await?;
    // Background sync runs for the lifetime of the process.
    fintrack::sync::start_worker(db.clone(), true_layer.clone().into_inner());
    let address = &config.http_address;
    let port = config.http_port;
    HttpServer::new({
        // Clone captured by the app factory, which runs once per worker.
        let db = db.clone();
        move || {
            App::new()
                .wrap(Logger::default())
                // NOTE(review): `db` is registered as plain app data (not
                // wrapped in `Data`), so handlers must extract it by its own
                // type — confirm the extractors match.
                .app_data(db.clone())
                .app_data(true_layer.clone())
                .service(services::connect("/connect"))
                .service(services::api("/api"))
                // Anything unmatched falls through to the SPA handler below.
                .default_service(web::get().to(spa_fallback))
        }
    })
    .bind(format!("{}:{}", address, port))?
    .run()
    .await?;
    db.close().await;
    Ok(())
}
/// Serves static files from `client/build`, falling back to `index.html` for
/// unknown paths so client-side routing keeps working.
async fn spa_fallback(req: HttpRequest) -> actix_web::Result<impl Responder> {
    let requested = Path::new("client/build").join(req.path().trim_start_matches('/'));
    let file = if requested.is_file() {
        NamedFile::open(requested)?
    } else {
        NamedFile::open("client/build/index.html")?
    };
    Ok(file)
}
|
use super::*;
/// Wraps a metadata table `Row`; the accessors below read its string fields
/// (presumably a row of the winmd TypeRef table — confirm against `Row`).
#[derive(Clone)]
pub struct TypeRef(pub Row);
impl TypeRef {
    /// The type's unqualified name (string field 1 of the row).
    pub fn name(&self) -> &'static str {
        self.0.str(1)
    }

    /// The type's namespace (string field 2 of the row).
    pub fn namespace(&self) -> &'static str {
        self.0.str(2)
    }

    /// Namespace + name combined. Reuses the accessors above so the field
    /// indices live in exactly one place (previously duplicated here).
    pub fn type_name(&self) -> TypeName {
        TypeName::new(self.namespace(), self.name())
    }
}
|
use std::any::type_name;
#[cfg(feature = "use_serde")]
use serde::{Deserialize, Serialize};
use crate::clickhouse::compacted_tables::schema::ValidateSchema;
use crate::Error;
/// ClickHouse MergeTree-family table engine selection.
#[cfg_attr(feature = "use_serde", derive(Serialize, Deserialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TableEngine {
    ReplacingMergeTree,
    /// Column names passed to `SummingMergeTree(...)`.
    SummingMergeTree(Vec<String>),
    AggregatingMergeTree,
}
#[allow(clippy::derivable_impls)]
impl Default for TableEngine {
fn default() -> Self {
TableEngine::ReplacingMergeTree
}
}
/// ClickHouse column compression codec.
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "use_serde", derive(Serialize, Deserialize))]
#[allow(clippy::upper_case_acronyms)]
pub enum CompressionMethod {
    /// LZ4HC with level; `validate` accepts 1..=9.
    LZ4HC(u8),
    /// ZSTD with level; `validate` accepts 1..=22.
    ZSTD(u8),
    /// Delta with `delta_bytes`; `validate` accepts 1, 2, 4 or 8.
    Delta(u8),
    DoubleDelta,
    Gorilla,
    T64,
}
impl ValidateSchema for CompressionMethod {
fn validate(&self) -> Result<(), Error> {
// validate compression levels
// https://clickhouse.tech/docs/en/sql-reference/statements/create/table/#create-query-general-purpose-codecs
match self {
Self::ZSTD(level) => {
if !(1u8..=22u8).contains(level) {
return Err(compression_level_out_of_range(type_name::<Self>()));
}
Ok(())
}
Self::LZ4HC(level) => {
if !(1u8..=9u8).contains(level) {
return Err(compression_level_out_of_range(type_name::<Self>()));
}
Ok(())
}
Self::Delta(delta_bytes) => {
if ![1, 2, 4, 8].contains(delta_bytes) {
return Err(Error::SchemaValidationError(
"Delta compression",
format!("Unsupported value for delta_bytes: {}", delta_bytes),
));
}
Ok(())
}
_ => Ok(()),
}
}
}
fn compression_level_out_of_range(location: &'static str) -> Error {
Error::SchemaValidationError(location, "compression level out of range".to_string())
}
impl Default for CompressionMethod {
fn default() -> Self {
Self::ZSTD(6)
}
}
|
// NOTE(review): svd2rust-style generated register accessors for MACPOCR —
// prefer regenerating from the device SVD over hand-editing.
#[doc = "Reader of register MACPOCR"]
pub type R = crate::R<u32, super::MACPOCR>;
#[doc = "Writer for register MACPOCR"]
pub type W = crate::W<u32, super::MACPOCR>;
#[doc = "Register MACPOCR `reset()`'s with value 0"]
impl crate::ResetValue for super::MACPOCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// One reader alias and one write proxy per field; each `bit`/`bits` call masks
// the field's bits out of the register value and ORs the new value in.
#[doc = "Reader of field `PTOEN`"]
pub type PTOEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PTOEN`"]
pub struct PTOEN_W<'a> {
    w: &'a mut W,
}
impl<'a> PTOEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `ASYNCEN`"]
pub type ASYNCEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ASYNCEN`"]
pub struct ASYNCEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ASYNCEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `APDREQEN`"]
pub type APDREQEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `APDREQEN`"]
pub struct APDREQEN_W<'a> {
    w: &'a mut W,
}
impl<'a> APDREQEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `ASYNCTRIG`"]
pub type ASYNCTRIG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ASYNCTRIG`"]
pub struct ASYNCTRIG_W<'a> {
    w: &'a mut W,
}
impl<'a> ASYNCTRIG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 4 (bit 3 is unused in this register).
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `APDREQTRIG`"]
pub type APDREQTRIG_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `APDREQTRIG`"]
pub struct APDREQTRIG_W<'a> {
    w: &'a mut W,
}
impl<'a> APDREQTRIG_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 5.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `DRRDIS`"]
pub type DRRDIS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DRRDIS`"]
pub struct DRRDIS_W<'a> {
    w: &'a mut W,
}
impl<'a> DRRDIS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 6.
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `DN`"]
pub type DN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DN`"]
pub struct DN_W<'a> {
    w: &'a mut W,
}
impl<'a> DN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // `unsafe` presumably because not every raw 8-bit value may be meaningful
    // to the hardware — confirm against the device SVD.
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Bits 8:15.
        self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);
        self.w
    }
}
impl R {
    // Field readers: each masks/shifts its bits out of the register value.
    #[doc = "Bit 0 - PTP Offload Enable"]
    #[inline(always)]
    pub fn ptoen(&self) -> PTOEN_R {
        PTOEN_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Automatic PTP SYNC message Enable"]
    #[inline(always)]
    pub fn asyncen(&self) -> ASYNCEN_R {
        ASYNCEN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Automatic PTP Pdelay_Req message Enable"]
    #[inline(always)]
    pub fn apdreqen(&self) -> APDREQEN_R {
        APDREQEN_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Automatic PTP SYNC message Trigger"]
    #[inline(always)]
    pub fn asynctrig(&self) -> ASYNCTRIG_R {
        ASYNCTRIG_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Automatic PTP Pdelay_Req message Trigger"]
    #[inline(always)]
    pub fn apdreqtrig(&self) -> APDREQTRIG_R {
        APDREQTRIG_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Disable PTO Delay Request/Response response generation"]
    #[inline(always)]
    pub fn drrdis(&self) -> DRRDIS_R {
        DRRDIS_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bits 8:15 - Domain Number"]
    #[inline(always)]
    pub fn dn(&self) -> DN_R {
        DN_R::new(((self.bits >> 8) & 0xff) as u8)
    }
}
impl W {
    // Field writers: each returns the matching write proxy defined above.
    #[doc = "Bit 0 - PTP Offload Enable"]
    #[inline(always)]
    pub fn ptoen(&mut self) -> PTOEN_W {
        PTOEN_W { w: self }
    }
    #[doc = "Bit 1 - Automatic PTP SYNC message Enable"]
    #[inline(always)]
    pub fn asyncen(&mut self) -> ASYNCEN_W {
        ASYNCEN_W { w: self }
    }
    #[doc = "Bit 2 - Automatic PTP Pdelay_Req message Enable"]
    #[inline(always)]
    pub fn apdreqen(&mut self) -> APDREQEN_W {
        APDREQEN_W { w: self }
    }
    #[doc = "Bit 4 - Automatic PTP SYNC message Trigger"]
    #[inline(always)]
    pub fn asynctrig(&mut self) -> ASYNCTRIG_W {
        ASYNCTRIG_W { w: self }
    }
    #[doc = "Bit 5 - Automatic PTP Pdelay_Req message Trigger"]
    #[inline(always)]
    pub fn apdreqtrig(&mut self) -> APDREQTRIG_W {
        APDREQTRIG_W { w: self }
    }
    #[doc = "Bit 6 - Disable PTO Delay Request/Response response generation"]
    #[inline(always)]
    pub fn drrdis(&mut self) -> DRRDIS_W {
        DRRDIS_W { w: self }
    }
    #[doc = "Bits 8:15 - Domain Number"]
    #[inline(always)]
    pub fn dn(&mut self) -> DN_W {
        DN_W { w: self }
    }
}
|
use super::ChunkId;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use crate::beam::reader::{ReadError, Result};
/// BEAM chunk header: a four-byte chunk id followed by the payload size.
pub struct Header {
    pub chunk_id: ChunkId,
    /// Size of the chunk payload in bytes (excludes this header).
    pub data_size: u32,
}
impl Header {
    /// Creates a header for a chunk with the given id and payload size.
    pub fn new(chunk_id: &ChunkId, data_size: u32) -> Self {
        Header {
            chunk_id: *chunk_id,
            data_size,
        }
    }

    /// Reads a chunk header: a 4-byte chunk id followed by a big-endian
    /// 32-bit payload size.
    ///
    /// Alternative Implementations
    /// - [In `org.elixir_lang.bean.chunk.Chunk.from` in IntelliJ Elixir](https://github.com/KronicDeth/intellij-elixir/blob/
    /// 2f5c826040681e258e98c3e2f02b25985cd0766b/src/org/elixir_lang/beam/chunk/Chunk.java#
    /// L36-L40) in Java.
    pub fn decode<R: std::io::Read>(mut reader: R) -> std::io::Result<Self> {
        let mut chunk_id = [0; 4];
        reader.read_exact(&mut chunk_id)?;
        let data_size = reader.read_u32::<BigEndian>()?;
        Ok(Header {
            chunk_id,
            data_size,
        })
    }

    /// Writes the header in the same wire format that `decode` reads.
    pub fn encode<W: std::io::Write>(&self, mut writer: W) -> std::io::Result<()> {
        writer.write_all(&self.chunk_id)?;
        writer.write_u32::<BigEndian>(self.data_size)?;
        Ok(())
    }
}
/// Number of padding bytes needed after `data_size` payload bytes so that the
/// next chunk starts on a 4-byte boundary (0 when already aligned).
///
/// ## Alternative Implementations
/// - [In `org.elixir_lang.bean.chunk.Chunk.from` in IntelliJ Elixir](https://github.com/KronicDeth/intellij-elixir/blob/
/// 2f5c826040681e258e98c3e2f02b25985cd0766b/src/org/elixir_lang/beam/chunk/Chunk.java#L45) in
/// Java.
pub fn padding_size(data_size: u32) -> u32 {
    match data_size % 4 {
        0 => 0,
        remainder => 4 - remainder,
    }
}
/// Verifies that a decoded chunk id matches the expected one, reporting an
/// `UnexpectedChunk` error otherwise.
pub fn check_chunk_id(passed: &ChunkId, expected: &ChunkId) -> Result<()> {
    if passed == expected {
        Ok(())
    } else {
        Err(ReadError::UnexpectedChunk {
            id: *passed,
            expected: *expected,
        })
    }
}
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
// NOTE(review): one JSON request-body serializer per operation, all following
// the same pattern (serialize the input struct into a JSON object). Change the
// Smithy model and regenerate rather than editing these by hand.
pub fn serialize_operation_add_attributes_to_findings(
    input: &crate::input::AddAttributesToFindingsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_add_attributes_to_findings_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_assessment_target(
    input: &crate::input::CreateAssessmentTargetInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_assessment_target_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_assessment_template(
    input: &crate::input::CreateAssessmentTemplateInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_assessment_template_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_exclusions_preview(
    input: &crate::input::CreateExclusionsPreviewInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_exclusions_preview_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_resource_group(
    input: &crate::input::CreateResourceGroupInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_resource_group_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_assessment_run(
    input: &crate::input::DeleteAssessmentRunInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_delete_assessment_run_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_assessment_target(
    input: &crate::input::DeleteAssessmentTargetInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_delete_assessment_target_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_assessment_template(
    input: &crate::input::DeleteAssessmentTemplateInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_delete_assessment_template_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_assessment_runs(
    input: &crate::input::DescribeAssessmentRunsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_assessment_runs_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_assessment_targets(
    input: &crate::input::DescribeAssessmentTargetsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_assessment_targets_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_assessment_templates(
    input: &crate::input::DescribeAssessmentTemplatesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_assessment_templates_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
// This operation's input has no serializable members: the input value is
// ignored and the request body is always the empty JSON object.
pub fn serialize_operation_describe_cross_account_access_role(
    _input: &crate::input::DescribeCrossAccountAccessRoleInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    Ok(smithy_http::body::SdkBody::from("{}"))
}
pub fn serialize_operation_describe_exclusions(
    input: &crate::input::DescribeExclusionsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_exclusions_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_findings(
    input: &crate::input::DescribeFindingsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_findings_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_resource_groups(
    input: &crate::input::DescribeResourceGroupsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_resource_groups_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_rules_packages(
    input: &crate::input::DescribeRulesPackagesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_rules_packages_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_assessment_report(
    input: &crate::input::GetAssessmentReportInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_get_assessment_report_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_exclusions_preview(
    input: &crate::input::GetExclusionsPreviewInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_get_exclusions_preview_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_telemetry_metadata(
    input: &crate::input::GetTelemetryMetadataInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_get_telemetry_metadata_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_assessment_run_agents(
    input: &crate::input::ListAssessmentRunAgentsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_assessment_run_agents_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_assessment_runs(
    input: &crate::input::ListAssessmentRunsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_assessment_runs_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_assessment_targets(
    input: &crate::input::ListAssessmentTargetsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_assessment_targets_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_assessment_templates(
    input: &crate::input::ListAssessmentTemplatesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_assessment_templates_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_event_subscriptions(
    input: &crate::input::ListEventSubscriptionsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_event_subscriptions_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_exclusions(
    input: &crate::input::ListExclusionsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_exclusions_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_findings(
    input: &crate::input::ListFindingsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_findings_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_rules_packages(
    input: &crate::input::ListRulesPackagesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_rules_packages_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_tags_for_resource(
    input: &crate::input::ListTagsForResourceInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_tags_for_resource_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_preview_agents(
    input: &crate::input::PreviewAgentsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_preview_agents_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_register_cross_account_access_role(
    input: &crate::input::RegisterCrossAccountAccessRoleInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_register_cross_account_access_role_input(
        &mut object,
        input,
    );
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_remove_attributes_from_findings(
    input: &crate::input::RemoveAttributesFromFindingsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_remove_attributes_from_findings_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_set_tags_for_resource(
    input: &crate::input::SetTagsForResourceInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_set_tags_for_resource_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_start_assessment_run(
    input: &crate::input::StartAssessmentRunInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_start_assessment_run_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_stop_assessment_run(
    input: &crate::input::StopAssessmentRunInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_stop_assessment_run_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_subscribe_to_event(
    input: &crate::input::SubscribeToEventInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_subscribe_to_event_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_unsubscribe_from_event(
    input: &crate::input::UnsubscribeFromEventInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_unsubscribe_from_event_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_update_assessment_target(
    input: &crate::input::UpdateAssessmentTargetInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_update_assessment_target_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
|
use {
amethyst::input::BindingTypes,
serde::{Deserialize, Serialize},
std::fmt,
};
/// Analog input axes the control scheme exposes.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum AxisBinding {
    Vertical,
    Sideways,
    Forward,
}
/// Displays the axis using its `Debug` representation (the variant name).
impl fmt::Display for AxisBinding {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
/// Discrete button actions the control scheme exposes.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum ActionBinding {
    Jump,
    Crouch,
}
/// Displays the action using its `Debug` representation (the variant name).
impl fmt::Display for ActionBinding {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
/// Marker type wiring our axis/action enums into amethyst's input system.
#[derive(Debug)]
pub struct ControlBindings;
impl BindingTypes for ControlBindings {
    type Axis = AxisBinding;
    type Action = ActionBinding;
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This module contains the bulk of the logic for connecting user applications to a
// vsock driver.
//
// Handling user requests is complicated as there are multiple communication channels
// involved. For example a request to 'connect' will result in sending a message
// to the driver over the single DeviceProxy. If this returns with success then
// eventually a message will come over the single Callbacks stream indicating
// whether the remote accepted or rejected.
//
// Fundamentally then there needs to be mutual exclusion in accessing DeviceProxy,
// and de-multiplexing of incoming messages on the Callbacks stream. There are
// a two high level options for doing this.
// 1. Force a single threaded event driver model. This would mean that additional
// asynchronous executions are never spawned, and any use of await! or otherwise
// blocking with additional futures requires collection futures in future sets
// or having custom polling logic etc. Whilst this is probably the most resource
// efficient it restricts the service to be single threaded forever by its design,
// is harder to reason about as cannot be written very idiomatically with futures
// and is even more complicated to avoid blocking other requests whilst waiting
// on responses from the driver.
// 2. Allow multiple asynchronous executions and use some form of message passing
// and locking to handle DeviceProxy access and sharing access to the Callbacks
// stream. Potentially more resource intensive with unnecessary locking etc,
// but allows for the potential to have actual parallel execution and is much
// simpler to write the logic.
// The chosen option is (2) and the access to DeviceProxy is handled with an Arc<Mutex<State>>,
// and de-multiplexing of the Callbacks is done by registering an event whilst holding
// the mutex, and having a single asynchronous thread that is dedicated to converting
// incoming Callbacks to signaling registered events.
use {
crate::{addr, port},
crossbeam,
failure::{err_msg, Fail},
fidl::endpoints,
fidl_fuchsia_hardware_vsock::{
CallbacksMarker, CallbacksRequest, CallbacksRequestStream, DeviceProxy,
},
fidl_fuchsia_vsock::{
AcceptorProxy, ConnectionRequest, ConnectionRequestStream, ConnectionTransport,
ConnectorRequest, ConnectorRequestStream,
},
fuchsia_async as fasync,
fuchsia_syslog::{fx_log_info, fx_log_warn},
fuchsia_zircon as zx,
futures::{
channel::{mpsc, oneshot},
future, select, Future, FutureExt, Stream, StreamExt, TryFutureExt, TryStreamExt,
},
parking_lot::Mutex,
std::{
collections::HashMap,
ops::Deref,
pin::Pin,
sync::Arc,
task::{Context, Poll},
},
void::Void,
};
/// Kinds of driver callbacks a waiter can be registered for.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum EventType {
    Shutdown,
    VmoComplete,
    Response,
}
/// Key identifying a pending callback wait: the action awaited on an address.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Event {
    action: EventType,
    addr: addr::Vsock,
}
/// Cleanup requests, queued for the next holder of the state lock when the
/// lock cannot be taken at drop time (see `LockedState::deregister`).
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum Deregister {
    Event(Event),
    Listen(u32),
    Port(u32),
}
#[derive(Fail, Debug)]
enum Error {
#[fail(display = "Driver returned failure status {}", _0)]
Driver(#[fail(cause)] zx::Status),
#[fail(display = "All ephemeral ports are allocated")]
OutOfPorts,
#[fail(display = "Addr has already been bound")]
AlreadyBound,
#[fail(display = "Connection refused by remote")]
ConnectionRefused,
#[fail(display = "Error whilst communication with client")]
ClientCommunication(#[fail(cause)] failure::Error),
#[fail(display = "Error whilst communication with client")]
DriverCommunication(#[fail(cause)] failure::Error),
#[fail(display = "Driver reset the connection")]
ConnectionReset,
}
// A canceled oneshot means the callbacks thread dropped the sender (the driver
// sent a RST or a transport reset), so surface it as a connection reset.
impl From<oneshot::Canceled> for Error {
    fn from(_: oneshot::Canceled) -> Error {
        Error::ConnectionReset
    }
}
impl Error {
pub fn into_status(&self) -> zx::Status {
match self {
Error::Driver(status) => *status,
Error::OutOfPorts => zx::Status::NO_RESOURCES,
Error::AlreadyBound => zx::Status::ALREADY_BOUND,
Error::ConnectionRefused => zx::Status::UNAVAILABLE,
Error::ClientCommunication(err) | Error::DriverCommunication(err) => *err
.downcast_ref::<zx::Status>()
.unwrap_or(&zx::Status::INTERNAL),
Error::ConnectionReset => zx::Status::PEER_CLOSED,
}
}
pub fn is_comm_failure(&self) -> bool {
match self {
Error::ClientCommunication(_) | Error::DriverCommunication(_) => true,
_ => false,
}
}
}
fn map_driver_result(result: Result<i32, fidl::Error>) -> Result<(), Error> {
result
.map_err(|x| Error::DriverCommunication(x.into()))
.and_then(|x| zx::Status::ok(x).map_err(Error::Driver))
}
/// Reports `result` back to a client via `send`, then propagates any original
/// error to our caller as well.
fn send_result<T>(
    result: Result<T, Error>, send: impl FnOnce(i32, Option<T>) -> Result<(), fidl::Error>,
) -> Result<(), Error> {
    match result {
        Ok(value) => send(zx::Status::OK.into_raw(), Some(value))
            .map_err(|e| Error::ClientCommunication(e.into())),
        Err(failure) => {
            // Tell the client about the failure, then surface it to the caller too.
            send(failure.into_status().into_raw(), None)
                .map_err(|e| Error::ClientCommunication(e.into()))?;
            Err(failure)
        }
    }
}
// Mutable service state: the driver channel plus all registered waiters.
struct State {
    device: DeviceProxy,
    // Pending one-shot waits, keyed by (action, address); signaled by do_callback.
    events: HashMap<Event, oneshot::Sender<()>>,
    used_ports: port::Tracker,
    // Per-port senders feeding incoming connection requests to ListenStreams.
    listens: HashMap<u32, mpsc::UnboundedSender<addr::Vsock>>,
}
// `State` behind a mutex, plus a channel that queues deregistrations raised
// while the lock was contended (drained by the next lock holder).
pub struct LockedState {
    inner: Mutex<State>,
    deregister_tx: crossbeam::channel::Sender<Deregister>,
    deregister_rx: crossbeam::channel::Receiver<Deregister>,
}
/// Cheaply clonable handle to the shared vsock service state.
#[derive(Clone)]
pub struct Vsock {
    inner: Arc<LockedState>,
}
impl Vsock {
    /// Creates a new vsock service connected to the given `DeviceProxy`
    ///
    /// The creation is asynchronous due to need to invoke methods on the given `DeviceProxy`. On
    /// success a pair of `Self, impl Future<Result<_, Error>>` is returned. The `impl Future` is
    /// a future that is listening for and processing messages from the `device`. This future needs
    /// to be evaluated for other methods on the returned `Self` to complete successfully. Unless
    /// a fatal error occurs the future will never yield a result and will execute infinitely.
    pub async fn new(
        device: DeviceProxy,
    ) -> Result<(Self, impl Future<Output = Result<Void, failure::Error>>), failure::Error> {
        let (callbacks_client, callbacks_server) =
            endpoints::create_endpoints::<CallbacksMarker>()?;
        let server_stream = callbacks_server.into_stream()?;
        // Hand the driver its callback channel before constructing any state.
        device
            .start(callbacks_client)
            .map(|x| map_driver_result(x))
            .err_into::<failure::Error>().await?;
        let service = State {
            device,
            events: HashMap::new(),
            used_ports: port::Tracker::new(),
            listens: HashMap::new(),
        };
        let (tx, rx) = crossbeam::channel::unbounded();
        let service = LockedState {
            inner: Mutex::new(service),
            deregister_tx: tx,
            deregister_rx: rx,
        };
        let service = Vsock {
            inner: Arc::new(service),
        };
        let callback_loop = service.clone().run_callbacks(server_stream);
        Ok((service, callback_loop))
    }
    // Dispatches driver callbacks to `State::do_callback` until the stream ends,
    // which is treated as a fatal driver disconnect.
    async fn run_callbacks(
        self, mut callbacks: CallbacksRequestStream,
    ) -> Result<Void, failure::Error> {
        while let Some(Ok(cb)) = callbacks.next().await {
            self.lock().do_callback(cb);
        }
        // The only way to get here is if our callbacks stream ended, since our notifications
        // cannot disconnect as we are holding a reference to them in |service|.
        Err(err_msg("Driver disconnected"))
    }
    // Spawns a new asynchronous thread for listening for incoming connections on a port.
    fn start_listener(
        &self, acceptor: fidl::endpoints::ClientEnd<fidl_fuchsia_vsock::AcceptorMarker>,
        local_port: u32,
    ) -> Result<(), Error> {
        let acceptor = acceptor
            .into_proxy()
            .map_err(|x| Error::ClientCommunication(x.into()))?;
        let stream = self.listen_port(local_port)?;
        fasync::spawn(
            self.clone()
                .run_connection_listener(stream, acceptor)
                .unwrap_or_else(|err| fx_log_warn!("Error {} running connection listener", err)),
        );
        Ok(())
    }
    // Handles a single incoming client request.
    async fn handle_request(&self, request: ConnectorRequest) -> Result<(), Error> {
        match request {
            ConnectorRequest::Connect {
                remote_cid,
                remote_port,
                con,
                responder,
            } => send_result(
                self.make_connection(remote_cid, remote_port, con).await,
                |r, v| responder.send(r, v.unwrap_or(0)),
            ),
            ConnectorRequest::Listen {
                local_port,
                acceptor,
                responder,
            } => send_result(self.start_listener(acceptor, local_port), |r, _| {
                responder.send(r)
            }),
        }
    }
    /// Evaluates messages on a `ConnectorRequestStream` until completion or error
    ///
    /// Takes ownership of a `RequestStream` that is most likely created from a `ServicesServer`
    /// and processes any incoming requests on it.
    pub async fn run_client_connection(
        self, request: ConnectorRequestStream,
    ) -> Result<(), failure::Error> {
        let self_ref = &self;
        let fut = request
            .map_err(|err| Error::ClientCommunication(err.into()))
            // TODO: The parallel limit of 4 is currently invented with no basis and should
            // made something more sensible.
            .try_for_each_concurrent(4, |request| {
                self_ref.handle_request(request)
                    // Communication failures abort the whole stream; other errors
                    // have already been reported to the client and are swallowed.
                    .or_else(|e| future::ready(if e.is_comm_failure() { Err(e) } else { Ok(()) }))
            })
            .err_into();
        fut.await
    }
    // Allocates an ephemeral port wrapped in an RAII guard that frees it on drop,
    // or None if the ephemeral range is exhausted.
    fn alloc_ephemeral_port(self) -> Option<AllocatedPort> {
        let p = self.lock().used_ports.allocate();
        p.map(|p| AllocatedPort {
            port: p,
            service: self,
        })
    }
    // Creates a `ListenStream` that will retrieve raw incoming connection requests.
    // These requests come from the device via the run_callbacks future.
    fn listen_port(&self, port: u32) -> Result<ListenStream, Error> {
        if port::is_ephemeral(port) {
            fx_log_info!("Rejecting request to listen on ephemeral port {}", port);
            return Err(Error::ConnectionRefused);
        }
        match self.lock().listens.entry(port) {
            std::collections::hash_map::Entry::Vacant(entry) => {
                let (sender, receiver) = mpsc::unbounded();
                let listen = ListenStream {
                    local_port: port,
                    service: self.clone(),
                    stream: receiver,
                };
                entry.insert(sender);
                Ok(listen)
            }
            _ => {
                fx_log_info!("Attempt to listen on already bound port {}", port);
                Err(Error::AlreadyBound)
            }
        }
    }
    // Helper for inserting an event into the events hashmap
    fn register_event(&self, event: Event) -> Result<OneshotEvent, Error> {
        match self.lock().events.entry(event) {
            std::collections::hash_map::Entry::Vacant(entry) => {
                let (sender, receiver) = oneshot::channel();
                let event = OneshotEvent {
                    event: Some(entry.key().clone()),
                    service: self.clone(),
                    oneshot: receiver,
                };
                entry.insert(sender);
                Ok(event)
            }
            _ => Err(Error::AlreadyBound),
        }
    }
    // These helpers are wrappers around sending a message to the device, and creating events that
    // will be signaled by the run_callbacks future when it receives a message from the device.
    fn send_request(
        &self, addr: &addr::Vsock, data: zx::Socket,
    ) -> Result<impl Future<Output = Result<(OneshotEvent, OneshotEvent), Error>>, Error> {
        let shutdown_callback = self.register_event(Event {
            action: EventType::Shutdown,
            addr: addr.clone(),
        })?;
        let response_callback = self.register_event(Event {
            action: EventType::Response,
            addr: addr.clone(),
        })?;
        // Obtain the driver future while holding the lock, but await it outside.
        let send_request_fut = self.lock().device.send_request(&mut addr.clone(), data);
        Ok(async move {
            map_driver_result(send_request_fut.await)?;
            Ok((shutdown_callback, response_callback))
        })
    }
    fn send_response(
        &self, addr: &addr::Vsock, data: zx::Socket,
    ) -> Result<impl Future<Output = Result<OneshotEvent, Error>>, Error> {
        let shutdown_callback = self.register_event(Event {
            action: EventType::Shutdown,
            addr: addr.clone(),
        })?;
        let send_request_fut = self.lock().device.send_response(&mut addr.clone(), data);
        Ok(async move {
            map_driver_result(send_request_fut.await)?;
            Ok(shutdown_callback)
        })
    }
    fn send_vmo(
        &self, addr: &addr::Vsock, vmo: zx::Vmo, off: u64, len: u64,
    ) -> Result<impl Future<Output = Result<OneshotEvent, Error>>, Error> {
        let vmo_callback = self.register_event(Event {
            action: EventType::VmoComplete,
            addr: addr.clone(),
        })?;
        let send_request_fut = self
            .lock()
            .device
            .send_vmo(&mut addr.clone(), vmo, off, len);
        Ok(async move {
            map_driver_result(send_request_fut.await)?;
            Ok(vmo_callback)
        })
    }
    // Runs a connected socket until completion. Processes any VMO sends and shutdown events.
    async fn run_connection<ShutdownFut>(
        self, addr: addr::Vsock, shutdown_event: ShutdownFut,
        mut requests: ConnectionRequestStream, _port: Option<AllocatedPort>,
    ) -> Result<(), Error>
    where
        ShutdownFut:
            Future<Output = Result<(), futures::channel::oneshot::Canceled>> + std::marker::Unpin,
    {
        // This extremely awkward function definition is to temporarily work around select! not being
        // nestable. Once this is fixed then this should be re-inlined into the single call site below.
        // Until then don't look closely at this.
        async fn wait_vmo_complete<ShutdownFut>(
            mut shutdown_event: &mut futures::future::Fuse<ShutdownFut>, cb: OneshotEvent,
        ) -> Result<zx::Status, Result<(), Error>>
        where
            ShutdownFut: Future<Output = Result<(), futures::channel::oneshot::Canceled>>
                + std::marker::Unpin,
        {
            select! {
                shutdown_event = shutdown_event => {Err(shutdown_event.map_err(|e| e.into()))},
                cb = cb.fuse() => match cb {
                    Ok(_) => Ok(zx::Status::OK),
                    Err(_) => Ok(Error::ConnectionReset.into_status()),
                },
            }
        }
        let mut shutdown_event = shutdown_event.fuse();
        loop {
            select! {
                shutdown_event = shutdown_event => {
                    // Remote/driver initiated shutdown: acknowledge with a RST.
                    let fut = future::ready(shutdown_event)
                        .err_into()
                        .and_then(|()| self.lock().send_rst(&addr));
                    return fut.await;
                },
                request = requests.next() => {
                    match request {
                        Some(Ok(ConnectionRequest::Shutdown{control_handle: _control_handle})) => {
                            let fut =
                                self.lock().send_shutdown(&addr)
                                    // Wait to either receive the RST for the client or to be
                                    // shut down for some other reason
                                    .and_then(|()| shutdown_event.err_into());
                            return fut.await;
                        },
                        Some(Ok(ConnectionRequest::SendVmo{vmo, off, len, responder})) => {
                            // Acquire the potential future from send_vmo in a temporary so we
                            // can await! on it without holding the lock.
                            let result = self.send_vmo(&addr, vmo, off, len);
                            // Equivalent of and_then to expand the Ok future case.
                            let result = match result {
                                Ok(fut) => fut.await,
                                Err(e) => Err(e),
                            };
                            let status = match result {
                                Ok(cb) => {
                                    match wait_vmo_complete(&mut shutdown_event, cb).await {
                                        Err(e) => return e,
                                        Ok(o) => o,
                                    }
                                },
                                Err(e) => e.into_status(),
                            };
                            let _ = responder.send(status.into_raw());
                        },
                        // Generate a RST for a non graceful client disconnect.
                        Some(Err(e)) => {
                            let fut = self.lock().send_rst(&addr);
                            fut.await?;
                            return Err(Error::ClientCommunication(e.into()));
                        },
                        None => {
                            let fut = self.lock().send_rst(&addr);
                            return fut.await;
                        },
                    }
                },
            }
        }
    }
    // Waits for incoming connections on the given `ListenStream`, checks with the
    // user via the `acceptor` if it should be accepted, and if so spawns a new
    // asynchronous thread to run the connection.
    async fn run_connection_listener(
        self, incoming: ListenStream, acceptor: AcceptorProxy,
    ) -> Result<(), Error> {
        incoming
            .then(|addr| acceptor
                .accept(&mut *addr.clone())
                .map_ok(|maybe_con| (maybe_con, addr)))
            .map_err(|e| Error::ClientCommunication(e.into()))
            .try_for_each(|(maybe_con, addr)| async {
                match maybe_con {
                    Some(con) => {
                        let data = con.data;
                        let con = con
                            .con
                            .into_stream()
                            .map_err(|x| Error::ClientCommunication(x.into()))?;
                        let shutdown_event = self.send_response(&addr, data)?.await?;
                        fasync::spawn(
                            self.clone()
                                .run_connection(addr, shutdown_event, con, None)
                                .map_err(|err| {
                                    fx_log_warn!("Error {} whilst running connection", err)
                                })
                                .map(|_| ()),
                        );
                        Ok(())
                    }
                    None => {
                        // The acceptor declined; tell the remote with a RST.
                        let fut = self.lock().send_rst(&addr);
                        fut.await
                    }
                }
            }).await
    }
    // Attempts to connect to the given remote cid/port. If successful spawns a new
    // asynchronous thread to run the connection until completion.
    async fn make_connection(
        &self, remote_cid: u32, remote_port: u32, con: ConnectionTransport,
    ) -> Result<u32, Error> {
        let data = con.data;
        let con = con
            .con
            .into_stream()
            .map_err(|x| Error::ClientCommunication(x.into()))?;
        let port = self
            .clone()
            .alloc_ephemeral_port()
            .ok_or(Error::OutOfPorts)?;
        let port_value = *port;
        let addr = addr::Vsock::new(port_value, remote_port, remote_cid);
        let (shutdown_event, response_event) = self.send_request(&addr, data)?.await?;
        let mut shutdown_event = shutdown_event.fuse();
        select! {
            _shutdown_event = shutdown_event => {
                // Getting a RST here just indicates a rejection and
                // not any underlying issues.
                return Err(Error::ConnectionRefused);
            },
            response_event = response_event.fuse() => response_event?,
        }
        fasync::spawn(
            self.clone()
                .run_connection(addr, shutdown_event, con, Some(port))
                .unwrap_or_else(|err| fx_log_warn!("Error {} whilst running connection", err)),
        );
        Ok(port_value)
    }
}
// Lets `Vsock` call `LockedState` methods (lock/try_lock/deregister) directly.
impl Deref for Vsock {
    type Target = LockedState;
    fn deref(&self) -> &LockedState {
        &self.inner
    }
}
impl LockedState {
    /// Acquires the lock on `inner`, first applying any queued deregistrations.
    fn lock(&self) -> parking_lot::MutexGuard<State> {
        let mut guard = self.inner.lock();
        for event in self.deregister_rx.try_iter() {
            guard.deregister(event);
        }
        guard
    }
    /// Tries to acquire the lock on `inner`; on success applies any queued
    /// deregistrations before handing out the guard.
    fn try_lock(&self) -> Option<parking_lot::MutexGuard<State>> {
        self.inner.try_lock().map(|mut guard| {
            for event in self.deregister_rx.try_iter() {
                guard.deregister(event);
            }
            guard
        })
    }
    /// Deregisters `event` immediately if the lock is free, otherwise queues it
    /// for the next lock holder to process.
    fn deregister(&self, event: Deregister) {
        match self.try_lock() {
            Some(mut service) => service.deregister(event),
            None => {
                // Should not fail as we expect to be using an unbounded channel
                let _ = self.deregister_tx.try_send(event);
            }
        }
    }
}
impl State {
    // Remove the `event` from the `events` `HashMap`
    fn deregister(&mut self, event: Deregister) {
        match event {
            Deregister::Event(e) => {
                self.events.remove(&e);
            }
            Deregister::Listen(p) => {
                self.listens.remove(&p);
            }
            Deregister::Port(p) => {
                self.used_ports.free(p);
            }
        }
    }
    // Wrappers around device functions with nicer type signatures
    fn send_rst(&mut self, addr: &addr::Vsock) -> impl Future<Output = Result<(), Error>> {
        self.device
            .send_rst(&mut addr.clone())
            .map(|x| map_driver_result(x))
    }
    fn send_shutdown(&mut self, addr: &addr::Vsock) -> impl Future<Output = Result<(), Error>> {
        self.device
            .send_shutdown(&mut addr.clone())
            .map(|x| map_driver_result(x))
    }
    // Processes a single callback from the `device`. This is intended to be used by
    // `Vsock::run_callbacks`
    fn do_callback(&mut self, callback: CallbacksRequest) {
        match callback {
            CallbacksRequest::Response {
                addr,
                control_handle: _control_handle,
            } => {
                // Signal whoever registered a Response wait for this address.
                self.events
                    .remove(&Event {
                        action: EventType::Response,
                        addr: addr::Vsock::from(addr),
                    })
                    .map(|channel| channel.send(()));
            }
            CallbacksRequest::Rst {
                addr,
                control_handle: _control_handle,
            } => {
                // Dropping the sender without sending cancels the receiver,
                // which waiters translate into a connection reset.
                self.events.remove(&Event {
                    action: EventType::Shutdown,
                    addr: addr::Vsock::from(addr),
                });
            }
            CallbacksRequest::SendVmoComplete {
                addr,
                control_handle: _control_handle,
            } => {
                self.events
                    .remove(&Event {
                        action: EventType::VmoComplete,
                        addr: addr::Vsock::from(addr),
                    })
                    .map(|channel| channel.send(()));
            }
            CallbacksRequest::Request {
                addr,
                control_handle: _control_handle,
            } => {
                let addr = addr::Vsock::from(addr);
                // Forward the incoming connection to the port's listener, or
                // RST it if nobody is listening.
                match self.listens.get(&addr.local_port) {
                    Some(sender) => {
                        let _ = sender.unbounded_send(addr.clone());
                    }
                    None => {
                        fx_log_warn!("Request on port {} with no listener", addr.local_port);
                        fasync::spawn(self.send_rst(&addr).map(|_| ()));
                    }
                }
            }
            CallbacksRequest::Shutdown {
                addr,
                control_handle: _control_handle,
            } => {
                self.events
                    .remove(&Event {
                        action: EventType::Shutdown,
                        addr: addr::Vsock::from(addr),
                    })
                    .map(|channel| channel.send(()));
            }
            CallbacksRequest::TransportReset {
                new_cid: _new_cid,
                responder,
            } => {
                // A transport reset invalidates every in-flight wait.
                self.events.clear();
                let _ = responder.send();
            }
        }
    }
}
// RAII guard for an ephemeral port: frees the port back to the tracker on drop.
struct AllocatedPort {
    service: Vsock,
    port: u32,
}
impl Deref for AllocatedPort {
    type Target = u32;
    fn deref(&self) -> &u32 {
        &self.port
    }
}
impl Drop for AllocatedPort {
    fn drop(&mut self) {
        // May be deferred if the state lock is contended; see LockedState::deregister.
        self.service.deregister(Deregister::Port(self.port));
    }
}
// A registered wait for a single driver callback. Deregisters itself on drop
// unless the callback has already fired.
struct OneshotEvent {
    event: Option<Event>,
    service: Vsock,
    oneshot: oneshot::Receiver<()>,
}
impl Drop for OneshotEvent {
    fn drop(&mut self) {
        self.event
            .take()
            .map(|e| self.service.deregister(Deregister::Event(e)));
    }
}
impl Future for OneshotEvent {
    type Output = <oneshot::Receiver<()> as Future>::Output;
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        match self.oneshot.poll_unpin(cx) {
            Poll::Ready(x) => {
                // Take the event so that we don't try to deregister it later,
                // as by having sent the message we just received the callbacks
                // thread will already have removed it
                self.event.take();
                Poll::Ready(x)
            }
            p => p,
        }
    }
}
// A stream of incoming connection requests for one bound port. Unbinds the
// port (removes the listens entry) on drop.
struct ListenStream {
    local_port: u32,
    service: Vsock,
    stream: mpsc::UnboundedReceiver<addr::Vsock>,
}
impl Drop for ListenStream {
    fn drop(&mut self) {
        self.service.deregister(Deregister::Listen(self.local_port));
    }
}
impl Stream for ListenStream {
    type Item = addr::Vsock;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.stream.poll_next_unpin(cx)
    }
}
|
use super::RuntimeErr;
use crate::runtime::Ctx;
/// A unit of work the runtime can execute. `run` performs the work; `back`
/// presumably performs its reverse/undo step — TODO confirm against the runtime.
pub trait Runnable<'a> {
    // Both hooks default to successful no-ops so implementors override only
    // what they need.
    fn back(&mut self, _ctx: &mut dyn Ctx<'a>) -> Result<(), RuntimeErr> {
        Ok(())
    }
    fn run(&mut self, _ctx: &mut dyn Ctx<'a>) -> Result<(), RuntimeErr> {
        Ok(())
    }
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -C codegen-units=8 -Z thinlto
// ignore-windows
#![feature(linkage)]
pub mod foo {
    // Weak, unmangled definition: a strong definition of FOO elsewhere may
    // replace this one at link time.
    #[linkage = "weak"]
    #[no_mangle]
    pub extern "C" fn FOO() -> i32 {
        0
    }
}
mod bar {
    // Resolved by the linker to whichever FOO definition wins.
    extern "C" {
        fn FOO() -> i32;
    }
    pub fn bar() -> i32 {
        unsafe { FOO() }
    }
}
fn main() {
    // Force the cross-codegen-unit call so the weak symbol must be resolved
    // under the thinlto compile flags above.
    bar::bar();
}
|
use flexi_logger::{style, DeferredNow, LogSpecBuilder, LogTarget, Logger, ReconfigurationHandle};
use log::{LevelFilter, Record};
use std::error::Error;
use super::debug_view::DebugView;
use super::Conf;
/// Configures the global logger from `conf`: only the `memu` module logs, at
/// the configured level, optionally routed into the debug view's writer. The
/// started logger's reconfiguration handle is handed to the debug view.
pub fn setup(conf: &Conf, debug_view: &mut DebugView) -> Result<(), Box<dyn Error>> {
    let mut spec = LogSpecBuilder::new();
    spec.default(LevelFilter::Off).module("memu", conf.log_level);
    let mut logger = Logger::with(spec.build())
        .format_for_stderr(padded_colored_format)
        .format_for_writer(padded_plain_format);
    // Redirect log output to the debug view if it's enabled.
    if let Some(writer) = debug_view.log_writer() {
        logger = logger.log_target(LogTarget::Writer(writer));
    }
    debug_view.log_handle(logger.start()?);
    Ok(())
}
pub fn disable(handle: &mut ReconfigurationHandle) {
let mut builder = LogSpecBuilder::new();
builder.module("memu", LevelFilter::Error);
handle.push_temp_spec(builder.build());
}
/// Restores the logging level pushed aside by `disable`.
pub fn enable(handle: &mut ReconfigurationHandle) {
    handle.pop_temp_spec();
}
/// Formats a record for stderr: padded level and module columns, with the
/// level and message styled according to the record's level.
fn padded_colored_format(
    w: &mut dyn std::io::Write,
    _now: &mut DeferredNow,
    record: &Record,
) -> Result<(), std::io::Error> {
    let level = record.level();
    let module = record.module_path().unwrap_or("<unnamed>");
    write!(
        w,
        "{:<5} [{:<25}] {}",
        style(level, level),
        module,
        style(level, record.args())
    )
}
/// Formats a record for plain writers: same padded columns as the colored
/// variant, without any styling.
fn padded_plain_format(
    w: &mut dyn std::io::Write,
    _now: &mut DeferredNow,
    record: &Record,
) -> Result<(), std::io::Error> {
    let module = record.module_path().unwrap_or("<unnamed>");
    write!(w, "{:<5} [{:<25}] {}", record.level(), module, record.args())
}
|
#[cfg(not(feature = "high-dpi"))]
#[deprecated(note = "Specifying the default process DPI awareness via API is not recommended. Use the '<dpiAware>true</dpiAware>' setting in the application manifest. https://docs.microsoft.com/ru-ru/windows/win32/hidpi/setting-the-default-dpi-awareness-for-a-process")]
pub unsafe fn set_dpi_awareness() {
    // No-op: without the "high-dpi" feature the process is left DPI unaware.
}
#[cfg(feature = "high-dpi")]
#[deprecated(note = "Specifying the default process DPI awareness via API is not recommended. Use the '<dpiAware>true</dpiAware>' setting in the application manifest. https://docs.microsoft.com/ru-ru/windows/win32/hidpi/setting-the-default-dpi-awareness-for-a-process")]
pub unsafe fn set_dpi_awareness() {
    use winapi::um::winuser::SetProcessDPIAware;
    // Opt the process into system DPI awareness via the legacy Win32 API.
    SetProcessDPIAware();
}
#[cfg(not(feature = "high-dpi"))]
/// Ratio of physical to logical pixels. Without the "high-dpi" feature the
/// process is not DPI aware, so the two coincide and the factor is always 1.
pub fn scale_factor() -> f64 {
    1.0
}
#[cfg(feature = "high-dpi")]
/// Ratio of the screen's reported DPI to the Windows default screen DPI.
pub fn scale_factor() -> f64 {
    use winapi::um::winuser::USER_DEFAULT_SCREEN_DPI;
    let dpi = unsafe { dpi() };
    f64::from(dpi) / f64::from(USER_DEFAULT_SCREEN_DPI)
}
#[cfg(not(feature = "high-dpi"))]
/// Identity conversion: without DPI awareness logical == physical pixels.
pub unsafe fn logical_to_physical(x: i32, y: i32) -> (i32, i32) {
    (x, y)
}
#[cfg(feature = "high-dpi")]
/// Scales logical coordinates up by the screen DPI, rounding; falls back to
/// the unscaled value if the multiply/divide overflows.
pub unsafe fn logical_to_physical(x: i32, y: i32) -> (i32, i32) {
    use muldiv::MulDiv;
    use winapi::um::winuser::USER_DEFAULT_SCREEN_DPI;
    let dpi = dpi();
    let x = x.mul_div_round(dpi, USER_DEFAULT_SCREEN_DPI).unwrap_or(x);
    let y = y.mul_div_round(dpi, USER_DEFAULT_SCREEN_DPI).unwrap_or(y);
    (x, y)
}
#[cfg(not(feature = "high-dpi"))]
/// Identity conversion: without DPI awareness physical == logical pixels.
pub unsafe fn physical_to_logical(x: i32, y: i32) -> (i32, i32) {
    (x, y)
}
#[cfg(feature = "high-dpi")]
/// Scales physical coordinates down by the screen DPI (inverse of
/// `logical_to_physical`), rounding; falls back to the unscaled value on overflow.
pub unsafe fn physical_to_logical(x: i32, y: i32) -> (i32, i32) {
    use muldiv::MulDiv;
    use winapi::um::winuser::USER_DEFAULT_SCREEN_DPI;
    let dpi = dpi();
    let x = x.mul_div_round(USER_DEFAULT_SCREEN_DPI, dpi).unwrap_or(x);
    let y = y.mul_div_round(USER_DEFAULT_SCREEN_DPI, dpi).unwrap_or(y);
    (x, y)
}
/// Queries the horizontal DPI (LOGPIXELSX) of the screen device context.
pub unsafe fn dpi() -> i32 {
    use winapi::um::winuser::{GetDC, ReleaseDC};
    use winapi::um::wingdi::GetDeviceCaps;
    use winapi::um::wingdi::LOGPIXELSX;
    let screen = GetDC(std::ptr::null_mut());
    let dpi = GetDeviceCaps(screen, LOGPIXELSX);
    // Bug fix: a DC obtained with GetDC must be released with ReleaseDC, or
    // the common device context leaks on every call.
    ReleaseDC(std::ptr::null_mut(), screen);
    dpi
}
|
/// Spawn-position generators for the seven tetromino shapes. Every piece
/// enters centred horizontally, with some cells above the visible grid
/// (negative y).
pub(crate) mod body_generators {
    use crate::tetromino::GRID_SIZE;
    /// I piece: vertical bar of four cells.
    pub(crate) fn generate_i() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre, -2), (centre, -1), (centre, 0), (centre, 1)]
    }
    /// L piece.
    pub(crate) fn generate_l() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre, -1), (centre, 0), (centre, 1), (centre - 1, 1)]
    }
    /// J piece.
    pub(crate) fn generate_j() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre, -1), (centre, 0), (centre, 1), (centre + 1, 1)]
    }
    /// O piece: 2x2 square.
    pub(crate) fn generate_o() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre, -1), (centre - 1, -1), (centre, 0), (centre - 1, 0)]
    }
    /// S piece.
    pub(crate) fn generate_s() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre + 1, -1), (centre, -1), (centre, 0), (centre - 1, 0)]
    }
    /// T piece.
    pub(crate) fn generate_t() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre + 1, -1), (centre, -1), (centre, 0), (centre - 1, -1)]
    }
    /// Z piece.
    pub(crate) fn generate_z() -> Vec<(i16, i16)> {
        let centre = GRID_SIZE.0 / 2;
        vec![(centre - 1, -1), (centre, -1), (centre, 0), (centre + 1, 0)]
    }
}
|
use cgmath::{Vector3, Matrix4, vec3, Deg};
use crate::entities::camera::Camera;
/// Builds a model matrix that applies, in order, scale, rotations about the
/// x, y and z axes (`rx`/`ry`/`rz` in degrees), then `translation`.
pub fn create_transformation_matrix(translation: &Vector3<f32>, rx: f32, ry: f32,
                                    rz: f32, scale: f32) -> Matrix4<f32> {
    let mut matrix = Matrix4::<f32>::from_translation(*translation);
    matrix = matrix * Matrix4::<f32>::from_axis_angle(vec3(1.0, 0.0, 0.0), Deg(rx));
    matrix = matrix * Matrix4::<f32>::from_axis_angle(vec3(0.0, 1.0, 0.0), Deg(ry));
    // Bug fix: `rz` must rotate about the z axis (0, 0, 1). The previous axis
    // (1, 0, 1) was neither the z axis nor unit length, and `from_axis_angle`
    // requires a normalized axis.
    matrix = matrix * Matrix4::<f32>::from_axis_angle(vec3(0.0, 0.0, 1.0), Deg(rz));
    matrix * Matrix4::<f32>::from_scale(scale)
}
/// Builds the camera view matrix: rotate by pitch (about x) and yaw (about y),
/// then translate the world by the negated camera position.
pub fn create_view_matrix(camera: &Camera) -> Matrix4<f32> {
    let pitch = Matrix4::<f32>::from_axis_angle(vec3(1.0, 0.0, 0.0), Deg(*camera.get_pitch()));
    let yaw = Matrix4::<f32>::from_axis_angle(vec3(0.0, 1.0, 0.0), Deg(*camera.get_yaw()));
    let position = camera.get_position();
    let inverse_translation =
        Matrix4::<f32>::from_translation(vec3(-position.x, -position.y, -position.z));
    pitch * yaw * inverse_translation
}
|
use cosmwasm_std::StdError;
use cw721_base::ContractError as CW721ContractError;
use thiserror::Error;
/// This overrides the ContractError enum defined in cw721-base
#[derive(Error, Debug, PartialEq)]
pub enum ContractError {
    #[error("{0}")]
    Std(#[from] StdError),
    // Variants mirrored from cw721-base so its errors convert losslessly.
    #[error("Unauthorized")]
    Unauthorized {},
    #[error("Coordinates already claimed")]
    Claimed {},
    #[error("Cannot set approval that is already expired")]
    Expired {},
    // Variants below are specific to the xyz contract.
    #[error("xyz token supply has been exhausted")]
    SupplyExhausted {},
    #[error("Per-wallet token allotment exceeded")]
    WalletLimit {},
    #[error("Move target out-of-bounds or already occupied")]
    InvalidMoveTarget {},
    #[error("Move already in progress")]
    MoveInProgress {},
}
// Maps every cw721-base error onto its like-named local variant so `?` works
// across calls into the base contract.
impl From<CW721ContractError> for ContractError {
    fn from(msg: CW721ContractError) -> ContractError {
        match msg {
            CW721ContractError::Unauthorized {} => ContractError::Unauthorized {},
            CW721ContractError::Claimed {} => ContractError::Claimed {},
            CW721ContractError::Expired {} => ContractError::Expired {},
            CW721ContractError::Std(e) => ContractError::Std(e),
        }
    }
}
|
use aoc2019::aoc_input::get_input;
use aoc2019::intcode::*;
/// AoC 2019 day 19: run the beam program for every point in the 50x50 grid
/// and sum its outputs, counting the locations reported as pulled.
fn main() {
    let input = get_input(19);
    let tape = parse_intcode_program(&input);
    let mut pulled_locations = 0;
    for y in 0..50 {
        for x in 0..50 {
            // A fresh machine per coordinate: the program is run once per query.
            let mut machine = IntcodeMachine::new(tape.clone());
            machine.input.borrow_mut().extend(&[x, y]);
            machine.run().unwrap();
            // Summing the single output accumulates the pulled-location count.
            pulled_locations += machine.output.borrow_mut().pop_front().unwrap();
        }
    }
    println!("Pulled locations: {}", pulled_locations);
}
|
// misc_instructions.rs for BIT instruction, NOP and more
use super::flags::*;
use crate::mem_map;
use crate::memory::{RAM, *};
/// BIT zero page: ANDs the accumulator with the value at `operand`, setting Z
/// from the result and copying bits 6/7 of the memory value into V/N.
/// Takes 3 cycles and advances the PC by 2.
pub fn bittest_zero_page(
    pc_reg: &mut u16,
    accumulator: u8,
    operand: u8,
    mem: &mut RAM,
    status_flags: &mut u8,
    cycles: &mut u8,
) {
    // Set or clear a single status bit.
    fn put(flags: &mut u8, bit: u8, on: bool) {
        if on {
            *flags |= bit;
        } else {
            *flags &= !bit;
        }
    }
    let value = mem.read_mem_value(operand as u16);
    put(status_flags, ZERO_BIT, accumulator & value == 0);
    put(status_flags, OVERFLOW_BIT, value & 0x40 != 0);
    put(status_flags, NEGATIVE_BIT, value & 0x80 != 0);
    *cycles = 3;
    *pc_reg += 2;
}
/// BIT absolute: same flag behaviour as `bittest_zero_page` but with a full
/// 16-bit address. Takes 4 cycles and advances the PC by 3.
pub fn bittest_absolute(
    pc_reg: &mut u16,
    accumulator: u8,
    operand: u16,
    mem: &mut RAM,
    status_flags: &mut u8,
    cycles: &mut u8,
) {
    // Set or clear a single status bit.
    fn put(flags: &mut u8, bit: u8, on: bool) {
        if on {
            *flags |= bit;
        } else {
            *flags &= !bit;
        }
    }
    let value = mem.read_mem_value(operand);
    put(status_flags, ZERO_BIT, accumulator & value == 0);
    put(status_flags, OVERFLOW_BIT, value & 0x40 != 0);
    put(status_flags, NEGATIVE_BIT, value & 0x80 != 0);
    *cycles = 4;
    *pc_reg += 3;
}
/// NOP: consumes one byte and two cycles without touching any other state.
pub fn NOP(pc_reg: &mut u16, cycles: &mut u8) {
    *cycles = 2;
    *pc_reg += 1;
}
/// BRK: pushes the return address and status onto the stack, loads the PC from
/// the IRQ/BRK vector at $FFFE, and sets the break flag. Takes 7 cycles.
pub fn break_force_interrupt(
    pc_reg: &mut u16,
    status: &mut u8,
    stack_ptr: &mut u8,
    test_ram: &mut RAM,
    cycles: &mut u8,
) {
    *pc_reg += 1;
    *cycles = 7;
    // NOTE(review): a hardware 6502 BRK pushes PC+2 (skipping a padding byte)
    // and pushes the status with the B flag already set; here PC+1 is pushed
    // and B is set only on the live status afterwards — confirm this is intended.
    test_ram.push_address_on_stack(stack_ptr, *pc_reg);
    test_ram.push_value_on_stack(stack_ptr, *status);
    // get interrupt vector
    *pc_reg = test_ram.read_mem_address(0xFFFE);
    //TODO push pc and status onto stack, load IRQ into PC
    *status |= BREAK_CMD_BIT;
}
/// PHA: pushes the accumulator onto the stack. One byte, 3 cycles.
pub fn push_acc_on_stack(
    pc_reg: &mut u16,
    accumulator: u8,
    stack_ptr: &mut u8,
    test_ram: &mut RAM,
    cycles: &mut u8,
) {
    test_ram.push_value_on_stack(stack_ptr, accumulator);
    *cycles = 3;
    *pc_reg += 1;
}
/// PHP: pushes the status register with bits 4 and 5 forced high. The live
/// status register itself is not modified. One byte, 3 cycles.
pub fn push_status_on_stack(
    pc_reg: &mut u16,
    status: u8,
    stack_ptr: &mut u8,
    test_ram: &mut RAM,
    cycles: &mut u8,
) {
    *pc_reg += 1;
    *cycles = 3;
    test_ram.push_value_on_stack(stack_ptr, status | 0b0011_0000);
}
/// PLA: pull a byte off the stack into the accumulator and update the
/// zero and negative flags from the pulled value. 4 cycles.
pub fn pull_acc_from_stack(
    pc_reg: &mut u16,
    accumulator: &mut u8,
    status: &mut u8,
    stack_ptr: &mut u8,
    test_ram: &mut RAM,
    cycles: &mut u8,
) {
    *pc_reg += 1;
    *cycles = 4;
    let pulled = test_ram.pop_value_off_stack(stack_ptr);
    *accumulator = pulled;

    // Zero flag follows the pulled value.
    match pulled {
        0 => *status |= ZERO_BIT,
        _ => *status &= !ZERO_BIT,
    }

    // Negative flag mirrors bit 7 of the pulled value.
    if pulled & 0x80 == 0 {
        *status &= !NEGATIVE_BIT;
    } else {
        *status |= NEGATIVE_BIT;
    }
}
/// PLP: pull the status register from the stack.
/// The break flag (bit 4) is discarded and the unused bit (bit 5) is
/// hard-wired high, matching the 6502's physical register.
pub fn pull_status_from_stack(
    pc_reg: &mut u16,
    status: &mut u8,
    stack_ptr: &mut u8,
    test_ram: &mut RAM,
    cycles: &mut u8,
) {
    *pc_reg += 1;
    *cycles = 4;
    let pulled = test_ram.pop_value_off_stack(stack_ptr);
    // Clear bit 4, force bit 5.
    *status = (pulled & !0b0001_0000) | 0b0010_0000;
}
/// Shared implementation for the register-transfer opcodes (TAX, TAY, TXA,
/// TYA, TSX, ...): copy `source` into `dest` and update the zero/negative
/// flags from the copied value. The only family member that sets no flags is
/// transfer-X-to-stack-pointer — see [`transfer_x_to_stack_pointer`].
pub fn transfer_source_to_dest(
    pc_reg: &mut u16,
    source: u8,
    dest: &mut u8,
    status: &mut u8,
    cycles: &mut u8,
) {
    *cycles = 2;
    *pc_reg += 1;
    *dest = source;

    // Zero flag follows the transferred value.
    match source {
        0 => *status |= ZERO_BIT,
        _ => *status &= !ZERO_BIT,
    }

    // Negative flag mirrors bit 7 of the transferred value.
    if source & 0x80 == 0 {
        *status &= !NEGATIVE_BIT;
    } else {
        *status |= NEGATIVE_BIT;
    }
}
/// TXS: copy the X register into the stack pointer.
/// Unlike the other transfer opcodes, no flags are affected. 2 cycles.
pub fn transfer_x_to_stack_pointer(pc_reg: &mut u16, x: u8, stack_ptr: &mut u8, cycles: &mut u8) {
    *cycles = 2;
    *stack_ptr = x;
    *pc_reg += 1;
}
#[cfg(test)]
mod tests {
    /// Smoke test covering BIT, NOP, BRK, stack push/pull and TXS.
    /// Memory is seeded so that address i holds the byte i (mod 256).
    #[test]
    fn test_misc() {
        use super::*;
        use crate::memory;
        // NOTE(review): `operand` is never used — literal addresses are
        // passed directly to the calls below.
        let operand = 7;
        let mut pc_reg = 0;
        let mut accumulator = 7;
        let mut status: u8 = 0;
        let mut test_memory: memory::RAM = memory::RAM::new();
        let mut cycles = 0;
        let mut stack = 0;
        // init mem: mem[i] = i as u8 for the first 2 KiB
        for i in 0..2048 {
            test_memory.write_mem_value(i, i as u8);
        }
        // BIT $07: A=7 & mem[7]=7 is non-zero; bits 6/7 of mem[7] clear -> no flags.
        bittest_zero_page(
            &mut pc_reg,
            accumulator,
            7,
            &mut test_memory,
            &mut status,
            &mut cycles,
        );
        assert_eq!(pc_reg, 2);
        assert_eq!(accumulator, 7);
        assert_eq!(status, 0);
        accumulator = 192;
        // BIT $C2: mem[194]=194 (0b1100_0010) -> N and V set (0x60 with these flag bits).
        bittest_zero_page(
            &mut pc_reg,
            accumulator,
            194,
            &mut test_memory,
            &mut status,
            &mut cycles,
        );
        assert_eq!(pc_reg, 4);
        assert_eq!(accumulator, 192);
        assert_eq!(status, 96);
        // BIT $01: mem[1]=1; 192 & 1 == 0 -> only zero flag remains.
        bittest_zero_page(
            &mut pc_reg,
            accumulator,
            1,
            &mut test_memory,
            &mut status,
            &mut cycles,
        );
        assert_eq!(pc_reg, 6);
        assert_eq!(accumulator, 192);
        assert_eq!(status, 2);
        // BIT $0122 (absolute): mem[290]=34; 192 & 34 == 0 -> zero flag stays set.
        bittest_absolute(
            &mut pc_reg,
            accumulator,
            290,
            &mut test_memory,
            &mut status,
            &mut cycles,
        );
        assert_eq!(pc_reg, 9);
        assert_eq!(accumulator, 192);
        assert_eq!(status, 2);
        NOP(&mut pc_reg, &mut cycles);
        assert_eq!(pc_reg, 10);
        // BRK: pushes PC (2 bytes) + status (1 byte) -> stack pointer at 3,
        // and the break bit (0x10) is OR-ed into the zero-flag status (0x02).
        break_force_interrupt(
            &mut pc_reg,
            &mut status,
            &mut stack,
            &mut test_memory,
            &mut cycles,
        );
        assert_eq!(stack, 3);
        assert_eq!(status, 18);
        status = 0;
        pc_reg = 0;
        // PHP forces bits 4/5 high in the pushed copy; PLA then reads it back.
        push_status_on_stack(&mut pc_reg, 128, &mut stack, &mut test_memory, &mut cycles);
        assert_eq!(stack, 4);
        assert_eq!(pc_reg, 1);
        pull_acc_from_stack(
            &mut pc_reg,
            &mut accumulator,
            &mut status,
            &mut stack,
            &mut test_memory,
            &mut cycles,
        );
        assert_eq!(stack, 3);
        assert_eq!(accumulator, 128);
        push_status_on_stack(&mut pc_reg, 0, &mut stack, &mut test_memory, &mut cycles);
        pull_acc_from_stack(
            &mut pc_reg,
            &mut accumulator,
            &mut status,
            &mut stack,
            &mut test_memory,
            &mut cycles,
        );
        assert_eq!(status, 2);
        // TXS replaces the stack pointer wholesale, no flag changes.
        transfer_x_to_stack_pointer(&mut pc_reg, 244, &mut stack, &mut cycles);
        assert_eq!(stack, 244);
    }
}
|
struct Solution {}
impl Solution {
    /// Reverse the order of the space-separated words in `s`, collapsing
    /// runs of spaces and dropping leading/trailing whitespace around words.
    pub fn reverse_words(s: String) -> String {
        let words: Vec<&str> = s
            .split(' ')
            .map(str::trim)
            .filter(|w| !w.is_empty())
            .rev()
            .collect();
        words.join(" ")
    }
}
/// Demo entrypoint: print the reversed form of a sample string.
fn main() {
    let input = String::from("  hello world");
    let reversed = Solution::reverse_words(input);
    println!("{}", reversed);
}
|
extern crate bindgen;
use std::env;
use std::path::PathBuf;
/// Build script: link against the system GLPK library and generate Rust FFI
/// bindings for the small subset of the GLPK API this crate uses.
fn main() {
    println!("cargo:rustc-link-lib=glpk");
    // Fixed: the instruction is `rerun-if-changed`; the previous
    // `rebuild-if-changed` spelling is not recognized by cargo and was
    // silently ignored, so edits to wrapper.h did not trigger a rebuild.
    println!("cargo:rerun-if-changed=wrapper.h");
    let bindings = bindgen::Builder::default()
        .header("wrapper.h")
        // Re-run this build script when any transitively included header changes.
        .parse_callbacks(Box::new(bindgen::CargoCallbacks))
        // We explicitly whitelist the used functions in order to avoid hundreds of dead code
        // warnings.
        .whitelist_function("glp_create_prob")
        .whitelist_function("glp_set_obj_dir")
        .whitelist_function("glp_add_rows")
        .whitelist_function("glp_add_cols")
        .whitelist_function("glp_load_matrix")
        .whitelist_function("glp_set_row_bnds")
        .whitelist_function("glp_set_col_bnds")
        .whitelist_function("glp_set_obj_coef")
        .whitelist_function("glp_init_smcp")
        .whitelist_function("glp_simplex")
        .whitelist_function("glp_get_status")
        .whitelist_function("glp_get_obj_val")
        .whitelist_function("glp_delete_prob")
        .whitelist_type("glp_smcp")
        .whitelist_var("GLP_MAX")
        .whitelist_var("GLP_UP")
        .whitelist_var("GLP_FR")
        .whitelist_var("GLP_OPT")
        .whitelist_var("GLP_NOFEAS")
        .whitelist_var("GLP_UNBND")
        .whitelist_var("GLP_MSG_OFF")
        .generate()
        .expect("Unable to generate bindings");
    // Write the generated bindings into OUT_DIR for `include!` at compile time.
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings");
}
|
//! Router server entrypoint.
use self::{grpc::RpcWriteGrpcDelegate, http::HttpDelegate};
use std::sync::Arc;
use trace::TraceCollector;
pub mod grpc;
pub mod http;
/// The [`RpcWriteRouterServer`] manages the lifecycle and contains all state for a
/// `router-rpc-write` server instance.
#[derive(Debug)]
pub struct RpcWriteRouterServer<D, N> {
    /// Metric registry shared with the request handlers and exposed via
    /// [`Self::metric_registry`].
    metrics: Arc<metric::Registry>,
    /// Optional distributed-tracing sink; `None` disables trace collection.
    trace_collector: Option<Arc<dyn TraceCollector>>,
    /// HTTP request handler, generic over `D` and `N` as required by
    /// [`HttpDelegate`].
    http: HttpDelegate<D, N>,
    /// gRPC request handler.
    grpc: RpcWriteGrpcDelegate,
}
impl<D, N> RpcWriteRouterServer<D, N> {
    /// Build a router server from its HTTP and gRPC delegates plus the shared
    /// metric registry and an optional trace collector.
    pub fn new(
        http: HttpDelegate<D, N>,
        grpc: RpcWriteGrpcDelegate,
        metrics: Arc<metric::Registry>,
        trace_collector: Option<Arc<dyn TraceCollector>>,
    ) -> Self {
        Self {
            http,
            grpc,
            metrics,
            trace_collector,
        }
    }

    /// A handle to the [`metric::Registry`] used by the router.
    pub fn metric_registry(&self) -> Arc<metric::Registry> {
        Arc::clone(&self.metrics)
    }

    /// The trace collector associated with this server, if tracing is enabled.
    pub fn trace_collector(&self) -> &Option<Arc<dyn TraceCollector>> {
        &self.trace_collector
    }

    /// Borrow the router's HTTP delegate.
    pub fn http(&self) -> &HttpDelegate<D, N> {
        &self.http
    }

    /// Borrow the router's gRPC delegate.
    pub fn grpc(&self) -> &RpcWriteGrpcDelegate {
        &self.grpc
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// IGMP message types (RFC 2236 / RFC 3376).
create_net_enum! {
    MessageType,
    MembershipQuery: MEMBERSHIP_QUERY = 0x11,
    MembershipReportV1: MEMBERSHIP_REPORT_V1 = 0x12,
    MembershipReportV2: MEMBERSHIP_REPORT_V2 = 0x16,
    MembershipReportV3: MEMBERSHIP_REPORT_V3 = 0x22,
    // Fixed typo: the variant was previously misspelled `LeaveVroup`
    // (the associated constant is LEAVE_GROUP).
    LeaveGroup: LEAVE_GROUP = 0x17,
}
|
/// A `*`-wildcard hostname matcher.
///
/// `*` stands for exactly one non-empty, dot-free label: `*.example.com`
/// matches `foo.example.com` but not `a.b.example.com` or `.example.com`.
#[derive(Debug, Clone)]
pub struct WildcardMatcher {
    // The pattern, one character per element; '*' is the only special char.
    chars: Vec<char>,
}
impl WildcardMatcher {
    /// Build a matcher from a pattern string such as `*.example.com`.
    pub fn new(s: &str) -> Self {
        Self {
            chars: s.chars().collect::<Vec<char>>(),
        }
    }

    /// Returns `true` if `s` matches the stored pattern.
    ///
    /// Each `*` consumes one whole label of the input: at least one
    /// character, none of which may be `.`. The input must be fully
    /// consumed for a match.
    pub fn is_match(&self, s: &str) -> bool {
        let mut chars = s.chars();
        // Set when a '*' stopped because it hit a '.' in the input: the next
        // literal pattern character must then be '.' to re-synchronise.
        let mut dot = false;
        for ch in &self.chars {
            match ch {
                '*' => {
                    // A wildcard must match at least one character, and the
                    // label it matches may not start with '.'.
                    match chars.next() {
                        Some(c) => {
                            if c == '.' {
                                return false;
                            }
                        }
                        None => return false,
                    }
                    // Swallow the rest of the label, stopping at the next '.'
                    // (remembered via `dot`) or at end of input.
                    while let Some(n) = chars.next() {
                        if n == '.' {
                            dot = true;
                            break;
                        }
                    }
                }
                word => {
                    // The '.' that terminated the previous wildcard must be
                    // mirrored by a literal '.' in the pattern.
                    if dot {
                        if word == &'.' {
                            dot = false;
                            continue;
                        } else {
                            return false;
                        }
                    }
                    // Literal characters match one-for-one against the input.
                    match chars.next() {
                        Some(ch) => {
                            if word != &ch {
                                return false;
                            }
                        }
                        None => return false,
                    }
                }
            }
        }
        // A trailing wildcard stopped at a '.' the pattern never accounted
        // for: the input has an extra label, so reject it.
        if dot {
            return false;
        }
        // Match only if the input is fully consumed as well.
        chars.next().is_none()
    }
}
#[cfg(test)]
mod test {
    use super::*;

    /// Exercises bare `*`, single-sided and double-sided wildcard patterns,
    /// including rejection of leading/trailing dots and extra labels.
    #[test]
    fn wildcard() {
        // A lone '*' matches exactly one dot-free label.
        let matcher = WildcardMatcher::new("*");
        assert!(matcher.is_match("localhost"));
        assert!(!matcher.is_match(".localhost"));
        assert!(!matcher.is_match("localhost."));
        assert!(!matcher.is_match("local.host"));
        // Wildcard in the leftmost label only.
        let matcher = WildcardMatcher::new("*.com");
        assert!(matcher.is_match("test.com"));
        assert!(matcher.is_match("example.com"));
        assert!(!matcher.is_match("test.test"));
        assert!(!matcher.is_match(".test.com"));
        assert!(!matcher.is_match("test.com."));
        assert!(!matcher.is_match("test.test.com"));
        // Two wildcards, two labels.
        let matcher = WildcardMatcher::new("*.*");
        assert!(matcher.is_match("test.test"));
        assert!(!matcher.is_match(".test.test"));
        assert!(!matcher.is_match("test.test."));
        assert!(!matcher.is_match("test.test.test"));
        // Classic TLS-style wildcard certificate pattern.
        let matcher = WildcardMatcher::new("*.example.com");
        assert!(matcher.is_match("test.example.com"));
        assert!(matcher.is_match("example.example.com"));
        assert!(!matcher.is_match("test.example.com.com"));
        assert!(!matcher.is_match("test.test.example.com"));
        // Wildcards on both ends of a fixed middle label.
        let matcher = WildcardMatcher::new("*.example.*");
        assert!(matcher.is_match("test.example.com"));
        assert!(matcher.is_match("example.example.com"));
        assert!(!matcher.is_match("test.test.example.test"));
        assert!(!matcher.is_match("test.example.test.test"));
    }
}
|
use bitvec::prelude::Msb0;
use bitvec::vec::BitVec;
use stark_hash::Felt;
use crate::hash::FeltHash;
/// A node in a Starknet patricia-merkle trie.
///
/// See pathfinders merkle-tree crate for more information.
#[derive(Debug, Clone, PartialEq)]
pub enum TrieNode {
    /// Internal node with two children, identified by their felt hashes.
    Binary { left: Felt, right: Felt },
    /// Path-compressed node: `path` is the bit string leading to `child`.
    Edge { child: Felt, path: BitVec<Msb0, u8> },
}
impl TrieNode {
    /// Compute this node's commitment hash with the hash function `H`.
    ///
    /// Binary nodes hash as `H(left, right)`. Edge nodes hash as
    /// `H(child, path) + length`, where the path length is encoded in the
    /// least-significant byte of a big-endian 32-byte felt.
    pub fn hash<H: FeltHash>(&self) -> Felt {
        match self {
            TrieNode::Binary { left, right } => H::hash(*left, *right),
            TrieNode::Edge { child, path } => {
                let mut length = [0; 32];
                // Safe as len() is guaranteed to be <= 251
                length[31] = path.len() as u8;
                let path = Felt::from_bits(path).unwrap();
                let length = Felt::from_be_bytes(length).unwrap();
                H::hash(*child, path) + length
            }
        }
    }
}
|
use crate::params::named_params;
use pathfinder_common::{
BlockHash, BlockNumber, ClassCommitment, StateCommitment, StorageCommitment,
};
use rusqlite::{OptionalExtension, Transaction};
use crate::params::RowExt;
/// Migration: rebuild the `l1_state` table with a reduced schema and seed it
/// from the current L1/L2 head block, if one exists.
///
/// The new row's state root is recomputed from the block's storage and class
/// commitments rather than copied from the dropped table.
pub(crate) fn migrate(tx: &Transaction<'_>) -> anyhow::Result<()> {
    // Drop and recreate: previous contents are intentionally discarded.
    tx.execute("DROP TABLE l1_state", [])?;
    tx.execute(
        r"CREATE TABLE l1_state (
        starknet_block_number INTEGER PRIMARY KEY,
        starknet_block_hash   BLOB    NOT NULL,
        starknet_state_root   BLOB    NOT NULL
    )",
        [],
    )?;
    // Look up the block currently referenced as the L1/L2 head (may be absent).
    let maybe_head = tx
        .query_row(
            r"SELECT b.number, b.hash, b.root, b.class_commitment
            FROM starknet_blocks b
            INNER JOIN refs r ON r.l1_l2_head == b.number
            LIMIT 1",
            [],
            |row| {
                let number: BlockNumber = row.get_block_number(0)?;
                let hash: BlockHash = row.get_block_hash(1)?;
                let storage: StorageCommitment = row.get_storage_commitment(2)?;
                let class: ClassCommitment = row.get_class_commitment(3)?;
                Ok((number, hash, storage, class))
            },
        )
        .optional()?;
    if let Some((number, hash, storage, class)) = maybe_head {
        // Recompute the combined state commitment for the seeded row.
        let root = StateCommitment::calculate(storage, class);
        tx.execute(
            r"INSERT OR REPLACE INTO l1_state (
                        starknet_block_number,
                        starknet_block_hash,
                        starknet_state_root
                    ) VALUES (
                        :starknet_block_number,
                        :starknet_block_hash,
                        :starknet_state_root
                    )",
            named_params! {
                ":starknet_block_number": &number,
                ":starknet_block_hash": &hash,
                ":starknet_state_root": &root,
            },
        )?;
    }
    Ok(())
}
|
use std::sync::mpsc::{channel, Receiver, Sender};
use anyhow::{Context, Result, bail};
use cpal::{
traits::{DeviceTrait, HostTrait, StreamTrait},
BufferSize, Sample, Stream, StreamConfig, SupportedBufferSize, SizedSample, FromSample,
};
use super::bus::Busable;
/// Audio front-end: owns the cpal output stream and forwards APU register
/// writes to the synthesiser running on the audio thread.
pub struct Sound {
    // Kept alive for the lifetime of `Sound`; dropping it stops playback.
    _stream: Stream,
    // Shadow copy of the synth registers, mutated by bus writes.
    state: SynthRegState,
    // Channel to the audio callback; a full snapshot is sent after each write.
    tx: Sender<SynthRegState>,
}
impl Sound {
    /// Open the default output device, select a stereo `f32` output config at
    /// the device's maximum sample rate, then build and start the stream.
    pub fn new() -> Result<Self> {
        let host = cpal::default_host();
        let device = host
            .default_output_device()
            .context("no output device available")?;
        let mut supported_configs_range = device
            .supported_output_configs()
            .context("error while querying configs")?;
        // Require stereo f32 output; run at the highest supported rate.
        let supported_config = supported_configs_range
            .find(|c| c.channels() == 2 && matches!(c.sample_format(), SampleFormat::F32))
            .context("no suitable config")?
            .with_max_sample_rate()
        ;
        #[cfg(feature = "audio-log")]
        println!("Supported audio config: {supported_config:?}");
        use cpal::SampleFormat;
        let sample_format = supported_config.sample_format();
        // Respect the device's minimum buffer size when requesting a small
        // fixed buffer (lower latency).
        let min_bufsize = match supported_config.buffer_size() {
            &SupportedBufferSize::Range { min, max: _ } => min,
            _ => 0,
        };
        let mut config: StreamConfig = supported_config.into();
        config.buffer_size = BufferSize::Fixed((512).max(min_bufsize));
        #[cfg(feature = "audio-log")]
        println!("Audio config: {config:?}");
        // The synth lives on the audio thread; `tx` is our side of the
        // register-update channel.
        let (synth, tx) = new_synth(&config);
        let state = Default::default();
        // Only F32 should occur given the filter above; the other formats are
        // handled defensively.
        let stream = match sample_format {
            SampleFormat::F32 => start_audio_stream::<f32>(&device, &config, synth),
            SampleFormat::I16 => start_audio_stream::<i16>(&device, &config, synth),
            SampleFormat::U16 => start_audio_stream::<u16>(&device, &config, synth),
            other => bail!("Unsupported sample format {other}")
        }
        .context("Failed to build output audio stream")?;
        stream.play().context("Failed to play stream")?;
        Ok(Self {
            _stream: stream,
            tx,
            state,
        })
    }
}
impl Busable for Sound {
    /// Read back an APU register. Only NR10, NR12 and NR51 are implemented;
    /// every other address currently returns 0.
    fn read(&self, addr: u16) -> u8 {
        match addr {
            // NR10 (channel 1 sweep): bit 7 is unused and reads as set.
            0xff10 => 0x80 | self.state.sweep_time_1 | (self.state.negate_1 as u8) << 3,
            // NR12 (channel 1 envelope): volume | direction | period.
            0xff12 => {
                self.state.envelope_vol_1 << 4
                    | (self.state.envelope_increase_1 as u8) << 3
                    | self.state.envelope_sweep_1 & 0x7
            }
            // NR51 (stereo panning matrix).
            0xff25 => self.state.channel_pan,
            _ => {
                #[cfg(feature = "audio-log")]
                eprintln!("Sound read at {addr:#x}");
                0
            } // TODO: panic
        }
    }

    /// Decode a register write into the shadow register state, send a full
    /// snapshot to the audio thread, then clear the one-shot trigger flags so
    /// each trigger event is delivered exactly once.
    fn write(&mut self, addr: u16, value: u8) {
        #[cfg(feature = "audio-log")]
        println!("Audio write ({value:#x}) to {addr:#x}");
        match addr {
            // NR10: channel 1 frequency sweep.
            0xff10 => {
                self.state.sweep_shift_1 = value & 0x07;
                self.state.negate_1 = value & 0x08 != 0;
                self.state.sweep_time_1 = (value & 0xf0) >> 4;
            }
            // NR11: channel 1 duty pattern and length load (counts down from 64-n).
            0xff11 => {
                self.state.wave_pattern_1 = value >> 6;
                self.state.sound_length_1 = 64 - (value & 0x3f);
            }
            // NR12: channel 1 envelope.
            0xff12 => {
                self.state.envelope_vol_1 = value >> 4;
                self.state.envelope_increase_1 = value & 0x08 != 0;
                self.state.envelope_sweep_1 = value & 0x07;
            }
            // NR13: channel 1 frequency low byte.
            0xff13 => {
                self.state.frequency_1 = self.state.frequency_1 & 0xff00 | value as u16;
            }
            // NR14: channel 1 frequency high bits, length enable, trigger.
            0xff14 => {
                self.state.frequency_1 =
                    self.state.frequency_1 & 0x00ff | ((value & 0x7) as u16) << 8;
                self.state.trigger_1 = value & 0x80 != 0;
                self.state.length_en_1 = value & 0x40 != 0;
            }
            0xff15 => {
                #[cfg(feature = "audio-log")]
                eprintln!("Write to unused sound register ff15");
            }
            // NR21: channel 2 duty pattern and length load.
            0xff16 => {
                self.state.wave_pattern_2 = value >> 6;
                self.state.sound_length_2 = 64 - (value & 0x3f);
            }
            // NR22: channel 2 envelope.
            0xff17 => {
                self.state.envelope_vol_2 = value >> 4;
                self.state.envelope_increase_2 = value & 0x08 != 0;
                self.state.envelope_sweep_2 = value & 0x07;
            }
            // NR23: channel 2 frequency low byte.
            // BUG FIX: this previously read `frequency_1`, clobbering
            // channel 2's high frequency bits with channel 1's.
            0xff18 => {
                self.state.frequency_2 = self.state.frequency_2 & 0xff00 | value as u16;
            }
            // NR24: channel 2 frequency high bits, length enable, trigger.
            0xff19 => {
                self.state.frequency_2 =
                    self.state.frequency_2 & 0x00ff | ((value & 0x7) as u16) << 8;
                self.state.trigger_2 = value & 0x80 != 0;
                self.state.length_en_2 = value & 0x40 != 0;
            }
            // NR30: wave channel DAC enable.
            0xff1a => {
                self.state.wave.enabled = value & 0x80 != 0;
            }
            // NR31: wave channel length load (counts down from 256-n).
            0xff1b => {
                self.state.wave.length = 0x100 - value as u16;
            }
            // NR32: wave channel volume, stored as a right-shift amount
            // (mute = shift by 4, 100% = 0, 50% = 1, 25% = 2).
            0xff1c => {
                self.state.wave.volume_shift = match (value >> 5) & 0x3 {
                    0 => 4,
                    1 => 0,
                    2 => 1,
                    3 => 2,
                    _ => unreachable!(),
                };
            }
            // NR33: wave channel frequency low byte.
            0xff1d => {
                self.state.wave.frequency = self.state.wave.frequency & 0xff00 | value as u16;
            }
            // NR34: wave channel frequency high bits, length enable, trigger.
            0xff1e => {
                self.state.wave.frequency =
                    self.state.wave.frequency & 0x00ff | ((value & 0x7) as u16) << 8;
                self.state.wave.trigger = value & 0x80 != 0;
                self.state.wave.length_en = value & 0x40 != 0;
            }
            // NR41: noise channel length load.
            0xff20 => {
                self.state.noise.length = 64 - (value & 0x3f);
            }
            // NR42: noise channel envelope.
            0xff21 => {
                self.state.noise.envelope_vol = value >> 4;
                self.state.noise.envelope_increase = value & 0x08 != 0;
                self.state.noise.envelope_sweep = value & 0x07;
            }
            // NR43: noise channel divisor code, LFSR width and shift clock.
            0xff22 => {
                let div_code = (value & 0x7) as u32;
                let shift_clock_frequency = (value >> 4) as u16;
                self.state.noise.short_pattern = value & 0x08 != 0;
                // Divisor 0 behaves as 0.5 (stored here as 8 vs div_code*16).
                self.state.noise.frequency =
                    if div_code > 0 { div_code << 4 } else { 1 << 3 } << shift_clock_frequency;
            }
            // NR44: noise channel trigger and length enable.
            0xff23 => {
                self.state.noise.trigger = value & 0x80 != 0;
                self.state.noise.length_en = value & 0x40 != 0;
            }
            // NR50: master volume per side.
            0xff24 => {
                self.state.left_vol = (value & 0x70) >> 4;
                self.state.right_vol = value & 0x07;
            }
            // NR51: stereo panning matrix.
            0xff25 => {
                self.state.channel_pan = value;
            }
            // NR52: master sound enable.
            0xff26 => {
                self.state.sound_enable = value & 0x80 != 0;
            }
            // Wave pattern RAM: each byte holds two 4-bit samples, high nibble first.
            0xff30..=0xff3f => {
                let index = (addr - 0xff30) as usize;
                self.state.wave.pattern[index * 2] = value >> 4;
                self.state.wave.pattern[index * 2 + 1] = value & 0x0f;
            }
            _ => {
                #[cfg(feature = "audio-log")]
                eprintln!("Invalid sound write at {addr:#x}")
            }
        }
        // Ship the full register snapshot to the audio thread, then clear the
        // trigger flags locally so the next snapshot does not re-trigger.
        self.tx
            .send(self.state.clone())
            .expect("Failed to send SynthRegState to audio thread");
        self.state.trigger_1 = false;
        self.state.trigger_2 = false;
        self.state.wave.trigger = false;
        self.state.noise.trigger = false;
    }
}
/// Snapshot of all decoded APU registers, sent from the emulator thread to
/// the audio thread after every register write.
#[derive(Clone, Default)]
struct SynthRegState {
    // Master enable (NR52 bit 7).
    sound_enable: bool,
    // --- Square channel 1 (with sweep) ---
    sweep_time_1: u8,
    negate_1: bool,
    sweep_shift_1: u8,
    wave_pattern_1: u8,
    sound_length_1: u8,
    envelope_vol_1: u8,
    envelope_increase_1: bool,
    envelope_sweep_1: u8,
    frequency_1: u16,
    // One-shot: set by a trigger write, cleared after sending.
    trigger_1: bool,
    length_en_1: bool,
    // --- Square channel 2 ---
    wave_pattern_2: u8,
    sound_length_2: u8,
    envelope_vol_2: u8,
    envelope_increase_2: bool,
    envelope_sweep_2: u8,
    frequency_2: u16,
    trigger_2: bool,
    length_en_2: bool,
    // --- Mixer ---
    channel_pan: u8,
    left_vol: u8,
    right_vol: u8,
    // --- Wave and noise channels ---
    wave: SynthWave,
    noise: SynthNoise,
}
/// Decoded registers of the programmable wave channel (channel 3).
#[derive(Default, Clone)]
struct SynthWave {
    // DAC enable (NR30 bit 7).
    enabled: bool,
    // Remaining length counts (256 - written value).
    length: u16,
    // Right-shift applied to each 4-bit sample (4 = mute, 0 = full volume).
    volume_shift: u8,
    // 11-bit raw frequency register value.
    frequency: u16,
    length_en: bool,
    // One-shot trigger flag.
    trigger: bool,
    // 32 unpacked 4-bit samples from wave pattern RAM.
    pattern: [u8; 32],
}
/// Decoded registers of the noise channel (channel 4).
#[derive(Clone)]
struct SynthNoise {
    // Remaining length counts (64 - written value).
    length: u8,
    envelope_vol: u8,
    envelope_increase: bool,
    envelope_sweep: u8,
    // Pre-combined divisor: (divisor-code based value) << shift-clock; used
    // as the denominator of the LFSR clock rate.
    frequency: u32,
    // 7-bit LFSR mode when set (bit 3 of NR43).
    short_pattern: bool,
    length_en: bool,
    // One-shot trigger flag.
    trigger: bool,
}
impl Default for SynthNoise {
    /// All registers start cleared; the divisor starts at 8, matching what a
    /// register write with divisor code 0 would store (`1 << 3`).
    fn default() -> Self {
        Self {
            length: 0,
            envelope_vol: 0,
            envelope_increase: false,
            envelope_sweep: 0,
            frequency: 8,
            short_pattern: false,
            length_en: false,
            trigger: false,
        }
    }
}
// One period of each square-wave duty cycle, 8 samples per period, as ±1.0,
// indexed by the 2-bit duty register value: 12.5%, 25%, 50% and 75% high.
const SQUARE_PATTERN: [[f32; 8]; 4] = [
    [-1., -1., -1., -1., -1., -1., -1., 1.],
    [-1., -1., -1., -1., -1., -1., 1., 1.],
    [-1., -1., -1., -1., 1., 1., 1., 1.],
    [1., 1., 1., 1., 1., 1., -1., -1.],
];
/// Build (but do not start) a cpal output stream that renders `synth` into
/// interleaved stereo frames of sample type `T`.
fn start_audio_stream<T: Sample + SizedSample + FromSample<f32>>(
    device: &cpal::Device,
    config: &cpal::StreamConfig,
    mut synth: Synth,
) -> Result<Stream> {
    device
        .build_output_stream(
            config,
            move |buffer: &mut [T], _: &cpal::OutputCallbackInfo| {
                audio_thread(buffer, &mut synth)
            },
            // Errors are only reported; the stream itself keeps running.
            |err| eprintln!("an error occurred on the output audio stream: {err}"),
            None,
        )
        .context("Failed start audio thread")
}
/// Render callback: apply any pending register updates, then fill the
/// interleaved stereo buffer one (left, right) frame at a time.
fn audio_thread<T: Sample + FromSample<f32>>(data: &mut [T], synth: &mut Synth) {
    synth.update_cmd();
    for frame in data.chunks_mut(2) {
        let (left, right) = synth.next_sample();
        frame[0] = Sample::from_sample::<f32>(left);
        frame[1] = Sample::from_sample::<f32>(right);
    }
}
/// The audio-thread synthesiser: holds the latest register snapshot plus all
/// per-channel playback state (frequencies, envelopes, timers, counters).
struct Synth {
    // Incoming register snapshots from the emulator thread.
    rx: Receiver<SynthRegState>,
    // Most recent (trigger-latched) register snapshot.
    reg_state: SynthRegState,
    // Running sample counter; periodically reset to avoid overflow.
    n: u64,
    sample_rate: u32,
    // Samples per tick of the ~512 Hz frame sequencer.
    timer_512_reset: u32,
    timer_512: u32,
    // Derived clocks: length (~256 Hz) and envelope (~64 Hz); the value 0 is
    // a one-sample pulse on which the channels act.
    length_timer: u8,
    envelope_master_timer: u8,
    // --- Square channel 1 ---
    hz_frequency_1: u32,
    sound_length_1: u8,
    current_vol_1: u8,
    envelope_timer_1: u8,
    cycle_index_1: u8,
    square_timer_1: Timer,
    // --- Square channel 2 ---
    hz_frequency_2: u32,
    sound_length_2: u8,
    current_vol_2: u8,
    envelope_timer_2: u8,
    cycle_index_2: u8,
    square_timer_2: Timer,
    // --- Wave channel ---
    hz_frequency_3: u32,
    sound_length_3: u16,
    wave_timer: Timer,
    pattern_index_3: u32,
    // --- Noise channel ---
    hz_frequency_4: u32,
    sound_length_4: u16,
    current_vol_4: u8,
    envelope_timer_4: u8,
    // 15-bit linear-feedback shift register producing the noise bitstream.
    lfsr: u16,
    noise_timer: Timer,
}
impl Synth {
    /// Create a synth with all channels silent; every channel timer starts
    /// disabled (frequency 0) until a trigger write arrives.
    fn new(rx: Receiver<SynthRegState>, cfg: &StreamConfig) -> Self {
        let sample_rate = cfg.sample_rate.0;
        Self {
            rx,
            reg_state: Default::default(),
            n: 0,
            sample_rate,
            timer_512_reset: sample_rate / 512,
            timer_512: 0,
            envelope_master_timer: 0,
            length_timer: 0,
            hz_frequency_1: 0,
            sound_length_1: 0,
            envelope_timer_1: 0,
            current_vol_1: 0,
            cycle_index_1: 0,
            square_timer_1: Timer::new(0, sample_rate),
            hz_frequency_2: 0,
            sound_length_2: 0,
            envelope_timer_2: 0,
            current_vol_2: 0,
            cycle_index_2: 0,
            square_timer_2: Timer::new(0, sample_rate),
            hz_frequency_3: 0,
            sound_length_3: 0,
            wave_timer: Timer::new(0, sample_rate),
            pattern_index_3: 0,
            hz_frequency_4: 1,
            sound_length_4: 0,
            noise_timer: Timer::new(0, sample_rate),
            current_vol_4: 0,
            lfsr: 0,
            envelope_timer_4: 0,
        }
    }
    /// Drain all pending register snapshots, keeping the newest one, while
    /// OR-latching each channel's trigger flag so no trigger is lost when
    /// several snapshots arrive between two render callbacks. A trigger
    /// re-derives the channel's playback frequency, resets its envelope and
    /// restarts its period timer.
    fn update_cmd(&mut self) {
        let mut new_state = self.reg_state.clone();
        let mut trigger_1 = false;
        let mut trigger_2 = false;
        let mut trigger_3 = false;
        let mut trigger_4 = false;
        while let Ok(state) = self.rx.try_recv() {
            trigger_1 |= state.trigger_1;
            if state.trigger_1 {
                // Square frequency: 131072 / (2048 - reg) Hz.
                self.hz_frequency_1 =
                    (131072. / (2048. - (state.frequency_1 as f32)).round()) as u32;
                self.current_vol_1 = state.envelope_vol_1;
                self.envelope_timer_1 = state.envelope_sweep_1;
                self.cycle_index_1 = 0;
                // Timer runs 8x the tone frequency: one tick per duty step.
                self.square_timer_1 = Timer::new(self.hz_frequency_1 * 8, self.sample_rate);
            }
            trigger_2 |= state.trigger_2;
            if state.trigger_2 {
                self.hz_frequency_2 =
                    (131072. / (2048. - (state.frequency_2 as f32)).round()) as u32;
                self.current_vol_2 = state.envelope_vol_2;
                self.envelope_timer_2 = state.envelope_sweep_2;
                self.cycle_index_2 = 0;
                self.square_timer_2 = Timer::new(self.hz_frequency_2 * 8, self.sample_rate);
            }
            trigger_3 |= state.wave.trigger;
            if state.wave.trigger {
                // Wave sample rate: 32 samples per period at
                // 65536 / (2048 - reg) Hz.
                self.hz_frequency_3 =
                    32 * (65536. / (2048. - (state.wave.frequency as f32)).round()) as u32;
                self.wave_timer = Timer::new(self.hz_frequency_3, self.sample_rate);
                self.pattern_index_3 = 2;
            }
            trigger_4 |= state.noise.trigger;
            if state.noise.trigger {
                // LFSR clock derived from the pre-combined divisor stored in
                // noise.frequency (see the 0xff22 write handler).
                self.hz_frequency_4 = (524288u32 << 3) / (state.noise.frequency as u32);
                self.noise_timer = Timer::new(self.hz_frequency_4, self.sample_rate);
                self.current_vol_4 = state.noise.envelope_vol;
                self.envelope_timer_4 = state.noise.envelope_sweep;
                self.lfsr = 0xffff;
            }
            new_state = state;
        }
        new_state.trigger_1 = trigger_1;
        new_state.trigger_2 = trigger_2;
        new_state.wave.trigger = trigger_3;
        new_state.noise.trigger = trigger_4;
        self.reg_state = new_state;
    }
    /// Produce one stereo output frame by advancing the frame sequencer,
    /// mixing the four channels and applying panning and master volume.
    fn next_sample(&mut self) -> (f32, f32) {
        if !self.reg_state.sound_enable {
            return (0., 0.);
        }
        // ~512 Hz frame sequencer tick, derived from the sample rate.
        self.timer_512 += 1;
        if self.timer_512 >= self.timer_512_reset {
            self.timer_512 = 0;
        }
        // Value 0 on these derived clocks is a one-sample pulse: bump it
        // back to 1 so the pulse lasts exactly one output sample.
        if self.envelope_master_timer == 0 {
            self.envelope_master_timer = 1;
        }
        if self.length_timer == 0 {
            self.length_timer += 1;
        }
        if self.timer_512 == 0 {
            // Length clock pulses every 2 frame ticks (~256 Hz)...
            self.length_timer += 1;
            if self.length_timer >= 3 {
                // 0 only triggered for 1 sample
                self.length_timer = 0;
            }
            // ...and the envelope clock every 8 frame ticks (~64 Hz).
            self.envelope_master_timer += 1;
            if self.envelope_master_timer >= 9 {
                // 0 only triggered for 1 sample
                self.envelope_master_timer = 0;
            }
        }
        let square1 = self.next_square_1();
        let square2 = self.next_square_2();
        let wave = self.next_wave();
        let noise = self.next_noise();
        // NOTE(review): `n` is only maintained for this wrap-around guard;
        // nothing else appears to read it.
        self.n += 1;
        if self.n > u64::MAX / 2 && self.n % (self.sample_rate as u64) == 0 {
            self.n = 0; // TODO: better with lcm ?
        }
        // Pan each channel to left/right per the NR51 matrix bits.
        let mut left = 0.;
        let mut right = 0.;
        if self.reg_state.channel_pan & 0x10 != 0 {
            left += square1;
        }
        if self.reg_state.channel_pan & 0x20 != 0 {
            left += square2;
        }
        if self.reg_state.channel_pan & 0x40 != 0 {
            left += wave;
        }
        if self.reg_state.channel_pan & 0x80 != 0 {
            left += noise;
        }
        if self.reg_state.channel_pan & 0x01 != 0 {
            right += square1;
        }
        if self.reg_state.channel_pan & 0x02 != 0 {
            right += square2;
        }
        if self.reg_state.channel_pan & 0x04 != 0 {
            right += wave;
        }
        if self.reg_state.channel_pan & 0x08 != 0 {
            right += noise;
        }
        // Master volume 0..=7 maps to gain (1+vol)/9, scaled to avoid clipping.
        (
            left * 0.4 * (1.+self.reg_state.left_vol as f32) / 9.,
            right * 0.4 * (1.+self.reg_state.right_vol as f32) / 9.,
        )
    }
    /// Render square channel 1: length countdown, envelope, then one step of
    /// the 8-sample duty pattern. Returns a sample in roughly [-1, 1].
    fn next_square_1(&mut self) -> f32 {
        // Length counter ticks on the ~256 Hz pulse when enabled.
        if self.reg_state.length_en_1 && self.sound_length_1 != 0 && self.length_timer == 0 {
            self.sound_length_1 -= 1;
        }
        // A latched trigger reloads the length counter.
        if self.reg_state.trigger_1 {
            self.sound_length_1 = self.reg_state.sound_length_1;
            self.reg_state.trigger_1 = false;
        }
        if self.sound_length_1 == 0 {
            return 0.;
        }
        // Volume envelope steps on the ~64 Hz pulse, every `envelope_sweep`
        // pulses, saturating at 0x0/0xf.
        if self.reg_state.envelope_sweep_1 != 0 && self.envelope_master_timer == 0 {
            if self.envelope_timer_1 == 0 {
                self.envelope_timer_1 = self.reg_state.envelope_sweep_1;
                if self.reg_state.envelope_increase_1 && self.current_vol_1 != 0xf {
                    self.current_vol_1 += 1;
                } else if !self.reg_state.envelope_increase_1 && self.current_vol_1 != 0x0 {
                    self.current_vol_1 -= 1;
                }
            } else {
                self.envelope_timer_1 -= 1;
            }
        }
        // Advance through the duty pattern at 8x the tone frequency.
        self.square_timer_1.sample_tick();
        if self.square_timer_1.is_triggered() {
            self.cycle_index_1 = (self.cycle_index_1 + 1) % 8;
        }
        SQUARE_PATTERN[self.reg_state.wave_pattern_1 as usize][self.cycle_index_1 as usize]
            * self.current_vol_1 as f32
            / 15.
    }
    /// Render square channel 2; identical to channel 1 except there is no
    /// frequency sweep unit.
    fn next_square_2(&mut self) -> f32 {
        if self.reg_state.length_en_2 && self.sound_length_2 != 0 && self.length_timer == 0 {
            self.sound_length_2 -= 1;
        }
        if self.reg_state.trigger_2 {
            self.sound_length_2 = self.reg_state.sound_length_2;
            self.reg_state.trigger_2 = false;
        }
        if self.sound_length_2 == 0 {
            return 0.;
        }
        if self.reg_state.envelope_sweep_2 != 0 && self.envelope_master_timer == 0 {
            if self.envelope_timer_2 == 0 {
                self.envelope_timer_2 = self.reg_state.envelope_sweep_2;
                if self.reg_state.envelope_increase_2 && self.current_vol_2 != 0xf {
                    self.current_vol_2 += 1;
                } else if !self.reg_state.envelope_increase_2 && self.current_vol_2 != 0x0 {
                    self.current_vol_2 -= 1;
                }
            } else {
                self.envelope_timer_2 -= 1;
            }
        }
        self.square_timer_2.sample_tick();
        if self.square_timer_2.is_triggered() {
            self.cycle_index_2 = (self.cycle_index_2 + 1) % 8;
        }
        SQUARE_PATTERN[self.reg_state.wave_pattern_2 as usize][self.cycle_index_2 as usize]
            * self.current_vol_2 as f32
            / 15.
    }
    /// Render the wave channel: step through the 32-sample pattern RAM,
    /// applying the volume shift; output is centred around zero.
    fn next_wave(&mut self) -> f32 {
        if !self.reg_state.wave.enabled {
            return 0.;
        }
        if self.reg_state.wave.length_en && self.sound_length_3 != 0 && self.length_timer == 0 {
            self.sound_length_3 -= 1;
        }
        if self.reg_state.wave.trigger {
            self.sound_length_3 = self.reg_state.wave.length;
            self.reg_state.wave.trigger = false;
        }
        if self.sound_length_3 == 0 {
            return 0.;
        }
        self.wave_timer.sample_tick();
        if self.wave_timer.is_triggered() {
            self.pattern_index_3 += 1;
            if self.pattern_index_3 as usize >= self.reg_state.wave.pattern.len() {
                self.pattern_index_3 = 0;
            }
        }
        // 4-bit sample >> volume_shift, rescaled from [0, 15] to about
        // [-0.5, 1.5] (NOTE(review): not symmetric around zero — confirm
        // the intended DC offset).
        ((self.reg_state.wave.pattern[self.pattern_index_3 as usize]
            >> self.reg_state.wave.volume_shift) as f32)
            / 7.5
            - 0.5
    }
    /// Render the noise channel: clock a 15-bit LFSR (taps at bits 0 and 1,
    /// feedback into bit 14; bit 6 as well in short mode) and output the
    /// inverted low bit scaled by the envelope volume.
    fn next_noise(&mut self) -> f32 {
        if self.reg_state.noise.length_en && self.sound_length_4 != 0 && self.length_timer == 0 {
            self.sound_length_4 -= 1;
        }
        if self.reg_state.noise.trigger {
            self.sound_length_4 = self.reg_state.noise.length as u16;
            self.reg_state.noise.trigger = false;
            self.lfsr = 0xffff;
        }
        if self.sound_length_4 == 0 {
            return 0.;
        }
        if self.reg_state.noise.envelope_sweep != 0 && self.envelope_master_timer == 0 {
            if self.envelope_timer_4 == 0 {
                self.envelope_timer_4 = self.reg_state.noise.envelope_sweep;
                if self.reg_state.noise.envelope_increase && self.current_vol_4 != 0xf {
                    self.current_vol_4 += 1;
                } else if !self.reg_state.noise.envelope_increase && self.current_vol_4 != 0x0 {
                    self.current_vol_4 -= 1;
                }
            } else {
                self.envelope_timer_4 -= 1;
            }
        }
        self.noise_timer.sample_tick();
        if self.noise_timer.is_triggered() {
            self.noise_timer = Timer::new(self.hz_frequency_4, self.sample_rate);
            let xor_result = (self.lfsr & 0b01) ^ ((self.lfsr & 0b10) >> 1);
            self.lfsr = (self.lfsr >> 1) | (xor_result << 14);
            if self.reg_state.noise.short_pattern {
                // Short mode also mirrors the feedback bit into bit 6,
                // shortening the sequence period.
                self.lfsr &= !(1 << 6);
                self.lfsr |= xor_result << 6;
            }
        }
        (!self.lfsr & 1) as f32 * self.current_vol_4 as f32 / 15. - 0.5
    }
}
/// Create a [`Synth`] for the given stream config, together with the sender
/// used to push register snapshots to it from the emulator thread.
fn new_synth(cfg: &StreamConfig) -> (Synth, Sender<SynthRegState>) {
    let (sender, receiver) = channel();
    let synth = Synth::new(receiver, cfg);
    (synth, sender)
}
/// Fractional-period trigger timer: converts an event frequency in Hz into
/// one-sample trigger pulses on a fixed sample-rate grid.
struct Timer {
    // Timer period measured in output samples (may be fractional).
    sample_period: f32,
    // Period index at which the last trigger fired.
    last_trigger: u32,
    // Output samples elapsed since the last counter reset.
    sample_counter: u64,
    // True only on the sample where a period boundary was crossed.
    trigger: bool,
    // False when constructed with frequency 0; the timer then never fires.
    enabled: bool,
}
impl Timer {
    /// A timer firing `hz_frequency` times per second when ticked once per
    /// output sample at `sample_rate` Hz. Frequency 0 yields a permanently
    /// disabled timer that never triggers.
    fn new(hz_frequency: u32, sample_rate: u32) -> Self {
        if hz_frequency == 0 {
            return Self {
                sample_period: 0.,
                last_trigger: 0,
                sample_counter: 0,
                trigger: false,
                enabled: false,
            };
        }
        // Compute the (possibly fractional) period in f64 first to limit
        // rounding error before narrowing to f32.
        let sample_period = ((sample_rate as f64) / (hz_frequency as f64)) as f32;
        Self {
            sample_period,
            last_trigger: 0,
            sample_counter: 0,
            trigger: false,
            enabled: true,
        }
    }
    /// Advance by one output sample; raises the trigger flag exactly on the
    /// samples where a new period boundary is crossed.
    fn sample_tick(&mut self) {
        if !self.enabled {
            return;
        }
        self.sample_counter += 1;
        // Which period we are in, by truncating counter / period.
        let approx_index = f32::trunc(self.sample_counter as f32 / self.sample_period) as u32;
        if approx_index != self.last_trigger {
            self.trigger = true;
            // When the counter lands (almost) exactly on a boundary, restart
            // from zero so float error cannot accumulate over time.
            if self.sample_counter as f32 % self.sample_period < 0.001 {
                self.sample_counter = 0;
                self.last_trigger = 0;
            } else {
                self.last_trigger = approx_index;
            }
        } else {
            self.trigger = false;
        }
    }
    /// Whether the most recent `sample_tick` crossed a period boundary.
    fn is_triggered(&self) -> bool {
        self.trigger
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Over 2 seconds of samples, a 440 Hz timer must fire exactly 880 times.
    #[test]
    fn wave_timer() {
        let sample_rate = 51200;
        let freq = 440;
        let mut timer = Timer::new(freq, sample_rate);
        let mut timer_count = 0;
        for _ in 0..(sample_rate * 2) {
            timer.sample_tick();
            if timer.is_triggered() {
                timer_count += 1;
            }
        }
        assert_eq!(timer_count, freq * 2);
    }

    /// Same as above with a low frequency that divides the sample rate
    /// exactly, exercising the counter-reset path.
    #[test]
    fn wave_timer_lowfreq() {
        let sample_rate = 51200;
        let freq = 100;
        let mut timer = Timer::new(freq, sample_rate);
        let mut timer_count = 0;
        for _ in 0..(sample_rate * 2) {
            timer.sample_tick();
            if timer.is_triggered() {
                timer_count += 1;
            }
        }
        assert_eq!(timer_count, freq * 2);
    }
}
|
extern crate serde;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate nom;
extern crate regex;
extern crate xrl;
extern crate futures;
mod client;
mod disambiguation_map;
mod insert_mode;
mod key;
mod maps;
mod mode;
mod mode_map;
mod normal_mode;
mod op;
mod ordered_vec_map;
mod pending_mode;
mod state;
mod state_machine;
mod typeahead;
pub mod vixi;
|
use crate::ast::syntax_type::{SimpleSyntaxType, SyntaxType};
use crate::types::{Bool, Color, Float, Int, PineFrom, PineRef, RefData, Series, Tuple, NA};
use std::mem;
/// Convert a runtime value into the representation demanded by `dest_type`.
///
/// Series destinations wrap the value in the corresponding `Series<T>`;
/// simple destinations unwrap to the plain type; tuples convert
/// element-wise against the destination tuple's element types. `Void` and
/// any unhandled destination pass the value through unchanged.
///
/// Panics if the implicit conversion is undefined for `val`
/// (`implicity_from(...).unwrap()`) — callers are expected to have
/// type-checked beforehand.
pub fn convert<'a>(val: PineRef<'a>, dest_type: &SyntaxType<'a>) -> PineRef<'a> {
    match dest_type {
        SyntaxType::Series(SimpleSyntaxType::Bool) => {
            let s: RefData<Series<Bool>> = Series::implicity_from(val).unwrap();
            s.into_pf()
        }
        SyntaxType::Series(SimpleSyntaxType::Na) => {
            let s: RefData<Series<NA>> = Series::implicity_from(val).unwrap();
            s.into_pf()
        }
        SyntaxType::Series(SimpleSyntaxType::Int) => {
            let s: RefData<Series<Int>> = Series::implicity_from(val).unwrap();
            s.into_pf()
        }
        SyntaxType::Series(SimpleSyntaxType::Float) => {
            let s: RefData<Series<Float>> = Series::implicity_from(val).unwrap();
            s.into_pf()
        }
        SyntaxType::Series(SimpleSyntaxType::Color) => {
            let s: RefData<Series<Color>> = Series::implicity_from(val).unwrap();
            s.into_pf()
        }
        SyntaxType::Series(SimpleSyntaxType::String) => {
            let s: RefData<Series<String>> = Series::implicity_from(val).unwrap();
            s.into_pf()
        }
        SyntaxType::Simple(SimpleSyntaxType::Bool) => Bool::implicity_from(val).unwrap().into_pf(),
        SyntaxType::Simple(SimpleSyntaxType::Na) => NA::implicity_from(val).unwrap().into_pf(),
        SyntaxType::Simple(SimpleSyntaxType::Int) => Int::implicity_from(val).unwrap().into_pf(),
        SyntaxType::Simple(SimpleSyntaxType::Float) => {
            Float::implicity_from(val).unwrap().into_pf()
        }
        SyntaxType::Simple(SimpleSyntaxType::Color) => {
            Color::implicity_from(val).unwrap().into_pf()
        }
        SyntaxType::Simple(SimpleSyntaxType::String) => {
            String::implicity_from(val).unwrap().into_pf()
        }
        SyntaxType::Void => val,
        SyntaxType::Tuple(tuple) => {
            // Take the elements out of the source tuple (leaving it empty)
            // and convert each one against its destination element type.
            let mut tuple_val = Tuple::implicity_from(val).unwrap();
            let res: Vec<_> = tuple
                .iter()
                .zip(mem::replace(&mut tuple_val.0, vec![]).into_iter())
                .map(|d| convert(d.1, d.0))
                .collect();
            PineRef::new_box(Tuple(res))
        }
        // Any other destination type: pass the value through untouched.
        _ => val,
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::{
cloud_action_provider::get_cloud_actions,
mod_manager::ModManager,
story_context_store::StoryContextStore,
story_manager::StoryManager,
story_storage::{LedgerStorage, MemoryStorage, StoryStorage},
suggestion_providers::{
ActionSuggestionsProvider, ContextualSuggestionsProvider, PackageSuggestionsProvider,
StorySuggestionsProvider,
},
suggestions_manager::SuggestionsManager,
suggestions_service::SuggestionsService,
},
failure::{Error, ResultExt},
fidl_fuchsia_app_discover::{
DiscoverRegistryRequestStream, SessionDiscoverContextRequestStream,
SuggestionsRequestStream,
},
fidl_fuchsia_modular::{
EntityResolverMarker, LifecycleRequest, LifecycleRequestStream, PuppetMasterMarker,
},
fuchsia_async as fasync,
fuchsia_component::{client::connect_to_service, server::ServiceFs},
fuchsia_syslog::{self as syslog, macros::*},
futures::prelude::*,
parking_lot::Mutex,
std::sync::Arc,
};
#[macro_use]
mod testing;
mod action_match;
mod cloud_action_provider;
mod constants;
mod discover_registry;
mod indexing;
mod local_action_provider;
mod mod_manager;
mod models;
mod session_context;
mod story_context_store;
mod story_graph;
mod story_manager;
mod story_module;
mod story_storage;
mod suggestion_providers;
mod suggestions_manager;
mod suggestions_service;
mod utils;
// The directory name where the discovermgr FIDL services are exposed.
static SERVICE_DIRECTORY: &str = "svc";
/// One wrapped request stream per FIDL protocol this component exposes.
/// Each incoming connection from the `ServiceFs` is wrapped in a variant
/// and dispatched to the matching server by `run_fidl_service`.
enum IncomingServices {
    DiscoverRegistry(DiscoverRegistryRequestStream),
    Suggestions(SuggestionsRequestStream),
    Lifecycle(LifecycleRequestStream),
    SessionDiscoverContext(SessionDiscoverContextRequestStream),
}
/// Serves the modular `Lifecycle` protocol: a `Terminate` request causes the
/// whole process to exit immediately with status 0.
async fn run_lifecycle_server(mut stream: LifecycleRequestStream) -> Result<(), Error> {
    while let Some(lifecycle_request) =
        stream.try_next().await.context("Error running lifecycle")?
    {
        match lifecycle_request {
            // Honor the termination request on the spot; nothing after this
            // line ever runs for a `Terminate`.
            LifecycleRequest::Terminate { .. } => std::process::exit(0),
        }
    }
    Ok(())
}
/// Handle incoming service requests by dispatching each connection to the
/// server implementation for its protocol, sharing the managers via `Arc<Mutex<_>>`.
async fn run_fidl_service(
    story_context_store: Arc<Mutex<StoryContextStore>>,
    story_manager: Arc<Mutex<StoryManager>>,
    suggestions_manager: Arc<Mutex<SuggestionsManager>>,
    mod_manager: Arc<Mutex<ModManager<StoryContextStore>>>,
    incoming_service_stream: IncomingServices,
) -> Result<(), Error> {
    match incoming_service_stream {
        IncomingServices::DiscoverRegistry(stream) => {
            discover_registry::run_server(story_context_store, mod_manager, stream).await
        }
        IncomingServices::Suggestions(stream) => {
            // Each Suggestions connection gets its own service wrapper over
            // the shared context store and manager.
            let mut service = SuggestionsService::new(story_context_store, suggestions_manager);
            service.handle_client(stream).await
        }
        IncomingServices::Lifecycle(stream) => run_lifecycle_server(stream).await,
        IncomingServices::SessionDiscoverContext(stream) => {
            session_context::run_server(stream, story_manager).await
        }
    }
}
// Component entry point: wires up storage, managers and suggestion providers,
// then serves all FIDL protocols concurrently until the component is torn down.
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
    syslog::init_with_tags(&["discovermgr"])?;
    let entity_resolver = connect_to_service::<EntityResolverMarker>()
        .context("failed to connect to entity resolver")?;
    let story_context_store = Arc::new(Mutex::new(StoryContextStore::new(entity_resolver)));
    let puppet_master =
        connect_to_service::<PuppetMasterMarker>().context("failed to connect to puppet master")?;
    // Cloud actions are best-effort: a fetch failure is logged and treated as
    // an empty index rather than aborting startup.
    let cloud_actions = get_cloud_actions().await.unwrap_or_else(|e| {
        fx_log_err!("Error fetching cloud actions index: {}", e);
        vec![]
    });
    let actions_arc = Arc::new(cloud_actions);
    // Prefer Ledger-backed persistence; fall back to in-memory storage when
    // the Ledger cannot be created.
    let storage =
        LedgerStorage::new().map(|s| Box::new(s) as Box<dyn StoryStorage>).unwrap_or_else(|_| {
            fx_log_err!("Error in creating LedgerStorage, Use MemoryStorage instead");
            Box::new(MemoryStorage::new()) as Box<dyn StoryStorage>
        });
    let story_manager = Arc::new(Mutex::new(StoryManager::new(storage)));
    let mod_manager = Arc::new(Mutex::new(ModManager::new(
        story_context_store.clone(),
        puppet_master,
        story_manager.clone(),
        actions_arc.clone(),
    )));
    // Register every suggestion source with the manager.
    let mut suggestions_manager = SuggestionsManager::new(mod_manager.clone());
    suggestions_manager.register_suggestions_provider(Box::new(
        ContextualSuggestionsProvider::new(actions_arc.clone()),
    ));
    suggestions_manager
        .register_suggestions_provider(Box::new(ActionSuggestionsProvider::new(actions_arc)));
    suggestions_manager.register_suggestions_provider(Box::new(StorySuggestionsProvider::new(
        story_manager.clone(),
    )));
    suggestions_manager.register_suggestions_provider(Box::new(PackageSuggestionsProvider::new()));
    let suggestions_manager_ref = Arc::new(Mutex::new(suggestions_manager));
    // Expose the FIDL services under `svc/` and serve connections concurrently.
    let mut fs = ServiceFs::new_local();
    fs.dir(SERVICE_DIRECTORY)
        .add_fidl_service(IncomingServices::DiscoverRegistry)
        .add_fidl_service(IncomingServices::Suggestions)
        .add_fidl_service(IncomingServices::Lifecycle)
        .add_fidl_service(IncomingServices::SessionDiscoverContext);
    fs.take_and_serve_directory_handle()?;
    const MAX_CONCURRENT: usize = 10_000;
    let fut = fs.for_each_concurrent(MAX_CONCURRENT, |incoming_service_stream| {
        run_fidl_service(
            story_context_store.clone(),
            story_manager.clone(),
            suggestions_manager_ref.clone(),
            mod_manager.clone(),
            incoming_service_stream,
        )
        // Per-connection errors are logged; they never take the component down.
        .unwrap_or_else(|e| fx_log_err!("{:?}", e))
    });
    fut.await;
    Ok(())
}
#[cfg(test)]
mod test {
    use {
        super::*,
        crate::models::Action,
        fuchsia_async as fasync,
        std::{collections::HashSet, iter::FromIterator},
    };
    // Verify the logic for removing duplicates: merging the two action lists
    // through a HashSet must keep exactly one copy of the duplicated action.
    #[fasync::run_singlethreaded(test)]
    async fn test_duplicates() -> Result<(), Error> {
        let cloud_actions: Vec<Action> =
            serde_json::from_str(include_str!("../test_data/test_actions.json")).unwrap();
        // test_actions_dupes contains 1 duplicate and 1 new
        let local_actions: Vec<Action> =
            serde_json::from_str(include_str!("../test_data/test_actions_dupes.json")).unwrap();
        let cloud_actions_len = cloud_actions.len();
        // This is the logic used above
        let actions: Vec<Action> = HashSet::<Action>::from_iter(
            cloud_actions.into_iter().chain(local_actions.into_iter()),
        )
        .into_iter()
        .collect::<Vec<Action>>();
        // check if the new and duplicated are added/filtered
        assert_eq!(cloud_actions_len + 1, actions.len());
        Ok(())
    }
}
|
pub mod utilities;
use proffer::{Attribute, SrcCode};
#[test]
fn test_attribute_attr() {
    let ann = "#[attr]";
    let attribute = Attribute::from(ann);
    // An item-level attribute (no `!`) must parse to the `ItemAttr` variant...
    if let &Attribute::ItemAttr(ref s) = &attribute {
        assert_eq!(&s, &ann);
    } else {
        panic!("Expected to match to Attribute::ItemAttr, got {:?}", ann);
    }
    // ...and round-trip unchanged through code generation.
    assert_eq!(&attribute.generate(), ann);
}
#[test]
fn test_attribute_mod_attr() {
    let ann = "#![foo_attr]";
    let attribute = Attribute::from(ann);
    // A scope-level attribute (`#![...]`) must parse to the `ScopeAttr` variant...
    if let &Attribute::ScopeAttr(ref s) = &attribute {
        assert_eq!(&s, &ann);
    } else {
        panic!("Expected to match to Attribute::ScopeAttr, got {:?}", ann);
    }
    // ...and round-trip unchanged through code generation.
    assert_eq!(&attribute.generate(), ann);
}
|
//! https://github.com/lumen/otp/tree/lumen/lib/inets/src/http_lib
use super::*;
// Each invocation presumably expands (via the parent test harness macro) to a
// #[test] compiling the named OTP module from `relative_directory_path()`
// with the `includes()` search paths — macro defined outside this file.
test_compiles_lumen_otp!(http_chunk);
test_compiles_lumen_otp!(http_request);
test_compiles_lumen_otp!(http_response);
test_compiles_lumen_otp!(http_transport);
test_compiles_lumen_otp!(http_uri);
test_compiles_lumen_otp!(http_util);
/// Include search paths: the parent suite's paths plus this app's own
/// `http_lib` source directory.
fn includes() -> Vec<&'static str> {
    let mut paths = super::includes();
    paths.push("lib/inets/src/http_lib");
    paths
}
/// Location of this suite's sources: the parent directory plus `http_lib`.
fn relative_directory_path() -> PathBuf {
    let base = super::relative_directory_path();
    base.join("http_lib")
}
|
//! Floor slabs (Плиты перекрытия).
use crate::sig::rab_e::sec::BoxSec;
use crate::sig::HasWrite;
use nom::{
bytes::complete::take,
number::complete::{le_f32, le_u16, le_u8},
IResult,
};
use std::borrow::Borrow;
use std::fmt;
/// One floor-slab record as stored in the `rab.e` signature; field order
/// mirrors the on-disk byte layout (see `read_slab` / `write`).
#[derive(Debug)]
pub struct Slab {
    //1b
    bf: u8, //bF flag. 0=no, 1=yes
    b: f32, //Wall thickness, cm
    area: f32, //Slab area
    wtf1: f32, // unknown field — meaning not yet reverse-engineered
    poly_from: u16, //From polyline N
    poly_to: u16, //To polyline N
    poly_num: u16, //Number of polylines
    c_load: f32, //Permanent (dead) load on the slab
    l_load: f32, //Long-term load on the slab
    s_load: f32, //Short-term load on the slab
    //12b
    wtf2: f32, // unknown field
    //2b
    unc_num: u16, //0=no analysis, 3=FEM analysis. When 3, an extra 6*4b is appended at the end of the signature
    //12b
    cons_1: u16, //Always 1
    mat: u16, //Slab material number
    wtf3: f32, // unknown field
    emerge: u8, //Appears after: 0=the whole building, 1=floor N, 2=its own floor
    em_etazh: u8, //Appears after floor N
    //58b
    ws: Vec<u8>, //85b of raw filler/unknown bytes, preserved for round-tripping
    load_vec: Vec<u8>, //0b without analysis, 24b with FEM analysis
}
impl HasWrite for Slab {
    /// Serializes the slab back into its on-disk layout. The interleaved
    /// slices of `ws` re-insert the filler bytes captured by `read_slab`,
    /// making `write` the byte-exact inverse of parsing (see the tests).
    fn write(&self) -> Vec<u8> {
        let mut out: Vec<u8> = vec![];
        out.extend(&self.ws[0..1]);
        out.extend(&self.bf.to_le_bytes());
        out.extend(&self.b.to_le_bytes());
        out.extend(&self.area.to_le_bytes());
        out.extend(&self.wtf1.to_le_bytes());
        out.extend(&self.poly_from.to_le_bytes());
        out.extend(&self.poly_to.to_le_bytes());
        out.extend(&self.poly_num.to_le_bytes());
        out.extend(&self.c_load.to_le_bytes());
        out.extend(&self.l_load.to_le_bytes());
        out.extend(&self.s_load.to_le_bytes());
        out.extend(&self.ws[1..13]);
        out.extend(&self.wtf2.to_le_bytes());
        out.extend(&self.ws[13..15]);
        out.extend(&self.unc_num.to_le_bytes());
        out.extend(&self.ws[15..27]);
        out.extend(&self.cons_1.to_le_bytes());
        out.extend(&self.mat.to_le_bytes());
        out.extend(&self.wtf3.to_le_bytes());
        out.extend(&self.emerge.to_le_bytes());
        out.extend(&self.em_etazh.to_le_bytes());
        out.extend(&self.ws[27..85]);
        out.extend(&self.load_vec);
        out
    }
    /// Slab records carry no name in the signature.
    fn name(&self) -> &str {
        ""
    }
}
impl fmt::Display for Slab {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"b: {}, loads |const: {}, long: {}, short: {}|",
&self.b, &self.c_load, &self.l_load, &self.s_load
)
}
}
/// Parses one `Slab` record from a little-endian byte stream; unknown
/// regions are captured verbatim into `ws` so `write` can round-trip the
/// input byte-for-byte.
pub fn read_slab(i: &[u8]) -> IResult<&[u8], Slab> {
    let (i, ws1) = take(1u8)(i)?;
    let (i, bf) = le_u8(i)?;
    let (i, b) = le_f32(i)?;
    let (i, area) = le_f32(i)?;
    let (i, wtf1) = le_f32(i)?;
    let (i, poly_from) = le_u16(i)?;
    let (i, poly_to) = le_u16(i)?;
    let (i, poly_num) = le_u16(i)?;
    let (i, c_load) = le_f32(i)?;
    let (i, l_load) = le_f32(i)?;
    let (i, s_load) = le_f32(i)?;
    let (i, ws2) = take(12u8)(i)?;
    let (i, wtf2) = le_f32(i)?;
    let (i, ws3) = take(2u8)(i)?;
    let (i, unc_num) = le_u16(i)?;
    let (i, ws4) = take(12u8)(i)?;
    let (i, cons_1) = le_u16(i)?;
    let (i, mat) = le_u16(i)?;
    let (i, wtf3) = le_f32(i)?;
    let (i, emerge) = le_u8(i)?;
    let (i, em_etazh) = le_u8(i)?;
    let (i, ws5) = take(58u8)(i)?;
    // A trailing 24-byte (6 * f32) load block exists only for unc_num == 3 (FEM).
    let mut last = 0u8;
    if unc_num as usize == 3 {
        last = 24u8
    }
    let (i, load_vec) = take(last)(i)?;
    let load_vec = load_vec.to_vec();
    // Concatenate all skipped filler regions in read order (85 bytes total).
    let mut ws = ws1.to_vec();
    ws.extend_from_slice(ws2);
    ws.extend_from_slice(ws3);
    ws.extend_from_slice(ws4);
    ws.extend_from_slice(ws5);
    Ok((
        i,
        Slab {
            bf,
            b,
            area,
            wtf1,
            poly_from,
            poly_to,
            poly_num,
            c_load,
            l_load,
            s_load,
            wtf2,
            unc_num,
            cons_1,
            mat,
            wtf3,
            emerge,
            em_etazh,
            ws,
            load_vec,
        },
    ))
}
/// Round-trip helper: parses the fixture at `path_str` and asserts that
/// re-serializing reproduces the original bytes exactly.
#[cfg(test)]
fn test_slab(path_str: &str) {
    use crate::tests::rab_e_sig_test::read_test_sig;
    let original_in = read_test_sig(path_str);
    let (_, slab) = read_slab(&original_in).expect("couldn't read_slab");
    assert_eq!(original_in, slab.write());
}
// Round-trip tests: each reads one captured signature fixture and checks that
// parse + write reproduces the original bytes (see `test_slab`).
#[test]
fn slab_1_etazh4_test() {
    test_slab("test_sig/slabs/slab_1_etazh4.test");
}
#[test]
fn slab_1_f_all_test() {
    test_slab("test_sig/slabs/slab_1_f_all.test");
}
#[test]
fn slab_1_mat_test() {
    test_slab("test_sig/slabs/slab_1_mat.test");
}
#[test]
fn slab_1_nf_test() {
    test_slab("test_sig/slabs/slab_1_nf.test");
}
#[test]
fn slab_1_opening_test() {
    test_slab("test_sig/slabs/slab_1_opening.test");
}
#[test]
fn slab_1_self_etazh_test() {
    test_slab("test_sig/slabs/slab_1_self_etazh.test");
}
#[test]
fn slab_1_triple_opening_1_test() {
    test_slab("test_sig/slabs/slab_1_triple_opening_1.test");
}
#[test]
fn slab_1_triple_opening_2_test() {
    test_slab("test_sig/slabs/slab_1_triple_opening_2.test");
}
#[test]
fn slab_1_triple_opening_3_test() {
    test_slab("test_sig/slabs/slab_1_triple_opening_3.test");
}
#[test]
fn slab_3angle_test() {
    test_slab("test_sig/slabs/slab_3angle.test");
}
#[test]
fn slab_dabble_1_test() {
    test_slab("test_sig/slabs/slab_dabble_1.test");
}
#[test]
fn slab_dabble_2_test() {
    test_slab("test_sig/slabs/slab_dabble_2.test");
}
#[test]
fn part_test() {
    test_slab("test_sig/slabs/s_slab.test");
}
// Field-level check: the parsed fixture must equal a Slab built by hand with
// the expected values. Serialized forms are compared since Slab lacks PartialEq.
#[test]
fn s_slab_full_value_test() {
    use crate::tests::rab_e_sig_test::read_test_sig;
    let original_in = read_test_sig("test_sig/slabs/s_slab.test");
    let (_, slab) = read_slab(&original_in).expect("couldn't read_slab");
    // The fixture's 85 filler bytes are the sequence 1..=85.
    let mut ws = vec![];
    for i in 1..=85 {
        ws.push(i);
    }
    // unc_num == 3, so 24 bytes (6 little-endian f32 values) of FEM load data follow.
    let mut load_vec: Vec<u8> = vec![];
    load_vec.extend(1.1f32.to_le_bytes());
    load_vec.extend(2.2f32.to_le_bytes());
    load_vec.extend(3.3f32.to_le_bytes());
    load_vec.extend(1.1f32.to_le_bytes());
    load_vec.extend(2.2f32.to_le_bytes());
    load_vec.extend(3.3f32.to_le_bytes());
    let c_slab = Slab {
        bf: 8u8,
        b: 60.000_004f32,
        area: 3.999_998_3f32,
        wtf1: 2f32,
        poly_from: 0u16,
        poly_to: 0u16,
        poly_num: 1u16,
        c_load: 1.1f32,
        l_load: 2.2f32,
        s_load: 3.3f32,
        wtf2: 0.005f32,
        unc_num: 3u16,
        cons_1: 769u16, // ! fixture deviates from the usual "always 1"; kept as captured
        mat: 1u16,
        wtf3: 95.659_966f32,
        emerge: 0u8,
        em_etazh: 0u8,
        ws,
        load_vec,
    };
    assert_eq!(slab.write(), c_slab.write())
}
|
import option::none;
import option::some;
import util::orb;
// NOTE(review): this file is pre-1.0 "rustboot"-era Rust (`ret`, `vec[T]`,
// `native mod`, `alt`); it does not compile with any modern rustc and is
// documented here without restyling.
type vbuf = rustrt::vbuf;
// A binary operator over two element types producing a third.
type operator2[T, U, V] = fn(&T, &U) -> V ;
// A vector whose elements may or may not be mutable (`mutable?`).
type array[T] = vec[mutable? T];
// Low-level vector intrinsics provided by the runtime.
native "rust" mod rustrt {
    type vbuf;
    fn vec_buf[T](v: vec[T], offset: uint) -> vbuf;
    fn vec_len[T](v: vec[T]) -> uint;
    /**
     * Sometimes we modify the vec internal data via vec_buf and need to
     * update the vec's fill length accordingly.
     */
    fn vec_len_set[T](v: vec[T], n: uint);
    /**
     * The T in vec_alloc[T, U] is the type of the vec to allocate. The
     * U is the type of an element in the vec. So to allocate a vec[U] we
     * want to invoke this as vec_alloc[vec[U], U].
     */
    fn vec_alloc[T, U](n_elts: uint) -> vec[U];
    fn vec_alloc_mut[T, U](n_elts: uint) -> vec[mutable U];
    fn refcount[T](v: vec[T]) -> uint;
    fn vec_print_debug_info[T](v: vec[T]);
    fn vec_from_vbuf[T](v: vbuf, n_elts: uint) -> vec[T];
    fn unsafe_vec_to_mut[T](v: vec[T]) -> vec[mutable T];
}
// Allocates an empty vector with room for n_elts elements.
fn alloc[T](n_elts: uint) -> vec[T] {
    ret rustrt::vec_alloc[vec[T], T](n_elts);
}
// Mutable-element counterpart of alloc.
fn alloc_mut[T](n_elts: uint) -> vec[mutable T] {
    ret rustrt::vec_alloc_mut[vec[mutable T], T](n_elts);
}
// Reports v's refcount, normalizing away the reference taken by this call
// itself unless the debug const_refcount sentinel is returned.
fn refcount[T](v: array[T]) -> uint {
    let r = rustrt::refcount[T](v);
    if r == dbg::const_refcount { ret r; } else { ret r - 1u; }
}
// Builds a vector of n_elts elements from a raw runtime buffer.
fn vec_from_vbuf[T](v: vbuf, n_elts: uint) -> vec[T] {
    ret rustrt::vec_from_vbuf[T](v, n_elts);
}
// FIXME: Remove me; this is a botch to get around rustboot's bad typechecker.
fn empty[T]() -> vec[T] { ret alloc[T](0u); }
// FIXME: Remove me; this is a botch to get around rustboot's bad typechecker.
fn empty_mut[T]() -> vec[mutable T] { ret alloc_mut[T](0u); }
// An element initializer: maps an index to the element stored there.
type init_op[T] = fn(uint) -> T ;
// Builds a vector of n_elts elements, the i-th produced by op(i).
fn init_fn[@T](op: &init_op[T], n_elts: uint) -> vec[T] {
    let v: vec[T] = alloc[T](n_elts);
    let i: uint = 0u;
    while i < n_elts { v += [op(i)]; i += 1u; }
    ret v;
}
// Mutable-element counterpart of init_fn.
fn init_fn_mut[@T](op: &init_op[T], n_elts: uint) -> vec[mutable T] {
    let v: vec[mutable T] = alloc_mut[T](n_elts);
    let i: uint = 0u;
    while i < n_elts { v += [mutable op(i)]; i += 1u; }
    ret v;
}
// init_elt: creates and returns a vector of length n_elts, filled with
// that many copies of element t.
fn init_elt[@T](t: &T, n_elts: uint) -> vec[T] {
    /**
     * FIXME (issue #81): should be:
     *
     * fn elt_op[T](&T x, uint i) -> T { ret x; }
     * let init_op[T] inner = bind elt_op[T](t, _);
     * ret init_fn[T](inner, n_elts);
     */
    let v: vec[T] = alloc[T](n_elts);
    let i: uint = n_elts;
    while i > 0u { i -= 1u; v += [t]; }
    ret v;
}
// Mutable-element counterpart of init_elt.
fn init_elt_mut[@T](t: &T, n_elts: uint) -> vec[mutable T] {
    let v: vec[mutable T] = alloc_mut[T](n_elts);
    let i: uint = n_elts;
    while i > 0u { i -= 1u; v += [mutable t]; }
    ret v;
}
// Thin wrappers over the rustrt vector intrinsics.
fn buf[T](v: array[T]) -> vbuf { ret rustrt::vec_buf[T](v, 0u); }
fn len[T](v: array[T]) -> uint { ret rustrt::vec_len[T](v); }
fn len_set[T](v: array[T], n: uint) { rustrt::vec_len_set[T](v, n); }
// Buffer pointer at the given element offset; offset must be in bounds.
fn buf_off[T](v: array[T], offset: uint) -> vbuf {
    assert (offset < len[T](v));
    ret rustrt::vec_buf[T](v, offset);
}
fn print_debug_info[T](v: array[T]) { rustrt::vec_print_debug_info[T](v); }
// FIXME: typestate precondition (list is non-empty)
// Returns the last element of v.
fn last[@T](v: array[T]) -> option::t[T] {
    let l = len[T](v);
    if l == 0u { ret none[T]; }
    ret some[T](v.(l - 1u));
}
// Returns elements from [start..end) from v.
fn slice[@T](v: array[T], start: uint, end: uint) -> vec[T] {
    assert (start <= end);
    assert (end <= len[T](v));
    let result = alloc[T](end - start);
    let i: uint = start;
    while i < end { result += [v.(i)]; i += 1u; }
    ret result;
}
// FIXME: Should go away eventually.
fn slice_mut[@T](v: array[T], start: uint, end: uint) -> vec[mutable T] {
    assert (start <= end);
    assert (end <= len[T](v));
    let result = alloc_mut[T](end - start);
    let i: uint = start;
    while i < end { result += [mutable v.(i)]; i += 1u; }
    ret result;
}
// Removes and returns the first element; rebuilds the vector from the tail.
fn shift[@T](v: &mutable array[T]) -> T {
    let ln = len[T](v);
    assert (ln > 0u);
    let e = v.(0);
    v = slice[T](v, 1u, ln);
    ret e;
}
// Removes and returns the last element; rebuilds the vector from the prefix.
fn pop[@T](v: &mutable array[T]) -> T {
    let ln = len[T](v);
    assert (ln > 0u);
    ln -= 1u;
    let e = v.(ln);
    v = slice[T](v, 0u, ln);
    ret e;
}
// Returns the last element without removing it.
fn top[@T](v: &array[T]) -> T {
    let ln = len[T](v);
    assert (ln > 0u);
    ret v.(ln - 1u);
}
fn push[@T](v: &mutable array[T], t: &T) { v += [t]; }
// Prepends t by building a fresh vector.
fn unshift[@T](v: &mutable array[T], t: &T) {
    let rs = alloc[T](len[T](v) + 1u);
    rs += [t];
    rs += v;
    v = rs;
}
// Appends n copies of initval.
fn grow[@T](v: &mutable array[T], n: uint, initval: &T) {
    let i: uint = n;
    while i > 0u { i -= 1u; v += [initval]; }
}
// Sets v.(index) = val, growing the vector with initval as needed.
fn grow_set[@T](v: &mutable vec[mutable T], index: uint, initval: &T,
                val: &T) {
    let length = vec::len(v);
    if index >= length { grow(v, index - length + 1u, initval); }
    v.(index) = val;
}
// Appends n elements produced by init_fn.
fn grow_init_fn[@T](v: &mutable array[T], n: uint, init_fn: fn() -> T ) {
    let i: uint = n;
    while i > 0u { i -= 1u; v += [init_fn()]; }
}
// Sets v.(index) = val, filling any gap with init_fn() results.
fn grow_init_fn_set[@T](v: &mutable array[T], index: uint, init_fn: fn() -> T,
                        val: &T) {
    let length = vec::len(v);
    if index >= length { grow_init_fn(v, index - length + 1u, init_fn); }
    v.(index) = val;
}
// Returns a new vector with f applied to each element of v.
fn map[@T, @U](f: &fn(&T) -> U , v: &vec[T]) -> vec[U] {
    let rs: vec[U] = alloc[U](len[T](v));
    for ve: T in v { rs += [f(ve)]; }
    ret rs;
}
// Applies f to each element, keeping only the `some` results.
fn filter_map[@T, @U](f: &fn(&T) -> option::t[U] , v: &vec[T]) -> vec[U] {
    let rs: vec[U] = [];
    for ve: T in v { alt f(ve) { some(elt) { rs += [elt]; } none. { } } }
    ret rs;
}
// Element-wise combination of two vectors; fails if lengths differ.
fn map2[@T, @U, @V](f: &operator2[T, U, V], v0: &vec[T], v1: &vec[U])
        -> vec[V] {
    let v0_len = len[T](v0);
    if v0_len != len[U](v1) { fail; }
    let u: vec[V] = alloc[V](v0_len);
    let i = 0u;
    while i < v0_len { u += [f({ v0.(i) }, { v1.(i) })]; i += 1u; }
    ret u;
}
// First element satisfying f, if any.
fn find[@T](f: fn(&T) -> bool , v: &vec[T]) -> option::t[T] {
    for elt: T in v { if f(elt) { ret some[T](elt); } }
    ret none[T];
}
// Index of the first element equal to x, if any.
fn position[@T](x: &T, v: &array[T]) -> option::t[uint] {
    let i: uint = 0u;
    while i < len(v) { if x == v.(i) { ret some[uint](i); } i += 1u; }
    ret none[uint];
}
// Index of the first element satisfying f, if any.
fn position_pred[T](f: fn(&T) -> bool , v: &vec[T]) -> option::t[uint] {
    let i: uint = 0u;
    while i < len(v) { if f(v.(i)) { ret some[uint](i); } i += 1u; }
    ret none[uint];
}
// True if some element of v equals x.
fn member[T](x: &T, v: &array[T]) -> bool {
    for elt: T in v { if x == elt { ret true; } }
    ret false;
}
// Number of elements of v equal to x.
fn count[T](x: &T, v: &array[T]) -> uint {
    let cnt = 0u;
    for elt: T in v { if x == elt { cnt += 1u; } }
    ret cnt;
}
// NOTE(review): despite the name, this recurses on the tail before applying
// p to the head, so elements are combined starting from the LAST one —
// confirm callers only rely on it with order-insensitive operators (see `or`).
fn foldl[@T, @U](p: fn(&U, &T) -> U , z: &U, v: &vec[T]) -> U {
    let sz = len[T](v);
    if sz == 0u {
        ret z;
    } else {
        let rest = slice[T](v, 1u, sz);
        ret p(foldl[T, U](p, z, rest), v.(0));
    }
}
// Splits a vector of pairs into a pair of vectors (recursively).
fn unzip[@T, @U](v: &vec[{_0: T, _1: U}]) -> {_0: vec[T], _1: vec[U]} {
    let sz = len(v);
    if sz == 0u {
        ret {_0: alloc[T](0u), _1: alloc[U](0u)};
    } else {
        let rest = slice(v, 1u, sz);
        let tl = unzip[T, U](rest);
        let a = [v.(0)._0];
        let b = [v.(0)._1];
        ret {_0: a + tl._0, _1: b + tl._1};
    }
}
// FIXME make the lengths being equal a constraint
// NOTE(review): the head pair is pushed AFTER the zipped tail, so the result
// order appears reversed relative to the inputs — verify against callers.
fn zip[@T, @U](v: &vec[T], u: &vec[U]) -> vec[{_0: T, _1: U}] {
    let sz = len(v);
    assert (sz == len(u));
    if sz == 0u {
        ret alloc(0u);
    } else {
        let rest = zip(slice(v, 1u, sz), slice(u, 1u, sz));
        vec::push(rest, {_0: v.(0), _1: u.(0)});
        ret rest;
    }
}
// Boolean OR-reduction of a vector.
fn or(v: &vec[bool]) -> bool {
    let f = orb;
    ret vec::foldl[bool, bool](f, false, v);
}
// True if f holds for at least one element.
fn any[T](f: &fn(&T) -> bool , v: &vec[T]) -> bool {
    for t: T in v { if f(t) { ret true; } }
    ret false;
}
// True if f holds for every element.
fn all[T](f: &fn(&T) -> bool , v: &vec[T]) -> bool {
    for t: T in v { if !f(t) { ret false; } }
    ret true;
}
// Shallow copy of v.
fn clone[@T](v: &vec[T]) -> vec[T] { ret slice[T](v, 0u, len[T](v)); }
// Appends the payload of o to v when present.
fn plus_option[@T](v: &mutable vec[T], o: &option::t[T]) {
    alt o { none. { } some(x) { v += [x]; } }
}
// Keeps only the `some` payloads of v.
fn cat_options[@T](v: &vec[option::t[T]]) -> vec[T] {
    let rs: vec[T] = [];
    for o: option::t[T] in v { alt o { none. { } some(t) { rs += [t]; } } }
    ret rs;
}
// TODO: Remove in favor of built-in "freeze" operation when it's implemented.
fn freeze[@T](v: vec[mutable T]) -> vec[T] {
    let result: vec[T] = [];
    for elem: T in v { result += [elem]; }
    ret result;
}
// Swaps two elements in a vector
fn swap[@T](v: &vec[mutable T], a: uint, b: uint) {
    let t: T = v.(a);
    v.(a) = v.(b);
    v.(b) = t;
}
// In place vector reversal
fn reverse[@T](v: &vec[mutable T]) {
    let i: uint = 0u;
    let ln = len[T](v);
    while i < ln / 2u { swap(v, i, ln - i - 1u); i += 1u; }
}
// Functional vector reversal. Returns a reversed copy of v.
fn reversed[@T](v: vec[T]) -> vec[T] {
    let rs: vec[T] = [];
    let i = len[T](v);
    if i == 0u { ret rs; } else { i -= 1u; }
    while i != 0u { push[T](rs, v.(i)); i -= 1u; }
    push[T](rs, v.(0));
    ret rs;
}
/// Truncates the vector to length `new_len`.
/// FIXME: This relies on a typechecker bug (covariance vs. invariance).
fn truncate[@T](v: &mutable vec[mutable? T], new_len: uint) {
    v = slice[T](v, 0u, new_len);
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
|
use iface::iface::example_one::ExampleOne;
use iface::iface::example_one::SearchQuery;
use iface::iface::example_one::SearchResult;
use apilib::apilib::request::TRequest;
use apilib::apilib::response::TResponse;
/// Stateless implementation of the `ExampleOne` service interface.
/// `derive(new)` — presumably the `derive-new` crate — generates `ExampleOneService::new()`.
#[derive(new, Debug)]
pub struct ExampleOneService {}
impl ExampleOne for ExampleOneService {
    /// Returns a canned search result: the query keywords echoed back with
    /// the suffixes "one", "two" and "three".
    fn search(&self, request: TRequest<SearchQuery>) -> TResponse<SearchResult> {
        let keywords = request.value.keywords;
        let results: Vec<String> = ["one", "two", "three"]
            .iter()
            .map(|suffix| format!("{} {}", keywords, suffix))
            .collect();
        TResponse::Ok(SearchResult::new(results))
    }
}
|
mod bam;
/// Reads the test BAM file from `<cwd>/data` and dumps its reference-sequence
/// blocks to stdout as YAML.
fn main() {
    let cwd = std::env::current_dir().unwrap();
    let bam_path = cwd.join("data").join("htsnexus_test_NA12878.bam");
    let ref_seqs = bam::bam_blocks(bam_path).unwrap();
    println!("{}", serde_yaml::to_string(&ref_seqs).unwrap());
}
|
use super::*;
use crate::errors::{span_to_snippet, spans_to_snippet, two_spans_to_snippet, ErrText};
use crate::flat::Sort;
use crate::lexer::Span;
use crate::source_file::FileMap;
use annotate_snippets::snippet::{Annotation, AnnotationType, Snippet};
use std::fmt;
/// Semantic-analysis errors; each variant carries the spans needed by
/// `into_snippet` to render a user-facing diagnostic.
#[derive(Debug, Clone)]
pub enum Error {
    SortError {
        /// Span of the occurence of this error
        span: Span,
        // TODO: we may want to include definitions of these variables as well
        expected: Sort,
        actual: Sort,
    },
    /// A param was passed to a Type that does not accept it
    InvalidTypeParam {
        func_call: Span,
        param: ParamType,
        // this Span is optional since it only makes sense to provide it if `func`
        // is a user defined type. If it's a builtin (or, eventually, if it's an
        // inline/anonymous type), the func call span will include the definition
        // TODO: this span doesn't work for e.g. structs
        func_def: Option<Span>,
    },
    /// A type that is not fully resolved where a concrete one is required.
    NonConcreteType {
        span: Span,
        missing: Vec<ParamType>,
    },
    /// Cycle detected while resolving named definitions (innermost entry first).
    VarCycle(Vec<(Name, Span)>),
    /// A type that can only ever be infinitely sized.
    InfiniteType(Vec<(Name, Span)>),
    TypeError {
        actual: Spanned<Type>,
        expected: Type,
    },
    /// An integer value that is out of bounds for its target type.
    IntCoercionError {
        span: Span,
        ty: Type,
    },
    /// A string literal longer than its declared bounds.
    StringBoundsError {
        /// The span is on the string literal
        length: Spanned<usize>,
        /// The span is on the bounds
        bounds: Spanned<u64>,
    },
    InvalidBitsType(Spanned<Type>),
    InvalidEnumType(Spanned<Type>),
    DuplicateMemberValue {
        original: Span,
        dupe: Span,
        /// either enum or bits
        decl_kind: &'static str,
    },
    NullableMember {
        span: Span,
        // either union or table
        decl_kind: &'static str,
    },
    // nullability
    TypeCantBeNullable(Spanned<Type>),
    DoubleNullability(Span), // TODO: include definition as help
}
/// Which kind of parameter was passed to a parameterized type:
/// a value constraint or a layout argument.
#[derive(Debug, Copy, Clone)]
pub enum ParamType {
    Constraint,
    Layout,
}
impl Error {
    /// Consumes the error and renders it as an `annotate_snippets` `Snippet`,
    /// resolving every span against the file map `srcs`.
    pub fn into_snippet(self, srcs: &FileMap) -> Snippet {
        use Error::*;
        match self {
            SortError {
                span,
                expected,
                actual,
            } => span_to_snippet(
                span,
                srcs,
                ErrText {
                    text: "sort error".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!("expected a {}, but got a {} instead", expected, actual),
                    ty: AnnotationType::Error,
                },
                None,
            ),
            InvalidTypeParam {
                func_call,
                param,
                func_def,
            } => {
                let title = ErrText {
                    text: "invalid type parameter".to_string(),
                    ty: AnnotationType::Error,
                };
                let call_annotation = ErrText {
                    text: format!(
                        "type was passed a {} parameter, which it does not support",
                        param
                    ),
                    ty: AnnotationType::Error,
                };
                // Also point at the definition when one is available
                // (i.e. when the type is user-defined).
                match func_def {
                    None => span_to_snippet(func_call, srcs, title, call_annotation, None),
                    Some(defn) => two_spans_to_snippet(
                        func_call,
                        defn,
                        srcs,
                        title,
                        call_annotation,
                        ErrText {
                            text: "which is defined here".to_string(),
                            ty: AnnotationType::Help,
                        },
                        None,
                    ),
                }
            }
            NonConcreteType { span, missing } => span_to_snippet(
                span,
                srcs,
                ErrText {
                    text: "type is not fully resolved".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: "expected a fully resolved type here".to_string(),
                    ty: AnnotationType::Error,
                },
                Some(Annotation {
                    // TODO: print this nicer
                    label: Some(format!("type is missing: {:?}", missing)),
                    id: None,
                    annotation_type: AnnotationType::Help,
                }),
            ),
            VarCycle(cycle) => {
                // note: the top level error name will be the last name, since
                // the first element in the vector is the rhs of the first type
                // that caused this error
                let name = cycle[cycle.len() - 1].0.clone();
                // Annotate every step of the cycle with an informational note.
                let spans: Vec<_> = cycle
                    .into_iter()
                    .map(|(name, span)| {
                        (
                            span,
                            ErrText {
                                text: format!("...which requires processing `{}`...", name.name),
                                ty: AnnotationType::Info,
                            },
                        )
                    })
                    .collect();
                spans_to_snippet(
                    ErrText {
                        text: format!("cycle detected when processing {}", name.name),
                        ty: AnnotationType::Error,
                    },
                    spans,
                    srcs,
                    None,
                )
            }
            InfiniteType(cycle) => {
                // Same ordering convention as VarCycle: the offending name is last.
                let name = cycle[cycle.len() - 1].0.clone();
                let spans: Vec<_> = cycle
                    .into_iter()
                    .map(|(name, span)| {
                        (
                            span,
                            ErrText {
                                text: format!("contains infinitely sized `{}`...", name.name),
                                ty: AnnotationType::Info,
                            },
                        )
                    })
                    .collect();
                spans_to_snippet(
                    ErrText {
                        text: format!("{} is an infinitely sized type", name.name),
                        ty: AnnotationType::Error,
                    },
                    spans,
                    srcs,
                    Some(Annotation {
                        label: Some("types that can _only_ be infinitely sized can not be represented on the wire".to_string()),
                        id: None,
                        annotation_type: AnnotationType::Info,
                    })
                )
            }
            TypeError { actual, expected } => span_to_snippet(
                actual.span,
                srcs,
                ErrText {
                    text: "type error".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!("value has type {}, but expected {}", actual.value, expected),
                    ty: AnnotationType::Error,
                },
                None,
            ),
            IntCoercionError { span, ty } => span_to_snippet(
                span,
                srcs,
                ErrText {
                    text: "int coercion error".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!("cannot be casted to {} - value is out of bounds", ty),
                    ty: AnnotationType::Error,
                },
                None,
            ),
            StringBoundsError { length, bounds } => two_spans_to_snippet(
                length.span,
                bounds.span,
                srcs,
                ErrText {
                    text: "string bounds error".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!(
                        "string has a length of {}, but the type bounds are specified as {}",
                        length.value, bounds.value
                    ),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: "bounds were specified here".to_string(),
                    ty: AnnotationType::Help,
                },
                None,
            ),
            InvalidBitsType(ty) => span_to_snippet(
                ty.span,
                srcs,
                ErrText {
                    text: "invalid bits type".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!(
                        "bits type specified as {}, but must be an unsigned integral type",
                        ty.value
                    ),
                    ty: AnnotationType::Error,
                },
                None,
            ),
            InvalidEnumType(ty) => span_to_snippet(
                ty.span,
                srcs,
                ErrText {
                    text: "invalid enum type".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!(
                        "enum type specified as {}, but must be an integral type",
                        ty.value
                    ),
                    ty: AnnotationType::Error,
                },
                None,
            ),
            DuplicateMemberValue {
                original,
                dupe,
                decl_kind,
            } => two_spans_to_snippet(
                original,
                dupe,
                srcs,
                ErrText {
                    text: format!("{} has duplicate member values", decl_kind),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: "value conflicts with existing member".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: "original member defined here".to_string(),
                    ty: AnnotationType::Info,
                },
                None,
            ),
            NullableMember { span, decl_kind } => span_to_snippet(
                span,
                srcs,
                ErrText {
                    text: format!("nullable {} member", decl_kind),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!("{} member cannot be nullable", decl_kind),
                    ty: AnnotationType::Error,
                },
                Some(Annotation {
                    label: Some("try moving the nullable member into a struct".to_string()),
                    id: None,
                    annotation_type: AnnotationType::Help,
                }),
            ),
            TypeCantBeNullable(ty) => span_to_snippet(
                ty.span,
                srcs,
                ErrText {
                    text: "invalid nullable type".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: format!("type {} cannot be used as nullable", ty.value),
                    ty: AnnotationType::Error,
                },
                None, // "try wrapping it in a struct"?
            ),
            DoubleNullability(span) => span_to_snippet(
                span,
                srcs,
                ErrText {
                    text: "double nullability".to_string(),
                    ty: AnnotationType::Error,
                },
                ErrText {
                    text: "type is indicated as nullable twice".to_string(),
                    ty: AnnotationType::Error,
                },
                None,
            ),
        }
    }
}
// TODO: move this along their definitions?
impl fmt::Display for ParamType {
    /// Lower-case parameter-kind name used in diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            ParamType::Constraint => "constraint",
            ParamType::Layout => "layout",
        };
        f.write_str(name)
    }
}
impl fmt::Display for Sort {
    /// User-facing keyword for each declaration sort; note that `Term`
    /// prints as "const", matching the surface syntax.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Sort::Term => "const",
            Sort::Type => "type",
            Sort::Protocol => "protocol",
            Sort::Service => "service",
        };
        f.write_str(keyword)
    }
}
impl fmt::Display for Type {
    /// User-facing name of the type as it appears in diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Type::Struct(_) => f.write_str("struct"),
            Type::Bits(_) => f.write_str("bits"),
            Type::Enum(_) => f.write_str("enum"),
            Type::Table(_) => f.write_str("table"),
            Type::Union(_) => f.write_str("union"),
            Type::Ptr(ty) => write!(f, "nullable {}", ty.value),
            Type::Array(_) => f.write_str("array"),
            Type::Vector(_) => f.write_str("vector"),
            Type::Str(_) => f.write_str("string"),
            // All handle-like types read the same to the user.
            Type::Handle(_) | Type::ClientEnd(_) | Type::ServerEnd(_) => f.write_str("handle"),
            Type::Primitive(sub) => write!(f, "{}", sub),
            Type::Any => f.write_str("any"),
            Type::Int => f.write_str("int"),
            // these shouldn't be called: should pass in the evaled type to the error
            Type::TypeSubstitution(_) | Type::Identifier(_) => f.write_str("todo"),
        }
    }
}
impl fmt::Display for PrimitiveSubtype {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Reuses the serde name of the variant instead of a hand-written match;
        // serializing a fieldless enum variant cannot fail, hence the unwrap.
        // NOTE(review): serde_json::to_string yields a JSON string INCLUDING the
        // surrounding quotes — confirm the quotes are intended in the output.
        write!(f, "{}", serde_json::to_string(self).unwrap())
    }
}
|
use std::cell::RefCell;
use std::error::Error;
use std::ptr;
use std::slice;
thread_local! {
    // Per-thread slot holding the most recent error, exposed to C callers
    // through the battery_* FFI functions. Being thread-local, an error set
    // on one thread is invisible to calls made from another thread.
    static LAST_ERROR: RefCell<Option<Box<dyn Error>>> = RefCell::new(None);
}
/// Stores `err` as the calling thread's last error, replacing any previous one.
pub fn set_last_error<E: Error + 'static>(err: E) {
    let boxed: Box<dyn Error> = Box::new(err);
    LAST_ERROR.with(|slot| {
        // `replace` drops the previous error, if any.
        slot.borrow_mut().replace(boxed);
    });
}
/// Removes and returns the calling thread's last error, leaving `None` behind.
pub fn take_last_error() -> Option<Box<dyn Error>> {
    LAST_ERROR.with(|slot| {
        let mut guard = slot.borrow_mut();
        guard.take()
    })
}
/// Discards the calling thread's last error, if any.
pub fn clear_last_error() {
    // Taking the error and dropping it is enough to reset the slot.
    take_last_error();
}
/// Checks whether an error is currently stored for the calling thread.
///
/// # Returns
///
/// `0` if there was no error, `1` if an error had occurred.
#[no_mangle]
pub extern "C" fn battery_have_last_error() -> libc::c_int {
    // `true as c_int == 1`, `false as c_int == 0` — matches the documented contract.
    LAST_ERROR.with(|slot| slot.borrow().is_some() as libc::c_int)
}
/// Gets error message length if any error had occurred.
///
/// # Returns
///
/// If there was no error before, returns `0`,
/// otherwise returns message length including trailing `\0`.
#[no_mangle]
pub extern "C" fn battery_last_error_length() -> libc::c_int {
// TODO: Support Windows UTF-16 strings
LAST_ERROR.with(|prev| match *prev.borrow() {
Some(ref err) => err.to_string().len() as libc::c_int + 1,
None => 0,
})
}
/// Fills the passed buffer with the last error message, NUL-terminated.
///
/// The required buffer length can be obtained with
/// [battery_last_error_length](fn.battery_last_error_length.html).
///
/// Note that the stored error is consumed by this call even if the buffer
/// turns out to be too small.
///
/// # Returns
///
/// Returns `-1` if the passed buffer is `NULL`, has a non-positive length,
/// or is too small for the error message.
/// Returns `0` if there was no error previously.
///
/// In all other cases returns the error message length (excluding the `\0`).
///
/// # Safety
///
/// `buffer` must be valid for writes of at least `length` bytes.
#[no_mangle]
pub unsafe extern "C" fn battery_last_error_message(buffer: *mut libc::c_char, length: libc::c_int) -> libc::c_int {
    // Reject a NULL buffer or a non-positive length up front: a negative
    // `length` cast to `usize` below would create an enormous slice, which
    // is immediate undefined behavior.
    if buffer.is_null() || length < 1 {
        return -1;
    }
    let last_error = match take_last_error() {
        Some(err) => err,
        None => return 0,
    };
    let error_message = last_error.to_string();
    // SAFETY: buffer is non-null and the caller guarantees `length` writable
    // bytes; `length` has been checked to be strictly positive.
    let buffer = slice::from_raw_parts_mut(buffer as *mut u8, length as usize);
    // Require room for the message plus the trailing NUL.
    if error_message.len() >= buffer.len() {
        return -1;
    }
    ptr::copy_nonoverlapping(error_message.as_ptr(), buffer.as_mut_ptr(), error_message.len());
    buffer[error_message.len()] = b'\0';
    error_message.len() as libc::c_int
}
|
//! Core Paillier encryption scheme supporting ciphertext addition and plaintext multiplication.
use traits::*;
use std::ops::{Add, Sub, Mul, Div, Rem};
use num_traits::{One};
use arithimpl::traits::*;
/// Representation of a keypair from which encryption and decryption keys can be derived.
pub struct Keypair<I> {
    pub p: I,
    pub q: I,
}
impl<'p, 'q, I> From<(&'p I, &'q I)> for Keypair<I>
where
    I: Clone,
{
    /// Builds a keypair by cloning the two referenced primes.
    fn from(primes: (&'p I, &'q I)) -> Keypair<I> {
        let (p, q) = primes;
        Keypair {
            p: p.clone(),
            q: q.clone(),
        }
    }
}
/// Representation of unencrypted message.
#[derive(Debug,Clone,PartialEq)]
pub struct Plaintext<I>(pub I);
/// Representation of encrypted message.
#[derive(Debug,Clone)]
pub struct Ciphertext<I>(pub I); // no PartialEq: ciphertexts are randomised, equality is not meaningful
impl<I> DefaultKeys for Keypair<I>
where // TODO clean up bounds
    // NOTE(review): this bound list mirrors what the
    // `standard::EncryptionKey::from` and `crt::DecryptionKey::from`
    // constructors require of the big-integer type `I`; trim it together
    // with those impls.
    I: From<u64>,
    I: Clone,
    I: Samplable,
    I: ModInv,
    I: One,
    I: Mul<Output=I>,
    for<'a> &'a I: Mul<I, Output=I>,
    for<'b> I: Mul<&'b I, Output=I>,
    for<'a,'b> &'a I: Mul<&'b I, Output=I>,
    for<'a,'b> &'a I: Add<&'b I, Output=I>,
    for<'a> &'a I: Sub<I, Output=I>,
    for<'b> I: Sub<&'b I, Output=I>,
    for<'a,'b> &'a I: Sub<&'b I, Output=I>,
    for<'b> I: Div<&'b I, Output=I>,
    for<'a,'b> &'a I: Div<&'b I, Output=I>,
    for<'a> I: Rem<&'a I, Output=I>,
    for<'a,'b> &'a I: Rem<&'b I, Output=I>,
{
    /// Type of encryption key derived from this keypair.
    type EK = standard::EncryptionKey<I>;
    /// Type of decryption key derived from this keypair (CRT-based, see the `crt` module).
    type DK = crt::DecryptionKey<I>;
    /// Derives the public encryption key from this keypair.
    fn encryption_key(&self) -> Self::EK {
        standard::EncryptionKey::from(self)
    }
    /// Derives the private decryption key from this keypair.
    fn decryption_key(&self) -> Self::DK {
        crt::DecryptionKey::from(self)
    }
}
impl<I, T> From<T> for Plaintext<I>
where
    T: Copy, // marker to avoid infinite loop by excluding Plaintext
    I: From<T>,
{
    /// Wraps any copyable value convertible to `I` as a plaintext.
    fn from(x: T) -> Plaintext<I> {
        let inner: I = x.into();
        Plaintext(inner)
    }
}
use std::fmt;
impl<I> fmt::Display for Plaintext<I>
where
    I: fmt::Display
{
    /// Formats exactly like the wrapped value, forwarding the caller's
    /// formatter so width/precision flags are preserved.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
// impl<I, T> Encoding<T, Plaintext<I>> for Scheme<I>
// where
// T: Copy,
// Plaintext<I> : From<T>,
// {
// fn encode(x: &T) -> Plaintext<I> {
// Plaintext::from(*x)
// }
// }
//
// impl<I, T> Decoding<Plaintext<I>, T> for Scheme<I>
// where
// Plaintext<I>: Copy,
// T: From<Plaintext<I>>,
// {
// fn decode(x: &Plaintext<I>) -> T {
// T::from(*x)
// }
// }
/// The `L` function from the Paillier scheme: `L(u) = (u - 1) / n`,
/// using integer division.
fn l<I>(u: &I, n: &I) -> I
where
    I: One,
    for<'a> &'a I: Sub<I, Output=I>,
    for<'b> I: Div<&'b I, Output=I>,
{
    let numerator = u - I::one();
    numerator / n
}
pub mod generic;
pub mod standard;
pub mod crt;
#[cfg(feature="keygen")]
pub mod keygen;
#[cfg(feature="keygen")]
pub use self::keygen::*;
// NOTE(review): `bigint!` appears to instantiate the body once per supported
// big-integer backend, binding the chosen type to `I` — confirm against the
// macro definition.
bigint!(I,
#[cfg(test)]
mod tests {
    use super::I;
    use ::AbstractPaillier;
    use ::core::*;
    // Hard-coded large factors keep the tests deterministic and avoid the
    // cost of key generation (which is feature-gated anyway).
    fn test_keypair() -> Keypair<I> {
        let p = str::parse("148677972634832330983979593310074301486537017973460461278300587514468301043894574906886127642530475786889672304776052879927627556769456140664043088700743909632312483413393134504352834240399191134336344285483935856491230340093391784574980688823380828143810804684752914935441384845195613674104960646037368551517").unwrap();
        let q = str::parse("158741574437007245654463598139927898730476924736461654463975966787719309357536545869203069369466212089132653564188443272208127277664424448947476335413293018778018615899291704693105620242763173357203898195318179150836424196645745308205164116144020613415407736216097185962171301808761138424668335445923774195463").unwrap();
        Keypair {
            p: p,
            q: q,
        }
    }
    // Round-trip: decrypt(encrypt(m)) == m.
    #[test]
    fn test_correct_encryption_decryption() {
        let (ek, dk) = test_keypair().keys();
        let m = Plaintext::from(10);
        let c = AbstractPaillier::encrypt(&ek, &m);
        let recovered_m = AbstractPaillier::decrypt(&dk, &c);
        assert_eq!(recovered_m, m);
    }
    // Homomorphic addition: Dec(c1 + c2) == m1 + m2.
    #[test]
    fn test_correct_addition() {
        let (ek, dk) = test_keypair().keys();
        let m1 = Plaintext::from(10);
        let c1 = AbstractPaillier::encrypt(&ek, &m1);
        let m2 = Plaintext::from(20);
        let c2 = AbstractPaillier::encrypt(&ek, &m2);
        let c = AbstractPaillier::add(&ek, &c1, &c2);
        let m = AbstractPaillier::decrypt(&dk, &c);
        assert_eq!(m, Plaintext::from(30));
    }
    // Homomorphic scalar multiplication: Dec(c1 * m2) == m1 * m2.
    #[test]
    fn correct_multiplication() {
        let (ek, dk) = test_keypair().keys();
        let m1 = Plaintext::from(10);
        let c1 = AbstractPaillier::encrypt(&ek, &m1);
        let m2 = Plaintext::from(20);
        let c = AbstractPaillier::mul(&ek, &c1, &m2);
        let m = AbstractPaillier::decrypt(&dk, &c);
        assert_eq!(m, Plaintext::from(200));
    }
    // Same round-trip as above but with freshly generated keys.
    #[cfg(feature="keygen")]
    #[test]
    fn test_correct_keygen() {
        let (ek, dk): (standard::EncryptionKey<I>, _) = AbstractPaillier::keypair_with_modulus_size(2048).keys();
        let m = Plaintext::from(10);
        let c = AbstractPaillier::encrypt(&ek, &m);
        let recovered_m = AbstractPaillier::decrypt(&dk, &c);
        assert_eq!(recovered_m, m);
    }
});
|
#[doc = "Reader of register DAC_STMODR"]
pub type R = crate::R<u32, super::DAC_STMODR>;
#[doc = "Writer for register DAC_STMODR"]
pub type W = crate::W<u32, super::DAC_STMODR>;
#[doc = "Register DAC_STMODR `reset()`'s with value 0"]
impl crate::ResetValue for super::DAC_STMODR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset state: every trigger-selection field cleared.
        0
    }
}
#[doc = "Reader of field `STRSTTRIGSEL1`"]
pub type STRSTTRIGSEL1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `STRSTTRIGSEL1`"]
pub struct STRSTTRIGSEL1_W<'a> {
    w: &'a mut W,
}
impl<'a> STRSTTRIGSEL1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:3 — clear them, then merge the masked value.
        const MASK: u32 = 0x0f;
        self.w.bits = (self.w.bits & !MASK) | (u32::from(value) & MASK);
        self.w
    }
}
#[doc = "Reader of field `STINCTRIGSEL1`"]
pub type STINCTRIGSEL1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `STINCTRIGSEL1`"]
pub struct STINCTRIGSEL1_W<'a> {
    w: &'a mut W,
}
impl<'a> STINCTRIGSEL1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 8:11 — clear them, then merge the masked value.
        const MASK: u32 = 0x0f;
        const OFFSET: u32 = 8;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | ((u32::from(value) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `STRSTTRIGSEL2`"]
pub type STRSTTRIGSEL2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `STRSTTRIGSEL2`"]
pub struct STRSTTRIGSEL2_W<'a> {
    w: &'a mut W,
}
impl<'a> STRSTTRIGSEL2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 16:19 — clear them, then merge the masked value.
        const MASK: u32 = 0x0f;
        const OFFSET: u32 = 16;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | ((u32::from(value) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `STINCTRIGSEL2`"]
pub type STINCTRIGSEL2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `STINCTRIGSEL2`"]
pub struct STINCTRIGSEL2_W<'a> {
    w: &'a mut W,
}
impl<'a> STINCTRIGSEL2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 24:27 — clear them, then merge the masked value.
        const MASK: u32 = 0x0f;
        const OFFSET: u32 = 24;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | ((u32::from(value) & MASK) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - DAC Channel 1 Sawtooth Reset trigger selection"]
    #[inline(always)]
    pub fn strsttrigsel1(&self) -> STRSTTRIGSEL1_R {
        STRSTTRIGSEL1_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - DAC Channel 1 Sawtooth Increment trigger selection"]
    #[inline(always)]
    pub fn stinctrigsel1(&self) -> STINCTRIGSEL1_R {
        STINCTRIGSEL1_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    // Doc fixed: this is the Channel 2 field (upstream said "Channel 1",
    // a copy-paste error — compare `stinctrigsel2` below).
    #[doc = "Bits 16:19 - DAC Channel 2 Sawtooth Reset trigger selection"]
    #[inline(always)]
    pub fn strsttrigsel2(&self) -> STRSTTRIGSEL2_R {
        STRSTTRIGSEL2_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    #[doc = "Bits 24:27 - DAC Channel 2 Sawtooth Increment trigger selection"]
    #[inline(always)]
    pub fn stinctrigsel2(&self) -> STINCTRIGSEL2_R {
        STINCTRIGSEL2_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - DAC Channel 1 Sawtooth Reset trigger selection"]
    #[inline(always)]
    pub fn strsttrigsel1(&mut self) -> STRSTTRIGSEL1_W {
        STRSTTRIGSEL1_W { w: self }
    }
    #[doc = "Bits 8:11 - DAC Channel 1 Sawtooth Increment trigger selection"]
    #[inline(always)]
    pub fn stinctrigsel1(&mut self) -> STINCTRIGSEL1_W {
        STINCTRIGSEL1_W { w: self }
    }
    // Doc fixed: this is the Channel 2 field (upstream said "Channel 1",
    // a copy-paste error — compare `stinctrigsel2` below).
    #[doc = "Bits 16:19 - DAC Channel 2 Sawtooth Reset trigger selection"]
    #[inline(always)]
    pub fn strsttrigsel2(&mut self) -> STRSTTRIGSEL2_W {
        STRSTTRIGSEL2_W { w: self }
    }
    #[doc = "Bits 24:27 - DAC Channel 2 Sawtooth Increment trigger selection"]
    #[inline(always)]
    pub fn stinctrigsel2(&mut self) -> STINCTRIGSEL2_W {
        STINCTRIGSEL2_W { w: self }
    }
}
|
pub use crate::table::{chart::Chart, charts::Charts, table::*};
|
pub mod domain;
pub mod gateway;
pub mod port;
pub mod usecase;
#[macro_use]
extern crate mockall;
|
//! read contains the Reader struct and its associated helpers and data
//! structures.
//!
//! Initial master regex is taken from MAL:
//! [\s,]*(~@|[\[\]{}()'`~^@]|"(?:\\.|[^\\"])*"|;.*|[^\s\[\]{}('"`,;)]*)
//!
//! https://doc.rust-lang.org/regex/regex/index.html
extern crate regex;
use self::regex::{Captures, Regex};
use std::collections::HashMap;
use super::types::LispVal;
// lazy_static! {
// /// The special quote characters that need additional parsing at the
// reader level pub static ref QUOTES: HashMap<&'static str, LispVal> =
// map!{ "'" => LispVal::Symbol("quote".to_string()),
// "`" => LispVal::Symbol("quasiquote".to_string()),
// "~" => LispVal::Symbol("unquote".to_string()),
// "~@" => LispVal::Symbol("unquote-splicing".to_string())
// };
// }
// A token used for parsing into internal datatypes
#[derive(Debug)]
struct Token {
    /// Name of the regex capture group that matched (e.g. "lparen", "string").
    tag: String,
    /// The matched source text.
    text: String,
}
/// A Reader takes string input,either directly or from a file and produces
/// internal data forms.
pub struct Reader {
    /// Master tokenising regex; one alternation arm per token kind.
    regex: regex::Regex,
    /// The most recent input handed to `read`.
    input: String,
    /// Cursor into the token stream (not yet used by `read`).
    ix: i64,
    /// Parsed tokens (not yet populated by `read`).
    tokens: Vec<Token>,
}
impl Reader {
    /// Builds a `Reader` with an empty input buffer and the master
    /// tokenising regex compiled once up front.
    pub fn new() -> Reader {
        // (?x) turns on multi-line and comment mode
        let regex = Regex::new(
            r#"(?x)
            [\s,]* # Ignore leading whitespace
            (?P<splice>~@) # >> Individual special characters get identified
            |(?P<unquote>~) # here rather than globbing into a single capture
            |(?P<readermacro>\^) # group and then further parsing in the main read
            |(?P<quote>') # loop later on.
            |(?P<quasiquote>`)
            |(?P<deref>@) # See https://clojuredocs.org/clojure.core/deref
            |(?P<lparen>\()
            |(?P<rparen>\))
            |(?P<lbracket>\[)
            |(?P<rbracket>\])
            |(?P<lcurly>\{)
            |(?P<rcurly>\})
            |(?P<string>"(?:\\.|[^\\"])*") # String literal
            |(?P<comment>;.*) # Comments get discarded
            |(?P<literal>[^\s\[\]{}('"`,;)]*) # Anything else is a valid literal we need to identify
            |(?P<error>.) # If we hit here it's a syntax error
            "#,
        ).unwrap();
        Reader {
            regex,
            input: String::new(),
            ix: 0,
            tokens: Vec::new(),
        }
    }
    /// read is currently showing the tokens that it finds
    /// It should be changed to return an iterator that can be parsed for the
    /// next step in evaluation.
    pub fn read(&mut self, text: String) -> String {
        // Take ownership of the input once; the original cloned `text` both
        // into `self.input` and again for the return value.
        self.input = text;
        // iterate over the matches and get our tokens
        let tokens: Vec<Token> = self.regex
            .captures_iter(self.input.as_str())
            .map(|c| self.get_token(c))
            .collect();
        // XXX :: This is just temporary during development
        println!("{:?}", tokens);
        self.input.clone()
    }
    /// Find the group name and build the corresponding token
    fn get_token(&self, c: Captures) -> Token {
        for group in self.regex.capture_names() {
            if let Some(name) = group {
                // This is a named group and not our whitespace discard
                if let Some(text) = c.name(name) {
                    // Pull out the text and return the token
                    return Token {
                        tag: String::from(name),
                        text: String::from(text.as_str()),
                    };
                }
            }
        }
        // If we had captures to begin with then we've handled it
        unreachable!();
    }
}
|
use crate::prelude::*;
/// Stencil-test configuration for a single face, mirroring the Vulkan
/// `VkStencilOpState` structure; `#[repr(C)]` keeps the field layout
/// ABI-compatible with the C API.
#[repr(C)]
#[derive(Debug, Clone)]
pub struct VkStencilOpState {
    /// Per the Vulkan spec: operation applied to samples that fail the stencil test.
    pub failOp: VkStencilOp,
    /// Per the Vulkan spec: operation applied to samples passing both depth and stencil tests.
    pub passOp: VkStencilOp,
    /// Per the Vulkan spec: operation applied to samples that pass the stencil test but fail the depth test.
    pub depthFailOp: VkStencilOp,
    /// Comparison operator used in the stencil test.
    pub compareOp: VkCompareOp,
    /// Bitmask selecting which stencil-value bits participate in the comparison.
    pub compareMask: u32,
    /// Bitmask selecting which stencil-value bits are updated by the stencil operations.
    pub writeMask: u32,
    /// Reference value used in the stencil comparison.
    pub reference: u32,
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.