text stringlengths 8 4.13M |
|---|
struct LogMessageParams {
type: i32, // messagetype
message: String,
}
impl Notification for LogMessageParams {
method = "window/logMessage";
}
|
// Copyright 2017 tokio-jsonrpc Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
#![doc(html_root_url = "https://docs.rs/tokio-jsonrpc/0.9.1/tokio_jsonrpc/")]
//! A JSON RPC protocol for the [tokio](https://tokio.rs) framework.
//!
//! This implements the handling of the
//! [JSON RPC 2.0](http://www.jsonrpc.org/specification) specification. The low-level parts are in
//! the [`message`](message/index.html) and the [`codec`](codec/index.html) modules. The first
//! draft of the higher-level API is in the [`endpoint`](endpoint/index.html) module. Some helpers
//! to compose the server part are in the [`server`](server/index.html) module.
//!
//! # Examples
//!
//! A skeleton of reading messages from the other side, mapping them to answers and sending them
//! back.
//!
//! ```rust
//! # extern crate tokio_core;
//! # extern crate tokio_io;
//! # extern crate tokio_jsonrpc;
//! # extern crate futures;
//! #
//! # use tokio_core::reactor::Core;
//! # use tokio_core::net::TcpListener;
//! # use tokio_io::AsyncRead;
//! # use tokio_jsonrpc::LineCodec;
//! # use futures::{Stream, Sink, Future};
//! #
//! # fn main() {
//! let mut core = Core::new().unwrap();
//! let handle = core.handle();
//!
//! let listener = TcpListener::bind(&"127.0.0.1:2345".parse().unwrap(), &handle).unwrap();
//! let connections = listener.incoming();
//! let service = connections.for_each(|(stream, _)| {
//! let messages = stream.framed(LineCodec::new());
//! let (write, read) = messages.split();
//! let answers = read.filter_map(|message| {
//! match message {
//! _ => unimplemented!(),
//! }
//! });
//! handle.spawn(write.send_all(answers).map(|_| ()).map_err(|_| ()));
//! Ok(())
//! });
//! # }
//! ```
//!
//! Provide a server that greets through an RPC.
//!
//! ```rust,no_run
//! # extern crate tokio_core;
//! # extern crate tokio_io;
//! # extern crate tokio_jsonrpc;
//! # extern crate futures;
//! # extern crate serde_json;
//! #
//! # use tokio_core::reactor::Core;
//! # use tokio_core::net::TcpListener;
//! # use tokio_io::AsyncRead;
//! # use tokio_jsonrpc::{LineCodec, Server, ServerCtl, RpcError, Endpoint};
//! # use futures::{Future, Stream};
//! # use serde_json::Value;
//! #
//! # fn main() {
//! let mut core = Core::new().unwrap();
//! let handle = core.handle();
//!
//! let listener = TcpListener::bind(&"127.0.0.1:2346".parse().unwrap(), &handle).unwrap();
//!
//! struct UselessServer;
//!
//! impl Server for UselessServer {
//! type Success = String;
//! type RpcCallResult = Result<String, RpcError>;
//! type NotificationResult = Result<(), ()>;
//! fn rpc(&self,
//! ctl: &ServerCtl,
//! method: &str,
//! _params: &Option<Value>)
//! -> Option<Self::RpcCallResult> {
//! match method {
//! // Accept a hello message and finish the greeting
//! "hello" => Some(Ok("world".to_owned())),
//! // When the other side says bye, terminate the connection
//! "bye" => {
//! ctl.terminate();
//! Some(Ok("bye".to_owned()))
//! },
//! _ => None
//! }
//! }
//! }
//!
//! let connections = listener.incoming().for_each(|(stream, _)| {
//! // Greet every new connection
//! let (client, _) = Endpoint::new(stream.framed(LineCodec::new()), UselessServer)
//! .start(&handle);
//! let notified = client.notify("hello".to_owned(), None)
//! .map(|_| ())
//! .map_err(|_| ());
//! handle.spawn(notified);
//! Ok(())
//! });
//!
//! core.run(connections).unwrap();
//! # }
//! ```
extern crate serde;
// We use the json! macro only in the tests
extern crate bytes;
extern crate futures;
#[macro_use]
extern crate serde_derive;
#[cfg_attr(test, macro_use)]
extern crate serde_json;
#[macro_use]
extern crate slog;
extern crate tokio_core;
extern crate tokio_io;
extern crate uuid;
pub mod codec;
pub mod endpoint;
pub mod message;
pub mod server;
/// This contains some reexports so macros can find them.
///
/// It isn't for the direct use of the library consumer.
pub mod macro_exports {
    // Re-exported so macro expansions can reference these through a stable
    // path (`$crate::macro_exports::...`) regardless of what the caller has
    // imported in its own scope.
    pub use serde_json::{from_value, Value};
    pub use std::option::Option;
    pub use std::result::Result;
}
pub use codec::{Boundary as BoundaryCodec, Line as LineCodec};
pub use endpoint::{Client, Endpoint, ServerCtl};
pub use message::{Message, Parsed, RpcError};
pub use server::Server;
|
use glam::Vec3;
/// A light source identified solely by its world-space position.
pub struct Light(Vec3);

impl Light {
    /// Create a light at `pos`.
    pub fn new(pos: Vec3) -> Self {
        Light(pos)
    }

    /// Return the light's position.
    ///
    /// `glam::Vec3` is `Copy`, so this is a plain copy; the original
    /// `self.0.clone()` was a `clone_on_copy` lint violation.
    pub fn get_pos(&self) -> Vec3 {
        self.0
    }

    /// Move the light to `pos`.
    pub fn set_pos(&mut self, pos: Vec3) {
        self.0 = pos;
    }
}
|
use std::{borrow::Borrow, mem};
use super::*;
use crate::{Error, Result};
/// Persistent array using rope-data-structure.
pub struct Vector<T>
where
    T: Sized,
{
    // Cached number of items; kept equal to `root.len()`.
    len: usize,
    // Root of the rope: an intermediate `M` node or a `Z` leaf.
    root: Ref<Node<T>>,
    // When true, mutating ops rebalance once the tree goes too far
    // off-balance; see `Rebalance::can_rebalance`.
    auto_rebalance: bool,
    // Target leaf size in bytes; items per leaf derived via `max_leaf_items`.
    leaf_cap: usize,
}
impl<T> Clone for Vector<T> {
    /// Cheap clone: the tree is shared, only the root's refcount is bumped.
    fn clone(&self) -> Vector<T> {
        let Vector { len, root, auto_rebalance, leaf_cap } = self;
        Vector {
            len: *len,
            root: Ref::clone(root),
            auto_rebalance: *auto_rebalance,
            leaf_cap: *leaf_cap,
        }
    }
}
impl<T> From<Vector<T>> for Vec<T>
where
    T: Clone,
{
    /// Flatten the rope into a plain `Vec` by walking its leaves in order.
    fn from(val: Vector<T>) -> Vec<T> {
        let mut out: Vec<T> = Vec::new();
        let leafs = Node::collect_leaf_nodes(Ref::clone(&val.root), false, val.leaf_cap);
        for leaf in leafs {
            if let Node::Z { data } = leaf.borrow() {
                out.extend_from_slice(data);
            } else {
                // collect_leaf_nodes only ever returns `Z` nodes.
                unreachable!()
            }
        }
        out
    }
}
#[cfg(any(feature = "arbitrary", test))]
impl<T> arbitrary::Arbitrary for Vector<T>
where
    T: Clone + arbitrary::Arbitrary,
{
    /// Generate a random vector: pick a leaf capacity (a small multiple of
    /// the item size), toggle auto-rebalance, and fill with random items.
    fn arbitrary(
        u: &mut arbitrary::unstructured::Unstructured,
    ) -> arbitrary::Result<Self> {
        let item_size = std::mem::size_of::<T>();
        let caps = [
            item_size,
            item_size * 2,
            item_size * 100,
            item_size * 1000,
            item_size * 10000,
        ];
        let leaf_cap = *u.choose(&caps)?;
        let rebalance = *u.choose(&[true, false])?;
        let items: Vec<T> = u.arbitrary()?;
        let mut vector = Vector::from_slice(&items, Some(leaf_cap));
        vector.set_auto_rebalance(rebalance);
        Ok(vector)
    }
}
impl<T> IntoIterator for Vector<T>
where
    T: Clone,
{
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consume the vector, yielding its items front to back.
    fn into_iter(self) -> Self::IntoIter {
        let mut it = IntoIter { stack: Vec::new(), node: None, off: 0 };
        Node::build_into_iter_stack(&self.root, &mut it);
        it
    }
}
impl<T> Default for Vector<T> {
    /// Empty vector: a single empty leaf, auto-rebalance enabled, and the
    /// crate's default leaf capacity.
    fn default() -> Vector<T> {
        Vector {
            root: Node::empty_leaf(),
            len: 0,
            leaf_cap: crate::LEAF_CAP,
            auto_rebalance: true,
        }
    }
}
impl<T> Vector<T>
where
    T: Sized,
{
    /// Construct a new vector with an initial array of values.
    ///
    /// `leaf_node_size` is in bytes and defaults to `crate::LEAF_CAP`.
    /// Leaves are built by chunking `slice`, then the tree is assembled
    /// bottom-up so the result starts out balanced.
    pub fn from_slice(slice: &[T], leaf_node_size: Option<usize>) -> Vector<T>
    where
        T: Clone,
    {
        // Items per leaf, derived from the byte budget.
        let n = max_leaf_items::<T>(leaf_node_size.unwrap_or(crate::LEAF_CAP));
        let mut leafs: Vec<Ref<Node<T>>> =
            slice.chunks(n).map(|x| Ref::new(Node::from(x))).collect();
        // build_bottoms_up pops from the back, so reverse to keep order.
        leafs.reverse();
        let depth = (leafs.len() as f64).log2().ceil() as usize;
        let (root, _) = Node::build_bottoms_up(depth, &mut leafs);
        debug_assert!(leafs.is_empty());
        Vector {
            len: slice.len(),
            root,
            auto_rebalance: true,
            leaf_cap: leaf_node_size.unwrap_or(crate::LEAF_CAP),
        }
    }

    /// Set the size of the leaf node in bytes. Number of items inside
    /// the leaf node is computed as `ceil(leaf_size / mem::size_of::<T>())`
    /// (see `max_leaf_items`; the old "+ 1" wording here did not match the
    /// implementation). Setting a large value will make the tree shallow
    /// giving better read performance, at the expense of write performance.
    pub fn set_leaf_size(&mut self, leaf_size: usize) -> &mut Self {
        self.leaf_cap = leaf_size;
        self
    }

    /// Auto rebalance is enabled by default. This has some penalty for write
    /// heavy situations, since every write op will try to rebalance the tree
    /// when it goes too much off-balance. Application can disable
    /// auto-rebalance to get maximum efficiency, and call [Self::rebalance]
    /// method as and when required. Make sure *you know what you are doing*
    /// before disabling auto-rebalance.
    pub fn set_auto_rebalance(&mut self, rebalance: bool) -> &mut Self {
        self.auto_rebalance = rebalance;
        self
    }
}
impl<T> Vector<T>
where
T: Sized,
{
/// Return the length of the vector, that is, number of elements in the
/// vector.
#[inline]
pub fn len(&self) -> usize {
    self.len
}

/// Return whether empty vector
#[inline]
pub fn is_empty(&self) -> bool {
    self.len() == 0
}

/// Return the memory foot-print for this instance.
///
/// Counts this struct plus the entire node tree; nodes shared with other
/// clones are counted in full, so footprints of clones overlap.
pub fn footprint(&self) -> usize {
    mem::size_of_val(self) + self.root.footprint()
}

/// Return a reference to the element at that position or `IndexFail` error
/// if out of bounds.
pub fn get(&self, index: usize) -> Result<&T> {
    if index < self.len {
        Ok(self.root.get(index))
    } else {
        // err_at! builds an Err; the `?` propagates it out of this fn.
        err_at!(IndexFail, msg: "index {} out of bounds", index)?
    }
}
/// Insert an element at `off` position within the vector, or `IndexFail`
/// error if out of bounds. Call this for copy-on-write insert, especially
/// when `Vector` is shared among multiple owners. In cases of
/// single-ownership use `insert_mut`, which does in-place mutation, for
/// better performance.
pub fn insert(&mut self, off: usize, value: T) -> Result<()>
where
    T: Clone,
{
    // `off == self.len` is allowed: that is an append.
    let (root, _) = if off <= self.len {
        let rn = Rebalance::new(self);
        self.root.insert(off, value, &rn)?
    } else {
        err_at!(IndexFail, msg: "index {} out of bounds", off)?
    };
    self.root = root;
    self.len += 1;
    Ok(())
}

/// Insert an element at `off` position within the vector, or `IndexFail`
/// error if out of bounds. Call this for in-place insert and only when
/// `Vector` is under single ownership. In cases of shared-ownership
/// use `insert` api which does copy-on-write.
///
/// **causes panic when used under shared-ownership**
pub fn insert_mut(&mut self, off: usize, value: T) -> Result<()>
where
    T: Clone,
{
    if off <= self.len {
        let rn = Rebalance::new(self);
        // Ref::get_mut returns None when the root is shared — that is the
        // documented panic (unwrap) under shared ownership.
        let depth = Ref::get_mut(&mut self.root)
            .unwrap()
            .insert_mut(off, value, &rn)?;
        let packed = false;
        let force = false;
        let (root, _) =
            Node::auto_rebalance(Ref::clone(&self.root), depth, packed, force, &rn);
        self.root = root;
        self.len += 1;
        Ok(())
    } else {
        err_at!(IndexFail, msg: "index {} out of bounds", off)?
    }
}
/// Update the element at `off` position within the vector, or `IndexFail`
/// error if out of bounds. Call this for copy-on-write update, especially
/// when `Vector` is shared among multiple owners. In cases of
/// single-ownership use `update_mut`, which does in-place mutation, for
/// better performance.
pub fn update(&mut self, off: usize, value: T) -> Result<T>
where
    T: Clone,
{
    let (root, val) = if off < self.len {
        self.root.update(off, value)
    } else {
        err_at!(IndexFail, msg: "offset {} out of bounds", off)?
    };
    self.root = root;
    Ok(val)
}

/// Update an element at `off` position within the vector, or `IndexFail`
/// error if out of bounds. Call this for in-place update and only when
/// `Vector` is under single ownership. In cases of shared-ownership
/// use `update` api which does copy-on-write.
///
/// **causes panic when used under shared-ownership**
pub fn update_mut(&mut self, off: usize, value: T) -> Result<T>
where
    T: Clone,
{
    if off < self.len {
        // Panics (unwrap) when the root is shared; see the doc above.
        Ok(Ref::get_mut(&mut self.root).unwrap().update_mut(off, value))
    } else {
        err_at!(IndexFail, msg: "offset {} out of bounds", off)
    }
}
/// Remove and return the element at `off` position within the vector,
/// or `IndexFail` error if out of bounds. Call this for copy-on-write
/// remove, especially when `Vector` is shared among multiple owners.
/// In cases of single-ownership use `remove_mut`, which does in-place
/// mutation, for better performance.
pub fn remove(&mut self, off: usize) -> Result<T>
where
    T: Clone,
{
    let (root, val) = if off < self.len {
        self.root.remove(off)
    } else {
        err_at!(IndexFail, msg: "offset {} out of bounds", off)?
    };
    self.root = root;
    self.len -= 1;
    Ok(val)
}

/// Remove and return the element at `off` position within the vector,
/// or `IndexFail` error if out of bounds. Call this for in-place update
/// and only when `Vector` is under single ownership. In cases of
/// shared-ownership use `remove` api which does copy-on-write.
///
/// **causes panic when used under shared-ownership**
pub fn remove_mut(&mut self, off: usize) -> Result<T>
where
    T: Clone,
{
    let val = if off < self.len {
        // Panics (unwrap) when the root is shared; see the doc above.
        Ref::get_mut(&mut self.root).unwrap().remove_mut(off)
    } else {
        err_at!(IndexFail, msg: "offset {} out of bounds", off)?
    };
    self.len -= 1;
    Ok(val)
}
/// Return an iterator over each element in Vector.
pub fn iter(&self) -> Iter<T> {
    Iter::new(&self.root)
}

/// Splits the collection into two at the given index.
///
/// Returns a new Vector containing the elements in the range [at, len).
/// After the call, the original vector will be left containing the
/// elements [0, at) with its previous capacity unchanged.
///
/// Optionally, application can call [Self::rebalance] on `self`, and
/// the returned vector, to make the vectors fully balanced.
pub fn split_off(&mut self, off: usize) -> Result<Vector<T>>
where
    T: Clone,
{
    let val = match off {
        off if off > self.len => {
            err_at!(IndexFail, msg: "offset {} out of bounds", off)?
        }
        // Splitting at the end yields an empty vector; `self` unchanged.
        off if off == self.len => Vector {
            len: 0,
            root: Node::empty_leaf(),
            auto_rebalance: self.auto_rebalance,
            leaf_cap: self.leaf_cap,
        },
        off => {
            // `n` is the number of items moved into the new vector.
            let (node, root, n) = self.root.split_off(off, self.len);
            self.root = node;
            self.len -= n;
            Vector {
                len: n,
                root,
                auto_rebalance: self.auto_rebalance,
                leaf_cap: self.leaf_cap,
            }
        }
    };
    Ok(val)
}
/// Join `other` Vector into this vector.
///
/// Call [Self::rebalance] on `self` to make the vectors fully balanced.
pub fn append(&mut self, other: Vector<T>)
where
    T: Clone,
{
    // Normalize the incoming vector to this vector's leaf size so the
    // combined tree has a uniform leaf capacity.
    let other = if other.leaf_cap != self.leaf_cap {
        let arr: Vec<T> = other.into();
        Vector::from_slice(&arr, Some(self.leaf_cap))
    } else {
        other
    };
    // Graft `other` as the right subtree; current length becomes the
    // new root's weight (item count of its left subtree).
    let root = {
        let left = Ref::clone(&self.root);
        let right = Ref::clone(&other.root);
        Node::newm(left, right, self.len)
    };
    self.root = root;
    self.len += other.len;
}

/// When auto-rebalance is disabled, use this method to rebalance the tree.
/// Calling it with `packed` as true will make sure that the leaf nodes
/// are fully packed when rebuilding the tree.
pub fn rebalance(&self, packed: bool) -> Result<Self>
where
    T: Clone,
{
    let rn = Rebalance::new(self);
    let root = Ref::clone(&self.root);
    // `force = true`: rebuild unconditionally, bypassing the heuristic.
    let (root, _depth) = Node::auto_rebalance(root, 0, packed, true, &rn);
    let val = Vector {
        len: self.len,
        root,
        auto_rebalance: self.auto_rebalance,
        leaf_cap: self.leaf_cap,
    };
    Ok(val)
}
// return only nodes that is referenced in multiple-versions. and
// the total number of nodes in the tree.
#[cfg(test)]
pub fn fetch_multiversions(&self) -> (Vec<*const u8>, usize) {
    let mut acc = vec![];
    let n = self.root.fetch_multiversions(&mut acc);
    (acc, n)
}

// Debug helper: dump the tree structure to stdout.
#[cfg(test)]
#[allow(dead_code)]
fn pretty_print(&self) {
    self.root.pretty_print("".to_string(), self.len)
}
}
// Internal rope node: either an intermediate node (`M`) or a leaf (`Z`).
enum Node<T>
where
    T: Sized,
{
    // Intermediate node. `weight` caches the item count of the left
    // subtree, steering index lookups left or right without a traversal.
    M {
        weight: usize,
        left: Ref<Node<T>>,
        right: Ref<Node<T>>,
    },
    // Leaf node holding a contiguous run of items.
    Z {
        data: Vec<T>,
    },
}
impl<'a, T> From<&'a [T]> for Node<T>
where
    T: Clone,
{
    /// Build a leaf node by copying the slice into an owned buffer.
    fn from(val: &'a [T]) -> Self {
        let data = val.to_vec();
        Node::Z { data }
    }
}
impl<T> Node<T>
where
    T: Sized,
{
    // Make an intermediate node; `weight` must equal `left`'s item count.
    fn newm(left: Ref<Node<T>>, right: Ref<Node<T>>, weight: usize) -> Ref<Node<T>> {
        Ref::new(Node::M {
            left,
            right,
            weight,
        })
    }

    // A leaf holding no items; used as the root of an empty vector.
    fn empty_leaf() -> Ref<Node<T>> {
        Ref::new(Node::Z {
            data: Vec::default(),
        })
    }

    // Total number of items under this node. For an `M` node the left
    // subtree's count is cached in `weight`; only the right side recurses.
    fn len(&self) -> usize {
        match self {
            Node::M { weight, right, .. } => weight + right.len(),
            Node::Z { data } => data.len(),
        }
    }

    // Copy-on-write duplicate of a leaf. Only ever called on `Z` nodes
    // (see `collect_leaf_nodes`); reaching it with `M` is a logic error.
    fn cow(&self) -> Node<T>
    where
        T: Clone,
    {
        match self {
            Node::Z { data } => Node::Z {
                data: data.to_vec(),
            },
            _ => unreachable!(),
        }
    }
    // Move items from `other` into `self` until `self` holds `cap` items;
    // returns a new leaf holding the leftovers, or `None` if `other` was
    // fully absorbed. Both nodes must be leaves.
    fn pack(&mut self, other: &Self, cap: usize) -> Option<Self>
    where
        T: Clone,
    {
        use std::cmp::min;
        match (self, other) {
            (Node::Z { data }, Node::Z { data: other }) => {
                let other = if data.len() < cap {
                    // Fill the remaining headroom from the front of `other`.
                    let n = min(cap - data.len(), other.len());
                    data.extend_from_slice(&other[..n]);
                    &other[n..]
                } else {
                    other
                };
                if !other.is_empty() {
                    Some(Node::Z {
                        data: other.to_vec(),
                    })
                } else {
                    None
                }
            }
            (_, _) => unreachable!(),
        }
    }

    // Bytes used by this subtree: each node's own size plus, for leaves,
    // the heap buffer (counted by capacity, not just length).
    fn footprint(&self) -> usize {
        let n = mem::size_of_val(self);
        n + match self {
            Node::Z { data } => data.capacity() * mem::size_of::<T>(),
            Node::M { left, right, .. } => left.footprint() + right.footprint(),
        }
    }

    // Index lookup: descend left while `off` falls inside the left
    // subtree's `weight`, otherwise descend right with the offset rebased.
    // Panics on out-of-bounds (slice index) — callers bounds-check first.
    fn get(&self, off: usize) -> &T {
        match self {
            Node::M { weight, left, .. } if off < *weight => left.get(off),
            Node::M { weight, right, .. } => right.get(off - *weight),
            Node::Z { data } => &data[off],
        }
    }
    // Copy-on-write insert of `val` at `off`. Returns the rebuilt subtree
    // and its max depth; may trigger an auto-rebalance on the way out.
    fn insert(&self, off: usize, val: T, rn: &Rebalance) -> Result<(Ref<Node<T>>, usize)>
    where
        T: Clone,
    {
        let (node, depth) = match self {
            Node::M {
                weight,
                left,
                right,
            } => {
                let weight = *weight;
                let (weight, left, right, depth) = if off < weight {
                    // Insert into the left subtree; its item count grows.
                    let (left, depth) = left.insert(off, val, rn)?;
                    (weight + 1, left, Ref::clone(right), depth)
                } else {
                    // Insert into the right subtree; `weight` is unchanged.
                    let off = off - weight;
                    let (right, depth) = right.insert(off, val, rn)?;
                    (weight, Ref::clone(left), right, depth)
                };
                (Node::newm(left, right, weight), depth + 1)
            }
            // Leaf with headroom: rebuild the leaf with `val` spliced in.
            Node::Z { data } if data.len() < max_leaf_items::<T>(rn.leaf_cap) => {
                let mut ndata = data[..off].to_vec();
                ndata.push(val);
                ndata.extend_from_slice(&data[off..]);
                (Ref::new(Node::Z { data: ndata }), 1)
            }
            // Full leaf: split into an `M` node over two half leaves.
            Node::Z { data } => (Self::split_insert(data, off, val), 2),
        };
        let (node, depth) = Node::auto_rebalance(node, depth, false, false, rn);
        Ok((node, depth))
    }
    // In-place insert; panics (unwrap) if any node along the path is
    // shared, since `Ref::get_mut` returns None under shared ownership.
    // Returns the depth of the modified path.
    fn insert_mut(&mut self, off: usize, val: T, rn: &Rebalance) -> Result<usize>
    where
        T: Clone,
    {
        let depth = match self {
            Node::M {
                weight,
                left,
                right,
            } => {
                if off < *weight {
                    let depth = Ref::get_mut(left).unwrap().insert_mut(off, val, rn)?;
                    *weight += 1;
                    depth
                } else {
                    let off = off - *weight;
                    Ref::get_mut(right).unwrap().insert_mut(off, val, rn)?
                }
            }
            Node::Z { data } if data.len() < max_leaf_items::<T>(rn.leaf_cap) => {
                data.insert(off, val);
                1
            }
            Node::Z { data } => {
                // Full leaf: split_insert returns a fresh, unshared Ref, so
                // try_unwrap cannot fail; replace this leaf with the `M`
                // node in place.
                *self = Ref::try_unwrap(Self::split_insert(data, off, val))
                    .ok()
                    .unwrap();
                2
            }
        };
        Ok(depth)
    }
    // Copy-on-write update: rebuilds only the path down to the target
    // leaf, sharing every untouched subtree. Returns (new subtree, old
    // value).
    fn update(&self, off: usize, value: T) -> (Ref<Node<T>>, T)
    where
        T: Clone,
    {
        match self {
            Node::M {
                weight,
                left,
                right,
            } if off < *weight => {
                let (left, old) = left.update(off, value);
                (Node::newm(left, Ref::clone(right), *weight), old)
            }
            Node::M {
                weight,
                left,
                right,
            } => {
                let (right, old) = right.update(off - *weight, value);
                (Node::newm(Ref::clone(left), right, *weight), old)
            }
            Node::Z { data } => {
                // Clone the leaf's buffer and overwrite the one slot.
                let old = data[off].clone();
                let mut data = data.to_vec();
                data[off] = value;
                (Ref::new(Node::Z { data }), old)
            }
        }
    }

    // In-place update; panics if a node along the path is shared.
    fn update_mut(&mut self, off: usize, value: T) -> T
    where
        T: Clone,
    {
        match self {
            Node::M { weight, left, .. } if off < *weight => {
                Ref::get_mut(left).unwrap().update_mut(off, value)
            }
            Node::M { weight, right, .. } => Ref::get_mut(right)
                .unwrap()
                .update_mut(off - *weight, value),
            Node::Z { data } => {
                let old = data[off].clone();
                data[off] = value;
                old
            }
        }
    }
    // Copy-on-write remove. Note: leaves are never merged on removal, so
    // the tree can grow sparse until a rebalance repacks it.
    fn remove(&self, off: usize) -> (Ref<Node<T>>, T)
    where
        T: Clone,
    {
        match self {
            Node::M {
                weight,
                left,
                right,
            } => {
                let weight = *weight;
                if off < weight {
                    // Removing from the left subtree shrinks its count.
                    let (left, old) = left.remove(off);
                    (Node::newm(left, Ref::clone(right), weight - 1), old)
                } else {
                    let (right, old) = right.remove(off - weight);
                    (Node::newm(Ref::clone(left), right, weight), old)
                }
            }
            Node::Z { data } => {
                let old = data[off].clone();
                let mut ndata = data[..off].to_vec();
                ndata.extend_from_slice(&data[(off + 1)..]);
                (Ref::new(Node::Z { data: ndata }), old)
            }
        }
    }

    // In-place remove; panics if a node along the path is shared.
    fn remove_mut(&mut self, off: usize) -> T
    where
        T: Clone,
    {
        match self {
            Node::M {
                weight,
                left,
                right,
            } => {
                if off < *weight {
                    *weight -= 1;
                    Ref::get_mut(left).unwrap().remove_mut(off)
                } else {
                    Ref::get_mut(right).unwrap().remove_mut(off - *weight)
                }
            }
            Node::Z { data } => {
                let old = data[off].clone();
                data.remove(off);
                // Shed excess capacity once the leaf is under half full.
                if (data.len() * 2) < data.capacity() {
                    data.shrink_to_fit()
                }
                old
            }
        }
    }
    // Split a full leaf around its midpoint and insert `val` into the
    // appropriate half; returns an `M` node over the two new leaves.
    fn split_insert(data: &[T], off: usize, val: T) -> Ref<Node<T>>
    where
        T: Clone,
    {
        let (mut ld, mut rd) = {
            let m = data.len() / 2;
            match data.len() {
                0 => (vec![], vec![]),
                1 => (data.to_vec(), vec![]),
                _ => (data[..m].to_vec(), data[m..].to_vec()),
            }
        };
        // `weight` ends up as the item count of the left leaf after the
        // insertion lands on one side or the other.
        let weight = match ld.len() {
            w if off < w => {
                ld.insert(off, val);
                ld.len()
            }
            w => {
                rd.insert(off - w, val);
                w
            }
        };
        Ref::new(Node::M {
            weight,
            left: Ref::new(Node::Z { data: ld }),
            right: Ref::new(Node::Z { data: rd }),
        })
    }
    // Split this subtree at `off`: items [off, len) move to the returned
    // split-root. Returns (remaining-node, split-root, items-moved).
    fn split_off(&self, off: usize, len: usize) -> (Ref<Node<T>>, Ref<Node<T>>, usize)
    where
        T: Clone,
    {
        match self {
            Node::M {
                left,
                right,
                weight,
            } if off < *weight => {
                // Split point lies in the left subtree; the whole right
                // subtree moves too, so graft it onto the split-off root.
                let (left, root, n) = left.split_off(off, *weight);
                let root = Node::newm(root, Ref::clone(right), n);
                let node = Node::newm(left, Node::empty_leaf(), weight - n);
                (node, root, n + (len - weight))
            }
            Node::M {
                left,
                right,
                weight,
            } => {
                let (right, root, n) = right.split_off(off - weight, len - weight);
                let node = Node::newm(Ref::clone(left), right, *weight);
                (node, root, n)
            }
            Node::Z { data } if off == 0 => {
                // Whole leaf moves; leave an empty leaf behind.
                let node = Node::empty_leaf();
                let root = Ref::new(Node::Z {
                    data: data.to_vec(),
                });
                (node, root, data.len())
            }
            Node::Z { data } => {
                let node = Ref::new(Node::Z {
                    data: data[..off].to_vec(),
                });
                let root = Ref::new(Node::Z {
                    data: data[off..].to_vec(),
                });
                (node, root, data[off..].len())
            }
        }
    }
    // Rebuild `node` into a balanced tree when `force` is set, or when
    // auto-rebalance is enabled and the depth heuristic trips. Note the
    // operator precedence: `force || (auto_rebalance && can_rebalance)`.
    fn auto_rebalance(
        node: Ref<Node<T>>,
        depth: usize,
        packed: bool,
        force: bool,
        rn: &Rebalance,
    ) -> (Ref<Node<T>>, usize)
    where
        T: Clone,
    {
        let doit = force || rn.auto_rebalance && rn.can_rebalance(depth);
        match doit {
            false => (node, depth),
            true => {
                // Flatten to leaves (optionally repacking them full), then
                // rebuild bottom-up at the ideal depth.
                let mut leafs = Node::collect_leaf_nodes(node, packed, rn.leaf_cap);
                leafs.reverse();
                let depth = (leafs.len() as f64).log2().ceil() as usize;
                let (nroot, _) = Node::build_bottoms_up(depth, &mut leafs);
                debug_assert!(leafs.is_empty());
                (nroot, depth)
            }
        }
    }
    // Gather all leaves under `root`, left to right. When `packed` is set
    // the leaves are copied and repacked so each one (except possibly the
    // last) holds the full `leaf_cap` worth of items.
    fn collect_leaf_nodes(
        root: Ref<Node<T>>,
        packed: bool,
        leaf_cap: usize,
    ) -> Vec<Ref<Node<T>>>
    where
        T: Clone,
    {
        // Iterative depth-first walk: descend left, stacking right children.
        let (mut stack, mut acc) = (vec![], vec![]);
        let mut node = root;
        let leafs = loop {
            match node.borrow() {
                Node::Z { .. } if stack.is_empty() => {
                    acc.push(Ref::clone(&node));
                    break acc;
                }
                Node::Z { .. } => {
                    acc.push(Ref::clone(&node));
                    node = stack.pop().unwrap();
                }
                Node::M { left, right, .. } => {
                    stack.push(Ref::clone(right));
                    node = Ref::clone(left);
                }
            }
        };
        if packed {
            let mut packed_leafs: Vec<Node<T>> = vec![];
            let cap = max_leaf_items::<T>(leaf_cap);
            for leaf in leafs.into_iter() {
                match packed_leafs.last_mut() {
                    None => packed_leafs.push(leaf.cow()),
                    Some(last) => {
                        // Top up the previous leaf; keep any spill-over
                        // as the next leaf.
                        if let Some(next) = last.pack(leaf.borrow(), cap) {
                            packed_leafs.push(next)
                        }
                    }
                }
            }
            packed_leafs.into_iter().map(Ref::new).collect()
        } else {
            leafs
        }
    }
    // Rebuild a balanced tree from `leafs` (stored reversed, so `pop()`
    // yields leaves left to right). Returns (subtree, its item count).
    fn build_bottoms_up(
        depth: usize,
        leafs: &mut Vec<Ref<Node<T>>>,
    ) -> (Ref<Node<T>>, usize) {
        let (root, n) = match (depth, leafs.len()) {
            // No leaves at all: synthesize an empty leaf.
            (0, 0) => (Ref::new(Node::Z { data: vec![] }), 0),
            // A single remaining leaf becomes the subtree itself.
            (0, 1) | (1, 1) => {
                let node = leafs.pop().unwrap();
                let n = node.len();
                (node, n)
            }
            // Depth 1 with two or more leaves left: join the next two.
            (1, n) if n >= 2 => {
                let (left, right) = (leafs.pop().unwrap(), leafs.pop().unwrap());
                let weight = left.len();
                let n = weight + right.len();
                let node = Node::M {
                    weight,
                    left,
                    right,
                };
                (Ref::new(node), n)
            }
            // Fewer leaves remain than the depth calls for: build flat.
            (_, 1) => Self::build_bottoms_up(1, leafs),
            (_, 2) => Self::build_bottoms_up(1, leafs),
            // Recurse: build the left half one level down, then pair it
            // with whatever the remaining leaves produce.
            (depth, _) => {
                let (left, weight) = Self::build_bottoms_up(depth - 1, leafs);
                match leafs.len() {
                    0 => (left, weight),
                    1 => {
                        let right = leafs.pop().unwrap();
                        let m = right.len();
                        let node = Node::M {
                            weight,
                            left,
                            right,
                        };
                        (Ref::new(node), weight + m)
                    }
                    _ => {
                        let (right, m) = Self::build_bottoms_up(depth - 1, leafs);
                        let node = Node::M {
                            weight,
                            left,
                            right,
                        };
                        (Ref::new(node), weight + m)
                    }
                }
            }
        };
        (root, n)
    }
fn build_iter_stack<'a, 'b>(node: &'a Node<T>, iter: &'b mut Iter<'a, T>) {
match node {
Node::M { left, right, .. } => {
iter.stack.push(&right);
Self::build_iter_stack(left, iter);
}
node @ Node::Z { .. } => {
iter.node = Some(node);
}
}
}
fn build_into_iter_stack(node: &Ref<Node<T>>, iter: &mut IntoIter<T>) {
match node.as_ref() {
Node::M { left, right, .. } => {
iter.stack.push(Ref::clone(right));
Self::build_into_iter_stack(left, iter);
}
Node::Z { .. } => {
iter.node = Some(Ref::clone(node));
}
}
}
// only used with src/bin/fuzzy program
#[cfg(test)]
fn fetch_multiversions(&self, acc: &mut Vec<*const u8>) -> usize {
match self {
Node::M { left, right, .. } => {
if Ref::strong_count(left) > 1 {
let ptr = Ref::as_ptr(left);
acc.push(ptr as *const u8);
}
let mut n = left.fetch_multiversions(acc);
if Ref::strong_count(right) > 1 {
let ptr = Ref::as_ptr(right);
acc.push(ptr as *const u8);
}
n += right.fetch_multiversions(acc);
n + 1
}
Node::Z { .. } => 1,
}
}
#[cfg(test)]
#[allow(dead_code)]
fn pretty_print(&self, mut prefix: String, len: usize) {
match self {
Node::M {
left,
right,
weight,
} => {
println!("{}nodem:{}", prefix, len);
prefix.push_str(" ");
left.pretty_print(prefix.clone(), *weight);
right.pretty_print(prefix, len - *weight);
}
Node::Z { data } => {
println!("{}nodez:{}", prefix, data.len());
}
}
}
}
// Snapshot of a `Vector`'s rebalance-relevant settings, captured once at
// the start of each mutating operation.
struct Rebalance {
    // Approximate leaf count: len / max-items-per-leaf, as f64 for log2.
    n_leafs: f64,
    auto_rebalance: bool,
    leaf_cap: usize,
}
impl Rebalance {
    fn new<T: Sized>(r: &Vector<T>) -> Rebalance {
        // Integer division: a lower bound on the number of leaves.
        let n_leafs = r.len / max_leaf_items::<T>(r.leaf_cap);
        Rebalance {
            n_leafs: n_leafs as f64,
            auto_rebalance: r.auto_rebalance,
            leaf_cap: r.leaf_cap,
        }
    }

    // Rebalance only when the path depth is past a fixed threshold AND
    // exceeds 3x the ideal depth (log2 of the leaf count).
    // NOTE(review): when n_leafs is 0, `log2()` is -inf and any depth past
    // the threshold triggers a rebalance — presumably intended; confirm.
    fn can_rebalance(&self, depth: usize) -> bool {
        match depth {
            n if n < crate::REBALANCE_THRESHOLD => false,
            _ if (depth as f64) > (self.n_leafs.log2() * 3_f64) => true,
            _ => false,
        }
    }
}
/// An iterator for Vector.
///
/// Created by the iter method on Vector.
pub struct Iter<'a, T> {
    // Right siblings still to be visited, deepest last.
    stack: Vec<&'a Node<T>>,
    // Leaf currently being drained, if any.
    node: Option<&'a Node<T>>,
    // Next index to yield within `node`.
    off: usize,
}
impl<'a, T> Iter<'a, T> {
    /// Prime an iterator positioned at the leftmost leaf under `root`.
    fn new(root: &'a Node<T>) -> Iter<'a, T> {
        let mut it = Iter { stack: Vec::new(), node: None, off: 0 };
        Node::build_iter_stack(root, &mut it);
        it
    }
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match self.node {
            // Current leaf still has items: yield the next one.
            Some(Node::Z { data }) if self.off < data.len() => {
                let item = &data[self.off];
                self.off += 1;
                Some(item)
            }
            // Leaf exhausted (or none yet): descend into the next stacked
            // subtree and retry.
            Some(Node::Z { .. }) | None => match self.stack.pop() {
                Some(node) => {
                    self.off = 0;
                    Node::build_iter_stack(node, self);
                    self.next()
                }
                None => None,
            },
            // `node` is only ever set to a leaf by build_iter_stack.
            Some(_) => unreachable!(),
        }
    }
}
/// An iterator that moves elements out of Vector.
///
/// Created by the into_iter method on Vector (provided by the
/// IntoIterator trait).
pub struct IntoIter<T> {
    // Right siblings still to be visited, deepest last.
    stack: Vec<Ref<Node<T>>>,
    // Leaf currently being drained, if any.
    node: Option<Ref<Node<T>>>,
    // Next index to yield within `node`.
    off: usize,
}
impl<T> Iterator for IntoIter<T>
where
    T: Clone,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        match self.node.as_ref().map(|x| x.as_ref()) {
            // Current leaf still has items: clone the next one out (the
            // leaf may be shared with other versions, so no move).
            Some(Node::Z { data }) if self.off < data.len() => {
                let item = data[self.off].clone();
                self.off += 1;
                Some(item)
            }
            // Leaf exhausted (or none yet): descend into the next subtree.
            Some(Node::Z { .. }) | None => match self.stack.pop() {
                Some(node) => {
                    self.off = 0;
                    Node::build_into_iter_stack(&node, self);
                    self.next()
                }
                None => None,
            },
            // `node` is only ever set to a leaf by build_into_iter_stack.
            Some(_) => unreachable!(),
        }
    }
}
// Convert a leaf-size budget in bytes into an item count: the ceiling of
// `cap / size_of::<T>()`. Guards against zero-sized types, for which the
// original expression divided by zero and panicked; a ZST leaf is capped
// at `cap` items instead (the byte budget is meaningless for ZSTs).
fn max_leaf_items<T>(cap: usize) -> usize {
    match mem::size_of::<T>() {
        0 => cap,
        s => (cap / s) + if cap % s == 0 { 0 } else { 1 },
    }
}
#[cfg(test)]
// Cross-check `arr` against the reference slice `refv`: memory ratio,
// cached length vs computed tree length, every element by index, and an
// out-of-bounds access returning an error.
pub fn validate<T>(arr: &Vector<T>, refv: &[T])
where
    T: std::fmt::Debug + Clone + Eq + PartialEq,
{
    let k = std::mem::size_of::<T>();
    validate_mem_ratio(k, arr.footprint(), arr.len());
    debug_assert_eq!(refv.len(), arr.len());
    debug_assert_eq!(arr.len(), arr.root.len());
    for (off, val) in refv.iter().enumerate() {
        debug_assert_eq!(arr.get(off).unwrap(), val, "off-{}", off);
    }
    debug_assert!(arr.get(arr.len()).is_err());
}
#[cfg(test)]
// Assert per-item memory overhead stays within loose bounds: small
// collections get fixed allowances, larger ones a percentage ratio.
pub fn validate_mem_ratio(k: usize, mem: usize, n: usize) {
    match n {
        0 => debug_assert!(mem < 1000, "n:{} footp:{}", n, mem),
        n if n < 200 => {
            let cap = k * n * 3 + 1000;
            debug_assert!(mem < cap, "n:{} footp:{}", n, mem)
        }
        n => {
            let k = k as f64;
            // Percent overhead per item relative to the payload size.
            let ratio = ((((mem as f64) / (n as f64)) - k) / k) * 100.0;
            debug_assert!(
                (ratio < 120.0) || (n <= 1000),
                "n:{} footp:{} ratio:{}",
                n,
                mem,
                ratio,
            );
        }
    }
}
#[cfg(test)]
#[path = "ppar_test.rs"]
mod ppar_test;
#[cfg(test)]
#[path = "fuzzy_test.rs"]
mod fuzzy_test;
|
use bigdecimal::{BigDecimal, Zero};
use chrono::{Datelike, Local, NaiveDate};
use diesel::prelude::*;
use crate::apps::index_response::Data;
use crate::db::{
models::{Budget, SerializedBudget},
pagination::*,
schema::budgets_budget,
DatabaseQuery, PooledConnection,
};
use crate::errors::DbResult;
/// Result alias for the paginated budgets listing.
pub type GetBudgetsResult = DbResult<Data<SerializedBudget>>;

/// Query message: fetch one page of budgets belonging to `user_id`.
#[derive(Clone)]
pub struct GetBudgets {
    pub user_id: i32,
    // 1-based page number.
    pub page: i64,
    pub per_page: i64,
}
impl DatabaseQuery for GetBudgets {
    type Data = Data<SerializedBudget>;

    // Thin adapter: delegate to the module-level `handle`.
    fn execute(&self, connection: PooledConnection) -> GetBudgetsResult {
        handle(self, &connection)
    }
}
// Sum this month's "EXP" records for the budget's owner, scoped by the
// budget's tag filter: "INCL" = only records whose tags overlap the
// budget's, "EXCL" = only non-overlapping, anything else = all records.
// Returns 0.00 (scale 2) when no records match.
fn budget_spent(budget: &Budget, connection: &PooledConnection) -> DbResult<BigDecimal> {
    use crate::db::schema::records_record;
    use diesel::dsl::{not, sum};

    // Start of day 1 of the current month, local time. `with_day0(0)` on a
    // valid datetime is always Some, so the unwrap cannot fire here.
    let first_month_day = Local::now().naive_local().with_day0(0).unwrap();
    let query = records_record::table
        .select(sum(records_record::amount))
        .filter(
            records_record::user_id.eq(budget.user_id).and(
                records_record::transaction_type
                    .eq("EXP")
                    .and(records_record::created_at.ge(first_month_day)),
            ),
        );
    let query_result = match budget.tags_type.as_str() {
        "INCL" => query
            .filter(records_record::tags.overlaps_with(&budget.tags))
            .first::<Option<BigDecimal>>(connection)?,
        "EXCL" => query
            .filter(not(records_record::tags.overlaps_with(&budget.tags)))
            .first::<Option<BigDecimal>>(connection)?,
        _ => query.first::<Option<BigDecimal>>(connection)?,
    };
    Ok(query_result.unwrap_or_else(BigDecimal::zero).with_scale(2))
}
// Number of days in `today`'s month: take the first day of the following
// month and step back one day, which lands on the month's last day.
fn ndays_in_the_current_month(today: NaiveDate) -> u32 {
    let (y, m) = match today.month() {
        // December rolls over into January of the next year.
        12 => (today.year() + 1, 1),
        month => (today.year(), month + 1),
    };
    NaiveDate::from_ymd(y, m, 1).pred().day()
}
// TODO: add tests
// Derive display stats (spent, left, per-day averages) for one budget.
fn serialize_budget(budget: Budget, conn: &PooledConnection) -> DbResult<SerializedBudget> {
    use bigdecimal::ToPrimitive;

    let mut res = SerializedBudget::default();
    let today = Local::today().naive_local();
    let spent = budget_spent(&budget, conn)?;
    let days_in_this_month = ndays_in_the_current_month(today);
    // we need to take into account spendings for today
    // NOTE(review): `day0()` is 0-based, so remaining-days-including-today
    // is `days - day0`; the `+ 1` here counts one extra day. Looks like an
    // off-by-one — confirm intended behavior before changing.
    let rest_days = days_in_this_month - today.day0() + 1;
    let left = (budget.amount.clone() - spent.clone())
        .to_f64()
        .unwrap_or(0.0);
    res.spent = spent.to_f64().unwrap_or(0.0);
    res.left = left;
    res.average_per_day = (budget.amount.clone() / BigDecimal::from(days_in_this_month))
        .to_f64()
        .unwrap_or(0.0);
    res.left_average_per_day = left / rest_days.to_f64().unwrap_or(0.0f64);
    res.name = budget.name;
    res.amount = budget.amount;
    Ok(res)
}
// Load one page of the user's budgets plus the total row count reported
// by the pagination helper (each row arrives as `(budget, total)`).
fn get_page_of_budgets(msg: &GetBudgets, conn: &PooledConnection) -> DbResult<(Vec<Budget>, i64)> {
    let rows = budgets_budget::table
        .select(budgets_budget::all_columns)
        .filter(budgets_budget::user_id.eq(msg.user_id))
        .order(budgets_budget::id.asc())
        .paginate(msg.page)
        .per_page(msg.per_page)
        .load::<(Budget, i64)>(conn)?;
    // Every row carries the same total; an empty page means zero.
    let total = rows.first().map(|row| row.1).unwrap_or(0);
    let budgets: Vec<Budget> = rows.into_iter().map(|row| row.0).collect();
    Ok((budgets, total))
}
// Load one page of budgets, serialize each with its spending stats, and
// wrap the page in pagination metadata (`next`/`previous` flags).
fn handle(msg: &GetBudgets, conn: &PooledConnection) -> GetBudgetsResult {
    // `msg` is already a reference; the original `&msg` was a needless
    // double borrow (clippy::needless_borrow).
    let (results, total) = get_page_of_budgets(msg, conn)?;
    let total_pages = (total as f64 / msg.per_page as f64).ceil() as i64;
    // Short-circuits on the first serialization error.
    let results = results
        .into_iter()
        .map(|budget| serialize_budget(budget, conn))
        .collect::<DbResult<Vec<SerializedBudget>>>()?;
    let previous = msg.page > 1;
    let next = msg.page < total_pages;
    Ok(Data {
        total,
        results,
        next,
        previous,
    })
}
#[cfg(test)]
mod tests;
|
#![feature(arbitrary_self_types, futures_api, pin)]
use std::sync::mpsc::{sync_channel, SyncSender};
use std::future::{Future, FutureObj};
use std::mem::PinMut;
use std::sync::{Arc, Mutex};
use std::task::{
Context,
Executor,
local_waker_from_nonlocal,
Poll,
SpawnObjError,
Wake,
};
// Minimal executor for the pre-stabilization (2018 nightly) futures API:
// it accepts spawned futures and drops them. The demo drives its single
// task manually through a channel in `main`, so this never runs anything.
struct Exec;

impl Executor for Exec {
    fn spawn_obj(&mut self, _obj: FutureObj<'static, ()>) -> Result<(), SpawnObjError> {
        Ok(())
    }
}
// Future that is Pending on the first poll and Ready on the second.
// Uses the old nightly API (`PinMut`, `futures_api`) that predates the
// stabilized `Pin`/`std::task` interfaces.
struct MyFuture(bool);

impl Future for MyFuture {
    type Output = ();

    fn poll(mut self: PinMut<Self>, cx: &mut Context) -> Poll<Self::Output> {
        if self.0 {
            return Poll::Ready(());
        }
        // First poll: flip the flag, wake ourselves so the executor loop
        // re-queues this task, and report Pending.
        self.0 = true;
        cx.waker().wake();
        Poll::Pending
    }
}
/// A task couples the future with a handle back onto the run queue:
/// waking the task sends it into the channel that `main` drains.
struct Task {
    sender: SyncSender<Arc<Task>>,
    future: Mutex<MyFuture>
}
impl Wake for Task {
    fn wake(arc_self: &Arc<Self>) {
        let cloned = arc_self.clone();
        // Ignore send errors: if the receiver is gone, the loop has finished.
        let _ = arc_self.sender.send(cloned);
    }
}
fn main() {
    let mut exec = Exec;
    // Run queue: woken tasks are pushed here and polled by the loop below.
    let (tx, rx) = sync_channel(1000);
    let task = Arc::new(Task { future: Mutex::new(MyFuture(false)), sender: tx.clone() });
    let waker = local_waker_from_nonlocal(task.clone());
    let cx = &mut Context::new(&waker, &mut exec);
    // Seed the queue with the initial task so the loop has work to do.
    let _ = tx.send(task);
    // Minimal executor loop: poll each task as it is woken.
    // Prints "Pending" on the first poll, then "Ready" on the second.
    while let Ok(task) = rx.recv() {
        let mut future = task.future.lock().unwrap();
        match PinMut::new(&mut *future).poll(cx) {
            Poll::Pending => println!("Pending"),
            Poll::Ready(()) => {
                println!("Ready");
                break;
            },
        }
    }
} |
#![cfg_attr(not(feature = "std"), no_std)]
pub mod weights;
use weights::WeightInfo;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
use frame_support::{
decl_error, decl_event, decl_module, decl_storage,
dispatch::DispatchResult,
ensure,
traits::{Currency, ExistenceRequirement, Get, Imbalance, OnUnbalanced, WithdrawReasons},
weights::DispatchClass,
weights::WeightToFeePolynomial,
};
use frame_system::{ensure_root, ensure_signed};
use sp_runtime::{
traits::{DispatchInfoOf, PostDispatchInfoOf, Saturating, Zero},
transaction_validity::{InvalidTransaction, TransactionValidityError},
};
use sp_std::prelude::*;
use pallet_transaction_payment::OnChargeTransaction;
use sp_std::marker::PhantomData;
use frame_support::weights::{Pays, Weight};
use orml_traits::{MultiCurrency, MultiCurrencyExtended};
use primitives::asset::AssetPair;
use primitives::traits::{CurrencySwap, AMM};
use primitives::{Amount, AssetId, Balance, CORE_ASSET_ID};
use orml_utilities::with_transaction_result;
use orml_utilities::OrderedSet;
type NegativeImbalanceOf<C, T> = <C as Currency<<T as frame_system::Config>::AccountId>>::NegativeImbalance;
/// Pallet configuration: transaction-fee payment in a user-chosen currency.
pub trait Config: frame_system::Config + pallet_transaction_payment::Config {
    /// Because this pallet emits events, it depends on the runtime's definition of an event.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;
    /// The currency type in which fees will be paid.
    type Currency: Currency<Self::AccountId> + Send + Sync;
    /// Multi Currency support (one balance per asset id).
    type MultiCurrency: MultiCurrency<Self::AccountId>
        + MultiCurrencyExtended<Self::AccountId, CurrencyId = AssetId, Balance = Balance, Amount = Amount>;
    /// AMM pool used to swap a non-native fee currency for the native currency.
    type AMMPool: AMM<Self::AccountId, AssetId, AssetPair, Balance>;
    /// Weight information for the extrinsics.
    type WeightInfo: WeightInfo;
    /// Should a fee be paid for setting a currency (`set_currency`).
    type WithdrawFeeForSetCurrency: Get<Pays>;
    /// Convert a weight value into a deductible fee based on the currency type.
    type WeightToFee: WeightToFeePolynomial<Balance = Balance>;
}
decl_event!(
    pub enum Event<T>
    where
        AccountId = <T as frame_system::Config>::AccountId,
    {
        /// An account set the currency its fees are paid in.
        /// [who, currency]
        CurrencySet(AccountId, AssetId),
        /// New accepted currency added
        /// [who, currency]
        CurrencyAdded(AccountId, AssetId),
        /// Accepted currency removed
        /// [who, currency]
        CurrencyRemoved(AccountId, AssetId),
        /// Member added
        /// [who]
        MemberAdded(AccountId),
        /// Member removed
        /// [who]
        MemberRemoved(AccountId),
    }
);
// The pallet's errors
decl_error! {
    pub enum Error for Module<T: Config> {
        /// Selected currency is not supported
        UnsupportedCurrency,
        /// Zero balance of selected currency
        ZeroBalance,
        /// Not allowed to add or remove accepted currency
        NotAllowed,
        /// Currency being added is already in the list of accepted currencies
        AlreadyAccepted,
        /// The core asset itself cannot be added to or removed from the accepted list
        CoreAssetNotAllowed,
        /// Account is already a member of authorities
        AlreadyMember,
        /// Account is not a member of authorities
        NotAMember,
    }
}
decl_storage! {
    trait Store for Module<T: Config> as TransactionPayment {
        /// Account currency map: the asset each account pays fees in (native when unset)
        pub AccountCurrencyMap get(fn get_currency): map hasher(blake2_128_concat) T::AccountId => Option<AssetId>;
        /// Non-native currencies accepted for fee payment (genesis-configurable)
        pub AcceptedCurrencies get(fn currencies) config(): OrderedSet<AssetId>;
        /// Accounts allowed to manage the accepted-currency list (genesis-configurable)
        pub Authorities get(fn authorities) config(): Vec<T::AccountId>;
    }
}
decl_module! {
pub struct Module<T: Config> for enum Call where origin: T::Origin {
// Errors must be initialized if they are used by the pallet.
type Error = Error<T>;
// Events must be initialized if they are used by the pallet.
fn deposit_event() = default;
/// Set currency in which transaction fees are paid.
/// This is feeless transaction.
/// Selected currency must have non-zero balance otherwise is not allowed to be set.
#[weight = (<T as Config>::WeightInfo::set_currency(), Pays::No)]
pub fn set_currency(
origin,
currency: AssetId,
) -> DispatchResult{
let who = ensure_signed(origin)?;
if currency == CORE_ASSET_ID || Self::currencies().contains(¤cy){
if T::MultiCurrency::free_balance(currency, &who) == Balance::zero(){
return Err(Error::<T>::ZeroBalance.into());
}
return with_transaction_result(|| {
<AccountCurrencyMap<T>>::insert(who.clone(), currency);
if T::WithdrawFeeForSetCurrency::get() == Pays::Yes{
Self::withdraw_set_fee(&who, currency)?;
}
Self::deposit_event(RawEvent::CurrencySet(who, currency));
Ok(())
});
}
Err(Error::<T>::UnsupportedCurrency.into())
}
/// Add additional currency to the list of supported currencies which fees can be paid in
/// Only selected members can perform this action
#[weight = (<T as Config>::WeightInfo::add_currency(), Pays::No)]
pub fn add_currency(origin, currency: AssetId) -> DispatchResult{
let who = ensure_signed(origin)?;
ensure!(
currency != CORE_ASSET_ID,
Error::<T>::CoreAssetNotAllowed
);
// Only selected accounts can perform this action
ensure!(
Self::authorities().contains(&who),
Error::<T>::NotAllowed
);
if AcceptedCurrencies::mutate(|x| x.insert(currency)) {
Self::deposit_event(RawEvent::CurrencyAdded(who, currency));
return Ok(());
}
Err(Error::<T>::AlreadyAccepted.into())
}
/// Remove currency from the list of supported currencies
/// Only selected members can perform this action
#[weight = (<T as Config>::WeightInfo::remove_currency(), Pays::No)]
pub fn remove_currency(origin, currency: AssetId) -> DispatchResult{
let who = ensure_signed(origin)?;
ensure!(
currency != CORE_ASSET_ID,
Error::<T>::CoreAssetNotAllowed
);
// Only selected accounts can perform this action
ensure!(
Self::authorities().contains(&who),
Error::<T>::NotAllowed
);
if AcceptedCurrencies::mutate(|x| x.remove(¤cy)) {
Self::deposit_event(RawEvent::CurrencyRemoved(who, currency));
return Ok(());
}
Err(Error::<T>::UnsupportedCurrency.into())
}
/// Add an account as member to list of authorities who can manage list of accepted currencies
#[weight = (<T as Config>::WeightInfo::add_member(), Pays::No)]
pub fn add_member(origin, member: T::AccountId) -> DispatchResult{
ensure_root(origin)?;
ensure!(
! Self::authorities().contains(&member),
Error::<T>::AlreadyMember
);
Self::add_new_member(&member);
Self::deposit_event(RawEvent::MemberAdded(member));
Ok(())
}
/// Add an account as member to list of authorities who can manage list of accepted currencies
#[weight = (<T as Config>::WeightInfo::remove_member(), Pays::No)]
pub fn remove_member(origin, member: T::AccountId) -> DispatchResult{
ensure_root(origin)?;
ensure!(
Self::authorities().contains(&member),
Error::<T>::NotAMember
);
Authorities::<T>::mutate(|x| x.retain(|val| *val != member));
Self::deposit_event(RawEvent::MemberRemoved(member));
Ok(())
}
}
}
impl<T: Config> Module<T> {
    /// Buy enough of the native (CORE) asset to cover `fee`, paid for in the
    /// account's chosen fee currency. No-op when the account pays in CORE.
    pub fn swap_currency(who: &T::AccountId, fee: Balance) -> DispatchResult {
        // Let's determine currency in which user would like to pay the fee
        let fee_currency = match Module::<T>::get_currency(who) {
            Some(c) => c,
            _ => CORE_ASSET_ID,
        };
        // If not native currency, let's buy CORE asset first and then pay with that.
        if fee_currency != CORE_ASSET_ID {
            // `who` is already a reference — the extra `&` was redundant.
            // `2u128 * fee` is the maximum amount of the fee currency the swap may spend.
            T::AMMPool::buy(who, AssetPair{asset_out: CORE_ASSET_ID, asset_in: fee_currency}, fee, 2u128 * fee, false)?;
        }
        Ok(())
    }
    /// Append an account to the list of authorities.
    pub fn add_new_member(who: &T::AccountId) {
        Authorities::<T>::mutate(|x| x.push(who.clone()));
    }
    /// Charge the (base extrinsic + weight) fee for `set_currency`: swap into
    /// CORE first, then withdraw from the account's balance.
    ///
    /// NOTE(review): the withdrawn amount is computed in native-fee units but
    /// withdrawn in `currency` — confirm this 1:1 denomination is intended.
    pub fn withdraw_set_fee(who: &T::AccountId, currency: AssetId) -> DispatchResult {
        let base_fee = Self::weight_to_fee(T::BlockWeights::get().get(DispatchClass::Normal).base_extrinsic);
        let adjusted_weight_fee = Self::weight_to_fee(T::WeightInfo::set_currency());
        let fee = base_fee.saturating_add(adjusted_weight_fee);
        Self::swap_currency(who, fee)?;
        T::MultiCurrency::withdraw(currency, who, fee)?;
        Ok(())
    }
    fn weight_to_fee(weight: Weight) -> Balance {
        // cap the weight to the maximum defined in runtime, otherwise it will be the
        // `Bounded` maximum of its data type, which is not desired.
        let capped_weight: Weight = weight.min(T::BlockWeights::get().max_block);
        <T as Config>::WeightToFee::calc(&capped_weight)
    }
}
/// Bridge so other pallets (e.g. the fee adapter below) can trigger the
/// fee-currency swap through the `CurrencySwap` trait; delegates to the
/// inherent `swap_currency`.
impl<T: Config> CurrencySwap<<T as frame_system::Config>::AccountId, Balance> for Module<T> {
    fn swap_currency(who: &T::AccountId, fee: u128) -> DispatchResult {
        Self::swap_currency(who, fee)
    }
}
/// Implements the transaction payment for native as well as non-native currencies
pub struct MultiCurrencyAdapter<C, OU, SW>(PhantomData<(C, OU, SW)>);
impl<T, C, OU, SW> OnChargeTransaction<T> for MultiCurrencyAdapter<C, OU, SW>
where
    T: Config,
    T::TransactionByteFee: Get<<C as Currency<<T as frame_system::Config>::AccountId>>::Balance>,
    C: Currency<<T as frame_system::Config>::AccountId>,
    C::PositiveImbalance:
        Imbalance<<C as Currency<<T as frame_system::Config>::AccountId>>::Balance, Opposite = C::NegativeImbalance>,
    C::NegativeImbalance:
        Imbalance<<C as Currency<<T as frame_system::Config>::AccountId>>::Balance, Opposite = C::PositiveImbalance>,
    OU: OnUnbalanced<NegativeImbalanceOf<C, T>>,
    C::Balance: Into<Balance>,
    SW: CurrencySwap<T::AccountId, Balance>,
{
    type LiquidityInfo = Option<NegativeImbalanceOf<C, T>>;
    type Balance = <C as Currency<<T as frame_system::Config>::AccountId>>::Balance;
    /// Withdraw the predicted fee from the transaction origin.
    ///
    /// Note: The `fee` already includes the `tip`.
    fn withdraw_fee(
        who: &T::AccountId,
        _call: &T::Call,
        _info: &DispatchInfoOf<T::Call>,
        fee: Self::Balance,
        tip: Self::Balance,
    ) -> Result<Self::LiquidityInfo, TransactionValidityError> {
        if fee.is_zero() {
            return Ok(None);
        }
        let withdraw_reason = if tip.is_zero() {
            WithdrawReasons::TRANSACTION_PAYMENT
        } else {
            WithdrawReasons::TRANSACTION_PAYMENT | WithdrawReasons::TIP
        };
        // Swap the payer's chosen fee currency into the native asset first;
        // a failed swap invalidates the transaction instead of panicking.
        if SW::swap_currency(&who, fee.into()).is_err() {
            return Err(InvalidTransaction::Payment.into());
        }
        match C::withdraw(who, fee, withdraw_reason, ExistenceRequirement::KeepAlive) {
            Ok(imbalance) => Ok(Some(imbalance)),
            Err(_) => Err(InvalidTransaction::Payment.into()),
        }
    }
    /// Hand the fee and the tip over to the `[OnUnbalanced]` implementation.
    /// Since the predicted fee might have been too high, parts of the fee may
    /// be refunded.
    ///
    /// Note: The `fee` already includes the `tip`.
    /// Note: This is the default implementation
    fn correct_and_deposit_fee(
        who: &T::AccountId,
        _dispatch_info: &DispatchInfoOf<T::Call>,
        _post_info: &PostDispatchInfoOf<T::Call>,
        corrected_fee: Self::Balance,
        tip: Self::Balance,
        already_withdrawn: Self::LiquidityInfo,
    ) -> Result<(), TransactionValidityError> {
        if let Some(paid) = already_withdrawn {
            // Calculate how much refund we should return
            let refund_amount = paid.peek().saturating_sub(corrected_fee);
            // refund to the account that paid the fees. If this fails, the
            // account might have dropped below the existential balance. In
            // that case we don't refund anything.
            let refund_imbalance =
                C::deposit_into_existing(&who, refund_amount).unwrap_or_else(|_| C::PositiveImbalance::zero());
            // merge the imbalance caused by paying the fees and refunding parts of it again.
            let adjusted_paid = paid
                .offset(refund_imbalance)
                .map_err(|_| TransactionValidityError::Invalid(InvalidTransaction::Payment))?;
            // Call someone else to handle the imbalance (fee and tip separately)
            let imbalances = adjusted_paid.split(tip);
            OU::on_unbalanceds(Some(imbalances.0).into_iter().chain(Some(imbalances.1)));
        }
        Ok(())
    }
}
|
use error::CommandResult;
use source::Source;
use ident::Ident;
use asset::Asset;
/// An asset as received from a remote peer, before it is stored locally.
#[derive(Debug)]
pub struct ReceivedAsset {
    /// Unique identifier of the asset.
    pub uuid: String,
    /// MD5 digest of the asset content.
    pub md5: String,
    /// MIME type of the asset content.
    pub mime: String,
    /// Optional human-readable name.
    pub name: Option<String>,
    /// Where the asset content comes from.
    pub source: Source,
    /// Optional tags attached to the asset.
    pub tags: Option<Vec<String>>
}
pub trait DataBackend {
type Asset: Asset;
fn add_asset(&mut self, Source, Option<Vec<String>>) -> CommandResult<Self::Asset>;
fn receive_asset(&mut self, ReceivedAsset) -> CommandResult<Self::Asset>;
fn get_asset(&mut self, Ident) -> CommandResult<Self::Asset>;
fn find_assets(&mut self, Option<String>) -> CommandResult<Vec<Self::Asset>>;
fn gen_key(&mut self, &str) -> CommandResult<()>;
//fn set_fingerprint(&mut self, &str, &Self::Asset) -> CommandResult<()>;
fn authenticate(&self, &str, &[u8]) -> CommandResult<bool>;
}
|
//! Responses from any call to the Cosmos API.
#![allow(missing_docs)]
mod create_collection_response;
mod create_reference_attachment_response;
mod create_slug_attachment_response;
mod create_stored_procedure_response;
mod create_trigger_response;
mod create_user_defined_function_response;
mod delete_attachment_response;
mod delete_stored_procedure_response;
mod delete_trigger_response;
mod delete_user_defined_function_response;
mod execute_stored_procedure_response;
mod get_attachment_response;
mod get_partition_key_ranges_response;
mod list_attachments_response;
mod list_documents_response;
mod list_permissions_response;
mod list_stored_procedures_response;
mod list_triggers_response;
mod list_user_defined_functions_response;
mod query_documents_response;
mod replace_reference_attachment_response;
mod replace_stored_procedure_response;
pub use create_collection_response::CreateCollectionResponse;
pub use create_reference_attachment_response::CreateReferenceAttachmentResponse;
pub use create_slug_attachment_response::CreateSlugAttachmentResponse;
pub use create_stored_procedure_response::CreateStoredProcedureResponse;
pub use create_trigger_response::CreateTriggerResponse;
pub use create_user_defined_function_response::CreateUserDefinedFunctionResponse;
pub use delete_attachment_response::DeleteAttachmentResponse;
pub use delete_stored_procedure_response::DeleteStoredProcedureResponse;
pub use delete_trigger_response::DeleteTriggerResponse;
pub use delete_user_defined_function_response::DeleteUserDefinedFunctionResponse;
pub use execute_stored_procedure_response::ExecuteStoredProcedureResponse;
pub use get_attachment_response::GetAttachmentResponse;
pub use get_partition_key_ranges_response::GetPartitionKeyRangesResponse;
pub use list_attachments_response::ListAttachmentsResponse;
pub use list_documents_response::{
ListDocumentsResponse, ListDocumentsResponseAttributes, ListDocumentsResponseEntities,
};
pub use list_permissions_response::ListPermissionsResponse;
pub use list_stored_procedures_response::ListStoredProceduresResponse;
pub use list_triggers_response::ListTriggersResponse;
pub use list_user_defined_functions_response::ListUserDefinedFunctionsResponse;
pub use query_documents_response::{
QueryDocumentsResponse, QueryDocumentsResponseDocuments, QueryDocumentsResponseRaw,
QueryResponseMeta, QueryResult,
};
pub use replace_reference_attachment_response::ReplaceReferenceAttachmentResponse;
pub use replace_stored_procedure_response::ReplaceStoredProcedureResponse;
|
use crate::{random_id, JsObject};
use js_sys::Array;
use sainome::Ref;
use wasm_bindgen::{prelude::*, JsCast};
/// The value carried by a [`Property`]: empty, a number, a string,
/// or a list of nested child properties.
#[derive(Clone)]
pub enum PropertyValue {
    None,
    Num(f64),
    Str(String),
    Children(Vec<Property>),
}
/// A named, possibly nested property identified by a random 128-bit id.
#[derive(Clone)]
pub struct Property {
    // Randomly generated identifier (see `random_id::u128val`).
    id: u128,
    name: String,
    // Whether this property is selected for display.
    is_selected_to_show: bool,
    value: PropertyValue,
}
impl PropertyValue {
    /// Tag string identifying the variant; mirrors the `type` field used by
    /// the JS-object round trip (`as_object` / `From<JsObject>`).
    pub fn type_name(&self) -> &'static str {
        match self {
            Self::None => "None",
            Self::Num(..) => "Num",
            Self::Str(..) => "Str",
            Self::Children(..) => "Children",
        }
    }
    /// Serialize to a JS object of shape `{ type, payload }`.
    /// `None` becomes `undefined`; children are serialized recursively
    /// into a JS array.
    pub fn as_object(&self) -> JsObject {
        let payload: JsValue = match self {
            Self::None => JsValue::undefined(),
            Self::Num(x) => JsValue::from(*x),
            Self::Str(x) => JsValue::from(x),
            Self::Children(children) => {
                let payload = Array::new();
                for child in children {
                    payload.push(child.as_object().as_ref());
                }
                payload.into()
            }
        };
        object! {
            type: self.type_name(),
            payload: payload
        }
    }
    /// String form of scalar values; `None` for the empty and nested variants.
    pub fn as_option_string(&self) -> Option<String> {
        match &self {
            Self::None => None,
            Self::Children(..) => None,
            Self::Num(x) => Some(x.to_string()),
            Self::Str(x) => Some(x.to_string()),
        }
    }
}
impl Property {
    /// Construct an empty-valued property with a fresh random id.
    pub fn new_as_none() -> Self {
        Self {
            id: random_id::u128val(),
            name: "".into(),
            is_selected_to_show: false,
            value: PropertyValue::None,
        }
    }
    /// Construct a numeric property initialized to `0.0`.
    pub fn new_as_num() -> Self {
        Self {
            id: random_id::u128val(),
            name: "".into(),
            is_selected_to_show: false,
            value: PropertyValue::Num(0.0),
        }
    }
    /// Construct a string property initialized to the empty string.
    pub fn new_as_str() -> Self {
        Self {
            id: random_id::u128val(),
            name: "".into(),
            is_selected_to_show: false,
            value: PropertyValue::Str("".into()),
        }
    }
    /// Construct a parent property with no children yet.
    pub fn new_as_parent() -> Self {
        Self {
            id: random_id::u128val(),
            name: "".into(),
            is_selected_to_show: false,
            value: PropertyValue::Children(vec![]),
        }
    }
    /// Builder-style: set the display name.
    pub fn with_name(mut self, name: impl Into<String>) -> Self {
        self.name = name.into();
        self
    }
    /// Builder-style: mark this property as selected for display.
    pub fn with_selected_to_show(mut self) -> Self {
        self.is_selected_to_show = true;
        self
    }
    /// Append a child property. A `None` value is promoted to `Children`;
    /// pushing onto a scalar (`Num`/`Str`) value is silently ignored.
    pub fn push(&mut self, prop: Property) {
        match &mut self.value {
            PropertyValue::Children(children) => {
                children.push(prop);
            }
            PropertyValue::None => {
                self.value = PropertyValue::Children(vec![prop]);
            }
            _ => {}
        }
    }
    /// Depth-first lookup of the property with the given id (including self).
    pub fn get(&self, id: &u128) -> Option<&Self> {
        if self.id == *id {
            Some(self)
        } else if let PropertyValue::Children(children) = &self.value {
            children.iter().find_map(|x| x.get(id))
        } else {
            None
        }
    }
    /// Mutable variant of [`Property::get`].
    pub fn get_mut(&mut self, id: &u128) -> Option<&mut Self> {
        if self.id == *id {
            Some(self)
        } else if let PropertyValue::Children(children) = &mut self.value {
            children.iter_mut().find_map(|x| x.get_mut(id))
        } else {
            None
        }
    }
    /// Collect all properties selected for display. A selected node is
    /// returned as-is, without descending into its children.
    pub fn selecteds(&self) -> Vec<&Self> {
        if self.is_selected_to_show {
            vec![self]
        } else if let PropertyValue::Children(children) = &self.value {
            let mut selecteds = vec![];
            for child in children {
                selecteds.append(&mut child.selecteds());
            }
            selecteds
        } else {
            vec![]
        }
    }
    /// Remove the direct child with `id`; when no direct child matches,
    /// recurse into every child. No-op for scalar values.
    pub fn remove(&mut self, id: u128) {
        if let PropertyValue::Children(children) = &mut self.value {
            let remove_position = children.iter().position(|x| *x.id() == id);
            if let Some(remove_position) = remove_position {
                children.remove(remove_position);
            } else {
                for i in 0..children.len() {
                    children[i].remove(id);
                }
            }
        }
    }
    /// Unique identifier of this property.
    pub fn id(&self) -> &u128 {
        &self.id
    }
    pub fn set_name(&mut self, name: String) {
        self.name = name;
    }
    pub fn name(&self) -> &String {
        &self.name
    }
    pub fn set_value(&mut self, value: PropertyValue) {
        self.value = value;
    }
    pub fn value(&self) -> &PropertyValue {
        &self.value
    }
    pub fn is_selected_to_show(&self) -> bool {
        self.is_selected_to_show
    }
    pub fn set_is_selected_to_show(&mut self, is_selected_to_show: bool) {
        self.is_selected_to_show = is_selected_to_show;
    }
    /// Serialize to a JS object `{ id, name, is_selected_to_show, value }`.
    /// The id is serialized as a string (u128 exceeds the JS number range).
    pub fn as_object(&self) -> JsObject {
        object! {
            id: self.id.to_string(),
            name: &self.name,
            is_selected_to_show: self.is_selected_to_show,
            value: self.value.as_object()
        }
    }
    /// Convert into a sainome `Ref`: the scalar value (if any) plus one
    /// named child `Ref` per child property.
    pub fn as_sainome_ref(&self) -> Ref {
        let mut r = Ref::new(self.value.as_option_string());
        if let PropertyValue::Children(children) = &self.value {
            for child in children {
                let name = child.name.to_string();
                r.insert(name, child.as_sainome_ref());
            }
        }
        r
    }
}
impl From<JsObject> for PropertyValue {
    /// Deserialize from the `{ type, payload }` JS-object form produced by
    /// `PropertyValue::as_object`. Unknown/missing types or payloads of the
    /// wrong shape fall back to `Self::None`.
    ///
    /// Cleanup: redundant closures (`|x| Self::Num(x)` etc.) replaced with
    /// direct function references (clippy `redundant_closure`).
    fn from(object: JsObject) -> Self {
        if let Some(type_name) = object.get("type").and_then(|x| x.as_string()) {
            match type_name.as_str() {
                "Num" => object
                    .get("payload")
                    .and_then(|x| x.as_f64())
                    .map(Self::Num)
                    .unwrap_or(Self::None),
                "Str" => object
                    .get("payload")
                    .and_then(|x| x.as_string())
                    .map(Self::Str)
                    .unwrap_or(Self::None),
                "Children" => object
                    .get("payload")
                    .map(|x| Array::from(&x))
                    .map(|x| {
                        Self::Children(
                            x.to_vec()
                                .into_iter()
                                .filter_map(|child| {
                                    // Entries that are not objects are ignored.
                                    child.dyn_into::<JsObject>().ok().map(Property::from)
                                })
                                .collect(),
                        )
                    })
                    .unwrap_or(Self::None),
                _ => Self::None,
            }
        } else {
            Self::None
        }
    }
}
impl From<JsObject> for Property {
    /// Deserialize a property from its JS-object form; every field falls back
    /// to a default (id 0, empty name, `None` value, not selected).
    fn from(object: JsObject) -> Self {
        let id = object
            .get("id")
            .and_then(|x| x.as_string())
            .and_then(|x| x.parse().ok())
            .unwrap_or(0);
        // `unwrap_or_default` avoids allocating a throw-away `String`
        // (the old `unwrap_or("".into())` built it even when unused).
        let name = object
            .get("name")
            .and_then(|x| x.as_string())
            .unwrap_or_default();
        let value = object
            .get("value")
            .map(PropertyValue::from)
            .unwrap_or(PropertyValue::None);
        let is_selected_to_show = object
            .get("is_selected_to_show")
            .and_then(|x| x.as_bool())
            .unwrap_or(false);
        Self {
            id,
            name,
            is_selected_to_show,
            value,
        }
    }
}
|
use crossterm::event::{self, Event, KeyCode, KeyEvent, KeyModifiers};
use rustyline::{error::ReadlineError, Editor};
/// Block until the next keyboard event arrives, discarding every other
/// kind of terminal event (resize, mouse, ...).
pub fn read_key() -> crossterm::Result<KeyEvent> {
    loop {
        if let Event::Key(pressed) = event::read()? {
            return Ok(pressed);
        }
    }
}
/// Translate a key event into a printable character.
///
/// Returns the character for unmodified keys, its uppercase form when only
/// SHIFT is held, and `None` for every other key or modifier combination.
pub fn key_to_char(key: KeyEvent) -> Option<char> {
    match key.code {
        KeyCode::Char(c) if key.modifiers == KeyModifiers::SHIFT => Some(c.to_ascii_uppercase()),
        KeyCode::Char(c) if key.modifiers.is_empty() => Some(c),
        _ => None,
    }
}
/// Read one line from stdin via rustyline with an empty prompt.
/// Ctrl-C (Interrupted) and Ctrl-D (Eof) are mapped to an empty line
/// rather than surfacing as errors.
pub fn read_line() -> Result<String, ReadlineError> {
    let mut editor = Editor::<()>::new();
    match editor.readline("") {
        Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => Ok(String::new()),
        other => other,
    }
}
|
use std::cell::RefCell;
use std::rc::Rc;
mod symbol_table;
use crate::evaluator::objects;
use crate::parser::ast;
use crate::vm::{bytecode, opcode};
pub use symbol_table::SymbolTable;
mod preludes {
pub use super::super::preludes::*;
}
use preludes::*;
/// One unit of code generation: the instruction stream for the scope being
/// compiled (top level or one function body), plus bookkeeping about the
/// last two emitted instructions so they can be removed or patched.
#[derive(Debug, Clone, Default)]
struct CompilationScope {
    instructions: bytecode::Instructions,
    // Most recently emitted instruction (used for Pop removal / return patching).
    last_instruction: Option<EmittedInstruction>,
    // Instruction emitted just before `last_instruction` (restored on removal).
    prev_instruction: Option<EmittedInstruction>,
}
impl CompilationScope {
    fn new() -> Self {
        Self::default()
    }
    /// Append raw instructions; returns the position where they start.
    fn add_instructions(&mut self, instructions: &mut bytecode::Instructions) -> Pos {
        let pos_new_instruction = Pos::try_from(self.instructions.0.len()).unwrap();
        self.instructions.0.append(&mut instructions.0);
        pos_new_instruction
    }
    /// Record `op` as the last emitted instruction, shifting the previous
    /// "last" down into `prev_instruction`.
    fn set_last_instruction(&mut self, op: opcode::Opcode, pos: Pos) {
        let prev = self.last_instruction.clone();
        let last = EmittedInstruction {
            opcode: op,
            position: pos,
        };
        self.prev_instruction = prev;
        self.last_instruction = Some(last);
    }
    /// True when the most recently emitted instruction is `opcode`.
    fn last_instruction_is(&self, opcode: &opcode::Opcode) -> bool {
        if self.instructions.0.is_empty() {
            return false;
        }
        match &self.last_instruction {
            Some(inst) => &inst.opcode == opcode,
            None => false,
        }
    }
    /// Drop the trailing instruction (used to remove an unwanted `Pop`):
    /// truncate the stream at its position and restore `prev_instruction`.
    fn remove_last_pop(&mut self) -> Result<()> {
        if let Some(last_isnt) = &self.last_instruction {
            let prev = self.prev_instruction.clone();
            let new = self.instructions.0[..usize::from(last_isnt.position)].to_vec();
            self.instructions = new.into();
            self.last_instruction = prev;
            Ok(())
        } else {
            Err(anyhow::format_err!("uninitialized"))
        }
    }
    /// Overwrite the bytes starting at `pos` with `new_instructions`
    /// (caller must replace an instruction of the same encoded length).
    fn replace_instructions(&mut self, pos: Pos, new_instructions: bytecode::Instructions) {
        new_instructions
            .0
            .into_iter()
            .enumerate()
            .for_each(|(i, inst)| {
                self.instructions.0[i + usize::from(pos)] = inst;
            });
    }
    /// Back-patch the operand of the jump instruction located at `op_pos`
    /// (used to fill in the placeholder targets emitted for conditionals).
    fn change_operand(&mut self, op_pos: Pos, operand: u16) -> Result<()> {
        let op = opcode::Opcode::try_from(&self.instructions.0[usize::from(op_pos)..])?;
        match op {
            opcode::Opcode::JumpNotTruthy(mut op) => {
                op.0 = operand;
                self.replace_instructions(op_pos, op.into());
            }
            opcode::Opcode::Jump(mut op) => {
                op.0 = operand;
                self.replace_instructions(op_pos, op.into());
            }
            _ => {
                return Err(anyhow::format_err!(
                    "Expected JumpNotTruthy or Jump. received {}",
                    op
                ))
            }
        }
        Ok(())
    }
    /// Turn a trailing `Pop` into `ReturnValue` (implicit function return).
    fn replace_last_pop_with_return(&mut self) -> Result<()> {
        let last_pos = match &self.last_instruction {
            Some(inst) => inst.position,
            None => return Err(anyhow::format_err!("uninitialized")),
        };
        self.replace_instructions(last_pos, opcode::ReturnValue.into());
        self.last_instruction = Some(EmittedInstruction {
            opcode: opcode::ReturnValue.into(),
            ..self.last_instruction.clone().unwrap()
        });
        Ok(())
    }
}
/// Stack of compilation scopes; `pointer` indexes the active scope.
/// Entering a function pushes a scope, leaving it pops one.
#[derive(Debug, Clone)]
struct CompilationScopes {
    data: Vec<Rc<RefCell<CompilationScope>>>,
    pointer: usize,
}
impl Default for CompilationScopes {
    // Start with a single (top-level) scope as the active one.
    fn default() -> Self {
        Self {
            data: vec![Rc::new(RefCell::new(CompilationScope::new()))],
            pointer: 0,
        }
    }
}
impl CompilationScopes {
    fn new() -> Self {
        Self::default()
    }
    /// Push a fresh scope and make it current (entering a function body).
    fn push_new_scope(&mut self) {
        let scope = CompilationScope::new();
        self.data.push(Rc::new(RefCell::new(scope)));
        self.pointer += 1;
    }
    /// Pop the current scope.
    /// NOTE(review): assumes a matching `push_new_scope` happened first;
    /// `pointer -= 1` would underflow (panic in debug) on the root scope.
    fn pop(&mut self) -> Option<Rc<RefCell<CompilationScope>>> {
        self.pointer -= 1;
        self.data.pop()
    }
    /// Shared handle to the scope currently being compiled.
    fn current(&self) -> Rc<RefCell<CompilationScope>> {
        Rc::clone(&self.data[self.pointer])
    }
    // The remaining methods simply delegate to the current scope.
    fn add_instructions(&mut self, instructions: &mut bytecode::Instructions) -> Pos {
        self.current().borrow_mut().add_instructions(instructions)
    }
    fn set_last_instruction(&mut self, op: opcode::Opcode, pos: Pos) {
        self.current().borrow_mut().set_last_instruction(op, pos);
    }
    fn last_instruction_is(&self, opcode: &opcode::Opcode) -> bool {
        self.current().borrow().last_instruction_is(opcode)
    }
    fn remove_last_pop(&mut self) -> Result<()> {
        self.current().borrow_mut().remove_last_pop()
    }
    fn change_operand(&mut self, op_pos: Pos, operand: u16) -> Result<()> {
        self.current().borrow_mut().change_operand(op_pos, operand)
    }
    fn replace_last_pop_with_return(&mut self) -> Result<()> {
        self.current().borrow_mut().replace_last_pop_with_return()
    }
}
/// Bytecode compiler: walks the AST and emits instructions into the current
/// scope, interning constants and resolving names through the symbol table.
#[derive(Debug)]
pub struct Compiler<'a> {
    // Shared constant pool, owned by the caller (survives across REPL lines).
    constants: &'a mut Vec<objects::Object>,
    symbol_table: Rc<RefCell<symbol_table::SymbolTable>>,
    scopes: CompilationScopes,
}
/// An opcode together with the position where it was emitted.
#[derive(Debug, Clone)]
struct EmittedInstruction {
    opcode: opcode::Opcode,
    position: Pos,
}
/// Offset into an instruction stream (also used as a constant-pool index).
type Pos = u16;
impl<'a> Compiler<'a> {
    /// Compile an AST node, appending bytecode to the current scope.
    ///
    /// Statements leave the stack balanced (expression statements append a
    /// trailing `Pop`); expressions leave exactly one value on the stack.
    pub fn compile(&mut self, node: ast::Node) -> Result<()> {
        match node {
            ast::Node::Program(pg) => {
                pg.statements
                    .into_iter()
                    .try_for_each(|stmt| self.compile(stmt.into()))?;
            }
            ast::Node::Stmt(stmt) => match stmt {
                ast::Stmt::ExprStmt(stmt) => {
                    self.compile(stmt.expr.into())?;
                    // Discard the expression's value.
                    self.emit(opcode::Pop.into());
                }
                ast::Stmt::Block(block) => {
                    block
                        .statements
                        .into_iter()
                        .try_for_each(|stmt| self.compile(stmt.into()))?;
                }
                ast::Stmt::Let(l) => {
                    // Define the name before compiling the value so the
                    // binding is visible while the value is compiled.
                    let symbol = {
                        let table = Rc::clone(&self.symbol_table);
                        let mut table = table.borrow_mut();
                        table.define(l.name.value)
                    };
                    self.compile(l.value.into())?;
                    match &*symbol.borrow() {
                        symbol_table::Symbol::Global { index, .. } => {
                            let op = opcode::SetGlobal(*index).into();
                            self.emit(op);
                        }
                        symbol_table::Symbol::Local { index, .. } => {
                            let op = opcode::SetLocal(*index).into();
                            self.emit(op);
                        }
                        // `define` only ever yields Global or Local symbols.
                        symbol_table::Symbol::Builtin { .. } => unreachable!(),
                        symbol_table::Symbol::Free { .. } => unreachable!(),
                        symbol_table::Symbol::Function { .. } => unreachable!(),
                    };
                }
                ast::Stmt::Return(r) => {
                    self.compile(r.return_value.into())?;
                    self.emit(opcode::ReturnValue.into());
                }
            },
            ast::Node::Expr(expr) => match expr {
                ast::Expr::InfixExpr(expr) => {
                    if expr.ope == ast::Operator::Lt {
                        // `a < b` is compiled as `b > a`: operands emitted in
                        // reverse so the single GreaterThan opcode can be reused.
                        self.compile((*expr.right).into())?;
                        self.compile((*expr.left).into())?;
                        self.emit(opcode::GreaterThan.into());
                    } else {
                        self.compile((*expr.left).into())?;
                        self.compile((*expr.right).into())?;
                        match expr.ope {
                            ast::Operator::Plus => self.emit(opcode::Add.into()),
                            ast::Operator::Minus => self.emit(opcode::Sub.into()),
                            ast::Operator::Asterisk => self.emit(opcode::Mul.into()),
                            ast::Operator::Slash => self.emit(opcode::Div.into()),
                            ast::Operator::Gt => self.emit(opcode::GreaterThan.into()),
                            ast::Operator::Equal => self.emit(opcode::Equal.into()),
                            ast::Operator::NotEqual => self.emit(opcode::NotEqual.into()),
                            // Lt was handled by the operand-swapping branch above.
                            ast::Operator::Lt => unreachable!(),
                            unknown => {
                                return Err(anyhow::format_err!("unknown operator {}", unknown))
                            }
                        };
                    }
                }
                ast::Expr::PrefixExpr(expr) => {
                    self.compile((*expr.right).into())?;
                    match expr.ope {
                        ast::Operator::Minus => self.emit(opcode::Minus.into()),
                        ast::Operator::Bang => self.emit(opcode::Bang.into()),
                        unknown => return Err(anyhow::format_err!("unknown operator {}", unknown)),
                    };
                }
                ast::Expr::If(expr) => {
                    self.compile((*expr.cond).into())?;
                    // 9999 is a placeholder operand, back-patched below once
                    // the jump target is known.
                    let jump_not_truthy_pos = self.emit(opcode::JumpNotTruthy(9999).into());
                    self.compile(ast::Stmt::from(expr.consequence).into())?;
                    // Keep the branch's value on the stack (`if` is an expression).
                    if self.scopes.last_instruction_is(&opcode::Pop.into()) {
                        self.scopes.remove_last_pop()?;
                    }
                    let jump_pos = self.emit(opcode::Jump(9999).into());
                    {
                        // Patch the conditional jump to land at the alternative.
                        let len = self.scopes.current().borrow().instructions.0.len();
                        self.scopes
                            .change_operand(jump_not_truthy_pos, u16::try_from(len)?)?;
                    }
                    if let Some(alternative) = expr.alternative {
                        self.compile(ast::Stmt::from(alternative).into())?;
                        if self.scopes.last_instruction_is(&opcode::Pop.into()) {
                            self.scopes.remove_last_pop()?;
                        }
                    } else {
                        // A missing else-branch evaluates to Null.
                        self.emit(opcode::Null.into());
                    }
                    {
                        // Patch the unconditional jump to skip the alternative.
                        let len = self.scopes.current().borrow().instructions.0.len();
                        self.scopes.change_operand(jump_pos, u16::try_from(len)?)?;
                    }
                }
                ast::Expr::Integer(int) => {
                    let int = objects::Integer { value: int.value };
                    let op = opcode::Constant::from(self.add_constant(int.into()));
                    self.emit(op.into());
                }
                ast::Expr::Boolean(b) => {
                    self.emit(match b.value {
                        true => opcode::True.into(),
                        false => opcode::False.into(),
                    });
                }
                ast::Expr::StringLit(s) => {
                    let s = objects::StringLit { value: s.value };
                    let op = opcode::Constant::from(self.add_constant(s.into()));
                    self.emit(op.into());
                }
                ast::Expr::Identifier(id) => {
                    let table = Rc::clone(&self.symbol_table);
                    let mut table = table.borrow_mut();
                    let symbol = table.resolve(&id.value);
                    match symbol {
                        Some(symbol) => {
                            self.load_symbol(&symbol.borrow());
                        }
                        None => return Err(anyhow::format_err!("undefined variable {}", id.value)),
                    };
                }
                ast::Expr::Array(array) => {
                    let arr_len = array.elements.len();
                    array
                        .elements
                        .into_iter()
                        .try_for_each(|ele| self.compile(ele.into()))?;
                    self.emit(opcode::Array(arr_len.try_into()?).into());
                }
                ast::Expr::Hash(hash) => {
                    // The operand counts stack entries: one key + one value per pair.
                    let hash_len = u16::try_from(hash.pairs.len() * 2)?;
                    hash.pairs.into_iter().try_for_each(|pair| {
                        self.compile(pair.key.into())?;
                        self.compile(pair.value.into())
                    })?;
                    self.emit(opcode::Hash(hash_len).into());
                }
                ast::Expr::Index(index) => {
                    self.compile((*index.left).into())?;
                    self.compile((*index.index).into())?;
                    self.emit(opcode::Index.into());
                }
                ast::Expr::Function(func) => {
                    // Function bodies are compiled in their own scope.
                    self.enter_scope();
                    if !func.name.is_empty() {
                        // Allow the function to refer to itself by name.
                        self.symbol_table
                            .borrow_mut()
                            .define_function_name(func.name);
                    }
                    let num_parameters = u8::try_from(func.params.len())?;
                    func.params.into_iter().for_each(|param| {
                        self.symbol_table.borrow_mut().define(param.value);
                    });
                    self.compile(ast::Stmt::from(func.body).into())?;
                    // Implicit return of the body's last expression value...
                    if self.scopes.last_instruction_is(&opcode::Pop.into()) {
                        self.scopes.replace_last_pop_with_return()?;
                    }
                    // ...or a bare Return for a body yielding no value.
                    if !self.scopes.last_instruction_is(&opcode::ReturnValue.into()) {
                        self.emit(opcode::Return.into());
                    }
                    let free_symbols = self.symbol_table.borrow().free_symbols.clone();
                    let num_locals = self.symbol_table.borrow().num_definitions;
                    let scope = self.leave_scope()?;
                    let instructions = scope.borrow().instructions.clone();
                    // Push captured (free) variables so Closure can pack them.
                    free_symbols.iter().for_each(|sym| {
                        self.load_symbol(&sym.borrow());
                    });
                    let compiled_func = objects::CompiledFunction {
                        instructions,
                        num_locals,
                        num_parameters,
                    };
                    let constant = self.add_constant(compiled_func.into());
                    self.emit(opcode::Closure(constant, u8::try_from(free_symbols.len())?).into());
                }
                ast::Expr::Call(call) => {
                    self.compile((*call.func).into())?;
                    let arg_len = u8::try_from(call.args.len())?;
                    call.args
                        .into_iter()
                        .try_for_each(|arg| self.compile(arg.into()))?;
                    self.emit(opcode::Call(arg_len).into());
                }
                _ => unimplemented!(),
            },
        }
        Ok(())
    }
    /// Instruction stream of the scope currently being compiled.
    pub fn current_instructions(&self) -> bytecode::Instructions {
        self.scopes.current().borrow().instructions.clone()
    }
    /// Intern a constant and return its index in the pool.
    fn add_constant(&mut self, obj: objects::Object) -> Pos {
        self.constants.push(obj);
        Pos::try_from(self.constants.len()).unwrap() - 1
    }
    /// Serialize `op`, append it to the current scope, and record it as the
    /// last emitted instruction. Returns the position it was written at.
    fn emit(&mut self, op: opcode::Opcode) -> Pos {
        let mut ins: bytecode::Instructions = op.to_bytes().into();
        let pos = self.scopes.add_instructions(&mut ins);
        self.scopes.set_last_instruction(op, pos);
        pos
    }
    /// Enter a nested scope with an enclosed symbol table (function body).
    fn enter_scope(&mut self) {
        self.scopes.push_new_scope();
        self.symbol_table = {
            let table = Rc::clone(&self.symbol_table);
            Rc::new(RefCell::new(SymbolTable::new_enclosed(table)))
        };
    }
    /// Leave the current scope, restoring the outer symbol table, and
    /// return the finished scope.
    fn leave_scope(&mut self) -> Result<Rc<RefCell<CompilationScope>>> {
        let scope = self.scopes.pop();
        let scope = scope.ok_or_else(|| anyhow::format_err!("Empty scope"))?;
        let table = self.symbol_table.borrow_mut().outer.take();
        if let Some(table) = table {
            self.symbol_table = table;
        }
        Ok(scope)
    }
    /// Emit the load instruction appropriate for where `symbol` lives.
    fn load_symbol(&mut self, symbol: &symbol_table::Symbol) {
        let symbol = match symbol {
            symbol_table::Symbol::Global { index, .. } => opcode::GetGlobal(*index).into(),
            symbol_table::Symbol::Local { index, .. } => opcode::GetLocal(*index).into(),
            symbol_table::Symbol::Builtin { index, .. } => opcode::GetBuiltin(*index).into(),
            symbol_table::Symbol::Free { index, .. } => opcode::GetFree(*index).into(),
            symbol_table::Symbol::Function { .. } => opcode::CurrentClosure.into(),
        };
        self.emit(symbol);
    }
}
impl<'a> Compiler<'a> {
pub fn new_with_state(
sym_table: Rc<RefCell<symbol_table::SymbolTable>>,
constants: &'a mut Vec<objects::Object>,
) -> Self {
Self {
symbol_table: sym_table,
constants,
scopes: CompilationScopes::new(),
}
}
}
impl From<Compiler<'_>> for crate::vm::bytecode::Bytecode {
    /// Finalizes compilation into a bytecode bundle: the current scope's
    /// instruction stream plus a copy of the accumulated constant pool.
    fn from(value: Compiler) -> Self {
        let instructions = value.current_instructions();
        let constants = value.constants.clone();
        Self {
            instructions,
            constants,
        }
    }
}
#[cfg(test)]
mod tests {
use crate::lexer;
use crate::parser;
use super::*;
// An expected constant-pool entry for a compiler test case: a literal
// value, or — for functions — the instructions they should compile to.
enum Expected {
    Int(i64),
    String(String),
    Function(bytecode::Instructions),
}
impl From<i64> for Expected {
fn from(value: i64) -> Self {
Expected::Int(value)
}
}
impl From<&str> for Expected {
fn from(value: &str) -> Self {
Expected::String(value.into())
}
}
impl From<Vec<opcode::Opcode>> for Expected {
fn from(value: Vec<opcode::Opcode>) -> Self {
Expected::Function(value.into())
}
}
// One compiler test table: (source input, expected constant pool,
// expected top-level instructions).
struct Tests(Vec<(&'static str, Vec<Expected>, bytecode::Instructions)>);
impl<T, U> From<Vec<(&'static str, Vec<T>, U)>> for Tests
where
    T: Into<Expected>,
    U: Into<bytecode::Instructions>,
{
    /// Normalizes loosely-typed test tables into `Tests`, converting each
    /// expected constant and the instruction list into their owned forms.
    fn from(value: Vec<(&'static str, Vec<T>, U)>) -> Self {
        let cases = value
            .into_iter()
            .map(|(input, constants, instructions)| {
                let constants: Vec<Expected> =
                    constants.into_iter().map(Into::into).collect();
                (input, constants, instructions.into())
            })
            .collect::<Vec<_>>();
        Tests(cases)
    }
}
// Smoke test: one arithmetic expression compiles to two constant loads,
// an Add, and the statement-level Pop.
#[test]
fn test_compiler() {
    let tests = vec![(
        "1 + 2",
        vec![1, 2],
        #[allow(clippy::useless_conversion)] // Bug in clippy
        Vec::<opcode::Opcode>::from(vec![
            opcode::Constant(0).into(),
            opcode::Constant(1).into(),
            opcode::Add.into(),
            opcode::Pop.into(),
        ]),
    )]
    .into();
    run_compiler_tests(tests);
}
#[test]
fn test_integer_arithmetic() {
let tests: Tests = vec![
(
"1 + 2",
vec![1, 2],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Add.into(),
opcode::Pop.into(),
]),
),
(
"1 - 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Sub.into(),
opcode::Pop.into(),
],
),
(
"1 * 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Mul.into(),
opcode::Pop.into(),
],
),
(
"1 / 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Div.into(),
opcode::Pop.into(),
],
),
(
"1; 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Pop.into(),
opcode::Constant(1).into(),
opcode::Pop.into(),
],
),
(
"-1",
vec![1],
vec![
opcode::Constant(0).into(),
opcode::Minus.into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_boolean_expressions() {
let tests: Tests = vec![
(
"true",
vec![],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![opcode::True.into(), opcode::Pop.into()]),
),
(
"false",
vec![],
vec![opcode::False.into(), opcode::Pop.into()],
),
(
"1 > 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::GreaterThan.into(),
opcode::Pop.into(),
],
),
(
"1 < 2",
vec![2, 1],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::GreaterThan.into(),
opcode::Pop.into(),
],
),
(
"1 == 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Equal.into(),
opcode::Pop.into(),
],
),
(
"1 != 2",
vec![1, 2],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::NotEqual.into(),
opcode::Pop.into(),
],
),
(
"true == false",
vec![],
vec![
opcode::True.into(),
opcode::False.into(),
opcode::Equal.into(),
opcode::Pop.into(),
],
),
(
"true != false",
vec![],
vec![
opcode::True.into(),
opcode::False.into(),
opcode::NotEqual.into(),
opcode::Pop.into(),
],
),
(
"!true",
vec![],
vec![opcode::True.into(), opcode::Bang.into(), opcode::Pop.into()],
),
(
"!false",
vec![],
vec![
opcode::False.into(),
opcode::Bang.into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_conditionals() {
let tests: Tests = vec![
(
"if (true) { 10 }; 3333;",
vec![10, 3333],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::True.into(),
opcode::JumpNotTruthy(10).into(),
opcode::Constant(0).into(),
opcode::Jump(11).into(),
opcode::Null.into(),
opcode::Pop.into(),
opcode::Constant(1).into(),
opcode::Pop.into(),
]),
),
(
"if (true) { 10 } else { 20 }; 3333;",
vec![10, 20, 3333],
vec![
opcode::True.into(),
opcode::JumpNotTruthy(10).into(),
opcode::Constant(0).into(),
opcode::Jump(13).into(),
opcode::Constant(1).into(),
opcode::Pop.into(),
opcode::Constant(2).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_global_let_statements() {
let tests: Tests = vec![
(
"
let one = 1;
let two = 2;
",
vec![1, 2],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::Constant(0).into(),
opcode::SetGlobal(0).into(),
opcode::Constant(1).into(),
opcode::SetGlobal(1).into(),
]),
),
(
"
let one = 1;
one;
",
vec![1],
vec![
opcode::Constant(0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Pop.into(),
],
),
(
"
let one = 1;
let two = one;
two;
",
vec![1],
vec![
opcode::Constant(0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::SetGlobal(1).into(),
opcode::GetGlobal(1).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_string_expressions() {
let tests: Tests = vec![
(
r#""monkey""#,
vec!["monkey"],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![opcode::Constant(0).into(), opcode::Pop.into()]),
),
(
r#""mon" + "key""#,
vec!["mon", "key"],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Add.into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_array_literals() {
let tests: Tests = vec![
(
"[]",
vec![],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![opcode::Array(0).into(), opcode::Pop.into()]),
),
(
"[1, 2, 3]",
vec![1, 2, 3],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Constant(2).into(),
opcode::Array(3).into(),
opcode::Pop.into(),
],
),
(
"[1 + 2, 3 - 4, 5 * 6]",
vec![1, 2, 3, 4, 5, 6],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Add.into(),
opcode::Constant(2).into(),
opcode::Constant(3).into(),
opcode::Sub.into(),
opcode::Constant(4).into(),
opcode::Constant(5).into(),
opcode::Mul.into(),
opcode::Array(3).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_hash_literals() {
let tests: Tests = vec![
(
"{}",
vec![],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![opcode::Hash(0).into(), opcode::Pop.into()]),
),
(
"{1: 2, 3: 4, 5: 6}",
vec![1, 2, 3, 4, 5, 6],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Constant(2).into(),
opcode::Constant(3).into(),
opcode::Constant(4).into(),
opcode::Constant(5).into(),
opcode::Hash(6).into(),
opcode::Pop.into(),
],
),
(
"{1: 2 + 3, 4: 5 * 6}",
vec![1, 2, 3, 4, 5, 6],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Constant(2).into(),
opcode::Add.into(),
opcode::Constant(3).into(),
opcode::Constant(4).into(),
opcode::Constant(5).into(),
opcode::Mul.into(),
opcode::Hash(4).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_index_expressions() {
let tests: Tests = vec![
(
"[1, 2, 3][1 + 1]",
vec![1, 2, 3, 1, 1],
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Constant(2).into(),
opcode::Array(3).into(),
opcode::Constant(3).into(),
opcode::Constant(4).into(),
opcode::Add.into(),
opcode::Index.into(),
opcode::Pop.into(),
]),
),
(
"{1: 2}[2 - 1]",
vec![1, 2, 2, 1],
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Hash(2).into(),
opcode::Constant(2).into(),
opcode::Constant(3).into(),
opcode::Sub.into(),
opcode::Index.into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_function() {
let tests: Tests = vec![
(
"fn() { return 5 + 10 }",
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<Expected>::from(vec![
5.into(),
10.into(),
Vec::<opcode::Opcode>::from(vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Add.into(),
opcode::ReturnValue.into(),
])
.into(),
]),
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![opcode::Closure(2, 0).into(), opcode::Pop.into()]),
),
(
"fn() { 5 + 10 }",
vec![
5.into(),
10.into(),
vec![
opcode::Constant(0).into(),
opcode::Constant(1).into(),
opcode::Add.into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![opcode::Closure(2, 0).into(), opcode::Pop.into()],
),
(
"fn() { 1; 2 }",
vec![
1.into(),
2.into(),
vec![
opcode::Constant(0).into(),
opcode::Pop.into(),
opcode::Constant(1).into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![opcode::Closure(2, 0).into(), opcode::Pop.into()],
),
]
.into();
run_compiler_tests(tests);
}
// Exercises scope push/pop bookkeeping directly: instruction streams are
// per-scope, and last/previous instruction tracking follows the active scope.
#[test]
fn test_compiler_scopes() {
    let sym_table = Default::default();
    let mut constants = Default::default();
    let mut compiler = Compiler::new_with_state(sym_table, &mut constants);
    // A fresh compiler starts with exactly one (the outermost) scope.
    assert_eq!(compiler.scopes.pointer, 0);
    assert_eq!(compiler.scopes.data.len(), 1);
    compiler.emit(opcode::Mul.into());
    assert_eq!(compiler.scopes.pointer, 0);
    assert_eq!(compiler.scopes.data[0].borrow().instructions.0.len(), 1);
    // Entering a scope adds a new, empty instruction stream.
    compiler.enter_scope();
    assert_eq!(compiler.scopes.pointer, 1);
    assert_eq!(compiler.scopes.data.len(), 2);
    assert_eq!(compiler.scopes.data[1].borrow().instructions.0.len(), 0);
    // Emitting inside the inner scope must not touch the outer stream.
    compiler.emit(opcode::Sub.into());
    assert_eq!(compiler.scopes.pointer, 1);
    assert_eq!(compiler.scopes.data[0].borrow().instructions.0.len(), 1);
    assert_eq!(compiler.scopes.data[1].borrow().instructions.0.len(), 1);
    let last = &compiler.scopes.data[compiler.scopes.pointer];
    let last = last.borrow().last_instruction.clone().unwrap();
    match &last.opcode {
        opcode::Opcode::Sub(_) => (),
        op => panic!("expected opcode::Sub. received {}", op),
    }
    // Leaving restores the outer scope; new emissions append there.
    compiler.leave_scope().unwrap();
    assert_eq!(compiler.scopes.pointer, 0);
    compiler.emit(opcode::Add.into());
    assert_eq!(compiler.scopes.data.len(), 1);
    assert_eq!(
        compiler.scopes.data[compiler.scopes.pointer]
            .borrow()
            .instructions
            .0
            .len(),
        2
    );
    let last = &compiler.scopes.data[compiler.scopes.pointer];
    let last = last.borrow().last_instruction.clone().unwrap();
    match &last.opcode {
        opcode::Opcode::Add(_) => (),
        op => panic!("expected opcode::Add. received {}", op),
    }
    // The Mul emitted before entering the scope is still tracked as the
    // previous instruction of the outer scope.
    let prev = &compiler.scopes.data[compiler.scopes.pointer];
    let prev = prev.borrow().prev_instruction.clone().unwrap();
    match &prev.opcode {
        opcode::Opcode::Mul(_) => (),
        op => panic!("expected opcode::Mul. received {}", op),
    }
}
// A function with an empty body compiles to a bare Return (no value).
#[test]
fn test_functions_without_return_value() {
    let tests: Tests = vec![(
        "fn() { }",
        vec![vec![opcode::Return.into()]],
        #[allow(clippy::useless_conversion)] // Bug in clippy
        Vec::<opcode::Opcode>::from(vec![opcode::Closure(0, 0).into(), opcode::Pop.into()]),
    )]
    .into();
    run_compiler_tests(tests);
}
#[test]
fn test_function_calls() {
let tests: Tests = vec![
(
"fn() { 24 }()",
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<Expected>::from(vec![
24.into(),
vec![opcode::Constant(0).into(), opcode::ReturnValue.into()].into(),
]),
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::Closure(1, 0).into(),
opcode::Call(0).into(),
opcode::Pop.into(),
]),
),
(
"
let no_arg = fn() { 24 };
no_arg();
",
vec![
24.into(),
vec![opcode::Constant(0).into(), opcode::ReturnValue.into()].into(),
],
vec![
opcode::Closure(1, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Call(0).into(),
opcode::Pop.into(),
],
),
(
"
let one_arg = fn(a) { };
one_arg(24);
",
vec![vec![opcode::Return.into()].into(), 24.into()],
vec![
opcode::Closure(0, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Constant(1).into(),
opcode::Call(1).into(),
opcode::Pop.into(),
],
),
(
"
let many_arg = fn(a, b, c) { };
many_arg(24, 25, 26);
",
vec![
vec![opcode::Return.into()].into(),
24.into(),
25.into(),
26.into(),
],
vec![
opcode::Closure(0, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Constant(1).into(),
opcode::Constant(2).into(),
opcode::Constant(3).into(),
opcode::Call(3).into(),
opcode::Pop.into(),
],
),
(
"
let one_arg = fn(a) { a };
one_arg(24);
",
vec![
vec![opcode::GetLocal(0).into(), opcode::ReturnValue.into()].into(),
24.into(),
],
vec![
opcode::Closure(0, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Constant(1).into(),
opcode::Call(1).into(),
opcode::Pop.into(),
],
),
(
"
let many_arg = fn(a, b, c) { a; b; c; };
many_arg(24, 25, 26);
",
vec![
vec![
opcode::GetLocal(0).into(),
opcode::Pop.into(),
opcode::GetLocal(1).into(),
opcode::Pop.into(),
opcode::GetLocal(2).into(),
opcode::ReturnValue.into(),
]
.into(),
24.into(),
25.into(),
26.into(),
],
vec![
opcode::Closure(0, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Constant(1).into(),
opcode::Constant(2).into(),
opcode::Constant(3).into(),
opcode::Call(3).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_let_statement_scopes() {
let tests: Tests = vec![
(
"
let num = 55;
fn() { num }
",
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<Expected>::from(vec![
55.into(),
vec![opcode::GetGlobal(0).into(), opcode::ReturnValue.into()].into(),
]),
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::Constant(0).into(),
opcode::SetGlobal(0).into(),
opcode::Closure(1, 0).into(),
opcode::Pop.into(),
]),
),
(
"
fn() {
let num = 55;
num
}
",
vec![
55.into(),
vec![
opcode::Constant(0).into(),
opcode::SetLocal(0).into(),
opcode::GetLocal(0).into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![opcode::Closure(1, 0).into(), opcode::Pop.into()],
),
(
"
fn() {
let a = 55;
let b = 77;
a + b
}
",
vec![
55.into(),
77.into(),
vec![
opcode::Constant(0).into(),
opcode::SetLocal(0).into(),
opcode::Constant(1).into(),
opcode::SetLocal(1).into(),
opcode::GetLocal(0).into(),
opcode::GetLocal(1).into(),
opcode::Add.into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![opcode::Closure(2, 0).into(), opcode::Pop.into()],
),
(
"
let one = fn() { let one = 1; one };
one();
",
vec![
1.into(),
vec![
opcode::Constant(0).into(),
opcode::SetLocal(0).into(),
opcode::GetLocal(0).into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![
opcode::Closure(1, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Call(0).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_builtins() {
let tests: Tests = vec![
(
"
len([]);
push([], 1);
",
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<Expected>::from(vec![1.into()]),
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::GetBuiltin(0).into(),
opcode::Array(0).into(),
opcode::Call(1).into(),
opcode::Pop.into(),
opcode::GetBuiltin(4).into(),
opcode::Array(0).into(),
opcode::Constant(0).into(),
opcode::Call(2).into(),
opcode::Pop.into(),
]),
),
(
"fn() { len([]) }",
vec![vec![
opcode::GetBuiltin(0).into(),
opcode::Array(0).into(),
opcode::Call(1).into(),
opcode::ReturnValue.into(),
]
.into()],
vec![opcode::Closure(0, 0).into(), opcode::Pop.into()],
),
(
r#"len("")"#,
vec!["".into()],
vec![
opcode::GetBuiltin(0).into(),
opcode::Constant(0).into(),
opcode::Call(1).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_closures() {
let tests: Tests = vec![
(
"
fn(a) {
fn(b) {
a + b
}
}
",
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<Expected>::from(vec![
vec![
opcode::GetFree(0).into(),
opcode::GetLocal(0).into(),
opcode::Add.into(),
opcode::ReturnValue.into(),
]
.into(),
vec![
opcode::GetLocal(0).into(),
opcode::Closure(0, 1).into(),
opcode::ReturnValue.into(),
]
.into(),
]),
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![opcode::Closure(1, 0).into(), opcode::Pop.into()]),
),
(
"
fn(a) {
fn(b) {
fn(c) {
a + b + c
}
}
}
",
vec![
vec![
opcode::GetFree(0).into(),
opcode::GetFree(1).into(),
opcode::Add.into(),
opcode::GetLocal(0).into(),
opcode::Add.into(),
opcode::ReturnValue.into(),
]
.into(),
vec![
opcode::GetFree(0).into(),
opcode::GetLocal(0).into(),
opcode::Closure(0, 2).into(),
opcode::ReturnValue.into(),
]
.into(),
vec![
opcode::GetLocal(0).into(),
opcode::Closure(1, 1).into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![opcode::Closure(2, 0).into(), opcode::Pop.into()],
),
(
"
let global = 55;
fn() {
let a = 66;
fn() {
let b = 77;
fn() {
let c = 88;
global + a + b + c;
}
}
}
",
vec![
55.into(),
66.into(),
77.into(),
88.into(),
vec![
opcode::Constant(3).into(),
opcode::SetLocal(0).into(),
opcode::GetGlobal(0).into(),
opcode::GetFree(0).into(),
opcode::Add.into(),
opcode::GetFree(1).into(),
opcode::Add.into(),
opcode::GetLocal(0).into(),
opcode::Add.into(),
opcode::ReturnValue.into(),
]
.into(),
vec![
opcode::Constant(2).into(),
opcode::SetLocal(0).into(),
opcode::GetFree(0).into(),
opcode::GetLocal(0).into(),
opcode::Closure(4, 2).into(),
opcode::ReturnValue.into(),
]
.into(),
vec![
opcode::Constant(1).into(),
opcode::SetLocal(0).into(),
opcode::GetLocal(0).into(),
opcode::Closure(5, 1).into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![
opcode::Constant(0).into(),
opcode::SetGlobal(0).into(),
opcode::Closure(6, 0).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
#[test]
fn test_recursive_functions() {
let tests: Tests = vec![
(
"
let count_down = fn(x) { count_down(x - 1); };
count_down(1);
",
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<Expected>::from(vec![
1.into(),
vec![
opcode::CurrentClosure.into(),
opcode::GetLocal(0).into(),
opcode::Constant(0).into(),
opcode::Sub.into(),
opcode::Call(1).into(),
opcode::ReturnValue.into(),
]
.into(),
1.into(),
]),
#[allow(clippy::useless_conversion)] // Bug in clippy
Vec::<opcode::Opcode>::from(vec![
opcode::Closure(1, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Constant(2).into(),
opcode::Call(1).into(),
opcode::Pop.into(),
]),
),
(
"
let wrapper = fn() {
let count_down = fn(x) { count_down(x - 1); };
count_down(1);
}
wrapper();
",
vec![
1.into(),
vec![
opcode::CurrentClosure.into(),
opcode::GetLocal(0).into(),
opcode::Constant(0).into(),
opcode::Sub.into(),
opcode::Call(1).into(),
opcode::ReturnValue.into(),
]
.into(),
1.into(),
vec![
opcode::Closure(1, 0).into(),
opcode::SetLocal(0).into(),
opcode::GetLocal(0).into(),
opcode::Constant(2).into(),
opcode::Call(1).into(),
opcode::ReturnValue.into(),
]
.into(),
],
vec![
opcode::Closure(3, 0).into(),
opcode::SetGlobal(0).into(),
opcode::GetGlobal(0).into(),
opcode::Call(0).into(),
opcode::Pop.into(),
],
),
]
.into();
run_compiler_tests(tests);
}
/// Compiles each test input with a fresh compiler and checks both the
/// emitted instructions and the resulting constant pool.
fn run_compiler_tests(tests: Tests) {
    for (input, expected_constants, expected_instructions) in tests.0 {
        let program = parse_test_input(input);
        let sym_table = Rc::new(RefCell::new(SymbolTable::new_with_builtin()));
        let mut constants = Default::default();
        let mut compiler = Compiler::new_with_state(sym_table, &mut constants);
        if let Err(e) = compiler.compile(program.into()) {
            panic!("{}", e);
        }
        let bytecode::Bytecode {
            instructions,
            constants,
        } = compiler.into();
        test_instructions(instructions, expected_instructions);
        test_constants(constants, expected_constants);
    }
}
// Compares instruction streams. The string (disassembled) form is checked
// first so a mismatch fails with a readable diff before the raw bytes.
fn test_instructions(actual: bytecode::Instructions, expected: bytecode::Instructions) {
    assert_eq!(actual.to_string(), expected.to_string());
    assert_eq!(actual, expected);
}
// Verifies the disassembly (Display) format of every opcode.
// NOTE(review): the expected strings previously contained the literal
// characters `¥n` — a Shift-JIS mojibake rendering of the backslash —
// instead of the `\n` escape, so every case compared against text that a
// `writeln!`-based Display impl can never produce. Restored to `\n`.
#[test]
fn test_format_instructions() {
    let tests: Vec<(Vec<opcode::Opcode>, &str)> = vec![
        (
            vec![opcode::Constant(65534).into(), opcode::Constant(1).into()],
            "0000 Constant 65534\n0003 Constant 1\n",
        ),
        (
            vec![opcode::Constant(65534).into()],
            "0000 Constant 65534\n",
        ),
        (
            vec![
                opcode::Constant(1).into(),
                opcode::Constant(2).into(),
                opcode::Add.into(),
            ],
            "0000 Constant 1\n0003 Constant 2\n0006 Add\n",
        ),
        (vec![opcode::Sub.into()], "0000 Sub\n"),
        (vec![opcode::Mul.into()], "0000 Mul\n"),
        (vec![opcode::Div.into()], "0000 Div\n"),
        (vec![opcode::Pop.into()], "0000 Pop\n"),
        (vec![opcode::True.into()], "0000 True\n"),
        (vec![opcode::False.into()], "0000 False\n"),
        (vec![opcode::Equal.into()], "0000 Equal\n"),
        (vec![opcode::NotEqual.into()], "0000 NotEqual\n"),
        (vec![opcode::GreaterThan.into()], "0000 GreaterThan\n"),
        (vec![opcode::Minus.into()], "0000 Minus\n"),
        (vec![opcode::Bang.into()], "0000 Bang\n"),
        (
            vec![opcode::JumpNotTruthy(65534).into()],
            "0000 JumpNotTruthy 65534\n",
        ),
        (vec![opcode::Jump(65534).into()], "0000 Jump 65534\n"),
        (vec![opcode::Null.into()], "0000 Null\n"),
        (
            vec![opcode::GetGlobal(65534).into()],
            "0000 GetGlobal 65534\n",
        ),
        (
            vec![opcode::SetGlobal(65534).into()],
            "0000 SetGlobal 65534\n",
        ),
        (vec![opcode::Array(65534).into()], "0000 Array 65534\n"),
        (vec![opcode::Hash(65534).into()], "0000 Hash 65534\n"),
        (vec![opcode::Index.into()], "0000 Index\n"),
        (vec![opcode::Call(254).into()], "0000 Call 254\n"),
        (vec![opcode::ReturnValue.into()], "0000 ReturnValue\n"),
        (vec![opcode::Return.into()], "0000 Return\n"),
        (vec![opcode::GetLocal(254).into()], "0000 GetLocal 254\n"),
        (vec![opcode::SetLocal(254).into()], "0000 SetLocal 254\n"),
        (
            vec![opcode::GetBuiltin(254).into()],
            "0000 GetBuiltin 254\n",
        ),
        (
            vec![opcode::Closure(65534, 254).into()],
            "0000 Closure 65534 254\n",
        ),
        (vec![opcode::GetFree(254).into()], "0000 GetFree 254\n"),
    ];
    tests.into_iter().for_each(|(input, expected)| {
        let instructions: bytecode::Instructions = input
            .into_iter()
            .flat_map(|v| v.to_bytes().to_vec())
            .collect::<Vec<bytecode::Instruction>>()
            .into();
        assert_eq!(format!("{}", instructions), expected);
    });
}
/// Pairs each compiled constant with its expectation and dispatches to
/// the matching type-specific checker.
fn test_constants(actual: Vec<objects::Object>, expected: Vec<Expected>) {
    for (obj, want) in actual.into_iter().zip(expected) {
        match want {
            Expected::Int(i) => test_integer_object(obj, i),
            Expected::String(s) => test_string_object(obj, &s),
            Expected::Function(f) => test_compiled_function_object(obj, f),
        }
    }
}
/// Asserts that `actual` is an integer object holding `expected`.
fn test_integer_object(actual: objects::Object, expected: i64) {
    match actual {
        objects::Object::Integer(int_o) => assert_eq!(int_o.value, expected),
        o => panic!("expected object::Integer. received {}", o),
    }
}
/// Asserts that `actual` is a string object holding `expected`.
fn test_string_object(actual: objects::Object, expected: &str) {
    match actual {
        objects::Object::StringLit(s) => assert_eq!(s.value, expected),
        o => panic!("expected object::StringLit. received {}", o),
    }
}
/// Asserts that `actual` is a compiled function whose instruction stream
/// matches `expected`.
fn test_compiled_function_object(actual: objects::Object, expected: bytecode::Instructions) {
    let f = match actual {
        objects::Object::CompiledFunction(f) => f,
        o => panic!("expected object::CompiledFunction. received {}", o),
    };
    test_instructions(f.instructions, expected);
}
/// Lexes and parses `input`, panicking on any parse error.
fn parse_test_input(input: &str) -> ast::Program {
    let lexer = lexer::Lexer::new(input.into());
    parser::Parser::new(lexer).parse_program().unwrap()
}
}
|
use std::time::SystemTime;

/// Returns a timestamp-based identifier: milliseconds since the Unix
/// epoch, rendered as a decimal string.
///
/// Panics only if the system clock reports a time before the epoch.
pub fn get_id() -> String {
    let since_epoch = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap();
    since_epoch.as_millis().to_string()
}
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A union overlaying a `u8` with an uninhabited enum: reading field `b`
// can never produce a valid value, which is what this compile-fail test
// exercises below.
union Foo {
    a: u8,
    b: Bar,
}
// Uninhabited (zero-variant) enum: no valid value of this type exists.
#[derive(Copy, Clone)]
enum Bar {}
// Deliberately reads the uninhabited field during const-eval; the compiler
// must reject this. The `//~^ ERROR` line is the compiletest expectation
// and must stay directly below the constant it annotates.
const BAD_BAD_BAD: Bar = unsafe { Foo { a: 1 }.b};
//~^ ERROR this constant likely exhibits undefined behavior
// Empty main: the test only checks that the constant above fails to compile.
fn main() {
}
|
#![feature(crate_visibility_modifier)]
#[macro_use]
pub mod binary_stream;
pub mod binary; |
#[doc = "Register `D3CCIPR` reader"]
pub type R = crate::R<D3CCIPR_SPEC>;
#[doc = "Register `D3CCIPR` writer"]
pub type W = crate::W<D3CCIPR_SPEC>;
#[doc = "Field `LPUART1SEL` reader - LPUART1 kernel clock source selection"]
pub type LPUART1SEL_R = crate::FieldReader<LPUART1SEL_A>;
// Generated (svd2rust-style) enumeration of the 3-bit LPUART1SEL field;
// discriminants match the hardware register encoding. Encodings 6-7 are
// reserved and have no variant.
#[doc = "LPUART1 kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum LPUART1SEL_A {
    #[doc = "0: rcc_pclk_d3 selected as peripheral clock"]
    RccPclkD3 = 0,
    #[doc = "1: pll2_q selected as peripheral clock"]
    Pll2Q = 1,
    #[doc = "2: pll3_q selected as peripheral clock"]
    Pll3Q = 2,
    #[doc = "3: hsi_ker selected as peripheral clock"]
    HsiKer = 3,
    #[doc = "4: csi_ker selected as peripheral clock"]
    CsiKer = 4,
    #[doc = "5: LSE selected as peripheral clock"]
    Lse = 5,
}
impl From<LPUART1SEL_A> for u8 {
    /// Converts the variant into its raw field bits.
    #[inline(always)]
    fn from(variant: LPUART1SEL_A) -> Self {
        variant as u8
    }
}
// Raw storage type backing the LPUART1SEL field.
impl crate::FieldSpec for LPUART1SEL_A {
    type Ux = u8;
}
// Read accessors for the LPUART1SEL field.
impl LPUART1SEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<LPUART1SEL_A> {
        // Encodings 6-7 of this 3-bit field are reserved -> None.
        Some(match self.bits {
            0 => LPUART1SEL_A::RccPclkD3,
            1 => LPUART1SEL_A::Pll2Q,
            2 => LPUART1SEL_A::Pll3Q,
            3 => LPUART1SEL_A::HsiKer,
            4 => LPUART1SEL_A::CsiKer,
            5 => LPUART1SEL_A::Lse,
            _ => return None,
        })
    }
    #[doc = "rcc_pclk_d3 selected as peripheral clock"]
    #[inline(always)]
    pub fn is_rcc_pclk_d3(&self) -> bool {
        matches!(self.variant(), Some(LPUART1SEL_A::RccPclkD3))
    }
    #[doc = "pll2_q selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll2_q(&self) -> bool {
        matches!(self.variant(), Some(LPUART1SEL_A::Pll2Q))
    }
    #[doc = "pll3_q selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll3_q(&self) -> bool {
        matches!(self.variant(), Some(LPUART1SEL_A::Pll3Q))
    }
    #[doc = "hsi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn is_hsi_ker(&self) -> bool {
        matches!(self.variant(), Some(LPUART1SEL_A::HsiKer))
    }
    #[doc = "csi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn is_csi_ker(&self) -> bool {
        matches!(self.variant(), Some(LPUART1SEL_A::CsiKer))
    }
    #[doc = "LSE selected as peripheral clock"]
    #[inline(always)]
    pub fn is_lse(&self) -> bool {
        matches!(self.variant(), Some(LPUART1SEL_A::Lse))
    }
}
#[doc = "Field `LPUART1SEL` writer - LPUART1 kernel clock source selection"]
pub type LPUART1SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, LPUART1SEL_A>;
// Fluent setters for the LPUART1SEL field: each method writes the
// corresponding variant and returns the register writer for chaining.
impl<'a, REG, const O: u8> LPUART1SEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "rcc_pclk_d3 selected as peripheral clock"]
    #[inline(always)]
    pub fn rcc_pclk_d3(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1SEL_A::RccPclkD3)
    }
    #[doc = "pll2_q selected as peripheral clock"]
    #[inline(always)]
    pub fn pll2_q(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1SEL_A::Pll2Q)
    }
    #[doc = "pll3_q selected as peripheral clock"]
    #[inline(always)]
    pub fn pll3_q(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1SEL_A::Pll3Q)
    }
    #[doc = "hsi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn hsi_ker(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1SEL_A::HsiKer)
    }
    #[doc = "csi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn csi_ker(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1SEL_A::CsiKer)
    }
    #[doc = "LSE selected as peripheral clock"]
    #[inline(always)]
    pub fn lse(self) -> &'a mut crate::W<REG> {
        self.variant(LPUART1SEL_A::Lse)
    }
}
#[doc = "Field `I2C4SEL` reader - I2C4 kernel clock source selection"]
pub type I2C4SEL_R = crate::FieldReader<I2C4SEL_A>;
// Generated enumeration of the 2-bit I2C4SEL field; all four encodings
// are defined, so reads are infallible.
#[doc = "I2C4 kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum I2C4SEL_A {
    #[doc = "0: rcc_pclk4 selected as peripheral clock"]
    RccPclk4 = 0,
    #[doc = "1: pll3_r selected as peripheral clock"]
    Pll3R = 1,
    #[doc = "2: hsi_ker selected as peripheral clock"]
    HsiKer = 2,
    #[doc = "3: csi_ker selected as peripheral clock"]
    CsiKer = 3,
}
impl From<I2C4SEL_A> for u8 {
    /// Converts the variant into its raw field bits.
    #[inline(always)]
    fn from(variant: I2C4SEL_A) -> Self {
        variant as u8
    }
}
// Raw storage type backing the I2C4SEL field.
impl crate::FieldSpec for I2C4SEL_A {
    type Ux = u8;
}
// Read accessors for the I2C4SEL field.
impl I2C4SEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> I2C4SEL_A {
        // The field is 2 bits wide, so values 0-3 cover every encoding;
        // anything else cannot be produced by the masked read.
        match self.bits {
            0 => I2C4SEL_A::RccPclk4,
            1 => I2C4SEL_A::Pll3R,
            2 => I2C4SEL_A::HsiKer,
            3 => I2C4SEL_A::CsiKer,
            _ => unreachable!(),
        }
    }
    #[doc = "rcc_pclk4 selected as peripheral clock"]
    #[inline(always)]
    pub fn is_rcc_pclk4(&self) -> bool {
        matches!(self.variant(), I2C4SEL_A::RccPclk4)
    }
    #[doc = "pll3_r selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll3_r(&self) -> bool {
        matches!(self.variant(), I2C4SEL_A::Pll3R)
    }
    #[doc = "hsi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn is_hsi_ker(&self) -> bool {
        matches!(self.variant(), I2C4SEL_A::HsiKer)
    }
    #[doc = "csi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn is_csi_ker(&self) -> bool {
        matches!(self.variant(), I2C4SEL_A::CsiKer)
    }
}
#[doc = "Field `I2C4SEL` writer - I2C4 kernel clock source selection"]
pub type I2C4SEL_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, I2C4SEL_A>;
// Fluent setters for the I2C4SEL field: each method writes the
// corresponding variant and returns the register writer for chaining.
impl<'a, REG, const O: u8> I2C4SEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "rcc_pclk4 selected as peripheral clock"]
    #[inline(always)]
    pub fn rcc_pclk4(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::RccPclk4)
    }
    #[doc = "pll3_r selected as peripheral clock"]
    #[inline(always)]
    pub fn pll3_r(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::Pll3R)
    }
    #[doc = "hsi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn hsi_ker(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::HsiKer)
    }
    #[doc = "csi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn csi_ker(self) -> &'a mut crate::W<REG> {
        self.variant(I2C4SEL_A::CsiKer)
    }
}
#[doc = "Field `LPTIM2SEL` reader - LPTIM2 kernel clock source selection"]
pub type LPTIM2SEL_R = crate::FieldReader<LPTIM2SEL_A>;
// Generated enumeration of the 3-bit LPTIM2SEL field; encodings 6-7 are
// reserved and have no variant.
#[doc = "LPTIM2 kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum LPTIM2SEL_A {
    #[doc = "0: rcc_pclk4 selected as peripheral clock"]
    RccPclk4 = 0,
    #[doc = "1: pll2_p selected as peripheral clock"]
    Pll2P = 1,
    #[doc = "2: pll3_r selected as peripheral clock"]
    Pll3R = 2,
    #[doc = "3: LSE selected as peripheral clock"]
    Lse = 3,
    #[doc = "4: LSI selected as peripheral clock"]
    Lsi = 4,
    #[doc = "5: PER selected as peripheral clock"]
    Per = 5,
}
impl From<LPTIM2SEL_A> for u8 {
    /// Converts the variant into its raw field bits.
    #[inline(always)]
    fn from(variant: LPTIM2SEL_A) -> Self {
        variant as u8
    }
}
// Raw storage type backing the LPTIM2SEL field.
impl crate::FieldSpec for LPTIM2SEL_A {
    type Ux = u8;
}
impl LPTIM2SEL_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<LPTIM2SEL_A> {
match self.bits {
0 => Some(LPTIM2SEL_A::RccPclk4),
1 => Some(LPTIM2SEL_A::Pll2P),
2 => Some(LPTIM2SEL_A::Pll3R),
3 => Some(LPTIM2SEL_A::Lse),
4 => Some(LPTIM2SEL_A::Lsi),
5 => Some(LPTIM2SEL_A::Per),
_ => None,
}
}
#[doc = "rcc_pclk4 selected as peripheral clock"]
#[inline(always)]
pub fn is_rcc_pclk4(&self) -> bool {
*self == LPTIM2SEL_A::RccPclk4
}
#[doc = "pll2_p selected as peripheral clock"]
#[inline(always)]
pub fn is_pll2_p(&self) -> bool {
*self == LPTIM2SEL_A::Pll2P
}
#[doc = "pll3_r selected as peripheral clock"]
#[inline(always)]
pub fn is_pll3_r(&self) -> bool {
*self == LPTIM2SEL_A::Pll3R
}
#[doc = "LSE selected as peripheral clock"]
#[inline(always)]
pub fn is_lse(&self) -> bool {
*self == LPTIM2SEL_A::Lse
}
#[doc = "LSI selected as peripheral clock"]
#[inline(always)]
pub fn is_lsi(&self) -> bool {
*self == LPTIM2SEL_A::Lsi
}
#[doc = "PER selected as peripheral clock"]
#[inline(always)]
pub fn is_per(&self) -> bool {
*self == LPTIM2SEL_A::Per
}
}
#[doc = "Field `LPTIM2SEL` writer - LPTIM2 kernel clock source selection"]
pub type LPTIM2SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, LPTIM2SEL_A>;
impl<'a, REG, const O: u8> LPTIM2SEL_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
REG::Ux: From<u8>,
{
#[doc = "rcc_pclk4 selected as peripheral clock"]
#[inline(always)]
pub fn rcc_pclk4(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM2SEL_A::RccPclk4)
}
#[doc = "pll2_p selected as peripheral clock"]
#[inline(always)]
pub fn pll2_p(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM2SEL_A::Pll2P)
}
#[doc = "pll3_r selected as peripheral clock"]
#[inline(always)]
pub fn pll3_r(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM2SEL_A::Pll3R)
}
#[doc = "LSE selected as peripheral clock"]
#[inline(always)]
pub fn lse(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM2SEL_A::Lse)
}
#[doc = "LSI selected as peripheral clock"]
#[inline(always)]
pub fn lsi(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM2SEL_A::Lsi)
}
#[doc = "PER selected as peripheral clock"]
#[inline(always)]
pub fn per(self) -> &'a mut crate::W<REG> {
self.variant(LPTIM2SEL_A::Per)
}
}
#[doc = "Field `LPTIM345SEL` reader - LPTIM3,4,5 kernel clock source selection"]
// LPTIM3/4/5 share LPTIM2's value encoding, so the reader/writer types are reused.
pub use LPTIM2SEL_R as LPTIM345SEL_R;
#[doc = "Field `LPTIM345SEL` writer - LPTIM3,4,5 kernel clock source selection"]
pub use LPTIM2SEL_W as LPTIM345SEL_W;
#[doc = "Field `ADCSEL` reader - SAR ADC kernel clock source selection"]
pub type ADCSEL_R = crate::FieldReader<ADCSEL_A>;
#[doc = "SAR ADC kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum ADCSEL_A {
    #[doc = "0: pll2_p selected as peripheral clock"]
    Pll2P = 0,
    #[doc = "1: pll3_r selected as peripheral clock"]
    Pll3R = 1,
    #[doc = "2: PER selected as peripheral clock"]
    Per = 2,
}
impl From<ADCSEL_A> for u8 {
    #[inline(always)]
    fn from(variant: ADCSEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for ADCSEL_A {
    type Ux = u8;
}
impl ADCSEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<ADCSEL_A> {
        // Value 3 is reserved and decodes to `None`.
        match self.bits {
            0 => Some(ADCSEL_A::Pll2P),
            1 => Some(ADCSEL_A::Pll3R),
            2 => Some(ADCSEL_A::Per),
            _ => None,
        }
    }
    #[doc = "pll2_p selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll2_p(&self) -> bool {
        *self == ADCSEL_A::Pll2P
    }
    #[doc = "pll3_r selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll3_r(&self) -> bool {
        *self == ADCSEL_A::Pll3R
    }
    #[doc = "PER selected as peripheral clock"]
    #[inline(always)]
    pub fn is_per(&self) -> bool {
        *self == ADCSEL_A::Per
    }
}
#[doc = "Field `ADCSEL` writer - SAR ADC kernel clock source selection"]
// 2-bit field with value 3 reserved, hence the plain (checked) FieldWriter.
pub type ADCSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O, ADCSEL_A>;
impl<'a, REG, const O: u8> ADCSEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "pll2_p selected as peripheral clock"]
    #[inline(always)]
    pub fn pll2_p(self) -> &'a mut crate::W<REG> {
        self.variant(ADCSEL_A::Pll2P)
    }
    #[doc = "pll3_r selected as peripheral clock"]
    #[inline(always)]
    pub fn pll3_r(self) -> &'a mut crate::W<REG> {
        self.variant(ADCSEL_A::Pll3R)
    }
    #[doc = "PER selected as peripheral clock"]
    #[inline(always)]
    pub fn per(self) -> &'a mut crate::W<REG> {
        self.variant(ADCSEL_A::Per)
    }
}
#[doc = "Field `SAI4ASEL` reader - Sub-Block A of SAI4 kernel clock source selection"]
pub type SAI4ASEL_R = crate::FieldReader<SAI4ASEL_A>;
#[doc = "Sub-Block A of SAI4 kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SAI4ASEL_A {
    #[doc = "0: pll1_q selected as peripheral clock"]
    Pll1Q = 0,
    #[doc = "1: pll2_p selected as peripheral clock"]
    Pll2P = 1,
    #[doc = "2: pll3_p selected as peripheral clock"]
    Pll3P = 2,
    #[doc = "3: i2s_ckin selected as peripheral clock"]
    I2sCkin = 3,
    #[doc = "4: PER selected as peripheral clock"]
    Per = 4,
}
impl From<SAI4ASEL_A> for u8 {
    #[inline(always)]
    fn from(variant: SAI4ASEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for SAI4ASEL_A {
    type Ux = u8;
}
impl SAI4ASEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SAI4ASEL_A> {
        // Values 5-7 are reserved and decode to `None`.
        match self.bits {
            0 => Some(SAI4ASEL_A::Pll1Q),
            1 => Some(SAI4ASEL_A::Pll2P),
            2 => Some(SAI4ASEL_A::Pll3P),
            3 => Some(SAI4ASEL_A::I2sCkin),
            4 => Some(SAI4ASEL_A::Per),
            _ => None,
        }
    }
    #[doc = "pll1_q selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll1_q(&self) -> bool {
        *self == SAI4ASEL_A::Pll1Q
    }
    #[doc = "pll2_p selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll2_p(&self) -> bool {
        *self == SAI4ASEL_A::Pll2P
    }
    #[doc = "pll3_p selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll3_p(&self) -> bool {
        *self == SAI4ASEL_A::Pll3P
    }
    #[doc = "i2s_ckin selected as peripheral clock"]
    #[inline(always)]
    pub fn is_i2s_ckin(&self) -> bool {
        *self == SAI4ASEL_A::I2sCkin
    }
    #[doc = "PER selected as peripheral clock"]
    #[inline(always)]
    pub fn is_per(&self) -> bool {
        *self == SAI4ASEL_A::Per
    }
}
#[doc = "Field `SAI4ASEL` writer - Sub-Block A of SAI4 kernel clock source selection"]
pub type SAI4ASEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, SAI4ASEL_A>;
impl<'a, REG, const O: u8> SAI4ASEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "pll1_q selected as peripheral clock"]
    #[inline(always)]
    pub fn pll1_q(self) -> &'a mut crate::W<REG> {
        self.variant(SAI4ASEL_A::Pll1Q)
    }
    #[doc = "pll2_p selected as peripheral clock"]
    #[inline(always)]
    pub fn pll2_p(self) -> &'a mut crate::W<REG> {
        self.variant(SAI4ASEL_A::Pll2P)
    }
    #[doc = "pll3_p selected as peripheral clock"]
    #[inline(always)]
    pub fn pll3_p(self) -> &'a mut crate::W<REG> {
        self.variant(SAI4ASEL_A::Pll3P)
    }
    #[doc = "i2s_ckin selected as peripheral clock"]
    #[inline(always)]
    pub fn i2s_ckin(self) -> &'a mut crate::W<REG> {
        self.variant(SAI4ASEL_A::I2sCkin)
    }
    #[doc = "PER selected as peripheral clock"]
    #[inline(always)]
    pub fn per(self) -> &'a mut crate::W<REG> {
        self.variant(SAI4ASEL_A::Per)
    }
}
#[doc = "Field `SAI4BSEL` reader - Sub-Block B of SAI4 kernel clock source selection"]
// SAI4 sub-block B shares sub-block A's value encoding, so the types are reused.
pub use SAI4ASEL_R as SAI4BSEL_R;
#[doc = "Field `SAI4BSEL` writer - Sub-Block B of SAI4 kernel clock source selection"]
pub use SAI4ASEL_W as SAI4BSEL_W;
#[doc = "Field `SPI6SEL` reader - SPI6 kernel clock source selection"]
pub type SPI6SEL_R = crate::FieldReader<SPI6SEL_A>;
#[doc = "SPI6 kernel clock source selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SPI6SEL_A {
    #[doc = "0: rcc_pclk4 selected as peripheral clock"]
    RccPclk4 = 0,
    #[doc = "1: pll2_q selected as peripheral clock"]
    Pll2Q = 1,
    #[doc = "2: pll3_q selected as peripheral clock"]
    Pll3Q = 2,
    #[doc = "3: hsi_ker selected as peripheral clock"]
    HsiKer = 3,
    #[doc = "4: csi_ker selected as peripheral clock"]
    CsiKer = 4,
    #[doc = "5: HSE selected as peripheral clock"]
    Hse = 5,
}
impl From<SPI6SEL_A> for u8 {
    #[inline(always)]
    fn from(variant: SPI6SEL_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for SPI6SEL_A {
    type Ux = u8;
}
impl SPI6SEL_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SPI6SEL_A> {
        // Values 6-7 are reserved and decode to `None`.
        match self.bits {
            0 => Some(SPI6SEL_A::RccPclk4),
            1 => Some(SPI6SEL_A::Pll2Q),
            2 => Some(SPI6SEL_A::Pll3Q),
            3 => Some(SPI6SEL_A::HsiKer),
            4 => Some(SPI6SEL_A::CsiKer),
            5 => Some(SPI6SEL_A::Hse),
            _ => None,
        }
    }
    #[doc = "rcc_pclk4 selected as peripheral clock"]
    #[inline(always)]
    pub fn is_rcc_pclk4(&self) -> bool {
        *self == SPI6SEL_A::RccPclk4
    }
    #[doc = "pll2_q selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll2_q(&self) -> bool {
        *self == SPI6SEL_A::Pll2Q
    }
    #[doc = "pll3_q selected as peripheral clock"]
    #[inline(always)]
    pub fn is_pll3_q(&self) -> bool {
        *self == SPI6SEL_A::Pll3Q
    }
    #[doc = "hsi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn is_hsi_ker(&self) -> bool {
        *self == SPI6SEL_A::HsiKer
    }
    #[doc = "csi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn is_csi_ker(&self) -> bool {
        *self == SPI6SEL_A::CsiKer
    }
    #[doc = "HSE selected as peripheral clock"]
    #[inline(always)]
    pub fn is_hse(&self) -> bool {
        *self == SPI6SEL_A::Hse
    }
}
#[doc = "Field `SPI6SEL` writer - SPI6 kernel clock source selection"]
pub type SPI6SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O, SPI6SEL_A>;
impl<'a, REG, const O: u8> SPI6SEL_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "rcc_pclk4 selected as peripheral clock"]
    #[inline(always)]
    pub fn rcc_pclk4(self) -> &'a mut crate::W<REG> {
        self.variant(SPI6SEL_A::RccPclk4)
    }
    #[doc = "pll2_q selected as peripheral clock"]
    #[inline(always)]
    pub fn pll2_q(self) -> &'a mut crate::W<REG> {
        self.variant(SPI6SEL_A::Pll2Q)
    }
    #[doc = "pll3_q selected as peripheral clock"]
    #[inline(always)]
    pub fn pll3_q(self) -> &'a mut crate::W<REG> {
        self.variant(SPI6SEL_A::Pll3Q)
    }
    #[doc = "hsi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn hsi_ker(self) -> &'a mut crate::W<REG> {
        self.variant(SPI6SEL_A::HsiKer)
    }
    #[doc = "csi_ker selected as peripheral clock"]
    #[inline(always)]
    pub fn csi_ker(self) -> &'a mut crate::W<REG> {
        self.variant(SPI6SEL_A::CsiKer)
    }
    #[doc = "HSE selected as peripheral clock"]
    #[inline(always)]
    pub fn hse(self) -> &'a mut crate::W<REG> {
        self.variant(SPI6SEL_A::Hse)
    }
}
// Read accessors: each one shifts the register value right by the field's bit
// offset and masks by (2^width - 1). The offsets here must match the const
// offsets used by the corresponding writer methods in `impl W`.
impl R {
    #[doc = "Bits 0:2 - LPUART1 kernel clock source selection"]
    #[inline(always)]
    pub fn lpuart1sel(&self) -> LPUART1SEL_R {
        LPUART1SEL_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bits 8:9 - I2C4 kernel clock source selection"]
    #[inline(always)]
    pub fn i2c4sel(&self) -> I2C4SEL_R {
        I2C4SEL_R::new(((self.bits >> 8) & 3) as u8)
    }
    #[doc = "Bits 10:12 - LPTIM2 kernel clock source selection"]
    #[inline(always)]
    pub fn lptim2sel(&self) -> LPTIM2SEL_R {
        LPTIM2SEL_R::new(((self.bits >> 10) & 7) as u8)
    }
    #[doc = "Bits 13:15 - LPTIM3,4,5 kernel clock source selection"]
    #[inline(always)]
    pub fn lptim345sel(&self) -> LPTIM345SEL_R {
        LPTIM345SEL_R::new(((self.bits >> 13) & 7) as u8)
    }
    #[doc = "Bits 16:17 - SAR ADC kernel clock source selection"]
    #[inline(always)]
    pub fn adcsel(&self) -> ADCSEL_R {
        ADCSEL_R::new(((self.bits >> 16) & 3) as u8)
    }
    #[doc = "Bits 21:23 - Sub-Block A of SAI4 kernel clock source selection"]
    #[inline(always)]
    pub fn sai4asel(&self) -> SAI4ASEL_R {
        SAI4ASEL_R::new(((self.bits >> 21) & 7) as u8)
    }
    #[doc = "Bits 24:26 - Sub-Block B of SAI4 kernel clock source selection"]
    #[inline(always)]
    pub fn sai4bsel(&self) -> SAI4BSEL_R {
        SAI4BSEL_R::new(((self.bits >> 24) & 7) as u8)
    }
    #[doc = "Bits 28:30 - SPI6 kernel clock source selection"]
    #[inline(always)]
    pub fn spi6sel(&self) -> SPI6SEL_R {
        SPI6SEL_R::new(((self.bits >> 28) & 7) as u8)
    }
}
// Write accessors: the const generic passed to each `*_W` type is the field's
// bit offset and must match the shift used by the reader methods in `impl R`.
impl W {
    #[doc = "Bits 0:2 - LPUART1 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn lpuart1sel(&mut self) -> LPUART1SEL_W<D3CCIPR_SPEC, 0> {
        LPUART1SEL_W::new(self)
    }
    #[doc = "Bits 8:9 - I2C4 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn i2c4sel(&mut self) -> I2C4SEL_W<D3CCIPR_SPEC, 8> {
        I2C4SEL_W::new(self)
    }
    #[doc = "Bits 10:12 - LPTIM2 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn lptim2sel(&mut self) -> LPTIM2SEL_W<D3CCIPR_SPEC, 10> {
        LPTIM2SEL_W::new(self)
    }
    #[doc = "Bits 13:15 - LPTIM3,4,5 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn lptim345sel(&mut self) -> LPTIM345SEL_W<D3CCIPR_SPEC, 13> {
        LPTIM345SEL_W::new(self)
    }
    #[doc = "Bits 16:17 - SAR ADC kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn adcsel(&mut self) -> ADCSEL_W<D3CCIPR_SPEC, 16> {
        ADCSEL_W::new(self)
    }
    #[doc = "Bits 21:23 - Sub-Block A of SAI4 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn sai4asel(&mut self) -> SAI4ASEL_W<D3CCIPR_SPEC, 21> {
        SAI4ASEL_W::new(self)
    }
    #[doc = "Bits 24:26 - Sub-Block B of SAI4 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn sai4bsel(&mut self) -> SAI4BSEL_W<D3CCIPR_SPEC, 24> {
        SAI4BSEL_W::new(self)
    }
    #[doc = "Bits 28:30 - SPI6 kernel clock source selection"]
    #[inline(always)]
    #[must_use]
    pub fn spi6sel(&mut self) -> SPI6SEL_W<D3CCIPR_SPEC, 28> {
        SPI6SEL_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // `unsafe` because a raw write bypasses the typed field writers and can set
    // bit patterns the enumerated variants do not cover.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "RCC Domain 3 Kernel Clock Configuration Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`d3ccipr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`d3ccipr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct D3CCIPR_SPEC;
impl crate::RegisterSpec for D3CCIPR_SPEC {
    // 32-bit register, matching the u32 masks/shifts in `impl R`.
    type Ux = u32;
}
#[doc = "`read()` method returns [`d3ccipr::R`](R) reader structure"]
impl crate::Readable for D3CCIPR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`d3ccipr::W`](W) writer structure"]
impl crate::Writable for D3CCIPR_SPEC {
    // No write-1-to-clear / write-0-to-clear bits in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets D3CCIPR to value 0"]
impl crate::Resettable for D3CCIPR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::commands::wallet::wallet_update;
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use clap::Clap;
use ic_types::Principal;
/// Deauthorize a wallet custodian.
#[derive(Clap)]
pub struct DeauthorizeOpts {
    /// Principal of the custodian to deauthorize.
    // Kept as a String and parsed into a `Principal` in `exec`, so clap reports
    // its own error for a missing argument and parse errors surface separately.
    custodian: String,
}
/// Parses the custodian principal from `opts` and removes it from the wallet's
/// custodian list via a `deauthorize` wallet update call.
///
/// Returns an error if the principal text is malformed or the update call fails.
pub async fn exec(env: &dyn Environment, opts: DeauthorizeOpts) -> DfxResult {
    // `from_text` only needs to borrow the text, so no clone is required;
    // `opts.custodian` stays available for the confirmation message below.
    let custodian = Principal::from_text(&opts.custodian)?;
    wallet_update(env, "deauthorize", custodian).await?;
    println!("Deauthorized {} as a custodian.", opts.custodian);
    Ok(())
}
|
//! A widget for plotting a BassCalc graph
//! Based on https://github.com/PistonDevelopers/conrod/blob/master/src/widget/plot_path.rs
use conrod::{Color, Colorable, Positionable, Scalar, Sizeable, Widget};
use conrod::{widget, utils};
use parameters::Parameters;
use functions::{BassFnData, bass_fn_point};
/// A widget that plots a BassCalc function, which depends on `Parameters`
///
/// The function returns a list of points, which the widget uses to draw lines according
/// to the current X and Y scales
///
/// The resulting "path" is drawn using conrod's `PointPath` primitive widget.
#[derive(WidgetCommon)]
pub struct BassGraph<'a, F> {
    #[conrod(common_builder)]
    common: widget::CommonBuilder,
    style: Style,
    // Frequency range plotted on the X axis.
    min_freq: f64,
    max_freq: f64,
    // NOTE(review): `step` is stored but the `update` impl samples one point per
    // horizontal pixel instead of using it — confirm whether it is still needed.
    step: f64,
    params: &'a Parameters,
    // Produces the data set to plot from the current parameters.
    f: F,
}
/// Unique styling parameters for the `BassGraph` widget.
#[derive(Copy, Clone, Debug, Default, PartialEq, WidgetStyle)]
pub struct Style {
    /// The thickness of the plotted line.
    #[conrod(default = "1.0")]
    pub thickness: Option<Scalar>,
    /// The color of the line.
    #[conrod(default = "theme.shape_color")]
    pub color: Option<Color>,
}
// Generates the `Ids` struct holding the widget id for the inner PointPath.
widget_ids! {
    struct Ids {
        point_path,
    }
}
/// Unique state stored between updates for the `BassGraph` widget.
pub struct State {
    ids: Ids,
}
impl<'a, F> BassGraph<'a, F> {
    /// Begin building a new `BassGraph` widget instance covering the frequency
    /// range `[min_freq, max_freq]`, plotting the data produced by `f(params)`.
    pub fn new(min_freq: f64, max_freq: f64, step: f64, params: &'a Parameters, f: F) -> Self {
        Self {
            common: widget::CommonBuilder::default(),
            style: Style::default(),
            min_freq,
            max_freq,
            step,
            params,
            f,
        }
    }
    /// Builder method: set the thickness of the point path used to draw the plot.
    pub fn thickness(self, thickness: Scalar) -> Self {
        let mut graph = self;
        graph.style.thickness = Some(thickness);
        graph
    }
}
impl<'a, F> Widget for BassGraph<'a, F>
    where F: Fn(&Parameters) -> BassFnData,
{
    type State = State;
    type Style = Style;
    type Event = ();
    fn init_state(&self, id_gen: widget::id::Generator) -> Self::State {
        State {
            ids: Ids::new(id_gen),
        }
    }
    fn style(&self) -> Self::Style {
        // `Style` derives `Copy`; no explicit clone needed.
        self.style
    }
    /// Update the state of the BassGraph: sample the function once per
    /// horizontal pixel and draw the result as a `PointPath`.
    fn update(self, args: widget::UpdateArgs<Self>) -> Self::Event {
        let widget::UpdateArgs { id, state, style, rect, ui, .. } = args;
        // `step` is unused here (sampling is one point per pixel), so it is
        // deliberately not destructured.
        let BassGraph { min_freq, max_freq, f, params, .. } = self;
        // Map a function value in [-1, 1] to a vertical screen coordinate.
        let y_to_scalar =
            |y| utils::map_range(y, -1., 1., rect.bottom(), rect.top());
        // Map a horizontal screen coordinate to a frequency in [min_freq, max_freq].
        // (f64 is Copy, so the range bounds are captured by value.)
        let scalar_to_x =
            |s| utils::map_range(s, rect.left(), rect.right(), min_freq, max_freq);
        let data = f(params);
        let point_iter = (0 .. rect.w() as usize)
            .map(|x_scalar| {
                let x_scalar = x_scalar as Scalar + rect.x.start;
                let x = scalar_to_x(x_scalar);
                let y = bass_fn_point(&data, x);
                let y_scalar = y_to_scalar(y);
                [x_scalar, y_scalar]
            });
        let thickness = style.thickness(ui.theme());
        let color = style.color(ui.theme());
        widget::PointPath::new(point_iter)
            .wh(rect.dim())
            .xy(rect.xy())
            .color(color)
            .thickness(thickness)
            .parent(id)
            .graphics_for(id)
            .set(state.ids.point_path, ui);
    }
}
// Generates `fn color(self, Color) -> Self`, storing into `style.color`.
impl<'a, F> Colorable for BassGraph<'a, F> {
    builder_method!(color { style.color = Some(Color) });
}
|
#[macro_use]
extern crate log;
mod support;
use self::support::*;
// End-to-end check: one h2 request through the inbound proxy produces exactly
// one request/response/end entry in the telemetry report sent to the controller.
#[test]
fn inbound_sends_telemetry() {
    let _ = env_logger::try_init();
    info!("running test server");
    let srv = server::new().route("/hey", "hello").run();
    let mut ctrl = controller::new();
    let reports = ctrl.reports();
    let proxy = proxy::new()
        .controller(ctrl.run())
        .inbound(srv)
        .metrics_flush_interval(Duration::from_millis(500))
        .run();
    let client = client::new(proxy.inbound, "tele.test.svc.cluster.local");
    info!("client.get(/hey)");
    assert_eq!(client.get("/hey"), "hello");
    info!("awaiting report");
    let report = reports.wait().next().unwrap().unwrap();
    // proxy inbound
    assert_eq!(report.proxy, 0);
    // process
    assert_eq!(report.process.as_ref().unwrap().node, "");
    assert_eq!(report.process.as_ref().unwrap().scheduled_instance, "");
    assert_eq!(report.process.as_ref().unwrap().scheduled_namespace, "test");
    // requests
    assert_eq!(report.requests.len(), 1);
    let req = &report.requests[0];
    assert_eq!(req.ctx.as_ref().unwrap().authority, "tele.test.svc.cluster.local");
    //assert_eq!(req.ctx.as_ref().unwrap().method, GET);
    assert_eq!(req.count, 1);
    assert_eq!(req.responses.len(), 1);
    // responses
    let res = &req.responses[0];
    assert_eq!(res.ctx.as_ref().unwrap().http_status_code, 200);
    // response latencies should always have a length equal to the number
    // of latency buckets in the latency histogram.
    assert_eq!(
        res.response_latency_counts.len(),
        report.histogram_bucket_bounds_tenth_ms.len()
    );
    assert_eq!(res.ends.len(), 1);
    // ends
    let ends = &res.ends[0];
    assert_eq!(ends.streams, 1);
}
// Same shape as `inbound_sends_telemetry`, but over HTTP/1 server and client to
// confirm protocol downgrade does not change the reported telemetry.
#[test]
fn http1_inbound_sends_telemetry() {
    let _ = env_logger::try_init();
    info!("running test server");
    let srv = server::http1().route("/hey", "hello").run();
    let mut ctrl = controller::new();
    let reports = ctrl.reports();
    let proxy = proxy::new()
        .controller(ctrl.run())
        .inbound(srv)
        .metrics_flush_interval(Duration::from_millis(500))
        .run();
    let client = client::http1(proxy.inbound, "tele.test.svc.cluster.local");
    info!("client.get(/hey)");
    assert_eq!(client.get("/hey"), "hello");
    info!("awaiting report");
    let report = reports.wait().next().unwrap().unwrap();
    // proxy inbound
    assert_eq!(report.proxy, 0);
    // requests
    assert_eq!(report.requests.len(), 1);
    let req = &report.requests[0];
    assert_eq!(req.ctx.as_ref().unwrap().authority, "tele.test.svc.cluster.local");
    //assert_eq!(req.ctx.as_ref().unwrap().method, GET);
    assert_eq!(req.count, 1);
    assert_eq!(req.responses.len(), 1);
    // responses
    let res = &req.responses[0];
    assert_eq!(res.ctx.as_ref().unwrap().http_status_code, 200);
    // response latencies should always have a length equal to the number
    // of latency buckets in the latency histogram.
    assert_eq!(
        res.response_latency_counts.len(),
        report.histogram_bucket_bounds_tenth_ms.len()
    );
    assert_eq!(res.ends.len(), 1);
    // ends
    let ends = &res.ends[0];
    assert_eq!(ends.streams, 1);
}
// Three requests (one to /hey, two to /hi) must aggregate into two report
// entries, with the repeated route carrying count == 2 and streams == 2.
#[test]
fn inbound_aggregates_telemetry_over_several_requests() {
    let _ = env_logger::try_init();
    info!("running test server");
    let srv = server::new()
        .route("/hey", "hello")
        .route("/hi", "good morning")
        .run();
    let mut ctrl = controller::new();
    let reports = ctrl.reports();
    let proxy = proxy::new()
        .controller(ctrl.run())
        .inbound(srv)
        .metrics_flush_interval(Duration::from_millis(500))
        .run();
    let client = client::new(proxy.inbound, "tele.test.svc.cluster.local");
    info!("client.get(/hey)");
    assert_eq!(client.get("/hey"), "hello");
    info!("client.get(/hi)");
    assert_eq!(client.get("/hi"), "good morning");
    assert_eq!(client.get("/hi"), "good morning");
    info!("awaiting report");
    let report = reports.wait().next().unwrap().unwrap();
    // proxy inbound
    assert_eq!(report.proxy, 0);
    // requests -----------------------
    assert_eq!(report.requests.len(), 2);
    // -- first request -----------------
    let req = &report.requests[0];
    assert_eq!(req.ctx.as_ref().unwrap().authority, "tele.test.svc.cluster.local");
    assert_eq!(req.count, 1);
    assert_eq!(req.responses.len(), 1);
    // ---- response --------------------
    let res = &req.responses[0];
    assert_eq!(res.ctx.as_ref().unwrap().http_status_code, 200);
    // response latencies should always have a length equal to the number
    // of latency buckets in the latency histogram.
    assert_eq!(
        res.response_latency_counts.len(),
        report.histogram_bucket_bounds_tenth_ms.len()
    );
    assert_eq!(res.ends.len(), 1);
    // ------ ends ----------------------
    let ends = &res.ends[0];
    assert_eq!(ends.streams, 1);
    // -- second request ----------------
    let req = &report.requests[1];
    assert_eq!(req.ctx.as_ref().unwrap().authority, "tele.test.svc.cluster.local");
    // repeated twice
    assert_eq!(req.count, 2);
    assert_eq!(req.responses.len(), 1);
    // ---- response -------------------
    let res = &req.responses[0];
    assert_eq!(res.ctx.as_ref().unwrap().http_status_code, 200);
    // response latencies should always have a length equal to the number
    // of latency buckets in the latency histogram.
    assert_eq!(
        res.response_latency_counts.len(),
        report.histogram_bucket_bounds_tenth_ms.len()
    );
    assert_eq!(res.ends.len(), 1);
    // ------ ends ----------------------
    let ends = &res.ends[0];
    assert_eq!(ends.streams, 2);
}
// Ignore this test on CI, because our method of adding latency to requests
// (calling `thread::sleep`) is likely to be flakey on Travis.
// Eventually, we can add some kind of mock timer system for simulating latency
// more reliably, and re-enable this test.
// Routes with injected latency (500ms and 40ms) must land in the expected
// histogram buckets of the telemetry report. See the module comment above the
// attribute for why this is gated behind the `flaky_tests` feature.
#[test]
#[cfg_attr(not(feature = "flaky_tests"), ignore)]
fn records_latency_statistics() {
    let _ = env_logger::try_init();
    info!("running test server");
    let srv = server::new()
        .route_with_latency("/hey", "hello", Duration::from_millis(500))
        .route_with_latency("/hi", "good morning", Duration::from_millis(40))
        .run();
    let mut ctrl = controller::new();
    let reports = ctrl.reports();
    let proxy = proxy::new()
        .controller(ctrl.run())
        .inbound(srv)
        .metrics_flush_interval(Duration::from_secs(5))
        .run();
    let client = client::new(proxy.inbound, "tele.test.svc.cluster.local");
    info!("client.get(/hey)");
    assert_eq!(client.get("/hey"), "hello");
    info!("client.get(/hi)");
    assert_eq!(client.get("/hi"), "good morning");
    assert_eq!(client.get("/hi"), "good morning");
    info!("awaiting report");
    let report = reports.wait().next().unwrap().unwrap();
    // requests -----------------------
    assert_eq!(report.requests.len(), 2);
    // first request
    let req = &report.requests[0];
    assert_eq!(req.ctx.as_ref().unwrap().authority, "tele.test.svc.cluster.local");
    let res = &req.responses[0];
    // response latencies should always have a length equal to the number
    // of latency buckets in the latency histogram.
    assert_eq!(
        res.response_latency_counts.len(),
        report.histogram_bucket_bounds_tenth_ms.len()
    );
    for (idx, bucket) in res.response_latency_counts.iter().enumerate() {
        // 500 ms of extra latency should put us in the 500-1000
        // millisecond bucket (the 15th bucket)
        if idx == 15 {
            assert_eq!(*bucket, 1, "poorly bucketed latencies: {:?}", res.response_latency_counts);
        } else {
            assert_eq!(*bucket, 0, "poorly bucketed latencies: {:?}", res.response_latency_counts);
        }
    }
    // second request
    // (`get` already yields a reference; the previous `&` in front of `expect`
    // produced a redundant double reference.)
    let req = report.requests.get(1).expect("second report");
    assert_eq!(req.ctx.as_ref().unwrap().authority, "tele.test.svc.cluster.local");
    assert_eq!(req.count, 2);
    assert_eq!(req.responses.len(), 1);
    let res = req.responses.get(0).expect("responses[0]");
    // response latencies should always have a length equal to the number
    // of latency buckets in the latency histogram.
    assert_eq!(
        res.response_latency_counts.len(),
        report.histogram_bucket_bounds_tenth_ms.len()
    );
    for (idx, bucket) in res.response_latency_counts.iter().enumerate() {
        // 40 ms of extra latency should put us in the 40-50
        // millisecond bucket (the 10th bucket)
        if idx == 9 {
            assert_eq!(*bucket, 2, "poorly bucketed latencies: {:?}", res.response_latency_counts);
        } else {
            assert_eq!(*bucket, 0, "poorly bucketed latencies: {:?}", res.response_latency_counts);
        }
    }
}
// Intentionally empty placeholder: the proxy is expected to tolerate
// controller-side report errors. TODO(review): implement the scenario or
// remove the stub.
#[test]
fn telemetry_report_errors_are_ignored() {}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// TimeseriesWidgetDefinitionType : Type of the timeseries widget.
/// Type of the timeseries widget.
// Single-variant enum generated from the OpenAPI spec; the serde rename pins
// the JSON wire value to "timeseries".
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum TimeseriesWidgetDefinitionType {
    #[serde(rename = "timeseries")]
    TIMESERIES,
}
/// Renders the variant as its wire value ("timeseries").
///
/// Implemented as `Display` rather than a manual `ToString` impl (clippy's
/// `to_string_trait_impl`): the standard blanket `impl<T: Display> ToString`
/// keeps `.to_string()` working for existing callers, and the type also
/// becomes usable directly in `format!`/`write!`.
impl std::fmt::Display for TimeseriesWidgetDefinitionType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::TIMESERIES => write!(f, "timeseries"),
        }
    }
}
|
#![allow(dead_code)]
#![allow(unused_imports)]
use mongodb::{
bson,
bson::{doc, Bson},
// bson::document::Document,
error::Result,
Client
};
use std::env;
use futures_util::StreamExt;
// use futures::future::join_all;
// use futures::join;
use futures::try_join;
use futures::future::try_join_all;
use serde::{Deserialize, Serialize};
use rand::Rng;
use std::collections::HashMap;
/// Prints a one-line summary of `user_data` to stdout (no trailing newline).
///
/// Delegates to `dump_user_data_to_str` so the two helpers share one format
/// string (previously the format string was duplicated in both functions).
fn dump_user_data(user_data: UserData) {
    print!("{}", dump_user_data_to_str(user_data));
}
/// Formats a one-line summary of a `UserData` record.
///
/// NOTE(review): `name` holds the human-readable size produced by
/// `bytes_to_human` (see `create_user_data`), which is why it is printed
/// under the "of size" label.
fn dump_user_data_to_str(UserData{ name, created_at, .. }: UserData) -> String {
    format!("[at: {}, of size: {}]", created_at, name)
}
/// Prints every document of every collection in the "users" database to
/// stdout, one tab-indented `UserData` summary per line.
async fn dump(client: &Client) -> Result<()> {
    let db = client.database("users");
    for collection_name in db.list_collection_names(None).await? {
        println!(">>{}", collection_name);
        // let cursor = db.collection(&collection_name).find(None, None).await?;
        // let entries: Vec<_> = cursor.collect().await;
        // println!("<<{} entries>>", entries.len());
        let mut cursor = db.collection(&collection_name).find(None, None).await?;
        while let Some(document) = cursor.next().await {
            // Deserialize the BSON document back into `UserData`; a malformed
            // document aborts the dump with a deserialization error.
            let user_data = bson::from_bson(Bson::Document(document?))?;
            print!("\t");
            dump_user_data(user_data);
            println!();
        }
    }
    println!();
    Ok(())
}
/// Same traversal as `dump`, but accumulates the output into a `String`
/// instead of printing it.
async fn dump_to_str(client: &Client) -> Result<String> {
    let mut result = String::new();
    let db = client.database("users");
    for collection_name in db.list_collection_names(None).await? {
        result.push_str(&format!(">>{}\n", collection_name));
        // let cursor = db.collection(&collection_name).find(None, None).await?;
        // let entries: Vec<_> = cursor.collect().await;
        // println!("<<{} entries>>", entries.len());
        let mut cursor = db.collection(&collection_name).find(None, None).await?;
        while let Some(document) = cursor.next().await {
            let user_data = bson::from_bson(Bson::Document(document?))?;
            result.push('\t');
            result.push_str(&dump_user_data_to_str(user_data));
            result.push('\n');
        }
    }
    result.push('\n');
    Ok(result)
}
/// Formats a byte count as a short human-readable string ("512 B", "1.5 KB").
///
/// Matches the original integer-arithmetic behavior exactly: the fractional
/// digit is truncated (never rounded), and a nonzero remainder whose digit
/// truncates to zero still prints as ".0" (e.g. 1074 -> "1.0 KB"). Suffixes
/// K, M and G cover the whole `u32` range.
fn bytes_to_human(mut size: u32) -> String {
    if size < 1024 {
        return format!("{} B", size);
    }
    // Climb the unit ladder until the quotient drops below 1024.
    let mut letter_index = 0;
    let mut whole = size / 1024;
    while whole >= 1024 {
        letter_index += 1;
        size /= 1024;
        whole = size / 1024;
    }
    let mut out = whole.to_string();
    let leftover = size % 1024;
    if leftover != 0 {
        // One truncated decimal digit; may legitimately be 0.
        out.push('.');
        out.push_str(&(leftover * 10 / 1024).to_string());
    }
    out.push(' ');
    out.push_str("KMG".get(letter_index..letter_index + 1).expect("Size too large"));
    out.push('B');
    out
}
#[derive(Deserialize, Serialize)]
struct UserData {
    // Human-readable size string produced by `bytes_to_human` (see
    // `create_user_data`).
    name: String,
    size_bytes: u32,
    // Filler payload; `create_user_data` fills it with a repeated constant.
    content: Vec<u32>,
    created_at: Time,
}
/// Converts a `UserData` value into a BSON document with one field per struct
/// member, preserving field order.
fn user_data_to_doc(UserData {name, size_bytes, content, created_at}: UserData) -> bson::Document {
    doc! { "name": name, "size_bytes": size_bytes, "content": content, "created_at": created_at }
}
/// Inserts one `UserData` record into `collection`.
async fn add_data(collection: &mongodb::Collection, data: UserData) -> Result<()> {
    collection.insert_one(user_data_to_doc(data), None).await?;
    Ok(())
}
/// Builds a synthetic `UserData` record.
///
/// `size_4_bytes` is the payload length in 4-byte words: the `content` vector
/// gets that many `u32` entries, and `size_bytes`/`name` record the byte count
/// (`size_4_bytes * 4`) numerically and as a human-readable string.
fn create_user_data(size_4_bytes: u32, created_at: Time) -> UserData {
    UserData {
        name: bytes_to_human(size_4_bytes * 4),
        size_bytes: size_4_bytes * 4,
        content: vec![25; size_4_bytes as usize],
        created_at,
    }
}
/// Maps a user id to its per-user collection ("user{id}") in the "users" db.
fn to_collection(client: &Client, id: UserId) -> mongodb::Collection {
    client.database("users").collection(&format!("user{}", id))
}
// ============================================================================
// ============================================================================
// ============================================================================
#[derive(Debug, Copy, Clone, Deserialize, Serialize)]
enum Location {
    Virginia, Belgium, Ireland, Tokyo
}
impl Location {
    /// Picks one of the four locations uniformly at random.
    fn random() -> Location {
        let mut rng = rand::thread_rng();
        // gen_range(0..4) only yields 0-3, so the final arm is unreachable; it
        // exists to satisfy the exhaustiveness check on the integer match.
        match rng.gen_range(0..4) {
            0 => Location::Virginia,
            1 => Location::Belgium,
            2 => Location::Ireland,
            3 => Location::Tokyo,
            _ => panic!("Invalid range generated for Location"),
        }
    }
}
/// CRUD "Create": inserts a fixed-size (15 * 1024 words) record stamped `time`.
async fn create_data(collection: &mongodb::Collection, time: Time) -> Result<()>{
    let size = 15 * 1024;
    let data = create_user_data(size, time);
    add_data(&collection, data).await?;
    Ok(())
}
/// CRUD "Delete": removes every record whose `created_at` equals `time`.
async fn delete_data(collection: &mongodb::Collection, time: Time) -> Result<()>{
    collection.delete_many(doc!{ "created_at": time }, None).await?;
    Ok(())
}
/// CRUD "Read": fetches (and discards) one record with `created_at == time`.
async fn read_data(collection: &mongodb::Collection, time: Time) -> Result<()>{
    collection.find_one(doc!{ "created_at": time }, None).await?;
    Ok(())
}
/// CRUD "Update": replaces the first record with `created_at == time` by a new
/// 10 * 1024-word record. Note the replacement is stamped `10 * time`, i.e. the
/// update moves the record's `created_at` — see the TODO below.
async fn update_data(collection: &mongodb::Collection, time: Time) -> Result<()>{
    // TODO: stay within field for "at"
    let size = 10 * 1024;
    let data = user_data_to_doc(create_user_data(size, 10 * time));
    collection.replace_one(doc!{ "created_at": time }, data, None).await?;
    Ok(())
}
#[derive(Debug, Copy, Clone, Deserialize, Serialize)]
enum OperationType {
    Create, Read, Update, Delete
}
impl OperationType {
    /// Picks one of the four operation kinds uniformly at random.
    fn random() -> OperationType {
        let mut rng = rand::thread_rng();
        // gen_range(0..4) only yields 0-3; the final arm is unreachable.
        match rng.gen_range(0..4) {
            0 => OperationType::Create,
            1 => OperationType::Read,
            2 => OperationType::Update,
            3 => OperationType::Delete,
            _ => panic!("Invalid range generated for OperationType"),
        }
    }
}
#[derive(Debug, Copy, Clone, Deserialize, Serialize)]
struct Operation {
    operation_type: OperationType,
    // Timestamp the operation targets (used as the `created_at` filter/value).
    action_time: Time,
}
impl Operation {
    /// No-op stand-in for `perform` that touches no data — presumably used for
    /// dry runs; it ignores the collection entirely.
    async fn perform_fake(&self, _collection: &mongodb::Collection) -> Result<()>{
        Ok(())
    }
    /// Dispatches to the matching CRUD helper against `collection`.
    async fn perform(&self, collection: mongodb::Collection) -> Result<()>{
        match self.operation_type {
            OperationType::Create => create_data(&collection, self.action_time).await?,
            OperationType::Read => read_data(&collection, self.action_time).await?,
            OperationType::Update => update_data(&collection, self.action_time).await?,
            OperationType::Delete => delete_data(&collection, self.action_time).await?,
        }
        Ok(())
    }
    fn new(operation_type: OperationType, action_time: Time) -> Operation {
        Operation{ operation_type, action_time }
    }
}
/// Simulated user identifier.
type UserId = u32;
/// Logical timestamp used both to order requests and to tag documents.
type Time = u64;

/// One simulated request: who issues it, from where, what it does, and when.
#[derive(Debug, Copy, Clone, Deserialize, Serialize)]
struct UserRequest {
    id: UserId,
    from: Location,
    operation: Operation,
    time: Time,
}

impl UserRequest {
    /// Creates a request carrying a random operation that targets a random
    /// recent timestamp in `1..=10`.
    fn new(id: UserId, from: Location, time: Time) -> UserRequest {
        let max_action_time = 10;
        let action_time = rand::thread_rng().gen_range(1..=max_action_time);
        let operation = Operation::new(OperationType::random(), action_time);
        UserRequest{ id, from, operation, time }
    }
}
/// Builds `amount` consecutive requests for one user from one location,
/// stamped with timestamps `start_time`, `start_time + 1`, ...
fn create_user_request_batch(amount: usize, id: UserId, from: Location, start_time: Time) -> Vec<UserRequest> {
    (0..amount)
        .map(|offset| UserRequest::new(id, from, start_time + offset as Time))
        .collect()
}
/// Builds a small, hand-tuned request stream for a single random user.
///
/// The stream is four consecutive batches (3, 5, 4 and 2 requests), each
/// issued from a freshly chosen random location, with strictly increasing
/// timestamps starting at 1. Previously this was four copy-pasted blocks;
/// iterating over the batch sizes keeps behavior identical while removing
/// the duplication.
fn create_manual_user_request_stream() -> Vec<UserRequest> {
    const BATCH_SIZES: [usize; 4] = [3, 5, 4, 2];
    let max_id = 8;
    // One random user for the whole stream.
    let id = rand::thread_rng().gen_range(1..=max_id);
    let mut stream = Vec::with_capacity(BATCH_SIZES.iter().sum());
    let mut time: Time = 1;
    for &amount in BATCH_SIZES.iter() {
        // Each batch comes from a new random location.
        let location = Location::random();
        stream.append(&mut create_user_request_batch(amount, id, location, time));
        time += amount as Time;
    }
    stream
}
/// Builds `size` requests with increasing timestamps, switching to a new
/// random (user id, location) pair after each random run of 1..=10 requests.
fn create_random_user_request_stream(size: usize) -> Vec<UserRequest> {
    let mut stream = Vec::with_capacity(size);
    let mut time = 1;
    let mut location = Location::random();
    let max_amount = 10;
    // `amount` counts down the remaining requests in the current run.
    let mut amount = rand::thread_rng().gen_range(1..=max_amount);
    let max_id = 8;
    let mut id = rand::thread_rng().gen_range(1..=max_id);
    for _ in 0..size {
        stream.push(UserRequest::new(id, location, time));
        amount -= 1;
        if amount == 0 {
            // Start a new run: fresh location, run length and user id.
            location = Location::random();
            amount = rand::thread_rng().gen_range(1..=max_amount);
            id = rand::thread_rng().gen_range(1..=max_id);
        }
        time += 1;
    }
    stream
}
// ============================================================================
// ============================================================================
// ============================================================================
/// Index of a storage backend in `Brain::clients` / `Brain::names`.
type StorageId = usize;

/// Orchestrates a set of MongoDB storages: routes each request to the best
/// replica, keeps replicas in sync, and allocates/drops per-user replicas
/// based on where the user's requests have been coming from.
struct Brain {
    // One client per storage location; indexed by StorageId.
    clients: Vec<Client>,
    // Human-readable name per storage; parallel to `clients`.
    names: Vec<&'static str>,
    // For each user, the storages currently holding a replica of their data.
    storage_ids_for_user_id: HashMap<UserId, Vec<StorageId>>,
    // For each user, the storage ids their requests *wanted* (by origin
    // location, not where they were actually routed), most recent last.
    history: HashMap<UserId, Vec<StorageId>>,
}
impl Brain {
    /// True when the user's data lives on at most one storage — in that case
    /// the last replica must never be dropped.
    fn only_one_storage_for(&self, user_id: UserId) -> bool {
        self.storage_ids_for_user_id.get(&user_id)
            .expect(&format!("Uninitialized user {} in storage_ids_for_user_id", user_id))
            .len() <= 1
    }

    /// Drop rule: drop `storage_id` for `user_id` when none of the user's
    /// last `threshold` requests wanted that storage.
    fn rule_time_to_delete_storage_for(&self, storage_id: StorageId, user_id: UserId) -> bool {
        let threshold = 10;
        let mut foreign_counter = 0;
        let history = self.history.get(&user_id).expect(&format!("Uninitialized user {} in history", user_id));
        // Walk the history most-recent-first; any hit within the window keeps
        // the replica alive.
        for &target in history.iter().rev() {
            if target == storage_id {
                // println!("\t[RULE Delete] For {} there WAS a request to {} during last {} queries. Will NOT delete.",
                // user_id, self.names[storage_id], threshold);
                return false;
            }
            foreign_counter += 1;
            if foreign_counter >= threshold {
                println!("\t[RULE Delete] For {} there was NOT a request to {} during last {} queries. WILL delete.",
                user_id, self.names[storage_id], threshold);
                return true;
            }
        }
        // There was request for storage_id, but THRESHOLD has not been reached yet
        false
    }

    /// Allocation rule: allocate `storage_id` for `user_id` when at least
    /// `amount_threshold` of the user's last `depth_threshold` requests
    /// wanted that (not yet allocated) storage.
    fn rule_time_to_allocate_storage_for(&self, storage_id: StorageId, user_id: UserId) -> bool {
        let already_allocated_storages = self.storage_ids_for_user_id.get(&user_id)
            .expect(&format!("Uninitialized user {} in storage_ids_for_user_id", user_id));
        let is_known = |storage_id| already_allocated_storages.contains(&storage_id);
        if is_known(storage_id) {
            // Already allocated
            return false;
        }
        let depth_threshold = 8;
        let amount_threshold = 3;
        let mut amount = 0;
        let mut depth = 0;
        let history = self.history.get(&user_id).expect(&format!("Uninitialized user {} in history", user_id));
        for &target in history.iter().rev() {
            if target == storage_id {
                amount += 1;
            }
            if amount >= amount_threshold {
                println!("\t[RULE Allocate] For {} there WAS enough({}) requests that should have been to {} during last {} queries. Will allocate.",
                user_id, amount_threshold, self.names[storage_id], depth_threshold);
                return true;
            }
            depth += 1;
            if depth >= depth_threshold {
                // println!("\t[RULE Allocate] For {} there was NOT enough({}) requests that should have been to {} during last {} queries.",
                // user_id, amount_threshold, self.names[storage_id], depth_threshold);
                return false;
            }
        }
        // Not deep enough history
        false
    }

    /// Drops the user's collection on `storage_id` and forgets that replica.
    // NOTE(review): the expect message says "in history" but the map accessed
    // is storage_ids_for_user_id — message looks copy-pasted.
    async fn delete_storage_for_user(&mut self, storage_id: StorageId, user_id: UserId) -> Result<()> {
        self.clients[storage_id].database("users").collection(&format!("user{}", user_id)).drop(None).await?;
        self.storage_ids_for_user_id.get_mut(&user_id)
            .expect(&format!("Uninitialized user {} in history", user_id))
            .retain(|&x| x != storage_id);
        Ok(())
    }

    /// Copies the user's data onto `storage_id` (from the first existing
    /// replica) and records the new replica.
    async fn allocate_storage_for_user(&mut self, storage_id: StorageId, user_id: UserId) -> Result<()> {
        println!("\t Allocating storage {} for user {}", storage_id, user_id);
        // Fetch data from any Storage with this user's data
        // ...just pick the first for now
        let storage_id_to_fetch_from = self.storage_ids_for_user_id.get(&user_id)
            .expect(&format!("Uninitialized user {} in storage_ids_for_user_id", user_id))
            [0];
        println!("\t Using {} to transfer from", storage_id_to_fetch_from);
        // Stream the whole collection and deserialize each document.
        let user_data = to_collection(&self.clients[storage_id_to_fetch_from], user_id).find(None, None).await?
            .map(|document| bson::from_bson(Bson::Document(document.expect("Failed to parse user_data"))).expect("Failed to parse user_data"))
            .collect::<Vec<UserData>>().await;
        println!("\t Collected {} entries", user_data.len());
        // Transfer this data to the new location
        if user_data.len() > 0 {
            to_collection(&self.clients[storage_id], user_id).insert_many(user_data.into_iter().map(user_data_to_doc), None).await?;
            println!("\tFinished transfering to {}", storage_id);
        } else {
            // insert_many with an empty batch would error, so skip it.
            println!("\tNo records exist for this user, so no records will be transfered");
        }
        // Update records
        self.storage_ids_for_user_id.get_mut(&user_id)
            .expect(&format!("Uninitialized user {} in storage_ids_for_user_id", user_id))
            .push(storage_id);
        Ok(())
    }

    /// Maps a storage index to its MongoDB client.
    fn storage_id_to_client(&self, id: StorageId) -> &Client {
        &self.clients[id]
    }

    /// Prints the full contents of all four storages, fetching them in
    /// parallel.
    async fn dump(&self) -> Result<()> {
        // Sequential
        // for (i, client) in self.clients.iter().enumerate() {
        // println!("######## {} contents ########", self.names[i]);
        // dump(&client).await?;
        // println!("########################################");
        // }
        // println!();
        // Ok(())
        // Parallel
        println!("======== Dumping all databases ========");
        let (s1, s2, s3, s4) = try_join!(
            dump_to_str(&self.clients[0]),
            dump_to_str(&self.clients[1]),
            dump_to_str(&self.clients[2]),
            dump_to_str(&self.clients[3])
        )?;
        let ss = vec![s1, s2, s3, s4];
        for (i, s) in ss.into_iter().enumerate() {
            println!("######## {} contents ########", self.names[i]);
            println!("{}", s);
            println!("########################################");
        }
        println!();
        Ok(())
    }

    /// Rebuilds the user->replicas map by listing each client's "users"
    /// database; collection names are expected to look like "user<id>".
    async fn collect_storage_ids_for_user_id(clients: &Vec<Client>, names: &Vec<&'static str>) -> Result<HashMap<UserId, Vec<StorageId>>> {
        let mut map = HashMap::new();
        for (i, client) in clients.iter().enumerate() {
            let user_ids: Vec<UserId> = client.database("users")
                .list_collection_names(None).await?
                .into_iter()
                .map(|user_str| user_str
                    .strip_prefix("user")
                    .expect("Collection name didn't start with 'user'")
                    .parse::<UserId>()
                    .expect("Failed to parse UserId from Collectio name"))
                // .for_each(|user_id| map.entry(user_id).or_insert(Vec::new()).push(i))
                .collect();
            for &id in user_ids.iter() {
                map.entry(id).or_insert(Vec::new()).push(i);
            }
            println!("Client {} contains user ids={:?}", names[i], user_ids);
        }
        println!("The final map:{:?}", map);
        Ok(map)
    }

    /// Seeds an empty history entry for every user id found on any client.
    // TODO: copy-pasted and modified from collect_storage_ids_for_user_id
    // TODO: can merge.....
    async fn initialize_history(clients: &Vec<Client>) -> Result<HashMap<UserId, Vec<StorageId>>> {
        let mut map = HashMap::new();
        for client in clients.iter() {
            let user_ids: Vec<UserId> = client.database("users")
                .list_collection_names(None).await?
                .into_iter()
                .map(|user_str| user_str
                    .strip_prefix("user")
                    .expect("Collection name didn't start with 'user'")
                    .parse::<UserId>()
                    .expect("Failed to parse UserId from Collectio name"))
                .collect();
            for id in user_ids.into_iter() {
                map.entry(id).or_insert(Vec::new());
            }
        }
        println!("Final known IDs for history:{:?}", map.keys());
        Ok(map)
    }

    /// Builds a Brain over the given clients, recovering replica placement
    /// and (empty) per-user histories from what is already in the databases.
    async fn new(clients: Vec<Client>, names: Vec<&'static str>) -> Result<Brain> {
        let storage_ids_for_user_id = Brain::collect_storage_ids_for_user_id(&clients, &names).await?;
        let history = Brain::initialize_history(&clients).await?;
        Ok(Brain {
            clients,
            names,
            // storage_ids_for_user_id: HashMap::new(),
            storage_ids_for_user_id,
            // history: HashMap::new(),
            history,
        })
    }

    /// Maps a request origin to the storage index serving that region.
    // XXX: prone to incorrect indexing!!!
    // XXX: prone to incorrect indexing!!!
    // XXX: prone to incorrect indexing!!!
    fn location_to_storage_id(&self, location: &Location) -> StorageId {
        match location {
            Location::Belgium => 0,
            Location::Virginia => 1,
            Location::Tokyo => 2,
            Location::Ireland => 3,
        }
    }

    /// Inverse of `location_to_storage_id`.
    fn storage_id_to_location(&self, storage_id: StorageId) -> Location {
        match storage_id {
            0 => Location::Belgium,
            1 => Location::Virginia,
            2 => Location::Tokyo,
            3 => Location::Ireland,
            _ => panic!("Invalid range for Location"),
        }
    }

    /// Chooses the first storage for a brand-new user.
    fn pick_new_storage(&self, from: Location) -> StorageId {
        // Pick Storage with best location
        return self.location_to_storage_id(&from);
        // Pick Storage with min amount of users
        // let mut min_index = 0;
        // let mut min = StorageId::MAX;
        // for (i, client) in self.clients.iter().enumerate() {
        // let users = client.database("users").list_collection_names(None).await?.len();
        // if users < min {
        // min_index = i;
        // min = users;
        // }
        // }
        // println!("\tThe storage with minimum users is {}(with {} users)", self.names[min_index], min);
        // Ok(min_index)
    }

    /// Picks the replica to serve a request: the one matching the request's
    /// origin if available, otherwise a random one of the user's replicas.
    fn select_best_from(&self, storages: &Vec<StorageId>, from: Location) -> StorageId {
        // Select closest based on location
        let target = self.location_to_storage_id(&from);
        for &storage_id in storages {
            if storage_id == target {
                // Found best match!
                return storage_id;
            }
        }
        // ... otherwise select randomly
        let mut rng = rand::thread_rng();
        storages[rng.gen_range(0..storages.len())]
    }

    /// True when the user already has at least one replica recorded.
    fn registered_user(&self, user_id: UserId) -> bool {
        return self.storage_ids_for_user_id.contains_key(&user_id);
    }

    /// Handles one request end to end: register the user if new, route the
    /// operation to the best replica, replicate it to the remaining replicas
    /// in parallel, record the request in history, then apply the
    /// allocate/delete placement rules.
    async fn handle_request(&mut self, user_request: UserRequest) -> Result<()> {
        let UserRequest{ id: user_id, from, operation, .. } = user_request;
        println!(":> Handling {:?}", &user_request);
        // Need to register this user?
        if !self.registered_user(user_id) {
            let first_home = self.pick_new_storage(from); // XXX: should be in-place, but the BC complains
            println!("\tThis is a new user, selecting storage {} for it", self.names[first_home]);
            // Initialize everything for this user
            self.storage_ids_for_user_id.insert(user_id, vec![first_home]);
            self.history.insert(user_id, vec![]);
        }
        // Determine the server (Storage) to work with
        let available_storage_ids = self.storage_ids_for_user_id.get(&user_id).unwrap();
        let names = available_storage_ids.iter().map(|&id| self.names[id]).collect::<Vec<_>>();
        println!("\tThere are {} storage variants for this request: {:?}", names.len(), names);
        let selected_storage_id = self.select_best_from(available_storage_ids, from);
        println!("\tSelecting variant {}", self.names[selected_storage_id]);
        // Perform the operation
        let client = self.storage_id_to_client(selected_storage_id);
        operation.perform(to_collection(client, user_id)).await?;
        println!("\tPerforming operation on {}", self.names[selected_storage_id]);
        // operation.perform_fake(&to_collection(client, user_id)).await?;
        // ... and maybe return result to user
        // <<< RETURN RESULT HERE >>>
        // Sync across all DBs
        // Sequential
        // {
        // for &storage_id in available_storage_ids {
        // if storage_id == selected_storage_id {
        // // Have performed this operation on this DB already
        // continue;
        // }
        // let client = self.storage_id_to_client(storage_id);
        // operation.perform(&to_collection(client, user_id)).await?;
        // }
        // }
        // Parallel
        {
            // let mut operations_temp = Vec::new();
            let mut operations = Vec::new();
            for &storage_id in available_storage_ids {
                if storage_id == selected_storage_id {
                    // Have performed this operation on this DB already
                    println!("\t\tOperation on {} has already been synced, skipping", self.names[storage_id]);
                    continue;
                }
                println!("\t\tSyncing operation on {}", self.names[storage_id]);
                let client = self.storage_id_to_client(storage_id);
                operations.push(operation.perform(to_collection(client, user_id)));
                // operations_temp.push(to_collection(client, user_id));
            }
            // for collection in operations_temp.into_iter() {
            // operations.push(operation.perform(collection));
            // }
            try_join_all(operations).await?;
        }
        // Update history
        // We use _from_ here and not _selected_ because we use this information
        // to see where the requests are wanting to go, not where we direct them
        // based on what we currently have
        let loc = self.location_to_storage_id(&from);
        self.history.get_mut(&user_id).expect(&format!("Uninitialized user {}", user_id)).push(loc);
        // Apply rules
        for storage_id in 0..4 { // clone() used for BC
            if self.rule_time_to_allocate_storage_for(storage_id, user_id) {
                println!("\t\t[RULE Allocate]: time to allocate storage {} for this user ({})", self.names[storage_id], user_id);
                self.allocate_storage_for_user(storage_id, user_id).await?;
            }
        }
        if !self.only_one_storage_for(user_id) {
            let available_storage_ids = self.storage_ids_for_user_id.get(&user_id).unwrap().clone(); // clone for BC
            for storage_id in available_storage_ids {
                if self.rule_time_to_delete_storage_for(storage_id, user_id) {
                    println!("\t\t[RULE Delete]: time to delete storage {} for this user ({})", self.names[storage_id], user_id);
                    self.delete_storage_for_user(storage_id, user_id).await?;
                }
            }
        }
        Ok(())
    }

    /// Clears all in-memory records and drops the "users" database on every
    /// client (in parallel).
    async fn reset(&mut self) -> Result<()> {
        println!(":> Resetting records");
        self.storage_ids_for_user_id = HashMap::new();
        self.history = HashMap::new();
        // for (i, client) in self.clients.iter().enumerate() {
        // println!(":> Resetting {}", self.names[i]);
        // client.database("users").drop(None).await?
        // }
        println!("Deleting dbs...");
        // Bind the Database handles first so the futures passed to try_join!
        // don't borrow temporaries.
        let a1 = self.clients[0].database("users");
        let a2 = self.clients[1].database("users");
        let a3 = self.clients[2].database("users");
        let a4 = self.clients[3].database("users");
        try_join!(
            a1.drop(None),
            a2.drop(None),
            a3.drop(None),
            a4.drop(None)
        )?;
        Ok(())
    }
}
// ============================================================================
// ============================================================================
// ============================================================================
// ============================================================================
// ============================================================================
// ============================================================================
/// Entry point: connects to the four storages (URIs come from MONGO_* env
/// vars), replays a random request stream through the `Brain`, then dumps
/// the contents of all databases.
#[tokio::main]
async fn main() -> Result<()> {
    let mut brain = Brain::new(
        vec![
            Client::with_uri_str(env::var("MONGO_MAPLE").expect("Set the MONGO_<NAME> env!").as_ref()).await?,
            Client::with_uri_str(env::var("MONGO_LEMON").expect("Set the MONGO_<NAME> env!").as_ref()).await?,
            Client::with_uri_str(env::var("MONGO_CHRISTMAS").expect("Set the MONGO_<NAME> env!").as_ref()).await?,
            Client::with_uri_str(env::var("MONGO_ORANGE").expect("Set the MONGO_<NAME> env!").as_ref()).await?,
        ],
        // Names are parallel to the clients above; the region in parentheses
        // must match Brain::location_to_storage_id's index mapping.
        vec!["Maple Tree (Belgium)", "Lemon Tree (Virginia)", "Christmas Tree (Tokyo)", "Orange Tree (Ireland)"]
    ).await?;
    // brain.reset().await?;
    let requests = create_random_user_request_stream(128);
    for request in requests {
        brain.handle_request(request).await?;
    }
    brain.dump().await?;
    Ok(())
}
|
use log::{debug, error};
use std::collections::HashMap;
use std::fmt;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::sync::Arc;
use async_trait::async_trait;
use futures::future::Future;
use nvim_rs::{create::Spawner, neovim::Neovim, Handler};
use rmpv::Value;
use crate::nvim_gio::GioWriter;
use crate::thread_guard::ThreadGuard;
use crate::ui::color::{Color, Highlight};
#[cfg(test)]
mod tests;
/// Extracts `&str` from a msgpack value, panicking if it is not a string.
macro_rules! unwrap_str {
    ($val:expr) => {
        // No trailing `;`: the expansion must be a plain expression
        // (a semicolon here triggers `semicolon_in_expressions_from_macros`).
        $val.as_str().unwrap()
    };
}
/// Extracts `u64` from a msgpack value, panicking if it is not an unsigned int.
macro_rules! unwrap_u64 {
    ($val:expr) => {
        // No trailing `;` — expansion must stay an expression.
        $val.as_u64().unwrap()
    };
}
/// Extracts `i64` from a msgpack value, panicking if it is not a signed int.
macro_rules! unwrap_i64 {
    ($val:expr) => {
        // No trailing `;` — expansion must stay an expression.
        $val.as_i64().unwrap()
    };
}
/// Extracts `f64` from a msgpack value, panicking if it is not a float.
macro_rules! unwrap_f64 {
    ($val:expr) => {
        // No trailing `;` — expansion must stay an expression.
        $val.as_f64().unwrap()
    };
}
/// Extracts the array from a msgpack value, panicking if it is not an array.
macro_rules! unwrap_array {
    ($val:expr) => {
        // No trailing `;` — expansion must stay an expression.
        $val.as_array().unwrap()
    };
}
/// Extracts the key/value map from a msgpack value, panicking if it is not a map.
macro_rules! unwrap_map {
    ($val:expr) => {
        // No trailing `;` — expansion must stay an expression.
        $val.as_map().unwrap()
    };
}
/// Extracts `bool` from a msgpack value, panicking if it is not a boolean.
macro_rules! unwrap_bool {
    ($val:expr) => {
        // No trailing `;` — expansion must stay an expression.
        $val.as_bool().unwrap()
    };
}
/// Extracts `&str` from a msgpack value or early-returns (via `?`) an error
/// `String` containing `$msg`. Only usable inside functions whose error type
/// accepts a `String`.
macro_rules! try_str {
    ($val:expr, $msg:expr) => {
        $val.as_str()
            .ok_or(format!("Value is not an str: {}", $msg))?
    };
}
/// Extracts `u64` from a msgpack value or early-returns (via `?`) an error
/// `String` containing `$msg`.
macro_rules! try_u64 {
    ($val:expr, $msg:expr) => {
        $val.as_u64()
            .ok_or(format!("Value is not an u64: {}", $msg))?
    };
}
impl Highlight {
    /// Builds a `Highlight` from a msgpack map of property name -> value,
    /// starting from the default highlight.
    fn from_map_val(map: &[(Value, Value)]) -> Self {
        let mut hl = Highlight::default();
        for (prop, val) in map {
            hl.set(unwrap_str!(prop), val.clone());
        }
        hl
    }

    /// Applies a single highlight property by its nvim name.
    ///
    /// Unknown properties are logged (debug level) and ignored.
    fn set(&mut self, prop: &str, val: Value) {
        match prop {
            // Color properties: a non-u64 value clears the color.
            // (Option::map replaces the previous if-let/else-None chains.)
            "foreground" => self.foreground = val.as_u64().map(Color::from_u64),
            "background" => self.background = val.as_u64().map(Color::from_u64),
            "special" => self.special = val.as_u64().map(Color::from_u64),
            "reverse" => {
                self.reverse = unwrap_bool!(val);
            }
            "italic" => {
                self.italic = unwrap_bool!(val);
            }
            "bold" => {
                self.bold = unwrap_bool!(val);
            }
            "underline" => {
                self.underline = unwrap_bool!(val);
            }
            "undercurl" => {
                self.undercurl = unwrap_bool!(val);
            }
            "blend" => {
                // nvim sends blend as 0..=100; store it as 0.0..=1.0.
                self.blend = unwrap_f64!(val) / 100.0;
            }
            // Terminal-only attributes; irrelevant for a GUI.
            "cterm_fg" => {}
            "cterm_bg" => {}
            _ => {
                // Typo "highligh" fixed in the log message.
                debug!("Unknown highlight property: {}", prop);
            }
        }
    }
}
/// A notification received from nvim, pre-parsed into its payload.
pub enum Notify {
    /// Redraw event will always get parsed. If something goes wrong there,
    /// we'll panic. Messages are coming from nvim so we should always be
    /// able to parse them.
    RedrawEvent(Vec<RedrawEvent>),
    /// Gnvim event might fail parsing, because user can send basically
    /// anything to the ('Gnvim') channel.
    GnvimEvent(Result<GnvimEvent, String>),
}
/// Cursor shape as reported by nvim's `mode_info_set` event.
#[derive(Clone, Debug, PartialEq)]
pub enum CursorShape {
    Block,
    Horizontal,
    Vertical,
}

impl CursorShape {
    /// Parses a shape name case-insensitively.
    ///
    /// # Panics
    /// Panics on an unrecognized shape name (nvim should only ever send the
    /// three known shapes).
    fn from_string(name: &str) -> Self {
        // `str::to_lowercase` works directly on `&str`; the previous
        // `String::from(name)` was a needless extra allocation.
        match name.to_lowercase().as_str() {
            "block" => CursorShape::Block,
            "horizontal" => CursorShape::Horizontal,
            "vertical" => CursorShape::Vertical,
            _ => {
                panic!("Unknown cursor shape: {}", name);
            }
        }
    }
}

impl Default for CursorShape {
    fn default() -> Self {
        CursorShape::Block
    }
}
/// Per-mode cursor description from nvim's `mode_info_set` event.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct ModeInfo {
    /// The cursor blinking period (in ms)
    pub blink_on: u64,
    pub cursor_shape: CursorShape,
    /// The cursor's width (in percentages, from 0..1).
    pub cell_percentage: f64,
    // TODO(ville): Implement the rest.
}

impl ModeInfo {
    /// Applies one property from the mode-info map; properties not modeled
    /// here are silently ignored.
    fn set(&mut self, prop: &str, val: Value) {
        match prop {
            "blinkon" => {
                self.blink_on = unwrap_u64!(val);
            }
            "cursor_shape" => {
                self.cursor_shape = CursorShape::from_string(unwrap_str!(val))
            }
            "cell_percentage" => {
                // nvim sends 0..=100; stored normalized to 0.0..=1.0.
                let mut val = unwrap_u64!(val);
                // Ensure that the val is not zero.
                if val == 0 {
                    val = 100;
                }
                self.cell_percentage = val as f64 / 100.0;
            }
            _ => {}
        }
    }
}
/// One cell of a `grid_line` event.
#[derive(Debug, PartialEq)]
pub struct Cell {
    pub text: String,
    // Highlight id this cell is drawn with.
    pub hl_id: u64,
    // How many consecutive times this cell repeats.
    pub repeat: u64,
    // Set when the following cell's text was empty, i.e. this cell spans
    // two columns (see GridLineSegment parsing).
    pub double_width: bool,
}
/// Parsed `option_set` UI event.
#[derive(Debug, PartialEq)]
pub enum OptionSet {
    /// Font name.
    GuiFont(String),
    /// Space between lines.
    LineSpace(i64),
    ExtTabline(bool),
    ExtCmdline(bool),
    ExtPopupmenu(bool),
    /// Event name.
    NotSupported(String),
}

impl From<Value> for OptionSet {
    fn from(args: Value) -> Self {
        // args layout: [option_name, value]
        let args = unwrap_array!(args);
        let name = unwrap_str!(args[0]);
        match name {
            "guifont" => {
                let val = unwrap_str!(args[1]);
                OptionSet::GuiFont(String::from(val))
            }
            "linespace" => {
                let val = unwrap_i64!(args[1]);
                OptionSet::LineSpace(val)
            }
            "ext_tabline" => OptionSet::ExtTabline(unwrap_bool!(args[1])),
            "ext_cmdline" => OptionSet::ExtCmdline(unwrap_bool!(args[1])),
            "ext_popupmenu" => OptionSet::ExtPopupmenu(unwrap_bool!(args[1])),
            // Anything else is passed through by name, unparsed.
            _ => OptionSet::NotSupported(String::from(name)),
        }
    }
}
/// LSP-style kind of a completion item, normalized across completion sources.
#[derive(Clone, Debug, PartialEq)]
pub enum CompletionItemKind {
    Class,
    Color,
    Constant,
    Constructor,
    Enum,
    EnumMember,
    Event,
    Function,
    File,
    Folder,
    Field,
    Interface,
    Keyword,
    Method,
    Module,
    Operator,
    Property,
    Reference,
    Snippet,
    Struct,
    Text,
    TypeParameter,
    Unit,
    Unknown,
    Value,
    Variable,
}

impl From<&str> for CompletionItemKind {
    // Returns CompletionItemKind from a string
    //
    // Lower case kinds are vim-lsp
    // https://github.com/prabirshrestha/vim-lsp/blob/2b583fefa20b7b1a5e7481a93fb6f1fee67e0846/autoload/lsp/omni.vim#L4-L28
    // Single characters are coc.nvim
    // https://github.com/neoclide/coc.nvim/blob/909710fddb04d383e5546b0f869c44f395a80d02/src/languages.ts#L143-L167
    // Pascal cased kinds are LanguageClient-neovim
    // https://github.com/autozimu/LanguageClient-neovim/blob/0ac444affdff8db699684aa4cf04c2cb0daf0286/rplugin/python3/denite/lsp/protocol.py#L48-L55
    fn from(from: &str) -> Self {
        match from {
            "class" | "C" | "Class" => CompletionItemKind::Class,
            "color" => CompletionItemKind::Color,
            "constant" | "Constant" => CompletionItemKind::Constant,
            "constructor" | "Constructor" => CompletionItemKind::Constructor,
            "enum" | "Enum" => CompletionItemKind::Enum,
            "enum member" | "Enum Member" => CompletionItemKind::EnumMember,
            "event" | "E" | "Event" => CompletionItemKind::Event,
            "file" | "F" | "File" => CompletionItemKind::File,
            "field" | "m" | "Field" => CompletionItemKind::Field,
            "folder" | "Folder" => CompletionItemKind::Folder,
            "function" | "Function" => CompletionItemKind::Function,
            "interface" | "I" | "Interface" => CompletionItemKind::Interface,
            "keyword" | "k" | "Key" => CompletionItemKind::Keyword,
            "method" | "f" | "Method" => CompletionItemKind::Method,
            "module" | "M" | "Module" => CompletionItemKind::Module,
            "operator" | "O" | "Operator" => CompletionItemKind::Operator,
            "property" | "Property" => CompletionItemKind::Property,
            "reference" | "r" => CompletionItemKind::Reference,
            "snippet" => CompletionItemKind::Snippet,
            "struct" | "S" | "Struct" => CompletionItemKind::Struct,
            "text" => CompletionItemKind::Text,
            "type parameter" | "T" | "Type Parameter" => {
                CompletionItemKind::TypeParameter
            }
            "unit" | "U" => CompletionItemKind::Unit,
            "value" => CompletionItemKind::Value,
            "variable" | "v" | "Variable" => CompletionItemKind::Variable,
            _ => CompletionItemKind::Unknown,
        }
    }
}

impl CompletionItemKind {
    /// True when the kind string was not recognized.
    pub fn is_unknown(&self) -> bool {
        // matches! is the idiomatic form of a bool-returning match.
        matches!(self, CompletionItemKind::Unknown)
    }
}
/// One entry of the popupmenu completion list.
#[derive(Clone, Debug, PartialEq)]
pub struct CompletionItem {
    // Text inserted on selection.
    pub word: String,
    // Normalized kind (see CompletionItemKind).
    pub kind: CompletionItemKind,
    // The kind string exactly as nvim sent it.
    pub kind_raw: String,
    pub menu: String,
    pub info: String,
}
/// `popupmenu_show` event payload.
#[derive(Debug, PartialEq)]
pub struct PopupmenuShow {
    pub items: Vec<CompletionItem>,
    // Index of the initially selected item, or -1 for none.
    pub selected: i64,
    pub row: u64,
    pub col: u64,
    pub grid: i64,
}

impl From<Value> for PopupmenuShow {
    fn from(args: Value) -> Self {
        // args layout: [items, selected, row, col, grid]
        let args = unwrap_array!(args);
        let selected = unwrap_i64!(args[1]);
        let row = unwrap_u64!(args[2]);
        let col = unwrap_u64!(args[3]);
        let grid = unwrap_i64!(args[4]);
        let mut items = vec![];
        // Each item is [word, kind, menu, info].
        for item in unwrap_array!(args[0]) {
            let item = unwrap_array!(item);
            let word = unwrap_str!(item[0]).to_owned();
            let kind = CompletionItemKind::from(unwrap_str!(item[1]));
            // Keep the raw kind string as well, for display purposes.
            let kind_raw = unwrap_str!(item[1]).to_owned();
            let menu = unwrap_str!(item[2]).to_owned();
            let info = unwrap_str!(item[3]).to_owned();
            items.push(CompletionItem {
                word,
                kind,
                menu,
                info,
                kind_raw,
            });
        }
        PopupmenuShow {
            items,
            selected,
            row,
            col,
            grid,
        }
    }
}
/// `cmdline_show` event payload.
#[derive(Debug, PartialEq)]
pub struct CmdlineShow {
    // Cmdline content as (highlight id, text) chunks.
    pub content: Vec<(u64, String)>,
    // Cursor position within the content.
    pub pos: u64,
    // The leading character (":", "/", "?", ...).
    pub firstc: String,
    pub prompt: String,
    pub indent: u64,
    pub level: u64,
}

impl From<Value> for CmdlineShow {
    fn from(args: Value) -> Self {
        // args layout: [content, pos, firstc, prompt, indent, level]
        let args = unwrap_array!(args);
        // content is a list of [hl_id, text] pairs.
        let content: Vec<(u64, String)> = unwrap_array!(args[0])
            .iter()
            .map(|v| {
                let hl_id = unwrap_u64!(v[0]);
                let text = unwrap_str!(v[1]);
                (hl_id, String::from(text))
            })
            .collect();
        let pos = unwrap_u64!(args[1]);
        let firstc = String::from(unwrap_str!(args[2]));
        let prompt = String::from(unwrap_str!(args[3]));
        let indent = unwrap_u64!(args[4]);
        let level = unwrap_u64!(args[5]);
        CmdlineShow {
            content,
            pos,
            firstc,
            prompt,
            indent,
            level,
        }
    }
}
/// One `grid_line` event: a run of cells to draw on a grid row.
#[derive(Debug, PartialEq)]
pub struct GridLineSegment {
    pub grid: i64,
    pub row: u64,
    pub col_start: u64,
    pub cells: Vec<Cell>,
}

impl From<Value> for GridLineSegment {
    fn from(args: Value) -> Self {
        // args layout: [grid, row, col_start, cells]
        let entry = unwrap_array!(args);
        let grid = unwrap_i64!(entry[0]);
        let row = unwrap_u64!(entry[1]);
        let col_start = unwrap_u64!(entry[2]);
        let mut cells: Vec<Cell> = vec![];
        // Each cell is [text] or [text, hl_id] or [text, hl_id, repeat].
        for entry in unwrap_array!(entry[3]) {
            let entry = unwrap_array!(entry);
            let text = unwrap_str!(entry[0]);
            let hl_id = if entry.len() >= 2 {
                entry[1].as_u64()
            } else {
                None
            };
            let repeat = if entry.len() >= 3 {
                unwrap_u64!(entry[2])
            } else {
                1
            };
            // A missing hl_id means "same highlight as the previous cell".
            // NOTE(review): `cells.last().unwrap()` assumes nvim always sends
            // an hl_id on the first cell of a segment — per the UI protocol it
            // does, but this panics if that ever changes.
            let hl_id = if let Some(hl_id) = hl_id {
                hl_id
            } else {
                cells.last().unwrap().hl_id
            };
            // An empty-text cell marks the previous cell as double width.
            if text == "" {
                if let Some(prev) = cells.last_mut() {
                    prev.double_width = true;
                }
            }
            cells.push(Cell {
                hl_id,
                repeat,
                text: String::from(text),
                double_width: false,
            });
        }
        GridLineSegment {
            grid,
            row,
            col_start,
            cells,
        }
    }
}
/// `grid_resize` event payload: new dimensions for a grid.
#[derive(Debug, PartialEq)]
pub struct GridResize {
    pub grid: i64,
    pub width: u64,
    pub height: u64,
}

impl From<Value> for GridResize {
    fn from(args: Value) -> Self {
        // args layout: [grid, width, height]
        let args = unwrap_array!(args);
        GridResize {
            grid: unwrap_i64!(args[0]),
            width: unwrap_u64!(args[1]),
            height: unwrap_u64!(args[2]),
        }
    }
}
/// `grid_cursor_goto` event payload: new cursor cell on a grid.
#[derive(Debug, PartialEq)]
pub struct GridCursorGoto {
    pub grid: i64,
    pub row: u64,
    pub col: u64,
}

impl From<Value> for GridCursorGoto {
    fn from(args: Value) -> Self {
        // args layout: [grid, row, col]
        let args = unwrap_array!(args);
        GridCursorGoto {
            grid: unwrap_i64!(args[0]),
            row: unwrap_u64!(args[1]),
            col: unwrap_u64!(args[2]),
        }
    }
}
/// `grid_scroll` event payload.
#[derive(Debug, PartialEq)]
pub struct GridScroll {
    pub grid: i64,
    // Scroll region as [top, bottom, left, right].
    pub reg: [u64; 4],
    pub rows: i64,
    pub cols: i64,
}

impl From<Value> for GridScroll {
    fn from(args: Value) -> Self {
        // args layout: [grid, top, bot, left, right, rows, cols]
        let args = unwrap_array!(args);
        // Collect the four region bounds into a fixed-size array.
        let reg: Vec<u64> = args[1..5].iter().map(|v| unwrap_u64!(v)).collect();
        let reg = [reg[0], reg[1], reg[2], reg[3]];
        GridScroll {
            grid: unwrap_i64!(args[0]),
            reg,
            rows: unwrap_i64!(args[5]),
            cols: unwrap_i64!(args[6]),
        }
    }
}
/// `default_colors_set` event payload: global fg/bg/special colors.
#[derive(Debug, PartialEq)]
pub struct DefaultColorsSet {
    pub fg: Color,
    pub bg: Color,
    pub sp: Color,
}

impl From<Value> for DefaultColorsSet {
    fn from(args: Value) -> Self {
        // args layout: [rgb_fg, rgb_bg, rgb_sp, ...]; nvim sends -1 (non-u64)
        // for "not set", hence the unwrap_or defaults.
        let args = unwrap_array!(args);
        let fg = Color::from_u64(args[0].as_u64().unwrap_or(0));
        let bg = Color::from_u64(args[1].as_u64().unwrap_or(std::u64::MAX));
        // Default to red.
        let sp = Color::from_u64(args[2].as_u64().unwrap_or(16711680));
        DefaultColorsSet { fg, bg, sp }
    }
}
/// `hl_attr_define` event payload: a highlight definition keyed by id.
#[derive(Debug, PartialEq)]
pub struct HlAttrDefine {
    pub id: u64,
    pub hl: Highlight,
}

impl From<Value> for HlAttrDefine {
    fn from(args: Value) -> Self {
        // args layout: [id, rgb_attrs, cterm_attrs, info] — only the first
        // two entries are used here.
        let args = unwrap_array!(args);
        let id = unwrap_u64!(args[0]);
        let map = unwrap_map!(args[1]);
        let hl = Highlight::from_map_val(map);
        HlAttrDefine { id, hl }
    }
}
/// `hl_group_set` event payload: builtin group name -> highlight id.
#[derive(Debug, PartialEq)]
pub struct HlGroupSet {
    pub name: String,
    pub hl_id: u64,
}

impl From<Value> for HlGroupSet {
    fn from(args: Value) -> Self {
        // args layout: [name, hl_id]
        let args = unwrap_array!(args);
        let name = unwrap_str!(args[0]).to_string();
        let hl_id = unwrap_u64!(args[1]);
        HlGroupSet { name, hl_id }
    }
}
/// `mode_info_set` event payload.
#[derive(Debug, PartialEq)]
pub struct ModeInfoSet {
    pub cursor_shape_enabled: bool,
    pub mode_info: Vec<ModeInfo>,
}

impl From<Value> for ModeInfoSet {
    fn from(args: Value) -> Self {
        // args layout: [cursor_style_enabled, mode_info]
        let args = unwrap_array!(args);
        let cursor_shape_enabled = unwrap_bool!(args[0]);
        let mut mode_info = vec![];
        // Each entry is a map of mode properties; unknown keys are ignored
        // by ModeInfo::set.
        for info in unwrap_array!(args[1]).iter() {
            let map = unwrap_map!(info);
            let mut mode = ModeInfo::default();
            for (prop, val) in map {
                mode.set(unwrap_str!(prop), val.clone());
            }
            mode_info.push(mode);
        }
        ModeInfoSet {
            cursor_shape_enabled,
            mode_info,
        }
    }
}
/// `mode_change` event payload: mode name and its index into the
/// `mode_info_set` list.
#[derive(Debug, PartialEq)]
pub struct ModeChange {
    pub name: String,
    pub index: u64,
}

impl From<Value> for ModeChange {
    fn from(args: Value) -> Self {
        // args layout: [mode, mode_idx]
        let args = unwrap_array!(args);
        let name = unwrap_str!(args[0]).to_string();
        let index = unwrap_u64!(args[1]);
        ModeChange { name, index }
    }
}
/// `cmdline_pos` event payload: cursor position within the cmdline.
#[derive(Debug, PartialEq)]
pub struct CmdlinePos {
    pub pos: u64,
    pub level: u64,
}

impl From<Value> for CmdlinePos {
    fn from(args: Value) -> Self {
        // args layout: [pos, level]
        let args = unwrap_array!(args);
        let pos = unwrap_u64!(args[0]);
        let level = unwrap_u64!(args[1]);
        CmdlinePos { pos, level }
    }
}
/// `cmdline_special_char` event payload (e.g. the "^V" pending indicator).
#[derive(Debug, PartialEq)]
pub struct CmdlineSpecialChar {
    pub character: String,
    // Whether the cursor shifts to make room for the character.
    pub shift: bool,
    pub level: u64,
}

impl From<Value> for CmdlineSpecialChar {
    fn from(args: Value) -> Self {
        // args layout: [c, shift, level]
        let args = unwrap_array!(args);
        let c = unwrap_str!(args[0]);
        let shift = unwrap_bool!(args[1]);
        let level = unwrap_u64!(args[2]);
        CmdlineSpecialChar {
            character: c.to_string(),
            shift,
            level,
        }
    }
}
/// `cmdline_block_append` event payload: one more line for the cmdline block.
#[derive(Debug, PartialEq)]
pub struct CmdlineBlockAppend {
    // Line content as (highlight id, text) chunks.
    pub line: Vec<(u64, String)>,
}

impl From<Value> for CmdlineBlockAppend {
    fn from(args: Value) -> Self {
        // args[0] is a list of [hl_id, text] chunks.
        let line = unwrap_array!(args[0])
            .iter()
            .map(|v| {
                let hl_id = unwrap_u64!(v[0]);
                let text = unwrap_str!(v[1]);
                (hl_id, String::from(text))
            })
            .collect();
        Self { line }
    }
}
/// `tabline_update` event payload.
#[derive(Debug, PartialEq)]
pub struct TablineUpdate {
    // Handle of the current tab (kept as a raw Value).
    pub current: Value,
    // All tabs as (tab handle, display name).
    pub tabs: Vec<(Value, String)>,
}

impl From<Value> for TablineUpdate {
    fn from(args: Value) -> Self {
        // args layout: [current_tab, tabs]; each tab is a map with at least
        // "tab" and "name" keys.
        let current = args[0].clone();
        let tabs = unwrap_array!(args[1])
            .iter()
            .map(|item| {
                // NOTE(review): `map_to_hash` is defined elsewhere in this
                // file; it presumably converts a msgpack map into a
                // str-keyed lookup — confirm there.
                let m = map_to_hash(&item);
                (
                    (*m.get("tab").unwrap()).clone(),
                    unwrap_str!(m.get("name").unwrap()).to_string(),
                )
            })
            .collect();
        Self { current, tabs }
    }
}
/// `cmdline_block_show` event payload: the full multi-line cmdline block.
#[derive(Debug, PartialEq)]
pub struct CmdlineBlockShow {
    // Lines of the block, each a list of (highlight id, text) chunks.
    pub lines: Vec<Vec<(u64, String)>>,
}

impl From<Value> for CmdlineBlockShow {
    fn from(args: Value) -> Self {
        let lines = unwrap_array!(args)
            .iter()
            .map(|line| {
                // NOTE(review): indexes `line[0]` rather than iterating the
                // whole line — assumes each line wraps its chunk list in an
                // extra array level; confirm against the protocol.
                unwrap_array!(line[0])
                    .iter()
                    .map(|v| {
                        let hl_id = unwrap_u64!(v[0]);
                        let text = unwrap_str!(v[1]);
                        (hl_id, String::from(text))
                    })
                    .collect()
            })
            .collect();
        Self { lines }
    }
}
/// `win_pos` event payload: position/size of a window's grid.
#[derive(Debug, PartialEq)]
pub struct WindowPos {
    pub grid: i64,
    // Window handle, kept as a raw Value.
    pub win: Value,
    pub start_row: u64,
    pub start_col: u64,
    pub width: u64,
    pub height: u64,
}

impl From<Value> for WindowPos {
    fn from(args: Value) -> Self {
        // args layout: [grid, win, start_row, start_col, width, height]
        let args = unwrap_array!(args);
        Self {
            grid: unwrap_i64!(args[0]),
            win: args[1].clone(),
            start_row: unwrap_u64!(args[2]),
            start_col: unwrap_u64!(args[3]),
            width: unwrap_u64!(args[4]),
            height: unwrap_u64!(args[5]),
        }
    }
}
/// Corner a floating window is anchored to.
#[derive(Debug, PartialEq)]
pub enum Anchor {
    NW,
    NE,
    SW,
    SE,
}

impl Anchor {
    /// True for the west-side anchors (NW, SW).
    pub fn is_west(&self) -> bool {
        // matches! is the idiomatic form of a bool-returning match.
        matches!(self, Self::NW | Self::SW)
    }

    /// True for the north-side anchors (NW, NE).
    pub fn is_north(&self) -> bool {
        matches!(self, Self::NW | Self::NE)
    }
}
impl From<Value> for Anchor {
    /// Parses the anchor string from a `win_float_pos` event; unrecognized
    /// values fall back to NW.
    fn from(args: Value) -> Self {
        let args = unwrap_str!(args);
        match args {
            "NW" => Self::NW,
            "NE" => Self::NE,
            "SW" => Self::SW,
            "SE" => Self::SE,
            _ => Self::NW,
        }
    }
}
/// Placement of a floating window (`win_float_pos` UI event).
#[derive(Debug, PartialEq)]
pub struct WindowFloatPos {
    /// Grid id of the floating window.
    pub grid: i64,
    /// Opaque window handle (kept as a raw `Value`).
    pub win: Value,
    /// Which corner of the float is positioned at the anchor point.
    pub anchor: Anchor,
    /// Grid the float is anchored to.
    pub anchor_grid: i64,
    /// Anchor row (fractional cells).
    pub anchor_row: f64,
    /// Anchor column (fractional cells).
    pub anchor_col: f64,
    /// Whether the window can receive focus.
    pub focusable: bool,
}

impl From<Value> for WindowFloatPos {
    /// Parses `[grid, win, anchor, anchor_grid, anchor_row, anchor_col, focusable]`.
    fn from(args: Value) -> Self {
        let args = unwrap_array!(args);
        Self {
            grid: unwrap_i64!(args[0]),
            win: args[1].clone(),
            anchor: Anchor::from(args[2].clone()),
            anchor_grid: unwrap_i64!(args[3]),
            anchor_row: unwrap_f64!(args[4]),
            anchor_col: unwrap_f64!(args[5]),
            focusable: unwrap_bool!(args[6]),
        }
    }
}
/// A window moved into an external (OS-level) window
/// (`win_external_pos` UI event).
#[derive(Debug, PartialEq)]
pub struct WindowExternalPos {
    /// Grid id of the window.
    pub grid: i64,
    /// Opaque window handle (kept as a raw `Value`).
    pub win: Value,
}

impl From<Value> for WindowExternalPos {
    /// Parses `[grid, win]` event args.
    fn from(args: Value) -> Self {
        let args = unwrap_array!(args);
        Self {
            grid: unwrap_i64!(args[0]),
            win: args[1].clone(),
        }
    }
}
/// Placement of the message grid (`msg_set_pos` UI event).
#[derive(Debug, PartialEq)]
pub struct MsgSetPos {
    /// Grid id of the message area.
    pub grid: i64,
    /// Row the message grid starts at.
    pub row: u64,
    /// Whether the message area is scrolled (covers other content).
    pub scrolled: bool,
    /// Separator character drawn above the message area.
    pub sep_char: String,
}

impl From<Value> for MsgSetPos {
    /// Parses `[grid, row, scrolled, sep_char]` event args.
    fn from(args: Value) -> Self {
        let args = unwrap_array!(args);
        Self {
            grid: unwrap_i64!(args[0]),
            row: unwrap_u64!(args[1]),
            scrolled: unwrap_bool!(args[2]),
            sep_char: unwrap_str!(args[3]).to_string(),
        }
    }
}
/// One parsed nvim `redraw` UI event. Each variant carries the batched
/// per-event argument lists from a single notification; variants with no
/// payload correspond to events whose arguments are ignored.
#[derive(Debug, PartialEq)]
pub enum RedrawEvent {
    SetTitle(Vec<String>),
    GridLine(Vec<GridLineSegment>),
    GridResize(Vec<GridResize>),
    GridCursorGoto(Vec<GridCursorGoto>),
    GridClear(Vec<i64>),
    GridDestroy(Vec<i64>),
    GridScroll(Vec<GridScroll>),
    DefaultColorsSet(Vec<DefaultColorsSet>),
    HlAttrDefine(Vec<HlAttrDefine>),
    HlGroupSet(Vec<HlGroupSet>),
    OptionSet(Vec<OptionSet>),
    ModeInfoSet(Vec<ModeInfoSet>),
    ModeChange(Vec<ModeChange>),
    /// From `busy_start` (true) / `busy_stop` (false).
    SetBusy(bool),
    Flush(),
    PopupmenuShow(Vec<PopupmenuShow>),
    PopupmenuHide(),
    PopupmenuSelect(Vec<i64>),
    TablineUpdate(Vec<TablineUpdate>),
    CmdlineShow(Vec<CmdlineShow>),
    CmdlineHide(),
    CmdlinePos(Vec<CmdlinePos>),
    CmdlineSpecialChar(Vec<CmdlineSpecialChar>),
    CmdlineBlockShow(Vec<CmdlineBlockShow>),
    CmdlineBlockAppend(Vec<CmdlineBlockAppend>),
    CmdlineBlockHide(),
    WindowPos(Vec<WindowPos>),
    WindowFloatPos(Vec<WindowFloatPos>),
    WindowExternalPos(Vec<WindowExternalPos>),
    WindowHide(Vec<i64>),
    WindowClose(Vec<i64>),
    MsgSetPos(Vec<MsgSetPos>),
    /// Event recognized but deliberately not handled (e.g. mouse_on/off).
    Ignored(String),
    /// Event name not recognized at all; carries the raw command string.
    Unknown(String),
}
impl fmt::Display for RedrawEvent {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
RedrawEvent::SetTitle(..) => write!(fmt, "SetTitle"),
RedrawEvent::GridLine(..) => write!(fmt, "GridLine"),
RedrawEvent::GridResize(..) => write!(fmt, "GridResize"),
RedrawEvent::GridCursorGoto(..) => write!(fmt, "GridCursorGoto"),
RedrawEvent::GridClear(..) => write!(fmt, "GridClear"),
RedrawEvent::GridDestroy(..) => write!(fmt, "GridDestroy"),
RedrawEvent::GridScroll(..) => write!(fmt, "GridScroll"),
RedrawEvent::DefaultColorsSet(..) => {
write!(fmt, "DefaultColorsSet")
}
RedrawEvent::HlAttrDefine(..) => write!(fmt, "HlAttrDefine"),
RedrawEvent::HlGroupSet(..) => write!(fmt, "HlGroupSet"),
RedrawEvent::OptionSet(..) => write!(fmt, "OptionSet"),
RedrawEvent::ModeInfoSet(..) => write!(fmt, "ModeInfoSet"),
RedrawEvent::ModeChange(..) => write!(fmt, "ModeChange"),
RedrawEvent::SetBusy(..) => write!(fmt, "SetBusy"),
RedrawEvent::Flush(..) => write!(fmt, "Flush"),
RedrawEvent::PopupmenuShow(..) => write!(fmt, "PopupmenuShow"),
RedrawEvent::PopupmenuHide(..) => write!(fmt, "PopupmenuHide"),
RedrawEvent::PopupmenuSelect(..) => write!(fmt, "PopupmenuSelect"),
RedrawEvent::TablineUpdate(..) => write!(fmt, "TablineUpdate"),
RedrawEvent::CmdlineShow(..) => write!(fmt, "CmdlineShow"),
RedrawEvent::CmdlineHide(..) => write!(fmt, "CmdlineHide"),
RedrawEvent::CmdlinePos(..) => write!(fmt, "CmdlinePos"),
RedrawEvent::CmdlineSpecialChar(..) => {
write!(fmt, "CmdlineSpecialChar")
}
RedrawEvent::CmdlineBlockShow(..) => {
write!(fmt, "CmdlineBlockShow")
}
RedrawEvent::CmdlineBlockAppend(..) => {
write!(fmt, "CmdlineBlockAppend")
}
RedrawEvent::CmdlineBlockHide(..) => {
write!(fmt, "CmdlineBlockHide")
}
RedrawEvent::WindowPos(..) => write!(fmt, "WindowPos"),
RedrawEvent::WindowFloatPos(..) => write!(fmt, "WindowFloatPos"),
RedrawEvent::WindowExternalPos(..) => {
write!(fmt, "WindowExternalPos")
}
RedrawEvent::WindowHide(..) => write!(fmt, "WindowHide"),
RedrawEvent::WindowClose(..) => write!(fmt, "WindowClose"),
RedrawEvent::MsgSetPos(..) => write!(fmt, "MsgSetPos"),
RedrawEvent::Ignored(..) => write!(fmt, "Ignored"),
RedrawEvent::Unknown(e) => write!(fmt, "Unknown({})", e),
}
}
}
/// Gnvim-specific notifications sent from nvim (via `rpcnotify` with the
/// "Gnvim" method). See `parse_gnvim_event` for the wire format.
#[derive(Debug, PartialEq)]
pub enum GnvimEvent {
    CompletionMenuToggleInfo,
    /// Load a cursor-tooltip style from the given file path.
    CursorTooltipLoadStyle(String),
    /// Show the cursor tooltip with `(content, row, col)`.
    CursorTooltipShow(String, u64, u64),
    CursorTooltipHide,
    /// Select the named cursor-tooltip style.
    CursorTooltipSetStyle(String),
    PopupmenuWidth(u64),
    PopupmenuWidthDetails(u64),
    PopupmenuShowMenuOnAllItems(bool),
    EnableCursorAnimations(bool),
    EnableExtTabline(bool),
    EnableExtCmdline(bool),
    EnableExtPopupmenu(bool),
    /// Unrecognized command; carries the raw command string.
    Unknown(String),
}
/// Requests nvim can make to the UI that expect a response.
pub enum Request {
    /// Ask the UI for the list of available cursor tooltip styles.
    CursorTooltipStyles,
}

/// Message type that we are sending to the UI.
pub enum Message {
    /// RPC notify (see `:h rpcnotify()`).
    Notify(Notify),
    /// RPC Request (see `:h rpcrequest()`). The sender half is where the
    /// UI must deliver its response.
    Request(Sender<Result<Value, Value>>, Request),
    /// Nvim went away or reading from the rpc connection failed.
    Close,
}
/// Bridge between the nvim RPC session and the UI's glib main loop.
/// All channel ends are wrapped in `Arc<ThreadGuard<..>>` so the handler
/// (which may run on another thread) can be cloned cheaply.
#[derive(Clone)]
pub struct NvimBridge {
    /// Channel to send messages to the ui.
    tx: Arc<ThreadGuard<glib::Sender<Message>>>,
    /// Channel to pass to the UI when we receive a request from nvim.
    /// The UI should send values to this channel whenever it gets a
    /// Message::Request on its receiving end of `tx`.
    request_tx: Arc<ThreadGuard<Sender<Result<Value, Value>>>>,
    /// Receiving end of `request_tx`.
    request_rx: Arc<ThreadGuard<Receiver<Result<Value, Value>>>>,
}

impl NvimBridge {
    /// Creates a bridge that forwards nvim messages to `tx`; the internal
    /// request channel pair is created here.
    pub fn new(tx: glib::Sender<Message>) -> Self {
        let (request_tx, request_rx) = channel();
        NvimBridge {
            tx: Arc::new(ThreadGuard::new(tx)),
            request_tx: Arc::new(ThreadGuard::new(request_tx)),
            request_rx: Arc::new(ThreadGuard::new(request_rx)),
        }
    }
}
#[async_trait]
impl Handler for NvimBridge {
    type Writer = GioWriter;

    /// Handles an RPC request (`rpcrequest()`) from nvim.
    ///
    /// Only the "Gnvim" method is recognized. The parsed request is
    /// forwarded to the UI together with a response sender, and this call
    /// then blocks on the paired receiver until the UI answers.
    async fn handle_request(
        &self,
        name: String,
        args: Vec<Value>,
        _neovim: Neovim<Self::Writer>,
    ) -> Result<Value, Value> {
        match name.as_str() {
            "Gnvim" => match parse_request(args) {
                Ok(msg) => {
                    let tx = self.tx.borrow_mut();
                    tx.send(Message::Request(
                        self.request_tx.borrow_mut().clone(),
                        msg,
                    ))
                    .unwrap();
                    // Block until the UI responds on the paired channel.
                    let rx = self.request_rx.borrow_mut();
                    rx.recv().unwrap()
                }
                Err(_) => Err("Failed to parse request".into()),
            },
            _ => {
                error!("Unknown request: {}", name);
                // Fixed typo in the error payload ("Unkown" -> "Unknown").
                Err("Unknown request".into())
            }
        }
    }

    /// Handles an RPC notify (`rpcnotify()`) from nvim; parseable
    /// notifications are forwarded to the UI, the rest are logged.
    async fn handle_notify(
        &self,
        name: String,
        args: Vec<Value>,
        _neovim: Neovim<<Self as Handler>::Writer>,
    ) {
        if let Some(notify) = parse_notify(&name, args) {
            let tx = self.tx.borrow_mut();
            tx.send(Message::Notify(notify)).unwrap();
        } else {
            error!("Unknown notify: {}", name);
        }
    }
}
impl Spawner for NvimBridge {
    type Handle = ();

    /// Spawns `future` on the default glib `MainContext` (the UI main
    /// loop) instead of a dedicated executor; no join handle is returned.
    fn spawn<Fut>(&self, future: Fut) -> Self::Handle
    where
        Fut: Future<Output = ()> + Send + 'static,
    {
        let c = glib::MainContext::default();
        c.spawn(future);
    }
}
/// Parses a "Gnvim" RPC request. The first element is the command name;
/// only `CursorTooltipGetStyles` is currently supported.
fn parse_request(args: Vec<Value>) -> Result<Request, ()> {
    let cmd = unwrap_str!(args[0]);
    match cmd {
        "CursorTooltipGetStyles" => Ok(Request::CursorTooltipStyles),
        _ => Err(()),
    }
}

/// Routes an incoming RPC notification by method name; returns `None`
/// for methods this bridge does not understand.
fn parse_notify(name: &str, args: Vec<Value>) -> Option<Notify> {
    match name {
        "redraw" => Some(Notify::RedrawEvent(parse_redraw_event(args))),
        "Gnvim" => Some(Notify::GnvimEvent(parse_gnvim_event(args))),
        _ => None,
    }
}
/// Maps one nvim `redraw` sub-event (`cmd`) and its batched argument
/// lists (`args`, one entry per occurrence in the batch) to a typed
/// `RedrawEvent`. Unhandled-but-expected events become `Ignored`,
/// anything else `Unknown`.
fn parse_single_redraw_event(cmd: &str, args: Vec<Value>) -> RedrawEvent {
    match cmd {
        "set_title" => RedrawEvent::SetTitle(
            args.into_iter()
                .map(|v| unwrap_str!(v[0]).to_string())
                .collect(),
        ),
        "grid_resize" => RedrawEvent::GridResize(
            args.into_iter().map(GridResize::from).collect(),
        ),
        "grid_cursor_goto" => RedrawEvent::GridCursorGoto(
            args.into_iter().map(GridCursorGoto::from).collect(),
        ),
        // grid_clear/grid_destroy carry just a grid id per entry.
        "grid_clear" => RedrawEvent::GridClear(
            args.into_iter().map(|v| unwrap_i64!(v[0])).collect(),
        ),
        "grid_destroy" => RedrawEvent::GridDestroy(
            args.into_iter().map(|v| unwrap_i64!(v[0])).collect(),
        ),
        "grid_scroll" => RedrawEvent::GridScroll(
            args.into_iter().map(GridScroll::from).collect(),
        ),
        "grid_line" => RedrawEvent::GridLine(
            args.into_iter().map(GridLineSegment::from).collect(),
        ),
        "default_colors_set" => RedrawEvent::DefaultColorsSet(
            args.into_iter().map(DefaultColorsSet::from).collect(),
        ),
        "hl_attr_define" => RedrawEvent::HlAttrDefine(
            args.into_iter().map(HlAttrDefine::from).collect(),
        ),
        "hl_group_set" => RedrawEvent::HlGroupSet(
            args.into_iter().map(HlGroupSet::from).collect(),
        ),
        "option_set" => RedrawEvent::OptionSet(
            args.into_iter().map(OptionSet::from).collect(),
        ),
        "mode_info_set" => RedrawEvent::ModeInfoSet(
            args.into_iter().map(ModeInfoSet::from).collect(),
        ),
        "mode_change" => RedrawEvent::ModeChange(
            args.into_iter().map(ModeChange::from).collect(),
        ),
        // busy_start/busy_stop collapse into a single boolean event.
        "busy_start" => RedrawEvent::SetBusy(true),
        "busy_stop" => RedrawEvent::SetBusy(false),
        "flush" => RedrawEvent::Flush(),
        "popupmenu_show" => RedrawEvent::PopupmenuShow(
            args.into_iter().map(PopupmenuShow::from).collect(),
        ),
        "popupmenu_hide" => RedrawEvent::PopupmenuHide(),
        "popupmenu_select" => RedrawEvent::PopupmenuSelect(
            args.into_iter().map(|s| unwrap_i64!(s[0])).collect(),
        ),
        "tabline_update" => RedrawEvent::TablineUpdate(
            args.into_iter().map(TablineUpdate::from).collect(),
        ),
        "cmdline_show" => RedrawEvent::CmdlineShow(
            args.into_iter().map(CmdlineShow::from).collect(),
        ),
        "cmdline_hide" => RedrawEvent::CmdlineHide(),
        "cmdline_pos" => RedrawEvent::CmdlinePos(
            args.into_iter().map(CmdlinePos::from).collect(),
        ),
        "cmdline_special_char" => RedrawEvent::CmdlineSpecialChar(
            args.into_iter().map(CmdlineSpecialChar::from).collect(),
        ),
        "cmdline_block_show" => RedrawEvent::CmdlineBlockShow(
            args.into_iter().map(CmdlineBlockShow::from).collect(),
        ),
        "cmdline_block_append" => RedrawEvent::CmdlineBlockAppend(
            args.into_iter().map(CmdlineBlockAppend::from).collect(),
        ),
        "cmdline_block_hide" => RedrawEvent::CmdlineBlockHide(),
        "win_pos" => RedrawEvent::WindowPos(
            args.into_iter().map(WindowPos::from).collect(),
        ),
        "win_float_pos" => RedrawEvent::WindowFloatPos(
            args.into_iter().map(WindowFloatPos::from).collect(),
        ),
        "win_external_pos" => RedrawEvent::WindowExternalPos(
            args.into_iter().map(WindowExternalPos::from).collect(),
        ),
        // win_hide/win_close each wrap a single grid id in an array.
        "win_hide" => RedrawEvent::WindowHide(
            args.into_iter()
                .map(|v| {
                    let v = unwrap_array!(v);
                    unwrap_i64!(v[0])
                })
                .collect(),
        ),
        "win_close" => RedrawEvent::WindowClose(
            args.into_iter()
                .map(|v| {
                    let v = unwrap_array!(v);
                    unwrap_i64!(v[0])
                })
                .collect(),
        ),
        "msg_set_pos" => RedrawEvent::MsgSetPos(
            args.into_iter().map(MsgSetPos::from).collect(),
        ),
        "mouse_on" | "mouse_off" => RedrawEvent::Ignored(cmd.to_string()),
        _ => RedrawEvent::Unknown(cmd.to_string()),
    }
}
/// Splits a batched `redraw` notification into typed events. Each entry
/// is `[cmd, args...]`; the tail items are the per-occurrence argument
/// lists handed to `parse_single_redraw_event`.
pub(crate) fn parse_redraw_event(args: Vec<Value>) -> Vec<RedrawEvent> {
    args.into_iter()
        .map(|args| {
            let args = unwrap_array!(args);
            let cmd = unwrap_str!(args[0]);
            parse_single_redraw_event(cmd, args[1..].to_vec())
        })
        .collect()
}
/// Parses a "Gnvim" RPC notification. `args[0]` is the command name, the
/// remaining elements are the command's positional arguments; a missing
/// or malformed argument yields an `Err` with a short description.
pub(crate) fn parse_gnvim_event(
    args: Vec<Value>,
) -> Result<GnvimEvent, String> {
    let cmd = try_str!(args.get(0).ok_or("No command given")?, "cmd");
    let res = match cmd {
        "CompletionMenuToggleInfo" => GnvimEvent::CompletionMenuToggleInfo,
        "CursorTooltipLoadStyle" => {
            let path =
                try_str!(args.get(1).ok_or("path missing")?, "style file path");
            GnvimEvent::CursorTooltipLoadStyle(path.to_string())
        }
        "CursorTooltipShow" => {
            let content = try_str!(
                args.get(1).ok_or("content missing")?,
                "tooltip content"
            );
            let row =
                try_u64!(args.get(2).ok_or("row missing")?, "tooltip row");
            let col =
                try_u64!(args.get(3).ok_or("col missing")?, "tooltip col");
            GnvimEvent::CursorTooltipShow(content.to_string(), row, col)
        }
        "CursorTooltipHide" => GnvimEvent::CursorTooltipHide,
        "CursorTooltipSetStyle" => {
            let style = try_str!(
                args.get(1).ok_or("path missing")?,
                "tooltip style path"
            );
            GnvimEvent::CursorTooltipSetStyle(style.to_string())
        }
        "PopupmenuSetWidth" => {
            let w =
                try_u64!(args.get(1).ok_or("width missing")?, "pmenu width");
            GnvimEvent::PopupmenuWidth(w)
        }
        "PopupmenuSetWidthDetails" => {
            let w =
                try_u64!(args.get(1).ok_or("width missing")?, "pmenu width");
            GnvimEvent::PopupmenuWidthDetails(w)
        }
        "PopupmenuShowMenuOnAllItems" => {
            let b = try_u64!(
                args.get(1).ok_or("bool missing")?,
                "pmenu show menu on all items"
            );
            // NOTE(review): this uses `b != 0` while the Enable* commands
            // below use `== 1` — intentional? verify against callers.
            GnvimEvent::PopupmenuShowMenuOnAllItems(b != 0)
        }
        "EnableCursorAnimations" => GnvimEvent::EnableCursorAnimations(
            try_u64!(
                args.get(1).ok_or("argument missing")?,
                "failed to parse enable cursor animations argument"
            ) == 1,
        ),
        "EnableExtTabline" => GnvimEvent::EnableExtTabline(
            try_u64!(
                args.get(1).ok_or("argument missing")?,
                "failed to parse enable ext tabline argument"
            ) == 1,
        ),
        "EnableExtCmdline" => GnvimEvent::EnableExtCmdline(
            try_u64!(
                args.get(1).ok_or("argument missing")?,
                "failed to parse enable ext cmdline argument"
            ) == 1,
        ),
        "EnableExtPopupmenu" => GnvimEvent::EnableExtPopupmenu(
            try_u64!(
                args.get(1).ok_or("argument missing")?,
                "failed to parse enable ext popupmenu argument"
            ) == 1,
        ),
        _ => GnvimEvent::Unknown(String::from(cmd)),
    };
    Ok(res)
}
/// Flattens a msgpack map `Value` into a `HashMap` of borrowed string
/// keys to borrowed values; lifetimes are tied to the input `Value`.
fn map_to_hash<'a>(val: &'a Value) -> HashMap<&'a str, &'a Value> {
    let mut h = HashMap::new();
    for (prop, val) in unwrap_map!(val) {
        h.insert(unwrap_str!(prop), val);
    }
    h
}
|
use getopts::{Matches, Options};
use std::fs::File;
use std::{env, io, process};
use vigenere::Config;
/// CLI entry point: parses the options, wires stdin/stdout or files, and
/// runs the Vigenère cipher. Exits with status 1 on any failure.
fn main() {
    let args = env::args().collect::<Vec<String>>();
    let mut opts = Options::new();
    opts.optflag("h", "help", "print this help menu");
    opts.optflag("d", "decipher", "decipher the message instead of ciphering");
    opts.optopt("k", "key", "keyword to be used for Vigenere cipher", "STRING");
    opts.optopt("i", "input", "path to the input file", "FILE");
    opts.optopt("o", "output", "path to the output file", "FILE");
    // Get matches; skip args[0] (the program name) so it is not treated
    // as a free argument by getopts.
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        Err(f) => {
            eprintln!("Failed to parse parameters with: {}", f.to_string());
            process::exit(1);
        }
    };
    // Display help and exit
    if matches.opt_present("h") {
        println!("{}", generate_usage(&opts));
        process::exit(0);
    }
    // The key is mandatory; everything else has a default.
    let keyword = match matches.opt_str("k") {
        Some(s) => s,
        None => {
            eprintln!("ERROR: Ciphering keyword must be specified");
            process::exit(1);
        }
    };
    let mut source = get_source(&matches);
    let mut sink = get_sink(&matches);
    let config = Config {
        keyword,
        source: &mut source,
        sink: &mut sink,
        decipher: matches.opt_present("d"),
    };
    // Run processing
    match vigenere::run(config) {
        Ok(_) => process::exit(0),
        Err(f) => {
            eprintln!("Failed to process cipher with: {}", f);
            process::exit(1);
        }
    };
}
/// Opens a file for input, or connects to STDIN.
///
/// Panics (via `expect`) if the `-i` path cannot be opened.
fn get_source(matches: &Matches) -> Box<dyn io::Read> {
    match matches.opt_str("i") {
        None => Box::new(io::stdin()),
        Some(path) => Box::new(File::open(path).expect("Failed to open input file")),
    }
}

/// Creates or truncates (if exists already) a file for output,
/// or connects to STDOUT.
///
/// Panics (via `expect`) if the `-o` path cannot be created.
fn get_sink(matches: &Matches) -> Box<dyn io::Write> {
    match matches.opt_str("o") {
        None => Box::new(io::stdout()),
        Some(path) => Box::new(File::create(path).expect("Failed to open output file")),
    }
}
/// Generates usage information string out of options object.
/// The brief header precedes the per-option list produced by getopts.
fn generate_usage(opts: &Options) -> String {
    let brief = "USAGE: vigenere --key STRING [--input FILE] [--output FILE] [--decipher]
\nOr just pipe text to the program to get (un)ciphered text to STDOUT.";
    opts.usage(&brief)
}
|
use super::deserialize::{
adjust_mods, str_to_datetime, str_to_f32, str_to_maybe_datetime, str_to_maybe_f32,
};
use crate::{
util::{osu::ModSelection, CountryCode},
};
use chrono::{DateTime, Utc};
use rosu_v2::prelude::{GameMode, GameMods, Grade, RankStatus, Username};
use serde::{de::Error, Deserialize, Deserializer};
use std::{fmt, str::FromStr};
/// A player entry from the osustats leaderboard API.
#[derive(Debug)]
pub struct OsuStatsPlayer {
    pub user_id: u32,
    /// Number of counted scores for this player.
    pub count: u32,
    pub username: Username,
}

// Private helpers mirroring the raw JSON layout; `count` arrives as a
// string and is converted in the manual `Deserialize` impl below.
#[derive(Deserialize)]
struct Outer {
    #[serde(rename = "userId")]
    user_id: u32,
    count: String,
    #[serde(rename = "osu_user")]
    user: Inner,
}

#[derive(serde::Deserialize)]
pub struct Inner {
    #[serde(rename = "userName")]
    username: Username,
}

impl<'de> Deserialize<'de> for OsuStatsPlayer {
    /// Deserializes through `Outer`, parsing the stringly-typed `count`
    /// into a `u32` and flattening the nested user object.
    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        let helper = Outer::deserialize(d)?;
        Ok(OsuStatsPlayer {
            user_id: helper.user_id,
            count: u32::from_str(&helper.count).map_err(D::Error::custom)?,
            username: helper.user.username,
        })
    }
}
/// A single score row from the osustats API; field renames and the
/// string-to-number deserializers adapt the API's JSON shape.
#[derive(Debug, Deserialize)]
pub struct OsuStatsScore {
    #[serde(rename = "userId")]
    pub user_id: u32,
    /// Leaderboard position of the score.
    pub position: u32,
    #[serde(rename = "rank")]
    pub grade: Grade,
    pub score: u32,
    #[serde(rename = "maxCombo")]
    pub max_combo: u32,
    // Accuracy arrives as a string and is parsed to f32.
    #[serde(deserialize_with = "str_to_f32")]
    pub accuracy: f32,
    pub count300: u32,
    pub count100: u32,
    pub count50: u32,
    #[serde(rename = "countKatu")]
    pub count_katu: u32,
    #[serde(rename = "countGeki")]
    pub count_geki: u32,
    #[serde(rename = "countMiss")]
    pub count_miss: u32,
    #[serde(rename = "enabledMods", deserialize_with = "adjust_mods")]
    pub enabled_mods: GameMods,
    #[serde(rename = "playDate", deserialize_with = "str_to_datetime")]
    pub date: DateTime<Utc>,
    /// pp value; absent for unranked/loved scores.
    #[serde(rename = "ppValue")]
    pub pp: Option<f32>,
    #[serde(rename = "beatmap")]
    pub map: OsuStatsMap,
}
/// Beatmap metadata embedded in an osustats score; several numeric fields
/// arrive as strings and are parsed via the custom deserializers.
#[derive(Debug, Deserialize)]
pub struct OsuStatsMap {
    #[serde(rename = "beatmapId")]
    pub beatmap_id: u32,
    #[serde(rename = "beatmapSetId")]
    pub beatmapset_id: u32,
    #[serde(rename = "approved")]
    pub approval_status: RankStatus,
    #[serde(rename = "lastUpdated", deserialize_with = "str_to_datetime")]
    pub last_updated: DateTime<Utc>,
    /// Ranked/approved date; `None` for maps never approved.
    #[serde(rename = "approvedDate", deserialize_with = "str_to_maybe_datetime")]
    pub approved_date: Option<DateTime<Utc>>,
    /// Drain time in seconds.
    #[serde(rename = "hitLength")]
    pub seconds_drain: u32,
    /// Total map length in seconds.
    #[serde(rename = "totalLength")]
    pub seconds_total: u32,
    pub mode: GameMode,
    /// Difficulty name.
    pub version: String,
    pub artist: String,
    pub title: String,
    pub creator: Username,
    pub bpm: f32,
    pub source: String,
    /// Star rating; may be missing in the payload.
    #[serde(rename = "diffRating", deserialize_with = "str_to_maybe_f32")]
    pub stars: Option<f32>,
    #[serde(rename = "diffSize", deserialize_with = "str_to_f32")]
    pub diff_cs: f32,
    #[serde(rename = "diffOverall", deserialize_with = "str_to_f32")]
    pub diff_od: f32,
    #[serde(rename = "diffApproach", deserialize_with = "str_to_f32")]
    pub diff_ar: f32,
    #[serde(rename = "diffDrain", deserialize_with = "str_to_f32")]
    pub diff_hp: f32,
    #[serde(rename = "maxCombo")]
    pub max_combo: Option<u32>,
}
/// Sort column for osustats queries; the discriminants are the numeric
/// values the API expects.
#[derive(Copy, Clone, Debug)]
pub enum OsuStatsOrder {
    PlayDate = 0,
    Pp = 1,
    Rank = 2,
    Accuracy = 3,
    Combo = 4,
    Score = 5,
    Misses = 6,
}

impl Default for OsuStatsOrder {
    /// The API's default ordering: most recent plays first.
    fn default() -> Self {
        Self::PlayDate
    }
}

impl fmt::Display for OsuStatsOrder {
    /// Displays the variant name (reuses the `Debug` rendering).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
/// Query parameters for an osustats global-scores request.
#[derive(Debug)]
pub struct OsuStatsParams {
    pub username: Username,
    pub mode: GameMode,
    /// 1-based result page.
    pub page: usize,
    /// Inclusive leaderboard-position bounds.
    pub rank_min: usize,
    pub rank_max: usize,
    /// Inclusive accuracy bounds in percent.
    pub acc_min: f32,
    pub acc_max: f32,
    pub order: OsuStatsOrder,
    pub mods: Option<ModSelection>,
    pub descending: bool,
}

impl OsuStatsParams {
    /// Creates params for `username` with the default filters: osu!std,
    /// first page, ranks 1-100, accuracy 0-100, newest first.
    pub fn new(username: impl Into<Username>) -> Self {
        Self {
            username: username.into(),
            mode: GameMode::STD,
            page: 1,
            rank_min: 1,
            rank_max: 100,
            acc_min: 0.0,
            acc_max: 100.0,
            order: OsuStatsOrder::default(),
            mods: None,
            descending: true,
        }
    }

    /// Builder-style setter for the game mode.
    pub fn mode(mut self, mode: GameMode) -> Self {
        self.mode = mode;
        self
    }
}
/// Query parameters for an osustats player-list request.
#[derive(Debug)]
pub struct OsuStatsListParams {
    /// Optional country filter.
    pub country: Option<CountryCode>,
    pub mode: GameMode,
    /// 1-based result page.
    pub page: usize,
    /// Inclusive leaderboard-position bounds.
    pub rank_min: usize,
    pub rank_max: usize,
}
|
//! ```cargo
//! [dependencies]
//! rboehm = { git = "https://github.com/softdevteam/rboehm" }
//! ```
extern crate rboehm;
use rboehm::gc::Gc;
use rboehm::BoehmAllocator;
#[global_allocator]
static GLOBAL_ALLOCATOR: BoehmAllocator = BoehmAllocator;
/// Payload type for the benchmark; `Clone` so the source vec can be
/// duplicated per allocation.
#[derive(Clone)]
struct S {
    x: usize,
}

/// Allocation stress loop for the Boehm collector: clones a one-element
/// vec into a fresh `Gc` cell five million times.
fn main() {
    let mut x: Vec<S> = Vec::with_capacity(1);
    x.push(S { x: 123 });
    for _ in 0..5000000 {
        // Each Gc allocation is dropped immediately, leaving garbage for
        // the collector to reclaim.
        let _obj = Gc::new(x.clone());
    }
}
|
use crate::base::message_types::{ArchivedRkyvGenericResponse, RkyvGenericResponse};
use crate::base::ErrorCode;
use rkyv::AlignedVec;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::net::{IpAddr, SocketAddr};
/// Validates and interprets an rkyv-serialized generic response buffer,
/// turning an embedded error response into `Err(code)`.
///
/// NOTE(review): the `unwrap` panics if `buffer` fails rkyv validation —
/// acceptable only if buffers always come from trusted peers; verify.
pub fn response_or_error(buffer: &AlignedVec) -> Result<&ArchivedRkyvGenericResponse, ErrorCode> {
    let response = rkyv::check_archived_root::<RkyvGenericResponse>(buffer).unwrap();
    if let Some(error_code) = response.as_error_response() {
        Err(error_code)
    } else {
        Ok(response)
    }
}
/// Returns whether the node at `node_index` hosts a replica of
/// `raft_group_id`, given `total_nodes` nodes split into consecutive
/// groups of `replicas_per_raft_group` with round-robin raft-group
/// assignment.
///
/// # Panics
/// If `total_nodes` is not a multiple of `replicas_per_raft_group`.
pub fn node_contains_raft_group(
    node_index: usize,
    total_nodes: usize,
    raft_group_id: u16,
    replicas_per_raft_group: usize,
) -> bool {
    assert_eq!(
        total_nodes % replicas_per_raft_group,
        0,
        "{total_nodes} % {replicas_per_raft_group} != 0",
    );
    // Partition the nodes into contiguous groups, each able to host the
    // replicas of one raft group.
    let group_of_node = node_index / replicas_per_raft_group;
    let group_count = total_nodes / replicas_per_raft_group;
    // Raft groups are spread round-robin across the node groups.
    (raft_group_id % group_count as u16) as usize == group_of_node
}
/// Unix-style permission check: does the caller (`uid`/`gid`) have the
/// rights in `access_mask` (an `F_OK`/`R_OK`/`W_OK`/`X_OK` bitmask) on a
/// file owned by `file_uid`/`file_gid` with mode bits `file_mode`?
pub fn check_access(
    file_uid: u32,
    file_gid: u32,
    file_mode: u16,
    uid: u32,
    gid: u32,
    mut access_mask: i32,
) -> bool {
    // F_OK only asks whether the file exists; reaching this code means it does.
    if access_mask == libc::F_OK {
        return true;
    }
    let mode = i32::from(file_mode);
    // root is allowed to read & write anything
    if uid == 0 {
        // root only allowed to exec if one of the X bits is set
        access_mask &= libc::X_OK;
        access_mask -= access_mask & (mode >> 6);
        access_mask -= access_mask & (mode >> 3);
        access_mask -= access_mask & mode;
        return access_mask == 0;
    }
    // Pick the relevant permission triplet: owner, group, or other.
    let triplet = if uid == file_uid {
        mode >> 6
    } else if gid == file_gid {
        mode >> 3
    } else {
        mode
    };
    // Granted iff every requested bit is present in the triplet.
    access_mask & triplet == access_mask
}
/// Derives a node id from a socket address.
///
/// IPv4: the four octets and the port are packed into the low 56 bits
/// (octets at bits 40/32/24/16, port in the low 16). IPv6: a hash of the
/// address and port.
pub fn node_id_from_address(address: &SocketAddr) -> u64 {
    match address.ip() {
        IpAddr::V4(v4) => {
            let [a, b, c, d] = v4.octets();
            (u64::from(a) << 40)
                | (u64::from(b) << 32)
                | (u64::from(c) << 24)
                | (u64::from(d) << 16)
                | u64::from(address.port())
        }
        IpAddr::V6(v6) => {
            // TODO: there could be collisions. Should be generated randomly and then dynamically discovered
            let mut hasher = DefaultHasher::new();
            v6.hash(&mut hasher);
            address.port().hash(&mut hasher);
            hasher.finish()
        }
    }
}
|
/// Takes ownership of `s` and hands it back together with its byte length,
/// demonstrating move-in/move-out of a `String`.
fn calculate_length(s: String) -> (String, usize) {
    let n = s.len();
    (s, n)
}
/// Returns the byte length of the borrowed string.
///
/// Takes `&str` rather than `&String`: a `&String` argument coerces to
/// `&str` automatically, so existing callers keep working, and the
/// function now also accepts string literals and slices.
fn calculate_length_with_reference(s: &str) -> usize {
    s.len()
}
// Kept for the tutorial: this version does not compile because `s` is an
// immutable reference and `push_str` needs `&mut`.
//fn change_string(s: &String) {
//    s.push_str(" University");
//}
/// Appends " University" to `s` in place through a mutable reference and
/// also returns a fresh copy of the new contents.
fn change_string_mut(s: &mut String) -> String {
    s.push_str(" University");
    s.clone()
}
/// Tutorial entry point: demonstrates ownership transfer, shared
/// borrowing, and mutable borrowing with `String`.
fn main() {
    let s1: String = "Kenta".to_string();
    // s1 is moved into the call; ownership comes back via the tuple.
    let (s2, s1_length) = calculate_length(s1);
    println!("s2 is {}, and the length is {}", s2, s1_length);
    //reference
    let s3 = "ritsumeikan".to_string();
    // Borrowing leaves s3 usable afterwards.
    let s3_length = calculate_length_with_reference(&s3);
    println!("s3 is {}, and the length is {}", s3, s3_length);
    let mut rits: String = String::from("ritsumeikan");
    // Calling change_string(&rits) would be a compile error: an immutable
    // borrow cannot mutate the String.
    //change_string(&rits);
    // &mut reference
    change_string_mut(&mut rits);
    println!("{}", rits);
}
|
pub mod shader;
pub mod shader_program;
pub mod texture;
pub mod texture_builder;
pub mod uniform;
|
use serde::{Deserialize, Serialize};
/// Options accepted by a delete request.
#[derive(Clone, Default, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeleteParams {
    /// Conditions the target must satisfy for the delete to proceed;
    /// omitted from the wire format when `None`.
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub preconditions: Option<Preconditions>,
}

/// Identity constraints for a delete; empty strings are treated as
/// "no constraint" and skipped during serialization.
#[derive(Clone, Default, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Preconditions {
    #[serde(default)]
    #[serde(skip_serializing_if = "String::is_empty")]
    pub uid: String,
    #[serde(default)]
    #[serde(skip_serializing_if = "String::is_empty")]
    pub resource_version: String,
}

/// Options accepted by a list request.
#[derive(Clone, Default, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ListParams {
    /// Label selector; empty means "no filter" and is skipped.
    #[serde(default)]
    #[serde(skip_serializing_if = "String::is_empty")]
    pub labels: String,
    /// Maximum number of items to return.
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<usize>,
    /// Number of items to skip before returning results.
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub offset: Option<usize>,
}
|
/// An enum to represent all characters in the Duployan block.
///
/// Covers the assigned code points U+1BC00–U+1BC6A, U+1BC70–U+1BC7C,
/// U+1BC80–U+1BC88, U+1BC90–U+1BC99 and U+1BC9C–U+1BC9E; unassigned
/// code points in the block have no variant.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Duployan {
    /// \u{1bc00}: '𛰀'
    LetterH,
    /// \u{1bc01}: '𛰁'
    LetterX,
    /// \u{1bc02}: '𛰂'
    LetterP,
    /// \u{1bc03}: '𛰃'
    LetterT,
    /// \u{1bc04}: '𛰄'
    LetterF,
    /// \u{1bc05}: '𛰅'
    LetterK,
    /// \u{1bc06}: '𛰆'
    LetterL,
    /// \u{1bc07}: '𛰇'
    LetterB,
    /// \u{1bc08}: '𛰈'
    LetterD,
    /// \u{1bc09}: '𛰉'
    LetterV,
    /// \u{1bc0a}: '𛰊'
    LetterG,
    /// \u{1bc0b}: '𛰋'
    LetterR,
    /// \u{1bc0c}: '𛰌'
    LetterPN,
    /// \u{1bc0d}: '𛰍'
    LetterDS,
    /// \u{1bc0e}: '𛰎'
    LetterFN,
    /// \u{1bc0f}: '𛰏'
    LetterKM,
    /// \u{1bc10}: '𛰐'
    LetterRS,
    /// \u{1bc11}: '𛰑'
    LetterTh,
    /// \u{1bc12}: '𛰒'
    LetterSloanDh,
    /// \u{1bc13}: '𛰓'
    LetterDh,
    /// \u{1bc14}: '𛰔'
    LetterKk,
    /// \u{1bc15}: '𛰕'
    LetterSloanJ,
    /// \u{1bc16}: '𛰖'
    LetterHl,
    /// \u{1bc17}: '𛰗'
    LetterLh,
    /// \u{1bc18}: '𛰘'
    LetterRh,
    /// \u{1bc19}: '𛰙'
    LetterM,
    /// \u{1bc1a}: '𛰚'
    LetterN,
    /// \u{1bc1b}: '𛰛'
    LetterJ,
    /// \u{1bc1c}: '𛰜'
    LetterS,
    /// \u{1bc1d}: '𛰝'
    LetterMN,
    /// \u{1bc1e}: '𛰞'
    LetterNM,
    /// \u{1bc1f}: '𛰟'
    LetterJM,
    /// \u{1bc20}: '𛰠'
    LetterSJ,
    /// \u{1bc21}: '𛰡'
    LetterMWithDot,
    /// \u{1bc22}: '𛰢'
    LetterNWithDot,
    /// \u{1bc23}: '𛰣'
    LetterJWithDot,
    /// \u{1bc24}: '𛰤'
    LetterJWithDotsInsideAndAbove,
    /// \u{1bc25}: '𛰥'
    LetterSWithDot,
    /// \u{1bc26}: '𛰦'
    LetterSWithDotBelow,
    /// \u{1bc27}: '𛰧'
    LetterMS,
    /// \u{1bc28}: '𛰨'
    LetterNS,
    /// \u{1bc29}: '𛰩'
    LetterJS,
    /// \u{1bc2a}: '𛰪'
    LetterSS,
    /// \u{1bc2b}: '𛰫'
    LetterMNS,
    /// \u{1bc2c}: '𛰬'
    LetterNMS,
    /// \u{1bc2d}: '𛰭'
    LetterJMS,
    /// \u{1bc2e}: '𛰮'
    LetterSJS,
    /// \u{1bc2f}: '𛰯'
    LetterJSWithDot,
    /// \u{1bc30}: '𛰰'
    LetterJN,
    /// \u{1bc31}: '𛰱'
    LetterJNS,
    /// \u{1bc32}: '𛰲'
    LetterST,
    /// \u{1bc33}: '𛰳'
    LetterSTR,
    /// \u{1bc34}: '𛰴'
    LetterSP,
    /// \u{1bc35}: '𛰵'
    LetterSPR,
    /// \u{1bc36}: '𛰶'
    LetterTS,
    /// \u{1bc37}: '𛰷'
    LetterTRS,
    /// \u{1bc38}: '𛰸'
    LetterW,
    /// \u{1bc39}: '𛰹'
    LetterWh,
    /// \u{1bc3a}: '𛰺'
    LetterWR,
    /// \u{1bc3b}: '𛰻'
    LetterSN,
    /// \u{1bc3c}: '𛰼'
    LetterSM,
    /// \u{1bc3d}: '𛰽'
    LetterKRS,
    /// \u{1bc3e}: '𛰾'
    LetterGRS,
    /// \u{1bc3f}: '𛰿'
    LetterSK,
    /// \u{1bc40}: '𛱀'
    LetterSKR,
    /// \u{1bc41}: '𛱁'
    LetterA,
    /// \u{1bc42}: '𛱂'
    LetterSloanOw,
    /// \u{1bc43}: '𛱃'
    LetterOa,
    /// \u{1bc44}: '𛱄'
    LetterO,
    /// \u{1bc45}: '𛱅'
    LetterAou,
    /// \u{1bc46}: '𛱆'
    LetterI,
    /// \u{1bc47}: '𛱇'
    LetterE,
    /// \u{1bc48}: '𛱈'
    LetterIe,
    /// \u{1bc49}: '𛱉'
    LetterShortI,
    /// \u{1bc4a}: '𛱊'
    LetterUi,
    /// \u{1bc4b}: '𛱋'
    LetterEe,
    /// \u{1bc4c}: '𛱌'
    LetterSloanEh,
    /// \u{1bc4d}: '𛱍'
    LetterRomanianI,
    /// \u{1bc4e}: '𛱎'
    LetterSloanEe,
    /// \u{1bc4f}: '𛱏'
    LetterLongI,
    /// \u{1bc50}: '𛱐'
    LetterYe,
    /// \u{1bc51}: '𛱑'
    LetterU,
    /// \u{1bc52}: '𛱒'
    LetterEu,
    /// \u{1bc53}: '𛱓'
    LetterXw,
    /// \u{1bc54}: '𛱔'
    LetterUN,
    /// \u{1bc55}: '𛱕'
    LetterLongU,
    /// \u{1bc56}: '𛱖'
    LetterRomanianU,
    /// \u{1bc57}: '𛱗'
    LetterUh,
    /// \u{1bc58}: '𛱘'
    LetterSloanU,
    /// \u{1bc59}: '𛱙'
    LetterOoh,
    /// \u{1bc5a}: '𛱚'
    LetterOw,
    /// \u{1bc5b}: '𛱛'
    LetterOu,
    /// \u{1bc5c}: '𛱜'
    LetterWa,
    /// \u{1bc5d}: '𛱝'
    LetterWo,
    /// \u{1bc5e}: '𛱞'
    LetterWi,
    /// \u{1bc5f}: '𛱟'
    LetterWei,
    /// \u{1bc60}: '𛱠'
    LetterWow,
    /// \u{1bc61}: '𛱡'
    LetterNasalU,
    /// \u{1bc62}: '𛱢'
    LetterNasalO,
    /// \u{1bc63}: '𛱣'
    LetterNasalI,
    /// \u{1bc64}: '𛱤'
    LetterNasalA,
    /// \u{1bc65}: '𛱥'
    LetterPerninAn,
    /// \u{1bc66}: '𛱦'
    LetterPerninAm,
    /// \u{1bc67}: '𛱧'
    LetterSloanEn,
    /// \u{1bc68}: '𛱨'
    LetterSloanAn,
    /// \u{1bc69}: '𛱩'
    LetterSloanOn,
    /// \u{1bc6a}: '𛱪'
    LetterVocalicM,
    /// \u{1bc70}: '𛱰'
    AffixLeftHorizontalSecant,
    /// \u{1bc71}: '𛱱'
    AffixMidHorizontalSecant,
    /// \u{1bc72}: '𛱲'
    AffixRightHorizontalSecant,
    /// \u{1bc73}: '𛱳'
    AffixLowVerticalSecant,
    /// \u{1bc74}: '𛱴'
    AffixMidVerticalSecant,
    /// \u{1bc75}: '𛱵'
    AffixHighVerticalSecant,
    /// \u{1bc76}: '𛱶'
    AffixAttachedSecant,
    /// \u{1bc77}: '𛱷'
    AffixAttachedLeftDashToDashRightSecant,
    /// \u{1bc78}: '𛱸'
    AffixAttachedTangent,
    /// \u{1bc79}: '𛱹'
    AffixAttachedTail,
    /// \u{1bc7a}: '𛱺'
    AffixAttachedEHook,
    /// \u{1bc7b}: '𛱻'
    AffixAttachedIHook,
    /// \u{1bc7c}: '𛱼'
    AffixAttachedTangentHook,
    /// \u{1bc80}: '𛲀'
    AffixHighAcute,
    /// \u{1bc81}: '𛲁'
    AffixHighTightAcute,
    /// \u{1bc82}: '𛲂'
    AffixHighGrave,
    /// \u{1bc83}: '𛲃'
    AffixHighLongGrave,
    /// \u{1bc84}: '𛲄'
    AffixHighDot,
    /// \u{1bc85}: '𛲅'
    AffixHighCircle,
    /// \u{1bc86}: '𛲆'
    AffixHighLine,
    /// \u{1bc87}: '𛲇'
    AffixHighWave,
    /// \u{1bc88}: '𛲈'
    AffixHighVertical,
    /// \u{1bc90}: '𛲐'
    AffixLowAcute,
    /// \u{1bc91}: '𛲑'
    AffixLowTightAcute,
    /// \u{1bc92}: '𛲒'
    AffixLowGrave,
    /// \u{1bc93}: '𛲓'
    AffixLowLongGrave,
    /// \u{1bc94}: '𛲔'
    AffixLowDot,
    /// \u{1bc95}: '𛲕'
    AffixLowCircle,
    /// \u{1bc96}: '𛲖'
    AffixLowLine,
    /// \u{1bc97}: '𛲗'
    AffixLowWave,
    /// \u{1bc98}: '𛲘'
    AffixLowVertical,
    /// \u{1bc99}: '𛲙'
    AffixLowArrow,
    /// \u{1bc9c}: '𛲜'
    SignOWithCross,
    /// \u{1bc9d}: '𛲝'
    ThickLetterSelector,
    /// \u{1bc9e}: '𛲞'
    DoubleMark,
}
/// Converts a `Duployan` letter to its Unicode character.
///
/// Implemented as `From` rather than `Into` (clippy `from_over_into`):
/// the standard blanket impl still provides `Into<char> for Duployan`,
/// so every existing `.into()` call site keeps working.
impl From<Duployan> for char {
    fn from(d: Duployan) -> char {
        match d {
            Duployan::LetterH => '𛰀',
            Duployan::LetterX => '𛰁',
            Duployan::LetterP => '𛰂',
            Duployan::LetterT => '𛰃',
            Duployan::LetterF => '𛰄',
            Duployan::LetterK => '𛰅',
            Duployan::LetterL => '𛰆',
            Duployan::LetterB => '𛰇',
            Duployan::LetterD => '𛰈',
            Duployan::LetterV => '𛰉',
            Duployan::LetterG => '𛰊',
            Duployan::LetterR => '𛰋',
            Duployan::LetterPN => '𛰌',
            Duployan::LetterDS => '𛰍',
            Duployan::LetterFN => '𛰎',
            Duployan::LetterKM => '𛰏',
            Duployan::LetterRS => '𛰐',
            Duployan::LetterTh => '𛰑',
            Duployan::LetterSloanDh => '𛰒',
            Duployan::LetterDh => '𛰓',
            Duployan::LetterKk => '𛰔',
            Duployan::LetterSloanJ => '𛰕',
            Duployan::LetterHl => '𛰖',
            Duployan::LetterLh => '𛰗',
            Duployan::LetterRh => '𛰘',
            Duployan::LetterM => '𛰙',
            Duployan::LetterN => '𛰚',
            Duployan::LetterJ => '𛰛',
            Duployan::LetterS => '𛰜',
            Duployan::LetterMN => '𛰝',
            Duployan::LetterNM => '𛰞',
            Duployan::LetterJM => '𛰟',
            Duployan::LetterSJ => '𛰠',
            Duployan::LetterMWithDot => '𛰡',
            Duployan::LetterNWithDot => '𛰢',
            Duployan::LetterJWithDot => '𛰣',
            Duployan::LetterJWithDotsInsideAndAbove => '𛰤',
            Duployan::LetterSWithDot => '𛰥',
            Duployan::LetterSWithDotBelow => '𛰦',
            Duployan::LetterMS => '𛰧',
            Duployan::LetterNS => '𛰨',
            Duployan::LetterJS => '𛰩',
            Duployan::LetterSS => '𛰪',
            Duployan::LetterMNS => '𛰫',
            Duployan::LetterNMS => '𛰬',
            Duployan::LetterJMS => '𛰭',
            Duployan::LetterSJS => '𛰮',
            Duployan::LetterJSWithDot => '𛰯',
            Duployan::LetterJN => '𛰰',
            Duployan::LetterJNS => '𛰱',
            Duployan::LetterST => '𛰲',
            Duployan::LetterSTR => '𛰳',
            Duployan::LetterSP => '𛰴',
            Duployan::LetterSPR => '𛰵',
            Duployan::LetterTS => '𛰶',
            Duployan::LetterTRS => '𛰷',
            Duployan::LetterW => '𛰸',
            Duployan::LetterWh => '𛰹',
            Duployan::LetterWR => '𛰺',
            Duployan::LetterSN => '𛰻',
            Duployan::LetterSM => '𛰼',
            Duployan::LetterKRS => '𛰽',
            Duployan::LetterGRS => '𛰾',
            Duployan::LetterSK => '𛰿',
            Duployan::LetterSKR => '𛱀',
            Duployan::LetterA => '𛱁',
            Duployan::LetterSloanOw => '𛱂',
            Duployan::LetterOa => '𛱃',
            Duployan::LetterO => '𛱄',
            Duployan::LetterAou => '𛱅',
            Duployan::LetterI => '𛱆',
            Duployan::LetterE => '𛱇',
            Duployan::LetterIe => '𛱈',
            Duployan::LetterShortI => '𛱉',
            Duployan::LetterUi => '𛱊',
            Duployan::LetterEe => '𛱋',
            Duployan::LetterSloanEh => '𛱌',
            Duployan::LetterRomanianI => '𛱍',
            Duployan::LetterSloanEe => '𛱎',
            Duployan::LetterLongI => '𛱏',
            Duployan::LetterYe => '𛱐',
            Duployan::LetterU => '𛱑',
            Duployan::LetterEu => '𛱒',
            Duployan::LetterXw => '𛱓',
            Duployan::LetterUN => '𛱔',
            Duployan::LetterLongU => '𛱕',
            Duployan::LetterRomanianU => '𛱖',
            Duployan::LetterUh => '𛱗',
            Duployan::LetterSloanU => '𛱘',
            Duployan::LetterOoh => '𛱙',
            Duployan::LetterOw => '𛱚',
            Duployan::LetterOu => '𛱛',
            Duployan::LetterWa => '𛱜',
            Duployan::LetterWo => '𛱝',
            Duployan::LetterWi => '𛱞',
            Duployan::LetterWei => '𛱟',
            Duployan::LetterWow => '𛱠',
            Duployan::LetterNasalU => '𛱡',
            Duployan::LetterNasalO => '𛱢',
            Duployan::LetterNasalI => '𛱣',
            Duployan::LetterNasalA => '𛱤',
            Duployan::LetterPerninAn => '𛱥',
            Duployan::LetterPerninAm => '𛱦',
            Duployan::LetterSloanEn => '𛱧',
            Duployan::LetterSloanAn => '𛱨',
            Duployan::LetterSloanOn => '𛱩',
            Duployan::LetterVocalicM => '𛱪',
            Duployan::AffixLeftHorizontalSecant => '𛱰',
            Duployan::AffixMidHorizontalSecant => '𛱱',
            Duployan::AffixRightHorizontalSecant => '𛱲',
            Duployan::AffixLowVerticalSecant => '𛱳',
            Duployan::AffixMidVerticalSecant => '𛱴',
            Duployan::AffixHighVerticalSecant => '𛱵',
            Duployan::AffixAttachedSecant => '𛱶',
            Duployan::AffixAttachedLeftDashToDashRightSecant => '𛱷',
            Duployan::AffixAttachedTangent => '𛱸',
            Duployan::AffixAttachedTail => '𛱹',
            Duployan::AffixAttachedEHook => '𛱺',
            Duployan::AffixAttachedIHook => '𛱻',
            Duployan::AffixAttachedTangentHook => '𛱼',
            Duployan::AffixHighAcute => '𛲀',
            Duployan::AffixHighTightAcute => '𛲁',
            Duployan::AffixHighGrave => '𛲂',
            Duployan::AffixHighLongGrave => '𛲃',
            Duployan::AffixHighDot => '𛲄',
            Duployan::AffixHighCircle => '𛲅',
            Duployan::AffixHighLine => '𛲆',
            Duployan::AffixHighWave => '𛲇',
            Duployan::AffixHighVertical => '𛲈',
            Duployan::AffixLowAcute => '𛲐',
            Duployan::AffixLowTightAcute => '𛲑',
            Duployan::AffixLowGrave => '𛲒',
            Duployan::AffixLowLongGrave => '𛲓',
            Duployan::AffixLowDot => '𛲔',
            Duployan::AffixLowCircle => '𛲕',
            Duployan::AffixLowLine => '𛲖',
            Duployan::AffixLowWave => '𛲗',
            Duployan::AffixLowVertical => '𛲘',
            Duployan::AffixLowArrow => '𛲙',
            Duployan::SignOWithCross => '𛲜',
            Duployan::ThickLetterSelector => '𛲝',
            Duployan::DoubleMark => '𛲞',
        }
    }
}
/// Parse a `char` belonging to the Duployan Unicode block back into its
/// enum variant.
///
/// Returns `Err(())` for every character that is not one of the assigned
/// Duployan code points matched below (including unassigned gaps inside
/// the block).
impl std::convert::TryFrom<char> for Duployan {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '𛰀' => Ok(Duployan::LetterH),
            '𛰁' => Ok(Duployan::LetterX),
            '𛰂' => Ok(Duployan::LetterP),
            '𛰃' => Ok(Duployan::LetterT),
            '𛰄' => Ok(Duployan::LetterF),
            '𛰅' => Ok(Duployan::LetterK),
            '𛰆' => Ok(Duployan::LetterL),
            '𛰇' => Ok(Duployan::LetterB),
            '𛰈' => Ok(Duployan::LetterD),
            '𛰉' => Ok(Duployan::LetterV),
            '𛰊' => Ok(Duployan::LetterG),
            '𛰋' => Ok(Duployan::LetterR),
            '𛰌' => Ok(Duployan::LetterPN),
            '𛰍' => Ok(Duployan::LetterDS),
            '𛰎' => Ok(Duployan::LetterFN),
            '𛰏' => Ok(Duployan::LetterKM),
            '𛰐' => Ok(Duployan::LetterRS),
            '𛰑' => Ok(Duployan::LetterTh),
            '𛰒' => Ok(Duployan::LetterSloanDh),
            '𛰓' => Ok(Duployan::LetterDh),
            '𛰔' => Ok(Duployan::LetterKk),
            '𛰕' => Ok(Duployan::LetterSloanJ),
            '𛰖' => Ok(Duployan::LetterHl),
            '𛰗' => Ok(Duployan::LetterLh),
            '𛰘' => Ok(Duployan::LetterRh),
            '𛰙' => Ok(Duployan::LetterM),
            '𛰚' => Ok(Duployan::LetterN),
            '𛰛' => Ok(Duployan::LetterJ),
            '𛰜' => Ok(Duployan::LetterS),
            '𛰝' => Ok(Duployan::LetterMN),
            '𛰞' => Ok(Duployan::LetterNM),
            '𛰟' => Ok(Duployan::LetterJM),
            '𛰠' => Ok(Duployan::LetterSJ),
            '𛰡' => Ok(Duployan::LetterMWithDot),
            '𛰢' => Ok(Duployan::LetterNWithDot),
            '𛰣' => Ok(Duployan::LetterJWithDot),
            '𛰤' => Ok(Duployan::LetterJWithDotsInsideAndAbove),
            '𛰥' => Ok(Duployan::LetterSWithDot),
            '𛰦' => Ok(Duployan::LetterSWithDotBelow),
            '𛰧' => Ok(Duployan::LetterMS),
            '𛰨' => Ok(Duployan::LetterNS),
            '𛰩' => Ok(Duployan::LetterJS),
            '𛰪' => Ok(Duployan::LetterSS),
            '𛰫' => Ok(Duployan::LetterMNS),
            '𛰬' => Ok(Duployan::LetterNMS),
            '𛰭' => Ok(Duployan::LetterJMS),
            '𛰮' => Ok(Duployan::LetterSJS),
            '𛰯' => Ok(Duployan::LetterJSWithDot),
            '𛰰' => Ok(Duployan::LetterJN),
            '𛰱' => Ok(Duployan::LetterJNS),
            '𛰲' => Ok(Duployan::LetterST),
            '𛰳' => Ok(Duployan::LetterSTR),
            '𛰴' => Ok(Duployan::LetterSP),
            '𛰵' => Ok(Duployan::LetterSPR),
            '𛰶' => Ok(Duployan::LetterTS),
            '𛰷' => Ok(Duployan::LetterTRS),
            '𛰸' => Ok(Duployan::LetterW),
            '𛰹' => Ok(Duployan::LetterWh),
            '𛰺' => Ok(Duployan::LetterWR),
            '𛰻' => Ok(Duployan::LetterSN),
            '𛰼' => Ok(Duployan::LetterSM),
            '𛰽' => Ok(Duployan::LetterKRS),
            '𛰾' => Ok(Duployan::LetterGRS),
            '𛰿' => Ok(Duployan::LetterSK),
            '𛱀' => Ok(Duployan::LetterSKR),
            '𛱁' => Ok(Duployan::LetterA),
            '𛱂' => Ok(Duployan::LetterSloanOw),
            '𛱃' => Ok(Duployan::LetterOa),
            '𛱄' => Ok(Duployan::LetterO),
            '𛱅' => Ok(Duployan::LetterAou),
            '𛱆' => Ok(Duployan::LetterI),
            '𛱇' => Ok(Duployan::LetterE),
            '𛱈' => Ok(Duployan::LetterIe),
            '𛱉' => Ok(Duployan::LetterShortI),
            '𛱊' => Ok(Duployan::LetterUi),
            '𛱋' => Ok(Duployan::LetterEe),
            '𛱌' => Ok(Duployan::LetterSloanEh),
            '𛱍' => Ok(Duployan::LetterRomanianI),
            '𛱎' => Ok(Duployan::LetterSloanEe),
            '𛱏' => Ok(Duployan::LetterLongI),
            '𛱐' => Ok(Duployan::LetterYe),
            '𛱑' => Ok(Duployan::LetterU),
            '𛱒' => Ok(Duployan::LetterEu),
            '𛱓' => Ok(Duployan::LetterXw),
            '𛱔' => Ok(Duployan::LetterUN),
            '𛱕' => Ok(Duployan::LetterLongU),
            '𛱖' => Ok(Duployan::LetterRomanianU),
            '𛱗' => Ok(Duployan::LetterUh),
            '𛱘' => Ok(Duployan::LetterSloanU),
            '𛱙' => Ok(Duployan::LetterOoh),
            '𛱚' => Ok(Duployan::LetterOw),
            '𛱛' => Ok(Duployan::LetterOu),
            '𛱜' => Ok(Duployan::LetterWa),
            '𛱝' => Ok(Duployan::LetterWo),
            '𛱞' => Ok(Duployan::LetterWi),
            '𛱟' => Ok(Duployan::LetterWei),
            '𛱠' => Ok(Duployan::LetterWow),
            '𛱡' => Ok(Duployan::LetterNasalU),
            '𛱢' => Ok(Duployan::LetterNasalO),
            '𛱣' => Ok(Duployan::LetterNasalI),
            '𛱤' => Ok(Duployan::LetterNasalA),
            '𛱥' => Ok(Duployan::LetterPerninAn),
            '𛱦' => Ok(Duployan::LetterPerninAm),
            '𛱧' => Ok(Duployan::LetterSloanEn),
            '𛱨' => Ok(Duployan::LetterSloanAn),
            '𛱩' => Ok(Duployan::LetterSloanOn),
            '𛱪' => Ok(Duployan::LetterVocalicM),
            // NOTE(review): the arms jump here; the skipped code points
            // appear to be unassigned in the block and fall to `Err(())`.
            '𛱰' => Ok(Duployan::AffixLeftHorizontalSecant),
            '𛱱' => Ok(Duployan::AffixMidHorizontalSecant),
            '𛱲' => Ok(Duployan::AffixRightHorizontalSecant),
            '𛱳' => Ok(Duployan::AffixLowVerticalSecant),
            '𛱴' => Ok(Duployan::AffixMidVerticalSecant),
            '𛱵' => Ok(Duployan::AffixHighVerticalSecant),
            '𛱶' => Ok(Duployan::AffixAttachedSecant),
            '𛱷' => Ok(Duployan::AffixAttachedLeftDashToDashRightSecant),
            '𛱸' => Ok(Duployan::AffixAttachedTangent),
            '𛱹' => Ok(Duployan::AffixAttachedTail),
            '𛱺' => Ok(Duployan::AffixAttachedEHook),
            '𛱻' => Ok(Duployan::AffixAttachedIHook),
            '𛱼' => Ok(Duployan::AffixAttachedTangentHook),
            '𛲀' => Ok(Duployan::AffixHighAcute),
            '𛲁' => Ok(Duployan::AffixHighTightAcute),
            '𛲂' => Ok(Duployan::AffixHighGrave),
            '𛲃' => Ok(Duployan::AffixHighLongGrave),
            '𛲄' => Ok(Duployan::AffixHighDot),
            '𛲅' => Ok(Duployan::AffixHighCircle),
            '𛲆' => Ok(Duployan::AffixHighLine),
            '𛲇' => Ok(Duployan::AffixHighWave),
            '𛲈' => Ok(Duployan::AffixHighVertical),
            '𛲐' => Ok(Duployan::AffixLowAcute),
            '𛲑' => Ok(Duployan::AffixLowTightAcute),
            '𛲒' => Ok(Duployan::AffixLowGrave),
            '𛲓' => Ok(Duployan::AffixLowLongGrave),
            '𛲔' => Ok(Duployan::AffixLowDot),
            '𛲕' => Ok(Duployan::AffixLowCircle),
            '𛲖' => Ok(Duployan::AffixLowLine),
            '𛲗' => Ok(Duployan::AffixLowWave),
            '𛲘' => Ok(Duployan::AffixLowVertical),
            '𛲙' => Ok(Duployan::AffixLowArrow),
            '𛲜' => Ok(Duployan::SignOWithCross),
            '𛲝' => Ok(Duployan::ThickLetterSelector),
            '𛲞' => Ok(Duployan::DoubleMark),
            // Anything else is not an assigned Duployan character.
            _ => Err(()),
        }
    }
}
/// Convert a `Duployan` character into its Unicode code point.
///
/// NOTE(review): implementing `Into` directly is unidiomatic (clippy
/// `from_over_into`), but the trait surface is kept unchanged so existing
/// `.into()` call sites are unaffected.
impl Into<u32> for Duployan {
    fn into(self) -> u32 {
        // A `char` is exactly a Unicode scalar value, so a plain `as` cast
        // yields the code point directly. The previous implementation
        // round-tripped through `escape_unicode()`, two string `replace`
        // calls and a hex re-parse (plus an `unwrap`) to compute the same
        // number.
        let c: char = self.into();
        c as u32
    }
}
/// Try to interpret `u` as a code point of a Duployan character.
///
/// Fails when `u` is not a valid Unicode scalar value, or when the
/// resulting `char` is outside the Duployan block.
impl std::convert::TryFrom<u32> for Duployan {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
/// Iterate over the characters following `self` in the Duployan block.
///
/// Stops (`None`) when the next code point is not an assigned Duployan
/// character — note this means iteration ends at the first unassigned gap
/// inside the block, matching the original lookup behavior.
impl Iterator for Duployan {
    type Item = Self;
    fn next(&mut self) -> Option<Self> {
        let index: u32 = (*self).into();
        use std::convert::TryFrom;
        // BUG FIX: the previous version computed the successor but never
        // stored it back into `self`, so every call returned the same
        // value and any `for` loop over this iterator never terminated.
        let succ = Self::try_from(index + 1).ok()?;
        *self = succ;
        Some(succ)
    }
}
impl Duployan {
    /// The character with the lowest code point in this Unicode block.
    pub fn new() -> Self {
        Duployan::LetterH
    }
    /// The character's name, rendered in sentence case.
    pub fn name(&self) -> String {
        // The Debug form of the variant (prefixed with the block name)
        // is reshaped into human-readable text.
        let debug_name = std::format!("Duployan{:#?}", self);
        string_morph::to_sentence_case(&debug_name)
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::collections::HashSet;
use std::str::FromStr;
use std::sync::Arc;
use anyhow::anyhow;
use anyhow::Error;
use ascii::AsciiString;
use ffi::Str;
use ir::instr::HasLoc;
use ir::instr::HasLocals;
use ir::instr::Hhbc;
use ir::instr::Predicate;
use ir::instr::Special;
use ir::instr::Terminator;
use ir::instr::Textual;
use ir::instr::TextualHackBuiltinParam;
use ir::Block;
use ir::BlockId;
use ir::ClassName;
use ir::CollectionType;
use ir::Constant;
use ir::ConstantId;
use ir::Func;
use ir::IncDecOp;
use ir::Instr;
use ir::InstrId;
use ir::LocId;
use ir::LocalId;
use ir::StringInterner;
use ir::TryCatchId;
use ir::ValueId;
use itertools::Itertools;
use log::trace;
use crate::class;
use crate::hack;
use crate::lower::func_builder::FuncBuilderEx as _;
use crate::mangle::Mangle as _;
use crate::mangle::MangleWithClass as _;
use crate::state::FuncDeclKind;
use crate::state::FuncDecls;
use crate::state::UnitState;
use crate::textual;
use crate::textual::Sid;
use crate::types::convert_ty;
use crate::util;
/// Crate-local result alias: defaults to `()` success and `anyhow::Error` failure.
type Result<T = (), E = Error> = std::result::Result<T, E>;
/// Emit a top-level function. Functions are defined as taking a param bundle.
///
/// f(params: HackParams): mixed;
pub(crate) fn write_function(
    w: &mut dyn std::io::Write,
    state: &mut UnitState,
    function: ir::Function<'_>,
) -> Result {
    trace!("Convert Function {}", function.name.as_bstr());
    // A plain function has no enclosing class: its `this` is `*void` and
    // there is no MethodInfo.
    let mangled = function.name.mangle(&state.strings);
    write_func(w, state, &mangled, tx_ty!(*void), function.func, None)
}
/// Emit one function or method body as Textual.
///
/// Pipeline: lower the IR, verify it, compute the parameter and local
/// lists, then stream each basic block through `write_block`. Function
/// names referenced by the body are collected and merged into
/// `unit_state.func_declares` afterwards.
pub(crate) fn write_func(
    w: &mut dyn std::io::Write,
    unit_state: &mut UnitState,
    name: &str,
    this_ty: textual::Ty,
    func: ir::Func<'_>,
    method_info: Option<&MethodInfo<'_>>,
) -> Result {
    let func = func.clone();
    let mut func = crate::lower::lower(func, method_info, Arc::clone(&unit_state.strings));
    ir::verify::verify_func(&func, &Default::default(), &unit_state.strings)?;
    // Take the params out of the func; their LocalIds are remembered so
    // they are not re-declared as locals below.
    let params = std::mem::take(&mut func.params);
    let param_lids = params
        .iter()
        .map(|p| LocalId::Named(p.name))
        .collect::<HashSet<_>>();
    let mut params = params
        .into_iter()
        .map(|p| {
            let name_bytes = unit_state.strings.lookup_bytes(p.name);
            let name_string = util::escaped_string(&name_bytes);
            (name_string, convert_ty(p.ty.enforced, &unit_state.strings))
        })
        .collect_vec();
    // Prepend the 'this' parameter.
    let this_name = AsciiString::from_str("this").unwrap();
    params.insert(0, (this_name, this_ty));
    let params = params
        .iter()
        .map(|(name, ty)| (name.as_str(), ty.clone()))
        .collect_vec();
    let ret_ty = convert_ty(
        std::mem::take(&mut func.return_type.enforced),
        &unit_state.strings,
    );
    // Every local mentioned by any instruction, minus the params.
    let lids = func
        .body_instrs()
        .flat_map(HasLocals::locals)
        .cloned()
        .collect::<HashSet<_>>();
    let locals = lids
        .into_iter()
        .filter(|lid| !param_lids.contains(lid))
        // Deterministic ordering: named locals first, sorted by name.
        .sorted_by(|x, y| cmp_lid(&unit_state.strings, x, y))
        .map(|lid| {
            // TODO(arr): figure out how to provide more precise types
            let ty = tx_ty!(*void);
            (lid, ty)
        })
        .collect::<Vec<_>>();
    let span = func.loc(func.loc_id).clone();
    let func_declares = textual::write_function(
        w,
        &unit_state.strings,
        name,
        &span,
        &params,
        ret_ty,
        &locals,
        |w| {
            // Body emission happens inside the callback so the textual
            // writer can wrap it with the function header/footer.
            let func = rewrite_prelude(func, Arc::clone(&unit_state.strings));
            trace!(
                "After Rewrite Prelude: {}",
                ir::print::DisplayFunc(&func, true, &unit_state.strings)
            );
            let mut func = rewrite_jmp_ops(func);
            ir::passes::clean::run(&mut func);
            let mut state = FuncState::new(&unit_state.strings, &func, method_info);
            for bid in func.block_ids() {
                write_block(w, &mut state, bid)?;
            }
            Ok(state.func_declares)
        },
    )?;
    // This function itself is now defined; everything it referenced is
    // merged in as external declarations.
    unit_state
        .func_declares
        .declare(name, FuncDeclKind::Internal);
    unit_state.func_declares.merge(func_declares);
    Ok(())
}
/// Emit one basic block: allocate sids for its params, write its label
/// (except for the entry block, which Textual supplies implicitly), then
/// emit each instruction in order.
fn write_block(w: &mut textual::FuncWriter<'_>, state: &mut FuncState<'_>, bid: BlockId) -> Result {
    trace!("  Block {bid}");
    let block = state.func.block(bid);
    if block.tcid != TryCatchId::None {
        textual_todo! { w.comment("TODO: Try-Catch Block")?; }
    }
    // Sids must be allocated for the params even when no label is written,
    // so later instructions can reference them.
    let mut param_sids = Vec::with_capacity(block.params.len());
    for &iid in &block.params {
        param_sids.push(state.alloc_sid_for_iid(w, iid));
    }
    // The entry BID is always included for us.
    if bid != Func::ENTRY_BID {
        w.write_label(bid, &param_sids)?;
    }
    for iid in block.iids() {
        write_instr(w, state, iid)?;
    }
    Ok(())
}
/// Emit a single IR instruction. Dispatches on the instruction kind;
/// anything not yet supported is emitted as a TODO comment or call.
fn write_instr(w: &mut textual::FuncWriter<'_>, state: &mut FuncState<'_>, iid: InstrId) -> Result {
    let instr = state.func.instr(iid);
    trace!("    Instr {iid}: {instr:?}");
    state.update_loc(w, instr.loc_id())?;
    // In general don't write directly to `w` here - isolate the formatting to
    // the `textual` crate.
    match *instr {
        Instr::Call(ref call) => write_call(w, state, iid, call)?,
        Instr::Hhbc(Hhbc::CGetL(lid, _)) => write_load_var(w, state, iid, lid)?,
        Instr::Hhbc(Hhbc::IncDecL(lid, op, _)) => write_inc_dec_l(w, state, iid, lid, op)?,
        Instr::Hhbc(Hhbc::SetL(vid, lid, _)) => {
            write_set_var(w, state, lid, vid)?;
            // SetL emits the input as the output.
            state.copy_iid(iid, vid);
        }
        Instr::Hhbc(Hhbc::This(_)) => write_load_this(w, state, iid)?,
        Instr::MemberOp(ref mop) => crate::member_op::write(w, state, iid, mop)?,
        Instr::Special(Special::Textual(Textual::AssertFalse(vid, _))) => {
            // I think "prune_not" means "stop if this expression IS true"...
            let pred = hack::expr_builtin(hack::Builtin::IsTrue, [state.lookup_vid(vid)]);
            w.prune_not(pred)?;
        }
        Instr::Special(Special::Textual(Textual::AssertTrue(vid, _))) => {
            // I think "prune" means "stop if this expression IS NOT true"...
            let pred = hack::expr_builtin(hack::Builtin::IsTrue, [state.lookup_vid(vid)]);
            w.prune(pred)?;
        }
        Instr::Special(Special::Textual(Textual::HackBuiltin {
            ref target,
            ref params,
            ref values,
            loc: _,
        })) => write_builtin(w, state, iid, target, params, values)?,
        Instr::Terminator(Terminator::Enter(bid, _) | Terminator::Jmp(bid, _)) => {
            w.jmp(&[bid], ())?;
        }
        Instr::Terminator(Terminator::JmpArgs(bid, ref params, _)) => {
            w.jmp(
                &[bid],
                params.iter().map(|v| state.lookup_vid(*v)).collect_vec(),
            )?;
        }
        Instr::Terminator(Terminator::JmpOp {
            cond: _,
            pred: _,
            targets: [true_bid, false_bid],
            loc: _,
        }) => {
            // We just need to emit the jmp - the rewrite_jmp_ops() pass should
            // have already inserted assert in place on the target bids.
            w.jmp(&[true_bid, false_bid], ())?;
        }
        Instr::Terminator(Terminator::Ret(vid, _)) => {
            w.ret(state.lookup_vid(vid))?;
        }
        Instr::Terminator(Terminator::Unreachable) => {
            w.unreachable()?;
        }
        // These Special forms should have been eliminated by earlier passes;
        // reaching one here is a bug in the pipeline.
        Instr::Special(Special::Copy(..)) => todo!(),
        Instr::Special(Special::IrToBc(..)) => todo!(),
        Instr::Special(Special::Param) => todo!(),
        Instr::Special(Special::Select(..)) => todo!(),
        Instr::Special(Special::Tmp(..)) => todo!(),
        Instr::Special(Special::Tombstone) => todo!(),
        // Terminators with no Textual lowering yet: emitted as TODO markers.
        Instr::Terminator(Terminator::CallAsync(..))
        | Instr::Terminator(Terminator::Exit(..))
        | Instr::Terminator(Terminator::Fatal(..))
        | Instr::Terminator(Terminator::IterInit(..))
        | Instr::Terminator(Terminator::IterNext(..))
        | Instr::Terminator(Terminator::MemoGet(..))
        | Instr::Terminator(Terminator::MemoGetEager(..))
        | Instr::Terminator(Terminator::NativeImpl(..))
        | Instr::Terminator(Terminator::RetCSuspended(..))
        | Instr::Terminator(Terminator::RetM(..))
        | Instr::Terminator(Terminator::SSwitch { .. })
        | Instr::Terminator(Terminator::Switch { .. })
        | Instr::Terminator(Terminator::ThrowAsTypeStructException { .. }) => {
            w.write_todo(&format!("{:?}", instr))?;
        }
        Instr::Terminator(Terminator::Throw(vid, _)) => {
            textual_todo! {
                let expr = state.lookup_vid(vid);
                w.call("TODO_throw", [expr])?;
                w.unreachable()?;
            }
        }
        Instr::Hhbc(ref hhbc) => {
            // This should only handle instructions that can't be rewritten into
            // a simpler form (like control flow and generic calls). Everything
            // else should be handled in lower().
            textual_todo! {
                use ir::instr::HasOperands;
                let name = format!("TODO_hhbc_{}", hhbc);
                state
                    .func_declares
                    .declare(&name, FuncDeclKind::External);
                let output = w.call(
                    &name,
                    instr
                        .operands()
                        .iter()
                        .map(|vid| state.lookup_vid(*vid))
                        .collect_vec(),
                )?;
                state.set_iid(iid, output);
            }
        }
    }
    Ok(())
}
/// Emit a call to a builtin. The `params` are templates: literal params
/// become constant expressions while each `Value` slot consumes the next
/// entry of `values` in order.
fn write_builtin(
    w: &mut textual::FuncWriter<'_>,
    state: &mut FuncState<'_>,
    iid: InstrId,
    target: &str,
    params: &[TextualHackBuiltinParam],
    values: &[ValueId],
) -> Result {
    let mut values = values.iter();
    let mut args = Vec::with_capacity(params.len());
    for param in params {
        let arg = match *param {
            TextualHackBuiltinParam::Null => textual::Expr::null(),
            TextualHackBuiltinParam::False => textual::Expr::false_(),
            TextualHackBuiltinParam::HackInt(i) => textual::Expr::hack_int(i),
            TextualHackBuiltinParam::HackString(ref s) => textual::Expr::hack_string(s.clone()),
            TextualHackBuiltinParam::Int(i) => textual::Expr::int(i),
            TextualHackBuiltinParam::String(ref s) => textual::Expr::string(s.clone()),
            TextualHackBuiltinParam::True => textual::Expr::true_(),
            TextualHackBuiltinParam::Value => {
                // Pull the next runtime value; the lowering pass guarantees
                // `values` holds one entry per `Value` slot.
                state.lookup_vid(*values.next().unwrap())
            }
        };
        args.push(arg);
    }
    let output = w.call(target, args)?;
    state.set_iid(iid, output);
    Ok(())
}
/// Load `$this` for the current method; only valid when emitting a method
/// (a `MethodInfo` must be present).
fn write_load_this(
    w: &mut textual::FuncWriter<'_>,
    state: &mut FuncState<'_>,
    iid: InstrId,
) -> Result {
    let class = state.method_info.unwrap().class;
    let this_ty = class::non_static_ty(class.name, state.strings);
    let sid = w.load(this_ty, "this")?;
    state.set_iid(iid, sid);
    Ok(())
}
/// Load a local variable's current value into a fresh sid.
fn write_load_var(
    w: &mut textual::FuncWriter<'_>,
    state: &mut FuncState<'_>,
    iid: InstrId,
    lid: LocalId,
) -> Result {
    let src = textual::Expr::deref(lid);
    let sid = w.load(tx_ty!(mixed), src)?;
    state.set_iid(iid, sid);
    Ok(())
}
/// Store `vid`'s value into a local variable.
fn write_set_var(
    w: &mut textual::FuncWriter<'_>,
    state: &mut FuncState<'_>,
    lid: LocalId,
    vid: ValueId,
) -> Result {
    let dst = textual::Expr::deref(lid);
    let value = state.lookup_vid(vid);
    w.store(dst, value, tx_ty!(mixed))
}
/// Emit a function or method call.
///
/// Asserts the shapes we cannot handle yet (inout/readonly args, multiple
/// returns), flags unhandled `FCallArgsFlags` with TODO comments, and then
/// dispatches on `CallDetail` to choose the static vs virtual call form.
fn write_call(
    w: &mut textual::FuncWriter<'_>,
    state: &mut FuncState<'_>,
    iid: InstrId,
    call: &ir::Call,
) -> Result {
    use ir::instr::CallDetail;
    use ir::FCallArgsFlags;
    let ir::Call {
        ref operands,
        context,
        ref detail,
        flags,
        num_rets,
        ref inouts,
        ref readonly,
        loc: _,
    } = *call;
    assert!(inouts.as_ref().map_or(true, |inouts| inouts.is_empty()));
    assert!(readonly.as_ref().map_or(true, |ro| ro.is_empty()));
    assert!(num_rets < 2);
    let context = state.strings.lookup_bytes_or_none(context);
    if let Some(context) = context {
        if !context.is_empty() {
            textual_todo! {
                // BUG FIX: `{context:?}` was previously inside a plain string
                // literal handed to `comment()`, so the placeholder was
                // printed verbatim instead of interpolated; route the text
                // through format! so the context actually appears.
                w.comment(&format!("TODO: write_call(Context: {context:?})"))?;
            }
        }
    }
    if flags & FCallArgsFlags::HasUnpack != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::HasUnpack")?;
        }
    }
    if flags & FCallArgsFlags::HasGenerics != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::HasGenerics")?;
        }
    }
    if flags & FCallArgsFlags::LockWhileUnwinding != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::LockWhileUnwinding")?;
        }
    }
    if flags & FCallArgsFlags::SkipRepack != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::SkipRepack")?;
        }
    }
    if flags & FCallArgsFlags::SkipCoeffectsCheck != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::SkipCoeffectsCheck")?;
        }
    }
    if flags & FCallArgsFlags::EnforceMutableReturn != 0 {
        // todo!();
    }
    if flags & FCallArgsFlags::EnforceReadonlyThis != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::EnforceReadonlyThis")?;
        }
    }
    if flags & FCallArgsFlags::ExplicitContext != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::ExplicitContext")?;
        }
    }
    if flags & FCallArgsFlags::HasInOut != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::HasInOut")?;
        }
    }
    if flags & FCallArgsFlags::EnforceInOut != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::EnforceInOut")?;
        }
    }
    if flags & FCallArgsFlags::EnforceReadonly != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::EnforceReadonly")?;
        }
    }
    if flags & FCallArgsFlags::HasAsyncEagerOffset != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::HasAsyncEagerOffset")?;
        }
    }
    if flags & FCallArgsFlags::NumArgsStart != 0 {
        textual_todo! {
            w.comment("TODO: FCallArgsFlags::NumArgsStart")?;
        }
    }
    let args = detail.args(operands);
    let output = match *detail {
        CallDetail::FCallClsMethod { .. } => todo!(),
        CallDetail::FCallClsMethodD { clsid, method } => {
            // C::foo()
            let target = method.mangle(clsid, state.strings);
            state
                .func_declares
                .declare(target.to_string(), FuncDeclKind::External);
            let this = class::load_static_class(w, clsid, state.strings)?;
            w.call_static(
                &target,
                this.into(),
                args.iter().copied().map(|vid| state.lookup_vid(vid)),
            )?
        }
        CallDetail::FCallClsMethodM { .. } => todo!(),
        CallDetail::FCallClsMethodS { .. } => todo!(),
        CallDetail::FCallClsMethodSD { .. } => {
            textual_todo! { w.call("TODO_FCallClsMethodSD", ())? }
        }
        CallDetail::FCallCtor => {
            textual_todo! {
                // new $x
                let ty = ClassName::new(Str::new(b"Mixed"));
                let target =
                    ir::MethodName::new(ffi::Slice::new(b"TODO_ctor")).mangle(&ty, state.strings);
                w.call_virtual(
                    &target,
                    state.lookup_vid(detail.obj(operands)),
                    args.iter().copied().map(|vid| state.lookup_vid(vid)),
                )?
            }
        }
        CallDetail::FCallFunc => todo!(),
        CallDetail::FCallFuncD { func } => {
            // foo()
            let target = func.mangle(state.strings);
            state
                .func_declares
                .declare(target.to_string(), FuncDeclKind::External);
            // A top-level function is called like a class static in a special
            // top-level class. Its 'this' pointer is null.
            w.call_static(
                &target,
                textual::Expr::null(),
                args.iter().copied().map(|vid| state.lookup_vid(vid)),
            )?
        }
        CallDetail::FCallObjMethod { .. } => todo!(),
        CallDetail::FCallObjMethodD { flavor, method } => {
            // $x->y()
            if flavor == ir::ObjMethodOp::NullSafe {
                // Handle this in lowering.
                textual_todo! {
                    w.comment("TODO: NullSafe")?;
                }
            }
            // TODO: need to try to figure out the type.
            let ty = ClassName::new(Str::new(b"Mixed"));
            let target = method.mangle(&ty, state.strings);
            state
                .func_declares
                .declare(target.to_string(), FuncDeclKind::External);
            w.call_virtual(
                &target,
                state.lookup_vid(detail.obj(operands)),
                args.iter().copied().map(|vid| state.lookup_vid(vid)),
            )?
        }
    };
    state.set_iid(iid, output);
    Ok(())
}
/// Emit `++$x` / `--$x` / `$x++` / `$x--`: load the local, add or subtract
/// one via the hack builtin, store the result back, and record the
/// instruction's value.
fn write_inc_dec_l(
    w: &mut textual::FuncWriter<'_>,
    state: &mut FuncState<'_>,
    iid: InstrId,
    lid: LocalId,
    op: IncDecOp,
) -> Result {
    // One match picks both the arithmetic builtin and whether this is a
    // prefix form (the previous code matched on `op` twice, with duplicated
    // `unreachable!` arms).
    let (builtin, is_prefix) = match op {
        IncDecOp::PreInc => (hack::Builtin::Hhbc(hack::Hhbc::Add), true),
        IncDecOp::PostInc => (hack::Builtin::Hhbc(hack::Hhbc::Add), false),
        IncDecOp::PreDec => (hack::Builtin::Hhbc(hack::Hhbc::Sub), true),
        IncDecOp::PostDec => (hack::Builtin::Hhbc(hack::Hhbc::Sub), false),
        _ => unreachable!(),
    };
    let pre = w.load(tx_ty!(mixed), textual::Expr::deref(lid))?;
    let post = hack::call_builtin(w, builtin, (pre, textual::Expr::hack_int(1)))?;
    w.store(textual::Expr::deref(lid), post, tx_ty!(mixed))?;
    // BUG FIX: the result selection was inverted. A prefix op (`++$x`)
    // evaluates to the *mutated* value (`post`); a postfix op (`$x++`)
    // evaluates to the *original* value (`pre`). The previous code returned
    // `pre` for prefix and `post` for postfix.
    let sid = if is_prefix { post } else { pre };
    state.set_iid(iid, sid);
    Ok(())
}
/// Per-function emission state threaded through `write_block`/`write_instr`.
pub(crate) struct FuncState<'a> {
    // Names this body calls; merged into the unit's declarations afterwards.
    func_declares: FuncDecls,
    func: &'a ir::Func<'a>,
    // Maps each emitted InstrId to the textual expression holding its value.
    iid_mapping: ir::InstrIdMap<textual::Expr>,
    // Present only when emitting a method; enables `$this` handling.
    method_info: Option<&'a MethodInfo<'a>>,
    pub(crate) strings: &'a StringInterner,
}
impl<'a> FuncState<'a> {
    /// Create an empty state for one function body.
    fn new(
        strings: &'a StringInterner,
        func: &'a ir::Func<'a>,
        method_info: Option<&'a MethodInfo<'a>>,
    ) -> Self {
        Self {
            func_declares: Default::default(),
            func,
            iid_mapping: Default::default(),
            method_info,
            strings,
        }
    }
    /// Allocate a fresh sid and record it as the value of `iid`.
    pub fn alloc_sid_for_iid(&mut self, w: &mut textual::FuncWriter<'_>, iid: InstrId) -> Sid {
        let sid = w.alloc_sid();
        self.set_iid(iid, sid);
        sid
    }
    /// Look up a ValueId in the FuncState and return an Expr representing
    /// it. For InstrIds and complex ConstIds return an Expr containing the
    /// (already emitted) Sid. For simple ConstIds use an Expr representing the
    /// value directly.
    pub fn lookup_vid(&self, vid: ValueId) -> textual::Expr {
        use textual::Expr;
        match vid.full() {
            ir::FullInstrId::Instr(iid) => self.lookup_iid(iid),
            ir::FullInstrId::Constant(c) => {
                use hack::Builtin;
                let c = self.func.constant(c);
                match c {
                    // Simple constants become inline builtin expressions.
                    Constant::Bool(v) => hack::expr_builtin(Builtin::Bool, [Expr::bool_(*v)]),
                    Constant::Int(i) => hack::expr_builtin(Builtin::Int, [Expr::int(*i)]),
                    Constant::Null => hack::expr_builtin(Builtin::Null, ()),
                    Constant::String(s) => {
                        let s = self.strings.lookup_bstr(*s);
                        let s = util::escaped_string(&s);
                        hack::expr_builtin(Builtin::String, [Expr::string(s)])
                    }
                    // Complex constants should have been rewritten to
                    // instructions by write_constants(); hitting one here
                    // is a pipeline bug.
                    Constant::Array(..) => todo!(),
                    Constant::Dir => todo!(),
                    Constant::Double(..) => textual_todo! {
                        textual::Expr::call("TODO_Double".to_string(), ())
                    },
                    Constant::File => todo!(),
                    Constant::FuncCred => todo!(),
                    Constant::Method => todo!(),
                    Constant::Named(..) => todo!(),
                    Constant::NewCol(..) => todo!(),
                    Constant::Uninit => todo!(),
                }
            }
            ir::FullInstrId::None => unreachable!(),
        }
    }
    /// Fetch the expression previously recorded for `iid`; panics if the
    /// instruction has not been emitted yet.
    pub fn lookup_iid(&self, iid: InstrId) -> textual::Expr {
        self.iid_mapping
            .get(&iid)
            .cloned()
            .ok_or_else(|| anyhow!("looking for {iid:?}"))
            .unwrap()
    }
    /// Record `expr` as the value of `iid`. Each iid may be set only once
    /// (asserted) — SSA form.
    pub(crate) fn set_iid(&mut self, iid: InstrId, expr: impl Into<textual::Expr>) {
        let expr = expr.into();
        let old = self.iid_mapping.insert(iid, expr);
        assert!(old.is_none());
    }
    /// Record `iid` as an alias for the value of `input`.
    pub(crate) fn copy_iid(&mut self, iid: InstrId, input: ValueId) {
        let expr = self.lookup_vid(input);
        self.set_iid(iid, expr);
    }
    /// Emit a source-location marker when the instruction carries one.
    pub(crate) fn update_loc(&mut self, w: &mut textual::FuncWriter<'_>, loc: LocId) -> Result {
        if loc != LocId::NONE {
            let new = &self.func.locs[loc];
            w.write_loc(new)?;
        }
        Ok(())
    }
}
/// Rewrite the function prelude:
/// - Convert complex constants into builtins.
fn rewrite_prelude<'a>(mut func: ir::Func<'a>, strings: Arc<StringInterner>) -> ir::Func<'a> {
    let mut remap = ir::ValueIdMap::default();
    ir::FuncBuilder::borrow_func(&mut func, strings, |builder| {
        // Swap out the initial block so we can inject our entry code.
        let entry_bid = builder.func.alloc_bid(Block::default());
        builder.func.blocks.swap(Func::ENTRY_BID, entry_bid);
        builder
            .func
            .remap_bids(&[(Func::ENTRY_BID, entry_bid)].into_iter().collect());
        // The (now empty) entry block gets the constant setup, then jumps
        // to the original entry code.
        builder.start_block(Func::ENTRY_BID);
        write_constants(&mut remap, builder);
        builder.emit(Instr::jmp(entry_bid, LocId::NONE));
    });
    // Point all uses of the rewritten constants at their new instructions.
    func.remap_vids(&remap);
    func
}
/// Emit instructions that materialize the function's "complex" constants
/// (arrays, collections, Dir/File/etc) at the start of the body, recording
/// in `remap` how each old constant ValueId maps to its new instruction.
fn write_constants(remap: &mut ir::ValueIdMap<ValueId>, builder: &mut ir::FuncBuilder<'_>) {
    // Steal the contents of the "complex" constants first. We need to do this
    // because we may create more constants during lowering (but don't create
    // more complex ones!).
    let mut constants = Vec::default();
    for (lid, constant) in builder.func.constants.iter_mut().enumerate() {
        let lid = ConstantId::from_usize(lid);
        match constant {
            // Simple constants stay inline (see FuncState::lookup_vid).
            Constant::Bool(..)
            | Constant::Double(..)
            | Constant::Int(..)
            | Constant::Null
            | Constant::String(..)
            | Constant::Uninit => continue,
            Constant::Array(..)
            | Constant::Dir
            | Constant::File
            | Constant::FuncCred
            | Constant::Method
            | Constant::Named(..)
            | Constant::NewCol(..) => {
                // Leave Uninit behind as a tombstone; the real value is
                // emitted as an instruction below.
                let constant = std::mem::replace(constant, Constant::Uninit);
                constants.push((lid, constant));
            }
        }
    }
    for (lid, constant) in constants.into_iter() {
        trace!("    Const {lid}: {constant:?}");
        let src = ValueId::from_constant(lid);
        let loc = LocId::NONE;
        let vid = match constant {
            // Filtered out by the `continue` above.
            Constant::Bool(..)
            | Constant::Double(..)
            | Constant::Int(..)
            | Constant::Null
            | Constant::String(..)
            | Constant::Uninit => unreachable!(),
            Constant::Array(..)
            | Constant::Dir
            | Constant::File
            | Constant::FuncCred
            | Constant::Method
            | Constant::Named(..) => builder.emit_todo_instr(&format!("{constant:?}"), loc),
            Constant::NewCol(ty) => match ty {
                CollectionType::Vector => {
                    builder.emit_hack_builtin(hack::Builtin::Hhbc(hack::Hhbc::NewVec), &[], loc)
                }
                CollectionType::Map
                | CollectionType::Set
                | CollectionType::Pair
                | CollectionType::ImmVector
                | CollectionType::ImmMap
                | CollectionType::ImmSet => builder.emit_todo_instr(&format!("{ty:?}"), loc),
                _ => unreachable!(),
            },
        };
        remap.insert(src, vid);
    }
}
/// Convert from a deterministic jump model to a non-deterministic model.
///
/// In Textual instead of "jump if" you say "jump to a, b" and then in 'a' and 'b'
/// you say "stop if my condition isn't met".
///
/// This inserts the needed 'assert_true' and 'assert_false' statements but
/// leaves the original JmpOp as a marker for where to jump to.
fn rewrite_jmp_ops<'a>(mut func: ir::Func<'a>) -> ir::Func<'a> {
    for bid in func.block_ids() {
        if let Terminator::JmpOp {
            cond,
            pred,
            targets: [mut true_bid, mut false_bid],
            loc,
        } = *func.terminator(bid)
        {
            // We need to rewrite this jump. Because we don't allow critical
            // edges we can just insert the 'assert' at the start of the
            // target block since we must be the only caller.
            trace!("  JmpOp at {bid} needs to be rewritten");
            // A Zero predicate is the negation of NonZero: swap the targets
            // so the asserts below are always phrased for NonZero.
            if let Predicate::Zero = pred {
                std::mem::swap(&mut true_bid, &mut false_bid);
            }
            let assert_true = func.alloc_instr(Instr::Special(Special::Textual(
                Textual::AssertTrue(cond, loc),
            )));
            func.block_mut(true_bid).iids.insert(0, assert_true);
            let assert_false = func.alloc_instr(Instr::Special(Special::Textual(
                Textual::AssertFalse(cond, loc),
            )));
            func.block_mut(false_bid).iids.insert(0, assert_false);
        }
    }
    func
}
/// Extra context available when emitting a method (as opposed to a
/// top-level function): the enclosing class and whether it is static.
pub(crate) struct MethodInfo<'a> {
    pub(crate) class: &'a ir::Class<'a>,
    pub(crate) is_static: bool,
}
/// Compare locals such that named ones go first followed by unnamed ones.
/// Ordering for named locals is stable and is based on their source names.
/// Unnamed locals have only their id which may differ accross runs. In which
/// case the IR would be non-deterministic and hence unstable ordering would be
/// the least of our concerns.
fn cmp_lid(strings: &StringInterner, x: &LocalId, y: &LocalId) -> std::cmp::Ordering {
    use std::cmp::Ordering;
    match (x, y) {
        (LocalId::Named(a), LocalId::Named(b)) => {
            // Compare by interned source name, not by id.
            strings.lookup_bytes(*a).cmp(&strings.lookup_bytes(*b))
        }
        (LocalId::Named(_), LocalId::Unnamed(_)) => Ordering::Less,
        (LocalId::Unnamed(_), LocalId::Named(_)) => Ordering::Greater,
        (LocalId::Unnamed(a), LocalId::Unnamed(b)) => a.cmp(b),
    }
}
|
use std::path::PathBuf;
use std::process::exit;
#[macro_use]
extern crate diesel;
use failure::Fail;
use structopt::StructOpt;
use structopt::clap::AppSettings;
#[macro_use] mod db;
mod command;
mod correction;
mod delay;
mod dictionary;
mod errors;
mod loader;
mod pager;
mod parser;
mod path;
mod screen;
mod str_utils;
mod types;
use crate::errors::{AppError, AppResultU};
#[derive(StructOpt, Debug)]
#[structopt(name = "eitaro")]
// Top-level CLI options. NOTE: `///` doc comments on structopt fields become
// user-visible --help text, so review notes here use plain `//` comments.
pub struct Opt {
    // Optional subcommand; when absent, _main() falls back to the shell.
    #[structopt(subcommand)]
    pub command: Option<Command>,
    /// Dictionary name
    #[structopt(short, long)]
    pub dictionary: Option<String>
}
#[derive(StructOpt, Debug)]
#[structopt(setting = AppSettings::InferSubcommands)]
// All subcommands. The `///` doc comments below are the CLI help strings
// (do not edit them for style — they are user-visible output).
pub enum Command {
    /// Analyze text (STDIN) using SVL
    Analyze(command::analyze::Opt),
    /// Build dictionary
    Build(command::builder::Opt),
    /// Generate completions script for this command
    Completions(command::completions::Opt),
    /// Access dictionary database using sqlite
    #[structopt(alias = "db")]
    Database(command::database::Opt),
    /// Export the definitions for the given words (STDIN)
    Export(command::export::Opt),
    /// Output HTML fragment
    Html(command::html::Opt),
    /// Output keys
    Lemmas(command::lemmas::Opt),
    /// Lemmatize
    Lemmatize(command::lemmatize::Opt),
    /// Get word level (SVL)
    #[structopt(alias = "lv")]
    Level(command::level::Opt),
    /// Like
    Like(command::lookup::LikeOpt),
    /// Lookup
    Lookup(command::lookup::LookupOpt),
    /// Display the file paths using by eitaro
    Path,
    /// HTTP Server
    Server(command::http::Opt),
    /// Interactive shell
    Shell(command::lookup::ShellOpt),
    /// Untypo
    Untypo(command::untypo::Opt),
    /// Play wordle
    Wordle(command::wordle::Opt),
    /// Extract lemmatized words
    Words(command::words::Opt),
}
/// Parse CLI args and dispatch to the selected subcommand; with no
/// subcommand, fall back to the interactive shell.
fn _main() -> AppResultU {
    use self::Command::*;
    let opt = Opt::from_args();
    let dict = opt.dictionary.as_ref().map(AsRef::as_ref);
    let dictionary_path: PathBuf = path::get_dictionary_path(dict).expect("Failed to get dictionary path");
    if let Some(command) = opt.command {
        match command {
            Analyze(opt) =>
                command::analyze::analyze(opt, &dictionary_path),
            Build(opt) =>
                command::builder::build_dictionary(opt, &dictionary_path),
            Completions(opt) =>
                command::completions::generate(opt, Opt::clap()),
            Database(opt) =>
                command::database::shell(opt, &dictionary_path),
            Export(opt) =>
                command::export::export(opt, &dictionary_path),
            Html(opt) =>
                command::html::lookup(opt, &dictionary_path),
            Lemmas(opt) =>
                command::lemmas::lemmas(opt, &dictionary_path),
            Shell(opt) =>
                command::lookup::shell(opt, &dictionary_path),
            Lemmatize(opt) =>
                command::lemmatize::lemmatize(opt, &dictionary_path),
            Level(opt) =>
                command::level::level(opt, &dictionary_path),
            Like(opt) =>
                command::lookup::like(opt, &dictionary_path),
            Lookup(opt) =>
                command::lookup::lookup(opt, &dictionary_path),
            Path =>
                command::path::path(&dictionary_path),
            Server(opt) =>
                command::http::start_server(opt, dictionary_path),
            Untypo(opt) =>
                command::untypo::untypo(opt, &dictionary_path),
            Wordle(opt) =>
                command::wordle::play(opt, &dictionary_path),
            Words(opt) =>
                command::words::extract(opt, &dictionary_path),
        }
    } else if let Some(Command::Shell(opt)) = Opt::from_iter(&["", "shell"]).command {
        // No subcommand given: re-parse a synthetic `shell` invocation so the
        // Shell subcommand's defaults are used (first element mimics argv[0]).
        command::lookup::shell(opt, &dictionary_path)
    } else {
        // Unreachable in practice: parsing ["", "shell"] above always
        // yields the Shell variant.
        panic!("WTF: {:?}", Opt::from_iter(&["shell"]))
    }
}
fn main() {
    // Suppress `failed printing to stdout: Broken pipe (os error 32)`
    // SAFETY: restoring SIGPIPE's default disposition at startup, before any
    // other threads exist; `libc::signal` with SIG_DFL has no other
    // preconditions here.
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }
    match _main() {
        // AppError::Void signals an intentional silent exit.
        Err(AppError::Void) | Ok(_) => (),
        Err(err) => {
            if let AppError::Diesel(_) = err {
                // A database error usually means the dictionary was never built.
                eprintln!("Please build dictionary before use. See `eitaro build --help`");
                eprintln!("");
            }
            print_error(&err);
        }
    }
}
/// Print `fail` and its whole cause chain to stderr, then exit with
/// status 1 (never returns control to the caller in practice).
fn print_error(mut fail: &dyn Fail) {
    let mut rendered = fail.to_string();
    // Walk the cause chain, appending one indented line per cause.
    while let Some(cause) = fail.cause() {
        rendered += &format!("\n\tcaused by: {}", cause);
        fail = cause;
    }
    eprintln!("Error: {}", rendered);
    exit(1);
}
|
// Creates an error type for the current crate, aliases `Result` to use the new
// error, and generates `From` impls for converting source errors into the
// local error type.
/// # Example
/// ```
/// use std::fs;
/// use simpl::err;
///
/// err!(ExampleError,
///     {
///         Io@std::io::Error;
///     });
///
/// fn main() -> Result<()> {
///     fs::create_dir("test")?;
///     fs::remove_dir_all("test")?;
///     Ok(())
/// }
/// ```
#[macro_export]
macro_rules! err {
    // $i: the generated error type's name; each `$j@$t;` pair maps a
    // variant name to a wrapped source error type.
    ($i: ident, {$($j: ident@$t: ty;)*}) => {
        // Private enum holding the original source errors, one variant per
        // `$j@$t` pair, so `Error::source()` can expose them.
        #[derive(Debug)]
        enum Errs {
            $( $j($t)),*
        }
        impl std::error::Error for Errs {}
        impl core::fmt::Display for Errs {
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                match self {
                    $(
                        Errs::$j(t) => {
                            write!(f, "{}", t)
                        }
                    ),*
                }
            }
        }
        // The public error type: a free-form description plus the wrapped
        // source error (if any).
        #[derive(Debug)]
        pub struct $i {
            pub description: Option<String>,
            pub data: Option<String>,
            source: Option<Errs>
        }
        impl std::error::Error for $i {
            fn source(&self) -> Option<&(dyn std::error::Error +'static + Send + Sync)> {
                match self.source {
                    Some(ref source) => Some(source),
                    None => None
                }
            }
        }
        // Allow `"message".into()` / `Err("message")?` style construction.
        impl std::convert::From<&str> for $i {
            fn from(str: &str) -> Self {
                $i {
                    description: Some(str.to_string()),
                    data: None,
                    source: None
                }
            }
        }
        impl core::fmt::Display for $i {
            fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
                match self.description.as_ref() {
                    Some(err) => write!(f, "{}", err),
                    None => write!(f, "An unknown error has occurred!"),
                }
            }
        }
        // Crate-local Result alias defaulting to the generated error type.
        pub type Result<T, E = $i> = std::result::Result<T, E>;
        // NOTE(review): negative impls require the nightly `negative_impls`
        // feature — confirm this crate is built on nightly.
        impl !Send for $i {}
        impl !Sync for $i {}
        // One `From<source>` impl per declared variant, so `?` converts
        // source errors into $i automatically.
        $(
            impl std::convert::From<$t> for $i {
                fn from(e: $t) -> $i {
                    $i {
                        description: Some(String::from(format!("{}", e))),
                        data: None,
                        source: Some(Errs::$j(e))
                    }
                }
            }
        )*
    };
}
#[cfg(test)]
mod tests {
    use std::fs;
    // Expand the macro under test, wrapping io::Error as the only source.
    super::err!(TestError,
    {
        Io@std::io::Error;
    }
    );
    #[test]
    #[should_panic]
    fn should_fail_wrapper() {
        // Creating a directory under a missing parent fails; `?` converts
        // the io::Error into TestError and the outer unwrap() panics.
        fn should_fail() -> Result<()> {
            fs::create_dir("test_fail/test")?;
            Ok(())
        }
        should_fail().unwrap();
    }
    #[test]
    fn should_succeed() -> Result<()> {
        // NOTE(review): uses a fixed path in the working directory, so a
        // concurrent or previously-aborted run can leave `test_dir` behind
        // and make this test flaky.
        fs::create_dir("test_dir")?;
        fs::remove_dir_all("test_dir")?;
        Ok(())
    }
}
|
// Copyright 2021 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
mod csrf;
mod database;
mod http;
mod oauth2;
mod session;
mod util;
pub use self::{
csrf::CsrfConfig,
database::DatabaseConfig,
http::HttpConfig,
oauth2::{OAuth2ClientConfig, OAuth2Config},
session::SessionConfig,
util::ConfigurationSection,
};
/// Top-level application configuration aggregating every config section.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct RootConfig {
    /// OAuth2 configuration; optional in the file (serde falls back to `Default`).
    #[serde(default)]
    pub oauth2: OAuth2Config,
    /// HTTP listener configuration; optional in the file.
    #[serde(default)]
    pub http: HttpConfig,
    /// Database configuration; optional in the file.
    #[serde(default)]
    pub database: DatabaseConfig,
    /// CSRF protection settings; required (no serde default).
    pub csrf: CsrfConfig,
    /// Session settings; required (no serde default).
    pub session: SessionConfig,
}
impl ConfigurationSection<'_> for RootConfig {
    // The root section sits at the top of the configuration tree, hence
    // the empty path.
    fn path() -> &'static str {
        ""
    }
    // Builds a complete configuration by delegating to each section's own
    // `generate` implementation.
    fn generate() -> Self {
        Self {
            oauth2: OAuth2Config::generate(),
            http: HttpConfig::generate(),
            database: DatabaseConfig::generate(),
            csrf: CsrfConfig::generate(),
            session: SessionConfig::generate(),
        }
    }
}
|
//! Solutions to the challenges in Set 1.
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
use attacks;
use challenges::{ChallengeResults, ChallengeResultsBuilder};
use utils::block::{BlockCipher, Algorithms, OperationModes, PaddingSchemes};
use utils::data::Data;
use utils::metrics;
use utils::xor;
/// Run the solution to Set 1 Challenge 1 (Convert hex to base64).
///
/// # Outputs
///
/// `hex_in` - The input as a hexadecimal string.
///
/// `base64_out` - The output as a base-64 string.
pub fn challenge01() -> ChallengeResults {
    // The challenge input, given as a hex string.
    let hex_in = "49276d206b696c6c696e6720796f757220627261696e206c\
                  696b65206120706f69736f6e6f7573206d757368726f6f6d";
    // Decode the hex and re-encode the same bytes as base-64.
    let base64_out = Data::from_hex(hex_in).unwrap().to_base64();
    // Package the inputs and outputs for reporting.
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(1)
        .description("Convert hex to base64")
        .output("hex_in", hex_in)
        .output("base64_out", &base64_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 2 (Fixed XOR).
///
/// # Outputs
///
/// `hex_in` - The input as a hexadecimal string.
///
/// `hex_key` - The XOR key as a hexadecimal string.
///
/// `hex_out` - The encrypted output as a hexadecimal string.
pub fn challenge02() -> ChallengeResults {
    // Challenge input and key, both given as hex strings.
    let hex_in = "1c0111001f010100061a024b53535009181c";
    let hex_key = "686974207468652062756c6c277320657965";
    // Decode both buffers and XOR them together.
    let plaintext = Data::from_hex(hex_in).unwrap();
    let keystream = Data::from_hex(hex_key).unwrap();
    let hex_out = xor::xor(&plaintext, &keystream).to_hex();
    // Package the inputs and outputs for reporting.
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(2)
        .description("Fixed XOR")
        .output("hex_in", hex_in)
        .output("hex_key", hex_key)
        .output("hex_out", &hex_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 3 (Single-byte XOR cipher).
///
/// # Outputs
///
/// `hex_in` - The encrypted input as a hexadecimal string.
///
/// `hex_key` - The XOR key as a hexadecimal string.
///
/// `text_out` - The decrypted output as a text string.
pub fn challenge03() -> ChallengeResults {
    // The encrypted challenge input, given as a hex string.
    let hex_in = "1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736";
    let ciphertext = Data::from_hex(hex_in).unwrap();
    // Recover the most plausible single-byte XOR key.
    let (key, _) = attacks::xor::best_single_byte_key(&ciphertext);
    let hex_key = key.to_hex();
    // XOR with the recovered key to decrypt.
    let text_out = xor::xor(&ciphertext, &key).to_text();
    // Package the inputs and outputs for reporting.
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(3)
        .description("Single-byte XOR cipher")
        .output("hex_in", hex_in)
        .output("hex_key", &hex_key)
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 4 (Detect single-character XOR)
///
/// # Outputs
///
/// `hex_in` - The correct encrypted input as a hexadecimal string.
///
/// `hex_key` - The XOR key as a hexadecimal string.
///
/// `text_out` - The decrypted output as a text string.
pub fn challenge04() -> ChallengeResults {
    // Keep track of the best match so far.
    let mut best_data = Data::new();
    let mut best_key = Data::new();
    let mut best_score = 0.0;
    // Read in all of the hexstrings from file.
    // Panics if the input file is missing or a line is not valid hex.
    let file = File::open(&Path::new("input/set1challenge4.txt")).unwrap();
    let reader = BufReader::new(file);
    for line_it in reader.lines() {
        let line = line_it.unwrap();
        // Check if this line provides a better match
        // (strictly greater, so the earliest best-scoring line wins ties).
        let data = Data::from_hex(&line).unwrap();
        let (key, score) = attacks::xor::best_single_byte_key(&data);
        if score > best_score {
            best_data = data;
            best_key = key;
            best_score = score;
        }
    }
    let hex_in = best_data.to_hex();
    let hex_key = best_key.to_hex();
    // Decrypt the winning line with its recovered key.
    let text_out = xor::xor(&best_data, &best_key).to_text();
    // Return the results
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(4)
        .description("Detect single-character XOR")
        .output("hex_in", &hex_in)
        .output("hex_key", &hex_key)
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 5 (Implement repeating-key XOR)
///
/// # Outputs
///
/// `text_in` - The unencrypted input as a plain text string.
///
/// `text_key` - The key as a plain text string.
///
/// `hex out` - The encrypted output as a hexadecimal string.
pub fn challenge05() -> ChallengeResults {
    // The plaintext input and the repeating key.
    let text_in = "Burning 'em, if you ain't quick and nimble\n\
                   I go crazy when I hear a cymbal";
    let text_key = "ICE";
    // XOR the plaintext against the repeating key and hex-encode.
    let plaintext = Data::from_text(text_in);
    let keystream = Data::from_text(text_key);
    let hex_out = xor::xor(&plaintext, &keystream).to_hex();
    // Package the inputs and outputs for reporting.
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(5)
        .description("Implement repeating-key XOR")
        .output("text_in", text_in)
        .output("text_key", text_key)
        .output("hex_out", &hex_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 6 (Break repeating-key XOR)
///
/// # Outputs
///
/// `base64_in` - The encrypted input as a base 64 string.
///
/// `text_key` - The key as a plain text string.
///
/// `text_out` - The decrypted data as a plain text string.
pub fn challenge06() -> ChallengeResults {
    // Read the base-64 input from file, discarding the line breaks.
    let file = File::open(&Path::new("input/set1challenge6.txt")).unwrap();
    let base64_in: String = BufReader::new(file)
        .lines()
        .map(|line| line.unwrap())
        .collect();
    // Recover the most likely repeating-XOR key.
    let ciphertext = Data::from_base64(&base64_in).unwrap();
    let key = attacks::xor::best_repeating_key(&ciphertext);
    let text_key = key.to_text();
    // Decrypt with the recovered key.
    let text_out = xor::xor(&ciphertext, &key).to_text();
    // Package the inputs and outputs for reporting.
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(6)
        .description("Break repeating-key XOR")
        .output("base64_in", &base64_in)
        .output("text_key", &text_key)
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 7 (AES in ECB mode)
///
/// # Outputs
///
/// `base64_in` - The encrypted input as a base 64 string.
///
/// `text_key` - The key as a plain text string.
///
/// `text_out` - The decrypted output as a plain text string.
pub fn challenge07() -> ChallengeResults {
    // Get the base-64 input, concatenating the file's lines.
    let mut base64_in = "".to_string();
    let file = File::open(&Path::new("input/set1challenge7.txt")).unwrap();
    let reader = BufReader::new(file);
    for line_it in reader.lines() {
        base64_in.push_str(&line_it.unwrap());
    }
    // Get the key (16 bytes -> AES-128).
    let text_key = "YELLOW SUBMARINE";
    // Decrypt the data using AES-128-ECB with PKCS#7 padding removal.
    let data = Data::from_base64(&base64_in).unwrap();
    let key = Data::from_text(text_key);
    let block = BlockCipher::new(Algorithms::Aes,
                                 OperationModes::Ecb,
                                 PaddingSchemes::Pkcs7,
                                 &key)
        .unwrap();
    let text_out = block.decrypt(&data).unwrap().to_text();
    // Return the results
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(7)
        .description("AES in ECB mode")
        .output("base64_in", &base64_in)
        .output("text_key", text_key)
        .output("text_out", &text_out)
        .finalize()
}
/// Run the solution to Set 1 Challenge 8 (Detect AES in ECB mode)
///
/// # Outputs
///
/// `hex_in` - The correct encrypted input as a hexadecimal string.
pub fn challenge08() -> ChallengeResults {
    // Store all of the lines which are encrypted using ECB mode,
    // concatenated into a single hex string.
    let mut hex_in = "".to_string();
    // Read in all of the hexstrings from file.
    let file = File::open(&Path::new("input/set1challenge8.txt")).unwrap();
    let reader = BufReader::new(file);
    for line_it in reader.lines() {
        let line = line_it.unwrap();
        // Check if this line was encrypted using ECB.
        // ECB is stateless, so identical 16-byte plaintext blocks produce
        // identical ciphertext blocks — a repeated block betrays it.
        let data = Data::from_hex(&line).unwrap();
        if metrics::has_repeated_blocks(&data, 16) {
            hex_in.push_str(&data.to_hex());
        }
    }
    // Return the results
    ChallengeResultsBuilder::new()
        .set(1)
        .challenge(8)
        .description("Detect AES in ECB mode")
        .output("hex_in", &hex_in)
        .finalize()
}
#[cfg(test)]
mod tests {
#[test]
fn challenge01() {
let results = super::challenge01();
results.check("hex_in",
"49276d206b696c6c696e6720796f757220627261696e206c\
696b65206120706f69736f6e6f7573206d757368726f6f6d");
results.check("base64_out",
"SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t");
}
#[test]
fn challenge02() {
let results = super::challenge02();
results.check("hex_in", "1c0111001f010100061a024b53535009181c");
results.check("hex_key", "686974207468652062756c6c277320657965");
results.check("hex_out", "746865206b696420646f6e277420706c6179");
}
#[test]
fn challenge03() {
let results = super::challenge03();
results.check("hex_in",
"1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736");
results.check("hex_key", "58");
results.check("text_out", "Cooking MC's like a pound of bacon");
}
#[test]
fn challenge04() {
let results = super::challenge04();
results.check("hex_in", "7b5a4215415d544115415d5015455447414c155c46155f4058455c5b523f");
results.check("hex_key", "35");
results.check("text_out", "Now that the party is jumping\n");
}
#[test]
fn challenge05() {
let results = super::challenge05();
results.check("text_in",
"Burning 'em, if you ain't quick and nimble\n\
I go crazy when I hear a cymbal");
results.check("text_key", "ICE");
results.check("hex_out",
"0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272\
a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f");
}
#[test]
fn challenge06() {
let results = super::challenge06();
results.check_prefix("base64_in",
"HUIfTQsPAh9PE048GmllH0kcDk4TAQsHThsBFkU2AB4BSWQgVB0dQzNTTmVSBgBHVBwN\
RU0HBAxTEjwMHghJGgkRTxRMIRpHKwAFHUdZEQQJAGQmB1MANxYGDBoXQR0BUlQwXwAg\
EwoFR08SSAhFTmU+Fgk4RQYFCBpGB08fWXh+amI2DB0PQQ1IBlUaGwAdQnQEHgFJGg");
results.check("text_key", "Terminator X: Bring the noise");
results.check_prefix("text_out",
"I'm back and I'm ringin' the bell \n\
A rockin' on the mike while the fly girls yell \n\
In ecstasy in the back of me");
}
#[test]
fn challenge07() {
let results = super::challenge07();
results.check_prefix("base64_in",
"CRIwqt4+szDbqkNY+I0qbDe3LQz0wiw0SuxBQtAM5TDdMbjCMD/venUDW9BLPEXODbk6\
a48oMbAY6DDZsuLbc0uR9cp9hQ0QQGATyyCESq2NSsvhx5zKlLtzdsnfK5ED5srKjK7F\
z4Q38/ttd+stL/9WnDzlJvAo7WBsjI5YJc2gmAYayNfmCW2lhZE/ZLG0CBD2aPw0W4");
results.check("text_key", "YELLOW SUBMARINE");
results.check_prefix("text_out",
"I'm back and I'm ringin' the bell \n\
A rockin' on the mike while the fly girls yell \n\
In ecstasy in the back of me");
}
#[test]
fn challenge08() {
let results = super::challenge08();
results.check("hex_in",
"d880619740a8a19b7840a8a31c810a3d08649af70dc06f4fd5d2d69c744cd283e2dd052f6b6\
41dbf9d11b0348542bb5708649af70dc06f4fd5d2d69c744cd2839475c9dfdbc1d46597949d\
9c7e82bf5a08649af70dc06f4fd5d2d69c744cd28397a93eab8d6aecd566489154789a6b030\
8649af70dc06f4fd5d2d69c744cd283d403180c98c8f6db1f2a3f9c4040deb0ab51b29933f2\
c123c58386b06fba186a");
}
} |
#![no_std]
#![no_main]
extern crate panic_semihosting;
extern crate cortex_m_rt as rt;
extern crate cortex_m_semihosting as sh;
#[macro_use(entry, exception)]
extern crate microbit;
use core::fmt::Write;
use rt::ExceptionFrame;
use sh::hio;
use microbit::hal::prelude::*;
use microbit::hal::serial;
use microbit::hal::serial::BAUD115200;
exception!(HardFault, hard_fault);
fn hard_fault(ef: &ExceptionFrame) -> ! {
panic!("{:#?}", ef);
}
exception!(*, default_handler);
fn default_handler(irqn: i16) {
panic!("Unhandled exception (IRQn = {})", irqn);
}
entry!(main);
/// Firmware entry point: brings up semihosted stdout, the UART, and one LED,
/// then panics to stop (this demo never returns).
fn main() -> ! {
    let mut stdout = hio::hstdout().unwrap();
    writeln!(stdout, "Start").unwrap();
    if let Some(p) = microbit::Peripherals::take() {
        // Split GPIO into individually-owned pins.
        let mut gpio = p.GPIO.split();
        // Configure RX and TX pins accordingly.
        let tx = gpio.pin24.into_push_pull_output().downgrade();
        let rx = gpio.pin25.into_floating_input().downgrade();
        // Configure serial communication; only the TX half is kept.
        let (mut tx, _) = serial::Serial::uart0(p.UART0, tx, rx, BAUD115200).split();
        // `write!` returns a #[must_use] Result; there is no meaningful
        // recovery on a debug UART, so discard it explicitly rather than
        // silently dropping it (which triggers an unused_must_use warning).
        let _ = write!(tx, "serial - start\r\n");
        // Get row and column pins for the LED matrix.
        let mut led = gpio.pin13.into_push_pull_output();
        let _ = gpio.pin4.into_push_pull_output();
        // Set row high (column starts low) -> the LED at this row/column lights.
        led.set_high();
        let _ = write!(tx, "serial - LED on\r\n");
    }
    panic!("End");
}
|
//! Übertool - the universal math format converter.
#[cfg(feature = "mathml")]
extern crate xmltree;
pub mod ast;
pub mod error;
pub mod format;
|
use super::*;
use std::io::Cursor;
use std::thread;
use std::time::Duration;
#[test]
fn test_connection() {
    // Five bytes of known payload served by the test server.
    const TEST_DATA: &[u8; 5] = b"12345";
    // Serve TEST_DATA on a hard-coded port from a background thread.
    // NOTE(review): a fixed port (4000) can collide with other tests or
    // processes — binding port 0 and discovering the assigned port would
    // be more robust.
    thread::spawn(move|| {
        // Start up the server
        Networked::new(Cursor::new(&TEST_DATA[..]), ("127.0.0.1", 4000))
            .unwrap()
            .listen()
            .unwrap();
    });
    // Wait for the server to start up.
    // NOTE(review): a fixed 500 ms sleep is a race, not a synchronization;
    // this can be flaky on a loaded machine.
    thread::sleep(Duration::from_millis(500));
    // Connect to it
    let mut reader = NetworkReader::new(("127.0.0.1", 4000)).unwrap();
    let mut buf = [0u8; 4];
    // Seek past the first byte; the remaining four bytes must match.
    reader.seek(SeekFrom::Start(1)).unwrap();
    reader.read_exact(&mut buf).unwrap();
    assert_eq!(&buf[..], &TEST_DATA[1..]);
}
|
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
// Silence warning: "`extern` block uses type `u128`, which is not FFI-safe"
#![allow(improper_ctypes)]
include!(concat!(env!("OUT_DIR"), "/kernel_bindings_generated.rs"));
|
//! Interrupt Descriptor Table functionalitity
#![no_std]
#![feature(abi_x86_interrupt)]
#![feature(global_asm)]
#![feature(asm)]
/// Calls the load IDT function, loading the table into the cpu
pub fn init_idt() {
IDT.load();
}
use coop::keyboard;
use coop::mouse;
use lazy_static::lazy_static;
use memory::active_level_4_table;
use memory::swap_to_kernel_table;
use os_units::NumOfPages;
use printer::{print, println};
use serial::serial_println;
use task::scheduler::Scheduler;
use x86_64::structures::idt::InterruptDescriptorTable;
pub mod syscall;
lazy_static! {
    ///Static Interrupt Descriptor Table with all of the registered interrupt types and their handler functions
    pub static ref IDT: InterruptDescriptorTable = {
        let mut idt = InterruptDescriptorTable::new();
        // CPU exception handlers.
        idt.breakpoint.set_handler_fn(breakpoint_handler);
        idt.overflow.set_handler_fn(overflow_handler);
        unsafe {
            // SAFETY: DOUBLE_FAULT_INDEX must refer to a valid IST slot set
            // up in the GDT, so the handler runs on a known-good stack even
            // when the faulting stack is corrupt.
            idt.double_fault.set_handler_fn(double_fault_handler).set_stack_index(gdt::DOUBLE_FAULT_INDEX);
        }
        idt.bound_range_exceeded.set_handler_fn(bound_range_handler);
        idt.segment_not_present.set_handler_fn(segment_not_present_handler);
        idt.alignment_check.set_handler_fn(alignment_handler);
        idt.invalid_opcode.set_handler_fn(invalid_op_handler);
        idt.invalid_tss.set_handler_fn(invalid_tss_handler);
        idt.stack_segment_fault.set_handler_fn(stack_segment_handler);
        idt.security_exception.set_handler_fn(security_exception_handler);
        // Fill every remapped PIC vector (PIC_1_OFFSET..PIC_2_OFFSET+8,
        // i.e. 32..48) with a catch-all handler first...
        for i in PIC_1_OFFSET..(PIC_2_OFFSET + 8) {
            idt[i as usize].set_handler_fn(tmp_handler);
        }
        // ...then overwrite the vectors that are actually serviced.
        idt[InterruptIndex::Timer.as_usize()].set_handler_fn(timer_interrupt_handler);
        idt[InterruptIndex::Keyboard.as_usize()].set_handler_fn(keyboard_interrupt_handler);
        idt[InterruptIndex::PrimATA.as_usize()].set_handler_fn(ata_interrupt_handler);
        idt[InterruptIndex::Mouse.as_usize()].set_handler_fn(mouse_interrupt_handler);
        // Legacy software-interrupt syscall vector.
        idt[0x80].set_handler_fn(syscall);
        idt.page_fault.set_handler_fn(page_fault_handler);
        idt.divide_error.set_handler_fn(divide_error_handler);
        idt.general_protection_fault.set_handler_fn(general_protection_handler);
        idt.virtualization.set_handler_fn(virtualization_handler);
        idt
    };
}
///Legacy `int 0x80` syscall entry: dispatches to SYSTEM_CALLS by number.
extern "x86-interrupt" fn syscall(_: InterruptStackFrame) {
    // unsafe { syscall(4, 5, 6 ,7)};
    let call_num: u64;
    let param1: u64;
    let param2: u64;
    let param3: u64;
    // NOTE(review): reading the caller's rax/rdi/rsi/rdx with separate asm!
    // statements after the compiler-generated prologue is fragile — the
    // prologue / register allocation may already have clobbered these
    // registers before the reads run. A naked entry stub that saves the
    // registers first would be reliable; confirm this works as intended.
    unsafe {
        asm!("mov {}, rax", out(reg) call_num);
        asm!("mov {}, rdi", out(reg) param1);
        asm!("mov {}, rsi", out(reg) param2);
        asm!("mov {}, rdx", out(reg) param3);
    }
    // Ignore out-of-range syscall numbers instead of panicking.
    if (call_num as usize) < SYSTEM_CALLS.len() {
        SYSTEM_CALLS[call_num as usize](param1, param2, param3);
    }
    unsafe {
        // NOTE(review): 0x80 is a software interrupt, not a PIC-delivered
        // vector; sending the PIC an EOI for it looks unnecessary — confirm.
        PICS.lock().notify_end_of_interrupt(0x80);
    }
}
use pic8259::ChainedPics;
/// Static PICS controller wrapped in a Mutex
pub static PICS: spin::Mutex<ChainedPics> =
spin::Mutex::new(unsafe { ChainedPics::new(PIC_1_OFFSET, PIC_2_OFFSET) });
/// Remapped PIC 1 controller offset in the interrupt controller table
pub const PIC_1_OFFSET: u8 = 32;
/// Remapped PIC 2 controller offset in the interrupt controller table
pub const PIC_2_OFFSET: u8 = PIC_1_OFFSET + 8;
pub static READY: spin::Mutex<bool> = spin::Mutex::new(false);
use x86_64::structures::idt::InterruptStackFrame;
use x86_64::structures::idt::SelectorErrorCode;
extern "x86-interrupt" fn general_protection_handler(
stack_frame: InterruptStackFrame,
error_code: u64,
) {
panic!(
"EXCEPTION: GENERAL PROTECTION FAULT\n{:#?}\nERROR CODE : {:#?}",
stack_frame,
SelectorErrorCode::new(error_code)
);
}
extern "x86-interrupt" fn divide_error_handler(stack_frame: InterruptStackFrame) {
panic!("EXCEPTION: DIVIDE ERROR\n{:#?}", stack_frame);
}
extern "x86-interrupt" fn security_exception_handler(
stack_frame: InterruptStackFrame,
error_code: u64,
) {
panic!(
"EXCEPTION: SECURITY EXCEPTION ERROR\n{:#?}\nERROR CODE : {:#?}",
stack_frame,
SelectorErrorCode::new(error_code)
);
}
extern "x86-interrupt" fn stack_segment_handler(stack_frame: InterruptStackFrame, error_code: u64) {
panic!(
"EXCEPTION: STACK SEGMENT FAULT\n{:#?}\nERROR CODE : {:#?}",
stack_frame,
SelectorErrorCode::new(error_code)
);
}
extern "x86-interrupt" fn invalid_tss_handler(stack_frame: InterruptStackFrame, error_code: u64) {
panic!(
"EXCEPTION: INVALID TSS\n{:#?}\nERROR CODE : {:#?}",
stack_frame,
SelectorErrorCode::new(error_code)
);
}
extern "x86-interrupt" fn invalid_op_handler(stack_frame: InterruptStackFrame) {
panic!("EXCEPTION: INVALID OPCODE\n{:#?}\n", stack_frame);
}
extern "x86-interrupt" fn alignment_handler(stack_frame: InterruptStackFrame, error_code: u64) {
panic!(
"EXCEPTION: OUT OF ALIGNMENT\n{:#?}\nERROR CODE : {:#?}",
stack_frame,
SelectorErrorCode::new(error_code)
);
}
extern "x86-interrupt" fn segment_not_present_handler(
stack_frame: InterruptStackFrame,
error_code: u64,
) {
panic!(
"EXCEPTION: SEGMENT NOT PRESENT\n{:#?}\nERROR CODE : {:#?}",
stack_frame,
SelectorErrorCode::new(error_code)
);
}
extern "x86-interrupt" fn bound_range_handler(stack_frame: InterruptStackFrame) {
panic!("EXCEPTION: BOUND RANGE EXCEEDED\n{:#?}\n", stack_frame);
}
extern "x86-interrupt" fn virtualization_handler(stack_frame: InterruptStackFrame) {
panic!("EXCEPTION: VIRT EXCEPTION\n{:#?}\n", stack_frame);
}
extern "x86-interrupt" fn tmp_handler(stack_frame: InterruptStackFrame) {
panic!("EXCEPTION: TEMP EXCEPTION\n{:#?}\n", stack_frame);
}
///Overflow (#OF) handler: reports the faulting stack frame and halts the cpu.
extern "x86-interrupt" fn overflow_handler(stack_frame: InterruptStackFrame) {
    // The previous message said "PAGE FAULT" and dumped Cr2 — a copy/paste
    // slip: Cr2 only holds a faulting address for page faults, not for the
    // overflow exception.
    println!("EXCEPTION: OVERFLOW");
    println!("{:#?}", stack_frame);
    loop {
        x86_64::instructions::hlt();
    }
}
///Doesnt do anything at the moment
///TODO: Notify the ata caller that the ata controller is ready
extern "x86-interrupt" fn ata_interrupt_handler(_stack_frame: InterruptStackFrame) {
unsafe {
PICS.lock()
.notify_end_of_interrupt(InterruptIndex::PrimATA.as_u8());
}
}
///Reads the key code from 0x60 port and adds that to the keyboard task handler
extern "x86-interrupt" fn mouse_interrupt_handler(_stack_frame: InterruptStackFrame) {
use x86_64::instructions::port::Port;
let mut port = Port::new(0x60);
let scancode: u8 = unsafe { port.read() };
unsafe { mouse::add_scancode(scancode) };
unsafe {
PICS.lock()
.notify_end_of_interrupt(InterruptIndex::Mouse.as_u8());
}
}
///Reads the key code from 0x60 port and adds that to the keyboard task handler
extern "x86-interrupt" fn keyboard_interrupt_handler(_stack_frame: InterruptStackFrame) {
use x86_64::instructions::port::Port;
let mut port = Port::new(0x60);
let scancode: u8 = unsafe { port.read() };
keyboard::add_scancode(scancode);
unsafe {
PICS.lock()
.notify_end_of_interrupt(InterruptIndex::Keyboard.as_u8());
}
}
use x86_64::registers::control::Cr2;
use x86_64::structures::idt::PageFaultErrorCode;
use x86_64::structures::paging::Mapper;
use x86_64::structures::paging::Page;
use x86_64::structures::paging::PageTableFlags;
use x86_64::structures::paging::RecursivePageTable;
use x86_64::structures::paging::Size4KiB;
use crate::syscall::SYSTEM_CALLS;
///Page fault handler prints out the respective errors and stack frame and halts cpu execution
extern "x86-interrupt" fn page_fault_handler(
    _stack_frame: InterruptStackFrame,
    _error_code: PageFaultErrorCode,
) {
    // Cr2 holds the virtual address whose access faulted.
    let acc_addr = Cr2::read();
    // println!("EXCEPTION: PAGE FAULT");
    // println!("Accessed Address: {:?}", acc_addr);
    // println!("Error Code: {:?}", _error_code);
    // println!("{:#?}", _stack_frame);
    // NOTE(review): matching a bitflags value like this only hits when the
    // error code EXACTLY equals INSTRUCTION_FETCH or CAUSED_BY_WRITE; any
    // combined code (e.g. USER_MODE | CAUSED_BY_WRITE) falls through to the
    // halt loop below — confirm that is intended.
    match _error_code {
        PageFaultErrorCode::INSTRUCTION_FETCH | PageFaultErrorCode::CAUSED_BY_WRITE => {
            let mut rpt = RecursivePageTable::new(memory::active_level_4_table()).unwrap();
            let page = Page::<Size4KiB>::containing_address(acc_addr);
            let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
            unsafe {
                match rpt.update_flags(page, flags) {
                    // NOTE(review): the Ok value is a MapperFlush that should
                    // be `.flush()`ed; dropping it can leave a stale TLB entry.
                    Ok(_) => (),
                    Err(x86_64::structures::paging::mapper::FlagUpdateError::PageNotMapped) => {
                        // Demand-allocate one page at the faulting address.
                        memory::virt::allocate_pages(acc_addr, NumOfPages::<Size4KiB>::new(1))
                    }
                    Err(e) => panic!("{:#?}", e),
                }
                // NOTE(review): page faults are CPU exceptions, not PIC
                // interrupts; the EOI for vector 0xE looks unnecessary.
                PICS.lock().notify_end_of_interrupt(0xE);
            }
        }
        _ => loop {
            x86_64::instructions::hlt();
        },
    }
}
///Used for task time slices
extern "x86-interrupt" fn timer_interrupt_handler(_stack_frame: InterruptStackFrame) {
    // NOTE(review): println! inside an interrupt handler can deadlock if the
    // interrupted code already holds the printer lock — confirm acceptable.
    println!("*");
    unsafe {
        // Acknowledge the tick first so the PIC can deliver the next one.
        PICS.lock()
            .notify_end_of_interrupt(InterruptIndex::Timer.as_u8());
    }
    // Only start preemptive scheduling once the rest of the kernel has
    // flagged itself ready.
    if *READY.lock() {
        x86_64::instructions::interrupts::disable();
        // Debug dump of the occupied level-4 page table entries.
        let mut rpt = RecursivePageTable::new(active_level_4_table()).unwrap();
        for (i, entry) in rpt.level_4_table().iter().enumerate() {
            if !entry.is_unused() {
                println!("{} {:#?}", i, entry);
            }
        }
        println!("Disabled int {:p}", &IDT);
        Scheduler::run();
    }
    // println!("ending timer interrupt");
}
///Double fault interrupt panics and prints the stack frame
extern "x86-interrupt" fn double_fault_handler(
stack_frame: InterruptStackFrame,
_error_code: u64,
) -> ! {
panic!("EXCEPTION: DOUBLE FAULT\n{:#?}", stack_frame);
}
///Breakpoints print out the stack frame at a specified breakpoint
extern "x86-interrupt" fn breakpoint_handler(stack_frame: InterruptStackFrame) {
println!("EXCEPTION: BREAKPOINT\n{:#?}", stack_frame)
}
/// Interrupt Index enum with all of the different interrupt handler types
#[derive(Debug, Clone, Copy)]
#[repr(u8)]
pub enum InterruptIndex {
Timer = PIC_1_OFFSET,
Keyboard,
Cascade,
COM2,
COM1,
LPT2,
FloppyDisk,
LPT1,
CmosRealTimeClock,
ACPI,
Free1,
Free2,
Mouse,
Coprocessor,
PrimATA,
SecoATA,
}
impl InterruptIndex {
/// u8 representation from the PIC_1_OFFSET
pub fn as_u8(self) -> u8 {
self as u8
}
/// usize representation from the PIC_1_OFFSET
fn as_usize(self) -> usize {
usize::from(self.as_u8())
}
}
|
// solution to r/dailyprogrammer challenge #290 [Intermediate] Blinking LEDs
// https://www.reddit.com/r/dailyprogrammer/comments/5as91q/20161102_challenge_290_intermediate_blinking_leds/
// port of the c++ version
use std::fs::File;
use std::io::prelude::*;
use std::collections::HashMap;
/* grammar of our mini programming language:
<line>: <whitespace> <instruction> |
<label> |
<empty>
<instruction> : ld a,<num> |
ld b,<num> |
out (0),a |
rlca |
rrca |
djnz <labelref>
*/
/// A single parsed source line.
#[derive(Debug, PartialEq)]
enum Line {
    /// An executable instruction.
    Instruction( Instruction ),
    /// A label definition (name without the trailing ':').
    Label( String ),
    /// A blank (or whitespace-only) line.
    Empty,
    /// Anything that failed to parse.
    ParseError
}
/// One executable instruction of the mini language.
#[derive(Debug, PartialEq)]
enum Instruction {
    /// `ld a,<num>` — load register A.
    LoadA( u8 ),
    /// `ld b,<num>` — load register B.
    LoadB( u8 ),
    /// `out (0),a` — print register A as an LED strip.
    Out,
    /// `rlca` — rotate A left by one bit.
    Rlca,
    /// `rrca` — rotate A right by one bit.
    Rrca,
    /// `djnz <label>` — `index` is the label's resolved instruction index.
    Djnz{ index: usize },
}
impl Instruction {
    /// Renders the instruction back into its source form (debug helper).
    #[allow(dead_code)]
    fn to_string(&self) -> String {
        match *self {
            Instruction::LoadA(a) => format!("ld a,{}", a),
            Instruction::LoadB(b) => format!("ld b,{}", b),
            Instruction::Out => String::from("out (0),a"),
            Instruction::Rlca => String::from("rlca"),
            Instruction::Rrca => String::from("rrca"),
            Instruction::Djnz { index } => format!("djnz {}", index),
        }
    }
}
/// Renders register A as an 8-character LED strip: '*' for a set bit and
/// '.' for a clear bit, most significant bit first.
fn leds_to_string(register_a: u8) -> String {
    (0..8)
        .map(|shift| 0x80u8 >> shift)
        .map(|mask| if register_a & mask == 0 { '.' } else { '*' })
        .collect()
}
/// Parses one source line into a `Line`: exact keywords first, then the
/// five-byte prefixes of the parameterised instructions, then a label as
/// the fallback.
fn parse(line: &str, labels: &HashMap<String, usize>) -> Line {
    // note: allowing the following even though they are technically not part of the problem description:
    // line -> <whitespace> <instruction> <whitespace>
    // line -> <whitespace> <label> <whitespace>
    let trimmed: &str = line.trim();
    match trimmed {
        "rlca" => Line::Instruction(Instruction::Rlca),
        "rrca" => Line::Instruction(Instruction::Rrca),
        "out (0),a" => Line::Instruction(Instruction::Out),
        "" => Line::Empty,
        _ => {
            // `get` yields None when the line is shorter than five bytes OR
            // when byte 5 is not a char boundary; the previous
            // `&trimmed[0..5]` indexing would panic on non-ASCII input
            // whose fifth byte falls inside a multi-byte character.
            match trimmed.get(0..5) {
                Some("ld a,") => try_register_load(trimmed, true),
                Some("ld b,") => try_register_load(trimmed, false),
                Some("djnz ") => try_jump(trimmed, labels),
                _ => try_label(trimmed, labels),
            }
        },
    }
}
/// Resolves a `djnz <label>` line against the labels seen so far.
fn try_jump(trimmed: &str, labels: &HashMap<String, usize>) -> Line {
    // Everything after the 5-byte "djnz " prefix is the label name.
    let target = &trimmed[5..];
    match labels.get(target) {
        Some(&index) => Line::Instruction(Instruction::Djnz { index }),
        None => Line::ParseError,
    }
}
/// Tries to interpret a trimmed line as a label definition (`name:`).
fn try_label(trimmed: &str, labels: &HashMap<String, usize>) -> Line {
    let mut name = trimmed.to_owned();
    // The last character of the line must be the colon terminator.
    if name.pop() != Some(':') {
        return Line::ParseError;
    }
    // The name must be a valid identifier and must not redefine a label.
    if is_identifier(&name) && !labels.contains_key(&name) {
        Line::Label(name)
    } else {
        Line::ParseError
    }
}
/// Returns true when every character of `chars` is allowed in a label name
/// (alphanumerics and '_'). Vacuously true for the empty string, matching
/// the original loop-based implementation.
fn is_identifier(chars: &str) -> bool {
    chars.chars().all(|c| c.is_alphanumeric() || c == '_')
}
/// Parses the numeric operand of an `ld a,<num>` / `ld b,<num>` line.
/// `register_a == false` means the instruction targets register B.
fn try_register_load(trimmed: &str, register_a: bool) -> Line {
    // The operand is everything after the 5-byte "ld x," prefix and must
    // fit in a u8.
    match (register_a, trimmed[5..].parse::<u8>()) {
        (true, Ok(value)) => Line::Instruction(Instruction::LoadA(value)),
        (false, Ok(value)) => Line::Instruction(Instruction::LoadB(value)),
        (_, Err(_)) => Line::ParseError,
    }
}
/// Loads, parses and executes a program file: pass 1 turns each line into
/// an instruction or a label (labels map to the index of the NEXT
/// instruction); pass 2 interprets the instruction list.
///
/// Note: `parse` resolves `djnz` against labels seen so far, so a jump to
/// a label defined LATER in the file reports a parse error — labels must
/// precede the `djnz` that uses them.
fn run_file(file_name: &str) {
    let mut file = File::open(file_name).expect(&(format!("Failed to open {}", file_name)));
    let mut contents = String::new();
    file.read_to_string(&mut contents).expect(&(format!("Failed to read from {}", file_name)));
    let mut instructions: Vec<Instruction> = vec![];
    let mut labels: HashMap<String, usize> = HashMap::new();
    // parse the input file into instructions
    for line in contents.lines() {
        match parse(line, &labels) {
            Line::Label(name) => { labels.insert(String::from(name), instructions.len()); },
            Line::Instruction(instruction) => instructions.push(instruction),
            Line::Empty => (),
            // Bad lines are reported and skipped; execution continues.
            Line::ParseError => println!("Error parsing line:\n '{}'", line),
        }
    }
    //for inst in instructions {
    //    println!("{}", inst.to_string());
    //}
    // execute the instructions
    let mut register_a: u8 = 0;
    let mut register_b: u8 = 0;
    let mut instruction_index: usize = 0;
    while instruction_index < instructions.len() {
        match instructions[instruction_index] {
            Instruction::LoadA(a) => register_a = a,
            Instruction::LoadB(b) => register_b = b,
            Instruction::Out => { println!("{}", leds_to_string(register_a)); },
            Instruction::Rlca => register_a = register_a.rotate_left(1),
            Instruction::Rrca => register_a = register_a.rotate_right(1),
            // djnz: decrement B, then jump while B is still non-zero.
            // NOTE(review): with B == 0 this is a no-op; a real Z80 DJNZ
            // would wrap to 255 and loop 256 times — presumably the no-op
            // is the intended behaviour here, confirm against the spec.
            Instruction::Djnz{index: i} => {
                if register_b > 0 {
                    register_b -= 1;
                }
                if register_b > 0 {
                    instruction_index = i;
                    continue; // don't want to add 1 in this case, so skip that part
                }
            },
        }
        instruction_index += 1;
    }
}
/// Runs the sample program; the earlier challenge inputs are left
/// commented out and only input4.txt is executed.
fn main() {
    //run_file("input1.txt");
    //run_file("input2.txt");
    //run_file("input3.txt");
    run_file("input4.txt");
}
#[test]
#[allow(dead_code)]
fn test_try_register_load() {
    // Valid loads for each register. (The original repeated the first
    // assertion twice; the duplicate is removed.)
    assert_eq!(try_register_load("ld a,4", true), Line::Instruction(Instruction::LoadA(4)));
    assert_eq!(try_register_load("ld b,4", false), Line::Instruction(Instruction::LoadB(4)));
    // Out-of-range (doesn't fit a u8), non-numeric, and missing operands fail.
    assert_eq!(try_register_load("ld a,400", true), Line::ParseError);
    assert_eq!(try_register_load("ld a,a123", true), Line::ParseError);
    assert_eq!(try_register_load("ld a,", true), Line::ParseError);
}
#[test]
#[allow(dead_code)]
fn test_try_label() {
let labels: HashMap<String, usize> = HashMap::new();
assert_eq!(try_label("lbl:", &labels), Line::Label("lbl".to_owned())); // String::from() ?
assert_eq!(try_label("longer_label:", &labels), Line::Label("longer_label".to_owned()));
assert_eq!(try_label("bad:label:", &labels), Line::ParseError);
assert_eq!(try_label("toomanycolons::", &labels), Line::ParseError);
assert_eq!(try_label("foo", &labels), Line::ParseError);
assert_eq!(try_label(":bar", &labels), Line::ParseError);
}
#[test]
#[allow(dead_code)]
fn test_try_jump() {
let mut labels: HashMap<String, usize> = HashMap::new();
labels.insert("label".to_string(), 0);
assert_eq!(try_jump("djnz label", &labels), Line::Instruction(Instruction::Djnz{ index: 0 }));
}
#[test]
#[allow(dead_code)]
fn test_parse() {
let labels: HashMap<String, usize> = HashMap::new();
// make sure correct boolean is supplied to try_register_load()
assert_eq!(parse(" ld a,4", &labels), Line::Instruction(Instruction::LoadA(4)));
assert_eq!(parse("ld b,4 ", &labels), Line::Instruction(Instruction::LoadB(4)));
// test instructions that don't have a dedicated function
assert_eq!(parse(" ", &labels), Line::Empty);
assert_eq!(parse(" rlca ", &labels), Line::Instruction(Instruction::Rlca));
assert_eq!(parse(" rrca ", &labels), Line::Instruction(Instruction::Rrca));
assert_eq!(parse(" out (0),a ", &labels), Line::Instruction(Instruction::Out));
// note: whitespace handling is covered in above tests
}
#[test]
#[allow(dead_code)]
fn test_leds_to_string() {
    // One character per bit, MSB first: `*` for a lit LED, `.` for unlit.
    let cases = [
        (0x80, "*......."),
        (0x55, ".*.*.*.*"),
        (0xAA, "*.*.*.*."),
        (0x0F, "....****"),
    ];
    for &(byte, expected) in &cases {
        assert_eq!(leds_to_string(byte), expected);
    }
}
|
#[doc = "Reader of register RCC_MP_AHB6ENSETR"]
pub type R = crate::R<u32, super::RCC_MP_AHB6ENSETR>;
#[doc = "Writer for register RCC_MP_AHB6ENSETR"]
pub type W = crate::W<u32, super::RCC_MP_AHB6ENSETR>;
#[doc = "Register RCC_MP_AHB6ENSETR `reset()`'s with value 0"]
impl crate::ResetValue for super::RCC_MP_AHB6ENSETR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Register resets to all-zeroes: every enable bit starts cleared.
        0
    }
}
/// MDMAEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MDMAEN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<MDMAEN_A> for bool {
    #[inline(always)]
    fn from(variant: MDMAEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `MDMAEN`
pub type MDMAEN_R = crate::R<bool, MDMAEN_A>;
impl MDMAEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> MDMAEN_A {
        if self.bits { MDMAEN_A::B_0X1 } else { MDMAEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == MDMAEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == MDMAEN_A::B_0X1
    }
}
/// Write proxy for field `MDMAEN`
pub struct MDMAEN_W<'a> {
    w: &'a mut W,
}
impl<'a> MDMAEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: MDMAEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(MDMAEN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(MDMAEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
/// GPUEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum GPUEN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<GPUEN_A> for bool {
    #[inline(always)]
    fn from(variant: GPUEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `GPUEN`
pub type GPUEN_R = crate::R<bool, GPUEN_A>;
impl GPUEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> GPUEN_A {
        if self.bits { GPUEN_A::B_0X1 } else { GPUEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == GPUEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == GPUEN_A::B_0X1
    }
}
/// Write proxy for field `GPUEN`
pub struct GPUEN_W<'a> {
    w: &'a mut W,
}
impl<'a> GPUEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: GPUEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(GPUEN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(GPUEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 5, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
/// ETHCKEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ETHCKEN_A {
    /// 0: Writing has no effect, reading means that eth_ker_ck clock is disabled
    B_0X0 = 0,
    /// 1: Writing enables the eth_ker_ck clock, reading means that the eth_ker_ck clock is enabled
    B_0X1 = 1,
}
impl From<ETHCKEN_A> for bool {
    #[inline(always)]
    fn from(variant: ETHCKEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `ETHCKEN`
pub type ETHCKEN_R = crate::R<bool, ETHCKEN_A>;
impl ETHCKEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> ETHCKEN_A {
        if self.bits { ETHCKEN_A::B_0X1 } else { ETHCKEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == ETHCKEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == ETHCKEN_A::B_0X1
    }
}
/// Write proxy for field `ETHCKEN`
pub struct ETHCKEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ETHCKEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: ETHCKEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that eth_ker_ck clock is disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(ETHCKEN_A::B_0X0)
    }
    /// Writing enables the eth_ker_ck clock, reading means that the eth_ker_ck clock is enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(ETHCKEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 7, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
/// ETHTXEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ETHTXEN_A {
    /// 0: Writing has no effect, reading means that the transmission clock is disabled
    B_0X0 = 0,
    /// 1: Writing enables the transmission clock, reading means that the transmission clock is enabled
    B_0X1 = 1,
}
impl From<ETHTXEN_A> for bool {
    #[inline(always)]
    fn from(variant: ETHTXEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `ETHTXEN`
pub type ETHTXEN_R = crate::R<bool, ETHTXEN_A>;
impl ETHTXEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> ETHTXEN_A {
        if self.bits { ETHTXEN_A::B_0X1 } else { ETHTXEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == ETHTXEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == ETHTXEN_A::B_0X1
    }
}
/// Write proxy for field `ETHTXEN`
pub struct ETHTXEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ETHTXEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: ETHTXEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the transmission clock is disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(ETHTXEN_A::B_0X0)
    }
    /// Writing enables the transmission clock, reading means that the transmission clock is enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(ETHTXEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 8, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
/// ETHRXEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ETHRXEN_A {
    /// 0: Writing has no effect, reading means that the reception clock is disabled
    B_0X0 = 0,
    /// 1: Writing enables the reception clock, reading means that the reception clock is enabled
    B_0X1 = 1,
}
impl From<ETHRXEN_A> for bool {
    #[inline(always)]
    fn from(variant: ETHRXEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `ETHRXEN`
pub type ETHRXEN_R = crate::R<bool, ETHRXEN_A>;
impl ETHRXEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> ETHRXEN_A {
        if self.bits { ETHRXEN_A::B_0X1 } else { ETHRXEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == ETHRXEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == ETHRXEN_A::B_0X1
    }
}
/// Write proxy for field `ETHRXEN`
pub struct ETHRXEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ETHRXEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: ETHRXEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the reception clock is disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(ETHRXEN_A::B_0X0)
    }
    /// Writing enables the reception clock, reading means that the reception clock is enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(ETHRXEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 9, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
/// ETHMACEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ETHMACEN_A {
    /// 0: Writing has no effect, reading means that the bus interface clock is disabled
    B_0X0 = 0,
    /// 1: Writing enables the bus interface clock, reading means that the bus interface clock is enabled
    B_0X1 = 1,
}
impl From<ETHMACEN_A> for bool {
    #[inline(always)]
    fn from(variant: ETHMACEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `ETHMACEN`
pub type ETHMACEN_R = crate::R<bool, ETHMACEN_A>;
impl ETHMACEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> ETHMACEN_A {
        if self.bits { ETHMACEN_A::B_0X1 } else { ETHMACEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == ETHMACEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == ETHMACEN_A::B_0X1
    }
}
/// Write proxy for field `ETHMACEN`
pub struct ETHMACEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ETHMACEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: ETHMACEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the bus interface clock is disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(ETHMACEN_A::B_0X0)
    }
    /// Writing enables the bus interface clock, reading means that the bus interface clock is enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(ETHMACEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 10, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
/// FMCEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FMCEN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<FMCEN_A> for bool {
    #[inline(always)]
    fn from(variant: FMCEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `FMCEN`
pub type FMCEN_R = crate::R<bool, FMCEN_A>;
impl FMCEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> FMCEN_A {
        if self.bits { FMCEN_A::B_0X1 } else { FMCEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == FMCEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == FMCEN_A::B_0X1
    }
}
/// Write proxy for field `FMCEN`
pub struct FMCEN_W<'a> {
    w: &'a mut W,
}
impl<'a> FMCEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: FMCEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(FMCEN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(FMCEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 12, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
/// QSPIEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum QSPIEN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<QSPIEN_A> for bool {
    #[inline(always)]
    fn from(variant: QSPIEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `QSPIEN`
pub type QSPIEN_R = crate::R<bool, QSPIEN_A>;
impl QSPIEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> QSPIEN_A {
        if self.bits { QSPIEN_A::B_0X1 } else { QSPIEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == QSPIEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == QSPIEN_A::B_0X1
    }
}
/// Write proxy for field `QSPIEN`
pub struct QSPIEN_W<'a> {
    w: &'a mut W,
}
impl<'a> QSPIEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: QSPIEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(QSPIEN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(QSPIEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 14, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
/// SDMMC1EN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SDMMC1EN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<SDMMC1EN_A> for bool {
    #[inline(always)]
    fn from(variant: SDMMC1EN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `SDMMC1EN`
pub type SDMMC1EN_R = crate::R<bool, SDMMC1EN_A>;
impl SDMMC1EN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> SDMMC1EN_A {
        if self.bits { SDMMC1EN_A::B_0X1 } else { SDMMC1EN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SDMMC1EN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SDMMC1EN_A::B_0X1
    }
}
/// Write proxy for field `SDMMC1EN`
pub struct SDMMC1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SDMMC1EN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: SDMMC1EN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SDMMC1EN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SDMMC1EN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 16, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
/// SDMMC2EN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SDMMC2EN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<SDMMC2EN_A> for bool {
    #[inline(always)]
    fn from(variant: SDMMC2EN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `SDMMC2EN`
pub type SDMMC2EN_R = crate::R<bool, SDMMC2EN_A>;
impl SDMMC2EN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> SDMMC2EN_A {
        if self.bits { SDMMC2EN_A::B_0X1 } else { SDMMC2EN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SDMMC2EN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SDMMC2EN_A::B_0X1
    }
}
/// Write proxy for field `SDMMC2EN`
pub struct SDMMC2EN_W<'a> {
    w: &'a mut W,
}
impl<'a> SDMMC2EN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: SDMMC2EN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SDMMC2EN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SDMMC2EN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 17, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
/// CRC1EN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CRC1EN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<CRC1EN_A> for bool {
    #[inline(always)]
    fn from(variant: CRC1EN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `CRC1EN`
pub type CRC1EN_R = crate::R<bool, CRC1EN_A>;
impl CRC1EN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> CRC1EN_A {
        if self.bits { CRC1EN_A::B_0X1 } else { CRC1EN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == CRC1EN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == CRC1EN_A::B_0X1
    }
}
/// Write proxy for field `CRC1EN`
pub struct CRC1EN_W<'a> {
    w: &'a mut W,
}
impl<'a> CRC1EN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: CRC1EN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(CRC1EN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(CRC1EN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 20, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
        self.w
    }
}
/// USBHEN
///
/// Value on reset: 0
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBHEN_A {
    /// 0: Writing has no effect, reading means that the peripheral clocks are disabled
    B_0X0 = 0,
    /// 1: Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    B_0X1 = 1,
}
impl From<USBHEN_A> for bool {
    #[inline(always)]
    fn from(variant: USBHEN_A) -> Self {
        // Discriminants are 0 and 1, so B_0X1 maps to `true`.
        variant as u8 != 0
    }
}
/// Reader of field `USBHEN`
pub type USBHEN_R = crate::R<bool, USBHEN_A>;
impl USBHEN_R {
    /// Get enumerated values variant
    #[inline(always)]
    pub fn variant(&self) -> USBHEN_A {
        if self.bits { USBHEN_A::B_0X1 } else { USBHEN_A::B_0X0 }
    }
    /// Checks if the value of the field is `B_0X0`
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == USBHEN_A::B_0X0
    }
    /// Checks if the value of the field is `B_0X1`
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == USBHEN_A::B_0X1
    }
}
/// Write proxy for field `USBHEN`
pub struct USBHEN_W<'a> {
    w: &'a mut W,
}
impl<'a> USBHEN_W<'a> {
    /// Writes `variant` to the field
    #[inline(always)]
    pub fn variant(self, variant: USBHEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    /// Writing has no effect, reading means that the peripheral clocks are disabled
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(USBHEN_A::B_0X0)
    }
    /// Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(USBHEN_A::B_0X1)
    }
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 24, then OR in the new value at that position.
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
impl R {
    // Each accessor extracts one enable bit from the captured register value;
    // the shift amount matches the field's bit position in RCC_MP_AHB6ENSETR.
    #[doc = "Bit 0 - MDMAEN"]
    #[inline(always)]
    pub fn mdmaen(&self) -> MDMAEN_R {
        MDMAEN_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 5 - GPUEN"]
    #[inline(always)]
    pub fn gpuen(&self) -> GPUEN_R {
        GPUEN_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 7 - ETHCKEN"]
    #[inline(always)]
    pub fn ethcken(&self) -> ETHCKEN_R {
        ETHCKEN_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - ETHTXEN"]
    #[inline(always)]
    pub fn ethtxen(&self) -> ETHTXEN_R {
        ETHTXEN_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - ETHRXEN"]
    #[inline(always)]
    pub fn ethrxen(&self) -> ETHRXEN_R {
        ETHRXEN_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - ETHMACEN"]
    #[inline(always)]
    pub fn ethmacen(&self) -> ETHMACEN_R {
        ETHMACEN_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 12 - FMCEN"]
    #[inline(always)]
    pub fn fmcen(&self) -> FMCEN_R {
        FMCEN_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 14 - QSPIEN"]
    #[inline(always)]
    pub fn qspien(&self) -> QSPIEN_R {
        QSPIEN_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 16 - SDMMC1EN"]
    #[inline(always)]
    pub fn sdmmc1en(&self) -> SDMMC1EN_R {
        SDMMC1EN_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - SDMMC2EN"]
    #[inline(always)]
    pub fn sdmmc2en(&self) -> SDMMC2EN_R {
        SDMMC2EN_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 20 - CRC1EN"]
    #[inline(always)]
    pub fn crc1en(&self) -> CRC1EN_R {
        CRC1EN_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 24 - USBHEN"]
    #[inline(always)]
    pub fn usbhen(&self) -> USBHEN_R {
        USBHEN_R::new(((self.bits >> 24) & 0x01) != 0)
    }
}
impl W {
    // Each method returns a write proxy that sets or clears one enable bit of
    // the value that will be written to RCC_MP_AHB6ENSETR.
    #[doc = "Bit 0 - MDMAEN"]
    #[inline(always)]
    pub fn mdmaen(&mut self) -> MDMAEN_W {
        MDMAEN_W { w: self }
    }
    #[doc = "Bit 5 - GPUEN"]
    #[inline(always)]
    pub fn gpuen(&mut self) -> GPUEN_W {
        GPUEN_W { w: self }
    }
    #[doc = "Bit 7 - ETHCKEN"]
    #[inline(always)]
    pub fn ethcken(&mut self) -> ETHCKEN_W {
        ETHCKEN_W { w: self }
    }
    #[doc = "Bit 8 - ETHTXEN"]
    #[inline(always)]
    pub fn ethtxen(&mut self) -> ETHTXEN_W {
        ETHTXEN_W { w: self }
    }
    #[doc = "Bit 9 - ETHRXEN"]
    #[inline(always)]
    pub fn ethrxen(&mut self) -> ETHRXEN_W {
        ETHRXEN_W { w: self }
    }
    #[doc = "Bit 10 - ETHMACEN"]
    #[inline(always)]
    pub fn ethmacen(&mut self) -> ETHMACEN_W {
        ETHMACEN_W { w: self }
    }
    #[doc = "Bit 12 - FMCEN"]
    #[inline(always)]
    pub fn fmcen(&mut self) -> FMCEN_W {
        FMCEN_W { w: self }
    }
    #[doc = "Bit 14 - QSPIEN"]
    #[inline(always)]
    pub fn qspien(&mut self) -> QSPIEN_W {
        QSPIEN_W { w: self }
    }
    #[doc = "Bit 16 - SDMMC1EN"]
    #[inline(always)]
    pub fn sdmmc1en(&mut self) -> SDMMC1EN_W {
        SDMMC1EN_W { w: self }
    }
    #[doc = "Bit 17 - SDMMC2EN"]
    #[inline(always)]
    pub fn sdmmc2en(&mut self) -> SDMMC2EN_W {
        SDMMC2EN_W { w: self }
    }
    #[doc = "Bit 20 - CRC1EN"]
    #[inline(always)]
    pub fn crc1en(&mut self) -> CRC1EN_W {
        CRC1EN_W { w: self }
    }
    #[doc = "Bit 24 - USBHEN"]
    #[inline(always)]
    pub fn usbhen(&mut self) -> USBHEN_W {
        USBHEN_W { w: self }
    }
}
|
use super::*;
use std::sync::Arc;
use tokio::sync::Mutex;
use util::Error;
use waitgroup::WaitGroup;
#[tokio::test]
async fn test_random_generator_collision() -> Result<(), Error> {
let test_cases = vec![
(
"CandidateID",
0, /*||-> String {
generate_cand_id()
},*/
),
(
"PWD", 1, /*||-> String {
generate_pwd()
},*/
),
(
"Ufrag", 2, /*|| ->String {
generate_ufrag()
},*/
),
];
const N: usize = 10;
const ITERATION: usize = 10;
for (name, test_case) in test_cases {
for _ in 0..ITERATION {
let rands = Arc::new(Mutex::new(vec![]));
// Create a new wait group.
let wg = WaitGroup::new();
for _ in 0..N {
let w = wg.worker();
let rs = Arc::clone(&rands);
tokio::spawn(async move {
let _d = w;
let s = if test_case == 0 {
generate_cand_id()
} else if test_case == 1 {
generate_pwd()
} else {
generate_ufrag()
};
let mut r = rs.lock().await;
r.push(s);
});
}
wg.wait().await;
let rs = rands.lock().await;
assert_eq!(rs.len(), N, "{} Failed to generate randoms", name);
for i in 0..N {
for j in i + 1..N {
assert_ne!(
rs[i], rs[j],
"{}: generateRandString caused collision: {} == {}",
name, rs[i], rs[j],
);
}
}
}
}
Ok(())
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// WidgetTime : Time setting for the widget.
///
/// `Default` is derived so `WidgetTime::default()` is available alongside
/// `new()`; both yield `live_span: None` (clippy: `new_without_default`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct WidgetTime {
    /// Span of live data to display; omitted from serialization when unset.
    #[serde(rename = "live_span", skip_serializing_if = "Option::is_none")]
    pub live_span: Option<crate::models::WidgetLiveSpan>,
}
impl WidgetTime {
    /// Creates a `WidgetTime` with no live span configured.
    pub fn new() -> WidgetTime {
        WidgetTime { live_span: None }
    }
}
|
use perseus::{link, t};
use sycamore::prelude::Template as SycamoreTemplate;
use sycamore::prelude::*;
/// Years displayed in the site footer's copyright notice.
pub static COPYRIGHT_YEARS: &str = "2021";
// Navigation links shared by the desktop navbar and the mobile menu.
#[component(NavLinks<G>)]
pub fn nav_links() -> SycamoreTemplate<G> {
    template! {
        // TODO fix overly left alignment here on mobile
        li(class = "m-3 p-1") {
            // Localized link to the documentation section.
            a(href = link!("/docs"), class = "px-2") { (t!("navlinks-docs")) }
        }
        li(class = "m-3 p-1") {
            // Localized link to the comparisons page.
            a(href = link!("/comparisons"), class = "px-2") { (t!("navlinks-comparisons")) }
        }
    }
}
/// Props accepted by the `Container` page-shell component.
pub struct ContainerProps<G: GenericNode> {
    // Title shown in the page header.
    pub title: String,
    // Content rendered inside the scrollable main area.
    pub children: SycamoreTemplate<G>,
}
// Overall page shell: header (title + nav + hamburger), scrollable main
// content, and footer with a localized copyright notice.
#[component(Container<G>)]
pub fn container(props: ContainerProps<G>) -> SycamoreTemplate<G> {
    let title = props.title.clone();
    // Tracks whether the mobile hamburger menu is currently open.
    let menu_open = Signal::new(false);
    // We need to verbatim copy the value because of how it's used in Sycamore's reactivity system
    let menu_open_2 = create_memo(cloned!((menu_open) => move || *menu_open.get()));
    // Click handler that toggles the hamburger menu open/closed.
    let toggle_menu = cloned!((menu_open) => move |_| menu_open.set(!*menu_open.get()));
    template! {
        // TODO click-away events
        header(class = "shadow-md sm:p-2 w-full bg-white dark:text-white dark:bg-navy mb-20") {
            div(class = "flex justify-between") {
                // Site title links back to the landing page.
                a(class = "justify-self-start self-center m-3 ml-5 text-md sm:text-2xl", href = link!("/")) {
                    (title)
                }
                // The button for opening/closing the hamburger menu on mobile
                // This is done by a Tailwind module
                div(
                    class = format!(
                        "xs:hidden m-3 mr-5 tham tham-e-spin tham-w-6 {}",
                        if *menu_open.get() {
                            "tham-active"
                        } else {
                            ""
                        }
                    ),
                    on:click = toggle_menu
                ) {
                    div(class = "tham-box") {
                        div(class = "dark:bg-white tham-inner") {}
                    }
                }
                // This displays the navigation links on desktop
                nav(class = "hidden xs:flex") {
                    ul(class = "mr-5 flex") {
                        NavLinks()
                    }
                }
            }
            // This displays the navigation links when the menu is opened on mobile
            // TODO click-away event
            nav(
                id = "mobile_nav_menu",
                class = format!(
                    "xs:hidden w-full text-center justify-center {}",
                    if *menu_open_2.get() {
                        "flex flex-col"
                    } else {
                        "hidden"
                    }
                )
            ) {
                ul(class = "mr-5") {
                    NavLinks()
                }
            }
        }
        div(class = "overflow-y-scroll") {
            main(class="h-full") {
                (props.children.clone())
            }
        }
        footer(class = "w-full flex justify-center py-5 bg-gray-100 dark:bg-navy-deep") {
            p(class = "dark:text-white mx-5 text-center") {
                // Localized copyright text; the translation may contain HTML.
                span(dangerously_set_inner_html = &t!("footer.copyright", {
                    "years": COPYRIGHT_YEARS
                }))
            }
        }
    }
}
|
#[path = "../error.rs"]
mod error;
#[path = "../requests/mod.rs"]
mod requests;
#[path = "../trace.rs"]
mod trace;
#[path = "../vtubers.rs"]
mod vtubers;
use chrono::{DateTime, Utc};
use reqwest::Client;
use sqlx::PgPool;
use std::env;
use tracing::Instrument;
use tracing_appender::rolling::Rotation;
use crate::error::Result;
use crate::vtubers::VTUBERS;
#[tokio::main]
async fn main() -> Result<()> {
let _guard = trace::init("channel_stat=debug", Rotation::DAILY);
let _span = tracing::info_span!("channel_stat");
let client = reqwest::Client::new();
let pool = PgPool::connect(&env::var("DATABASE_URL").unwrap()).await?;
let now = Utc::now();
bilibili_channels_stat(now, &client, &pool).await?;
youtube_channels_stat(now, &client, &pool).await?;
Ok(())
}
/// Fetches the current bilibili statistics for every configured VTuber and
/// persists them: updates the `bilibili_channels` row and appends one data
/// point each to the subscriber and view time-series tables, all stamped
/// with the shared `now` timestamp.
async fn bilibili_channels_stat(now: DateTime<Utc>, client: &Client, pool: &PgPool) -> Result<()> {
    // Only vtubers that actually have a bilibili id configured.
    let ids = VTUBERS
        .iter()
        .filter_map(|v| v.bilibili)
        .collect::<Vec<_>>();
    let span = tracing::debug_span!("bilibili_channels_stat", len = ids.len(), ids = ?ids);
    let channels = requests::bilibili_channels(&client, ids).await?;
    // NOTE(review): the span is only used as the explicit parent of this one
    // event and is never entered, so nothing inside `bilibili_channels` is
    // attached to it — confirm that is intentional.
    tracing::debug!(parent: span, len = channels.len(), channels = ?channels);
    for channel in &channels {
        // Map the API result back to our vtuber record by bilibili id;
        // channels we don't know about are silently ignored.
        if let Some(vtb) = VTUBERS.iter().find(|v| v.bilibili == Some(&channel.id)) {
            // Refresh the current snapshot for this channel.
            let _ = sqlx::query!(
                r#"
                update bilibili_channels
                set (subscriber_count, view_count, updated_at)
                = ($1, $2, $3)
                where vtuber_id = $4
                "#,
                channel.subscriber_count,
                channel.view_count,
                now,
                vtb.id,
            )
            .execute(pool)
            .await?;
            // Append the subscriber-count data point.
            let _ = sqlx::query!(
                r#"
                insert into bilibili_channel_subscriber_statistic (vtuber_id, time, value)
                values ($1, $2, $3)
                "#,
                vtb.id,
                now,
                channel.subscriber_count,
            )
            .execute(pool)
            .await?;
            // Append the view-count data point.
            let _ = sqlx::query!(
                r#"
                insert into bilibili_channel_view_statistic (vtuber_id, time, value)
                values ($1, $2, $3)
                "#,
                vtb.id,
                now,
                channel.view_count,
            )
            .execute(pool)
            .await?;
        }
    }
    Ok(())
}
/// Fetches the current YouTube statistics for every configured VTuber and
/// persists them, mirroring `bilibili_channels_stat`: one snapshot update
/// plus one subscriber and one view time-series data point per channel.
async fn youtube_channels_stat(now: DateTime<Utc>, client: &Client, pool: &PgPool) -> Result<()> {
    // Only vtubers that actually have a YouTube channel id configured.
    let ids = VTUBERS.iter().filter_map(|v| v.youtube).collect::<Vec<_>>();
    let span = tracing::debug_span!("youtube_channels_stat", len = ids.len(), ids = ?ids);
    let channels = requests::youtube_channels(&client, ids).await?;
    // NOTE(review): as in the bilibili variant, the span is only used as the
    // explicit parent of this one event and is never entered — confirm.
    tracing::debug!(parent: span, len = channels.len(), channels = ?channels);
    for channel in &channels {
        // Map the API result back to our vtuber record by YouTube id.
        if let Some(vtb) = VTUBERS.iter().find(|v| v.youtube == Some(&channel.id)) {
            // Refresh the current snapshot for this channel.
            let _ = sqlx::query!(
                r#"
                update youtube_channels
                set (subscriber_count, view_count, updated_at)
                = ($1, $2, $3)
                where vtuber_id = $4
                "#,
                channel.subscriber_count,
                channel.view_count,
                now,
                vtb.id,
            )
            .execute(pool)
            .await?;
            // Append the subscriber-count data point.
            let _ = sqlx::query!(
                r#"
                insert into youtube_channel_subscriber_statistic (vtuber_id, time, value)
                values ($1, $2, $3)
                "#,
                vtb.id,
                now,
                channel.subscriber_count,
            )
            .execute(pool)
            .await?;
            // Append the view-count data point.
            let _ = sqlx::query!(
                r#"
                insert into youtube_channel_view_statistic (vtuber_id, time, value)
                values ($1, $2, $3)
                "#,
                vtb.id,
                now,
                channel.view_count,
            )
            .execute(pool)
            .await?;
        }
    }
    Ok(())
}
|
/// Demonstrates conditional branching and using `if` as an expression.
pub fn run() {
    let age: u8 = 28;
    let check_id: bool = true;
    // if/else
    if age >= 21 && check_id {
        // Fixed grammar in the user-facing prompt.
        println!("What do you want to drink?");
    } else if age <= 18 && check_id {
        println!("You can not get drink");
    }
    // NOTE(review): ages 19-20 (and anyone without an id) print nothing —
    // confirm that gap is deliberate.
    // Shorthand if: `if cond { true } else { false }` is just `cond`.
    let is_of_age = age > 21;
    println!("The age is {}", is_of_age);
}
use crate::matcher::Matcher;
use crate::name_generator::{NameGenerator, NameGeneratorError};
use std::error::Error;
use std::fmt::Display;
use std::fs::{read_dir, rename};
use std::path::Path;
use std::{fmt, io};
#[cfg(test)]
use mockiato::mockable;
/// Errors that can occur while renaming files in a directory.
#[derive(Debug)]
pub(crate) enum RenamerError {
    /// An underlying I/O operation (reading the directory or renaming) failed.
    IoError(io::Error),
    /// The matcher could not match the configured pattern against a file name.
    MatcherError,
    /// Generating the replacement name from the capture groups failed.
    NameGeneratorError(NameGeneratorError),
    /// The file name was not valid unicode and could not be processed.
    InvalidFileName,
}
impl Display for RenamerError {
    /// Formats a short, user-facing description of the error.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let message = match self {
            RenamerError::IoError(_) => "An io error occurred",
            RenamerError::MatcherError => "Could not match name against file",
            RenamerError::NameGeneratorError(_) => "Unable to create the new file name",
            // FIX: removed the duplicated word ("it is is") from the message.
            RenamerError::InvalidFileName => "Invalid file name. Make sure it is valid unicode",
        };
        write!(f, "{}", message)
    }
}
impl Error for RenamerError {}
/// Renames files according to a configured matching/naming strategy.
#[cfg_attr(test, mockable)]
pub(crate) trait Renamer {
    /// Renames every regular file directly inside `directory`
    /// (sub-directories are not descended into).
    fn rename_files_in_directory(&self, directory: &Path) -> Result<(), Box<dyn Error>>;
}
/// Default [`Renamer`] implementation backed by a pattern [`Matcher`] and a
/// [`NameGenerator`].
#[derive(Debug)]
pub(crate) struct RenamerImpl {
    /// Matches old file names and extracts capture groups.
    matcher: Box<dyn Matcher>,
    /// Builds the new file name from the extracted capture groups.
    name_generator: Box<dyn NameGenerator>,
}
impl RenamerImpl {
    /// Builds a renamer from the given matcher and name generator.
    pub(crate) fn new(matcher: Box<dyn Matcher>, name_generator: Box<dyn NameGenerator>) -> Self {
        Self { matcher, name_generator }
    }
    /// Derives the replacement name for `old_name` by matching it and feeding
    /// the capture groups to the name generator.
    fn create_new_name(&self, old_name: &str) -> Result<String, RenamerError> {
        let capture_groups = match self.matcher.match_against(old_name) {
            Ok(groups) => groups,
            Err(_) => return Err(RenamerError::MatcherError),
        };
        match self.name_generator.generate_name(capture_groups) {
            Ok(name) => Ok(name),
            Err(source) => Err(RenamerError::NameGeneratorError(source)),
        }
    }
}
impl Renamer for RenamerImpl {
    /// Renames every regular file directly inside `directory`. Files whose
    /// name cannot be matched are skipped with a message; existing targets
    /// are never overwritten.
    fn rename_files_in_directory(&self, directory: &Path) -> Result<(), Box<dyn Error>> {
        let dir_entry = read_dir(directory).map_err(RenamerError::IoError)?;
        for entry in dir_entry {
            let entry = entry.map_err(RenamerError::IoError)?;
            let path = entry.path();
            // Only plain files are renamed; sub-directories are left alone.
            if path.is_dir() {
                continue;
            }
            let old_name = entry.file_name();
            let new_name = match self
                .create_new_name(old_name.to_str().ok_or(RenamerError::InvalidFileName)?)
            {
                Ok(new_name) => new_name,
                Err(_) => {
                    eprintln!("Ignoring file: {:?}", old_name);
                    continue;
                }
            };
            println!("Renaming {:?} → {:?}", &old_name, &new_name);
            // FIX: resolve the target inside the scanned directory instead of
            // the current working directory.
            let target = directory.join(&new_name);
            if target.exists() {
                eprintln!("Path already exists. Skipping...");
                // FIX: previously this fell through and overwrote the
                // existing file despite printing "Skipping".
                continue;
            }
            // FIX: use the entry's full path as the source; the bare file
            // name was resolved against the CWD, not `directory`.
            rename(&path, &target).map_err(RenamerError::IoError)?;
        }
        Ok(())
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod operations {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationEntityListResult, list::Error> {
let client = &operation_config.client;
let uri_str = &format!("{}/providers/Microsoft.MachineLearning/operations", &operation_config.base_path,);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: OperationEntityListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
list::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod skus {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list(operation_config: &crate::OperationConfig, subscription_id: &str) -> std::result::Result<SkuListResult, list::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearning/skus",
&operation_config.base_path, subscription_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: SkuListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
list::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod commitment_associations {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn get(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
commitment_plan_name: &str,
commitment_association_name: &str,
) -> std::result::Result<CommitmentAssociation, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}/commitmentAssociations/{}",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name, commitment_association_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: CommitmentAssociation = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
get::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
commitment_plan_name: &str,
skip_token: Option<&str>,
) -> std::result::Result<CommitmentAssociationListResult, list::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}/commitmentAssociations",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(skip_token) = skip_token {
req_builder = req_builder.query(&[("$skipToken", skip_token)]);
}
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: CommitmentAssociationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
list::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn move_(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
commitment_plan_name: &str,
commitment_association_name: &str,
move_payload: &MoveCommitmentAssociationRequest,
) -> std::result::Result<CommitmentAssociation, move_::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}/commitmentAssociations/{}/move",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name, commitment_association_name
);
let mut req_builder = client.post(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(move_::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
req_builder = req_builder.json(move_payload);
let req = req_builder.build().context(move_::BuildRequestError)?;
let rsp = client.execute(req).await.context(move_::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(move_::ResponseBytesError)?;
let rsp_value: CommitmentAssociation = serde_json::from_slice(&body).context(move_::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(move_::ResponseBytesError)?;
move_::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod move_ {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod commitment_plans {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn get(operation_config: &crate::OperationConfig) -> std::result::Result<CommitmentPlan, get::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(get::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
let req = req_builder.build().context(get::BuildRequestError)?;
let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
let rsp_value: CommitmentPlan = serde_json::from_slice(&body).context(get::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
get::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod get {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
create_or_update_payload: &CommitmentPlan,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name
);
let mut req_builder = client.put(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(create_or_update::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.json(create_or_update_payload);
let req = req_builder.build().context(create_or_update::BuildRequestError)?;
let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: CommitmentPlan = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
StatusCode::CREATED => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
let rsp_value: CommitmentPlan = serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
Ok(create_or_update::Response::Created201(rsp_value))
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
create_or_update::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug)]
pub enum Response {
Ok200(CommitmentPlan),
Created201(CommitmentPlan),
}
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn patch(
operation_config: &crate::OperationConfig,
patch_payload: &CommitmentPlanPatchPayload,
) -> std::result::Result<CommitmentPlan, patch::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name
);
let mut req_builder = client.patch(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(patch::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.json(patch_payload);
let req = req_builder.build().context(patch::BuildRequestError)?;
let rsp = client.execute(req).await.context(patch::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(patch::ResponseBytesError)?;
let rsp_value: CommitmentPlan = serde_json::from_slice(&body).context(patch::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(patch::ResponseBytesError)?;
patch::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod patch {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn remove(operation_config: &crate::OperationConfig) -> std::result::Result<(), remove::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name
);
let mut req_builder = client.delete(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(remove::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
let req = req_builder.build().context(remove::BuildRequestError)?;
let rsp = client.execute(req).await.context(remove::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => Ok(()),
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(remove::ResponseBytesError)?;
remove::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod remove {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
skip_token: Option<&str>,
) -> std::result::Result<CommitmentPlanListResult, list::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.MachineLearning/commitmentPlans",
&operation_config.base_path, subscription_id
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(skip_token) = skip_token {
req_builder = req_builder.query(&[("$skipToken", skip_token)]);
}
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: CommitmentPlanListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
list::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_in_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
skip_token: Option<&str>,
) -> std::result::Result<CommitmentPlanListResult, list_in_resource_group::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans",
&operation_config.base_path, subscription_id, resource_group_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list_in_resource_group::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(skip_token) = skip_token {
req_builder = req_builder.query(&[("$skipToken", skip_token)]);
}
let req = req_builder.build().context(list_in_resource_group::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_in_resource_group::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list_in_resource_group::ResponseBytesError)?;
let rsp_value: CommitmentPlanListResult =
serde_json::from_slice(&body).context(list_in_resource_group::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list_in_resource_group::ResponseBytesError)?;
list_in_resource_group::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod list_in_resource_group {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod usage_history {
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub async fn list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
commitment_plan_name: &str,
skip_token: Option<&str>,
) -> std::result::Result<PlanUsageHistoryListResult, list::Error> {
let client = &operation_config.client;
let uri_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearning/commitmentPlans/{}/usageHistory",
&operation_config.base_path, subscription_id, resource_group_name, commitment_plan_name
);
let mut req_builder = client.get(uri_str);
if let Some(token_credential) = &operation_config.token_credential {
let token_response = token_credential
.get_token(&operation_config.token_credential_resource)
.await
.context(list::GetTokenError)?;
req_builder = req_builder.bearer_auth(token_response.token.secret());
}
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
if let Some(skip_token) = skip_token {
req_builder = req_builder.query(&[("$skipToken", skip_token)]);
}
let req = req_builder.build().context(list::BuildRequestError)?;
let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
match rsp.status() {
StatusCode::OK => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
let rsp_value: PlanUsageHistoryListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
Ok(rsp_value)
}
status_code => {
let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
list::UnexpectedResponse { status_code, body: body }.fail()
}
}
}
pub mod list {
use crate::{models, models::*};
use reqwest::StatusCode;
use snafu::Snafu;
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub enum Error {
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
|
pub mod indexing;
pub mod init;
pub mod validate;
|
use ggez::Context;
use ggez::graphics::{Canvas,MeshBuilder,DrawMode,DrawParam,Rect,Color,set_canvas,draw};
use ggez::nalgebra::Point2;
use crate::common::{Point,randrange};
/// Scrolling star-field background rendered into an off-screen canvas.
pub struct Background {
    /// Off-screen render target the star field is drawn into each frame.
    canvas: Canvas,
    /// The individual stars; each falls at its own speed.
    stars: Vec<Star>,
}
/// A single background star.
struct Star {
    /// Current position in screen coordinates.
    pos: Point,
    /// Vertical fall speed, in pixels per `update` tick.
    speed: f32,
}
impl Star {
fn new(pos: Point, speed: f32) -> Self {
Star {
pos, speed
}
}
}
impl Background {
    /// Creates the background with 500 randomly placed stars and an
    /// off-screen canvas sized to the current window.
    ///
    /// NOTE(review): 1919x1079 here (and 1920x1080 below) hard-code the
    /// screen size — confirm the game always runs at that resolution.
    pub fn new(ctx: &mut Context) -> Self {
        let canvas = Canvas::with_window_size(ctx).unwrap();
        let mut stars = Vec::new();
        for _ in 0..500 {
            stars.push(Star::new(Point::new(randrange(0.0,1919.0),
                randrange(0.0, 1079.0)), randrange(0.2, 6.5)));
        }
        Background {
            canvas, stars
        }
    }
    /// Advances every star by its own speed; stars that fall off the bottom
    /// re-enter at a random x position along the top edge.
    pub fn update(&mut self) {
        for star in &mut self.stars {
            star.pos = star.pos + Point::new(0.0, star.speed);
            if star.pos.y > 1080.0 {
                star.pos = Point::new(randrange(0.0,1919.0), 0.0);
            }
        }
    }
    /// Renders the star field into the off-screen canvas and returns it.
    ///
    /// The mesh draws the stars first and then a 70%-alpha black rectangle
    /// over the whole screen; because the canvas is never cleared, each frame
    /// dims what was there before — presumably to leave fading trails behind
    /// the stars (confirm the order, which also dims the current frame's
    /// stars, is intentional).
    pub fn draw(&mut self, ctx: &mut Context) -> &Canvas {
        let mut canvas_mb = MeshBuilder::new();
        for star in &mut self.stars {
            canvas_mb.circle(DrawMode::fill(),
                Point2::new(star.pos.x, star.pos.y), 1.0, 1.0,
                Color::new(0.5, 0.5, 0.5, 1.0));
        }
        canvas_mb.rectangle(DrawMode::fill(),
            Rect::new(0.0, 0.0, 1920.0, 1080.0),
            Color::new(0.0, 0.0, 0.0, 0.7));
        let mesh = canvas_mb.build(ctx).unwrap();
        // Redirect drawing to the off-screen canvas, then restore the screen.
        set_canvas(ctx, Some(&self.canvas));
        let _ = draw(ctx, &mesh, DrawParam::default());
        set_canvas(ctx, None);
        &self.canvas
    }
}
|
use ipfs::{Block, Ipfs, IpfsOptions, TestTypes};
use std::convert::TryInto;
use futures::{FutureExt, TryFutureExt};
/// Demo: start an in-memory IPFS node, store and fetch blocks, then add a
/// file and read it back by CID.
fn main() {
    let options = IpfsOptions::<TestTypes>::default();
    env_logger::Builder::new().parse_filters(&options.ipfs_log).init();
    // Bridge the async/await demo onto the pre-async tokio 0.1 runtime via
    // the futures 0.3 compatibility layer (`unit_error().boxed().compat()`).
    tokio::runtime::current_thread::block_on_all(async move {
        // Start daemon and initialize repo
        let (mut ipfs, fut) = Ipfs::new(options).start().await.unwrap();
        // The background future drives the node; spawn it on the runtime.
        tokio::spawn(fut.unit_error().boxed().compat());
        // Create a Block
        ipfs.put_block(Block::from("block-provide")).await.unwrap();
        // Retrieve a block by the CID of the "block-want\n" content.
        // NOTE(review): this block is never stored here — presumably another
        // peer (or a prior run) provides it; confirm.
        let block = ipfs.get_block(Block::from("block-want\n").cid()).await.unwrap();
        let contents: String = block.into();
        println!("block contents: {:?}", contents);
        // Add a file
        ipfs.add("./examples/block.data".into()).await.unwrap();
        // Get a file
        // NOTE(review): hard-coded CID — assumes ./examples/block.data
        // hashes to exactly this value; confirm.
        let path = "/QmSy5pnHk1EnvE5dmJSyFKG5unXLGjPpBuJJCBQkBTvBaW".try_into().unwrap();
        let file = ipfs.get(path).await.unwrap();
        let contents: String = file.into();
        println!("file contents: {:?}", contents);
    }.unit_error().boxed().compat()).unwrap();
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Manages services
// In case we roll the toolchain and something we're using as a feature has been
// stabilized.
#![allow(stable_features)]
// TODO(dpradilla): reenable and add comments #![deny(missing_docs)]
#![deny(unreachable_patterns)]
#![deny(unused)]
// TODO(dpradilla): remove allow
#![allow(dead_code)]
use crate::lifmgr::LIF;
use crate::{error, UUID};
use std::collections::HashSet;
/// `Manager` keeps track of interfaces where a service is enabled
/// and verifies conflicting services are not enabled.
pub struct Manager {
// dhcp_server has collection of interfaces where DHCP dhcp_server is enabled.
dhcp_server: std::collections::HashSet<UUID>,
// dhcp_client has collection of interfaces DHCP dhcp_client is enabled.
dhcp_client: std::collections::HashSet<UUID>,
}
impl Manager {
    /// Creates a new `Manager` with no services enabled on any interface.
    pub fn new() -> Self {
        Manager { dhcp_server: HashSet::new(), dhcp_client: HashSet::new() }
    }
    /// `enable_server` marks the DHCP server as enabled on the indicated interface.
    ///
    /// Returns `Ok(true)` if it was newly enabled, `Ok(false)` if it was already
    /// enabled, and an error if the DHCP client is enabled on the same interface
    /// (server and client are mutually exclusive per interface).
    pub fn enable_server(&mut self, lif: &LIF) -> error::Result<bool> {
        if self.dhcp_client.contains(&lif.id().uuid) {
            return Err(error::NetworkManager::SERVICE(error::Service::NotEnabled));
        }
        Ok(self.dhcp_server.insert(lif.id().uuid))
    }
    /// `disable_server` marks the DHCP server as disabled on the indicated interface.
    ///
    /// Returns `true` if it had been enabled, `false` if it was already disabled.
    pub fn disable_server(&mut self, lif: &LIF) -> bool {
        self.dhcp_server.remove(&lif.id().uuid)
    }
    /// `is_server_enabled` returns true if the DHCP server is enabled on the
    /// indicated interface. Read-only query, so `&self` suffices.
    pub fn is_server_enabled(&self, lif: &LIF) -> bool {
        self.dhcp_server.contains(&lif.id().uuid)
    }
    /// `enable_client` marks the DHCP client as enabled on the indicated interface.
    ///
    /// Returns `Ok(true)` if it was newly enabled, `Ok(false)` if it was already
    /// enabled, and an error if the DHCP server is enabled on the same interface.
    pub fn enable_client(&mut self, lif: &LIF) -> error::Result<bool> {
        if self.dhcp_server.contains(&lif.id().uuid) {
            return Err(error::NetworkManager::SERVICE(error::Service::NotEnabled));
        }
        Ok(self.dhcp_client.insert(lif.id().uuid))
    }
    /// `disable_client` marks the DHCP client as disabled on the indicated interface.
    ///
    /// Returns `true` if it had been enabled, `false` if it was already disabled.
    pub fn disable_client(&mut self, lif: &LIF) -> bool {
        self.dhcp_client.remove(&lif.id().uuid)
    }
    /// `is_client_enabled` returns true if the DHCP client is enabled on the
    /// indicated interface. Read-only query, so `&self` suffices.
    pub fn is_client_enabled(&self, lif: &LIF) -> bool {
        self.dhcp_client.contains(&lif.id().uuid)
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests for the DHCP service `Manager`.
    use super::*;
    use crate::lifmgr::{LIFType, LIF};
    use crate::portmgr::PortId;
    use crate::portmgr::{Port, PortManager};
    /// Builds two fixture LIFs (one WAN, one LAN) backed by distinct ports.
    fn create_lifs() -> (LIF, LIF) {
        let mut pm = PortManager::new();
        pm.add_port(Port::new(PortId::from(1), "port1", 1));
        pm.add_port(Port::new(PortId::from(2), "port2", 1));
        let l1 =
            LIF::new(1, LIFType::WAN, "name1", PortId::from(0), vec![PortId::from(1)], 0, None)
                .unwrap();
        let l2 =
            LIF::new(2, LIFType::LAN, "name2", PortId::from(0), vec![PortId::from(2)], 0, None)
                .unwrap();
        (l1, l2)
    }
    #[test]
    fn test_service_manager_new() {
        // A fresh manager has no services enabled anywhere.
        let m = Manager::new();
        assert_eq!(m.dhcp_server.len(), 0);
        assert_eq!(m.dhcp_client.len(), 0);
    }
    #[test]
    fn test_server_enable() {
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(m.enable_server(&l).unwrap_or(false));
        assert_eq!(m.dhcp_server.len(), 1);
    }
    #[test]
    fn test_server_double_enable() {
        // Second enable on the same interface returns Ok(false), not an error.
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(m.enable_server(&l).unwrap_or(false));
        assert!(!m.enable_server(&l).unwrap_or(false));
        assert_eq!(m.dhcp_server.len(), 1);
    }
    #[test]
    fn test_disable_server() {
        let mut m = Manager::new();
        let (l, l2) = create_lifs();
        assert!(m.enable_server(&l).unwrap_or(false));
        assert_eq!(m.dhcp_server.len(), 1);
        assert!(m.disable_server(&l));
        assert_eq!(m.dhcp_server.len(), 0);
        // Disabling one interface leaves the other enabled.
        assert!(m.enable_server(&l).unwrap_or(false));
        assert!(m.enable_server(&l2).unwrap_or(false));
        assert_eq!(m.dhcp_server.len(), 2);
        assert!(m.disable_server(&l));
        assert_eq!(m.dhcp_server.len(), 1);
    }
    #[test]
    fn test_server_disable_already_disabled() {
        // Disabling a never-enabled interface reports false.
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(!m.disable_server(&l));
        assert_eq!(m.dhcp_server.len(), 0);
    }
    #[test]
    fn test_client_enable() {
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(m.enable_client(&l).unwrap_or(false));
        assert_eq!(m.dhcp_client.len(), 1);
    }
    #[test]
    fn test_client_double_enable() {
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(m.enable_client(&l).unwrap_or(false));
        assert!(!m.enable_client(&l).unwrap_or(false));
        assert_eq!(m.dhcp_client.len(), 1);
    }
    #[test]
    fn test_disable_client() {
        let mut m = Manager::new();
        let (l, l2) = create_lifs();
        assert!(m.enable_client(&l).unwrap_or(false));
        assert_eq!(m.dhcp_client.len(), 1);
        assert!(m.disable_client(&l));
        assert_eq!(m.dhcp_client.len(), 0);
        assert!(m.enable_client(&l).unwrap_or(false));
        assert!(m.enable_client(&l2).unwrap_or(false));
        assert_eq!(m.dhcp_client.len(), 2);
        assert!(m.disable_client(&l));
        assert_eq!(m.dhcp_client.len(), 1);
    }
    #[test]
    fn test_client_disable_already_disabled() {
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(!m.disable_client(&l));
        assert_eq!(m.dhcp_client.len(), 0);
    }
    #[test]
    fn test_enable_server_with_client_enabled() {
        // Server and client are mutually exclusive on an interface.
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(m.enable_client(&l).unwrap_or(false));
        assert_eq!(m.dhcp_client.len(), 1);
        assert!(!m.enable_server(&l).unwrap_or(false));
        assert_eq!(m.dhcp_server.len(), 0);
    }
    #[test]
    fn test_enable_client_with_server_enabled() {
        let mut m = Manager::new();
        let (l, _) = create_lifs();
        assert!(m.enable_server(&l).unwrap_or(false));
        assert_eq!(m.dhcp_server.len(), 1);
        assert!(!m.enable_client(&l).unwrap_or(false));
        assert_eq!(m.dhcp_client.len(), 0);
    }
}
|
// svd2rust-style generated accessors for the FMC_CSQCFGR1 register.
#[doc = "Reader of register FMC_CSQCFGR1"]
pub type R = crate::R<u32, super::FMC_CSQCFGR1>;
#[doc = "Writer for register FMC_CSQCFGR1"]
pub type W = crate::W<u32, super::FMC_CSQCFGR1>;
#[doc = "Register FMC_CSQCFGR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::FMC_CSQCFGR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Register is all-zero after reset.
        0
    }
}
#[doc = "Reader of field `CMD2EN`"]
pub type CMD2EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMD2EN`"]
pub struct CMD2EN_W<'a> {
    w: &'a mut W,
}
impl<'a> CMD2EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit position 1: clear then set.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `DMADEN`"]
pub type DMADEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DMADEN`"]
pub struct DMADEN_W<'a> {
    w: &'a mut W,
}
impl<'a> DMADEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit position 2.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `ACYNBR`"]
pub type ACYNBR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ACYNBR`"]
pub struct ACYNBR_W<'a> {
    w: &'a mut W,
}
impl<'a> ACYNBR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at bits 4..=6 (mask 0x07); `unsafe` because svd2rust
        // cannot prove all 3-bit values are valid for the hardware.
        self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4);
        self.w
    }
}
#[doc = "Reader of field `CMD1`"]
pub type CMD1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CMD1`"]
pub struct CMD1_W<'a> {
    w: &'a mut W,
}
impl<'a> CMD1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 8-bit field at bits 8..=15.
        self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);
        self.w
    }
}
#[doc = "Reader of field `CMD2`"]
pub type CMD2_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CMD2`"]
pub struct CMD2_W<'a> {
    w: &'a mut W,
}
impl<'a> CMD2_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 8-bit field at bits 16..=23.
        self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);
        self.w
    }
}
#[doc = "Reader of field `CMD1T`"]
pub type CMD1T_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMD1T`"]
pub struct CMD1T_W<'a> {
    w: &'a mut W,
}
impl<'a> CMD1T_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit position 24.
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
#[doc = "Reader of field `CMD2T`"]
pub type CMD2T_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMD2T`"]
pub struct CMD2T_W<'a> {
    w: &'a mut W,
}
impl<'a> CMD2T_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at bit position 25.
        self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
        self.w
    }
}
// Field readers: each extracts its field by shifting to bit 0 and masking.
impl R {
    #[doc = "Bit 1 - CMD2EN"]
    #[inline(always)]
    pub fn cmd2en(&self) -> CMD2EN_R {
        CMD2EN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - DMADEN"]
    #[inline(always)]
    pub fn dmaden(&self) -> DMADEN_R {
        DMADEN_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bits 4:6 - ACYNBR"]
    #[inline(always)]
    pub fn acynbr(&self) -> ACYNBR_R {
        ACYNBR_R::new(((self.bits >> 4) & 0x07) as u8)
    }
    #[doc = "Bits 8:15 - CMD1"]
    #[inline(always)]
    pub fn cmd1(&self) -> CMD1_R {
        CMD1_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - CMD2"]
    #[inline(always)]
    pub fn cmd2(&self) -> CMD2_R {
        CMD2_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bit 24 - CMD1T"]
    #[inline(always)]
    pub fn cmd1t(&self) -> CMD1T_R {
        CMD1T_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 25 - CMD2T"]
    #[inline(always)]
    pub fn cmd2t(&self) -> CMD2T_R {
        CMD2T_R::new(((self.bits >> 25) & 0x01) != 0)
    }
}
// Field writers: each returns a write proxy borrowing the writer.
impl W {
    #[doc = "Bit 1 - CMD2EN"]
    #[inline(always)]
    pub fn cmd2en(&mut self) -> CMD2EN_W {
        CMD2EN_W { w: self }
    }
    #[doc = "Bit 2 - DMADEN"]
    #[inline(always)]
    pub fn dmaden(&mut self) -> DMADEN_W {
        DMADEN_W { w: self }
    }
    #[doc = "Bits 4:6 - ACYNBR"]
    #[inline(always)]
    pub fn acynbr(&mut self) -> ACYNBR_W {
        ACYNBR_W { w: self }
    }
    #[doc = "Bits 8:15 - CMD1"]
    #[inline(always)]
    pub fn cmd1(&mut self) -> CMD1_W {
        CMD1_W { w: self }
    }
    #[doc = "Bits 16:23 - CMD2"]
    #[inline(always)]
    pub fn cmd2(&mut self) -> CMD2_W {
        CMD2_W { w: self }
    }
    #[doc = "Bit 24 - CMD1T"]
    #[inline(always)]
    pub fn cmd1t(&mut self) -> CMD1T_W {
        CMD1T_W { w: self }
    }
    #[doc = "Bit 25 - CMD2T"]
    #[inline(always)]
    pub fn cmd2t(&mut self) -> CMD2T_W {
        CMD2T_W { w: self }
    }
}
|
//! Time Series definitions
//!
use chrono::prelude::{UTC, DateTime};
use chrono::TimeZone;
#[derive(Debug, PartialEq)]
/// A single observation: a timestamp paired with a value.
pub struct DataPoint {
    // Time index of the observation.
    index: DateTime<UTC>,
    // Observed value.
    value: f64
}
#[derive(Debug, PartialEq)]
/// An ordered collection of `DataPoint`s.
pub struct TimeSeries {
    data: Vec<DataPoint>,
}
impl DataPoint {
    /// Constructs a data point pairing a timestamp with an observed value.
    pub fn new(index: DateTime<UTC>, value: f64) -> DataPoint {
        DataPoint { index, value }
    }
}
impl TimeSeries {
    /// Create Time Series from DataPoints
    pub fn new(data: Vec<DataPoint>) -> TimeSeries {
        TimeSeries { data }
    }
    /// Create a TS from bare values. The time index is synthesized from each
    /// element's position, interpreted as a Unix timestamp in seconds.
    pub fn from_values(data: Vec<f64>) -> TimeSeries {
        let xs = data.iter().enumerate()
            // f64 is Copy: destructure by reference instead of clone().
            .map(|(i, &x)| DataPoint::new(UTC.timestamp(i as i64, 0), x))
            .collect();
        TimeSeries { data: xs }
    }
    /// Number of data points in the series.
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// Returns true if the series holds no data points.
    /// (Companion to `len`, per the usual collection convention.)
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Get the list of values, in index order.
    pub fn values(&self) -> Vec<f64> {
        self.data.iter().map(|dp| dp.value).collect()
    }
}
/// ------------------------------------------------------------------------------------------------
/// Module unit tests
/// ------------------------------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::prelude::{UTC, TimeZone};
    #[test]
    fn test_new() {
        // Construction from explicit DataPoints preserves the count.
        let a = TimeSeries::new(vec![ DataPoint::new(UTC.ymd(2014, 1, 23).and_hms(9, 10, 11), 4.5),
                                      DataPoint::new(UTC.ymd(2014, 1, 24).and_hms(9, 10, 11), 3.0)]);
        assert!(a.len() == 2);
    }
    #[test]
    fn test_values() {
        // from_values + values round-trips the raw value vector.
        let a = TimeSeries::from_values(vec![4.5, 3.5, 8.9]);
        assert!(a.values() == vec![4.5, 3.5, 8.9]);
    }
}
// Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Derived from https://github.com/apoelstra/rust-jsonrpc
//! JSON RPC Client functionality
use failure::Fail;
use hyper;
use serde_json;
/// Builds a request
pub fn build_request<'a, 'b>(name: &'a str, params: &'b serde_json::Value) -> Request<'a, 'b> {
Request {
method: name,
params: params,
id: From::from(1),
jsonrpc: Some("2.0"),
}
}
#[derive(Debug, Clone, PartialEq, Serialize)]
/// A JSONRPC request object
/// (serialize-only: requests are sent, never parsed back).
pub struct Request<'a, 'b> {
    /// The name of the RPC call
    pub method: &'a str,
    /// Parameters to the RPC call
    pub params: &'b serde_json::Value,
    /// Identifier for this Request, which should appear in the response
    pub id: serde_json::Value,
    /// jsonrpc field, MUST be "2.0"
    pub jsonrpc: Option<&'a str>,
}
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
/// A JSONRPC response object
pub struct Response {
    /// A result if there is one, or null
    pub result: Option<serde_json::Value>,
    /// An error if there is one, or null
    pub error: Option<RpcError>,
    /// Identifier for this Request, which should match that of the request
    pub id: serde_json::Value,
    /// jsonrpc field, MUST be "2.0"
    pub jsonrpc: Option<String>,
}
impl Response {
    /// Extract the result from a response.
    ///
    /// Precedence: a JSON-RPC `error` member wins. Otherwise the `result`
    /// payload is treated as a Rust-style `{"Ok": ..}` / `{"Err": ..}`
    /// envelope: a non-null `result["Err"]` becomes `Error::GenericError`,
    /// and the value under `result["Ok"]` is deserialized into `T`.
    /// A missing `result` deserializes `T` from JSON null (succeeds only
    /// for null-accepting types such as `Option` or `()`).
    pub fn result<T: serde::de::DeserializeOwned>(&self) -> Result<T, Error> {
        if let Some(ref e) = self.error {
            return Err(Error::Rpc(e.clone()));
        }
        // Clone so the response itself stays intact for later inspection.
        let result = match self.result.clone() {
            Some(r) => {
                if !r["Err"].is_null() {
                    // we get error. Let's respond as error
                    return Err(Error::GenericError(r["Err"].to_string()));
                }
                serde_json::from_value(r["Ok"].clone()).map_err(|e| Error::Json(format!("{}", e)))
            }
            None => serde_json::from_value(serde_json::Value::Null)
                .map_err(|e| Error::Json(format!("{}", e))),
        }?;
        Ok(result)
    }
    /// Extract the result from a response, consuming the response.
    /// Delegates to `result()` after the (technically redundant) error check.
    pub fn into_result<T: serde::de::DeserializeOwned>(self) -> Result<T, Error> {
        if let Some(e) = self.error {
            return Err(Error::Rpc(e));
        }
        self.result()
    }
    /// Return the RPC error, if there was one, but do not check the result
    pub fn _check_error(self) -> Result<(), Error> {
        if let Some(e) = self.error {
            Err(Error::Rpc(e))
        } else {
            Ok(())
        }
    }
    /// Returns whether or not the `result` field is empty
    pub fn _is_none(&self) -> bool {
        self.result.is_none()
    }
}
/// A library error
// Uses the `failure` crate's derive to generate Display and Fail impls.
#[derive(Debug, Fail, Clone)]
pub enum Error {
    /// Json error
    #[fail(display = "Unable to parse json, {}", _0)]
    Json(String),
    /// Client error
    #[fail(display = "Connection error, {}", _0)]
    Hyper(String),
    /// Error response
    #[fail(display = "RPC error: {:?}", _0)]
    Rpc(RpcError),
    /// Internal generic Error
    #[fail(display = "Client error: {}", _0)]
    GenericError(String),
}
impl From<serde_json::Error> for Error {
fn from(e: serde_json::Error) -> Error {
Error::Json(format!("{}", e))
}
}
impl From<hyper::error::Error> for Error {
fn from(e: hyper::error::Error) -> Error {
Error::Hyper(format!("{}", e))
}
}
impl From<RpcError> for Error {
fn from(e: RpcError) -> Error {
Error::Rpc(e)
}
}
#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
/// A JSONRPC error object
pub struct RpcError {
    /// The integer identifier of the error
    pub code: i32,
    /// A string describing the error
    pub message: String,
    /// Additional data specific to the error
    pub data: Option<serde_json::Value>,
}
|
use crate::utils::translators;
/// Base64-encodes a buffer of hex nibbles (one 4-bit value per `u32` element).
///
/// Nibbles are packed into `byte_buf` and flushed through `encode_match`
/// every six nibbles (three bytes). A trailing group of 2 or 4 nibbles is
/// realigned and flushed as a final padded group. Odd nibble counts describe
/// an incomplete byte: `buf_size / 2` rounds down and `encode_match` panics
/// when it reaches 0.
fn encode_hex_vec(buf: Vec<u32>) -> String {
    let mut buf_size = 0;
    let mut byte_buf: u32 = 0;
    let mut encoded: String = String::new();
    for b in buf {
        buf_size += 1;
        byte_buf |= b;
        if buf_size == 6 {
            // Six nibbles == three bytes: emit four base64 characters.
            let encoding = encode_match(byte_buf, buf_size / 2);
            encoded.push_str(&encoding);
            byte_buf = 0;
            buf_size = 0;
            continue;
        }
        byte_buf <<= 4;
    }
    if buf_size != 0 {
        // The loop shifts after every unflushed nibble, leaving the remainder
        // 4 bits short of byte alignment; realign before encoding so partial
        // groups (e.g. a single byte) encode correctly.
        byte_buf <<= 4;
        let encoding = encode_match(byte_buf, buf_size / 2);
        encoded.push_str(&encoding);
    }
    encoded
}
/// Base64-encodes a string of hex digits (e.g. "49276d...") into the base64
/// encoding of the bytes those digits represent.
///
/// Two hex digits form one byte; digits are packed six at a time (three
/// bytes) and flushed through `encode_match`. A trailing group of 2 or 4
/// digits is realigned and emitted with base64 padding.
///
/// Accepts any `&str` (previously over-restricted to `&'static str`; the
/// wider type is backward compatible for all callers).
///
/// # Panics
/// Panics on non-hex characters (`to_digit(16).unwrap()`) and, inside
/// `encode_match`, on odd digit counts (an incomplete final byte).
pub fn encode_hex_str(buf: &str) -> String {
    let mut buf_size = 0;
    let mut byte_buf: u32 = 0;
    let mut encoded: String = String::new();
    for c in buf.chars() {
        buf_size += 1;
        byte_buf |= c.to_digit(16).unwrap();
        if buf_size == 6 {
            // Six hex digits == three bytes: emit four base64 characters.
            let encoding = encode_match(byte_buf, buf_size / 2);
            encoded.push_str(&encoding);
            byte_buf = 0;
            buf_size = 0;
            continue;
        }
        byte_buf <<= 4;
    }
    if buf_size != 0 {
        // The loop shifts after every unflushed digit, leaving the remainder
        // 4 bits short of byte alignment; realign so a trailing 1- or 2-byte
        // group encodes correctly (e.g. "4d" -> "TQ==").
        byte_buf <<= 4;
        let encoding = encode_match(byte_buf, buf_size / 2);
        encoded.push_str(&encoding);
    }
    encoded
}
/// Base64-encodes the raw character bytes of a string, three at a time.
fn encode_str(val: &str) -> String {
    let mut encoded = String::new();
    let mut pending: u32 = 0;
    let mut pending_len: u8 = 0;
    for ch in val.chars() {
        pending_len += 1;
        pending |= ch as u32;
        // Byte buffer full: flush four base64 characters and reset.
        if pending_len == 3 {
            encoded.push_str(&encode_match(pending, pending_len));
            pending = 0;
            pending_len = 0;
            continue;
        }
        pending <<= 8;
    }
    if pending_len != 0 {
        // Flush the trailing 1- or 2-byte group; encode_match adds '=' padding.
        encoded.push_str(&encode_match(pending, pending_len));
    }
    encoded
}
/// Converts one group of up to three buffered bytes into base64 characters,
/// appending '=' padding for short groups.
///
/// `buf_size` is the number of real bytes in the group (1..=3). The caller
/// supplies the bytes positioned in `byte_buf` as follows: 3 bytes fill
/// bits 0..24; 2 bytes sit at bits 8..24; 1 byte sits at bits 8..16 and is
/// shifted up another byte here. Six bits at a time are then peeled off the
/// top of the 24-bit window (mask 0xFC0000) and mapped through
/// `translators::to_base64`.
fn encode_match(mut byte_buf: u32, buf_size: u8) -> String {
    let mut encoded: String = String::new();
    let mut pos = 0;
    // Number of base64 chars carrying real data: 1 byte -> 2, 2 -> 3, 3 -> 4.
    let base64_buf = match buf_size {
        1 => {byte_buf <<= 8; 2},
        2 => 3,
        3 => 4,
        _ => panic!("Error processing base64_buff")
    };
    while pos < base64_buf {
        // Take the top 6 bits of the 24-bit window (bits 18..24).
        let b = (byte_buf & 0xFC0000) >> 18;
        let val = translators::to_base64(b);
        byte_buf <<= 6;
        encoded.push(val);
        pos += 1;
    }
    // Pad to a 4-character quantum: "==" for 1 input byte, "=" for 2.
    match buf_size {
        1 => {encoded.push('=');encoded.push('=')},
        2 => encoded.push('='),
        _ => (),
    }
    encoded
}
#[cfg(test)]
mod tests {
    //! Tests for the hex/ASCII base64 encoders; fixtures are the classic
    //! cryptopals set 1 challenge vectors and RFC 4648-style padding cases.
    use super::*;
    const VAL: &'static str = "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d";
    const RESULT: &'static str = "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t";
    const T1: &'static str = "Man is distinguished, not only by his reason, but by this singular passion from other animals, which is a lust of the mind, that by a perseverance of delight in the continued and indefatigable generation of knowledge, exceeds the short vehemence of any carnal pleasure.";
    const T1_ANSWER: &'static str = "TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=";
    // Exploratory print-only check (no assertions).
    #[test]
    pub fn test_alpha_to_hex() {
        let alpha = "abcdefghijklmnopqrstuvwxyz";
        for a in alpha.chars() {
            println!("{}", a);
        }
    }
    // Exploratory XOR round-trip check (print-only, no assertions).
    #[test]
    pub fn test_xor_values() {
        let val = "abcdefg"; // a
        let alpha = 'a'; // b
        dbg!(alpha as u32);
        println!("alpha as u32 as hex {:x}", alpha as u32);
        println!("alpha to_digit as hex {:x}", alpha.to_digit(16).unwrap());
        println!("alpha to_digit as hex {:b}", alpha.to_digit(16).unwrap());
        dbg!(translators::ascii_to_hex(&alpha.to_string()));
        let hex_enc_val: Vec<u32> = val.chars().map(|x| x as u32).collect(); // convert to hex
        dbg!(&hex_enc_val);
        let xor_env_val: Vec<u32> = val.chars().map(|x| x as u32 ^ alpha as u32).collect(); // xor a ^ b = c
        dbg!(&xor_env_val);
        let re_xor_env_val: Vec<u32> = xor_env_val.iter().map(|x| *x ^ alpha as u32).collect(); // xor c ^ b = a
        dbg!(&re_xor_env_val);
        let decoded: Vec<&str> = re_xor_env_val.iter().map(|x| {
            translators::hex_val_to_ascii(*x).1
        }).collect();
        dbg!(decoded);
    }
    // Exploratory char/digit dump (print-only, no assertions).
    #[test]
    pub fn test_xor_single() {
        let val = "1b37";
        for v in val.chars() {
            print!("|char {} ", v);
            print!("|as u32 {} ", v as u32);
            print!("|to_digit(16) {} ", v.to_digit(16).unwrap());
            println!();
        }
    }
    #[test]
    pub fn test_base64_match() {
        // "Man" as a packed 24-bit group encodes to "TWFu".
        let input: u32 = 0b0100_1101_0110_0001_0110_1110;
        assert_eq!(encode_match(input, 3), "TWFu");
    }
    #[test]
    pub fn test_hex_decode() {
        assert_eq!(encode_hex_str(VAL), RESULT);
    }
    // Manual spot-check of a 2-byte group (print-only).
    #[test]
    pub fn test_manual_encode() {
        dbg!(encode_match(0b_00100000_01100100, 2));
    }
    #[test]
    pub fn test_run() {
        assert_eq!(encode_hex_str(VAL), RESULT);
        assert_eq!(encode_str(T1), T1_ANSWER);
    }
    // Padding cases: inputs of length 1..=10 cover "==", "=", and no padding.
    #[test]
    pub fn test_run_2() {
        let test_val = ("s", "cw==");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
        let test_val = ("Ma", "TWE=");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
        let test_val = ("Man", "TWFu");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
        let test_val = ("Man is", "TWFuIGlz");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
        let test_val = ("Man is d", "TWFuIGlzIGQ=");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
        let test_val = ("Man is di", "TWFuIGlzIGRp");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
        let test_val = ("Man is dis", "TWFuIGlzIGRpcw==");
        dbg!(test_val);
        assert_eq!(encode_str(test_val.0),test_val.1);
    }
}
pub mod euler_library;
pub mod tests; |
use chrono::NaiveDateTime;
use uuid::Uuid;
use models;
// Create
#[derive(Debug, Serialize)]
/// Response body for room creation: the room id plus its metadata.
pub struct CreateResponse {
    id: Uuid,
    data: CreateResponseData,
}
impl CreateResponse {
    /// Assembles the API response for a newly created room.
    pub fn new(room: &models::Room) -> CreateResponse {
        let data = CreateResponseData { created_at: room.created_at };
        CreateResponse { id: room.id, data }
    }
}
#[derive(Debug, Serialize)]
/// Nested payload of `CreateResponse`: creation timestamp only.
struct CreateResponseData {
    created_at: NaiveDateTime,
}
// Create
// Read
#[derive(Debug, Deserialize)]
/// Request body for fetching a single room.
pub struct ReadRequest {
    pub room_id: Uuid,
}
// Read responses reuse the create-response shapes.
pub type ReadResponse = CreateResponse;
type ReadResponseData = CreateResponseData;
// Read
// Delete
// Delete reuses the read request/response shapes.
pub type DeleteRequest = ReadRequest;
pub type DeleteResponse = ReadResponse;
// Delete
// List
#[derive(Debug, Serialize)]
/// List response: serializes as a bare JSON array of per-room entries.
pub struct ListResponse(Vec<ListResponseData>);
impl ListResponse {
    /// Assembles the list response, one entry per room, preserving order.
    pub fn new(rooms: &[models::Room]) -> ListResponse {
        let mut data = Vec::with_capacity(rooms.len());
        for room in rooms {
            data.push(ListResponseData {
                id: room.id,
                data: ReadResponseData {
                    created_at: room.created_at,
                },
            });
        }
        ListResponse(data)
    }
}
// Each list entry has the same shape as a read response.
type ListResponseData = ReadResponse;
// List
|
use unftp_sbe_fs::ServerExt;
/// Example FTP server serving the OS temp directory on 127.0.0.1:2121.
/// Blocks forever in `listen`; panics (`unwrap`) if binding/serving fails.
#[tokio::main]
pub async fn main() {
    pretty_env_logger::init();
    let addr = "127.0.0.1:2121";
    let server = libunftp::Server::with_fs(std::env::temp_dir());
    println!("Starting ftp server on {}", addr);
    server.listen(addr).await.unwrap();
}
|
use crate::display::DisplayWith;
use crate::grammar::Grammar;
use crate::grammar::NonterminalIndex;
use crate::grammar::RuleIndex;
use crate::grammar::TerminalIndex;
use crate::utility::vec_with_size;
use serde::Deserialize;
use serde::Serialize;
use std::fmt;
use std::iter;
use std::ops;
#[derive(Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Clone, Copy)]
/// A parser state, identified by its index into the table's state dimension.
pub struct State(pub(self) usize);
impl fmt::Display for State {
    // States print as their bare index.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
#[derive(Serialize, Deserialize, Debug)]
/// LR parse table: `action` on terminals, `end_action` on end-of-input,
/// `goto` on nonterminals.
pub struct Table {
    action: Vec<Vec<Action>>, // indexed with TerminalIndex and State
    end_action: Vec<EndAction>, // indexed with State
    goto: Vec<Vec<Goto>>, // indexed with NonterminalIndex and State
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq)]
/// An action-table entry.
pub enum Action {
    // reduce using rule
    Reduce(RuleIndex),
    // shift to state
    Shift(State),
    // no valid action for this (state, terminal) pair
    Error,
}
impl fmt::Display for Action {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Action::Reduce(index) => {
write!(f, "r{index}")?;
}
Action::Shift(state) => write!(f, "s{state}")?,
Action::Error => write!(f, "")?,
}
Ok(())
}
}
// Grammar-aware variant of Display: reduce actions show the rule rendered
// through the grammar instead of the bare rule index.
impl<N, T> DisplayWith<Grammar<N, T>> for Action
where
    N: fmt::Display + Ord,
    T: fmt::Debug + Ord,
{
    fn fmt(&self, f: &mut fmt::Formatter, grammar: &Grammar<N, T>) -> fmt::Result {
        match self {
            Action::Reduce(index) => {
                write!(f, "r{}", index.display_with(grammar))?;
            }
            Action::Shift(state) => write!(f, "s{state}")?,
            Action::Error => write!(f, "")?,
        }
        Ok(())
    }
}
impl Default for Action {
fn default() -> Self {
Action::Error
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq)]
/// Action taken when the lookahead is end-of-input.
pub enum EndAction {
    // reduce using rule
    Reduce(RuleIndex),
    Accept,
    Error,
}
impl fmt::Display for EndAction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
EndAction::Reduce(index) => {
write!(f, "r{index}")?;
}
EndAction::Accept => write!(f, "acc")?,
EndAction::Error => write!(f, "")?,
}
Ok(())
}
}
// Grammar-aware variant of Display: reduce actions show the rule rendered
// through the grammar instead of the bare rule index.
impl<N, T> DisplayWith<Grammar<N, T>> for EndAction
where
    N: fmt::Display + Ord,
    T: fmt::Debug + Ord,
{
    fn fmt(&self, f: &mut fmt::Formatter, grammar: &Grammar<N, T>) -> fmt::Result {
        match self {
            EndAction::Reduce(index) => {
                write!(f, "r{}", index.display_with(grammar))?;
            }
            EndAction::Accept => write!(f, "acc")?,
            EndAction::Error => write!(f, "")?,
        }
        Ok(())
    }
}
impl Default for EndAction {
fn default() -> Self {
EndAction::Error
}
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq)]
/// A goto-table entry: the state to transition to after a reduction, or Error.
pub enum Goto {
    Goto(State),
    Error,
}
impl fmt::Display for Goto {
    // Prints the target state index, or nothing for Error.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match self {
            Goto::Goto(state) => write!(f, "{state}"),
            Goto::Error => write!(f, ""),
        }
    }
}
impl Default for Goto {
    // Table cells start out as Error until explicitly filled in.
    fn default() -> Self {
        Goto::Error
    }
}
impl Table {
    /// Create an empty (zero-state) table with one action column per terminal
    /// and one goto column per nonterminal of the grammar.
    pub fn new<N, T>(grammar: &Grammar<N, T>) -> Table
    where
        N: Ord,
        T: Ord,
    {
        Table {
            action: vec_with_size(grammar.terminals_len(), Vec::new()),
            end_action: Vec::new(),
            goto: vec_with_size(grammar.nonterminals_len(), Vec::new()),
        }
    }
    /// Appends a new state, initializing every cell for it to the Error
    /// default, and returns the new state's handle.
    pub fn push_state(&mut self) -> State {
        let state = State(self.state_len());
        self.end_action.push(Default::default());
        for states in &mut self.action {
            states.push(Default::default());
        }
        for states in &mut self.goto {
            states.push(Default::default());
        }
        state
    }
    /// Number of states currently in the table.
    pub fn state_len(&self) -> usize {
        // end_action has exactly one entry per state, so its length is the count.
        self.end_action.len()
    }
    /// Iterator over all states, in index order.
    pub fn states(&self) -> iter::Map<ops::Range<usize>, fn(usize) -> State> {
        (0..self.state_len()).map(State)
    }
    /// The parser's start state is always state 0.
    pub fn start_state() -> State {
        State(0)
    }
    /// Goto entry for (state, nonterminal). Cells are indexed symbol-first.
    pub fn goto(&self, state: State, nonterminal: NonterminalIndex) -> Goto {
        self.goto[nonterminal.value()][state.0]
    }
    /// Mutable access to the goto entry for (state, nonterminal).
    pub fn goto_mut(&mut self, state: State, nonterminal: NonterminalIndex) -> &mut Goto {
        &mut self.goto[nonterminal.value()][state.0]
    }
    /// Action entry for (state, terminal).
    pub fn action(&self, state: State, terminal: TerminalIndex) -> Action {
        self.action[terminal.value()][state.0]
    }
    /// Mutable access to the action entry for (state, terminal).
    pub fn action_mut(&mut self, state: State, terminal: TerminalIndex) -> &mut Action {
        &mut self.action[terminal.value()][state.0]
    }
    /// Action entry for (state, end-of-input).
    pub fn end_action(&self, state: State) -> EndAction {
        self.end_action[state.0]
    }
    /// Mutable access to the end-of-input action for a state.
    pub fn end_action_mut(&mut self, state: State) -> &mut EndAction {
        &mut self.end_action[state.0]
    }
    /// Renders the table for display: a header spanning "action"/"goto",
    /// a symbol row (terminals, then "$", then nonterminals), and one row
    /// per state. With `detailed`, actions are rendered via `DisplayWith`
    /// (showing full rules) instead of plain `Display`.
    pub fn pretty_table<N, T>(&self, grammar: &Grammar<N, T>, detailed: bool) -> prettytable::Table
    where
        N: fmt::Display + Ord,
        T: fmt::Debug + Ord,
    {
        use prettytable::{Cell, Row};
        let mut table = prettytable::Table::new();
        table.add_row({
            // Top banner: "action" spans the terminals plus the "$" column.
            let cells = vec![
                Cell::new(""),
                Cell::new("action").with_hspan(grammar.terminals_len() + 1),
                Cell::new("goto").with_hspan(grammar.nonterminals_len()),
            ];
            Row::new(cells)
        });
        table.add_row({
            // Symbol header row.
            let mut cells = vec![Cell::new("")];
            for terminal in grammar.terminal_indices() {
                cells.push(Cell::new(&format!("{}", terminal.display_with(grammar))));
            }
            cells.push(Cell::new("$"));
            for nonterminal in grammar.nonterminal_indices() {
                cells.push(Cell::new(&format!("{}", nonterminal.display_with(grammar))));
            }
            Row::new(cells)
        });
        for state in self.states() {
            table.add_row({
                let mut cells = vec![Cell::new(&format!("{state}"))];
                for terminal in grammar.terminal_indices() {
                    if detailed {
                        cells.push(Cell::new(&format!(
                            "{}",
                            self.action(state, terminal).display_with(grammar)
                        )));
                    } else {
                        cells.push(Cell::new(&format!("{}", self.action(state, terminal))));
                    }
                }
                if detailed {
                    cells.push(Cell::new(&format!(
                        "{}",
                        self.end_action(state).display_with(grammar)
                    )));
                } else {
                    cells.push(Cell::new(&format!("{}", self.end_action(state))));
                }
                for nonterminal in grammar.nonterminal_indices() {
                    cells.push(Cell::new(&format!("{}", self.goto(state, nonterminal))));
                }
                Row::new(cells)
            });
        }
        table
    }
}
|
// Generated (svd2rust-style) reader types for the USB INTS interrupt-status
// register; every field reads as a single bool flag.
#[doc = "Reader of register INTS"]
pub type R = crate::R<u32, super::INTS>;
#[doc = "Reader of field `EP_STALL_NAK`"]
pub type EP_STALL_NAK_R = crate::R<bool, bool>;
#[doc = "Reader of field `ABORT_DONE`"]
pub type ABORT_DONE_R = crate::R<bool, bool>;
#[doc = "Reader of field `DEV_SOF`"]
pub type DEV_SOF_R = crate::R<bool, bool>;
#[doc = "Reader of field `SETUP_REQ`"]
pub type SETUP_REQ_R = crate::R<bool, bool>;
#[doc = "Reader of field `DEV_RESUME_FROM_HOST`"]
pub type DEV_RESUME_FROM_HOST_R = crate::R<bool, bool>;
#[doc = "Reader of field `DEV_SUSPEND`"]
pub type DEV_SUSPEND_R = crate::R<bool, bool>;
#[doc = "Reader of field `DEV_CONN_DIS`"]
pub type DEV_CONN_DIS_R = crate::R<bool, bool>;
#[doc = "Reader of field `BUS_RESET`"]
pub type BUS_RESET_R = crate::R<bool, bool>;
#[doc = "Reader of field `VBUS_DETECT`"]
pub type VBUS_DETECT_R = crate::R<bool, bool>;
#[doc = "Reader of field `STALL`"]
pub type STALL_R = crate::R<bool, bool>;
#[doc = "Reader of field `ERROR_CRC`"]
pub type ERROR_CRC_R = crate::R<bool, bool>;
#[doc = "Reader of field `ERROR_BIT_STUFF`"]
pub type ERROR_BIT_STUFF_R = crate::R<bool, bool>;
#[doc = "Reader of field `ERROR_RX_OVERFLOW`"]
pub type ERROR_RX_OVERFLOW_R = crate::R<bool, bool>;
#[doc = "Reader of field `ERROR_RX_TIMEOUT`"]
pub type ERROR_RX_TIMEOUT_R = crate::R<bool, bool>;
#[doc = "Reader of field `ERROR_DATA_SEQ`"]
pub type ERROR_DATA_SEQ_R = crate::R<bool, bool>;
#[doc = "Reader of field `BUFF_STATUS`"]
pub type BUFF_STATUS_R = crate::R<bool, bool>;
#[doc = "Reader of field `TRANS_COMPLETE`"]
pub type TRANS_COMPLETE_R = crate::R<bool, bool>;
#[doc = "Reader of field `HOST_SOF`"]
pub type HOST_SOF_R = crate::R<bool, bool>;
#[doc = "Reader of field `HOST_RESUME`"]
pub type HOST_RESUME_R = crate::R<bool, bool>;
#[doc = "Reader of field `HOST_CONN_DIS`"]
pub type HOST_CONN_DIS_R = crate::R<bool, bool>;
impl R {
#[doc = "Bit 19 - Raised when any bit in EP_STATUS_STALL_NAK is set. Clear by clearing all bits in EP_STATUS_STALL_NAK."]
#[inline(always)]
pub fn ep_stall_nak(&self) -> EP_STALL_NAK_R {
EP_STALL_NAK_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 18 - Raised when any bit in ABORT_DONE is set. Clear by clearing all bits in ABORT_DONE."]
#[inline(always)]
pub fn abort_done(&self) -> ABORT_DONE_R {
ABORT_DONE_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 17 - Set every time the device receives a SOF (Start of Frame) packet. Cleared by reading SOF_RD"]
#[inline(always)]
pub fn dev_sof(&self) -> DEV_SOF_R {
DEV_SOF_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 16 - Device. Source: SIE_STATUS.SETUP_REC"]
#[inline(always)]
pub fn setup_req(&self) -> SETUP_REQ_R {
SETUP_REQ_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 15 - Set when the device receives a resume from the host. Cleared by writing to SIE_STATUS.RESUME"]
#[inline(always)]
pub fn dev_resume_from_host(&self) -> DEV_RESUME_FROM_HOST_R {
DEV_RESUME_FROM_HOST_R::new(((self.bits >> 15) & 0x01) != 0)
}
#[doc = "Bit 14 - Set when the device suspend state changes. Cleared by writing to SIE_STATUS.SUSPENDED"]
#[inline(always)]
pub fn dev_suspend(&self) -> DEV_SUSPEND_R {
DEV_SUSPEND_R::new(((self.bits >> 14) & 0x01) != 0)
}
#[doc = "Bit 13 - Set when the device connection state changes. Cleared by writing to SIE_STATUS.CONNECTED"]
#[inline(always)]
pub fn dev_conn_dis(&self) -> DEV_CONN_DIS_R {
DEV_CONN_DIS_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 12 - Source: SIE_STATUS.BUS_RESET"]
#[inline(always)]
pub fn bus_reset(&self) -> BUS_RESET_R {
BUS_RESET_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 11 - Source: SIE_STATUS.VBUS_DETECT"]
#[inline(always)]
pub fn vbus_detect(&self) -> VBUS_DETECT_R {
VBUS_DETECT_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 10 - Source: SIE_STATUS.STALL_REC"]
#[inline(always)]
pub fn stall(&self) -> STALL_R {
STALL_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 9 - Source: SIE_STATUS.CRC_ERROR"]
#[inline(always)]
pub fn error_crc(&self) -> ERROR_CRC_R {
ERROR_CRC_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 8 - Source: SIE_STATUS.BIT_STUFF_ERROR"]
#[inline(always)]
pub fn error_bit_stuff(&self) -> ERROR_BIT_STUFF_R {
ERROR_BIT_STUFF_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 7 - Source: SIE_STATUS.RX_OVERFLOW"]
#[inline(always)]
pub fn error_rx_overflow(&self) -> ERROR_RX_OVERFLOW_R {
ERROR_RX_OVERFLOW_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 6 - Source: SIE_STATUS.RX_TIMEOUT"]
#[inline(always)]
pub fn error_rx_timeout(&self) -> ERROR_RX_TIMEOUT_R {
ERROR_RX_TIMEOUT_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 5 - Source: SIE_STATUS.DATA_SEQ_ERROR"]
#[inline(always)]
pub fn error_data_seq(&self) -> ERROR_DATA_SEQ_R {
ERROR_DATA_SEQ_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 4 - Raised when any bit in BUFF_STATUS is set. Clear by clearing all bits in BUFF_STATUS."]
#[inline(always)]
pub fn buff_status(&self) -> BUFF_STATUS_R {
BUFF_STATUS_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 3 - Raised every time SIE_STATUS.TRANS_COMPLETE is set. Clear by writing to this bit."]
#[inline(always)]
pub fn trans_complete(&self) -> TRANS_COMPLETE_R {
TRANS_COMPLETE_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 2 - Host: raised every time the host sends a SOF (Start of Frame). Cleared by reading SOF_RD"]
#[inline(always)]
pub fn host_sof(&self) -> HOST_SOF_R {
HOST_SOF_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 1 - Host: raised when a device wakes up the host. Cleared by writing to SIE_STATUS.RESUME"]
#[inline(always)]
pub fn host_resume(&self) -> HOST_RESUME_R {
HOST_RESUME_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 0 - Host: raised when a device is connected or disconnected (i.e. when SIE_STATUS.SPEED changes). Cleared by writing to SIE_STATUS.SPEED"]
#[inline(always)]
pub fn host_conn_dis(&self) -> HOST_CONN_DIS_R {
HOST_CONN_DIS_R::new((self.bits & 0x01) != 0)
}
}
|
use std::sync::{Arc, Mutex};
use std::net::{TcpListener, TcpStream};
use std::error::Error;
use std::io::Write;
use chrono::prelude::*;
use logging::MessageType;
/// An accepted TCP connection paired with the time it was packaged.
pub(crate) struct StreamPackage{
    stream: TcpStream,
    time_stamp: DateTime<Utc>, // arrival timestamp (UTC); see StreamPackage::package
}
/// FIFO queue of packages, shared between threads as `Arc<Mutex<StreamQueue>>`.
pub(crate) struct StreamQueue{
    packages: Vec<StreamPackage>,
}
/// Queue operation failures.
/// NOTE(review): currently unused — `de_queue` panics on an empty queue instead.
pub(crate) enum QueueOperationError{
    Empty,
}
#[inline]
pub(crate) fn network_controller(in_queue: Arc<Mutex<StreamQueue>>, out_queue: Arc<Mutex<StreamQueue>>, addr: &str){
//Listener
let listener: TcpListener = TcpListener::bind(addr).unwrap();
//Listen for incoming connections
let awaiting_en_queue: Vec<StreamPackage> = Vec::new();
for stream in listener.incoming(){
let unparsed_request: TcpStream = match stream{
Ok(unparsed_request) => {
//TODO: Check how macros work
log!(MessageType::Connection, unparsed_request.local_addr().unwrap().to_string().as_str());
unparsed_request
},
Err(error) => {
log!(MessageType::Error, error.description());
break;
}
};
//Package the request
let package: StreamPackage = StreamPackage::package(unparsed_request);
//Acquire lock for the in_queue then enque the package
//If thereis an object on the out_queue, we'll dequeue and
match in_queue.lock(){
Ok(mutex) => {
if awaiting_en_queue.len() != 0{
//FIXME
for pack in awaiting_en_queue{
mutex.en_queue(pack);
}
mutex.en_queue(package);
//Empty the awaiting queue
awaiting_en_queue = Vec::new();
} else {
mutex.en_queue(package);
}
},
Err(error) =>{
awaiting_en_queue.push(package);
}
}
match out_queue.lock(){
Ok(mutex) => {
if !mutex.empty(){
let package: StreamPackage = mutex.de_queue();
let out_stream: TcpStream = package.unpackage();
out_stream.flush();
}
},
}
}
}
impl StreamQueue{
    /// Creates an empty queue.
    pub(crate) fn new() -> StreamQueue{
        StreamQueue{
            packages: Vec::new(),
        }
    }
    /// Appends `package` to the back of the queue.
    #[inline]
    pub(crate) fn en_queue(&mut self, package: StreamPackage){
        self.packages.push(package);
    }
    /// Removes and returns the oldest package (FIFO order).
    ///
    /// # Panics
    /// Panics when the queue is empty — callers must check [`StreamQueue::empty`]
    /// first. (`QueueOperationError::Empty` exists for a fallible variant but
    /// is not wired up yet.)
    ///
    /// NOTE(review): `Vec::remove(0)` is O(n); a `VecDeque` would make this
    /// O(1), but that would change the struct layout.
    #[inline]
    pub(crate) fn de_queue(&mut self) -> StreamPackage{
        self.packages.remove(0)
    }
    /// Returns true when no packages are queued.
    pub(crate) fn empty(&self) -> bool{
        self.packages.is_empty()
    }
}
impl StreamPackage{
    /// Wraps an accepted TCP stream together with the current UTC time.
    pub(crate) fn package(package: TcpStream) -> StreamPackage{
        StreamPackage{
            stream: package,
            time_stamp: Utc::now(), //TODO: consider whether arrival time should be captured earlier
        }
    }
    /// Consumes the package, yielding the underlying stream.
    #[inline]
    pub(crate) fn unpackage(self) -> TcpStream{
        self.stream
    }
    /// Borrows the underlying stream mutably without consuming the package.
    #[inline]
    pub(crate) fn peek_stream(&mut self) -> &mut TcpStream{
        &mut self.stream
    }
}
|
use failure::Error;
/// Part one: prints the scores of the ten recipes made immediately after
/// the first 768071 recipes (Advent of Code 2018, day 14).
fn first_answer() {
    const TARGET: usize = 768071;
    let mut board: Vec<usize> = vec![3, 7];
    let (mut elf1, mut elf2) = (0usize, 1usize);
    // Scratch stack for the digits of each new combined score.
    let mut digit_stack: Vec<usize> = Vec::new();
    while board.len() < TARGET + 10 {
        let (score1, score2) = (board[elf1], board[elf2]);
        let mut sum = score1 + score2;
        // Collect digits least-significant first (always at least one digit).
        loop {
            digit_stack.push(sum % 10);
            sum /= 10;
            if sum == 0 {
                break;
            }
        }
        // Pop them back off so they land on the board most-significant first.
        while let Some(digit) = digit_stack.pop() {
            board.push(digit);
        }
        let len = board.len();
        elf1 = (elf1 + 1 + score1) % len;
        elf2 = (elf2 + 1 + score2) % len;
    }
    println!("first answer: {:?}", &board[TARGET..TARGET + 10]);
}
/// The digit sequence searched for on the scoreboard (the puzzle input 768071).
const PATTERN: [usize; 6] = [7, 6, 8, 0, 7, 1];
/// An in-progress match of `PATTERN` against the recipe board.
struct Match {
    /// Board index where this candidate match begins.
    start: usize,
    /// Index into `PATTERN` of the digit this match expects next.
    next: usize,
}
impl Match {
    /// Starts a fresh candidate match at board position `start`.
    fn new(start: usize) -> Match {
        Match { next: 0, start }
    }
    /// Returns true when `n` is exactly the digit this match expects next.
    fn wants(&self, n: usize) -> bool {
        PATTERN[self.next] == n
    }
}
/// Part two: prints how many recipes appear on the board before the digit
/// sequence `PATTERN` first occurs, then returns.
fn second_answer() {
    let mut board: Vec<usize> = vec![3, 7];
    let mut elf1 = 0;
    let mut elf2 = 1;
    // Scratch stack for the digits of each new combined score.
    let mut digits: Vec<usize> = Vec::new();
    // All candidate matches of PATTERN currently in progress.
    let mut matches: Vec<Match> = Vec::new();
    loop {
        let score1 = board[elf1];
        let score2 = board[elf2];
        let mut score = score1 + score2;
        // Push digits least-significant first; popped below in board order.
        digits.push(score % 10);
        score /= 10;
        while score > 0 {
            digits.push(score % 10);
            score /= 10;
        }
        while !digits.is_empty() {
            // Every board position starts a new candidate match (next == 0);
            // it only survives the retain below if this digit is PATTERN[0].
            matches.push(Match::new(board.len()));
            let recipe = digits.pop().unwrap();
            board.push(recipe);
            // Drop every candidate whose expected digit is not `recipe`.
            matches.retain(|m| m.wants(recipe));
            // Survivors advance; a fully-advanced match means PATTERN was found.
            for m in matches.iter_mut() {
                m.next += 1;
                if m.next == PATTERN.len() {
                    println!("second answer: {}", m.start);
                    return;
                }
            }
        }
        let n = board.len();
        elf1 = (elf1 + 1 + score1) % n;
        elf2 = (elf2 + 1 + score2) % n;
    }
}
fn main() -> Result<(), Error> {
first_answer();
second_answer();
Ok(())
}
|
#[doc = "Register `CR1` reader"]
pub type R = crate::R<CR1_SPEC>;
#[doc = "Register `CR1` writer"]
pub type W = crate::W<CR1_SPEC>;
#[doc = "Field `LPMS` reader - Low-power mode selection These bits select the low-power mode entered when CPU enters deepsleep mode. 1XX: Shutdown mode"]
pub type LPMS_R = crate::FieldReader;
#[doc = "Field `LPMS` writer - Low-power mode selection These bits select the low-power mode entered when CPU enters deepsleep mode. 1XX: Shutdown mode"]
pub type LPMS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `FPD_STOP` reader - Flash memory powered down during Stop mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Stop mode."]
pub type FPD_STOP_R = crate::BitReader;
#[doc = "Field `FPD_STOP` writer - Flash memory powered down during Stop mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Stop mode."]
pub type FPD_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FPD_SLP` reader - Flash memory powered down during Sleep mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Sleep mode."]
pub type FPD_SLP_R = crate::BitReader;
#[doc = "Field `FPD_SLP` writer - Flash memory powered down during Sleep mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Sleep mode."]
pub type FPD_SLP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bits 0:2 - Low-power mode selection These bits select the low-power mode entered when CPU enters deepsleep mode. 1XX: Shutdown mode"]
    #[inline(always)]
    pub fn lpms(&self) -> LPMS_R {
        LPMS_R::new((self.bits as u8) & 0x07)
    }
    #[doc = "Bit 3 - Flash memory powered down during Stop mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Stop mode."]
    #[inline(always)]
    pub fn fpd_stop(&self) -> FPD_STOP_R {
        FPD_STOP_R::new(self.bits & (1 << 3) != 0)
    }
    #[doc = "Bit 5 - Flash memory powered down during Sleep mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Sleep mode."]
    #[inline(always)]
    pub fn fpd_slp(&self) -> FPD_SLP_R {
        FPD_SLP_R::new(self.bits & (1 << 5) != 0)
    }
}
// Write-side accessors: each returns the field's typed write proxy with the
// bit offset baked into the const generic parameter.
impl W {
    #[doc = "Bits 0:2 - Low-power mode selection These bits select the low-power mode entered when CPU enters deepsleep mode. 1XX: Shutdown mode"]
    #[inline(always)]
    #[must_use]
    pub fn lpms(&mut self) -> LPMS_W<CR1_SPEC, 0> {
        LPMS_W::new(self)
    }
    #[doc = "Bit 3 - Flash memory powered down during Stop mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Stop mode."]
    #[inline(always)]
    #[must_use]
    pub fn fpd_stop(&mut self) -> FPD_STOP_W<CR1_SPEC, 3> {
        FPD_STOP_W::new(self)
    }
    #[doc = "Bit 5 - Flash memory powered down during Sleep mode This bit determines whether the Flash memory is put in power-down mode or remains in idle mode when the device enters Sleep mode."]
    #[inline(always)]
    #[must_use]
    pub fn fpd_slp(&mut self) -> FPD_SLP_W<CR1_SPEC, 5> {
        FPD_SLP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "PWR control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR1_SPEC;
impl crate::RegisterSpec for CR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr1::R`](R) reader structure"]
impl crate::Readable for CR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr1::W`](W) writer structure"]
impl crate::Writable for CR1_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR1 to value 0x0208"]
impl crate::Resettable for CR1_SPEC {
    const RESET_VALUE: Self::Ux = 0x0208;
}
|
#![doc = include_str!("../README.md")]
use anyhow::{anyhow, Context, Result};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::path::PathBuf;
use std::{env, fs};
/// Holds the contents of tree-sitter's configuration file.
///
/// The file typically lives at `~/.config/tree-sitter/config.json`, but see the [`Config::load`][]
/// method for the full details on where it might be located.
///
/// This type holds the generic JSON content of the configuration file. Individual tree-sitter
/// components will use the [`Config::get`][] method to parse that JSON to extract configuration
/// fields that are specific to that component.
#[derive(Debug)]
pub struct Config {
    /// Path of the file this configuration was loaded from (and that
    /// [`Config::save`] writes back to).
    pub location: PathBuf,
    /// Raw JSON contents of the configuration file.
    pub config: Value,
}
impl Config {
    /// Searches the known locations (see [`Config::load`]) for an existing
    /// configuration file, returning its path, or `Ok(None)` when none exists.
    pub fn find_config_file() -> Result<Option<PathBuf>> {
        if let Ok(path) = env::var("TREE_SITTER_DIR") {
            let mut path = PathBuf::from(path);
            path.push("config.json");
            if !path.exists() {
                return Ok(None);
            }
            if path.is_file() {
                return Ok(Some(path));
            }
        }
        let xdg_path = Self::xdg_config_file()?;
        if xdg_path.is_file() {
            return Ok(Some(xdg_path));
        }
        // Fall back to the pre-XDG location tree-sitter used to write to.
        let legacy_path = dirs::home_dir()
            // `ok_or_else` avoids constructing the anyhow! error on the
            // success path (clippy: or_fun_call).
            .ok_or_else(|| anyhow!("Cannot determine home directory"))?
            .join(".tree-sitter")
            .join("config.json");
        if legacy_path.is_file() {
            return Ok(Some(legacy_path));
        }
        Ok(None)
    }
    /// Path of the XDG-style config file
    /// (`<config_dir>/tree-sitter/config.json`), whether or not it exists.
    fn xdg_config_file() -> Result<PathBuf> {
        let xdg_path = dirs::config_dir()
            .ok_or_else(|| anyhow!("Cannot determine config directory"))?
            .join("tree-sitter")
            .join("config.json");
        Ok(xdg_path)
    }
    /// Locates and loads in the user's configuration file. We search for the configuration file
    /// in the following locations, in order:
    ///
    /// - `$TREE_SITTER_DIR/config.json`, if the `TREE_SITTER_DIR` environment variable is set
    /// - `tree-sitter/config.json` in your default user configuration directory, as determined
    ///   by [`dirs::config_dir`](https://docs.rs/dirs/*/dirs/fn.config_dir.html)
    /// - `$HOME/.tree-sitter/config.json` as a fallback from where tree-sitter _used_ to store
    ///   its configuration
    pub fn load() -> Result<Config> {
        let location = match Self::find_config_file()? {
            Some(location) => location,
            // No file on disk yet: fall back to an empty in-memory config.
            None => return Config::initial(),
        };
        let content = fs::read_to_string(&location)
            .with_context(|| format!("Failed to read {}", &location.to_string_lossy()))?;
        let config = serde_json::from_str(&content)
            .with_context(|| format!("Bad JSON config {}", &location.to_string_lossy()))?;
        Ok(Config { location, config })
    }
    /// Creates an empty initial configuration file. You can then use the [`Config::add`][] method
    /// to add the component-specific configuration types for any components that want to add
    /// content to the default file, and then use [`Config::save`][] to write the configuration to
    /// disk.
    ///
    /// (Note that this is typically only done by the `tree-sitter init-config` command.)
    pub fn initial() -> Result<Config> {
        let location = if let Ok(path) = env::var("TREE_SITTER_DIR") {
            let mut path = PathBuf::from(path);
            path.push("config.json");
            path
        } else {
            Self::xdg_config_file()?
        };
        let config = serde_json::json!({});
        Ok(Config { location, config })
    }
    /// Saves this configuration to the file that it was originally loaded from,
    /// creating intermediate directories as needed.
    pub fn save(&self) -> Result<()> {
        let json = serde_json::to_string_pretty(&self.config)?;
        fs::create_dir_all(self.location.parent().unwrap())?;
        fs::write(&self.location, json)?;
        Ok(())
    }
    /// Parses a component-specific configuration from the configuration file. The type `C` must
    /// be [deserializable](https://docs.rs/serde/*/serde/trait.Deserialize.html) from a JSON
    /// object, and must only include the fields relevant to that component.
    pub fn get<C>(&self) -> Result<C>
    where
        C: for<'de> Deserialize<'de>,
    {
        let config = serde_json::from_value(self.config.clone())?;
        Ok(config)
    }
    /// Adds a component-specific configuration to the configuration file. The type `C` must be
    /// [serializable](https://docs.rs/serde/*/serde/trait.Serialize.html) into a JSON object, and
    /// must only include the fields relevant to that component.
    pub fn add<C>(&mut self, config: C) -> Result<()>
    where
        C: Serialize,
    {
        let mut config = serde_json::to_value(&config)?;
        // Merge the component's keys into the top-level JSON object.
        self.config
            .as_object_mut()
            .unwrap()
            .append(config.as_object_mut().unwrap());
        Ok(())
    }
}
|
use gtk::glib;
use gtk::prelude::*;
use sysinfo::{Pid, PidExt, Process, ProcessExt};
use crate::utils::format_number;
use std::cell::Cell;
use std::collections::HashMap;
use std::rc::Rc;
#[allow(dead_code)]
pub struct Procs {
pub left_tree: gtk::TreeView,
pub scroll: gtk::ScrolledWindow,
pub current_pid: Rc<Cell<Option<Pid>>>,
pub kill_button: gtk::Button,
pub info_button: gtk::Button,
pub vertical_layout: gtk::Box,
pub list_store: gtk::ListStore,
pub columns: Vec<gtk::TreeViewColumn>,
pub filter_entry: gtk::SearchEntry,
pub search_bar: gtk::SearchBar,
}
impl Procs {
pub fn new(proc_list: &HashMap<Pid, Process>, stack: >k::Stack) -> Procs {
let left_tree = gtk::TreeView::builder().headers_visible(true).build();
let scroll = gtk::ScrolledWindow::builder().child(&left_tree).build();
let current_pid = Rc::new(Cell::new(None));
let kill_button = gtk::Button::builder()
.label("End task")
.hexpand(true)
.margin_top(6)
.margin_bottom(6)
.margin_end(6)
.sensitive(false)
.build();
let info_button = gtk::Button::builder()
.label("More information")
.hexpand(true)
.margin_top(6)
.margin_bottom(6)
.margin_end(6)
.sensitive(false)
.build();
let overlay = gtk::Overlay::builder()
.child(&scroll)
.hexpand(true)
.vexpand(true)
.build();
let filter_entry = gtk::SearchEntry::new();
let search_bar = gtk::SearchBar::builder()
.halign(gtk::Align::End)
.valign(gtk::Align::End)
.show_close_button(true)
.child(&filter_entry)
.build();
// We put the filter entry at the right bottom.
overlay.add_overlay(&search_bar);
let mut columns: Vec<gtk::TreeViewColumn> = Vec::new();
let list_store = gtk::ListStore::new(&[
// The first four columns of the model are going to be visible in the view.
glib::Type::U32, // pid
glib::Type::STRING, // name
glib::Type::STRING, // CPU
glib::Type::STRING, // mem
glib::Type::STRING, // disk I/O
// These two will serve as keys when sorting by process name and CPU usage.
glib::Type::STRING, // name_lowercase
glib::Type::F32, // CPU_f32
glib::Type::U64, // mem
glib::Type::U64, // disk I/O
]);
for pro in proc_list.values() {
if let Some(exe) = pro
.exe()
.file_name()
.and_then(|f| f.to_str())
.or_else(|| Some(pro.name()))
{
create_and_fill_model(
&list_store,
pro.pid().as_u32(),
pro.cmd(),
exe,
pro.cpu_usage(),
pro.memory(),
);
}
}
let vertical_layout = gtk::Box::new(gtk::Orientation::Vertical, 0);
let horizontal_layout = gtk::Box::new(gtk::Orientation::Horizontal, 6);
left_tree.connect_cursor_changed(
glib::clone!(@strong current_pid, @weak kill_button, @weak info_button => move |tree_view| {
let selection = tree_view.selection();
let (pid, ret) = if let Some((model, iter)) = selection.selected() {
if let Ok(x) = model.get_value(&iter, 0).get::<u32>() {
(Some(Pid::from_u32(x)), true)
} else {
(None, false)
}
} else {
(None, false)
};
current_pid.set(pid);
kill_button.set_sensitive(ret);
info_button.set_sensitive(ret);
}),
);
vertical_layout.append(&overlay);
horizontal_layout.append(&info_button);
horizontal_layout.append(&kill_button);
vertical_layout.append(&horizontal_layout);
// The filter part.
let filter_model = gtk::TreeModelFilter::new(&list_store, None);
filter_model.set_visible_func(
glib::clone!(@weak filter_entry => @default-return false, move |model, iter| {
if !WidgetExt::is_visible(&filter_entry) {
return true;
}
let text = filter_entry.text();
if text.is_empty() {
return true;
}
let text: &str = text.as_ref();
// TODO: Maybe add an option to make searches case sensitive?
let pid = model.get_value(iter, 0)
.get::<u32>()
.map(|p| p.to_string())
.ok()
.unwrap_or_default();
let name = model.get_value(iter, 1)
.get::<String>()
.map(|s| s.to_lowercase())
.ok()
.unwrap_or_default();
pid.contains(text) ||
text.contains(&pid) ||
name.contains(text) ||
text.contains(&name)
}),
);
// For the filtering to be taken into account, we need to add it directly into the
// "global" model.
let sort_model = gtk::TreeModelSort::with_model(&filter_model);
left_tree.set_model(Some(&sort_model));
left_tree.set_search_entry(Some(&filter_entry));
append_column("pid", &mut columns, &left_tree, None);
append_column("process name", &mut columns, &left_tree, Some(200));
append_column("cpu usage", &mut columns, &left_tree, None);
append_column("memory usage", &mut columns, &left_tree, None);
#[cfg(not(windows))]
{
append_column("disk I/O usage", &mut columns, &left_tree, None);
}
#[cfg(windows)]
{
append_column("I/O usage", &mut columns, &left_tree, None);
}
// When we click the "name" column the order is defined by the
// "name_lowercase" effectively making the built-in comparator ignore case.
columns[1].set_sort_column_id(5);
// Likewise clicking the "CPU" column sorts by the "CPU_f32" one because
// we want the order to be numerical not lexicographical.
columns[2].set_sort_column_id(6);
// The memory usage display has been improved, so to make efficient sort,
// we have to separate the display and the actual number.
columns[3].set_sort_column_id(7);
// The disk I/O usage display has been improved, so to make efficient sort,
// we have to separate the display and the actual number.
columns[4].set_sort_column_id(8);
filter_entry.connect_search_changed(move |_| {
filter_model.refilter();
});
// Sort by CPU usage by default.
sort_model.set_sort_column_id(gtk::SortColumn::Index(6), gtk::SortType::Descending);
stack.add_titled(&vertical_layout, Some("Processes"), "Processes");
Procs {
left_tree,
scroll,
current_pid,
kill_button,
info_button,
vertical_layout: vertical_layout
.downcast::<gtk::Box>()
.expect("downcast failed"),
list_store,
columns,
filter_entry,
search_bar,
}
}
}
fn append_column(
title: &str,
v: &mut Vec<gtk::TreeViewColumn>,
left_tree: >k::TreeView,
max_width: Option<i32>,
) {
let id = v.len() as i32;
let renderer = gtk::CellRendererText::new();
if title != "process name" {
renderer.set_xalign(1.0);
}
let column = gtk::TreeViewColumn::builder()
.title(title)
.resizable(true)
.min_width(10)
.clickable(true)
.sort_column_id(id)
.build();
if let Some(max_width) = max_width {
column.set_max_width(max_width);
column.set_expand(true);
}
column.pack_start(&renderer, true);
column.add_attribute(&renderer, "text", id);
left_tree.append_column(&column);
v.push(column);
}
pub fn create_and_fill_model(
list_store: >k::ListStore,
pid: u32,
cmdline: &[String],
name: &str,
cpu: f32,
memory: u64,
) {
if cmdline.is_empty() || name.is_empty() {
return;
}
list_store.insert_with_values(
None,
&[
(0, &pid),
(1, &name),
(2, &format!("{:.1}", cpu)),
(3, &format_number(memory)),
(4, &String::new()),
(5, &name.to_lowercase()),
(6, &cpu),
(7, &memory),
(8, &0),
],
);
}
|
// Pasts
//
// Copyright (c) 2019-2020 Jeron Aldaron Lau
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// https://apache.org/licenses/LICENSE-2.0>, or the Zlib License, <LICENSE-ZLIB
// or http://opensource.org/licenses/Zlib>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use core::{fmt::Debug, future::Future, pin::Pin, task::Context, task::Poll};
/// Future returned by [`Select::select`]: resolves to `(index, output)` of the
/// first contained future to complete.
pub enum SelectFuture<'b, T, A: Future<Output = T> + Unpin> {
    /// Select over a slice of futures.
    Future(&'b mut [A]),
    /// Select over a slice of optional futures; finished slots become `None`.
    OptFuture(&'b mut [Option<A>]),
}
impl<T, A: Future<Output = T> + Unpin> Debug for SelectFuture<'_, T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Print only the variant name; the contained futures are not Debug.
        let variant = match self {
            SelectFuture::Future(_) => "Future",
            SelectFuture::OptFuture(_) => "OptFuture",
        };
        f.write_str(variant)
    }
}
impl<T, A: Future<Output = T> + Unpin> Future for SelectFuture<'_, T, A> {
    // (index of the completed future, its output)
    type Output = (usize, T);
    /// Polls every contained future in slice order; returns `Ready` with the
    /// index and output of the first one that completes, otherwise `Pending`.
    fn poll(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Self::Output> {
        match *self {
            SelectFuture::Future(ref mut tasks) => {
                for (task_id, task) in tasks.iter_mut().enumerate() {
                    // Sound because A: Unpin.
                    let pin_fut = Pin::new(task);
                    let task = pin_fut.poll(cx);
                    match task {
                        Poll::Ready(ret) => return Poll::Ready((task_id, ret)),
                        Poll::Pending => {}
                    }
                }
            }
            SelectFuture::OptFuture(ref mut tasks) => {
                for (task_id, task_opt) in tasks.iter_mut().enumerate() {
                    // `None` slots are futures that already completed; skip them.
                    if let Some(ref mut task) = task_opt {
                        let pin_fut = Pin::new(task);
                        let task = pin_fut.poll(cx);
                        match task {
                            Poll::Ready(ret) => {
                                // Clear the slot so the finished future is
                                // never polled again on a later select.
                                *task_opt = None;
                                return Poll::Ready((task_id, ret));
                            }
                            Poll::Pending => {}
                        }
                    }
                }
            }
        };
        Poll::Pending
    }
}
/// A trait to select on a slice of `Future`s or `Option<Future>`s.
///
/// # Select on slice of futures.
#[cfg_attr(
    feature = "std",
    doc = r#"
```rust
use pasts::prelude::*;
use pasts::CvarExec;
static EXECUTOR: CvarExec = CvarExec::new();
async fn async_main() {
    let mut hello = async { "Hello" };
    let mut world = async { "World!" };
    // Hello is ready, so returns with index and result.
    assert_eq!((0, "Hello"), [hello.fut(), world.fut()].select().await);
}
EXECUTOR.block_on(async_main());
```
"#
)]
// Future needs to be unpin to prevent UB because `Future`s can move between
// calls to select after starting (which fills future's RAM with garbage data).
pub trait Select<T, A: Future<Output = T> + Unpin> {
    /// Poll multiple futures, and return the value from the future that returns
    /// `Ready` first.
    fn select(&mut self) -> SelectFuture<'_, T, A>;
}
// Selecting over a plain slice: every element is polled on each wake.
impl<T, A: Future<Output = T> + Unpin> Select<T, A> for [A] {
    fn select(&mut self) -> SelectFuture<'_, T, A> {
        SelectFuture::Future(self)
    }
}
// Selecting over a slice of options: completed futures are replaced with
// `None` by `SelectFuture::poll`, so finished slots are skipped on re-poll.
impl<T, A: Future<Output = T> + Unpin> Select<T, A> for [Option<A>] {
    fn select(&mut self) -> SelectFuture<'_, T, A> {
        SelectFuture::OptFuture(self)
    }
}
|
use std::fmt::{self, Debug, Error, Formatter};
use std::{error, result};
pub type Id = String;
/// Binary operators of the expression language.
#[derive(PartialEq, Eq, Ord, Hash, PartialOrd, Copy, Clone)]
pub enum Op2 {
    LT,
    GT,
    LTE,
    GTE,
    Eq,
    Add,
    Sub,
    Mul,
    And,
    Or,
    Impl,
    Iff,
}
/// Literal constants of the expression language.
#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub enum Const {
    Int(i64),
    Bool(bool),
}
impl Debug for Op2 {
    /// Renders the operator as its mathematical symbol (e.g. `≤`, `∧`, `⇒`).
    fn fmt(&self, fmt: &mut Formatter) -> result::Result<(), Error> {
        let glyph = match self {
            Op2::LT => "<",
            Op2::GT => ">",
            Op2::LTE => "≤",
            Op2::GTE => "≥",
            Op2::Eq => "=",
            Op2::Add => "+",
            Op2::Sub => "-",
            Op2::Mul => "*",
            Op2::And => "∧",
            Op2::Or => "∨",
            Op2::Impl => "⇒",
            Op2::Iff => "⇔",
        };
        write!(fmt, "{}", glyph)
    }
}
// const EXIT_FAILURE: i32 = 1;
// from https://stackoverflow.com/questions/27588416/how-to-send-output-to-stderr
// NOTE(review): std has shipped its own `eprintln!` since Rust 1.19; this
// locally-defined `#[macro_export]` version shadows it within the crate.
#[macro_export]
macro_rules! eprintln(
    ($($arg:tt)*) => { {
        use std::io::Write;
        let r = writeln!(&mut ::std::io::stderr(), $($arg)*);
        r.expect("failed printing to stderr");
    } }
);
// Prints the formatted message to stderr, then exits the process with status 1.
#[macro_export]
macro_rules! die(
    ($($arg:tt)*) => { {
        use std;
        eprintln!($($arg)*);
        std::process::exit(1/*EXIT_FAILURE*/)
    } }
);
// Builds an `Err(LiquidError)` from format-style arguments.
#[macro_export]
macro_rules! err(
    ($($arg:tt)*) => { {
        use crate::common::LiquidError;
        Err(LiquidError::new(format!($($arg)*)))
    } }
);
/// A simple string-carrying error type for liquid-type checking failures.
#[derive(Debug)]
pub struct LiquidError {
    msg: String,
}
impl LiquidError {
    /// Creates an error from any string-like message.
    ///
    /// Generalized from `msg: String` to `impl Into<String>` so callers can
    /// pass `&str` literals directly; existing `String` callers (e.g. the
    /// `err!` macro) are unaffected.
    pub fn new(msg: impl Into<String>) -> LiquidError {
        LiquidError { msg: msg.into() }
    }
}
impl fmt::Display for LiquidError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.msg)
    }
}
impl error::Error for LiquidError {
    // `description` is deprecated in favor of `Display`, but it is kept so
    // the observable behavior of any existing callers does not change.
    fn description(&self) -> &str {
        &self.msg
    }
}
/// Convenience alias used throughout the checker.
pub type Result<T> = result::Result<T, LiquidError>;
|
use std::{
convert::TryFrom,
fmt::Debug,
ops::{Deref, DerefMut},
};
use crate::{
restriction::Restrictions,
util::{impl_deref_wrapped, impl_try_from_repeated},
};
use librespot_core::date::Date;
use librespot_protocol as protocol;
use protocol::metadata::SalePeriod as SalePeriodMessage;
/// A sale/availability window for a catalogue item, together with the
/// restrictions that apply during it.
#[derive(Debug, Clone)]
pub struct SalePeriod {
    pub restrictions: Restrictions,
    pub start: Date,
    pub end: Date,
}
/// Newtype over `Vec<SalePeriod>`; `impl_deref_wrapped!` supplies
/// `Deref`/`DerefMut` to the inner vector.
#[derive(Debug, Clone, Default)]
pub struct SalePeriods(pub Vec<SalePeriod>);
impl_deref_wrapped!(SalePeriods, Vec<SalePeriod>);
// Conversion from the protobuf message; fails when either date fails to parse.
impl TryFrom<&SalePeriodMessage> for SalePeriod {
    type Error = librespot_core::Error;
    fn try_from(sale_period: &SalePeriodMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            restrictions: sale_period.restriction.as_slice().into(),
            start: sale_period.start.get_or_default().try_into()?,
            end: sale_period.end.get_or_default().try_into()?,
        })
    }
}
// Generates `TryFrom` for the repeated-message wrapper type.
impl_try_from_repeated!(SalePeriodMessage, SalePeriods);
|
// svd2rust (0.17-era API) plumbing for TEMP_CFGR1: each field gets an `_R`
// reader alias and an `_W` write proxy that masks its bits into `W.bits`.
#[doc = "Reader of register TEMP_CFGR1"]
pub type R = crate::R<u32, super::TEMP_CFGR1>;
#[doc = "Writer for register TEMP_CFGR1"]
pub type W = crate::W<u32, super::TEMP_CFGR1>;
#[doc = "Register TEMP_CFGR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::TEMP_CFGR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// TS1_EN — single bit at position 0.
#[doc = "Reader of field `TS1_EN`"]
pub type TS1_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TS1_EN`"]
pub struct TS1_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> TS1_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then set it from `value`.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// TS1_START — single bit at position 4.
#[doc = "Reader of field `TS1_START`"]
pub type TS1_START_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TS1_START`"]
pub struct TS1_START_W<'a> {
    w: &'a mut W,
}
impl<'a> TS1_START_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// TS1_INTRIG_SEL — 4-bit field at positions 8:11; `bits` is unsafe because an
// out-of-range value is silently masked.
#[doc = "Reader of field `TS1_INTRIG_SEL`"]
pub type TS1_INTRIG_SEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TS1_INTRIG_SEL`"]
pub struct TS1_INTRIG_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> TS1_INTRIG_SEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);
        self.w
    }
}
// TS1_SMP_TIME — 4-bit field at positions 16:19.
#[doc = "Reader of field `TS1_SMP_TIME`"]
pub type TS1_SMP_TIME_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TS1_SMP_TIME`"]
pub struct TS1_SMP_TIME_W<'a> {
    w: &'a mut W,
}
impl<'a> TS1_SMP_TIME_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
// REFCLK_SEL — single bit at position 20.
#[doc = "Reader of field `REFCLK_SEL`"]
pub type REFCLK_SEL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `REFCLK_SEL`"]
pub struct REFCLK_SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> REFCLK_SEL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
        self.w
    }
}
// Q_MEAS_opt — single bit at position 21.
#[doc = "Reader of field `Q_MEAS_opt`"]
pub type Q_MEAS_OPT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `Q_MEAS_opt`"]
pub struct Q_MEAS_OPT_W<'a> {
    w: &'a mut W,
}
impl<'a> Q_MEAS_OPT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
        self.w
    }
}
// HSREF_CLK_DIV — 7-bit field at positions 24:30.
#[doc = "Reader of field `HSREF_CLK_DIV`"]
pub type HSREF_CLK_DIV_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `HSREF_CLK_DIV`"]
pub struct HSREF_CLK_DIV_W<'a> {
    w: &'a mut W,
}
impl<'a> HSREF_CLK_DIV_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x7f << 24)) | (((value as u32) & 0x7f) << 24);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - TS1_EN"]
    #[inline(always)]
    pub fn ts1_en(&self) -> TS1_EN_R {
        let raw = self.bits & 0x01;
        TS1_EN_R::new(raw != 0)
    }
    #[doc = "Bit 4 - TS1_START"]
    #[inline(always)]
    pub fn ts1_start(&self) -> TS1_START_R {
        let raw = (self.bits >> 4) & 0x01;
        TS1_START_R::new(raw != 0)
    }
    #[doc = "Bits 8:11 - TS1_INTRIG_SEL"]
    #[inline(always)]
    pub fn ts1_intrig_sel(&self) -> TS1_INTRIG_SEL_R {
        let raw = (self.bits >> 8) & 0x0f;
        TS1_INTRIG_SEL_R::new(raw as u8)
    }
    #[doc = "Bits 16:19 - TS1_SMP_TIME"]
    #[inline(always)]
    pub fn ts1_smp_time(&self) -> TS1_SMP_TIME_R {
        let raw = (self.bits >> 16) & 0x0f;
        TS1_SMP_TIME_R::new(raw as u8)
    }
    #[doc = "Bit 20 - REFCLK_SEL"]
    #[inline(always)]
    pub fn refclk_sel(&self) -> REFCLK_SEL_R {
        let raw = (self.bits >> 20) & 0x01;
        REFCLK_SEL_R::new(raw != 0)
    }
    #[doc = "Bit 21 - Q_MEAS_opt"]
    #[inline(always)]
    pub fn q_meas_opt(&self) -> Q_MEAS_OPT_R {
        let raw = (self.bits >> 21) & 0x01;
        Q_MEAS_OPT_R::new(raw != 0)
    }
    #[doc = "Bits 24:30 - HSREF_CLK_DIV"]
    #[inline(always)]
    pub fn hsref_clk_div(&self) -> HSREF_CLK_DIV_R {
        let raw = (self.bits >> 24) & 0x7f;
        HSREF_CLK_DIV_R::new(raw as u8)
    }
}
impl W {
    // Each accessor hands out a write proxy borrowing this `W`; the proxy's
    // `bit`/`bits` method masks the value into the field's bit range.
    #[doc = "Bit 0 - TS1_EN"]
    #[inline(always)]
    pub fn ts1_en(&mut self) -> TS1_EN_W {
        TS1_EN_W { w: self }
    }
    #[doc = "Bit 4 - TS1_START"]
    #[inline(always)]
    pub fn ts1_start(&mut self) -> TS1_START_W {
        TS1_START_W { w: self }
    }
    #[doc = "Bits 8:11 - TS1_INTRIG_SEL"]
    #[inline(always)]
    pub fn ts1_intrig_sel(&mut self) -> TS1_INTRIG_SEL_W {
        TS1_INTRIG_SEL_W { w: self }
    }
    #[doc = "Bits 16:19 - TS1_SMP_TIME"]
    #[inline(always)]
    pub fn ts1_smp_time(&mut self) -> TS1_SMP_TIME_W {
        TS1_SMP_TIME_W { w: self }
    }
    #[doc = "Bit 20 - REFCLK_SEL"]
    #[inline(always)]
    pub fn refclk_sel(&mut self) -> REFCLK_SEL_W {
        REFCLK_SEL_W { w: self }
    }
    #[doc = "Bit 21 - Q_MEAS_opt"]
    #[inline(always)]
    pub fn q_meas_opt(&mut self) -> Q_MEAS_OPT_W {
        Q_MEAS_OPT_W { w: self }
    }
    #[doc = "Bits 24:30 - HSREF_CLK_DIV"]
    #[inline(always)]
    pub fn hsref_clk_div(&mut self) -> HSREF_CLK_DIV_W {
        HSREF_CLK_DIV_W { w: self }
    }
}
|
// Copyright (C) 2021 Subspace Labs, Inc.
// SPDX-License-Identifier: GPL-3.0-or-later
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! A collection of node-specific RPC methods.
//! Substrate provides the `sc-rpc` crate, which defines the core RPC layer
//! used by Substrate nodes. This file extends those RPC definitions with
//! capabilities that are specific to this project's runtime configuration.
#![warn(missing_docs)]
use jsonrpsee::RpcModule;
use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApiServer};
use sc_client_api::{AuxStore, BlockBackend};
use sc_consensus_subspace::archiver::SegmentHeadersStore;
use sc_consensus_subspace::notification::SubspaceNotificationStream;
use sc_consensus_subspace::{
ArchivedSegmentNotification, NewSlotNotification, RewardSigningNotification, SubspaceSyncOracle,
};
use sc_consensus_subspace_rpc::{SubspaceRpc, SubspaceRpcApiServer, SubspaceRpcConfig};
use sc_rpc::SubscriptionTaskExecutor;
use sc_rpc_api::DenyUnsafe;
use sc_rpc_spec_v2::chain_spec::{ChainSpec, ChainSpecApiServer};
use sc_transaction_pool_api::TransactionPool;
use sp_api::ProvideRuntimeApi;
use sp_block_builder::BlockBuilder;
use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata};
use sp_consensus::SyncOracle;
use sp_consensus_subspace::{FarmerPublicKey, SubspaceApi};
use sp_objects::ObjectsApi;
use std::sync::Arc;
use subspace_core_primitives::crypto::kzg::Kzg;
use subspace_networking::libp2p::Multiaddr;
use subspace_runtime_primitives::opaque::Block;
use subspace_runtime_primitives::{AccountId, Balance, Index};
use substrate_frame_rpc_system::{System, SystemApiServer};
/// Full client dependencies.
pub struct FullDeps<C, P, SO, AS>
where
    SO: SyncOracle + Send + Sync + Clone,
{
    /// The client instance to use.
    pub client: Arc<C>,
    /// Transaction pool instance.
    pub pool: Arc<P>,
    /// A copy of the chain spec.
    pub chain_spec: Box<dyn sc_chain_spec::ChainSpec>,
    /// Whether to deny unsafe calls.
    pub deny_unsafe: DenyUnsafe,
    /// Executor to drive the subscription manager in the Subspace RPC handler.
    pub subscription_executor: SubscriptionTaskExecutor,
    /// A stream with notifications about new slot arrival with ability to send solution back.
    pub new_slot_notification_stream: SubspaceNotificationStream<NewSlotNotification>,
    /// A stream with notifications about headers that need to be signed with ability to send
    /// signature back.
    pub reward_signing_notification_stream: SubspaceNotificationStream<RewardSigningNotification>,
    /// A stream with notifications about archived segment creation.
    pub archived_segment_notification_stream:
        SubspaceNotificationStream<ArchivedSegmentNotification>,
    /// Bootstrap nodes for DSN.
    pub dsn_bootstrap_nodes: Vec<Multiaddr>,
    /// Segment header provider.
    pub segment_headers_store: SegmentHeadersStore<AS>,
    /// Subspace sync oracle.
    pub sync_oracle: SubspaceSyncOracle<SO>,
    /// Kzg instance.
    pub kzg: Kzg,
}
/// Instantiate all full RPC extensions.
pub fn create_full<C, P, SO, AS>(
    deps: FullDeps<C, P, SO, AS>,
) -> Result<RpcModule<()>, Box<dyn std::error::Error + Send + Sync>>
where
    C: ProvideRuntimeApi<Block>
        + BlockBackend<Block>
        + HeaderBackend<Block>
        + HeaderMetadata<Block, Error = BlockChainError>
        + Send
        + Sync
        + 'static,
    C::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>
        + pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>
        + BlockBuilder<Block>
        + SubspaceApi<Block, FarmerPublicKey>
        + ObjectsApi<Block>,
    P: TransactionPool + 'static,
    SO: SyncOracle + Send + Sync + Clone + 'static,
    AS: AuxStore + Send + Sync + 'static,
{
    let FullDeps {
        client,
        pool,
        chain_spec,
        deny_unsafe,
        subscription_executor,
        new_slot_notification_stream,
        reward_signing_notification_stream,
        archived_segment_notification_stream,
        dsn_bootstrap_nodes,
        segment_headers_store,
        sync_oracle,
        kzg,
    } = deps;
    let mut rpc_module = RpcModule::new(());
    // Chain-spec metadata (name, genesis hash, properties).
    let spec_name = chain_spec.name().to_string();
    let spec_properties = chain_spec.properties();
    let genesis_hash = client.info().genesis_hash;
    rpc_module.merge(ChainSpec::new(spec_name, genesis_hash, spec_properties).into_rpc())?;
    // System RPCs (account nonce etc.).
    rpc_module.merge(System::new(client.clone(), pool, deny_unsafe).into_rpc())?;
    // Transaction fee estimation RPCs.
    rpc_module.merge(TransactionPayment::new(client.clone()).into_rpc())?;
    // Subspace consensus-specific RPCs.
    let subspace_rpc_config = SubspaceRpcConfig {
        client,
        subscription_executor,
        new_slot_notification_stream,
        reward_signing_notification_stream,
        archived_segment_notification_stream,
        dsn_bootstrap_nodes,
        segment_headers_store,
        sync_oracle,
        kzg,
        deny_unsafe,
    };
    rpc_module.merge(SubspaceRpc::new(subspace_rpc_config).into_rpc())?;
    Ok(rpc_module)
}
|
//! Weechat Configuration module
use libc::{c_char, c_int};
use std::collections::HashMap;
use std::ffi::CStr;
use std::os::raw::c_void;
use std::ptr;
use crate::config_options::{
BooleanOption, ColorOption, ConfigOption, IntegerOption, OptionDescription,
OptionPointers, OptionType, StringOption,
};
use crate::{LossyCString, Weechat};
use std::borrow::Cow;
use weechat_sys::{
t_config_file, t_config_option, t_config_section, t_weechat_plugin,
WEECHAT_RC_OK,
};
/// Weechat configuration file
pub struct Config<T> {
    // Raw pointer to the underlying Weechat config file object.
    ptr: *mut t_config_file,
    // Plugin that owns this configuration; used to look up API functions.
    weechat_ptr: *mut t_weechat_plugin,
    // Keeps the reload callback and its data alive for the lifetime of the
    // config; Weechat holds a raw pointer into this allocation.
    _config_data: Box<ConfigPointers<T>>,
    // Sections by name; cleared before the config file itself is freed.
    sections: HashMap<String, ConfigSection>,
}
// Callback state handed to Weechat as a raw pointer for the reload hook.
struct ConfigPointers<T> {
    reload_cb: Option<fn(&mut T)>,
    reload_data: T,
}
/// Weechat Configuration section
pub struct ConfigSection {
    // Raw pointer to the Weechat section object.
    ptr: *mut t_config_section,
    // Config file this section belongs to; needed when creating options.
    config_ptr: *mut t_config_file,
    // Owning plugin; used to look up API functions.
    weechat_ptr: *mut t_weechat_plugin,
}
/// Represents the options when creating a new config section.
#[derive(Default)]
// `T` is the user-data type shared by all of the section's callbacks; every
// callback and its data are optional, so `Default` yields a plain section.
pub struct ConfigSectionInfo<'a, T> {
    /// Name of the config section
    pub name: &'a str,
    /// Can the user create new options?
    pub user_can_add_options: bool,
    /// Can the user delete options?
    pub user_can_delete_option: bool,
    /// A function called when an option from the section is read from the disk
    pub read_callback: Option<fn(&T)>,
    /// Data passed to the `read_callback`
    pub read_callback_data: Option<T>,
    /// A function called when the section is written to the disk
    pub write_callback: Option<fn(&T)>,
    /// Data passed to the `write_callback`
    pub write_callback_data: Option<T>,
    /// A function called when default values for the section must be written to the disk
    pub write_default_callback: Option<fn(&T)>,
    /// Data passed to the `write_default_callback`
    pub write_default_callback_data: Option<T>,
    /// A function called when a new option is created in the section
    pub create_option_callback: Option<fn(&T)>,
    /// Data passed to the `create_option_callback`
    pub create_option_callback_data: Option<T>,
    /// A function called when an option is deleted in the section
    pub delete_option_callback: Option<fn(&T)>,
    /// Data passed to the `delete_option_callback`
    pub delete_option_callback_data: Option<T>,
}
impl<T> Drop for Config<T> {
    fn drop(&mut self) {
        // Sections must be freed before the config file they belong to.
        self.sections.clear();
        let weechat = Weechat::from_ptr(self.weechat_ptr);
        let free_config = weechat.get().config_free.unwrap();
        // Now release the config file object itself.
        unsafe { free_config(self.ptr) };
    }
}
impl Drop for ConfigSection {
    fn drop(&mut self) {
        let weechat = Weechat::from_ptr(self.weechat_ptr);
        let free_options = weechat.get().config_section_free_options.unwrap();
        let free_section = weechat.get().config_section_free.unwrap();
        // Release the section's options first, then the section itself.
        unsafe {
            free_options(self.ptr);
            free_section(self.ptr);
        }
    }
}
impl<T> Config<T> {
    /// Create a new section in the configuration file.
    ///
    /// Returns a reference to the stored section; the section is owned by
    /// this `Config` and freed when the config is dropped.
    pub fn new_section<S: Default>(
        &mut self,
        section_info: ConfigSectionInfo<S>,
    ) -> &ConfigSection {
        let weechat = Weechat::from_ptr(self.weechat_ptr);
        let new_section = weechat.get().config_new_section.unwrap();
        let name = LossyCString::new(section_info.name);
        // NOTE(review): none of the callbacks from `section_info` are wired
        // through yet — every callback slot below is passed as None/null.
        // The five (callback, pointer, data) triples presumably correspond
        // to read/write/write_default/create_option/delete_option — confirm
        // against the Weechat config_new_section API.
        let ptr = unsafe {
            new_section(
                self.ptr,
                name.as_ptr(),
                section_info.user_can_add_options as i32,
                section_info.user_can_delete_option as i32,
                None,
                ptr::null_mut(),
                ptr::null_mut(),
                None,
                ptr::null_mut(),
                ptr::null_mut(),
                None,
                ptr::null_mut(),
                ptr::null_mut(),
                None,
                ptr::null_mut(),
                ptr::null_mut(),
                None,
                ptr::null_mut(),
                ptr::null_mut(),
            )
        };
        let section = ConfigSection {
            ptr,
            config_ptr: self.ptr,
            weechat_ptr: weechat.ptr,
        };
        // Indexing cannot panic: the key was inserted on the previous line.
        self.sections.insert(section_info.name.to_string(), section);
        &self.sections[section_info.name]
    }
}
// Signature of the C-side callback Weechat invokes when an option changes;
// the delete callback shares this signature and reuses the same alias.
type WeechatOptChangeCbT = unsafe extern "C" fn(
    pointer: *const c_void,
    _data: *mut c_void,
    option_pointer: *mut t_config_option,
);
// Signature of the C-side callback Weechat invokes with a candidate option
// value; it returns a Weechat status code.
type WeechatOptCheckCbT = unsafe extern "C" fn(
    pointer: *const c_void,
    _data: *mut c_void,
    option_pointer: *mut t_config_option,
    value: *const c_char,
) -> c_int;
impl ConfigSection {
/// Create a new string Weechat configuration option.
pub fn new_string_option<D>(
&self,
name: &str,
description: &str,
default_value: &str,
value: &str,
null_allowed: bool,
change_cb: Option<fn(&mut D, &StringOption)>,
change_cb_data: Option<D>,
) -> StringOption
where
D: Default,
{
let ptr = self.new_option(
OptionDescription {
name,
description,
option_type: OptionType::String,
default_value,
value,
null_allowed,
..Default::default()
},
None,
None::<String>,
change_cb,
change_cb_data,
None,
None::<String>,
);
StringOption {
ptr,
weechat_ptr: self.weechat_ptr,
}
}
/// Create a new boolean Weechat configuration option.
pub fn new_boolean_option<D>(
&self,
name: &str,
description: &str,
default_value: bool,
value: bool,
null_allowed: bool,
change_cb: Option<fn(&mut D, &BooleanOption)>,
change_cb_data: Option<D>,
) -> BooleanOption
where
D: Default,
{
let value = if value { "on" } else { "off" };
let default_value = if default_value { "on" } else { "off" };
let ptr = self.new_option(
OptionDescription {
name,
description,
option_type: OptionType::Boolean,
default_value,
value,
null_allowed,
..Default::default()
},
None,
None::<String>,
change_cb,
change_cb_data,
None,
None::<String>,
);
BooleanOption {
ptr,
weechat_ptr: self.weechat_ptr,
}
}
/// Create a new integer Weechat configuration option.
pub fn new_integer_option<D>(
&self,
name: &str,
description: &str,
string_values: &str,
min: i32,
max: i32,
default_value: &str,
value: &str,
null_allowed: bool,
change_cb: Option<fn(&mut D, &IntegerOption)>,
change_cb_data: Option<D>,
) -> IntegerOption
where
D: Default,
{
let ptr = self.new_option(
OptionDescription {
name,
option_type: OptionType::Integer,
description,
string_values,
min,
max,
default_value,
value,
null_allowed,
},
None,
None::<String>,
change_cb,
change_cb_data,
None,
None::<String>,
);
IntegerOption {
ptr,
weechat_ptr: self.weechat_ptr,
}
}
/// Create a new color Weechat configuration option.
pub fn new_color_option<D>(
&self,
name: &str,
description: &str,
default_value: &str,
value: &str,
null_allowed: bool,
change_cb: Option<fn(&mut D, &ColorOption)>,
change_cb_data: Option<D>,
) -> ColorOption
where
D: Default,
{
let ptr = self.new_option(
OptionDescription {
name,
description,
option_type: OptionType::Color,
default_value,
value,
null_allowed,
..Default::default()
},
None,
None::<String>,
change_cb,
change_cb_data,
None,
None::<String>,
);
ColorOption {
ptr,
weechat_ptr: self.weechat_ptr,
}
}
fn new_option<'a, T, A, B, C>(
&self,
option_description: OptionDescription,
check_cb: Option<fn(&mut A, &T, Cow<str>)>,
check_cb_data: Option<A>,
change_cb: Option<fn(&mut B, &T)>,
change_cb_data: Option<B>,
delete_cb: Option<fn(&mut C, &T)>,
delete_cb_data: Option<C>,
) -> *mut t_config_option
where
T: ConfigOption<'static>,
A: Default,
B: Default,
C: Default,
{
unsafe extern "C" fn c_check_cb<T, A, B, C>(
pointer: *const c_void,
_data: *mut c_void,
option_pointer: *mut t_config_option,
value: *const c_char,
) -> c_int
where
T: ConfigOption<'static>,
{
let value = CStr::from_ptr(value).to_string_lossy();
let pointers: &mut OptionPointers<T, A, B, C> =
{ &mut *(pointer as *mut OptionPointers<T, A, B, C>) };
let option = T::from_ptrs(option_pointer, pointers.weechat_ptr);
let data = &mut pointers.check_cb_data;
if let Some(callback) = pointers.check_cb {
callback(data, &option, value)
};
WEECHAT_RC_OK
}
unsafe extern "C" fn c_change_cb<T, A, B, C>(
pointer: *const c_void,
_data: *mut c_void,
option_pointer: *mut t_config_option,
) where
T: ConfigOption<'static>,
{
let pointers: &mut OptionPointers<T, A, B, C> =
{ &mut *(pointer as *mut OptionPointers<T, A, B, C>) };
let option = T::from_ptrs(option_pointer, pointers.weechat_ptr);
let data = &mut pointers.change_cb_data;
if let Some(callback) = pointers.change_cb {
callback(data, &option)
};
}
unsafe extern "C" fn c_delete_cb<T, A, B, C>(
pointer: *const c_void,
_data: *mut c_void,
option_pointer: *mut t_config_option,
) where
T: ConfigOption<'static>,
{
let pointers: &mut OptionPointers<T, A, B, C> =
{ &mut *(pointer as *mut OptionPointers<T, A, B, C>) };
let option = T::from_ptrs(option_pointer, pointers.weechat_ptr);
let data = &mut pointers.delete_cb_data;
if let Some(callback) = pointers.delete_cb {
callback(data, &option)
};
}
let weechat = Weechat::from_ptr(self.weechat_ptr);
let name = LossyCString::new(option_description.name);
let description = LossyCString::new(option_description.description);
let option_type =
LossyCString::new(option_description.option_type.as_str());
let string_values = LossyCString::new(option_description.string_values);
let default_value = LossyCString::new(option_description.default_value);
let value = LossyCString::new(option_description.value);
let option_pointers = Box::new(OptionPointers::<T, A, B, C> {
weechat_ptr: self.weechat_ptr,
check_cb: check_cb,
check_cb_data: check_cb_data.unwrap_or_default(),
change_cb: change_cb,
change_cb_data: change_cb_data.unwrap_or_default(),
delete_cb: delete_cb,
delete_cb_data: delete_cb_data.unwrap_or_default(),
});
// TODO this leaks curently.
let option_pointers_ref: &OptionPointers<T, A, B, C> =
Box::leak(option_pointers);
let c_check_cb: Option<WeechatOptCheckCbT> = match check_cb {
Some(_) => Some(c_check_cb::<T, A, B, C>),
None => None,
};
let c_change_cb: Option<WeechatOptChangeCbT> = match change_cb {
Some(_) => Some(c_change_cb::<T, A, B, C>),
None => None,
};
let c_delete_cb: Option<WeechatOptChangeCbT> = match delete_cb {
Some(_) => Some(c_delete_cb::<T, A, B, C>),
None => None,
};
let config_new_option = weechat.get().config_new_option.unwrap();
unsafe {
config_new_option(
self.config_ptr,
self.ptr,
name.as_ptr(),
option_type.as_ptr(),
description.as_ptr(),
string_values.as_ptr(),
option_description.min,
option_description.max,
default_value.as_ptr(),
value.as_ptr(),
option_description.null_allowed as i32,
c_check_cb,
option_pointers_ref as *const _ as *const c_void,
ptr::null_mut(),
c_change_cb,
option_pointers_ref as *const _ as *const c_void,
ptr::null_mut(),
c_delete_cb,
option_pointers_ref as *const _ as *const c_void,
ptr::null_mut(),
)
}
}
}
// Signature of the C-side reload callback registered with `config_new`;
// returns a Weechat return code.
type WeechatReloadT = unsafe extern "C" fn(
    pointer: *const c_void,
    _data: *mut c_void,
    _config_pointer: *mut t_config_file,
) -> c_int;
/// Configuration file part of the weechat API.
impl Weechat {
    /// Create a new Weechat configuration file, returns a `Config` object.
    /// The configuration file is freed when the `Config` object is dropped.
    /// * `name` - Name of the new configuration file
    /// * `reload_callback` - Callback that will be called when the
    ///     configuration file is reloaded.
    /// * `reload_data` - Data that will be taken over by weechat and passed
    ///     to the reload callback, this data will be freed when the `Config`
    ///     object returned by this method is dropped.
    pub fn config_new<T: Default>(
        &self,
        name: &str,
        reload_callback: Option<fn(&mut T)>,
        reload_data: Option<T>,
    ) -> Config<T> {
        // C trampoline: recovers `ConfigPointers<T>` from the raw pointer
        // Weechat hands back and forwards to the Rust reload callback.
        unsafe extern "C" fn c_reload_cb<T>(
            pointer: *const c_void,
            _data: *mut c_void,
            _config_pointer: *mut t_config_file,
        ) -> c_int {
            let pointers: &mut ConfigPointers<T> =
                { &mut *(pointer as *mut ConfigPointers<T>) };
            let data = &mut pointers.reload_data;
            if let Some(callback) = pointers.reload_cb {
                callback(data)
            }
            WEECHAT_RC_OK
        }
        let c_name = LossyCString::new(name);
        let config_pointers = Box::new(ConfigPointers::<T> {
            reload_cb: reload_callback,
            reload_data: reload_data.unwrap_or_default(),
        });
        // Leak the box so the raw pointer stays valid while Weechat uses it;
        // ownership is reclaimed below via Box::from_raw.
        let config_pointers_ref = Box::leak(config_pointers);
        // Only install the trampoline when a Rust callback was supplied.
        let c_reload_cb: Option<WeechatReloadT> = match reload_callback {
            Some(_) => Some(c_reload_cb::<T>),
            None => None,
        };
        let config_new = self.get().config_new.unwrap();
        let config_ptr = unsafe {
            config_new(
                self.ptr,
                c_name.as_ptr(),
                c_reload_cb,
                config_pointers_ref as *const _ as *const c_void,
                ptr::null_mut(),
            )
        };
        // Reclaim ownership of the leaked allocation; storing it inside the
        // returned `Config` keeps it alive as long as Weechat may call the
        // reload callback (it is freed when the `Config` is dropped).
        let config_data = unsafe { Box::from_raw(config_pointers_ref) };
        Config {
            ptr: config_ptr,
            weechat_ptr: self.ptr,
            _config_data: config_data,
            sections: HashMap::new(),
        }
    }
}
|
use crate::{
kzg10,
multiset::{multiset_equality, quotient_poly, MultiSet},
transcript::TranscriptProtocol,
};
use ark_bls12_381::{Bls12_381, Fr};
use ark_poly::{
polynomial::univariate::DensePolynomial as Polynomial, EvaluationDomain, Polynomial as Poly,
Radix2EvaluationDomain, UVPolynomial,
};
use ark_poly_commit::kzg10::{Commitment, Powers, VerifierKey};
// Evaluations store the evaluations of different polynomial.
// `t` denotes that the polynomial was evaluated at t(z) for some random evaluation challenge `z`
// `t_omega` denotes the polynomial was evaluated at t(z * omega) where omega is the group generator
// In the FFT context, the normal terminology is that t(z*omega) means to evaluate a polynomial at the next root of unity from `z`.
pub struct Evaluations {
    // Witness polynomial f evaluated at the challenge point `z`.
    pub f: Fr,
    // Table polynomial t at `z` and at the shifted point `z * omega`.
    pub t: Fr,
    pub t_omega: Fr,
    // The h_1/h_2 polynomials (see multiset_equality::compute_h1_h2),
    // each at `z` and `z * omega`.
    pub h_1: Fr,
    pub h_1_omega: Fr,
    pub h_2: Fr,
    pub h_2_omega: Fr,
    // Accumulator polynomial Z at `z` and `z * omega`.
    pub z: Fr,
    pub z_omega: Fr,
}
// Commitments of different polynomials
pub struct Commitments {
    // Commitment to the witness polynomial f(X).
    pub f: Commitment<Bls12_381>,
    // Commitment to the quotient polynomial Q(X).
    pub q: Commitment<Bls12_381>,
    // Commitments to h_1(X) and h_2(X).
    pub h_1: Commitment<Bls12_381>,
    pub h_2: Commitment<Bls12_381>,
    // Commitment to the accumulator polynomial Z(X).
    pub z: Commitment<Bls12_381>,
}
// In the best case, this protocol requires 4 extra G1 elements (Commitment)
// These are: h_1_commit,h_2_commit, f_commit,t_commit
//
// The commitment to the accumulator and the quotient polynomial would ideally be joined into the existing ones in PLONK
//
// We would require 7 extra Scalar elements (Evaluations)
// These are: h_1_eval, h_1_omega_eval, h_2_eval, h_2_omega_eval, f_eval, t_eval, t_omega_eval
//
// We would ideally be able to combine the accumulator, Z(X) with the permutation accumulator in plonk, and the quotient polynomial with the quotient polynomial in PLONK
// Which would save us 2 evaluation points: z_eval and z_omega_eval
// q_eval which is the quotient evaluation is usually created from the prover messages
//
// Lastly, the Witness commitments can also be batched with the PLONK opening Proof.
pub struct EqualityProof {
    // Aggregate KZG opening proof for the polynomials evaluated at `z`.
    pub aggregate_witness_comm: Commitment<Bls12_381>,
    // Aggregate KZG opening proof for the polynomials evaluated at `z * omega`.
    pub shifted_aggregate_witness_comm: Commitment<Bls12_381>,
    pub evaluations: Evaluations,
    pub commitments: Commitments,
}
impl EqualityProof {
pub fn prove(
f: MultiSet,
t: MultiSet,
proving_key: &Powers<Bls12_381>,
transcript: &mut dyn TranscriptProtocol,
) -> EqualityProof {
let domain: Radix2EvaluationDomain<Fr> = EvaluationDomain::new(t.len()).unwrap();
// Convert witness and table to polynomials
let f_poly = f.to_polynomial(&domain);
let f_commit = kzg10::commit(proving_key, &f_poly);
let t_poly = t.to_polynomial(&domain);
// Compute h_1 and h_2
let (h_1, h_2) = multiset_equality::compute_h1_h2(&f, &t);
// Convert h_1 and h_2 to polynomials
let h_1_poly = h_1.to_polynomial(&domain);
let h_2_poly = h_2.to_polynomial(&domain);
// Commit to h_1(X) and h_2(X)
let h_1_commit = kzg10::commit(proving_key, &h_1_poly);
let h_2_commit = kzg10::commit(proving_key, &h_2_poly);
// Add commitments to transcript
transcript.append_commitment(b"h_1_poly", &h_1_commit);
transcript.append_commitment(b"h_2_poly", &h_2_commit);
let beta = transcript.challenge_scalar(b"beta");
let gamma = transcript.challenge_scalar(b"gamma");
// Compute Z(X)
let z_evaluations =
multiset_equality::compute_accumulator_values(&f, &t, &h_1, &h_2, beta, gamma);
let z_poly = Polynomial::from_coefficients_vec(domain.ifft(&z_evaluations));
// Commit to Z(X)
let z_commit = kzg10::commit(proving_key, &z_poly);
transcript.append_commitment(b"accumulator_poly", &z_commit);
// Compute quotient polynomial
let (quotient_poly, _) = quotient_poly::compute(
&domain, &z_poly, &f_poly, &t_poly, &h_1_poly, &h_2_poly, beta, gamma,
);
// Commit to quotient polynomial
let q_commit = kzg10::commit(proving_key, "ient_poly);
transcript.append_commitment(b"quotient_poly", &q_commit);
// Compute the Witness that f was a subset of t
//
let evaluation_challenge = transcript.challenge_scalar(b"evaluation_challenge");
transcript.append_scalar(b"evaluation_challenge", &evaluation_challenge);
let evaluation_omega = evaluation_challenge * domain.group_gen;
// Compute evaluations at `z`
let f_eval = f_poly.evaluate(&evaluation_challenge);
let t_eval = t_poly.evaluate(&evaluation_challenge);
let h_1_eval = h_1_poly.evaluate(&evaluation_challenge);
let h_2_eval = h_2_poly.evaluate(&evaluation_challenge);
let z_eval = z_poly.evaluate(&evaluation_challenge);
let q_eval = quotient_poly.evaluate(&evaluation_challenge);
// Compute evaluations at `z * omega`
let t_omega_eval = t_poly.evaluate(&evaluation_omega);
let h_1_omega_eval = h_1_poly.evaluate(&evaluation_omega);
let h_2_omega_eval = h_2_poly.evaluate(&evaluation_omega);
let z_omega_eval = z_poly.evaluate(&evaluation_omega);
transcript.append_scalar(b"f_eval", &f_eval);
transcript.append_scalar(b"t_eval", &t_eval);
transcript.append_scalar(b"h_1_eval", &h_1_eval);
transcript.append_scalar(b"h_2_eval", &h_2_eval);
transcript.append_scalar(b"z_eval", &z_eval);
transcript.append_scalar(b"q_eval", &q_eval);
transcript.append_scalar(b"t_omega_eval", &t_omega_eval);
transcript.append_scalar(b"h_1_omega_eval", &h_1_omega_eval);
transcript.append_scalar(b"h_2_omega_eval", &h_2_omega_eval);
transcript.append_scalar(b"z_omega_eval", &z_omega_eval);
let aggregation_challenge = transcript.challenge_scalar(b"witness_aggregation");
// Compute opening proof for f(X) evaluated at `z`
let agg_witness = kzg10::compute_aggregate_witness(
vec![
&f_poly,
&t_poly,
&h_1_poly,
&h_2_poly,
&z_poly,
"ient_poly,
],
evaluation_challenge,
aggregation_challenge,
);
let agg_witness_comm = kzg10::commit(proving_key, &agg_witness);
// Compute opening proofs for f(X) evaluated at `z * omega`
let shifted_agg_witness = kzg10::compute_aggregate_witness(
vec![&t_poly, &h_1_poly, &h_2_poly, &z_poly],
evaluation_omega,
aggregation_challenge,
);
let shifted_agg_witness_comm = kzg10::commit(proving_key, &shifted_agg_witness);
EqualityProof {
evaluations: Evaluations {
f: f_eval,
t: t_eval,
t_omega: t_omega_eval,
h_1: h_1_eval,
h_1_omega: h_1_omega_eval,
h_2: h_2_eval,
h_2_omega: h_2_omega_eval,
z: z_eval,
z_omega: z_omega_eval,
},
commitments: Commitments {
f: f_commit,
q: q_commit,
h_1: h_1_commit,
h_2: h_2_commit,
z: z_commit,
},
aggregate_witness_comm: agg_witness_comm,
shifted_aggregate_witness_comm: shifted_agg_witness_comm,
}
}
    /// Verifies the proof against the commitment to the table polynomial.
    ///
    /// Replays the prover's transcript (appends must happen in exactly the
    /// same order as in `prove`) to re-derive the challenges, reconstructs
    /// the quotient evaluation, and batch-verifies the two aggregate KZG
    /// opening proofs.
    pub fn verify(
        &self,
        n: usize,
        verification_key: &VerifierKey<Bls12_381>,
        commitment_to_t: Commitment<Bls12_381>,
        transcript: &mut dyn TranscriptProtocol,
    ) -> bool {
        let domain: Radix2EvaluationDomain<Fr> = EvaluationDomain::new(n).unwrap();
        transcript.append_commitment(b"h_1_poly", &self.commitments.h_1);
        transcript.append_commitment(b"h_2_poly", &self.commitments.h_2);
        let beta = transcript.challenge_scalar(b"beta");
        let gamma = transcript.challenge_scalar(b"gamma");
        transcript.append_commitment(b"accumulator_poly", &self.commitments.z);
        transcript.append_commitment(b"quotient_poly", &self.commitments.q);
        let evaluation_challenge = transcript.challenge_scalar(b"evaluation_challenge");
        transcript.append_scalar(b"evaluation_challenge", &evaluation_challenge);
        let evaluation_omega = evaluation_challenge * domain.group_gen;
        // Compute quotient evaluation (Q(z)) from the provers messages
        let q_eval =
            self.compute_quotient_evaluation(&beta, &gamma, &evaluation_challenge, &domain);
        transcript.append_scalar(b"f_eval", &self.evaluations.f);
        transcript.append_scalar(b"t_eval", &self.evaluations.t);
        transcript.append_scalar(b"h_1_eval", &self.evaluations.h_1);
        transcript.append_scalar(b"h_2_eval", &self.evaluations.h_2);
        transcript.append_scalar(b"z_eval", &self.evaluations.z);
        transcript.append_scalar(b"q_eval", &q_eval);
        transcript.append_scalar(b"t_omega_eval", &self.evaluations.t_omega);
        transcript.append_scalar(b"h_1_omega_eval", &self.evaluations.h_1_omega);
        transcript.append_scalar(b"h_2_omega_eval", &self.evaluations.h_2_omega);
        transcript.append_scalar(b"z_omega_eval", &self.evaluations.z_omega);
        let aggregation_challenge = transcript.challenge_scalar(b"witness_aggregation");
        // Create aggregate opening proof for all polynomials evaluated at the evaluation challenge `z`
        // (ordering must match the prover's aggregation order exactly).
        let agg_commitment = kzg10::aggregate_commitments(
            vec![
                &self.commitments.f,
                &commitment_to_t,
                &self.commitments.h_1,
                &self.commitments.h_2,
                &self.commitments.z,
                &self.commitments.q,
            ],
            aggregation_challenge,
        );
        let agg_value = kzg10::aggregate_values(
            vec![
                &self.evaluations.f,
                &self.evaluations.t,
                &self.evaluations.h_1,
                &self.evaluations.h_2,
                &self.evaluations.z,
                &q_eval,
            ],
            aggregation_challenge,
        );
        // Create aggregate opening proof for all polynomials evaluated at the shifted evaluation challenge `z * omega`
        let shifted_agg_commitment = kzg10::aggregate_commitments(
            vec![
                &commitment_to_t,
                &self.commitments.h_1,
                &self.commitments.h_2,
                &self.commitments.z,
            ],
            aggregation_challenge,
        );
        let shifted_agg_value = kzg10::aggregate_values(
            vec![
                &self.evaluations.t_omega,
                &self.evaluations.h_1_omega,
                &self.evaluations.h_2_omega,
                &self.evaluations.z_omega,
            ],
            aggregation_challenge,
        );
        // Batch Verify both opening proofs
        let ok = kzg10::batch_verify(
            &verification_key,
            vec![agg_commitment, shifted_agg_commitment],
            vec![
                self.aggregate_witness_comm,
                self.shifted_aggregate_witness_comm,
            ],
            vec![evaluation_challenge, evaluation_omega],
            vec![agg_value, shifted_agg_value],
        );
        ok
    }
    /// Computes the quotient evaluation from the prover messages.
    ///
    /// Reconstructs Q(z) = (a + b - c + d + e) / Z_H(z) from the claimed
    /// polynomial evaluations, so the prover never sends q_eval directly.
    fn compute_quotient_evaluation<E: EvaluationDomain<Fr>>(
        &self,
        beta: &Fr,
        gamma: &Fr,
        evaluation_challenge: &Fr,
        domain: &E,
    ) -> Fr {
        // g^{n+1}: last element of the evaluation domain
        let last_element = domain.elements().last().unwrap();
        let lagrange_evaluations = domain.evaluate_all_lagrange_coefficients(*evaluation_challenge);
        // L_1(Z);
        let l1_z = lagrange_evaluations[0];
        // L_{n+1}(Z);
        let ln_plus_1_z = lagrange_evaluations[domain.size() - 1];
        // Z_H(Z)
        let v_h = domain.evaluate_vanishing_polynomial(*evaluation_challenge);
        // (1 + beta) and gamma * (1 + beta), shared by several terms below
        let beta_one = Fr::from(1u8) + beta;
        let gamma_beta_one = (Fr::from(1u8) + beta) * gamma;
        // L_1(X) [ Z(X) -1]
        let a = { (self.evaluations.z - Fr::from(1u8)) * l1_z };
        // x-g^{n+1} * Z(X)(1+beta) * (gamma + f(x)) (gamma(1+beta) + t(x) + beta * t(Xg))
        let b = {
            let b_0 = *evaluation_challenge - last_element;
            let b_1 = self.evaluations.z * beta_one;
            let b_2 = self.evaluations.f + gamma;
            let b_3 = gamma_beta_one + self.evaluations.t + (self.evaluations.t_omega * beta);
            b_0 * b_1 * b_2 * b_3
        };
        // x-g^{n+1} * Z(Xg)[(gamma(1+beta) + h_1(X) + beta * h_1(Xg)][(gamma(1+beta) + h_2(X) + beta * h_2(Xg)]
        let c = {
            let c_0 = (*evaluation_challenge - last_element) * self.evaluations.z_omega;
            let c_1 = gamma_beta_one + self.evaluations.h_1 + (self.evaluations.h_1_omega * beta);
            let c_2 = gamma_beta_one + self.evaluations.h_2 + (self.evaluations.h_2_omega * beta);
            c_0 * c_1 * c_2
        };
        // L_{n+1}(X)[h_1(X) - h_2(Xg)]
        let d = ln_plus_1_z * (self.evaluations.h_1 - self.evaluations.h_2_omega);
        // L_{n+1}(X)[Z(X) - 1]
        let e = (self.evaluations.z - Fr::from(1u8)) * ln_plus_1_z;
        (a + b - c + d + e) / v_h
    }
}
|
// error-pattern:ran out of stack
// Test that the task fails after hiting the recursion limit
// Deliberate unbounded recursion: this is a compiletest expecting the
// "ran out of stack" error pattern above, so the recursion must stay.
// Fix: removed a stray `|` token after the closing brace that made the
// file unparseable.
fn main() {
    main();
}
//! # riff
//!
//! `riff` provides utility methods for reading and writing RIFF-formatted files,
//! such as Microsoft Wave, AVI or DLS files.
use std::fmt;
use std::io::Read;
use std::io::Write;
use std::io::Seek;
use std::io::SeekFrom;
use std::convert::TryInto;
/// A chunk id, also known as FourCC
#[derive(PartialEq, Eq, Clone, Copy, Hash)]
pub struct ChunkId {
    /// The raw bytes of the id
    pub value: [u8; 4]
}
/// The `RIFF` id
pub static RIFF_ID: ChunkId = ChunkId { value: [0x52, 0x49, 0x46, 0x46] };
/// The `LIST` id
pub static LIST_ID: ChunkId = ChunkId { value: [0x4C, 0x49, 0x53, 0x54] };
/// The `seqt` id
pub static SEQT_ID: ChunkId = ChunkId { value: [0x73, 0x65, 0x71, 0x74] };
impl ChunkId {
    /// Returns the value of the id as a string.
    ///
    /// # Examples
    /// ```
    /// assert_eq!(riff::RIFF_ID.as_str(), "RIFF");
    /// ```
    ///
    /// # Panics
    /// This function panics when the value does not represent a valid UTF-8 string.
    pub fn as_str(&self) -> &str {
        std::str::from_utf8(&self.value).unwrap()
    }
    /// Creates a new ChunkId from a string.
    ///
    /// # Examples
    /// ```
    /// # use std::error::Error;
    /// #
    /// # fn try_main() -> Result<(), Box<dyn Error>> {
    /// let chunk_id = riff::ChunkId::new("RIFF")?;
    /// # Ok(())
    /// # }
    /// #
    /// # fn main() {
    /// #     try_main().unwrap();
    /// # }
    /// ```
    ///
    /// # Errors
    /// The function fails when the string's length in bytes is not exactly 4.
    pub fn new(s: &str) -> Result<ChunkId, &str> {
        // Slice-to-array TryInto enforces the exact 4-byte length; the file
        // already imports std::convert::TryInto.
        let value: [u8; 4] = s.as_bytes().try_into().map_err(|_| "Invalid length")?;
        Ok(ChunkId { value })
    }
}
impl fmt::Display for ChunkId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render the four ASCII characters wrapped in single quotes.
        write!(f, "'{}'", self.as_str())
    }
}
impl fmt::Debug for ChunkId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Debug output is identical to Display.
        fmt::Display::fmt(self, f)
    }
}
#[derive(PartialEq, Debug)]
pub enum ChunkContents {
    /// A leaf chunk: an id followed by raw data bytes.
    Data(ChunkId, Vec<u8>),
    /// A container chunk carrying a chunk-type id (second field) and
    /// child chunks, as used by `RIFF`/`LIST`.
    Children(ChunkId, ChunkId, Vec<ChunkContents>),
    /// A container chunk without a type field, holding only children.
    ChildrenNoType(ChunkId, Vec<ChunkContents>)
}
impl ChunkContents {
pub fn write<T>(&self, writer: &mut T) -> std::io::Result<u64>
where T: Seek + Write {
match &self {
&ChunkContents::Data(id, data) => {
if data.len() as u64 > u32::MAX as u64 {
use std::io::{Error, ErrorKind};
return Err(Error::new(ErrorKind::InvalidData, "Data too big"));
}
let len = data.len() as u32;
writer.write_all(&id.value)?;
writer.write_all(&len.to_le_bytes())?;
writer.write_all(&data)?;
if len % 2 != 0 {
let single_byte: [u8; 1] = [0];
writer.write_all(&single_byte)?;
}
Ok((8 + len + (len % 2)).into())
}
&ChunkContents::Children(id, chunk_type, children) => {
writer.write_all(&id.value)?;
let len_pos = writer.seek(SeekFrom::Current(0))?;
let zeros: [u8; 4] = [0, 0, 0, 0];
writer.write_all(&zeros)?;
writer.write_all(&chunk_type.value)?;
let mut total_len: u64 = 4;
for child in children {
total_len = total_len + child.write(writer)?;
}
if total_len > u32::MAX as u64 {
use std::io::{Error, ErrorKind};
return Err(Error::new(ErrorKind::InvalidData, "Data too big"));
}
let end_pos = writer.seek(SeekFrom::Current(0))?;
writer.seek(SeekFrom::Start(len_pos))?;
writer.write_all(&(total_len as u32).to_le_bytes())?;
writer.seek(SeekFrom::Start(end_pos))?;
Ok((8 + total_len + (total_len % 2)).into())
}
&ChunkContents::ChildrenNoType(id, children) => {
writer.write_all(&id.value)?;
let len_pos = writer.seek(SeekFrom::Current(0))?;
let zeros: [u8; 4] = [0, 0, 0, 0];
writer.write_all(&zeros)?;
let mut total_len: u64 = 0;
for child in children {
total_len = total_len + child.write(writer)?;
}
if total_len > u32::MAX as u64 {
use std::io::{Error, ErrorKind};
return Err(Error::new(ErrorKind::InvalidData, "Data too big"));
}
let end_pos = writer.seek(SeekFrom::Current(0))?;
writer.seek(SeekFrom::Start(len_pos))?;
writer.write_all(&(total_len as u32).to_le_bytes())?;
writer.seek(SeekFrom::Start(end_pos))?;
Ok((8 + total_len + (total_len % 2)).into())
}
}
}
}
/// A chunk, also known as a form
#[derive(PartialEq, Eq, Debug)]
pub struct Chunk {
    // Byte offset of the chunk header from the start of the stream.
    pos: u64,
    // FourCC identifier of the chunk.
    id: ChunkId,
    // Length of the chunk contents in bytes, excluding the 8-byte header.
    len: u32,
}
/// An iterator over the children of a `Chunk`
pub struct Iter<'a, T>
    where T: Seek + Read {
    // Offset one past the last byte of the parent's contents.
    end: u64,
    // Offset of the next child chunk header to read.
    cur: u64,
    stream: &'a mut T
}
impl<'a, T> Iterator for Iter<'a, T>
    where T: Seek + Read {
    type Item = std::io::Result<Chunk>;
    /// Reads the chunk header at the current offset, then advances past the
    /// whole chunk (8-byte header + data + pad byte for odd lengths).
    fn next(&mut self) -> Option<Self::Item> {
        if self.cur >= self.end {
            return None;
        }
        Some(Chunk::read(&mut self.stream, self.cur).map(|chunk| {
            let len = u64::from(chunk.len());
            // On a read error the cursor is left unchanged, as before.
            self.cur += 8 + len + (len % 2);
            chunk
        }))
    }
}
impl Chunk {
    /// Returns the `ChunkId` of this chunk.
    pub fn id(&self) -> ChunkId {
        // `ChunkId` is `Copy`, so a plain read suffices.
        self.id
    }
    /// Returns the number of bytes in this chunk.
    pub fn len(&self) -> u32 {
        self.len
    }
    /// Returns the offset of this chunk from the start of the stream.
    pub fn offset(&self) -> u64 {
        self.pos
    }
    /// Reads the chunk type of this chunk.
    ///
    /// Generally only valid for `RIFF` and `LIST` chunks.
    pub fn read_type<T>(&self, stream: &mut T) -> std::io::Result<ChunkId>
        where T: Read + Seek {
        // The type id sits directly after the 8-byte chunk header.
        stream.seek(SeekFrom::Start(self.pos + 8))?;
        let mut type_bytes = [0u8; 4];
        stream.read_exact(&mut type_bytes)?;
        Ok(ChunkId { value: type_bytes })
    }
    /// Reads a chunk from the specified position in the stream.
    pub fn read<T>(stream: &mut T, pos: u64) -> std::io::Result<Chunk>
        where T: Read + Seek {
        stream.seek(SeekFrom::Start(pos))?;
        let mut id_bytes = [0u8; 4];
        stream.read_exact(&mut id_bytes)?;
        let mut len_bytes = [0u8; 4];
        stream.read_exact(&mut len_bytes)?;
        Ok(Chunk {
            pos,
            id: ChunkId { value: id_bytes },
            len: u32::from_le_bytes(len_bytes),
        })
    }
    /// Reads the entirety of the contents of a chunk.
    pub fn read_contents<T>(&self, stream: &mut T) -> std::io::Result<Vec<u8>>
        where T: Read + Seek {
        stream.seek(SeekFrom::Start(self.pos + 8))?;
        let mut contents: Vec<u8> = vec![0; self.len.try_into().unwrap()];
        stream.read_exact(&mut contents)?;
        Ok(contents)
    }
    /// Returns an iterator over the children of the chunk.
    ///
    /// If the chunk has children but is noncompliant, e.g. it has
    /// no type identifier (like `seqt` chunks), use `iter_no_type` instead.
    pub fn iter<'a, T>(&self, stream: &'a mut T) -> Iter<'a, T>
        where T: Seek + Read {
        // Children start after the 8-byte header plus the 4-byte chunk type.
        Iter {
            cur: self.pos + 12,
            end: self.pos + 4 + u64::from(self.len),
            stream,
        }
    }
    /// Returns an iterator over the children of the chunk. Only valid for
    /// noncompliant chunks that have children but no chunk type identifier
    /// (like `seqt` chunks).
    pub fn iter_no_type<'a, T>(&self, stream: &'a mut T) -> Iter<'a, T>
        where T: Seek + Read {
        // No type id here: children start right after the 8-byte header.
        Iter {
            cur: self.pos + 8,
            end: self.pos + 4 + u64::from(self.len),
            stream,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trip: string -> ChunkId, compared against the exported constants.
    #[test]
    fn chunkid_from_str() {
        assert_eq!(ChunkId::new("RIFF").unwrap(), RIFF_ID);
        assert_eq!(ChunkId::new("LIST").unwrap(), LIST_ID);
        assert_eq!(ChunkId::new("seqt").unwrap(), SEQT_ID);
        assert_eq!(ChunkId::new("123 ").unwrap(),
                   ChunkId { value: [0x31, 0x32, 0x33, 0x20] });
        // Ids must be exactly 4 bytes long.
        assert_eq!(ChunkId::new("123"), Err("Invalid length"));
        assert_eq!(ChunkId::new("12345"), Err("Invalid length"));
    }
    #[test]
    fn chunkid_to_str() {
        assert_eq!(RIFF_ID.as_str(), "RIFF");
        assert_eq!(LIST_ID.as_str(), "LIST");
        assert_eq!(SEQT_ID.as_str(), "seqt");
        assert_eq!(ChunkId::new("123 ").unwrap().as_str(), "123 ");
    }
    // Display and Debug both render the id wrapped in single quotes.
    #[test]
    fn chunkid_format() {
        assert_eq!(format!("{}", RIFF_ID), "'RIFF'");
        assert_eq!(format!("{}", LIST_ID), "'LIST'");
        assert_eq!(format!("{}", SEQT_ID), "'seqt'");
        assert_eq!(format!("{:?}", RIFF_ID), "'RIFF'");
        assert_eq!(format!("{:?}", LIST_ID), "'LIST'");
        assert_eq!(format!("{:?}", SEQT_ID), "'seqt'");
    }
}
|
use crate::account::responses::ListBlobsByTagsResponse;
use crate::core::prelude::*;
use azure_core::headers::add_optional_header;
use azure_core::prelude::*;
use std::convert::TryInto;
// Builder for the "Find Blobs by Tags" storage operation (`comp=blobs`).
#[derive(Debug, Clone)]
pub struct FindBlobsByTagsBuilder<'a> {
    // Storage client used to issue the request.
    client: &'a StorageClient,
    // Tag filter expression sent as the `where` query parameter.
    expression: String,
    // NOTE(review): `lease_id` is initialized but never read and has no
    // setter in `setters!` below — looks like dead state; confirm before removing.
    lease_id: Option<&'a str>,
    // Continuation marker for paged results.
    next_marker: Option<NextMarker>,
    // Maximum number of results per page.
    max_results: Option<MaxResults>,
    // Optional client-supplied request id header.
    client_request_id: Option<ClientRequestId<'a>>,
    // Optional server-side timeout.
    timeout: Option<Timeout>,
}
impl<'a> FindBlobsByTagsBuilder<'a> {
    // Creates a builder with an empty filter expression and no options set.
    pub(crate) fn new(client: &'a StorageClient) -> Self {
        Self {
            client,
            expression: "".to_string(),
            lease_id: None,
            next_marker: None,
            max_results: None,
            client_request_id: None,
            timeout: None,
        }
    }
    // Fluent setters generated by the project's `setters!` macro.
    setters! {
        expression: String => expression,
        next_marker: NextMarker => Some(next_marker),
        max_results: MaxResults => Some(max_results),
        client_request_id: ClientRequestId<'a> => Some(client_request_id),
        timeout: Timeout => Some(timeout),
    }
    // Issues the request against the account's blob endpoint and parses the
    // response into `ListBlobsByTagsResponse`.
    //
    // NOTE(review): `next_marker` and `max_results` have setters but are never
    // appended to the query string here — pagination appears unimplemented;
    // verify against the service documentation.
    pub async fn execute(
        &self,
    ) -> Result<ListBlobsByTagsResponse, Box<dyn std::error::Error + Send + Sync>> {
        let mut url = self
            .client
            .storage_account_client()
            .blob_storage_url()
            .to_owned();
        self.timeout.append_to_url_query(&mut url);
        // `comp=blobs` selects the Find-Blobs-by-Tags operation; `where`
        // carries the tag filter expression.
        url.query_pairs_mut().append_pair("comp", "blobs");
        url.query_pairs_mut().append_pair("where", &self.expression);
        trace!("url == {:?}", url);
        let (request, _url) = self.client.prepare_request(
            url.as_str(),
            &http::Method::GET,
            &|mut request| {
                // Propagate the caller-supplied request id header when present.
                request = add_optional_header(&self.client_request_id, request);
                request
            },
            None,
        )?;
        let response = self
            .client
            .http_client()
            .execute_request_check_status(request, http::StatusCode::OK)
            .await?;
        debug!("response.headers() == {:#?}", response.headers());
        Ok((&response).try_into()?)
    }
}
|
#![allow(dead_code)]
use veccentric::Fecc;
use std::time::Instant;
use pixels::{Error, Pixels, SurfaceTexture};
use winit::{
dpi::LogicalSize,
event::{Event, VirtualKeyCode},
event_loop::{ControlFlow, EventLoop},
window::WindowBuilder,
};
use winit_input_helper::WinitInputHelper;
/// Logical framebuffer width in pixels.
pub const WIDTH: u32 = 64;
/// Logical framebuffer height in pixels.
pub const HEIGHT: u32 = 64;
/// Window scale factor: each logical pixel is rendered this many times larger.
const SCALE: f64 = 5.0;
/// An RGB color with 8-bit channels (red, green, blue).
#[derive(Copy, Clone)]
pub struct Color(pub u8, pub u8, pub u8);
impl Color {
    /// Pure red.
    pub fn red() -> Self {
        Self(0xff, 0x00, 0x00)
    }
    /// Pure green.
    pub fn green() -> Self {
        Self(0x00, 0xff, 0x00)
    }
    /// Pure blue.
    pub fn blue() -> Self {
        Self(0x00, 0x00, 0xff)
    }
    /// Full white.
    pub fn white() -> Self {
        Self(0xff, 0xff, 0xff)
    }
    /// Full black.
    pub fn black() -> Self {
        Self(0x00, 0x00, 0x00)
    }
}
/// Runs a fixed-size pixel-buffer application loop.
///
/// `update` receives the mutable state and the seconds elapsed since the last
/// redraw; `draw` renders the state into the framebuffer, which is cleared to
/// `background` beforehand. The loop exits on Escape, window close, or a
/// render failure. `event_loop.run` never returns normally.
pub fn run<S, U, D>(
    mut state: S,
    mut update: U,
    mut draw: D,
    background: Color,
) -> Result<(), Error>
where
    S: 'static,
    U: FnMut(&mut S, f64) + 'static,
    D: FnMut(&S, &mut Buffer) + 'static,
{
    let event_loop = EventLoop::new();
    let mut input = WinitInputHelper::new();
    let window = {
        // Window size: logical framebuffer dimensions scaled up for visibility.
        let size =
            LogicalSize::new(WIDTH as f64 * SCALE, HEIGHT as f64 * SCALE);
        WindowBuilder::new()
            .with_title(get_exec_name())
            .with_inner_size(size)
            .with_min_inner_size(size)
            .build(&event_loop)
            .unwrap()
    };
    let mut pixels = {
        let window_size = window.inner_size();
        let surface_texture =
            SurfaceTexture::new(window_size.width, window_size.height, &window);
        Pixels::new(WIDTH, HEIGHT, surface_texture)?
    };
    // Timestamp of the last redraw, used to derive the per-frame delta time.
    let mut dt = Instant::now();
    event_loop.run(move |event, _, control_flow| {
        if let Event::RedrawRequested(_) = event {
            let mut buffer = Buffer::new(pixels.get_frame());
            update(&mut state, dt.elapsed().as_secs_f64());
            buffer.clear(background);
            draw(&state, &mut buffer);
            dt = Instant::now();
            // A render failure (e.g. lost surface) ends the application.
            if pixels.render().is_err() {
                *control_flow = ControlFlow::Exit;
                return;
            }
        }
        if input.update(&event) {
            if input.key_pressed(VirtualKeyCode::Escape) || input.quit() {
                *control_flow = ControlFlow::Exit;
                return;
            }
            if let Some(size) = input.window_resized() {
                pixels.resize_surface(size.width, size.height);
            }
            // Keep the loop animating: request a redraw after each input batch.
            window.request_redraw();
        }
    });
}
/// A borrowed view over the raw RGBA framebuffer (4 bytes per pixel, row-major).
pub struct Buffer<'a> {
    pixels: &'a mut [u8],
}
impl<'a> Buffer<'a> {
    /// Wraps a raw RGBA frame slice.
    fn new(pixels: &'a mut [u8]) -> Self {
        Self { pixels }
    }
    /// Plots one opaque pixel at `position` (floored to integer coordinates).
    /// Points outside the framebuffer are silently ignored.
    #[allow(clippy::many_single_char_names)]
    pub fn draw_point(&mut self, position: Fecc, Color(r, g, b): Color) {
        let (x, y) = position.floor().into();
        if let Some(start) = Self::ix(x, y) {
            self.pixels[start..start + 4].copy_from_slice(&[r, g, b, 0xff]);
        }
    }
    /// Fills the entire framebuffer with one opaque color.
    fn clear(&mut self, Color(r, g, b): Color) {
        let fill = [r, g, b, 0xff];
        for pixel in self.pixels.chunks_exact_mut(4) {
            pixel.copy_from_slice(&fill);
        }
    }
    /// Maps integer coordinates to the byte index of that pixel's RGBA
    /// quadruple, or `None` when out of bounds.
    fn ix(x: i64, y: i64) -> Option<usize> {
        let in_bounds =
            (0..WIDTH as i64).contains(&x) && (0..HEIGHT as i64).contains(&y);
        if in_bounds {
            Some(((y * WIDTH as i64 + x) * 4) as usize)
        } else {
            None
        }
    }
}
/// Best-effort name of the running executable, falling back to
/// `"veccentric"` when it cannot be determined (or is not valid UTF-8).
fn get_exec_name() -> String {
    std::env::current_exe()
        .ok()
        .as_deref()
        .and_then(|path| path.file_name())
        .and_then(|name| name.to_str())
        .map(str::to_owned)
        .unwrap_or_else(|| "veccentric".to_string())
}
|
//! GLL Grammar state and SPPF structure
/// Re-exported `petgraph` structs to avoid requiring dependency of generated code on it.
pub use petgraph::{dot::Dot, Directed, Graph};
use petgraph::{
graph::{EdgeReference, NodeIndex},
visit::EdgeRef,
};
use std::collections::{BTreeMap, BTreeSet};
use std::fmt::{Debug, Write};
/// Re-exported `streaming_iterator` structs to avoid requiring dependency of generated code on it.
pub use streaming_iterator::StreamingIterator;
/// GSS Node Label Type
///
/// A pair of (grammar label, input position).
pub type GSSNode<L> = (L, usize);
/// SPPF Nodes are stored into a vec, and references are stored as integer
pub type SPPFNodeIndex = usize;
/// A common trait that all symbols should impl,
/// the symbols and impl are generated.
/// You don't need to impl it in your code.
pub trait GrammarSymbol: Debug + PartialEq {
    /// Returns true when this symbol is the empty string (epsilon).
    fn is_eps(&self) -> bool;
}
/// A common trait that all labels should impl,
/// the labels and impl are generated.
/// You don't need to impl it in your code.
///
/// A label identifies a position inside a grammar rule (a "dotted rule").
pub trait GrammarLabel: Debug {
    /// The symbol type this label refers to.
    type Symbol: GrammarSymbol;
    /// if self is of form `X ::= a . b`,
    /// return true if a is a terminal or a non-nullable nonterminal and if b is not eps
    fn first(&self) -> bool;
    /// return Some(lhs) if it is the end of a grammar rule `lhs -> ...`, otherwise None
    fn end(&self) -> Option<Self::Symbol>;
}
/// Binary SPPF Node structure.
///
/// Each node can be one of: Dummy, Symbol, Intermediate and Packed.
///
/// Symbol is a Terminal or a Nonterminal.
///
/// Intermediate is a grammar rule with position.
///
/// Packed means different derivations of the same grammar rule.
///
/// Each grammar rule possible position corresponds to a label.
/// So store labels for Intermediate and Packed rules.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum SPPFNode<L: GrammarLabel> {
    /// $ node in original paper
    Dummy,
    /// (symbol, left, right, children) — left/right are input extents.
    Symbol(L::Symbol, usize, usize, Vec<SPPFNodeIndex>),
    /// (label, left, right, children) — left/right are input extents.
    Intermediate(L, usize, usize, Vec<SPPFNodeIndex>),
    /// (label, split, children) — split is the pivot input position.
    Packed(L, usize, Vec<SPPFNodeIndex>),
}
/// All GSS Parser states.
/// It is used by generated code, don't use it directly.
pub struct GSSState<L: Ord + Clone + GrammarLabel> {
    /// Direct GSS graph
    pub graph: Graph<GSSNode<L>, SPPFNodeIndex, Directed>,
    /// Mapping from node to its index
    pub nodes: BTreeMap<GSSNode<L>, NodeIndex>,
    /// All sppf nodes, and nodes reference each other by index
    pub sppf_nodes: Vec<SPPFNode<L>>,
    /// GSS root node (bottom of the stack).
    pub initial_node_index: NodeIndex,
    /// U_j in original paper
    pub visited: Vec<BTreeSet<(L, NodeIndex, SPPFNodeIndex)>>,
    /// R in original paper
    pub todo: Vec<(L, NodeIndex, usize, SPPFNodeIndex)>,
    /// P in original paper
    pub pop: BTreeSet<(NodeIndex, SPPFNodeIndex)>,
    /// C_i in original paper
    pub current_position: usize,
    /// C_u in original paper
    pub current_node_index: NodeIndex,
    /// C_n in original paper
    pub current_sppf_node: usize,
}
impl<L: GrammarLabel> SPPFNode<L> {
/// Get right extent of the node, panics if it doesn't have it.
pub fn right_extent(&self) -> usize {
use SPPFNode::*;
match self {
Symbol(_, _, r, _) => *r,
Intermediate(_, _, r, _) => *r,
_ => panic!("no right extent for packed and dummy"),
}
}
/// Get left extent of the node, panics if it doesn't have it.
pub fn left_extent(&self) -> usize {
use SPPFNode::*;
match self {
Symbol(_, l, _, _) => *l,
Intermediate(_, l, _, _) => *l,
_ => panic!("no left extent for packed and dummy"),
}
}
/// Get children node references.
pub fn children(&self) -> Option<&Vec<SPPFNodeIndex>> {
use SPPFNode::*;
match self {
Dummy => None,
Symbol(_, _, _, children) => Some(children),
Intermediate(_, _, _, children) => Some(children),
Packed(_, _, children) => Some(children),
}
}
fn children_mut(&mut self) -> Option<&mut Vec<SPPFNodeIndex>> {
use SPPFNode::*;
match self {
Symbol(_, _, _, children) => Some(children),
Intermediate(_, _, _, children) => Some(children),
_ => panic!("no children for packed and dummy"),
}
}
}
impl<L: Ord + Clone + GrammarLabel> GSSState<L> {
    /// The `add` function in the paper
    ///
    /// Queues descriptor `(l, u, w)` at input position `i` unless it was
    /// already seen there (deduplicated through `visited`).
    pub fn add(&mut self, l: L, u: NodeIndex, i: usize, w: SPPFNodeIndex) {
        if !self.visited[i].contains(&(l.clone(), u, w)) {
            self.visited[i].insert((l.clone(), u, w));
            self.todo.push((l, u, i, w));
        }
    }
    /// The `pop` function in the paper
    ///
    /// Pops GSS node `u` at position `i` with SPPF node `z`: records the pop
    /// in `P` and re-schedules every outgoing GSS edge of `u`.
    pub fn pop(&mut self, u: NodeIndex, i: usize, z: SPPFNodeIndex) {
        if u != self.initial_node_index {
            let (l, _k) = self.graph[u].clone();
            self.pop.insert((u, z));
            // Materialize the edge data first: `get_node_p`/`add` below need
            // `&mut self`, which cannot coexist with the edge iterator borrow.
            let edges: Vec<EdgeReference<SPPFNodeIndex>> = self.graph.edges(u).collect();
            let edge_data: Vec<(NodeIndex, SPPFNodeIndex)> = edges
                .iter()
                .map(|edge| (edge.target(), *edge.weight()))
                .collect();
            for (v, w) in edge_data {
                let y = self.get_node_p(l.clone(), w, z);
                self.add(l.clone(), v, i, y);
            }
        }
    }
    /// The `create` function in the paper
    ///
    /// Ensures GSS node `(l, j)` exists, links it to `u` with SPPF weight `w`,
    /// and replays earlier pops of that node against the new edge.
    pub fn create(&mut self, l: L, u: NodeIndex, j: usize, w: SPPFNodeIndex) -> NodeIndex {
        let node = (l.clone(), j);
        let v = if let Some(index) = self.nodes.get(&node) {
            *index
        } else {
            let index = self.graph.add_node(node.clone());
            self.nodes.insert(node, index);
            index
        };
        if self.graph.find_edge(v, u).is_none() {
            self.graph.add_edge(v, u, w);
            // Snapshot P: the loop body mutates `self` via `get_node_p`/`add`.
            let pop = self.pop.clone();
            for (index, z) in pop.into_iter() {
                if index == v {
                    let y = self.get_node_p(l.clone(), w, z);
                    let h = self.sppf_nodes[z].right_extent();
                    self.add(l.clone(), u, h, y);
                }
            }
        }
        v
    }
    /// The `get_node_t` function in the paper
    ///
    /// Finds/creates the SPPF leaf for symbol `x` starting at `i`; epsilon
    /// spans zero input positions, any other terminal spans one.
    pub fn get_node_t(&mut self, x: L::Symbol, i: usize) -> SPPFNodeIndex {
        let h = if x.is_eps() { i } else { i + 1 };
        self.find_or_create_sppf_symbol(x, i, h)
    }
    /// The `get_node_p` function in the paper
    ///
    /// Combines child nodes `w` and `z` under label `l`, attaching a packed
    /// node to the relevant Symbol/Intermediate parent (created on demand).
    pub fn get_node_p(&mut self, l: L, w: SPPFNodeIndex, z: SPPFNodeIndex) -> SPPFNodeIndex {
        if l.first() {
            return z;
        } else {
            let node_z = &self.sppf_nodes[z];
            let k = node_z.left_extent();
            let i = node_z.right_extent();
            let node_w = &self.sppf_nodes[w];
            if SPPFNode::Dummy != *node_w {
                // w != $
                let j = node_w.left_extent();
                assert_eq!(node_w.right_extent(), k);
                if let Some(t) = l.end() {
                    // t = X
                    let y = self.find_or_create_sppf_symbol(t, j, i);
                    if !self.has_packed_child(y, &l, k) {
                        // New packed node is pushed at index `len`.
                        let len = self.sppf_nodes.len();
                        self.sppf_nodes[y].children_mut().unwrap().push(len);
                        self.sppf_nodes.push(SPPFNode::Packed(l, k, vec![w, z]));
                    }
                    y
                } else {
                    // t = l
                    let y = self.find_or_create_sppf_intermediate(l.clone(), j, i);
                    if !self.has_packed_child(y, &l, k) {
                        let len = self.sppf_nodes.len();
                        self.sppf_nodes[y].children_mut().unwrap().push(len);
                        self.sppf_nodes.push(SPPFNode::Packed(l, k, vec![w, z]));
                    }
                    y
                }
            } else {
                // w = $
                if let Some(t) = l.end() {
                    // t = X
                    let y = self.find_or_create_sppf_symbol(t, k, i);
                    if !self.has_packed_child(y, &l, k) {
                        let len = self.sppf_nodes.len();
                        self.sppf_nodes[y].children_mut().unwrap().push(len);
                        self.sppf_nodes.push(SPPFNode::Packed(l, k, vec![z]));
                    }
                    y
                } else {
                    // t = l
                    let y = self.find_or_create_sppf_intermediate(l.clone(), k, i);
                    if !self.has_packed_child(y, &l, k) {
                        let len = self.sppf_nodes.len();
                        self.sppf_nodes[y].children_mut().unwrap().push(len);
                        self.sppf_nodes.push(SPPFNode::Packed(l, k, vec![z]));
                    }
                    y
                }
            }
        }
    }
    // Returns the index of the Symbol node (s, i, j), creating it if absent.
    fn find_or_create_sppf_symbol(&mut self, s: L::Symbol, i: usize, j: usize) -> SPPFNodeIndex {
        // TODO: linear search is slow
        for (index, node) in self.sppf_nodes.iter().enumerate() {
            if let SPPFNode::Symbol(node_s, node_i, node_j, _) = node {
                if *node_s == s && *node_i == i && *node_j == j {
                    return index;
                }
            }
        }
        self.sppf_nodes.push(SPPFNode::Symbol(s, i, j, vec![]));
        self.sppf_nodes.len() - 1
    }
    // Returns the index of the Intermediate node (l, i, j), creating it if absent.
    fn find_or_create_sppf_intermediate(&mut self, l: L, i: usize, j: usize) -> SPPFNodeIndex {
        // TODO: linear search is slow
        for (index, node) in self.sppf_nodes.iter().enumerate() {
            if let SPPFNode::Intermediate(node_l, node_i, node_j, _) = node {
                if *node_l == l && *node_i == i && *node_j == j {
                    return index;
                }
            }
        }
        self.sppf_nodes
            .push(SPPFNode::Intermediate(l, i, j, vec![]));
        self.sppf_nodes.len() - 1
    }
    /// Collect all symbol leaves of a packed node
    pub fn collect_symbols(&self, node: SPPFNodeIndex) -> Vec<SPPFNodeIndex> {
        use SPPFNode::*;
        match &self.sppf_nodes[node] {
            Dummy => vec![],
            Symbol(_, _, _, _) => vec![node],
            // Intermediate/Packed nodes recurse into their children.
            Intermediate(_, _, _, children) => children
                .iter()
                .map(|node| self.collect_symbols(*node))
                .flatten()
                .collect(),
            Packed(_, _, children) => children
                .iter()
                .map(|node| self.collect_symbols(*node))
                .flatten()
                .collect(),
        }
    }
    // True when `node` already has a packed child with label `l` and split `k`.
    fn has_packed_child(&self, node: SPPFNodeIndex, l: &L, k: usize) -> bool {
        // TODO: linear search is slow
        if let Some(children) = self.sppf_nodes[node].children() {
            return children.iter().any(|index| match &self.sppf_nodes[*index] {
                SPPFNode::Packed(node_l, node_k, _) => node_l == l && *node_k == k,
                _ => false,
            });
        } else {
            unreachable!()
        }
    }
    /// Print current SPPF graph in graphviz format
    pub fn print_sppf_dot(&self) -> String {
        let mut res = String::new();
        write!(&mut res, "digraph {{\n").unwrap();
        for (i, node) in self.sppf_nodes.iter().enumerate() {
            // Symbols show their debug form; other kinds get a one-letter tag.
            let label = match node {
                SPPFNode::Symbol(s, _, _, _) => format!("{:?}", s),
                SPPFNode::Intermediate(_, _, _, _) => format!("I"),
                SPPFNode::Packed(_, _, _) => format!("P"),
                SPPFNode::Dummy => format!("D"),
            };
            write!(&mut res, "{} [label={:?}]\n", i, label).unwrap();
            if let Some(children) = node.children() {
                for child in children {
                    write!(&mut res, "{} -> {}\n", i, child).unwrap();
                }
            }
        }
        write!(&mut res, "}}").unwrap();
        res
    }
    /// Print current GSS graph in graphviz format
    pub fn print_gss_dot(&self) -> String {
        format!("{:?}", Dot::with_config(&self.graph, &[]))
    }
}
|
// Nightly-only: enables the unstable built-in `test` crate for benchmarks.
#![feature(test)]
extern crate test;
// Benchmarks are compiled only in test builds.
#[cfg(test)]
mod benchmarks;
|
use std::fmt;
use std::fmt::Debug;
use serde::de;
use serde::de::Deserialize;
use serde::de::Deserializer;
use serde::de::MapAccess;
use serde::de::SeqAccess;
use serde::de::Visitor;
use serde::ser::Serialize;
use serde::ser::SerializeStruct;
use serde::ser::Serializer;
/// Fully interleaved Morton code (all dimensions packed together).
pub type MortonCode = u32;
/// A single per-dimension coordinate value before encoding.
pub type MortonValue = u16;
/// Bit width of `MortonCode`.
const MORTON_CODE_BITS: usize = 32;
/// Maximum number of usable bits per `MortonValue`.
const MORTON_VALUE_BITS: usize = 10;
/// Lookup-table size: 2^MORTON_VALUE_BITS entries.
const MORTON_MAX_VALUES: usize = 1024;
// Table-driven Morton (Z-order) encoder/decoder for N dimensions.
#[derive(Clone)]
pub struct MortonEncoder {
    // Number of significant bits per coordinate value.
    cell_bits: usize,
    // Mask with the low `cell_bits` bits set.
    cell_mask: usize,
    // Number of interleaved dimensions.
    dimensions: usize,
    // One lookup table per dimension: value -> pre-spread bits.
    table: Vec<[MortonCode; MORTON_MAX_VALUES]>,
}
impl Debug for MortonEncoder {
    /// Hand-written `Debug`: the table is a `Vec` of large arrays, which is
    /// awkward to derive; output format is kept stable.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "MortonEncoder {{ cell_bits: {}, cell_mask: {}, dimensions: {}, table: ",
            self.cell_bits, self.cell_mask, self.dimensions
        )?;
        f.write_str("[ ")?;
        for row in &self.table {
            f.write_str("[ ")?;
            for code in row.iter() {
                write!(f, "{}, ", code)?;
            }
            f.write_str("], ")?;
        }
        f.write_str("] }")
    }
}
impl MortonEncoder {
    /// Creates an encoder for `dimensions` coordinates of `cell_bits` bits each.
    ///
    /// Precomputes one lookup table per dimension that maps a coordinate
    /// value to its bits spread out and offset, ready to be OR-ed with the
    /// other dimensions' contributions.
    ///
    /// # Panics
    /// Panics when `cell_bits` exceeds `MORTON_VALUE_BITS`, or when the
    /// interleaved result would not fit in a single `MortonCode`.
    pub fn new(dimensions: usize, cell_bits: usize) -> Self {
        // Make sure we can store the encoding in a single MortonCode.
        assert!(MORTON_VALUE_BITS >= cell_bits);
        assert!(MORTON_CODE_BITS >= cell_bits * dimensions);
        let cell_max = 1 << cell_bits;
        let cell_mask = cell_max - 1;
        // Build the per-dimension lookup tables.
        let mut table = vec![];
        for k in 0..dimensions {
            table.push([0; MORTON_MAX_VALUES]);
            for i in 0..cell_max {
                let mut v = 0;
                for p in 0..cell_bits {
                    // The bit is already at position p, so shift it only
                    // (dimensions - 1) * p further, plus the dimension offset k.
                    let bit = i & (1 << p);
                    v |= bit << (p * (dimensions - 1) + k);
                }
                table[k][i] = v as MortonCode;
            }
        }
        MortonEncoder {
            cell_bits,
            cell_mask,
            dimensions,
            table,
        }
    }
    /// Spreads the bits of value `v` for dimension `k` via table lookup.
    fn encode_1(&self, k: usize, v: MortonValue) -> MortonCode {
        // Mask off bits above `cell_bits` so out-of-range values cannot index
        // past the meaningful part of the table; slice indexing still bounds-checks.
        let v = v as usize & self.cell_mask;
        self.table[k][v]
    }
    /// Extracts the value of dimension `k` from an interleaved `code`.
    fn decode_1(&self, k: usize, code: MortonCode) -> MortonValue {
        let mut v = 0;
        for i in 0..self.cell_bits {
            // Bit i of dimension k lives at interleaved position i * dims + k.
            let bit_pos = i * self.table.len() + k;
            let bit = code as usize & (1 << bit_pos);
            // Move the extracted bit back down to position i.
            v |= (bit >> (bit_pos - i)) as MortonValue;
        }
        v as MortonValue
    }
    /// Interleaves one value per dimension into a single Morton code.
    ///
    /// # Errors
    /// Returns a description when `v.len()` differs from the configured
    /// number of dimensions.
    pub fn encode(&self, v: &[MortonValue]) -> Result<MortonCode, String> {
        if self.dimensions != v.len() {
            return Err(format!(
                "Incorrect number of dimensions, expected {}, got {} for {:?}",
                self.dimensions,
                v.len(),
                v
            ));
        }
        let mut code = 0;
        for (k, value) in v.iter().enumerate() {
            code |= self.encode_1(k, *value);
        }
        Ok(code)
    }
    /// Recovers the per-dimension values from a Morton `code`.
    pub fn decode(&self, code: MortonCode) -> Vec<MortonValue> {
        (0..self.dimensions)
            .map(|k| self.decode_1(k, code))
            .collect()
    }
}
impl Serialize for MortonEncoder {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // We serialize the minimum amount of information necessary to
        // deserialize the table.
        // This is the parameters to init(dimensions, cell_bits); the lookup
        // table itself is deterministic and is rebuilt on deserialization.
        let mut state = serializer.serialize_struct("MortonEncoder", 2)?;
        state.serialize_field("cell_bits", &self.cell_bits)?;
        state.serialize_field("dimensions", &self.dimensions)?;
        state.end()
    }
}
impl<'de> Deserialize<'de> for MortonEncoder {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
enum Field {
CellBits,
Dimensions,
};
impl<'de> Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Field, D::Error>
where
D: Deserializer<'de>,
{
struct FieldVisitor;
impl<'de> Visitor<'de> for FieldVisitor {
type Value = Field;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("`cell_bits` or `dimensions`")
}
fn visit_str<E>(self, value: &str) -> Result<Field, E>
where
E: de::Error,
{
match value {
"cell_bits" => Ok(Field::CellBits),
"dimensions" => Ok(Field::Dimensions),
_ => Err(de::Error::unknown_field(value, FIELDS)),
}
}
}
deserializer.deserialize_identifier(FieldVisitor)
}
}
struct MortonEncoderVisitor;
impl<'de> Visitor<'de> for MortonEncoderVisitor {
type Value = MortonEncoder;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("struct MortonEncoder")
}
fn visit_seq<V>(self, mut seq: V) -> Result<MortonEncoder, V::Error>
where
V: SeqAccess<'de>,
{
let cell_bits = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(0, &self))?;
let dimensions = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(1, &self))?;
Ok(MortonEncoder::new(dimensions, cell_bits))
}
fn visit_map<V>(self, mut map: V) -> Result<MortonEncoder, V::Error>
where
V: MapAccess<'de>,
{
let mut cell_bits = None;
let mut dimensions = None;
while let Some(key) = map.next_key()? {
match key {
Field::CellBits => {
if cell_bits.is_some() {
return Err(de::Error::duplicate_field("cell_bits"));
}
cell_bits = Some(map.next_value()?);
}
Field::Dimensions => {
if dimensions.is_some() {
return Err(de::Error::duplicate_field("dimensions"));
}
dimensions = Some(map.next_value()?);
}
}
}
let cell_bits = cell_bits.ok_or_else(|| de::Error::missing_field("cell_bits"))?;
let dimensions =
dimensions.ok_or_else(|| de::Error::missing_field("dimensions"))?;
Ok(MortonEncoder::new(dimensions, cell_bits))
}
}
const FIELDS: &[&str] = &["cell_bits", "dimensions"];
deserializer.deserialize_struct("MortonEncoder", FIELDS, MortonEncoderVisitor)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Constructor assertion tests: invalid (dimensions, cell_bits) combos panic.
    mod init {
        use super::*;
        /* Check the assertions */
        #[test]
        #[should_panic]
        fn dim1_bit32() {
            let _m = MortonEncoder::new(1, 31);
        }
        #[test]
        #[should_panic]
        fn dim2_bit16() {
            // Max 10 bit for the codes, even if 16 would fit
            let _m = MortonEncoder::new(2, 16);
        }
        #[test]
        #[should_panic]
        fn dim33_bit1() {
            let _m = MortonEncoder::new(33, 1);
        }
        #[test]
        #[should_panic]
        fn dim17_bit2() {
            let _m = MortonEncoder::new(17, 2);
        }
        #[test]
        fn dim1_bit10() {
            let _m = MortonEncoder::new(1, 10);
        }
        #[test]
        fn dim2_bit10() {
            let _m = MortonEncoder::new(2, 10);
        }
        #[test]
        fn dim3_bit10() {
            let _m = MortonEncoder::new(3, 10);
        }
        #[test]
        fn dim4_bit8() {
            let _m = MortonEncoder::new(4, 8);
        }
        #[test]
        fn dim32_bit1() {
            let _m = MortonEncoder::new(32, 1);
        }
        // Legacy exploratory code kept for reference; predates this test suite.
        /*
        morton_init();
        // Morton table looks OK
        // for n in 0..10 {
        //     println!("{:4}", n);
        //     for k in 0..K {
        //         println!("{:032b}", unsafe {MORTON[k][n]});
        //     }
        // }
        for n in 0..CELL_MAX {
            println!("## {:04}", n);
            let mut c = 0 as Code;
            for k in 0..K {
                // check diagonal
                c = c | morton_encode(k, n as u16);
            }
            let f = n as u16;
            for k in 1..2 {
                // check diagonal
                let p = morton_decode(k, c);
                println!("\n{:04} \n f {:04}\n p {:04}\n 𝚫 {:06}\n", c, f, p, f-p);
            }
        }
        let mut f = 0.0f64;
        // while f < 1.0 {
        //     let v = convert_to_fixed(&f);
        //     let p = convert_to_f64(&v);
        //     println!("\n{:010} \n f {:+0.16e}\n p {:+03.16e}\n 𝚫 {:+03.16e}\n", v, f, p, f - p);
        //
        //     f += 0.1e-1;
        // }
        let f =0.000724939184752;
        let v = convert_to_fixed(&f);
        let p = convert_to_f64(&v);
        println!("\n{:010} \n f {:+0.16e}\n p {:+03.16e}\n 𝚫 {:+03.16e}\n", v, f, p, f - p);
        */
    }
    // Lookup-table correctness tests against an independent bit-spread model.
    mod encode {
        use super::*;
        /* Check the lookup table produced */
        #[test]
        fn dim1_bit10() {
            // With a single dimension the encoding is the identity.
            let m = MortonEncoder::new(1, 10);
            for n in 0..MORTON_MAX_VALUES {
                assert_eq!(n as MortonCode, m.encode_1(0, n as MortonValue));
            }
        }
        #[test]
        fn table_dim2_bit10() {
            let m = MortonEncoder::new(2, 10);
            let mut lookup = Vec::<Vec<MortonCode>>::new();
            for k in 0..2 {
                lookup.push(Vec::new());
                for n in 0..MORTON_MAX_VALUES {
                    // Morton numbers are number where the bit are exploded so that we can
                    // interleave them. This means that for each position of a value, we need to
                    // insert dimensions - 1 columns between each bits, and shift that result by the
                    // dimension number so that we can OR all the dimensions together without having
                    // bits colliding.
                    let mut v = 0;
                    for p in 0..MORTON_VALUE_BITS {
                        let b = (n & (1 << p)) >> p;
                        v = v | b << (p * 2 + k);
                    }
                    lookup[k].push(v as MortonCode);
                }
            }
            for k in 0..2 {
                for n in 0..MORTON_MAX_VALUES {
                    assert_eq!(lookup[k][n], m.encode_1(k, n as MortonValue));
                }
            }
        }
        // Shared checker: rebuilds the expected table independently and
        // compares it entry by entry against the encoder under test.
        fn check(dimensions: usize, value_max: usize, value_bits: usize, m: MortonEncoder) -> () {
            let mut lookup = Vec::<Vec<MortonCode>>::new();
            for k in 0..dimensions {
                lookup.push(Vec::new());
                for n in 0..value_max {
                    // Morton numbers are number where the bit are exploded so that we can
                    // interleave them. This means that for each position of a value, we need to
                    // insert dimensions -1 columns between each bits, and shift that result by the
                    // dimension number so that we can OR all the dimensions together without having
                    // bits colliding.
                    let mut v = 0;
                    for p in 0..value_bits {
                        let b = (n & (1 << p)) >> p;
                        v = v | b << (p * dimensions + k);
                    }
                    lookup[k].push(v as MortonCode);
                }
            }
            for k in 0..dimensions {
                for n in 0..value_max {
                    assert_eq!(lookup[k][n], m.encode_1(k, n as MortonValue));
                }
            }
        }
        #[test]
        fn table_dim3_bit10() {
            let m = MortonEncoder::new(3, 10);
            check(3, 1024, 10, m);
        }
        #[test]
        fn table_dim4_bit8() {
            let m = MortonEncoder::new(4, 8);
            check(4, 256, 8, m);
        }
    }
}
|
fn main() {
    // A tuple groups values of several different types into one compound value.
    let a: (isize, f64, &str) = (1, 1.0, "abc");
    // Tuple fields are accessed positionally (or by destructuring), not by indexing.
    let (first, second, third) = a;
    println!("{},{},{}", first, second, third);
    // Index syntax does not compile for tuples:
    //println!("{:?},{:?},{:?}", a[0], a[1], a[2]);
}
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// FormulaAndFunctionQueryDefinition : A formula and function query.
// NOTE: generated code (openapi-generator); field set mixes several query
// kinds (metric/event/process), all flattened into one struct.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct FormulaAndFunctionQueryDefinition {
    /// Aggregation applied to the metric query, when applicable.
    #[serde(rename = "aggregator", skip_serializing_if = "Option::is_none")]
    pub aggregator: Option<crate::models::FormulaAndFunctionMetricAggregation>,
    /// Source of the queried data.
    #[serde(rename = "data_source")]
    pub data_source: crate::models::FormulaAndFunctionProcessQueryDataSource,
    /// Name of query for use in formulas.
    #[serde(rename = "name")]
    pub name: String,
    /// Metrics query definition.
    #[serde(rename = "query")]
    pub query: String,
    /// Compute options for event queries (boxed nested model).
    #[serde(rename = "compute")]
    pub compute: Box<crate::models::FormulaAndFunctionEventQueryDefinitionCompute>,
    /// Group by options.
    #[serde(rename = "group_by", skip_serializing_if = "Option::is_none")]
    pub group_by: Option<Vec<crate::models::FormulaAndFunctionEventQueryGroupBy>>,
    /// An array of index names to query in the stream. Omit or use `[]` to query all indexes at once.
    #[serde(rename = "indexes", skip_serializing_if = "Option::is_none")]
    pub indexes: Option<Vec<String>>,
    /// Search options for event queries (boxed nested model).
    #[serde(rename = "search", skip_serializing_if = "Option::is_none")]
    pub search: Option<Box<crate::models::FormulaAndFunctionEventQueryDefinitionSearch>>,
    /// Whether to normalize the CPU percentages.
    #[serde(rename = "is_normalized_cpu", skip_serializing_if = "Option::is_none")]
    pub is_normalized_cpu: Option<bool>,
    /// Number of hits to return.
    #[serde(rename = "limit", skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
    /// Process metric name.
    #[serde(rename = "metric")]
    pub metric: String,
    /// Direction of the query sort.
    #[serde(rename = "sort", skip_serializing_if = "Option::is_none")]
    pub sort: Option<crate::models::QuerySortOrder>,
    /// An array of tags to filter by.
    #[serde(rename = "tag_filters", skip_serializing_if = "Option::is_none")]
    pub tag_filters: Option<Vec<String>>,
    /// Text to use as filter.
    #[serde(rename = "text_filter", skip_serializing_if = "Option::is_none")]
    pub text_filter: Option<String>,
}
impl FormulaAndFunctionQueryDefinition {
    /// A formula and function query.
    ///
    /// Constructs a definition from the required fields; every optional
    /// field starts as `None` and can be filled in afterwards.
    pub fn new(data_source: crate::models::FormulaAndFunctionProcessQueryDataSource, name: String, query: String, compute: crate::models::FormulaAndFunctionEventQueryDefinitionCompute, metric: String) -> FormulaAndFunctionQueryDefinition {
        FormulaAndFunctionQueryDefinition {
            aggregator: None,
            data_source,
            name,
            query,
            // Stored boxed, matching the field's declared `Box` type.
            compute: Box::new(compute),
            group_by: None,
            indexes: None,
            search: None,
            is_normalized_cpu: None,
            limit: None,
            metric,
            sort: None,
            tag_filters: None,
            text_filter: None,
        }
    }
}
|
use crate::prelude::*;
/// Marker component naming an entity that should be removed from the world
/// on the next run of `despawn_entities_system`.
#[derive(Debug)]
struct DespawnCommand(pub Entity);
/// System that gathers every pending `DespawnCommand` and despawns its target.
pub fn despawn_entities_system(world: &mut World) {
    // Collect the targets first so the query borrow ends before we mutate the world.
    let targets: Vec<_> = world
        .query::<&DespawnCommand>()
        .into_iter()
        .map(|(_, command)| command.0)
        .collect();
    despawn_entities(world, targets);
}
/// Despawns each entity, logging (rather than failing) when one is already gone.
fn despawn_entities(world: &mut World, entities: Vec<Entity>) {
    for entity in entities {
        // A missing entity is not fatal — it may already have been removed
        // this frame — so just log it. `.is_err()` replaces the noisier
        // `if let Err(_) = ...` pattern (clippy: redundant_pattern_matching).
        if world.despawn(entity).is_err() {
            console::log(format!("Tried to despawn missing entity: {}", entity.id()));
        }
    }
}
// pub fn queue_despawn(world: &mut World, entity: Entity) {
// world.spawn_command(DespawnCommand(entity));
// }
pub fn queue_despawn_batch<I>(world: &mut World, iter: I)
where
I: IntoIterator<Item = Entity>,
{
world.spawn_batch_commands(iter.into_iter().map(|entity| DespawnCommand(entity)));
}
|
use std::fmt::{Display,Formatter};
use std::sync::mpsc::Sender;
use std::sync::{Arc,Mutex};
use std::thread;
use std::time::SystemTime;
use std::time::Duration;
use Payload;
use sensor_lib::TempHumidityValue;
use linux_hal::{I2cdev, Delay};
use linux_hal::i2cdev::linux::LinuxI2CError;
use htu21d::HTU21D;
use htu21d::Error as Htu21dError;
/// String-message error type for the HTU21D sensor module; built from I2C
/// and driver errors via the `From` impls below.
#[derive(Debug)]
pub struct Error {
    message: String,
}
impl From<LinuxI2CError> for Error {
fn from(err: LinuxI2CError) -> Self {
Error {
message: format!("I2C Error: {}", err),
}
}
}
impl From<Htu21dError<LinuxI2CError>> for Error {
fn from(err: Htu21dError<LinuxI2CError>) -> Self {
match err {
Htu21dError::I2c(i2c_error) => {
Error {
message: format!("HTU21 I2C Error: {}", i2c_error),
}
},
}
}
}
impl Display for Error {
    /// Renders the stored message verbatim.
    fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {
        // Equivalent to write!(f, "{}", self.message) but without going
        // through the formatting machinery.
        f.write_str(&self.message)
    }
}
/// Handle bundling the HTU21D driver with the channels and locks its
/// polling thread needs (see `start_thread`).
pub struct Htu21d {
    sender: Sender<Payload>,               // outgoing readings / poison messages to the main thread
    lock: Arc<Mutex<i32>>,                 // shared lock held while talking to the sensor over I2C
    htu21d: HTU21D<I2cdev, Delay>,         // the sensor driver itself
    humidity_mutex: Arc<Mutex<(f32,f32)>>, // latest (temp, humidity) pair shared with other sensor code
}
impl Htu21d {
    /// Opens `/dev/i2c-1`, resets the HTU21D sensor, and returns a handle
    /// ready to be handed to `start_thread`.
    ///
    /// * `sender` — channel used to publish readings (and poison messages).
    /// * `lock` — mutex held while performing I2C reads.
    /// * `humidity_mutex` — shared (temperature, humidity) cell updated after
    ///   each successful read pair.
    pub fn new(sender: Sender<Payload>, lock: Arc<Mutex<i32>>, humidity_mutex: Arc<Mutex<(f32,f32)>>) -> Result<Htu21d, Error> {
        info!("Create and Init HTU21Df");
        let dev = I2cdev::new("/dev/i2c-1")?;
        let mut htu21d = HTU21D::new(dev, Delay);
        htu21d.reset()?;
        Ok(Htu21d{
            sender,
            lock,
            htu21d,
            humidity_mutex
        })
    }
    /// Spawns the background polling loop, taking ownership of the handle.
    ///
    /// Roughly once a minute it reads temperature and humidity while holding
    /// the shared lock, mirrors the pair into `humidity_mutex`, and publishes
    /// it as JSON over `sender`. If either mutex is poisoned it sends a
    /// "poison" payload so the main thread can shut the app down.
    pub fn start_thread(mut htu: Htu21d){
        thread::spawn(move || {
            info!("Started HTU21D Thread");
            loop {
                //Read and send value every 1 mins (add a few milliseconds to hopefully reduce collisions on mutex blocking)
                thread::sleep(Duration::from_millis(60005));
                //////////////////////////
                // TEMP AND HUMIDITY
                //////////////////////////
                let mut temp = None;
                let mut humidity = None;
                // The Ok(_) guard keeps the MutexGuard alive for the whole
                // match arm, so both reads happen under the lock.
                match htu.lock.lock() {
                    Ok(_) => {
                        match htu.htu21d.read_temperature() {
                            Ok(val) => {
                                temp = Some(val);
                            },
                            Err(err) => {
                                error!("Failed to read temp from HTU21D: {:?}", err);
                            },
                        }
                        match htu.htu21d.read_humidity() {
                            Ok(val) => {
                                humidity = Some(val);
                            },
                            Err(err) => {
                                error!("Failed to read humidity from HTU21D: {:?}", err);
                            },
                        }
                    },
                    Err(_) => {
                        error!("The lock has been poisoned, sending a poison message to kill the app");
                        htu.sender.send(Payload{
                            queue: String::from("poison"),
                            bytes: String::from("poison")
                        }).unwrap(); //We don't really care anymore if this thread panics
                    },
                }
                // Only publish when BOTH reads succeeded this cycle.
                if let (Some(temp_val), Some(hum_val)) = (temp, humidity){
                    //Update the humidity mutex for the sgp30 to use
                    match htu.humidity_mutex.lock(){
                        Ok(mut mut_val) => {
                            *mut_val = (temp_val, hum_val);
                        },
                        Err(_) => {
                            error!("The lock has been poisoned, sending a poison message to kill the app");
                            htu.sender.send(Payload{
                                queue: String::from("poison"),
                                bytes: String::from("poison")
                            }).unwrap(); //We don't really care anymore if this thread panics
                        },
                    }
                    // Sensor reports Celsius; the published value is Fahrenheit.
                    let temp_f = temp_val as f32 * 1.8 + 32.0;
                    let temp_humidity = TempHumidityValue {
                        timestamp: SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_millis() as u64,
                        location: 2,
                        temp: temp_f,
                        humidity: hum_val,
                    };
                    match serde_json::to_string(&temp_humidity) {
                        Ok(val) => {
                            match htu.sender.send(Payload{
                                queue: String::from("/ws/2/grp/temp_humidity"),
                                bytes: val
                            }){
                                Ok(_) => {},
                                Err(err) => {
                                    error!("Failed to send message to main thread: {}", err);
                                },
                            }
                        }
                        Err(err) => {
                            error!("Failed to serialize the temp_humidity value: {}", err);
                        }
                    };
                }
            }
        });
    }
}
// Copyright © 2019 Cormac O'Brien.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
extern crate byteorder;
mod instruction;
mod types;
use std::fs::File;
use std::io::{BufRead, Read};
use std::ops::Deref;
use std::path::Path;
use byteorder::{BigEndian, ReadBytesExt};
use instruction::Instruction;
use types::{Addr, RegId, Val};
/// Built-in hexadecimal font sprites 0-F, five bytes per glyph (one byte per
/// row, high nibble used), as defined by the CHIP-8 specification.
const DIGITS: [[u8; 5]; 16] = [
    [0xF0, 0x90, 0x90, 0x90, 0xF0],
    [0x20, 0x60, 0x20, 0x20, 0x70],
    [0xF0, 0x10, 0xF0, 0x80, 0xF0],
    [0xF0, 0x10, 0xF0, 0x10, 0xF0],
    [0x90, 0x90, 0xF0, 0x10, 0x10],
    [0xF0, 0x80, 0xF0, 0x10, 0xF0],
    [0xF0, 0x80, 0xF0, 0x90, 0xF0],
    [0xF0, 0x10, 0x20, 0x40, 0x40],
    [0xF0, 0x90, 0xF0, 0x90, 0xF0],
    [0xF0, 0x90, 0xF0, 0x10, 0xF0],
    [0xF0, 0x90, 0xF0, 0x90, 0x90],
    [0xE0, 0x90, 0xE0, 0x90, 0xE0],
    [0xF0, 0x80, 0x80, 0x80, 0xF0],
    [0xE0, 0x90, 0x90, 0x90, 0xE0],
    [0xF0, 0x80, 0xF0, 0x80, 0xF0],
    [0xF0, 0x80, 0xF0, 0x80, 0x80],
];
/// 64x32 monochrome CHIP-8 framebuffer; each cell holds 0 or 1.
struct Display {
    pixels: [[u8; 64]; 32],
}
impl Display {
    /// Creates a cleared (all-off) display.
    pub fn new() -> Display {
        Display {
            pixels: [[0; 64]; 32],
        }
    }
    /// Turns every pixel off.
    pub fn clear(&mut self) {
        self.pixels = [[0; 64]; 32];
    }
    /// XORs `sprite` onto the screen at (x, y), one byte per row with the
    /// MSB leftmost. Returns true if any lit pixel was erased — the CHIP-8
    /// "collision" flag that DRW stores into VF.
    pub fn draw(&mut self, x: Val, y: Val, sprite: &[u8]) -> bool {
        let mut collision = false;
        for (y_ofs, byte) in sprite.iter().enumerate() {
            let row = y.0 as usize + y_ofs;
            // don't draw off the screen
            if row >= self.pixels.len() {
                break;
            }
            for (x_ofs, shift) in (0..8).rev().enumerate() {
                let col = x.0 as usize + x_ofs;
                // don't draw off the screen
                if col >= self.pixels[0].len() {
                    break;
                }
                let bit = (byte >> shift) & 1u8;
                // A collision is a lit pixel that this XOR turns off, i.e.
                // pixel == 1 AND sprite bit == 1, checked at the pixel being
                // drawn. (The old code compared the fixed pixel [y][x]
                // against the raw bit, mis-reporting collisions.)
                if bit == 1 && self.pixels[row][col] == 1 {
                    collision = true;
                }
                self.pixels[row][col] ^= bit;
            }
            // (removed a stray debug println!("") that printed a blank line
            // for every sprite row)
        }
        collision
    }
    /// Dumps the framebuffer to stdout as '#'/' ' characters.
    pub fn print(&self) {
        for row in self.pixels.iter() {
            for pix in row.iter() {
                if *pix == 1 {
                    print!("#");
                } else {
                    print!(" ");
                }
            }
            println!("");
        }
    }
}
/// Program counter wrapper; CHIP-8 programs are loaded at 0x200.
struct Pc(Addr);
impl Pc {
    /// Starts at the conventional CHIP-8 entry point.
    pub fn new() -> Pc {
        Pc(Addr(0x200))
    }
    /// Current address.
    pub fn get(&self) -> Addr {
        self.0
    }
    /// Advances past the 2-byte instruction just executed.
    pub fn increment(&mut self) {
        (self.0).0 = (self.0).0 + 2;
    }
    /// Skips the next instruction when `cond` holds.
    pub fn increment_cond(&mut self, cond: bool) {
        if cond {
            (self.0).0 = (self.0).0 + 2;
        }
    }
    /// Targets `addr`, compensating for the unconditional increment that
    /// follows every executed instruction.
    pub fn jump(&mut self, addr: Addr) {
        (self.0).0 = addr.0 - 2; // pc will advance to correct address next cycle
    }
}
/// A single 8-bit general-purpose CHIP-8 register.
struct Reg(Val);
impl Reg {
    /// Zero-initialized register.
    pub fn new() -> Reg {
        Reg(Val(0))
    }
    /// Current value.
    pub fn get(&self) -> Val {
        self.0
    }
    /// Overwrites the register.
    pub fn set(&mut self, k: Val) {
        self.0 = k
    }
    /// Wrapping add; returns true on carry (overflow).
    pub fn add(&mut self, k: Val) -> bool {
        let (val, carry) = self.0.overflowing_add(*k);
        self.0 = Val(val);
        // Tail expression instead of the non-idiomatic `return carry;`.
        carry
    }
    /// Wrapping subtract; returns true on borrow (underflow).
    pub fn sub(&mut self, k: Val) -> bool {
        let (val, borrow) = self.0.overflowing_sub(*k);
        self.0 = Val(val);
        borrow
    }
    /// Logical shift right by one bit.
    pub fn shr(&mut self) {
        use std::ops::Shr;
        self.0 = Val(self.0.shr(1));
    }
    /// Logical shift left by one bit.
    pub fn shl(&mut self) {
        use std::ops::Shl;
        self.0 = Val(self.0.shl(1));
    }
}
impl Deref for Reg {
    type Target = Val;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Fixed-capacity CHIP-8 call stack. `sp` always indexes the next free slot.
struct Stack {
    stack: [Addr; 16],
    sp: usize,
}
impl Stack {
    /// Empty stack.
    pub fn new() -> Stack {
        Stack {
            stack: [Addr(0); 16],
            sp: 0,
        }
    }
    /// Pushes a return address.
    pub fn push(&mut self, addr: Addr) {
        self.stack[self.sp] = addr;
        self.sp += 1;
        // TODO: handle stack overflow
    }
    /// Pops the most recently pushed address.
    pub fn pop(&mut self) -> Addr {
        // `sp` points one PAST the top, so decrement before reading. The
        // original read `stack[sp]` first, returning a stale slot (and it
        // would panic on a full stack).
        self.sp -= 1;
        self.stack[self.sp]
        // TODO: handle stack underflow
    }
}
/// Full interpreter state for a CHIP-8 virtual machine.
struct Chip8 {
    mem: [u8; 4096],       // 4 KiB address space; ROMs load at 0x200
    register_v: [Reg; 16], // general registers V0-VF (VF doubles as carry/collision flag)
    register_i: Addr,      // address register I
    delay: u8,             // delay timer value
    sound: u8,             // sound timer value
    pc: Pc,                // program counter
    sp: u8,                // NOTE(review): never read in this file — `stack` keeps its own sp; confirm before removing
    stack: Stack,          // call stack
    display: Display,      // 64x32 framebuffer
}
impl Chip8 {
    /// Creates a machine with cleared memory, registers, and display.
    pub fn new() -> Chip8 {
        Chip8 {
            mem: [0; 4096],
            register_v: [
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
                Reg::new(),
            ],
            register_i: Addr(0),
            delay: 0,
            sound: 0,
            pc: Pc::new(),
            sp: 0,
            stack: Stack::new(),
            display: Display::new(),
        }
    }
    /// Shared borrow of general register Vx.
    pub fn reg(&self, id: RegId) -> &Reg {
        &self.register_v[id.0 as usize]
    }
    /// Mutable borrow of general register Vx.
    pub fn reg_mut(&mut self, id: RegId) -> &mut Reg {
        &mut self.register_v[id.0 as usize]
    }
    /// VF (register 15) doubles as the carry/borrow/collision flag.
    pub fn set_carry(&mut self, carry: bool) {
        self.register_v[15].set(Val(carry as u8));
    }
    /// Loads a ROM image at the standard program origin 0x200.
    pub fn load<P>(&mut self, path: P)
    where
        P: AsRef<Path>,
    {
        let mut f = File::open(path).unwrap();
        let len = f.metadata().unwrap().len() as usize;
        let dst = &mut self.mem[0x200..0x200 + len];
        f.read_exact(dst).unwrap();
    }
    /// Fetch/decode/execute loop; prints each opcode and the screen state.
    pub fn start(&mut self) {
        loop {
            let pc_val = self.pc.get().0 as usize;
            let mut slice = &self.mem[pc_val..pc_val + 2];
            let opcode = slice.read_u16::<BigEndian>().unwrap();
            println!("{:x}", opcode);
            let instr = Instruction::interpret(opcode).unwrap();
            println!("{:?}", instr);
            self.exec(instr);
            self.display.print();
        }
    }
    /// Executes one decoded instruction, then advances the program counter.
    pub fn exec(&mut self, instruction: Instruction) {
        use Instruction::*;
        match instruction {
            Sys { addr } => unimplemented!(),
            Cls => self.display.clear(),
            Ret => {
                // Resume AFTER the CALL that pushed this address; jumping to
                // the raw pushed address re-executed the CALL forever.
                let addr = self.stack.pop();
                self.pc.jump(Addr(addr.0 + 2));
            }
            Jump { addr } => self.pc.jump(addr),
            Call { addr } => {
                self.stack.push(self.pc.get());
                self.pc.jump(addr);
            }
            SeVal { x, k } => self.pc.increment_cond(self.reg(x).get() == k),
            SneVal { x, k } => self.pc.increment_cond(self.reg(x).get() != k),
            SeReg { x, y } => self
                .pc
                .increment_cond(self.reg(x).get() == self.reg(y).get()),
            LdVal { x, k } => self.reg_mut(x).set(k),
            AddVal { x, k } => {
                // 7xkk does not touch VF per the spec, so the carry is dropped.
                let _carry = self.reg_mut(x).add(k);
            }
            LdReg { x, y } => {
                let y_val = self.reg(y).get();
                self.reg_mut(x).set(y_val);
            }
            Or { x, y } => {
                let (x_val, y_val) = (self.reg(x).get(), self.reg(y).get());
                self.reg_mut(x).set(Val(*x_val | *y_val));
            }
            And { x, y } => {
                let (x_val, y_val) = (self.reg(x).get(), self.reg(y).get());
                self.reg_mut(x).set(Val(*x_val & *y_val));
            }
            Xor { x, y } => {
                let (x_val, y_val) = (self.reg(x).get(), self.reg(y).get());
                // Fixed: this arm previously AND-ed (copy/paste from And).
                self.reg_mut(x).set(Val(*x_val ^ *y_val));
            }
            AddReg { x, y } => {
                let y_val = self.reg(y).get();
                let carry = self.reg_mut(x).add(y_val);
                self.set_carry(carry);
            }
            Sub { x, y } => {
                let y_val = self.reg(y).get();
                let not_carry = !self.reg_mut(x).sub(y_val);
                self.set_carry(not_carry); // SUB sets carry flag if it does not underflow
            }
            Shr { x } => self.reg_mut(x).shr(),
            SubN { x, y } => {
                let x_val = self.reg(x).get();
                let not_carry = !self.reg_mut(y).sub(x_val);
                self.set_carry(not_carry);
            }
            Shl { x } => self.reg_mut(x).shl(),
            SneReg { x, y } => self
                .pc
                // Fixed: "skip if NOT equal" previously compared with `==`.
                .increment_cond(self.reg(x).get() != self.reg(y).get()),
            LdI { addr } => self.register_i = addr,
            JpOfs { addr } => {
                // Bnnn is a jump to nnn + V0; it previously (incorrectly)
                // loaded the I register instead of branching.
                let v0_val = self.reg(RegId(0)).get().0 as u16;
                self.pc.jump(Addr(addr.0 + v0_val));
            }
            Rnd { x, k } => unimplemented!(),
            Drw { x, y, n } => {
                let sprite =
                    &self.mem[self.register_i.0 as usize..self.register_i.0 as usize + n as usize];
                let collision = self
                    .display
                    .draw(self.reg(x).get(), self.reg(y).get(), sprite);
                self.set_carry(collision);
            }
            Skp { x } => unimplemented!(),
            Sknp { x } => unimplemented!(),
            Dt { x } => {
                let val = Val(self.delay);
                self.reg_mut(x).set(val);
            }
            LdKey { x } => unimplemented!(),
            LdDt { x } => self.delay = self.reg(x).get().0,
            LdSt { x } => self.sound = self.reg(x).get().0,
            AddI { x } => self.register_i = Addr(self.register_i.0 + self.reg(x).get().0 as u16),
            LdDigit { x } => unimplemented!(),
            Bcd { x } => {
                // Fx33: store the decimal digits of Vx at I, I+1, I+2.
                let mut x_val = self.reg(x).get().0;
                let ones = x_val % 10;
                x_val /= 10;
                let tens = x_val % 10;
                x_val /= 10;
                let hundreds = x_val % 10;
                self.mem[self.register_i.0 as usize] = hundreds;
                self.mem[self.register_i.0 as usize + 1] = tens;
                self.mem[self.register_i.0 as usize + 2] = ones;
            }
            Store { x } => {
                // Fx55 stores V0 through Vx *inclusive*; the exclusive range
                // previously dropped Vx itself.
                for k in 0..=x.0 {
                    let id = RegId(k);
                    self.mem[self.register_i.0 as usize + k as usize] = self.reg(id).get().0;
                }
            }
            Read { x } => {
                // Fx65 loads V0 through Vx *inclusive* (was off by one).
                for k in 0..=x.0 {
                    let id = RegId(k);
                    let val = self.mem[self.register_i.0 as usize + k as usize];
                    self.reg_mut(id).set(Val(val));
                }
            }
        }
        self.pc.increment();
    }
}
/// Boots the emulator with a demo ROM and runs until killed.
fn main() {
    let mut emulator = Chip8::new();
    emulator.load("roms/programs/Chip8 Picture.ch8");
    emulator.start();
}
|
use std::io;
use std::str::FromStr;
use crate::base::Part;
/// Part 1: the value immediately after 2017 in the spinlock buffer.
pub fn part1(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::One)
}
/// Part 2: the value after 0 once 50 million insertions have been made.
pub fn part2(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::Two)
}
/// Reads the step length from `r` and solves the requested part.
fn solve(r: &mut dyn io::Read, part: Part) -> Result<String, String> {
    let mut input = String::new();
    r.read_to_string(&mut input).map_err(|e| e.to_string())?;
    let length = parse_input(input.trim());
    match part {
        Part::One => {
            let (vec, final_position) = build_ring_buffer(2017, length);
            // The buffer is circular: wrap around in case 2017 landed in the
            // last slot (plain `final_position + 1` would panic there).
            Ok(vec[(final_position + 1) % vec.len()].to_string())
        }
        Part::Two => Ok(value_after_zero(50_000_000, length).to_string()),
    }
}
/// Parses the puzzle input: a single non-negative integer step length.
/// Panics on malformed input, like the original.
fn parse_input(input: &str) -> usize {
    // `str::parse` dispatches to the same `usize::from_str` implementation.
    input.parse().unwrap()
}
/// Simulates the spinlock: inserts 1..=final_value into a growing ring
/// buffer, stepping `length` positions forward before each insertion.
/// Returns the final buffer and the index of the last value inserted.
fn build_ring_buffer(final_value: usize, length: usize) -> (Vec<usize>, usize) {
    let mut buffer = Vec::with_capacity(final_value + 1);
    buffer.push(0);
    let mut position = 0;
    for value in 1..=final_value {
        // The buffer currently holds `value` elements, so the step wraps mod `value`.
        position = ((position + length) % value) + 1;
        buffer.insert(position, value);
    }
    (buffer, position)
}
/// Returns the value sitting immediately after 0 once `final_value`
/// insertions have been performed, without materializing the buffer.
///
/// Every insertion lands at index >= 1, so 0 never moves from index 0 and
/// it suffices to remember the last value inserted at index 1. (The old
/// `index_for_zero` bookkeeping was dead code for the same reason.)
fn value_after_zero(final_value: usize, length: usize) -> usize {
    let mut value_after_zero = 0;
    let mut current_position = 0;
    // `..=` matches `build_ring_buffer`: the puzzle inserts the values
    // 1 through final_value inclusive. The original `1..final_value`
    // skipped the final insertion (off by one).
    for i in 1..=final_value {
        let index_to_insert = ((current_position + length) % i) + 1;
        if index_to_insert == 1 {
            value_after_zero = i;
        }
        current_position = index_to_insert;
    }
    value_after_zero
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test;
    mod part1 {
        use super::*;
        // AoC 2017 day 17 worked example: step size 3 -> 638 follows 2017.
        test!(example, "3", "638", part1);
        test!(actual, file "../../../inputs/2017/17", "1311", part1);
    }
    mod part2 {
        use super::*;
        test!(actual, file "../../../inputs/2017/17", "39170601", part2);
    }
}
|
use regex::Regex;
use std::collections::HashSet;
use std::env;
use std::fs;
fn main() {
let args: Vec<String> = env::args().collect();
let filename = &args[1];
let contents = fs::read_to_string(filename).expect("Something went wrong reading the file");
let lines: Vec<&str> = contents.lines().collect();
let mut bags = Vec::new();
for line in lines {
let parent_and_children: Vec<&str> = line.split(" contain ").collect();
let temp_parent = parent_and_children[0];
let mut parent = "";
let descriptor = Regex::new(r"(\w+ \w+) bags{0,1}").unwrap();
match descriptor.captures(temp_parent) {
Some(cap) => {
parent = cap.get(1).unwrap().as_str();
},
None => (),
};
let children_line = &parent_and_children[1][..parent_and_children[1].len()-1];
let children: Vec<&str> = children_line.split(", ").collect();
let mut temp_children = Vec::new();
for child in children {
let re = Regex::new(r"\d+ (\w+ \w+) bags{0,1}").unwrap();
match re.captures(child) {
Some(cap) => {
temp_children.push(cap.get(1).unwrap().as_str())
},
None => (),
};
}
let mut bag = Vec::new();
bag.push(vec![vec![parent], temp_children]);
bags.push(bag);
}
let mut check_bags: Vec<&str> = Vec::new();
for bag in &bags {
let children = &bag[0][1];
if children.contains(&"shiny gold") {
// println!("{:#?} has shiny gold", bag);
check_bags.push(bag[0][0][0]);
}
}
let mut next_check: Vec<&str> = Vec::new();
let mut i = 0;
while check_bags.len() > 0 {
let check_bag = check_bags[i];
for bag in &bags {
let children = &bag[0][1];
if children.contains(&check_bag) {
// println!("{:#?} has {:#?}", bag, check_bag);
next_check.push(bag[0][0][0]);
}
}
println!("{}", next_check.len());
if i == check_bags.len()-1 {
check_bags.clear();
check_bags.append(&mut next_check);
next_check.clear();
i = 0;
}
i += 1;
}
// println!("total bags: {}", total_bags);
}
|
use super::{
apis::alpha_vantage,
clock, config, strategies,
trading::{Account, Broker, PriceData},
};
/// Minimal in-memory broker for backtests: tracks only a fixed capital
/// pool and fakes order execution (see the `Broker` impl below).
pub struct BacktestBroker {
    capital: f64,
}
impl Broker for BacktestBroker {
    /// Backtests use a fixed capital pool; the timestamp is ignored.
    fn capital(&mut self, _time: clock::LocalDateTime) -> f64 {
        self.capital
    }
    /// Simulated trades settle instantly, so nothing is ever unsettled.
    fn unsettled_cash(&self) -> f64 {
        0.0
    }
    /// Open on weekdays between 09:30 (inclusive) and 16:00 (exclusive).
    fn is_market_open(&self, datetime: clock::LocalDateTime) -> bool {
        // `%u` formats the ISO weekday number (Mon=1 .. Sun=7).
        let weekday: i32 = datetime.date().format("%u").to_string().parse().unwrap();
        let now = datetime.time();
        let opens = clock::Time::from_hms(9, 30, 0);
        let closes = clock::Time::from_hms(16, 0, 0);
        weekday < 6 && now >= opens && now < closes
    }
    /// Sell orders are a no-op in the backtest broker.
    fn sell_order(
        &mut self,
        _ticker: &String,
        _shares: i32,
        _price: f64,
        _time: clock::LocalDateTime,
    ) {
    }
    /// Buy orders always "fill"; the whole capital pool is reported back.
    fn buy_order(
        &mut self,
        _ticker: &String,
        _shares: i32,
        _price: f64,
        _time: clock::LocalDateTime,
    ) -> Option<f64> {
        Some(self.capital)
    }
}
/// Runs the SMA-crossover strategy over each ticker's recent minute-bar
/// history and prints a per-ticker result summary.
pub fn run_backtest(tickers: &[String], env: &config::Env, verbose: bool) {
    for ticker in tickers {
        // Fresh $1000 paper account per ticker so results are comparable.
        let mut account = Account::new(BacktestBroker { capital: 1000.0 });
        let mut price_data = PriceData::new(alpha_vantage::client(&env));
        if let Some(candles) = price_data.history(ticker, 180, "1:minute") {
            let mut strategy = strategies::SmaCrossover::new(ticker, candles);
            // let mut strategy = strategies::Sma9CrossesSma180::new(ticker, candles);
            strategy.execute(&mut price_data, &mut account);
            log_results(ticker, account, verbose);
        } else {
            // NOTE(review): one failed history fetch aborts the remaining
            // tickers — presumably deliberate (API rate limiting?), but
            // confirm `continue` isn't what was intended here.
            break;
        }
    }
}
/// Prints a per-ticker summary (win/loss counts, win %, P/L sums, net).
/// With `verbose`, every individual position is printed first.
fn log_results(ticker: &String, account: Account<BacktestBroker>, verbose: bool) {
    let mut winning_trades = Vec::new();
    let mut losing_trades = Vec::new();
    for position in &account.positions {
        if position.total_return() >= 0.0 {
            winning_trades.push(position);
        } else {
            losing_trades.push(position);
        }
    }
    // Best winners first, worst losers first.
    winning_trades.sort_by(|a, b| b.total_return().partial_cmp(&a.total_return()).unwrap());
    losing_trades.sort_by(|a, b| a.total_return().partial_cmp(&b.total_return()).unwrap());
    let wins_sum: f64 = winning_trades.iter().map(|p| p.total_return()).sum();
    let losses_sum: f64 = losing_trades.iter().map(|p| p.total_return()).sum();
    // Guard the 0/0 case: a strategy with no trades previously printed
    // "NaN%" here.
    let win_percent = if account.positions.is_empty() {
        0.0
    } else {
        winning_trades.len() as f64 / account.positions.len() as f64 * 100.0
    };
    if verbose {
        for position in &account.positions {
            println!("Position {}", position);
        }
    }
    println!(
        "{:6}-- W/L/W%: {}/{}/{:.2}% - P/L: ${:.4}/${:.4} - Net: ${:.4}\n",
        ticker,
        winning_trades.len(),
        losing_trades.len(),
        win_percent,
        wins_sum,
        losses_sum,
        wins_sum + losses_sum,
    );
}
#[cfg(test)]
mod tests {} // TODO: the backtest module currently has no unit tests
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Localized, human-readable metadata for a resource-provider operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplay {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// A single operation supported by the resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplay>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<operation_detail::Properties>,
}
/// Nested property types for [`OperationDetail`].
pub mod operation_detail {
    use super::*;
    /// Extra operation properties (currently only the service specification).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
        pub service_specification: Option<properties::ServiceSpecification>,
    }
    /// Nested types for [`Properties`].
    pub mod properties {
        use super::*;
        /// Metric and log specifications exposed by the service.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct ServiceSpecification {
            #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
            pub metric_specifications: Vec<MetricSpecifications>,
            #[serde(rename = "logSpecifications", default, skip_serializing_if = "Vec::is_empty")]
            pub log_specifications: Vec<LogSpecifications>,
        }
    }
}
/// Description of a single metric emitted by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricSpecifications {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dimensions: Vec<MetricDimensions>,
}
/// Description of a single log category emitted by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogSpecifications {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "blobDuration", default, skip_serializing_if = "Option::is_none")]
    pub blob_duration: Option<String>,
}
/// A dimension (name/display-name pair) of a metric.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDimensions {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Paged list of operations, with an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationDetail>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Base ARM resource envelope (id/name/type/location/sku/tags).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    pub sku: ResourceSku,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// An Analysis Services server resource (base resource plus properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AnalysisServicesServer {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AnalysisServicesServerProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<ResourceSku>,
}
/// Collection wrapper returned by list operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AnalysisServicesServers {
    pub value: Vec<AnalysisServicesServer>,
}
/// Patch body for updating a server (all fields optional).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AnalysisServicesServerUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<ResourceSku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AnalysisServicesServerMutableProperties>,
}
/// Full server properties: the mutable set plus read-only state fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AnalysisServicesServerProperties {
    #[serde(flatten)]
    pub analysis_services_server_mutable_properties: AnalysisServicesServerMutableProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<analysis_services_server_properties::State>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<analysis_services_server_properties::ProvisioningState>,
    #[serde(rename = "serverFullName", default, skip_serializing_if = "Option::is_none")]
    pub server_full_name: Option<String>,
}
/// Enum types for [`AnalysisServicesServerProperties`].
pub mod analysis_services_server_properties {
    use super::*;
    /// Current operational state of the server.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Deleting,
        Succeeded,
        Failed,
        Paused,
        Suspended,
        Provisioning,
        Updating,
        Suspending,
        Pausing,
        Resuming,
        Preparing,
        Scaling,
    }
    /// Deployment/provisioning state of the server resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Deleting,
        Succeeded,
        Failed,
        Paused,
        Suspended,
        Provisioning,
        Updating,
        Suspending,
        Pausing,
        Resuming,
        Preparing,
        Scaling,
    }
}
/// SKU (name, tier, capacity) of a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceSku {
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<resource_sku::Tier>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub capacity: Option<i32>,
}
/// Enum types for [`ResourceSku`].
pub mod resource_sku {
    use super::*;
    /// Pricing/service tier of the SKU.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Tier {
        Development,
        Basic,
        Standard,
    }
}
/// Server properties that can be changed after creation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AnalysisServicesServerMutableProperties {
    #[serde(rename = "asAdministrators", default, skip_serializing_if = "Option::is_none")]
    pub as_administrators: Option<ServerAdministrators>,
    #[serde(rename = "backupBlobContainerUri", default, skip_serializing_if = "Option::is_none")]
    pub backup_blob_container_uri: Option<String>,
    #[serde(rename = "managedMode", default, skip_serializing_if = "Option::is_none")]
    pub managed_mode: Option<analysis_services_server_mutable_properties::ManagedMode>,
    #[serde(rename = "serverMonitorMode", default, skip_serializing_if = "Option::is_none")]
    pub server_monitor_mode: Option<analysis_services_server_mutable_properties::ServerMonitorMode>,
}
/// Enum types for [`AnalysisServicesServerMutableProperties`].
pub mod analysis_services_server_mutable_properties {
    use super::*;
    // NOTE(review): both enums were generated with no variants (the
    // underlying spec models them as integers) — values of these types
    // cannot currently be constructed or deserialized; confirm upstream.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ManagedMode {}
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ServerMonitorMode {}
}
/// List of server administrator identities.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServerAdministrators {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub members: Vec<String>,
}
/// Request body for the check-name-availability operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckServerNameAvailabilityParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Response of the check-name-availability operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckServerNameAvailabilityResult {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Status of a long-running operation (polled via its status URL).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationStatus {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorObject>,
}
/// SKUs available when creating a new resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SkuEnumerationForNewResourceResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<ResourceSku>,
}
/// SKUs available when scaling an existing resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SkuEnumerationForExistingResourceResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<SkuDetailsForExistingResource>,
}
/// A SKU paired with the resource type it applies to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SkuDetailsForExistingResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<ResourceSku>,
    #[serde(rename = "resourceType", default, skip_serializing_if = "Option::is_none")]
    pub resource_type: Option<String>,
}
/// Error envelope returned by the operations endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
/// Standard ARM error detail (recursively nested via `details`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// Typed extra information attached to an error.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
/// Service-specific error payload (code/message plus HTTP metadata).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorObject {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(rename = "subCode", default, skip_serializing_if = "Option::is_none")]
    pub sub_code: Option<i32>,
    #[serde(rename = "httpStatusCode", default, skip_serializing_if = "Option::is_none")]
    pub http_status_code: Option<i32>,
    #[serde(rename = "timeStamp", default, skip_serializing_if = "Option::is_none")]
    pub time_stamp: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
}
/// Top-level error wrapper.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorObject>,
}
|
pub mod animation;
pub mod collision;
pub mod input;
pub mod objects;
pub mod screen;
pub mod sprite;
pub mod text;
pub mod texture;
|
use serde::Deserialize;
use serde::Serialize;
/// Request body for subscribing to a push-notification channel
/// (NOTE(review): the field examples suggest a Google API watch channel —
/// confirm against the caller).
#[derive(Serialize, Deserialize, Debug)]
pub struct SubscribeBody {
    pub id: String, // Your channel ID.
    pub uri: String, // Ex: "https://mydomain.com/notifications". Your receiving URL.
    pub token: Option<String>, // Ex: "target=myApp-myCalendarChannelDest". (Optional) Your channel token.
    pub expiration: Option<u64>, // Ex: 1426325213000 // (Optional) Your requested channel expiration time.
}
|
extern crate dmbc;
extern crate exonum;
pub mod utils;
use exonum::blockchain::Transaction;
use exonum::crypto::{PublicKey, SecretKey};
use exonum::encoding::serialize::FromHex;
use dmbc::currency::assets::{AssetBundle, AssetId};
use dmbc::currency::transactions::builders::transaction;
#[test]
fn capi_transfer_fees_payer() {
    // `contents` is the hex output produced by the external
    // "transfer_fees_payer" helper (presumably the C API example —
    // see `utils::run`); `inputs` is its JSON fixture.
    let contents = utils::run("transfer_fees_payer");
    let inputs = utils::read_inputs("transfer_fees_payer").unwrap();
    let offer = inputs["offer"].as_object().unwrap();
    // Rebuild the same transaction natively from the fixture fields.
    let from = PublicKey::from_hex(offer["from"].as_str().unwrap());
    let to = PublicKey::from_hex(offer["to"].as_str().unwrap());
    let fees_payer = PublicKey::from_hex(offer["fees_payer"].as_str().unwrap());
    let mut builder = transaction::Builder::new()
        .keypair(from.unwrap(), SecretKey::zero())
        .tx_transfer_with_fees_payer()
        .recipient(to.unwrap())
        .fees_payer(fees_payer.unwrap(), SecretKey::zero())
        .amount(offer["amount"].as_u64().unwrap())
        .seed(offer["seed"].as_u64().unwrap())
        .data_info(offer["memo"].as_str().unwrap());
    for asset in offer["assets"].as_array().unwrap() {
        let id = AssetId::from_hex(asset["id"].as_str().unwrap());
        let amount = asset["amount"].as_u64().unwrap();
        let bundle = AssetBundle::new(id.unwrap(), amount);
        builder.add_asset_value_ref(bundle);
    }
    let tx = builder.build();
    let tx: Box<Transaction> = tx.into();
    // The two serializations must match byte-for-byte (compared as hex).
    let hex = utils::hex_string(tx.raw().body().to_vec());
    assert_eq!(contents, hex);
}
|
extern crate bincode;
extern crate num;
extern crate rand;
extern crate rustfft;
use self::bincode::rustc_serialize::{decode_from, encode_into};
use self::bincode::SizeLimit;
use self::rustfft::algorithm::Radix4;
use self::rustfft::num_complex::Complex;
use self::rustfft::num_traits::Zero;
use self::rustfft::FFT;
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::rc::Rc;
use event::{ControlEvent, Controllable};
use io::PitchConvert;
use types::*;
// Oversampling factor applied when sizing wavetables.
const OVERSAMPLING: usize = 2;
// Passed to `Radix4::new`: run the FFT in the inverse direction.
const INVERSE: bool = true;
// Passed to the `scale!` macro: normalize generated signals to [-1, 1].
const SCALE: bool = true;
/// Stores a period of a band-limited signal together with
/// the maximum frequency before aliasing occurs.
#[derive(RustcDecodable, RustcEncodable)]
pub struct Wavetable {
    /// The band-limited signal (one period, time domain).
    table: Vec<Float>,
    /// The maximum phase increment (frequency) that is handled by this table.
    /// The oscillators frequency is determined by the amount of phase increment
    /// in each sample tick.
    // TODO: rename to `max_frequency`?
    max_phase_incr: Float,
}
impl Wavetable {
    /// Returns a linearly interpolated sample for `phasor`.
    /// The phase is mapped onto a fractional table index; negative phases
    /// are wrapped into `[0, 1)` first.
    fn sample(&self, phasor: Float) -> Float {
        let len = self.table.len();
        let wrapped = if phasor < 0.0 { phasor + 1.0 } else { phasor };
        let idx = wrapped * len as Float;
        // Neighboring table entries; the upper index wraps at the end.
        let lo = idx.floor() as usize % len;
        let hi = idx.ceil() as usize % len;
        let frac = idx - lo as Float;
        self.table[lo] + frac * (self.table[hi] - self.table[lo])
    }
}
/// Implemented waveforms.
///
/// Harmonic content per variant is defined in `generate_spectrum`.
#[derive(PartialEq, Eq, Hash, Debug, Copy, Clone)]
pub enum Waveform {
    /// Pure sine: a single harmonic.
    Sine,
    /// Sawtooth: every harmonic at magnitude 1/n.
    Saw,
    /// Square: odd harmonics at magnitude 1/n.
    Square,
    /// Triangle: odd harmonics at magnitude 1/n², alternating sign.
    Tri,
    /// "Sharp" triangle: odd harmonics at magnitude 1/n, alternating sign.
    SharpTri,
    /// Every harmonic with randomized imaginary part.
    Random,
}
/// Normalizes the signal into a range of `[-1.0, 1.0]`.
///
/// When `$flag` is true, finds the largest absolute real component in
/// `$signal` and divides every real component by it. Imaginary parts are
/// left untouched.
macro_rules! scale {
    ($flag:expr, $signal:expr) => {
        if $flag {
            // Peak of the real components.
            let scale = $signal
                .iter()
                .fold(0.0, |acc: Float, val| acc.max(val.re.abs()));
            for sample in &mut $signal {
                sample.re *= scale.recip();
            }
        }
    };
}
/// Builds wavetables for each waveform and returns a `HashMap` containing them.
///
/// Tables are cached on disk (one bincode file per waveform, named after
/// the crate version) so repeated runs skip the FFT work; missing files
/// are regenerated via `build_wavetables` and written back.
pub fn generate_wavetables(
    fundamental_freq: Float,
    sample_rate: usize,
) -> HashMap<Waveform, Vec<Wavetable>> {
    let mut tables: HashMap<Waveform, Vec<Wavetable>> = HashMap::new();
    for waveform in &[
        Waveform::Saw,
        Waveform::Sine,
        Waveform::Square,
        Waveform::Tri,
        Waveform::SharpTri,
        Waveform::Random,
    ] {
        // Cache file name embeds the crate version so a stale on-disk
        // format is not reused across releases.
        let filename = format!(
            "ytterbium-{}-wavetable-{:?}.bin",
            env!("CARGO_PKG_VERSION"),
            waveform
        );
        let band_limited_table = {
            if let Ok(file) = File::open(&filename) {
                // Cache hit: deserialize the previously generated tables.
                let mut reader = BufReader::new(file);
                decode_from(&mut reader, SizeLimit::Infinite)
                    .unwrap_or_else(|_| panic!("could not decode wavetable: {}", filename))
            } else {
                // Cache miss: build the tables and persist them for next time.
                let band_limited_table = build_wavetables(*waveform, fundamental_freq, sample_rate);
                let file = File::create(&filename).unwrap_or_else(|_| {
                    panic!("could not create file for wavetable: {}", filename)
                });
                let mut writer = BufWriter::new(file);
                encode_into(&band_limited_table, &mut writer, SizeLimit::Infinite)
                    .unwrap_or_else(|_| panic!("could not encode wavetable: {}", filename));
                band_limited_table
            }
        };
        tables.insert(*waveform, band_limited_table);
    }
    tables
}
/// Builds the band-limited wavetables for the given waveform, fundamental frequency and
/// sample rate.
///
/// Produces a stack of tables: each iteration halves the harmonic count
/// and doubles the stored `max_phase_incr`, so higher-pitched notes can
/// select a table with fewer harmonics (less aliasing).
fn build_wavetables(
    waveform: Waveform,
    fundamental_freq: Float,
    sample_rate: usize,
) -> Vec<Wavetable> {
    let min_table_size = 64;
    // Phase increment bound for the first (fullest) table.
    let mut phase_incr = fundamental_freq * 2.0 / sample_rate as Float;
    let (mut harmonics, mut table_size) = match waveform {
        // A sine has a single harmonic; use one fixed-size table.
        Waveform::Sine => (1, 4096),
        _ => {
            // Number of harmonics that fit below Nyquist at twice the
            // fundamental — TODO confirm the intended bound.
            let harmonics = sample_rate / (2 * (2.0 * fundamental_freq) as usize);
            let table_size = harmonics.next_power_of_two() * 2 * OVERSAMPLING;
            (harmonics, table_size)
        }
    };
    let mut tables: Vec<Wavetable> = Vec::with_capacity((harmonics as Float).log2() as usize);
    // use sine if only 1 harmonic is left, otherwise the last table for waveforms with
    // only odd harmonics would be empty!
    while harmonics > 0 {
        // Build the band-limited spectrum, then inverse-FFT it into one
        // time-domain period.
        let fft = Radix4::new(table_size, INVERSE);
        let mut spectrum = vec![num::Complex::zero(); table_size];
        let mut signal = spectrum.clone();
        generate_spectrum(waveform, harmonics, &mut spectrum);
        fft.process(spectrum.as_mut_slice(), signal.as_mut_slice());
        scale!(SCALE, signal);
        tables.push(Wavetable {
            // Keep only the real part of the synthesized signal.
            table: signal.iter().map(|c| c.re).collect::<Vec<_>>(),
            max_phase_incr: phase_incr,
        });
        harmonics >>= 1; // half the number of harmonics
        phase_incr *= 2.0;
        // Shrink the table with the harmonic count, but keep a floor so
        // interpolation quality stays reasonable.
        let next_table_size = harmonics.next_power_of_two() * 2 * OVERSAMPLING;
        table_size = ::std::cmp::max(min_table_size, next_table_size);
    }
    tables
}
/// Generates a band-limited spectrum with given number of harmonics for the given waveform.
///
/// Each harmonic writes bin `i` and mirrors its negation into the
/// conjugate bin `table_size - i`.
fn generate_spectrum(waveform: Waveform, harmonics: usize, spectrum: &mut Vec<Complex<Float>>) {
    let table_size = spectrum.len();
    if harmonics == 1 {
        // Only one harmonic left: fall back to a pure sine regardless of
        // the requested waveform.
        let bin = Complex { re: 1.0, im: -1.0 };
        spectrum[1] = bin;
        spectrum[table_size - 1] = -bin;
        return;
    }
    match waveform {
        Waveform::Saw => {
            // Every harmonic, magnitude 1/i.
            for i in 1..harmonics {
                let bin = Complex {
                    re: 1.0,
                    im: -1.0 * (i as Float).recip(),
                };
                spectrum[i] = bin;
                spectrum[table_size - i] = -bin;
            }
        }
        Waveform::Square => {
            // Odd harmonics only, magnitude 1/i.
            for i in (1..harmonics).step_by(2) {
                let bin = Complex {
                    re: 1.0,
                    im: -1.0 * (i as Float).recip(),
                };
                spectrum[i] = bin;
                spectrum[table_size - i] = -bin;
            }
        }
        Waveform::Tri => {
            // Odd harmonics, magnitude 1/i², sign alternating every other
            // odd harmonic.
            for i in (1..harmonics).step_by(2) {
                let sign = if i % 4 == 1 { 1.0 } else { -1.0 };
                let bin = Complex {
                    re: 1.0,
                    im: -1.0 * ((i * i) as Float).recip() * sign,
                };
                spectrum[i] = bin;
                spectrum[table_size - i] = -bin;
            }
        }
        Waveform::SharpTri => {
            // Like `Tri` but with the slower 1/i roll-off.
            for i in (1..harmonics).step_by(2) {
                let sign = if i % 4 == 1 { 1.0 } else { -1.0 };
                let bin = Complex {
                    re: 1.0,
                    im: -1.0 * (i as Float).recip() * sign,
                };
                spectrum[i] = bin;
                spectrum[table_size - i] = -bin;
            }
        }
        Waveform::Random => {
            // Every harmonic, 1/i envelope with a randomized component.
            for i in 1..harmonics {
                let bin = Complex {
                    re: 1.0,
                    im: -rand::random::<Float>() * (i as Float).recip(),
                };
                spectrum[i] = bin;
                spectrum[table_size - i] = -bin;
            }
        }
        // Sine with more than one harmonic is handled by the early return.
        _ => {}
    }
}
/// A band-limited wavetable oscillator.
pub struct WavetableOsc {
    /// Phase advance per sample tick; set via `set_freq`.
    phase_incr: Float,
    /// Output sample rate in Hz.
    sample_rate: usize,
    /// Last note number received via `NoteOn` (presumably MIDI — confirm).
    key: u8,
    /// Current detune offset in Hz, derived from `Detune` events.
    detune_hz: Float,
    /// Phase offset added to the running phasor on every tick.
    phase: Float,
    /// Set when `set_phase` jumps by more than its delta; makes the next
    /// `tick` smooth the output.
    phase_changed: bool,
    /// Running phase accumulator.
    phasor: Float,
    transpose: i32, // transposition in octaves
    /// Previous output sample, used for smoothing after phase jumps.
    last_sample: Float,
    /// Currently selected waveform.
    waveform: Waveform,
    /// Control id; events carrying an id only apply when it matches.
    id: String,
    /// Shared key-to-frequency conversion table.
    pitch_convert: Rc<PitchConvert>,
    /// Shared band-limited wavetables, keyed by waveform.
    tables: Rc<HashMap<Waveform, Vec<Wavetable>>>,
}
impl WavetableOsc {
    /// Constructs a wavetable oscillator for the given sample rate.
    ///
    /// Starts silent (zero frequency), untransposed, with a sine waveform
    /// and an empty control id.
    pub fn new(
        sample_rate: usize,
        wavetables: Rc<HashMap<Waveform, Vec<Wavetable>>>,
        pitch_convert: Rc<PitchConvert>,
    ) -> Self {
        WavetableOsc {
            phase_incr: 0.0,
            sample_rate,
            key: 0,
            detune_hz: 0.0, // Hz
            phase: 0.0,
            phase_changed: false,
            phasor: 0.0,
            transpose: 0,
            last_sample: 0.0,
            waveform: Waveform::Sine,
            id: "".to_owned(),
            pitch_convert,
            tables: wavetables,
        }
    }
    /// Like `new`, but also assigns the oscillator's control id.
    pub fn with_id<S: Into<String>>(
        id: S,
        sample_rate: usize,
        wavetables: Rc<HashMap<Waveform, Vec<Wavetable>>>,
        pitch_convert: Rc<PitchConvert>,
    ) -> Self {
        let mut osc = WavetableOsc::new(sample_rate, wavetables, pitch_convert);
        osc.set_id(id);
        osc
    }
    /// Sets the oscillators frequency in Hz.
    ///
    /// Stored internally as a per-sample phase increment, scaled by the
    /// transposition (octaves, i.e. powers of two).
    pub fn set_freq(&mut self, freq: Float) {
        self.phase_incr = (freq * Float::powi(2.0, self.transpose)) / self.sample_rate as Float;
    }
    /// Sets the waveform to use.
    pub fn set_waveform(&mut self, waveform: Waveform) {
        self.waveform = waveform;
    }
    /// Sets the phase offset (in fractions of a period).
    ///
    /// Jumps larger than `PHASE_DELTA` flag the change so the next `tick`
    /// smooths the output and softens the discontinuity.
    pub fn set_phase(&mut self, phase: Float) {
        const PHASE_DELTA: Float = 0.01;
        if (self.phase - phase).abs() > PHASE_DELTA {
            self.phase_changed = true;
        }
        self.phase = phase;
    }
    /// Sets the control id used to match incoming events.
    pub fn set_id<S: Into<String>>(&mut self, id: S) {
        self.id = id.into();
    }
    /// Returns the next sample from the oscillator.
    pub fn tick(&mut self) -> Float {
        // Effective phase is the accumulator plus the offset, wrapped into
        // a single period.
        let phasor = (self.phasor + self.phase).fract();
        let mut sample = self.sample(phasor);
        if self.phase_changed {
            // Average with the previous output to soften the jump caused
            // by a large phase change.
            sample = (self.last_sample + sample) / 2.0;
            self.phase_changed = false;
        }
        self.phasor += self.phase_incr;
        self.last_sample = sample;
        sample
    }
    /// Returns the sample from the appropriate band-limited wavetable.
    ///
    /// Selects the first table whose `max_phase_incr` exceeds the current
    /// phase increment; falls back to the last (least detailed) table.
    fn sample(&self, phasor: Float) -> Float {
        let wavetables = self.tables.get(&self.waveform).unwrap();
        let mut idx = 0;
        for i in 0..wavetables.len() {
            idx = i;
            if wavetables[idx].max_phase_incr > self.phase_incr {
                break;
            }
        }
        let wavetable = &wavetables[idx];
        wavetable.sample(phasor)
    }
    /// Resets the state of the oscillator.
    fn reset(&mut self) {
        self.phase = 0.0;
        self.phasor = 0.0;
        self.last_sample = 0.0;
    }
}
impl Controllable for WavetableOsc {
    /// Dispatches a control event to this oscillator.
    ///
    /// Events carrying an `id` only apply when it matches this
    /// oscillator's id; `NoteOn` applies unconditionally.
    fn handle(&mut self, msg: &ControlEvent) {
        match *msg {
            ControlEvent::NoteOn { key, .. } => {
                self.key = key;
                let freq = self.pitch_convert.key_to_hz(key) + self.detune_hz;
                self.set_freq(freq);
            }
            ControlEvent::Waveform { ref id, waveform } => {
                if *id == self.id {
                    self.set_waveform(waveform);
                }
            }
            ControlEvent::Phase { ref id, phase } => {
                if *id == self.id {
                    self.set_phase(phase)
                }
            }
            ControlEvent::Transpose { ref id, transpose } => {
                if *id == self.id {
                    self.transpose = transpose
                }
            }
            ControlEvent::Detune { ref id, detune } => {
                if *id == self.id {
                    // `key` is a u8: the previous `self.key - 1` /
                    // `self.key + 1` underflowed at key 0 and overflowed
                    // at key 255 (panic in debug builds, wraparound in
                    // release). Saturate at the range ends instead.
                    let (low, current, high) = (
                        self.pitch_convert.key_to_hz(self.key.saturating_sub(1)),
                        self.pitch_convert.key_to_hz(self.key),
                        self.pitch_convert.key_to_hz(self.key.saturating_add(1)),
                    );
                    // linear approximation of cents
                    let cent = if detune < 0 {
                        (low - current)
                    } else {
                        (high - current)
                    } / 100.0;
                    self.detune_hz = Float::from(detune) * cent;
                    let detuned_freq = current + self.detune_hz;
                    self.set_freq(detuned_freq);
                }
            }
            _ => (),
        }
    }
}
/// Renders a 10 s exponential frequency sweep for every waveform and
/// writes each to a wav file for manual inspection (no assertions beyond
/// the writes succeeding).
#[test]
fn test_wavetable_sweep() {
    extern crate hound;
    const SAMPLE_RATE: usize = 48_000;
    const LOW_FREQ: Float = 20.0;
    let wavetables = Rc::new(generate_wavetables(LOW_FREQ, SAMPLE_RATE));
    let pitch_convert = Rc::new(PitchConvert::default());
    let mut osc = WavetableOsc::new(SAMPLE_RATE, wavetables, pitch_convert);
    let wave_spec = hound::WavSpec {
        channels: 1,
        sample_format: hound::SampleFormat::Int,
        sample_rate: SAMPLE_RATE as u32,
        bits_per_sample: 32,
    };
    for waveform in &[
        Waveform::Sine,
        Waveform::Saw,
        Waveform::Square,
        Waveform::Tri,
        Waveform::SharpTri,
        Waveform::Random,
    ] {
        let filename = format!(
            "ytterbium-{}-{:?}-sweep.wav",
            env!("CARGO_PKG_VERSION"),
            waveform
        );
        // An existing file will be overwritten.
        osc.reset();
        let mut writer = hound::WavWriter::create(filename, wave_spec).unwrap();
        // Scale unit-range samples to the full i32 range.
        let scale = ::std::i32::MAX as Float;
        osc.set_waveform(*waveform);
        let mut freq = LOW_FREQ;
        let num_samples = SAMPLE_RATE * 10;
        // Per-sample ratio for an exponential sweep from LOW_FREQ to
        // LOW_FREQ * 1000 over `num_samples` ticks.
        let multiplier = 1.0 + ((LOW_FREQ * 1000.0).ln() - (LOW_FREQ).ln()) / num_samples as Float;
        for _ in 0..num_samples {
            osc.set_freq(freq);
            let sample = osc.tick() * scale;
            writer.write_sample(sample as i32).unwrap();
            freq *= multiplier;
        }
        writer.finalize().unwrap();
    }
}
/// Compares oscillator output against reference sine/cosine signals for
/// phase offsets of 0, +90, 180 and -90 degrees (including a negative
/// phase value), asserting the summed squared error stays within EPSILON.
#[test]
fn test_wavetable_phase() {
    const SAMPLE_RATE: usize = 48_000;
    const LOW_FREQ: Float = 20.0;
    const EPSILON: f64 = 0.0001;
    let wavetables = Rc::new(generate_wavetables(LOW_FREQ, SAMPLE_RATE));
    let pitch_convert = Rc::new(PitchConvert::default());
    let mut osc = WavetableOsc::new(SAMPLE_RATE, wavetables, pitch_convert);
    // Low, mid, and just-below-Nyquist test frequencies.
    for freq in &[1.0, 1000.0, ((SAMPLE_RATE >> 1) - 1) as Float] {
        osc.reset();
        osc.set_freq(*freq);
        let num_samples = SAMPLE_RATE / *freq as usize; // one period
        let mut total_error = 0.0;
        let phase_incr = (2.0 * PI * freq) / SAMPLE_RATE as Float; // for reference sine
        for idx in 0..num_samples {
            let sample = osc.tick();
            let sine = Float::sin(phase_incr * idx as Float);
            let error = sine - sample;
            total_error += error * error; // squared error
        }
        assert_relative_eq!(total_error, 0.0, epsilon = EPSILON);
        // +90 degree: sin->cos
        osc.reset();
        total_error = 0.0;
        osc.set_phase(0.25);
        for idx in 0..num_samples {
            let sample = osc.tick();
            let cosine = Float::cos(phase_incr * idx as Float);
            let error = cosine - sample;
            total_error += error * error; // squared error
        }
        assert_relative_eq!(total_error, 0.0, epsilon = EPSILON);
        // 180 degree
        osc.reset();
        total_error = 0.0;
        osc.set_phase(0.5);
        for idx in 0..num_samples {
            let sample = osc.tick();
            let sine = Float::sin(phase_incr * idx as Float + PI);
            let error = sine - sample;
            total_error += error * error; // squared error
        }
        assert_relative_eq!(total_error, 0.0, epsilon = EPSILON);
        // -90 degree
        osc.reset();
        total_error = 0.0;
        osc.set_phase(-0.5);
        for idx in 0..num_samples {
            let sample = osc.tick();
            let sine = Float::sin(phase_incr * idx as Float - PI);
            let error = sine - sample;
            total_error += error * error; // squared error
        }
        assert_relative_eq!(total_error, 0.0, epsilon = EPSILON);
    }
}
/// Renders 10 s of phase-modulated output (carrier at 440 Hz, modulator
/// and modulation index swept exponentially) and writes it to a wav file
/// for manual inspection.
#[test]
fn test_wavetable_fm() {
    extern crate hound;
    const SAMPLE_RATE: usize = 48_000;
    const LOW_FREQ: Float = 20.0;
    let wavetables = Rc::new(generate_wavetables(LOW_FREQ, SAMPLE_RATE));
    let pitch_convert = Rc::new(PitchConvert::default());
    // Carrier and modulator share the same tables and tuning data; the
    // second construction takes the Rc values directly (the previous
    // trailing `.clone()`s were redundant).
    let mut carrier =
        WavetableOsc::new(SAMPLE_RATE, Rc::clone(&wavetables), Rc::clone(&pitch_convert));
    let mut modulator = WavetableOsc::new(SAMPLE_RATE, wavetables, pitch_convert);
    let num_samples = SAMPLE_RATE * 10;
    let wave_spec = hound::WavSpec {
        channels: 1,
        sample_format: hound::SampleFormat::Int,
        sample_rate: SAMPLE_RATE as u32,
        bits_per_sample: 32,
    };
    // Scale unit-range samples to the full i32 range.
    let scale = ::std::i32::MAX as Float;
    let filename = format!("ytterbium-{}-fm.wav", env!("CARGO_PKG_VERSION"));
    let carrier_freq = 440.0;
    carrier.set_freq(carrier_freq);
    // Sweep the modulator from carrier/8 up to carrier*8 exponentially.
    let mut modulator_freq = carrier_freq / 8.0;
    let freq_multiplier =
        1.0 + ((carrier_freq * 8.0).ln() - (modulator_freq).ln()) / num_samples as Float;
    modulator.set_freq(modulator_freq);
    // Modulation index grows exponentially by a factor of 1000 overall.
    let mut mod_index: Float = 0.01;
    let multiplier = 1.0 + ((mod_index * 1000.0).ln() - (mod_index).ln()) / num_samples as Float;
    let mut writer = hound::WavWriter::create(filename, wave_spec).unwrap();
    // Loop variable was previously named `idx` but never used.
    for _ in 0..num_samples {
        let mod_sample = modulator.tick();
        let sample = carrier.tick();
        carrier.set_phase(mod_sample * mod_index);
        writer.write_sample((sample * scale) as i32).unwrap();
        mod_index *= multiplier;
        modulator_freq *= freq_multiplier;
        modulator.set_freq(modulator_freq);
    }
    writer.finalize().unwrap();
}
|
/// Client for the GitHub GraphQL (v4) API.
pub(crate) mod v4 {
    use lazy_static::lazy_static;
    use reqwest::Client;
    use std::time::Duration;
    use super::ApiError;
    /// GraphQL endpoint of the GitHub v4 API.
    const GITHUB_API_V4_URL: &str = "https://api.github.com/graphql";
    /// User-Agent header value sent with every request.
    const USER_AGENT: &str = "giss";
    lazy_static! {
        /// Shared HTTP client: 10 s connect timeout, 15 s overall request
        /// timeout. (Previously spelled `std::time::Duration` for the
        /// second timeout despite `Duration` being imported above.)
        pub static ref CLIENT: Client = Client::builder()
            .connect_timeout(Duration::from_secs(10))
            .timeout(Duration::from_secs(15))
            .build()
            .unwrap();
    }
    /// POSTs `query` to the GitHub GraphQL endpoint with bearer `token`
    /// and deserializes the JSON response into `T`.
    ///
    /// Non-200 responses are logged and returned as
    /// `ApiError::Response(status)`; transport failures convert via
    /// `From<reqwest::Error>`.
    pub async fn request<T: serde::de::DeserializeOwned>(
        token: &str,
        query: crate::search::GraphQLQuery,
    ) -> Result<T, ApiError> {
        log::debug!("{}", query.variables);
        let request: reqwest::Request = CLIENT
            .post(GITHUB_API_V4_URL)
            .header("User-Agent", USER_AGENT)
            .bearer_auth(token)
            .json(&query)
            .build()
            .expect("Failed to build query");
        let response: reqwest::Response = CLIENT.execute(request).await?;
        let status_code: u16 = response.status().as_u16();
        match status_code {
            200 => {
                log::debug!("GitHub API: {}", status_code);
                Ok(response.json().await?)
            }
            _ => {
                // Consume the body for the error log before discarding it.
                let error: String = response.text().await?;
                log::error!("GitHub API: {} - {}", status_code, error);
                Err(ApiError::Response(status_code))
            }
        }
    }
}
impl From<reqwest::Error> for ApiError {
    /// Converts a transport-level error, preserving the HTTP status code
    /// when one is attached to the error.
    fn from(e: reqwest::Error) -> Self {
        e.status()
            .map(|code| ApiError::Response(code.as_u16()))
            .unwrap_or_else(|| ApiError::NoResponse(e.to_string()))
    }
}
/// Errors raised when talking to the GitHub API.
#[derive(Debug)]
pub enum ApiError {
    /// No HTTP response was received; carries the underlying error text.
    NoResponse(String),
    /// The server answered with a non-success HTTP status code.
    Response(u16),
}
|
// Copyright 2020 The RustExample Authors.
//
// Code is licensed under Apache License, Version 2.0.
use anyhow::Result;
/// User-Agent header sent with crawler requests; imitates a desktop
/// Chrome browser (presumably to avoid bot filtering — confirm).
pub static APP_USER_AGENT: &str = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 Safari/537.36";
/// Seconds to wait between successive URL fetches.
/// (rustfmt: added the missing space after the colon in `: u64`.)
pub static GET_URL_INTERVAL: u64 = 2;
/// Contract for a wallpaper index-site crawler.
/// (Method semantics inferred from names — confirm against the
/// implementations in `alphacoders`, `wallhaven`, `bing`, `nasa`.)
trait Index {
    /// Creates a crawler for `url` using URL `pattern`, scanning at most
    /// `max_page` pages.
    fn new(url: &'static str, pattern: &'static str, max_page: u32) -> Self;
    /// Builds the listing URL for `tag` at the given `page`.
    fn make_url(&self, tag: &'static str, page: u32) -> String;
    /// Derives a local file name for downloading `url` under `tag`.
    fn make_filename(&self, url: &'static str, tag: &'static str) -> String;
    /// Collects downloadable image URLs for `tag`.
    fn parse_urls(&self, tag: &'static str) -> Result<Vec<String>>;
}
pub mod alphacoders;
pub mod wallhaven;
pub mod bing;
pub mod nasa; |
use backend::{color, Color};
use widget::Widget;
use Event;
pub trait App {
type UI: Widget;
type MyEvent: Send;
fn widget(&self) -> Self::UI;
// TODO need an enum for this
fn handle_event(&mut self, Event<Self::MyEvent>) -> Result<(), Option<String>>;
fn style(&self, &str) -> (Option<Box<Color>>, Option<Box<Color>>) {
(None, None)
}
fn default_style(&self) -> Box<Color> {
Box::new(color::Reset)
}
}
|
use hlist::*;
use ty::{
Infer,
TmPre,
infer,
};
use ty::op::{
Eval,
Thunk,
};
/// Partially apply a thunk to an argument or evaluate a constant
/// (i.e., operation symbol)
///
/// Type-level function application: `Self` is an HList of argument
/// terms, `Fx` the applied operation, `FxDTy` its arity, and `Out` the
/// resulting term — all resolved at compile time via trait resolution.
#[rustc_on_unimplemented = "`{Fx}` cannot be applied to `{Self}`"]
pub trait AppEval<M, FxDTy, Fx>: TmPre<FxDTy> where
    Fx: Infer<Arity = FxDTy>,
    M: infer::mode::Mode,
{
    type Out;
}
// Constant mode: wrap the constant in a thunk with an empty argument
// list and delegate to thunk mode.
impl<
    Args,
    Cx: Infer<Arity = CxDTy>,
    CxDTy,
> AppEval<
    infer::mode::Constant,
    CxDTy,
    Cx
> for Args where
    Args: AppEval<infer::mode::Thunk, CxDTy, Thunk<Cx, HN>>,
    Args: TmPre<CxDTy>,
{
    type Out = <Args as AppEval<
        infer::mode::Thunk,
        CxDTy,
        Thunk<Cx, HN>>
    >::Out;
}
// No arguments left and the thunk's arity is exhausted (`HN`): the thunk
// is fully applied, so evaluate it.
impl<
    Fx: Infer<Arity = FxDTy>,
    FxDTy,
    Xs,
> AppEval<
    infer::mode::Thunk,
    HN,
    Thunk<Fx, Xs>,
> for HN where
    Thunk<Fx, Xs>
    : Infer<Arity = HN>,
    Xs: Eval<Fx>,
    Xs: TmPre<FxDTy, Out = HN>,
{
    type Out = <Xs as Eval<Fx>>::Out;
}
// No arguments left but the term still expects more (`HC<..>` arity):
// the result is the partially applied term itself.
impl<
    Tx: Infer<Arity = HC<TxDHTy, TxDTTy>>,
    TxDHTy,
    TxDTTy,
> AppEval<
    infer::mode::Thunk,
    HC<TxDHTy, TxDTTy>,
    Tx
> for HN {
    type Out = Tx;
}
// At least one argument remains: snoc the head argument onto the thunk's
// accumulated argument list and recurse on the tail.
impl<
    ArgsHTm,
    ArgsTTm,
    Fx: Infer<Arity = HC<FxDHTy, FxDTTy>>,
    FxDHTy,
    FxDTTy,
    TxDHTy,
    TxDTTy,
    Xs,
> AppEval<
    infer::mode::Thunk,
    HC<TxDHTy, TxDTTy>,
    Thunk<Fx, Xs>
> for HC<ArgsHTm, ArgsTTm> where
    Xs: TmPre<HC<FxDHTy, FxDTTy>, Out = HC<TxDHTy, TxDTTy>>,
    Xs: Snoc<ArgsHTm>,
    ArgsTTm
    : AppEval<
        infer::mode::Thunk,
        TxDTTy,
        Thunk<Fx, HS<Xs, ArgsHTm>>
    >,
    HC<ArgsHTm, ArgsTTm>
    : TmPre<HC<TxDHTy, TxDTTy>>,
{
    type Out = <ArgsTTm as AppEval<
        infer::mode::Thunk,
        TxDTTy,
        Thunk<Fx, HS<Xs, ArgsHTm>>>
    >::Out;
}
|
pub(crate) use ser::TychoSerializer;
pub(crate) mod ser;
pub(crate) mod seq;
pub(crate) mod variant;
pub(crate) mod map;
pub(crate) mod struct_;
|
//! Traits needed in order to implement a riddle compatible platform
//! system.
//!
//! If you are consuming `riddle` directly, there should be no need to use this module.
mod window;
pub use window::*;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.