text stringlengths 8 4.13M |
|---|
extern crate iron;
extern crate libc;
extern crate mio;
extern crate threadpool;
extern crate time;
extern crate url;
#[macro_use] extern crate lazy_static;
#[macro_use] extern crate log;
#[cfg(test)]
#[macro_use]
extern crate matches;
#[cfg(test)]
extern crate regex;
pub mod common;
pub mod ctrlc;
pub mod error;
pub mod storage;
pub mod tracker;
/// Re-exports the test-support helpers of the submodules so tests can pull
/// everything in with a single `crate::test_support::*` import.
#[cfg(test)]
pub mod test_support {
pub use super::common::test_support::*;
pub use super::common::model::test_support::*;
pub use super::storage::test_support::*;
}
|
use concierge_api_rs::PayloadMessage as GenericPayloadMessage;
use cs3_physics::{polygon::Polygon, vector::Vec2f};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use std::marker::PhantomData;
/// The generic payload message specialized to this crate's physics payload.
pub type PayloadMessage<'a> = GenericPayloadMessage<'a, PhysicsPayload<'a>>;
/// 8-bit-per-channel RGB triple.
type RgbColor = (u8, u8, u8);
/// Snapshot of one entity: identity, geometry, and current color.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct EntityDump {
pub id: Uuid,
/// Flattened so the polygon's fields serialize inline with `id`/`color`.
#[serde(flatten)]
pub polygon: Polygon,
pub color: RgbColor,
}
/// Minimal per-entity position update.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct EntityUpdate {
pub id: Uuid,
pub position: Vec2f,
}
/// Messages exchanged for the physics feature; serialized with a `type` tag
/// rendered in SCREAMING_SNAKE_CASE (e.g. `FETCH_ENTITIES`).
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "type", rename_all = "SCREAMING_SNAKE_CASE")]
pub enum PhysicsPayload<'a> {
/// Request a full dump of all entities.
FetchEntities,
/// Request only the current positions.
FetchPositions,
/// Request creation of a new entity.
SpawnEntity,
/// Toggle the color of entity `id`.
ToggleColor { id: Uuid },
/// Interact with entity `id`.
TouchEntity { id: Uuid },
/// Full dump of all entities.
EntityDump { entities: Vec<EntityDump> },
/// A newly created entity.
EntityNew { entity: EntityDump },
/// The listed entities were removed.
EntityDelete { ids: Vec<Uuid> },
/// Position updates for existing entities.
PositionDump { updates: Vec<EntityUpdate> },
/// Color change for one entity.
ColorUpdate { id: Uuid, color: RgbColor },
/// Carries no data; keeps the `'a` lifetime parameter used.
Reserved {
_phantom: PhantomData<&'a ()>
}
}
|
use crate::vector::Vector4ISize;
use derive_more::{Add, AddAssign, Sub, SubAssign};
use std::ops::Mul;
/// A point/offset on the quadric lattice; the four coordinates always sum
/// to zero (enforced by [`QuadricVector::new`]).
///
/// NOTE(review): only `Add` is visibly imported from `derive_more` above —
/// `AddAssign`, `Sub` and `SubAssign` appear to need importing as well for
/// these derives to resolve; confirm against the full file.
#[derive(
    Default,
    Clone,
    Copy,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Debug,
)]
pub struct QuadricVector(Vector4ISize);
impl QuadricVector {
    /// Builds a vector from four coordinates.
    ///
    /// # Panics
    /// Panics when the coordinates do not sum to zero — the invariant every
    /// point on this lattice must satisfy.
    pub fn new(x: isize, y: isize, z: isize, t: isize) -> Self {
        let coordinate_sum = x + y + z + t;
        if coordinate_sum != 0 {
            panic!(
                "Invalid QuadricVector values x = {}, y = {}, z = {}, t = {}",
                x, y, z, t
            );
        }
        Self(Vector4ISize { x, y, z, t })
    }
    /// Returns the unit step for `direction`.
    ///
    /// # Panics
    /// Panics when `direction >= NUM_DIRECTIONS`.
    pub fn direction(direction: usize) -> Self {
        DIRECTIONS[direction]
    }
    /// X coordinate.
    pub fn x(&self) -> isize {
        self.0.x
    }
    /// Y coordinate.
    pub fn y(&self) -> isize {
        self.0.y
    }
    /// Z coordinate.
    pub fn z(&self) -> isize {
        self.0.z
    }
    /// T coordinate.
    pub fn t(&self) -> isize {
        self.0.t
    }
    /// Lattice distance between two points: half the Manhattan distance,
    /// since each unit step (see `DIRECTIONS`) changes two coordinates.
    pub fn distance(self, other: Self) -> isize {
        let diff = self - other;
        let manhattan = diff.x().abs() + diff.y().abs() + diff.z().abs() + diff.t().abs();
        manhattan / 2
    }
    /// The adjacent point one step along `direction`.
    pub fn neighbor(&self, direction: usize) -> Self {
        Self::direction(direction) + *self
    }
    /// Iterates over every point at exactly `radius` from `self`.
    pub fn sphere_iter(&self, radius: usize) -> SphereIter {
        SphereIter::new(radius, *self)
    }
}
impl Mul<isize> for QuadricVector {
type Output = Self;
fn mul(self, rhs: isize) -> Self::Output {
Self(self.0 * rhs)
}
}
impl Mul<QuadricVector> for isize {
    type Output = QuadricVector;
    /// Commutative form: `k * v` delegates to `v * k`.
    fn mul(self, rhs: QuadricVector) -> Self::Output {
        rhs.mul(self)
    }
}
const NUM_DIRECTIONS: usize = 12;
// Don't use constructor and lazy_static so that the compiler can actually optimize the use
// of directions.
// Unit steps on the lattice: each entry has one +1 and one -1 coordinate,
// so the zero-sum invariant holds. Layout invariant (checked by the tests
// below): DIRECTIONS[i] and DIRECTIONS[i + 6] are opposites.
const DIRECTIONS: [QuadricVector; NUM_DIRECTIONS] = [
    QuadricVector(Vector4ISize {
        x: 1,
        y: -1,
        z: 0,
        t: 0,
    }),
    QuadricVector(Vector4ISize {
        x: 1,
        y: 0,
        z: -1,
        t: 0,
    }),
    QuadricVector(Vector4ISize {
        x: 0,
        y: 1,
        z: -1,
        t: 0,
    }),
    QuadricVector(Vector4ISize {
        x: 1,
        y: 0,
        z: 0,
        t: -1,
    }),
    QuadricVector(Vector4ISize {
        x: 0,
        y: 1,
        z: 0,
        t: -1,
    }),
    QuadricVector(Vector4ISize {
        x: 0,
        y: 0,
        z: 1,
        t: -1,
    }),
    // Second half: sign-flipped mirrors of the first six.
    QuadricVector(Vector4ISize {
        x: -1,
        y: 1,
        z: 0,
        t: 0,
    }),
    QuadricVector(Vector4ISize {
        x: -1,
        y: 0,
        z: 1,
        t: 0,
    }),
    QuadricVector(Vector4ISize {
        x: 0,
        y: -1,
        z: 1,
        t: 0,
    }),
    QuadricVector(Vector4ISize {
        x: -1,
        y: 0,
        z: 0,
        t: 1,
    }),
    QuadricVector(Vector4ISize {
        x: 0,
        y: -1,
        z: 0,
        t: 1,
    }),
    QuadricVector(Vector4ISize {
        x: 0,
        y: 0,
        z: -1,
        t: 1,
    }),
];
/// Iterator over one ring of a sphere; the ring's edges alternate between
/// two lengths, three edges of each around the ring.
struct SphereRingIter {
    // The two alternating edge lengths; indexed by `direction & 1`.
    edge_lengths: [usize; 2],
    // Index into SPHERE_RING_ITER_DIRECTIONS; 6 or more means exhausted.
    direction: usize,
    // The next point to yield.
    next: QuadricVector,
    // 1-based position along the current edge.
    edge_index: usize,
}
impl SphereRingIter {
    /// Starts a ring at `next` with the given alternating edge lengths.
    fn new(edge_lengths: [usize; 2], next: QuadricVector) -> Self {
        let mut direction = 0;
        // Drain all but last edge so that:
        // - the state is ready for next iteration
        // - the ring of size 0 case is handled correctly (it returns
        //   the first value, and no more then)
        // The bound is 5 (not 6) so the final direction always survives —
        // that is what makes a degenerate ring yield exactly one point.
        while direction < 5 && edge_lengths[direction & 1] == 0 {
            direction += 1;
        }
        Self {
            edge_lengths,
            direction,
            next,
            edge_index: 1,
        }
    }
    /// Previews the next point; `None` once every direction is exhausted.
    pub fn peek(&mut self) -> Option<&QuadricVector> {
        if self.direction < 6 {
            Some(&self.next)
        } else {
            None
        }
    }
}
// The six direction indices used to walk a ring, in traversal order.
const SPHERE_RING_ITER_DIRECTIONS: [usize; 6] = [0, 1, 2, 6, 7, 8];
impl Iterator for SphereRingIter {
    type Item = QuadricVector;
    fn next(&mut self) -> Option<Self::Item> {
        let edge_lengths = self.edge_lengths;
        let direction = self.direction;
        // `direction` indexes SPHERE_RING_ITER_DIRECTIONS; 6 means done.
        if direction < 6 {
            let next = self.next;
            self.next = next.neighbor(SPHERE_RING_ITER_DIRECTIONS[direction]);
            let ei = self.edge_index;
            // Edges alternate between the two lengths (`direction & 1`).
            if ei < edge_lengths[direction & 1] {
                self.edge_index = ei + 1;
            } else {
                // Edge finished: move to the next direction, skipping any
                // zero-length edges (same drain rule as in `new`).
                self.edge_index = 1;
                self.direction = direction + 1;
                while self.direction < 6 && edge_lengths[self.direction & 1] == 0 {
                    self.direction += 1;
                }
            }
            Some(next)
        } else {
            None
        }
    }
    // Exact: three edges of each alternating length, or the single point of
    // a degenerate (all-zero-edge) ring.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let [el1, el2] = self.edge_lengths;
        if el1 > 0 || el2 > 0 {
            let length = 3 * (el1 + el2);
            (length, Some(length))
        } else {
            (1, Some(1))
        }
    }
}
/// Iterator over all lattice points at exactly `radius` from a center,
/// produced ring by ring (see [`QuadricVector::sphere_iter`]).
pub struct SphereIter {
    // Sphere radius, fixed at construction.
    radius: usize,
    // Number of rings already completed.
    depth: usize,
    // Total number of ring transitions for this sphere.
    max_depth: usize,
    // Iterator over the current ring.
    iter: SphereRingIter,
}
impl SphereIter {
    /// Positions the first ring and precomputes the number of ring
    /// transitions the sphere requires.
    ///
    /// NOTE(review): the start point `center + (r/3)(d0 + d1) - r*d3` and
    /// `max_depth = 2*(r + r/3) + 1` encode this lattice's sphere geometry;
    /// they are validated by the radius 0/1/2/4 tests below rather than
    /// re-derived here.
    fn new(radius: usize, center: QuadricVector) -> Self {
        Self {
            radius,
            depth: 0,
            max_depth: 2 * (radius + (radius / 3)) + 1,
            iter: SphereRingIter::new(
                [radius % 3, 0],
                center
                    + (radius as isize / 3)
                        * (QuadricVector::direction(0) + QuadricVector::direction(1))
                    - radius as isize * QuadricVector::direction(3),
            ),
        }
    }
    /// Previews the next point without consuming it.
    pub fn peek(&mut self) -> Option<&QuadricVector> {
        self.iter.peek()
    }
}
impl Iterator for SphereIter {
    type Item = QuadricVector;
    fn next(&mut self) -> Option<Self::Item> {
        let res = self.iter.next();
        // Once the current ring is exhausted, set up the next one.
        if res.is_some() && self.iter.peek().is_none() {
            let depth = self.depth;
            self.depth = depth + 1;
            if depth < self.max_depth {
                let [el1, el2] = self.iter.edge_lengths;
                // Derive the next ring's edge lengths and start point from
                // the current phase of the walk. NOTE(review): the phase
                // boundaries (`radius / 3`, `radius % 3`) are specific to
                // this lattice and are validated by the sphere tests below.
                let (edge_lengths, next) = if depth < self.radius / 3 {
                    (
                        [el1 + 3, 0],
                        self.iter.next + QuadricVector::direction(6) + QuadricVector::direction(7),
                    )
                } else if el1 == self.radius && el2 < self.radius {
                    ([el1, el2 + 1], self.iter.next + QuadricVector::direction(5))
                } else if el1 > 0 {
                    ([el1 - 1, el2], self.iter.next + QuadricVector::direction(3))
                } else if el2 > self.radius % 3 {
                    (
                        [0, el2 - 3],
                        self.iter.next + QuadricVector::direction(1) + QuadricVector::direction(2),
                    )
                } else {
                    // Last ring finished: leave the exhausted iterator in place.
                    return res;
                };
                self.iter = SphereRingIter::new(edge_lengths, next);
            }
        }
        res
    }
    // Exact count of lattice points on the sphere surface.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let radius = self.radius;
        if radius > 0 {
            // Triangles minus shared vertices
            let mut exact = 4 * (1 + radius) * (2 + radius) - 12;
            if radius > 1 {
                // Squares interior
                exact += 6 * (radius - 1) * (radius - 1);
            }
            (exact, Some(exact))
        } else {
            (1, Some(1))
        }
    }
}
// --- Unit tests: construction and accessors ---------------------------------
#[test]
fn test_new_quadric_vector() {
    assert_eq!(
        QuadricVector::new(1, 2, -7, 4),
        QuadricVector(Vector4ISize {
            x: 1,
            y: 2,
            z: -7,
            t: 4
        })
    )
}
// `new` must reject coordinates that break the zero-sum invariant.
#[test]
#[should_panic]
fn test_new_invalid_quadric_vector() {
    QuadricVector::new(1, 2, -7, 42);
}
#[test]
fn test_quadric_vector_x() {
    assert_eq!(QuadricVector::new(1, 2, -7, 4).x(), 1);
}
#[test]
fn test_quadric_vector_y() {
    assert_eq!(QuadricVector::new(1, 2, -7, 4).y(), 2);
}
#[test]
fn test_quadric_vector_z() {
    assert_eq!(QuadricVector::new(1, 2, -7, 4).z(), -7);
}
#[test]
fn test_quadric_vector_t() {
    assert_eq!(QuadricVector::new(1, 2, -7, 4).t(), 4);
}
// --- Unit tests: arithmetic, distance, and direction invariants -------------
#[test]
fn test_quadric_vector_addition() {
    assert_eq!(
        QuadricVector::new(1, 2, -7, 4) + QuadricVector::new(-10, -20, 70, -40),
        QuadricVector::new(-9, -18, 63, -36)
    );
}
#[test]
fn test_quadric_vector_subtraction() {
    assert_eq!(
        QuadricVector::new(1, 2, -7, 4) - QuadricVector::new(-10, -20, 70, -40),
        QuadricVector::new(11, 22, -77, 44)
    );
}
// Distance must be symmetric.
#[test]
fn test_quadric_vector_distance() {
    let a = QuadricVector::new(1, 2, -7, 4);
    let b = QuadricVector::new(-2, -3, 7, -2);
    assert_eq!(a.distance(b), 14);
    assert_eq!(b.distance(a), 14);
}
// Every direction must satisfy the zero-sum invariant (`new` panics otherwise).
#[test]
fn test_directions_are_valid() {
    for v in DIRECTIONS.iter() {
        QuadricVector::new(v.x(), v.y(), v.z(), v.t());
    }
}
#[test]
fn test_all_directions_are_unique() {
    for dir1 in 0..NUM_DIRECTIONS - 1 {
        for dir2 in dir1 + 1..NUM_DIRECTIONS {
            assert_ne!(DIRECTIONS[dir1], DIRECTIONS[dir2])
        }
    }
}
// DIRECTIONS[i] + DIRECTIONS[i + 6] == 0: the second half mirrors the first.
#[test]
fn test_all_directions_have_opposite() {
    for dir in 0..NUM_DIRECTIONS / 2 {
        assert_eq!(
            DIRECTIONS[dir] + DIRECTIONS[dir + NUM_DIRECTIONS / 2],
            QuadricVector::default()
        );
    }
}
#[test]
fn test_neighbor() {
    assert_eq!(
        QuadricVector::new(-1, 0, 1, 0).neighbor(0),
        QuadricVector::new(0, -1, 1, 0)
    );
}
/// Drives `sphere_iter(radius)` from the origin and checks, in lockstep:
/// - `peek` always previews exactly the value `next` then yields,
/// - the yielded sequence equals `expected`, element by element,
/// - every yielded point lies at `radius` from the center,
/// - the iterator stays `None` once exhausted,
/// - `size_hint` is exact.
///
/// Takes `&[QuadricVector]` instead of `&Vec<QuadricVector>` (idiom fix);
/// existing `&vec![..]` callers still compile via deref coercion.
#[cfg(test)]
fn do_test_sphere_iter(radius: usize, expected: &[QuadricVector]) {
    let center = QuadricVector::default();
    let mut iter = center.sphere_iter(radius);
    let mut peeked = iter.peek().cloned();
    assert!(peeked.is_some());
    let mut i = 0;
    loop {
        let next = iter.next();
        assert_eq!(next, peeked);
        peeked = iter.peek().cloned();
        if i < expected.len() {
            assert_eq!(next, Some(expected[i]));
            assert_eq!(expected[i].distance(center), radius as isize);
        } else {
            assert_eq!(next, None);
            break;
        }
        i += 1;
    }
    assert_eq!(peeked, None);
    assert_eq!(iter.next(), None);
    assert_eq!(iter.size_hint(), (expected.len(), Some(expected.len())));
}
// Radius 0: the sphere degenerates to the center itself.
#[test]
fn test_sphere_iter0() {
    do_test_sphere_iter(0, &vec![QuadricVector::default()]);
}
// Radius 1: the twelve unit steps, in traversal order.
#[test]
fn test_sphere_iter1() {
    do_test_sphere_iter(
        1,
        &vec![
            QuadricVector::new(-1, 0, 0, 1),
            QuadricVector::new(0, -1, 0, 1),
            QuadricVector::new(0, 0, -1, 1),
            QuadricVector::new(-1, 0, 1, 0),
            QuadricVector::new(0, -1, 1, 0),
            QuadricVector::new(1, -1, 0, 0),
            QuadricVector::new(1, 0, -1, 0),
            QuadricVector::new(0, 1, -1, 0),
            QuadricVector::new(-1, 1, 0, 0),
            QuadricVector::new(0, 0, 1, -1),
            QuadricVector::new(1, 0, 0, -1),
            QuadricVector::new(0, 1, 0, -1),
        ],
    );
}
// Radius 2: expected values are the exact ring-by-ring traversal order.
#[test]
fn test_sphere_iter2() {
    do_test_sphere_iter(
        2,
        &vec![
            QuadricVector::new(-2, 0, 0, 2),
            QuadricVector::new(-1, -1, 0, 2),
            QuadricVector::new(0, -2, 0, 2),
            QuadricVector::new(0, -1, -1, 2),
            QuadricVector::new(0, 0, -2, 2),
            QuadricVector::new(-1, 0, -1, 2),
            QuadricVector::new(-2, 0, 1, 1),
            QuadricVector::new(-1, -1, 1, 1),
            QuadricVector::new(0, -2, 1, 1),
            QuadricVector::new(1, -2, 0, 1),
            QuadricVector::new(1, -1, -1, 1),
            QuadricVector::new(1, 0, -2, 1),
            QuadricVector::new(0, 1, -2, 1),
            QuadricVector::new(-1, 1, -1, 1),
            QuadricVector::new(-2, 1, 0, 1),
            QuadricVector::new(-2, 0, 2, 0),
            QuadricVector::new(-1, -1, 2, 0),
            QuadricVector::new(0, -2, 2, 0),
            QuadricVector::new(1, -2, 1, 0),
            QuadricVector::new(2, -2, 0, 0),
            QuadricVector::new(2, -1, -1, 0),
            QuadricVector::new(2, 0, -2, 0),
            QuadricVector::new(1, 1, -2, 0),
            QuadricVector::new(0, 2, -2, 0),
            QuadricVector::new(-1, 2, -1, 0),
            QuadricVector::new(-2, 2, 0, 0),
            QuadricVector::new(-2, 1, 1, 0),
            QuadricVector::new(-1, 0, 2, -1),
            QuadricVector::new(0, -1, 2, -1),
            QuadricVector::new(1, -1, 1, -1),
            QuadricVector::new(2, -1, 0, -1),
            QuadricVector::new(2, 0, -1, -1),
            QuadricVector::new(1, 1, -1, -1),
            QuadricVector::new(0, 2, -1, -1),
            QuadricVector::new(-1, 2, 0, -1),
            QuadricVector::new(-1, 1, 1, -1),
            QuadricVector::new(0, 0, 2, -2),
            QuadricVector::new(1, 0, 1, -2),
            QuadricVector::new(2, 0, 0, -2),
            QuadricVector::new(1, 1, 0, -2),
            QuadricVector::new(0, 2, 0, -2),
            QuadricVector::new(0, 1, 1, -2),
        ],
    );
}
// Radius 4: exercises all growth phases of the ring walk.
// Fix: removed a leftover debug `println!` that dumped the whole iterator to
// stdout on every run — tests should be silent on success.
#[test]
fn test_sphere_iter4() {
    do_test_sphere_iter(
        4,
        &vec![
            QuadricVector::new(-2, -1, -1, 4),
            QuadricVector::new(-1, -2, -1, 4),
            QuadricVector::new(-1, -1, -2, 4),
            QuadricVector::new(-4, 0, 0, 4),
            QuadricVector::new(-3, -1, 0, 4),
            QuadricVector::new(-2, -2, 0, 4),
            QuadricVector::new(-1, -3, 0, 4),
            QuadricVector::new(0, -4, 0, 4),
            QuadricVector::new(0, -3, -1, 4),
            QuadricVector::new(0, -2, -2, 4),
            QuadricVector::new(0, -1, -3, 4),
            QuadricVector::new(0, 0, -4, 4),
            QuadricVector::new(-1, 0, -3, 4),
            QuadricVector::new(-2, 0, -2, 4),
            QuadricVector::new(-3, 0, -1, 4),
            QuadricVector::new(-4, 0, 1, 3),
            QuadricVector::new(-3, -1, 1, 3),
            QuadricVector::new(-2, -2, 1, 3),
            QuadricVector::new(-1, -3, 1, 3),
            QuadricVector::new(0, -4, 1, 3),
            QuadricVector::new(1, -4, 0, 3),
            QuadricVector::new(1, -3, -1, 3),
            QuadricVector::new(1, -2, -2, 3),
            QuadricVector::new(1, -1, -3, 3),
            QuadricVector::new(1, 0, -4, 3),
            QuadricVector::new(0, 1, -4, 3),
            QuadricVector::new(-1, 1, -3, 3),
            QuadricVector::new(-2, 1, -2, 3),
            QuadricVector::new(-3, 1, -1, 3),
            QuadricVector::new(-4, 1, 0, 3),
            QuadricVector::new(-4, 0, 2, 2),
            QuadricVector::new(-3, -1, 2, 2),
            QuadricVector::new(-2, -2, 2, 2),
            QuadricVector::new(-1, -3, 2, 2),
            QuadricVector::new(0, -4, 2, 2),
            QuadricVector::new(1, -4, 1, 2),
            QuadricVector::new(2, -4, 0, 2),
            QuadricVector::new(2, -3, -1, 2),
            QuadricVector::new(2, -2, -2, 2),
            QuadricVector::new(2, -1, -3, 2),
            QuadricVector::new(2, 0, -4, 2),
            QuadricVector::new(1, 1, -4, 2),
            QuadricVector::new(0, 2, -4, 2),
            QuadricVector::new(-1, 2, -3, 2),
            QuadricVector::new(-2, 2, -2, 2),
            QuadricVector::new(-3, 2, -1, 2),
            QuadricVector::new(-4, 2, 0, 2),
            QuadricVector::new(-4, 1, 1, 2),
            QuadricVector::new(-4, 0, 3, 1),
            QuadricVector::new(-3, -1, 3, 1),
            QuadricVector::new(-2, -2, 3, 1),
            QuadricVector::new(-1, -3, 3, 1),
            QuadricVector::new(0, -4, 3, 1),
            QuadricVector::new(1, -4, 2, 1),
            QuadricVector::new(2, -4, 1, 1),
            QuadricVector::new(3, -4, 0, 1),
            QuadricVector::new(3, -3, -1, 1),
            QuadricVector::new(3, -2, -2, 1),
            QuadricVector::new(3, -1, -3, 1),
            QuadricVector::new(3, 0, -4, 1),
            QuadricVector::new(2, 1, -4, 1),
            QuadricVector::new(1, 2, -4, 1),
            QuadricVector::new(0, 3, -4, 1),
            QuadricVector::new(-1, 3, -3, 1),
            QuadricVector::new(-2, 3, -2, 1),
            QuadricVector::new(-3, 3, -1, 1),
            QuadricVector::new(-4, 3, 0, 1),
            QuadricVector::new(-4, 2, 1, 1),
            QuadricVector::new(-4, 1, 2, 1),
            QuadricVector::new(-4, 0, 4, 0),
            QuadricVector::new(-3, -1, 4, 0),
            QuadricVector::new(-2, -2, 4, 0),
            QuadricVector::new(-1, -3, 4, 0),
            QuadricVector::new(0, -4, 4, 0),
            QuadricVector::new(1, -4, 3, 0),
            QuadricVector::new(2, -4, 2, 0),
            QuadricVector::new(3, -4, 1, 0),
            QuadricVector::new(4, -4, 0, 0),
            QuadricVector::new(4, -3, -1, 0),
            QuadricVector::new(4, -2, -2, 0),
            QuadricVector::new(4, -1, -3, 0),
            QuadricVector::new(4, 0, -4, 0),
            QuadricVector::new(3, 1, -4, 0),
            QuadricVector::new(2, 2, -4, 0),
            QuadricVector::new(1, 3, -4, 0),
            QuadricVector::new(0, 4, -4, 0),
            QuadricVector::new(-1, 4, -3, 0),
            QuadricVector::new(-2, 4, -2, 0),
            QuadricVector::new(-3, 4, -1, 0),
            QuadricVector::new(-4, 4, 0, 0),
            QuadricVector::new(-4, 3, 1, 0),
            QuadricVector::new(-4, 2, 2, 0),
            QuadricVector::new(-4, 1, 3, 0),
            QuadricVector::new(-3, 0, 4, -1),
            QuadricVector::new(-2, -1, 4, -1),
            QuadricVector::new(-1, -2, 4, -1),
            QuadricVector::new(0, -3, 4, -1),
            QuadricVector::new(1, -3, 3, -1),
            QuadricVector::new(2, -3, 2, -1),
            QuadricVector::new(3, -3, 1, -1),
            QuadricVector::new(4, -3, 0, -1),
            QuadricVector::new(4, -2, -1, -1),
            QuadricVector::new(4, -1, -2, -1),
            QuadricVector::new(4, 0, -3, -1),
            QuadricVector::new(3, 1, -3, -1),
            QuadricVector::new(2, 2, -3, -1),
            QuadricVector::new(1, 3, -3, -1),
            QuadricVector::new(0, 4, -3, -1),
            QuadricVector::new(-1, 4, -2, -1),
            QuadricVector::new(-2, 4, -1, -1),
            QuadricVector::new(-3, 4, 0, -1),
            QuadricVector::new(-3, 3, 1, -1),
            QuadricVector::new(-3, 2, 2, -1),
            QuadricVector::new(-3, 1, 3, -1),
            QuadricVector::new(-2, 0, 4, -2),
            QuadricVector::new(-1, -1, 4, -2),
            QuadricVector::new(0, -2, 4, -2),
            QuadricVector::new(1, -2, 3, -2),
            QuadricVector::new(2, -2, 2, -2),
            QuadricVector::new(3, -2, 1, -2),
            QuadricVector::new(4, -2, 0, -2),
            QuadricVector::new(4, -1, -1, -2),
            QuadricVector::new(4, 0, -2, -2),
            QuadricVector::new(3, 1, -2, -2),
            QuadricVector::new(2, 2, -2, -2),
            QuadricVector::new(1, 3, -2, -2),
            QuadricVector::new(0, 4, -2, -2),
            QuadricVector::new(-1, 4, -1, -2),
            QuadricVector::new(-2, 4, 0, -2),
            QuadricVector::new(-2, 3, 1, -2),
            QuadricVector::new(-2, 2, 2, -2),
            QuadricVector::new(-2, 1, 3, -2),
            QuadricVector::new(-1, 0, 4, -3),
            QuadricVector::new(0, -1, 4, -3),
            QuadricVector::new(1, -1, 3, -3),
            QuadricVector::new(2, -1, 2, -3),
            QuadricVector::new(3, -1, 1, -3),
            QuadricVector::new(4, -1, 0, -3),
            QuadricVector::new(4, 0, -1, -3),
            QuadricVector::new(3, 1, -1, -3),
            QuadricVector::new(2, 2, -1, -3),
            QuadricVector::new(1, 3, -1, -3),
            QuadricVector::new(0, 4, -1, -3),
            QuadricVector::new(-1, 4, 0, -3),
            QuadricVector::new(-1, 3, 1, -3),
            QuadricVector::new(-1, 2, 2, -3),
            QuadricVector::new(-1, 1, 3, -3),
            QuadricVector::new(0, 0, 4, -4),
            QuadricVector::new(1, 0, 3, -4),
            QuadricVector::new(2, 0, 2, -4),
            QuadricVector::new(3, 0, 1, -4),
            QuadricVector::new(4, 0, 0, -4),
            QuadricVector::new(3, 1, 0, -4),
            QuadricVector::new(2, 2, 0, -4),
            QuadricVector::new(1, 3, 0, -4),
            QuadricVector::new(0, 4, 0, -4),
            QuadricVector::new(0, 3, 1, -4),
            QuadricVector::new(0, 2, 2, -4),
            QuadricVector::new(0, 1, 3, -4),
            QuadricVector::new(1, 1, 2, -4),
            QuadricVector::new(2, 1, 1, -4),
            QuadricVector::new(1, 2, 1, -4),
        ],
    );
}
|
// Register accessor aliases in the svd2rust style.
// NOTE(review): this module looks machine-generated — if an SVD source
// exists, regenerate rather than hand-editing.
#[doc = "Reader of register LE_RF_TEST_MODE_EXT"]
pub type R = crate::R<u32, super::LE_RF_TEST_MODE_EXT>;
#[doc = "Writer for register LE_RF_TEST_MODE_EXT"]
pub type W = crate::W<u32, super::LE_RF_TEST_MODE_EXT>;
#[doc = "Register LE_RF_TEST_MODE_EXT `reset()`'s with value 0"]
impl crate::ResetValue for super::LE_RF_TEST_MODE_EXT {
    type Type = u32;
    // Value the register takes after `reset()`.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DTM_PACKET_LENGTH`"]
pub type DTM_PACKET_LENGTH_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DTM_PACKET_LENGTH`"]
pub struct DTM_PACKET_LENGTH_W<'a> {
    // Borrows the register writer so field writes compose into one value.
    w: &'a mut W,
}
impl<'a> DTM_PACKET_LENGTH_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[doc = r"# Safety"]
    #[doc = r"The caller must ensure `value` is a valid bit pattern for this field."]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits [7:0] of the register image, then splice in `value`.
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - DTM TX packet length. Bits \\[7:6\\] are accessible only when DLE is enabled"]
    #[inline(always)]
    pub fn dtm_packet_length(&self) -> DTM_PACKET_LENGTH_R {
        DTM_PACKET_LENGTH_R::new((self.bits & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - DTM TX packet length. Bits \\[7:6\\] are accessible only when DLE is enabled"]
    #[inline(always)]
    pub fn dtm_packet_length(&mut self) -> DTM_PACKET_LENGTH_W {
        DTM_PACKET_LENGTH_W { w: self }
    }
}
|
// PROJECT - kvstore: store key value pairs in a file format of our choice.
use std::collections::HashMap;
// Entry point: read a key and a value from the CLI, write them to `kv.db`,
// then load the database back as a sanity check.
fn main() {
    // Skip the first argument (the program path) — we only want the
    // user-supplied key and value.
    let mut arguments = std::env::args().skip(1);
    let key: String = arguments.next().expect("key not found, please add a key.");
    let value: String = arguments.next().expect("value not found, please add a value.");
    println!("The key is {}, the value is {}", key, value);
    // Serialize the pair in the same `key = value` line format that
    // `Database::new` parses.
    let contents = format!("{} = {}\n", key, value);
    // `fs::write` replaces the whole file, so each run stores exactly one
    // pair. It returns `Result<()>` — there is no value worth binding (the
    // old code kept the unit result in an unused variable).
    std::fs::write("kv.db", contents).expect("something didnt go right in std::fs::write");
    // Parse the file back; underscore-prefixed because the database is not
    // used further yet.
    let _database = Database::new().expect("database::new() crashed");
}
// --- Database Struct and Impl
/// In-memory key/value store backed by the `kv.db` file.
struct Database {
    // All pairs parsed from `kv.db`.
    map: HashMap<String, String>,
}
impl Database {
    /// Loads the database from `kv.db`.
    ///
    /// Each line is split on the first `=` and both halves are trimmed, so
    /// the `key = value` lines written by `main` round-trip to the original
    /// `key`/`value` strings.
    ///
    /// # Errors
    /// Returns any I/O error from reading `kv.db` (e.g. file missing).
    ///
    /// # Panics
    /// Panics on a line that contains no `=` separator.
    fn new() -> Result<Database, std::io::Error> {
        let mut map = HashMap::new();
        let contents = std::fs::read_to_string("kv.db")?;
        for line in contents.lines() {
            let mut chunks = line.splitn(2, "=");
            let key = chunks.next().expect("no key");
            let value = chunks.next().expect("no value.");
            // BUG FIX: `main` writes "key = value" with spaces around `=`,
            // but the old parser kept them, storing "key " / " value".
            // Trim both halves so lookups by the original key succeed.
            map.insert(key.trim().to_owned(), value.trim().to_owned());
        }
        Ok(Database { map })
    }
}
// *** THIS IS WHAT THE QUESTION MARK EXTENDS TO KINDA ***
// // read the kv.db file
// let contents = match std::fs::read_to_string( "kv.db") {
// Ok(c) => c,
// Err(error) => {
// return Err(error);
// }
// };
|
use std::fmt;
use std::io;
use std::mem::{self, MaybeUninit};
use serde::{Serialize, Serializer};
#[cfg(feature = "json")]
use serde_json::Serializer as JsonSerializer;
use crate::{Argument, FormatType};
// Aliases for the two serde_json serializer flavors used by `Formatter`.
#[cfg(feature = "json")]
type CompactJsonSerializer<W> = JsonSerializer<W, serde_json::ser::CompactFormatter>;
#[cfg(feature = "json")]
type PrettyJsonSerializer<W> = JsonSerializer<W, serde_json::ser::PrettyFormatter<'static>>;
/// Formatting callback with the same shape as `fmt::Display::fmt` and
/// friends, generic over the formatted type.
pub type FormatFn<T> = fn(&T, fmt: &mut fmt::Formatter) -> fmt::Result;
/// Type-erased pairing of a data pointer with its formatting function.
/// Only constructed by [`FmtProxy::new`], which guarantees the two match.
struct FmtProxy<'a> {
    // Erased `&T`; only ever passed back to `func`.
    data: &'a (),
    // Erased `fn(&T, ...)`; only ever called with `data`.
    func: FormatFn<()>,
}
impl<'a> FmtProxy<'a> {
    /// Erases `data`'s type, pairing it with its formatting function.
    ///
    /// SAFETY: the `&T` is cast to `&()` and the `fn(&T, ...)` pointer is
    /// transmuted to `fn(&(), ...)`. This is only sound because `Display`
    /// for `FmtProxy` calls `func` exclusively with the matching `data`
    /// pointer, so the callee always receives the `&T` it was written for.
    /// NOTE(review): this also relies on the two fn-pointer signatures
    /// having identical ABI — confirm against current unsafe-code
    /// guidelines.
    pub fn new<T>(data: &'a T, func: FormatFn<T>) -> Self {
        unsafe {
            FmtProxy {
                data: &*(data as *const T as *const ()),
                func: std::mem::transmute(func),
            }
        }
    }
}
impl fmt::Display for FmtProxy<'_> {
    /// Invokes the stored formatting function on the erased data pointer.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let callback = self.func;
        callback(self.data, fmt)
    }
}
/// Errors produced while formatting an argument.
#[derive(Debug)]
pub enum FormatError {
    /// The value cannot be rendered with the requested format type.
    Type(FormatType),
    /// Serialization failed; only the stringified message is retained.
    Serde(String),
    /// Writing to the underlying writer failed.
    Io(io::Error),
}
impl fmt::Display for FormatError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
FormatError::Type(format) => write!(f, "cannot format as {}", format),
FormatError::Serde(error) => write!(f, "{}", error),
FormatError::Io(error) => write!(f, "{}", error),
}
}
}
// Marker impl: the message travels via `Display`; no `source()` override.
impl std::error::Error for FormatError {}
impl serde::ser::Error for FormatError {
    /// Serde's hook for custom errors; only the rendered message is kept,
    /// so the concrete error type is lost at this point.
    fn custom<T>(msg: T) -> Self
    where
        T: fmt::Display,
    {
        FormatError::Serde(msg.to_string())
    }
}
// Lets `?` / `map_err(Into::into)` lift serde_json errors; stringified, so
// the original error value is not retained.
#[cfg(feature = "json")]
impl From<serde_json::Error> for FormatError {
    fn from(error: serde_json::Error) -> Self {
        FormatError::Serde(error.to_string())
    }
}
/// Current backing of a [`Formatter`]: either the bare writer or (with the
/// `json` feature) a serde_json serializer wrapping it. Variants are
/// switched lazily by `convert`.
enum FormatterTarget<W> {
    Write(W),
    #[cfg(feature = "json")]
    Compact(CompactJsonSerializer<W>),
    #[cfg(feature = "json")]
    Pretty(PrettyJsonSerializer<W>),
}
impl<W> FormatterTarget<W>
where
    W: io::Write,
{
    /// Wraps a plain writer with no serializer attached.
    pub fn new(write: W) -> Self {
        FormatterTarget::Write(write)
    }
    /// Wraps the writer in a compact JSON serializer.
    #[cfg(feature = "json")]
    pub fn compact(write: W) -> Self {
        FormatterTarget::Compact(JsonSerializer::new(write))
    }
    /// Wraps the writer in a pretty-printing JSON serializer.
    #[cfg(feature = "json")]
    pub fn pretty(write: W) -> Self {
        FormatterTarget::Pretty(JsonSerializer::pretty(write))
    }
    /// Consumes the target and returns the underlying writer.
    pub fn into_inner(self) -> W {
        match self {
            FormatterTarget::Write(write) => write,
            #[cfg(feature = "json")]
            FormatterTarget::Compact(write) => write.into_inner(),
            #[cfg(feature = "json")]
            FormatterTarget::Pretty(write) => write.into_inner(),
        }
    }
    /// Switches to the plain-writer variant (if needed) and borrows it.
    pub fn as_write(&mut self) -> &mut W {
        self.convert(FormatterTarget::new);
        #[cfg_attr(not(feature = "json"), allow(unreachable_patterns))]
        match self {
            FormatterTarget::Write(inner) => inner,
            _ => unreachable!(),
        }
    }
    /// Switches to the compact JSON variant (if needed) and borrows it.
    #[cfg(feature = "json")]
    pub fn as_compact(&mut self) -> &mut CompactJsonSerializer<W> {
        self.convert(FormatterTarget::compact);
        #[cfg_attr(not(feature = "json"), allow(unreachable_patterns))]
        match self {
            FormatterTarget::Compact(inner) => inner,
            _ => unreachable!(),
        }
    }
    /// Switches to the pretty JSON variant (if needed) and borrows it.
    #[cfg(feature = "json")]
    pub fn as_pretty(&mut self) -> &mut PrettyJsonSerializer<W> {
        self.convert(FormatterTarget::pretty);
        match self {
            FormatterTarget::Pretty(inner) => inner,
            _ => unreachable!(),
        }
    }
    /// Rebuilds `self` in place by passing the current writer through `f`.
    fn convert<F>(&mut self, f: F)
    where
        F: FnOnce(W) -> Self,
    {
        // BUG FIX: the previous implementation swapped `self` with a
        // `MaybeUninit::uninit()` placeholder, which left `*self` holding
        // uninitialized bytes and then read that value again via
        // `mem::replace` — undefined behavior. Move the value out with
        // `ptr::read` and store the replacement with `ptr::write` instead.
        //
        // SAFETY: `ptr::read` duplicates `*self`; the duplicate is consumed
        // by `into_inner`/`f`, and `ptr::write` stores the replacement
        // without dropping the moved-out original, so no value is dropped
        // twice. NOTE(review): if `f` panics, unwinding would drop the
        // moved-out `*self` again — the constructors passed here do not
        // panic, but keep that constraint in mind for new callers.
        unsafe {
            let current = std::ptr::read(self);
            let replacement = f(current.into_inner());
            std::ptr::write(self, replacement);
        }
    }
}
/// Streaming formatter: renders values into `W` either via `fmt`-style
/// formatting or, with the `json` feature, via serde_json.
pub struct Formatter<W> {
    // Current sink; its variant switches with the requested output form.
    target: FormatterTarget<W>,
    // Requested formatting style.
    ty: FormatType,
    // Mirrors the `#` (alternate) format flag.
    alternate: bool,
}
impl<W> Formatter<W>
where
    W: io::Write,
{
    /// Creates a formatter over `write`, defaulting to `Display` formatting
    /// with the alternate flag off.
    pub fn new(write: W) -> Self {
        Formatter {
            target: FormatterTarget::new(write),
            ty: FormatType::Display,
            alternate: false,
        }
    }
    /// Builder-style setter for the format type.
    pub fn with_type(mut self, ty: FormatType) -> Self {
        self.ty = ty;
        self
    }
    /// Builder-style setter for the alternate (`{:#}`) flag.
    pub fn with_alternate(mut self, alternate: bool) -> Self {
        self.alternate = alternate;
        self
    }
    /// Formats one argument by serializing it into this formatter.
    pub fn format(&mut self, value: Argument<'_>) -> Result<(), FormatError> {
        // TODO: Serde calls erased_serialize here, which always passes the error through
        // Error::custom. In this process we lose the original error.
        value.serialize(self)
    }
    // JSON path: pretty output when the alternate flag is set, compact
    // otherwise.
    #[cfg(feature = "json")]
    fn serialize<D: Serialize>(&mut self, value: &D) -> Result<(), FormatError> {
        if self.alternate {
            value.serialize(self.target.as_pretty()).map_err(Into::into)
        } else {
            value
                .serialize(self.target.as_compact())
                .map_err(Into::into)
        }
    }
    // Without the json feature, structured values cannot be rendered.
    #[cfg(not(feature = "json"))]
    fn serialize<D: Serialize>(&mut self, _value: &D) -> Result<(), FormatError> {
        Err(FormatError::Type(FormatType::Object))
    }
    // Type-erases `value` + `fmt` into a FmtProxy and writes it with either
    // the alternate or the plain format string.
    fn fmt_internal<T>(&mut self, value: &T, fmt: FormatFn<T>) -> Result<(), FormatError> {
        let proxy = FmtProxy::new(value, fmt);
        if self.alternate {
            write!(self.target.as_write(), "{:#}", proxy).map_err(FormatError::Io)
        } else {
            write!(self.target.as_write(), "{}", proxy).map_err(FormatError::Io)
        }
    }
    // TODO: Implement this
    #[allow(unused)]
    fn debug<D: fmt::Debug>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::Debug::fmt)
    }
    /// Renders via `fmt::Display`.
    fn display<D: fmt::Display>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::Display::fmt)
    }
    /// Renders via `fmt::Octal`.
    fn octal<D: fmt::Octal>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::Octal::fmt)
    }
    /// Renders via `fmt::LowerHex`.
    fn lower_hex<D: fmt::LowerHex>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::LowerHex::fmt)
    }
    /// Renders via `fmt::UpperHex`.
    fn upper_hex<D: fmt::UpperHex>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::UpperHex::fmt)
    }
    /// Renders via `fmt::Pointer`.
    fn pointer<D: fmt::Pointer>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::Pointer::fmt)
    }
    /// Renders via `fmt::Binary`.
    fn binary<D: fmt::Binary>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::Binary::fmt)
    }
    /// Renders via `fmt::LowerExp`.
    fn lower_exp<D: fmt::LowerExp>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::LowerExp::fmt)
    }
    /// Renders via `fmt::UpperExp`.
    fn upper_exp<D: fmt::UpperExp>(&mut self, value: &D) -> Result<(), FormatError> {
        self.fmt_internal(value, fmt::UpperExp::fmt)
    }
}
/// Sequence serializer that dispatches to whichever JSON compound
/// serializer (compact or pretty) the parent formatter is using.
#[cfg(feature = "json")]
pub enum SerializeSeq<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeSeq),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeSeq),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeSeq for SerializeSeq<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one element to the active compound serializer.
    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_element(value),
            Self::Pretty(inner) => inner.serialize_element(value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the sequence on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
}
/// Tuple serializer that dispatches to the active (compact or pretty) JSON
/// compound serializer.
#[cfg(feature = "json")]
pub enum SerializeTuple<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeTuple),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeTuple),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeTuple for SerializeTuple<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one element to the active compound serializer.
    fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_element(value),
            Self::Pretty(inner) => inner.serialize_element(value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the tuple on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
}
/// Tuple-struct serializer that dispatches to the active (compact or
/// pretty) JSON compound serializer.
#[cfg(feature = "json")]
pub enum SerializeTupleStruct<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeTupleStruct),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeTupleStruct),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeTupleStruct for SerializeTupleStruct<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one field to the active compound serializer.
    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_field(value),
            Self::Pretty(inner) => inner.serialize_field(value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the tuple struct on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
}
/// Tuple-variant serializer that dispatches to the active (compact or
/// pretty) JSON compound serializer.
#[cfg(feature = "json")]
pub enum SerializeTupleVariant<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeTupleVariant),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeTupleVariant),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeTupleVariant for SerializeTupleVariant<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one field to the active compound serializer.
    fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_field(value),
            Self::Pretty(inner) => inner.serialize_field(value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the tuple variant on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
}
/// Map serializer that dispatches to the active (compact or pretty) JSON
/// compound serializer.
#[cfg(feature = "json")]
pub enum SerializeMap<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeMap),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeMap),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeMap for SerializeMap<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one key to the active compound serializer.
    fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_key(key),
            Self::Pretty(inner) => inner.serialize_key(key),
        };
        result.map_err(FormatError::from)
    }
    /// Forwards one value to the active compound serializer.
    fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_value(value),
            Self::Pretty(inner) => inner.serialize_value(value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the map on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
    // Overridden (rather than using the default key-then-value fallback) so
    // the inner serializer's own `serialize_entry` is used.
    fn serialize_entry<K: ?Sized, V: ?Sized>(
        &mut self,
        key: &K,
        value: &V,
    ) -> Result<(), Self::Error>
    where
        K: Serialize,
        V: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_entry(key, value),
            Self::Pretty(inner) => inner.serialize_entry(key, value),
        };
        result.map_err(FormatError::from)
    }
}
/// Struct serializer that dispatches to the active (compact or pretty)
/// JSON compound serializer.
#[cfg(feature = "json")]
pub enum SerializeStruct<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeStruct),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeStruct),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeStruct for SerializeStruct<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one named field to the active compound serializer.
    fn serialize_field<T: ?Sized>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_field(key, value),
            Self::Pretty(inner) => inner.serialize_field(key, value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the struct on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
    // Overridden so the inner serializer's own skip logic is used.
    fn skip_field(&mut self, key: &'static str) -> Result<(), Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.skip_field(key),
            Self::Pretty(inner) => inner.skip_field(key),
        };
        result.map_err(FormatError::from)
    }
}
/// Struct-variant serializer that dispatches to the active (compact or
/// pretty) JSON compound serializer.
#[cfg(feature = "json")]
pub enum SerializeStructVariant<'a, W: io::Write> {
    Compact(<&'a mut CompactJsonSerializer<W> as Serializer>::SerializeStructVariant),
    Pretty(<&'a mut PrettyJsonSerializer<W> as Serializer>::SerializeStructVariant),
}
#[cfg(feature = "json")]
impl<'a, W: io::Write> serde::ser::SerializeStructVariant for SerializeStructVariant<'a, W> {
    type Ok = ();
    type Error = FormatError;
    /// Forwards one named field to the active compound serializer.
    fn serialize_field<T: ?Sized>(
        &mut self,
        key: &'static str,
        value: &T,
    ) -> Result<(), Self::Error>
    where
        T: Serialize,
    {
        let result = match self {
            Self::Compact(inner) => inner.serialize_field(key, value),
            Self::Pretty(inner) => inner.serialize_field(key, value),
        };
        result.map_err(FormatError::from)
    }
    /// Closes the struct variant on the active compound serializer.
    fn end(self) -> Result<Self::Ok, Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.end(),
            Self::Pretty(inner) => inner.end(),
        };
        result.map_err(FormatError::from)
    }
    // Overridden so the inner serializer's own skip logic is used.
    fn skip_field(&mut self, key: &'static str) -> Result<(), Self::Error> {
        let result = match self {
            Self::Compact(inner) => inner.skip_field(key),
            Self::Pretty(inner) => inner.skip_field(key),
        };
        result.map_err(FormatError::from)
    }
}
// `Formatter<W>` as a serde `Serializer`: each scalar is routed to the
// formatter helper matching the requested `FormatType` (display, octal,
// hex, binary, exp, pointer, or JSON "object"); any unsupported
// type/format pairing yields `FormatError::Type`. Compound values
// (seq/map/struct/...) exist only with the "json" feature and are
// forwarded to a compact or a pretty JSON serializer depending on
// `self.alternate` (presumably the `{:#}` alternate flag — confirm where
// the flag is set).
impl<'a, W> Serializer for &'a mut Formatter<W>
where
    W: io::Write,
{
    type Ok = ();
    type Error = FormatError;
    // With "json": compound output goes through the wrapper enums in this
    // module, each carrying either the compact or pretty sub-serializer.
    #[cfg(feature = "json")]
    type SerializeSeq = SerializeSeq<'a, W>;
    #[cfg(feature = "json")]
    type SerializeTuple = SerializeTuple<'a, W>;
    #[cfg(feature = "json")]
    type SerializeTupleStruct = SerializeTupleStruct<'a, W>;
    #[cfg(feature = "json")]
    type SerializeTupleVariant = SerializeTupleVariant<'a, W>;
    #[cfg(feature = "json")]
    type SerializeMap = SerializeMap<'a, W>;
    #[cfg(feature = "json")]
    type SerializeStruct = SerializeStruct<'a, W>;
    #[cfg(feature = "json")]
    type SerializeStructVariant = SerializeStructVariant<'a, W>;
    // Without "json": compound serialization is unrepresentable by
    // construction (Impossible has no inhabitants).
    #[cfg(not(feature = "json"))]
    type SerializeSeq = serde::ser::Impossible<Self::Ok, Self::Error>;
    #[cfg(not(feature = "json"))]
    type SerializeTuple = serde::ser::Impossible<Self::Ok, Self::Error>;
    #[cfg(not(feature = "json"))]
    type SerializeTupleStruct = serde::ser::Impossible<Self::Ok, Self::Error>;
    #[cfg(not(feature = "json"))]
    type SerializeTupleVariant = serde::ser::Impossible<Self::Ok, Self::Error>;
    #[cfg(not(feature = "json"))]
    type SerializeMap = serde::ser::Impossible<Self::Ok, Self::Error>;
    #[cfg(not(feature = "json"))]
    type SerializeStruct = serde::ser::Impossible<Self::Ok, Self::Error>;
    #[cfg(not(feature = "json"))]
    type SerializeStructVariant = serde::ser::Impossible<Self::Ok, Self::Error>;
    // Booleans support plain display and JSON-object output only.
    fn serialize_bool(self, v: bool) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    // Integers additionally support the radix formats: octal, lower/upper
    // hex, and binary. The eight methods below are shape-identical.
    fn serialize_i8(self, v: i8) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_i16(self, v: i16) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_i32(self, v: i32) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_u8(self, v: u8) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_u16(self, v: u16) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_u32(self, v: u32) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Octal => self.octal(&v),
            FormatType::LowerHex => self.lower_hex(&v),
            FormatType::UpperHex => self.upper_hex(&v),
            FormatType::Binary => self.binary(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    // Floats support scientific notation (LowerExp/UpperExp) instead of the
    // radix formats.
    fn serialize_f32(self, v: f32) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::LowerExp => self.lower_exp(&v),
            FormatType::UpperExp => self.upper_exp(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_f64(self, v: f64) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::LowerExp => self.lower_exp(&v),
            FormatType::UpperExp => self.upper_exp(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    fn serialize_char(self, v: char) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    // Strings additionally allow Pointer formatting.
    fn serialize_str(self, v: &str) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&v),
            FormatType::Object => self.serialize(&v),
            FormatType::Pointer => self.pointer(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    // Raw bytes have no Display rendering; only object or pointer output.
    fn serialize_bytes(self, v: &[u8]) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Object => self.serialize(&v),
            FormatType::Pointer => self.pointer(&v),
            other => Err(FormatError::Type(other)),
        }
    }
    // `None` renders exactly like unit (see serialize_unit below).
    fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
        self.serialize_unit()
    }
    // `Some(v)` is transparent: the inner value decides the output.
    fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        value.serialize(self)
    }
    // Unit displays as the literal string "null"; as an object it is `()`.
    fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
        match self.ty {
            FormatType::Display => self.display(&"null"),
            FormatType::Object => self.serialize(&()),
            other => Err(FormatError::Type(other)),
        }
    }
    // Unit structs are indistinguishable from unit.
    fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
        self.serialize_unit()
    }
    // Unit enum variants render as their variant name.
    fn serialize_unit_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        variant: &'static str,
    ) -> Result<Self::Ok, Self::Error> {
        self.serialize_str(variant)
    }
    // Newtype structs are transparent wrappers around their value.
    fn serialize_newtype_struct<T: ?Sized>(
        self,
        _name: &'static str,
        value: &T,
    ) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        value.serialize(self)
    }
    // Newtype variants have no representation here: always a type error.
    fn serialize_newtype_variant<T: ?Sized>(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _value: &T,
    ) -> Result<Self::Ok, Self::Error>
    where
        T: Serialize,
    {
        Err(FormatError::Type(self.ty))
    }
    // Compound forms ("json" feature): accepted only for Object/Display;
    // `alternate` selects the pretty serializer, otherwise compact.
    #[cfg(feature = "json")]
    fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_seq(len)
                .map(SerializeSeq::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_seq(len)
                .map(SerializeSeq::Compact)
                .map_err(Into::into)
        }
    }
    #[cfg(feature = "json")]
    fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_tuple(len)
                .map(SerializeTuple::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_tuple(len)
                .map(SerializeTuple::Compact)
                .map_err(Into::into)
        }
    }
    #[cfg(feature = "json")]
    fn serialize_tuple_struct(
        self,
        name: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_tuple_struct(name, len)
                .map(SerializeTupleStruct::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_tuple_struct(name, len)
                .map(SerializeTupleStruct::Compact)
                .map_err(Into::into)
        }
    }
    #[cfg(feature = "json")]
    fn serialize_tuple_variant(
        self,
        name: &'static str,
        variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_tuple_variant(name, variant_index, variant, len)
                .map(SerializeTupleVariant::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_tuple_variant(name, variant_index, variant, len)
                .map(SerializeTupleVariant::Compact)
                .map_err(Into::into)
        }
    }
    #[cfg(feature = "json")]
    fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_map(len)
                .map(SerializeMap::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_map(len)
                .map(SerializeMap::Compact)
                .map_err(Into::into)
        }
    }
    #[cfg(feature = "json")]
    fn serialize_struct(
        self,
        name: &'static str,
        len: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_struct(name, len)
                .map(SerializeStruct::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_struct(name, len)
                .map(SerializeStruct::Compact)
                .map_err(Into::into)
        }
    }
    #[cfg(feature = "json")]
    fn serialize_struct_variant(
        self,
        name: &'static str,
        variant_index: u32,
        variant: &'static str,
        len: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        if self.ty != FormatType::Object && self.ty != FormatType::Display {
            return Err(FormatError::Type(self.ty));
        }
        if self.alternate {
            self.target
                .as_pretty()
                .serialize_struct_variant(name, variant_index, variant, len)
                .map(SerializeStructVariant::Pretty)
                .map_err(Into::into)
        } else {
            self.target
                .as_compact()
                .serialize_struct_variant(name, variant_index, variant, len)
                .map(SerializeStructVariant::Compact)
                .map_err(Into::into)
        }
    }
    // Compound forms without the "json" feature: uniformly rejected.
    #[cfg(not(feature = "json"))]
    fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
    #[cfg(not(feature = "json"))]
    fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
    #[cfg(not(feature = "json"))]
    fn serialize_tuple_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleStruct, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
    #[cfg(not(feature = "json"))]
    fn serialize_tuple_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeTupleVariant, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
    #[cfg(not(feature = "json"))]
    fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
    #[cfg(not(feature = "json"))]
    fn serialize_struct(
        self,
        _name: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStruct, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
    #[cfg(not(feature = "json"))]
    fn serialize_struct_variant(
        self,
        _name: &'static str,
        _variant_index: u32,
        _variant: &'static str,
        _len: usize,
    ) -> Result<Self::SerializeStructVariant, Self::Error> {
        Err(FormatError::Type(self.ty))
    }
}
|
#![allow(non_camel_case_types)]
use std::c_str::CString;
use std::{fmt,mem,ptr};
use libc::{c_char,c_int,c_uint,c_long};
/// Mirror of libcurl's `CURLversion` "age" enum: tells `curl_version_info`
/// which revision of the info struct layout the caller understands.
#[repr(C)]
enum CURLversion {
    CURL_VERSION_FIRST,
    CURL_VERSION_SECOND,
    CURL_VERSION_THIRD,
    CURL_VERSION_FOURTH,
    CURL_VERSION_LAST /* never actually use this */
}
// The newest info-struct layout this binding understands.
static CURL_VERSION_NOW: CURLversion = CURL_VERSION_FOURTH;
// Feature bit flags reported in `curl_version_info_data.features`; the
// values mirror the CURL_VERSION_* defines from curl/curl.h.
static CURL_VERSION_IPV6: c_int = (1 << 0);
static CURL_VERSION_KERBEROS4: c_int = (1 << 1);
static CURL_VERSION_SSL: c_int = (1 << 2);
static CURL_VERSION_LIBZ: c_int = (1 << 3);
static CURL_VERSION_NTLM: c_int = (1 << 4);
static CURL_VERSION_GSSNEGOTIATE: c_int = (1 << 5);
static CURL_VERSION_DEBUG: c_int = (1 << 6);
static CURL_VERSION_ASYNCHDNS: c_int = (1 << 7);
static CURL_VERSION_SPNEGO: c_int = (1 << 8);
static CURL_VERSION_LARGEFILE: c_int = (1 << 9);
static CURL_VERSION_IDN: c_int = (1 << 10);
static CURL_VERSION_SSPI: c_int = (1 << 11);
static CURL_VERSION_CONV: c_int = (1 << 12);
static CURL_VERSION_CURLDEBUG: c_int = (1 << 13);
static CURL_VERSION_TLSAUTH_SRP: c_int = (1 << 14);
static CURL_VERSION_NTLM_WB: c_int = (1 << 15);
static CURL_VERSION_HTTP2: c_int = (1 << 16);
/// C layout of libcurl's `curl_version_info_data`. All pointer fields are
/// owned by libcurl (static storage inside the library) and must not be
/// freed; string fields may be NULL, which the accessors map to `None`.
struct curl_version_info_data {
    #[allow(dead_code)]
    age: CURLversion,
    version: *const c_char,
    version_num: c_uint,
    host: *const c_char,
    features: c_int,
    ssl_version: *const c_char,
    #[allow(dead_code)]
    ssl_version_num: c_long,
    libz_version: *const c_char,
    /* protocols is terminated by an entry with a NULL protoname */
    protocols: *const *const c_char,
    /* The fields below this were added in CURL_VERSION_SECOND */
    ares: *const c_char,
    ares_num: c_int,
    /* This field was added in CURL_VERSION_THIRD */
    libidn: *const c_char,
    /* These field were added in CURL_VERSION_FOURTH */
    iconv_ver_num: c_int,
    libssh_version: *const c_char,
}
// Public alias so users never have to name the raw C struct.
pub type Version = curl_version_info_data;
impl curl_version_info_data {
pub fn version_str<'a>(&'a self) -> &'a str {
as_str(self.version).unwrap()
}
pub fn version_major(&self) -> uint {
(self.version_num as uint & 0xFF0000) >> 16
}
pub fn version_minor(&self) -> uint {
(self.version_num as uint & 0xFF00) >> 8
}
pub fn version_patch(&self) -> uint {
(self.version_num as uint & 0xFF)
}
pub fn host<'a>(&'a self) -> &'a str {
as_str(self.host).unwrap()
}
pub fn is_ipv6_enabled(&self) -> bool {
(self.features & CURL_VERSION_IPV6) == CURL_VERSION_IPV6
}
pub fn is_kerbos4_enabled(&self) -> bool {
(self.features & CURL_VERSION_KERBEROS4) == CURL_VERSION_KERBEROS4
}
pub fn is_ssl_enabled(&self) -> bool {
(self.features & CURL_VERSION_SSL) == CURL_VERSION_SSL
}
pub fn is_libz_enabled(&self) -> bool {
(self.features & CURL_VERSION_LIBZ) == CURL_VERSION_LIBZ
}
pub fn is_ntlm_enabled(&self) -> bool {
(self.features & CURL_VERSION_NTLM) == CURL_VERSION_NTLM
}
pub fn is_gss_negotiate_enabled(&self) -> bool {
(self.features & CURL_VERSION_GSSNEGOTIATE) == CURL_VERSION_GSSNEGOTIATE
}
pub fn is_debug_enabled(&self) -> bool {
(self.features & CURL_VERSION_DEBUG) == CURL_VERSION_DEBUG
}
pub fn is_async_dns_enabled(&self) -> bool {
(self.features & CURL_VERSION_ASYNCHDNS) == CURL_VERSION_ASYNCHDNS
}
pub fn is_spengo_enabled(&self) -> bool {
(self.features & CURL_VERSION_SPNEGO) == CURL_VERSION_SPNEGO
}
pub fn is_large_file_enabled(&self) -> bool {
(self.features & CURL_VERSION_LARGEFILE) == CURL_VERSION_LARGEFILE
}
pub fn is_idn_enabled(&self) -> bool {
(self.features & CURL_VERSION_IDN) == CURL_VERSION_IDN
}
pub fn is_sspi_enabled(&self) -> bool {
(self.features & CURL_VERSION_SSPI) == CURL_VERSION_SSPI
}
pub fn is_conv_enabled(&self) -> bool {
(self.features & CURL_VERSION_CONV) == CURL_VERSION_CONV
}
pub fn is_curl_debug_enabled(&self) -> bool {
(self.features & CURL_VERSION_CURLDEBUG) == CURL_VERSION_CURLDEBUG
}
pub fn is_tls_auth_srp_enabled(&self) -> bool {
(self.features & CURL_VERSION_TLSAUTH_SRP) == CURL_VERSION_TLSAUTH_SRP
}
pub fn is_ntlm_wb_enabled(&self) -> bool {
(self.features & CURL_VERSION_NTLM_WB) == CURL_VERSION_NTLM_WB
}
pub fn is_http2_enabled(&self) -> bool {
(self.features & CURL_VERSION_HTTP2) == CURL_VERSION_HTTP2
}
pub fn ssl_version<'a>(&'a self) -> Option<&'a str> {
as_str(self.ssl_version)
}
pub fn libz_version<'a>(&'a self) -> Option<&'a str> {
as_str(self.libz_version)
}
pub fn protocols<'a>(&'a self) -> Protocols<'a> {
Protocols { curr: self.protocols }
}
pub fn ares_version<'a>(&'a self) -> Option<&'a str> {
as_str(self.ares)
}
pub fn ares_version_num(&self) -> Option<uint> {
match self.ares_version() {
Some(_) => Some(self.ares_num as uint),
None => None
}
}
pub fn idn_version<'a>(&'a self) -> Option<&'a str> {
if self.is_idn_enabled() {
as_str(self.libidn)
}
else {
None
}
}
pub fn iconv_version(self) -> Option<uint> {
if self.is_conv_enabled() {
Some(self.iconv_ver_num as uint)
}
else {
None
}
}
pub fn ssh_version<'a>(&'a self) -> Option<&'a str> {
as_str(self.libssh_version)
}
}
/// Iterator over libcurl's NULL-terminated array of protocol name strings.
#[deriving(Clone)]
#[allow(raw_pointer_deriving)] // TODO: Implement this by hand
pub struct Protocols<'a> {
    // Cursor into the `*const *const c_char` array; the array ends at the
    // first NULL entry.
    curr: *const *const c_char
}
impl<'a> Iterator<&'a str> for Protocols<'a> {
    // Walks the NULL-terminated pointer array one entry at a time.
    fn next(&mut self) -> Option<&'a str> {
        unsafe {
            let proto = *self.curr;
            // A NULL protoname marks the end of the array.
            if proto == ptr::null() {
                return None;
            }
            self.curr = self.curr.offset(1);
            as_str(proto)
        }
    }
}
// Renders the protocol list like a slice: "[http, https, ftp]".
impl<'a> fmt::Show for Protocols<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::FormatError> {
        // Clone so formatting does not consume the caller's cursor.
        let mut i = self.clone();
        try!(write!(fmt, "["));
        // First element is written without a separator; an empty list
        // short-circuits straight to the closing bracket.
        match i.next() {
            Some(proto) => try!(write!(fmt, "{}", proto)),
            None => return write!(fmt, "]")
        }
        for proto in i {
            try!(write!(fmt, ", {}", proto));
        }
        write!(fmt, "]")
    }
}
// Borrows a C string as &str with a caller-chosen lifetime; NULL maps to
// None. The `false` flag means the CString does not own (and will not free)
// the buffer.
// NOTE(review): the transmute stretches the str's lifetime from the local
// CString to an arbitrary 'a — sound only because the underlying bytes live
// in libcurl's static storage; confirm no caller passes a shorter-lived
// pointer.
fn as_str<'a>(p: *const c_char) -> Option<&'a str> {
    if p == ptr::null() {
        return None;
    }
    unsafe {
        let v = CString::new(p, false);
        mem::transmute(v.as_str())
    }
}
// Raw FFI entry points into libcurl; `curl_version_info` returns a pointer
// to static data inside the library, modeled here as a 'static reference.
#[link(name = "curl")]
extern {
    fn curl_version() -> *const c_char;
    fn curl_version_info(t: CURLversion) -> &'static curl_version_info_data;
}
/// Returns libcurl's version/feature information (static data owned by the
/// library, valid for the life of the process).
pub fn version_info() -> &'static Version {
    unsafe { curl_version_info(CURL_VERSION_NOW) }
}
/// Returns libcurl's full version banner as a static string.
// The transmute extends the borrow to 'static; the bytes come from
// libcurl's static storage and the non-owning CString does not free them.
pub fn version() -> &'static str {
    unsafe {
        let v = CString::new(curl_version(), false);
        mem::transmute(v.as_str().unwrap())
    }
}
|
/// Classification of a natural number by its aliquot sum (the sum of its
/// proper divisors), after Nicomachus:
/// - `Perfect`: sum == n (6, 28, 496, ...)
/// - `Abundant`: sum > n (12, 18, 20, ...)
/// - `Deficient`: sum < n (primes, powers of two, ...)
#[derive(Debug, PartialEq, Eq)]
pub enum Classification {
    Abundant,
    Perfect,
    Deficient,
}
/// Classifies `num` by comparing its aliquot sum against the number itself.
///
/// Returns `None` for 0, which has no proper divisors and is left
/// unclassified. (Rewrite: drops the dead `result` binding and the
/// unreachable `_ => None` match arm, and uses `sum()` over a manual fold.)
pub fn classify(num: u64) -> Option<Classification> {
    if num == 0 {
        return None;
    }
    // Sum of all proper divisors (divisors strictly less than `num`).
    let aliquot: u64 = (1..num).filter(|d| num % d == 0).sum();
    if aliquot == num {
        Some(Classification::Perfect)
    } else if aliquot > num {
        Some(Classification::Abundant)
    } else {
        Some(Classification::Deficient)
    }
}
|
// Day 4 2019
#[cfg(test)]
mod tests {
    // Unit tests for the digit/combinatorics helpers in this module,
    // imported wholesale via the day4 module path.
    use crate::day4::*;
    #[test]
    fn test_factorial() {
        assert_eq!(factorial(0), 1);
        assert_eq!(factorial(1), 1);
        assert_eq!(factorial(2), 2);
        assert_eq!(factorial(3), 6);
        assert_eq!(factorial(10), 3628800);
    }
    #[test]
    fn test_choose() {
        assert_eq!(choose(1, 0), 1);
        assert_eq!(choose(1, 1), 1);
        assert_eq!(choose(2, 2), 1);
        assert_eq!(choose(2, 1), 2);
        assert_eq!(choose(10, 0), 1);
        assert_eq!(choose(10, 3), 120);
    }
    #[test]
    fn test_count_ascending() {
        assert_eq!(count_ascending(3, 2), 4);
        assert_eq!(count_ascending(3, 3), 10);
        assert_eq!(count_ascending(3, 4), 20);
    }
    #[test]
    fn test_to_increasing_digits() {
        assert_eq!(to_increasing_digits(123, true), 123);
        assert_eq!(to_increasing_digits(213, true), 223);
        assert_eq!(to_increasing_digits(321, true), 333);
        assert_eq!(to_increasing_digits(321, false), 299);
    }
    #[test]
    fn test_count_ascending_with_pairs_between() {
        // Cross-checks the combinatorial count against the (slow) brute force
        // result for the real puzzle input range.
        assert_eq!(count_ascending_with_pairs_between(6, 165432, 707912), 1716);
    }
    #[test]
    // This test passes on the commented line, but takes forever
    fn test_brute() {
        // assert_eq!(brute(165432, 707912), 1716);
        assert_eq!(1, 1);
    }
}
/// Split a non-negative number into its base-10 digits, most significant
/// first. E.g. `123` -> `[1, 2, 3]`, `0` -> `[0]`.
fn to_digits(number: usize) -> Vec<usize> {
    number
        .to_string()
        .chars()
        // Every char of a usize's decimal rendering is an ASCII digit,
        // so `to_digit(10)` cannot fail here. This replaces the old
        // split("")/filter/FromStr round-trip, which re-parsed each digit
        // through a one-character string.
        .map(|c| c.to_digit(10).unwrap() as usize)
        .collect()
}
/// Collect a vector of base-10 digits (most significant first) into a
/// number. E.g. `[1, 2, 3]` -> `123`; an empty vector collapses to 0.
fn to_number(digits: Vec<usize>) -> usize {
    // Horner's rule: each digit shifts the accumulator one decimal place
    // left. Equivalent to the old positional d * 10^(len - i - 1) sum, but
    // without the explicit exponent bookkeeping or closure-captured
    // mutation.
    digits.iter().fold(0, |acc, d| acc * 10 + d)
}
/// Modify a number so its digits are >= to the previous. `increase`
/// determines whether the resulting number can be greater than the input
/// number.
/// - E.g. `increase == true` with `number == 321` will produce 333
/// - E.g. `increase == false` with `number == 321` will produce 299, the
/// largest increasing-digit number less than 321
fn to_increasing_digits(number: usize, increase: bool) -> usize {
    // Let's not get ahead of ourselves
    if has_increasing_digits(number) {
        return number;
    }
    let digits = to_digits(number);
    let mut satisfied = vec![];
    if increase {
        // Round up: carry each digit forward so no digit is smaller than
        // the (already adjusted) digit to its left.
        digits.iter().enumerate().for_each(|(i, d)| {
            // If this isn't the first digit, and d is less than the previous
            // increased digit, make it the previous increased digit. Otherwise,
            // stay d.
            let increase = if i > 0 && d < &satisfied[i - 1] {
                *&satisfied[i - 1]
            } else {
                *d
            };
            satisfied.push(increase);
        });
    } else {
        // Round down: decrement the lead digit and fill the rest with 9s.
        // NOTE(review): this is not always the tightest bound — e.g. 120
        // yields 99 although 119 also has non-decreasing digits and is
        // closer. Confirm callers only need a first-digit lower bound (the
        // known call site passes puzzle range endpoints where this holds).
        if digits[0] == 0 {
            panic!(
                "When `increase` == false, number must not be zero-padded, but was {}",
                number
            );
        }
        digits.iter().for_each(|_| satisfied.push(9));
        satisfied[0] = digits[0] - 1;
    }
    to_number(satisfied)
}
/// Checks whether a number's decimal digits never decrease from left to
/// right (e.g. 123 and 111 qualify; 321 does not).
fn has_increasing_digits(number: usize) -> bool {
    // Walk the digits least-significant first; read in that direction the
    // sequence must be non-increasing, which mirrors the original
    // left-to-right non-decreasing check without building a digit vector.
    let mut rest = number;
    let mut right_neighbor = 9; // no decimal digit can exceed 9
    while rest > 0 {
        let digit = rest % 10;
        if digit > right_neighbor {
            return false;
        }
        right_neighbor = digit;
        rest /= 10;
    }
    true
}
/// Checks a number is within the open interval (lower, upper): both bounds
/// themselves are excluded.
fn within_range(lower: usize, upper: usize, number: usize) -> bool {
    number > lower && number < upper
}
/// Checks whether a number has two equal adjacent decimal digits
/// (e.g. 122 and 100 qualify; 123 does not).
fn has_double(number: usize) -> bool {
    // Compare each digit with its left neighbor, least significant first;
    // a single-digit number has no adjacent pair and falls straight through.
    let mut rest = number;
    while rest >= 10 {
        if rest % 10 == (rest / 10) % 10 {
            return true;
        }
        rest /= 10;
    }
    false
}
/// Calculates n factorial (with 0! == 1! == 1).
fn factorial(n: usize) -> usize {
    // Iterative product instead of recursion; an empty range yields 1,
    // which covers the 0 and 1 base cases for free.
    (2..=n).product()
}
/// Calculates the combinations n choose r.
///
/// Uses the multiplicative form instead of n! / (r! * (n - r)!), so it no
/// longer overflows whenever n! overflows (e.g. n >= 21 on 64-bit) even
/// though the result itself fits.
///
/// # Panics
/// Panics if `r > n`.
fn choose(n: usize, r: usize) -> usize {
    if r > n {
        panic!("r ({}) must not be greater than n ({})", r, n);
    }
    // Exploit symmetry: C(n, r) == C(n, n - r), so iterate the shorter side.
    let r = std::cmp::min(r, n - r);
    // After step i the accumulator holds C(n, i + 1); the product
    // acc * (n - i) is always divisible by (i + 1), so each division is
    // exact and intermediates stay as small as possible.
    (0..r).fold(1, |acc, i| acc * (n - i) / (i + 1))
}
/// Calculates the number of `n` length numbers of `d` digits where the digits
/// are ascending (non-decreasing).
/// - E.g. a 3 digit number with digits 1..=4: `n` = 3, `d` = 4
/// - E.g. a 3 digit number with digits 5..=9: `n` = 3, `d` = 5
///
/// # Panics
/// Panics if `d` is 0.
fn count_ascending(n: usize, d: usize) -> usize {
    // `d` counts the available digit values; zero makes no sense.
    // (Was `d <= 0`, which is the same test spelled oddly for an unsigned.)
    if d == 0 {
        panic!("d must be a positive integer, not {}", d);
    }
    if n == 0 {
        // There are no 0 length numbers
        return 0;
    }
    // Stars and bars: a non-decreasing length-n sequence over d symbols is
    // a size-n multiset, counted by C(n + d - 1, d - 1).
    choose(n + d - 1, d - 1)
}
/// Calculates the number of `n` length numbers between `min` and `max` that
/// have at least one pair of repeated digits and are ascending.
///
/// The endpoints are first clamped to numbers with non-decreasing digits
/// (`min` rounded up, `max` rounded down); the count is then assembled
/// combinatorially from the first digit `i`, the index `j` of the repeated
/// pair, and the head/tail digit choices around it.
///
/// # Panics
/// Panics if `n < 2`, if `min >= max`, or if a clamped endpoint is not `n`
/// digits long.
fn count_ascending_with_pairs_between(n: usize, min: usize, max: usize) -> usize {
    if n < 2 {
        panic!("n must be at least 2, but was {}", n);
    }
    if min >= max {
        panic!("min must be less than max, but were {} and {}", min, max);
    }
    let lower = to_increasing_digits(min, true);
    let lower_digits = to_digits(lower);
    let upper = to_increasing_digits(max, false);
    let upper_digits = to_digits(upper);
    if lower_digits.len() != n || upper_digits.len() != n {
        panic!(
            "min and max must be n digits long, but were {} and {}",
            min, max
        );
    }
    // (Removed a leftover debug `println!` of the clamped bounds here — a
    // pure counting function should not write to stdout.)
    let mut count = 0;
    // i is the first digit (not its index, its actual digit value)
    for i in lower_digits[0]..=upper_digits[0] {
        // j is the index of the first pair digit, which must be inclusively
        // between the first digit and the penultimate digit
        for j in 0..=(n - 2) {
            if j == 0 {
                // --- Count the head digits ---
                // There are no head digits
                // --- Count the pair digits ---
                // The pair digit = i, and occupies the first and second digit
                // spot. If i is less than the second digit's min, then this j
                // isn't valid
                if i < lower_digits[1] {
                    continue;
                }
                // The tail digits can be between i and 9, and are
                // n - 2 digits long (because j = 0)
                count += count_ascending(n - 2, 10 - i);
            } else if j == 1 {
                // --- Count the head digits ---
                // There is only one head digit and it is i
                // --- Count the pair digits ---
                // l is the pair digit inclusively between i and 9, and occupies
                // the second and third digit spot.
                for l in i..=9 {
                    // If i is less than the second digit's min, then this j
                    // isn't valid
                    if l < lower_digits[1] {
                        continue;
                    }
                    // --- Count the tail digits ---
                    // The tail digits can be inclusively between l and 9, and
                    // are n - 3 digits long (because j = 1)
                    if l != 9 {
                        count += count_ascending(n - 3, 10 - l)
                    }
                }
            } else {
                // k is the max of the head ascending digits
                for k in i..=9 {
                    // --- Count the head digits ---
                    // The head digits after the first digit can be inclusively
                    // between i and k (unless i is its min, at which point
                    // the min isn't i but max(i, the second digit's min)), and
                    // are j digits long
                    let head_min = if i == lower_digits[0] {
                        std::cmp::max(i, lower_digits[1])
                    } else {
                        i
                    };
                    // If the maximum of the head digits is less than their
                    // minimum, this k isn't valid
                    if k < head_min {
                        continue;
                    }
                    count += count_ascending(j - 1, 1 + k - head_min);
                    // --- Count the pair digits ---
                    // l is the pair digit, inclusively between k and 9
                    for l in k..=9 {
                        // --- Count the tail digits ---
                        // The tail digits can be inclusively between l and 9,
                        // and are n - j - 2 digits long
                        count += count_ascending(n - j - 2, 10 - l)
                    }
                }
            }
        }
    }
    count
}
/// Counts, by brute force, the numbers in [min, max) that have both an
/// adjacent repeated digit and non-decreasing digits. Slow reference
/// implementation for cross-checking the combinatorial count.
fn brute(min: usize, max: usize) -> usize {
    (min..max)
        .filter(|&candidate| has_double(candidate) && has_increasing_digits(candidate))
        .count()
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// One page of `Monitor` resources; `next_link` (wire name `nextLink`)
/// carries a continuation URL — presumably the usual Azure paging
/// convention. Absent/empty fields are skipped when serializing.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MonitorsCollection {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Monitor>,
}
/// A monitor resource: the common `ProxyResource` fields are flattened into
/// this object on the wire, plus an optional `etag` and the monitor payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Monitor {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<MonitorProperties>,
}
/// Properties payload of a `Monitor`. Every field is optional and the serde
/// renames map Rust snake_case to the API's camelCase wire names; the enum
/// fields constrain the corresponding string values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MonitorProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "monitorId", default, skip_serializing_if = "Option::is_none")]
    pub monitor_id: Option<String>,
    #[serde(rename = "monitorName", default, skip_serializing_if = "Option::is_none")]
    pub monitor_name: Option<String>,
    #[serde(rename = "monitorDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub monitor_display_name: Option<String>,
    #[serde(rename = "parentMonitorName", default, skip_serializing_if = "Option::is_none")]
    pub parent_monitor_name: Option<String>,
    #[serde(rename = "parentMonitorDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub parent_monitor_display_name: Option<String>,
    #[serde(rename = "monitorType", default, skip_serializing_if = "Option::is_none")]
    pub monitor_type: Option<monitor_properties::MonitorType>,
    #[serde(rename = "monitorCategory", default, skip_serializing_if = "Option::is_none")]
    pub monitor_category: Option<monitor_properties::MonitorCategory>,
    #[serde(rename = "componentTypeId", default, skip_serializing_if = "Option::is_none")]
    pub component_type_id: Option<String>,
    #[serde(rename = "componentTypeName", default, skip_serializing_if = "Option::is_none")]
    pub component_type_name: Option<String>,
    #[serde(rename = "componentTypeDisplayName", default, skip_serializing_if = "Option::is_none")]
    pub component_type_display_name: Option<String>,
    #[serde(rename = "monitorState", default, skip_serializing_if = "Option::is_none")]
    pub monitor_state: Option<monitor_properties::MonitorState>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub criteria: Vec<MonitorCriteria>,
    #[serde(rename = "alertGeneration", default, skip_serializing_if = "Option::is_none")]
    pub alert_generation: Option<monitor_properties::AlertGeneration>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frequency: Option<i32>,
    #[serde(rename = "lookbackDuration", default, skip_serializing_if = "Option::is_none")]
    pub lookback_duration: Option<i32>,
    #[serde(rename = "documentationURL", default, skip_serializing_if = "Option::is_none")]
    pub documentation_url: Option<String>,
    #[serde(rename = "signalName", default, skip_serializing_if = "Option::is_none")]
    pub signal_name: Option<String>,
    #[serde(rename = "signalType", default, skip_serializing_if = "Option::is_none")]
    pub signal_type: Option<String>,
}
// Enumerated string values used by `MonitorProperties`; variants serialize
// under their Rust names.
pub mod monitor_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum MonitorType {
        Aggregate,
        Dependency,
        Unit,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum MonitorCategory {
        AvailabilityHealth,
        Configuration,
        EntityHealth,
        PerformanceHealth,
        Security,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum MonitorState {
        Enabled,
        Disabled,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AlertGeneration {
        Enabled,
        Disabled,
    }
}
/// A single monitor criterion: a health state paired with a threshold and
/// the comparison operator applied against it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MonitorCriteria {
    #[serde(rename = "healthState", default, skip_serializing_if = "Option::is_none")]
    pub health_state: Option<monitor_criteria::HealthState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub threshold: Option<f64>,
    #[serde(rename = "comparisonOperator", default, skip_serializing_if = "Option::is_none")]
    pub comparison_operator: Option<monitor_criteria::ComparisonOperator>,
}
// Enumerated string values used by `MonitorCriteria`.
pub mod monitor_criteria {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HealthState {
        Error,
        Warning,
        Success,
        Unknown,
        Uninitialized,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ComparisonOperator {
        Equals,
        GreaterThan,
        GreaterThanOrEqual,
        LessThan,
        LessThanOrEqual,
        NotEquals,
    }
}
/// One page of `Component` resources, with an optional `nextLink`
/// continuation URL (same shape as `MonitorsCollection`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComponentsCollection {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Component>,
}
/// A component resource: flattened `ProxyResource` envelope, optional
/// `etag`, and the component-specific properties payload.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Component {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ComponentProperties>,
}
/// Properties payload of a `Component`, including its health state over an
/// optional time window and nested child components. `vm_tags` and
/// `aggregate_properties` are left as untyped JSON values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComponentProperties {
    #[serde(rename = "workspaceId", default, skip_serializing_if = "Option::is_none")]
    pub workspace_id: Option<String>,
    #[serde(rename = "solutionId", default, skip_serializing_if = "Option::is_none")]
    pub solution_id: Option<String>,
    #[serde(rename = "workloadType", default, skip_serializing_if = "Option::is_none")]
    pub workload_type: Option<component_properties::WorkloadType>,
    #[serde(rename = "componentName", default, skip_serializing_if = "Option::is_none")]
    pub component_name: Option<String>,
    #[serde(rename = "componentTypeId", default, skip_serializing_if = "Option::is_none")]
    pub component_type_id: Option<String>,
    #[serde(rename = "componentTypeName", default, skip_serializing_if = "Option::is_none")]
    pub component_type_name: Option<String>,
    #[serde(rename = "componentTypeGroupCategory", default, skip_serializing_if = "Option::is_none")]
    pub component_type_group_category: Option<String>,
    #[serde(rename = "healthState", default, skip_serializing_if = "Option::is_none")]
    pub health_state: Option<component_properties::HealthState>,
    #[serde(rename = "healthStateCategory", default, skip_serializing_if = "Option::is_none")]
    pub health_state_category: Option<component_properties::HealthStateCategory>,
    #[serde(rename = "healthStateChangesStartTime", default, skip_serializing_if = "Option::is_none")]
    pub health_state_changes_start_time: Option<String>,
    #[serde(rename = "healthStateChangesEndTime", default, skip_serializing_if = "Option::is_none")]
    pub health_state_changes_end_time: Option<String>,
    #[serde(rename = "lastHealthStateChangeTime", default, skip_serializing_if = "Option::is_none")]
    pub last_health_state_change_time: Option<String>,
    #[serde(rename = "vmId", default, skip_serializing_if = "Option::is_none")]
    pub vm_id: Option<String>,
    #[serde(rename = "vmName", default, skip_serializing_if = "Option::is_none")]
    pub vm_name: Option<String>,
    #[serde(rename = "vmTags", default, skip_serializing_if = "Option::is_none")]
    pub vm_tags: Option<serde_json::Value>,
    #[serde(rename = "aggregateProperties", default, skip_serializing_if = "Option::is_none")]
    pub aggregate_properties: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub children: Vec<Component>,
}
// Enumerated string values used by `ComponentProperties`; explicit renames
// keep the API's all-caps acronyms while the Rust variants stay CamelCase.
pub mod component_properties {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum WorkloadType {
        #[serde(rename = "BaseOS")]
        BaseOs,
        #[serde(rename = "SQL")]
        Sql,
        #[serde(rename = "IIS")]
        Iis,
        Apache,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HealthState {
        Error,
        Warning,
        Success,
        Unknown,
        Uninitialized,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HealthStateCategory {
        Identity,
        CustomGroup,
    }
}
/// Paged collection of [`MonitorInstance`] values returned by list operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MonitorInstancesCollection {
    /// URL of the next page of results, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<MonitorInstance>,
}
/// A single monitor-instance resource (proxy resource identity + payload).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MonitorInstance {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<MonitorInstanceProperties>,
}
/// Payload of a [`MonitorInstance`]; mirrors the service's JSON schema,
/// hence the camelCase `rename` on every field.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MonitorInstanceProperties {
    #[serde(rename = "workspaceId", default, skip_serializing_if = "Option::is_none")]
    pub workspace_id: Option<String>,
    #[serde(rename = "solutionId", default, skip_serializing_if = "Option::is_none")]
    pub solution_id: Option<String>,
    #[serde(rename = "workloadType", default, skip_serializing_if = "Option::is_none")]
    pub workload_type: Option<monitor_instance_properties::WorkloadType>,
    #[serde(rename = "componentId", default, skip_serializing_if = "Option::is_none")]
    pub component_id: Option<String>,
    #[serde(rename = "componentName", default, skip_serializing_if = "Option::is_none")]
    pub component_name: Option<String>,
    #[serde(rename = "componentTypeId", default, skip_serializing_if = "Option::is_none")]
    pub component_type_id: Option<String>,
    #[serde(rename = "componentTypeName", default, skip_serializing_if = "Option::is_none")]
    pub component_type_name: Option<String>,
    #[serde(rename = "monitorId", default, skip_serializing_if = "Option::is_none")]
    pub monitor_id: Option<String>,
    #[serde(rename = "monitorName", default, skip_serializing_if = "Option::is_none")]
    pub monitor_name: Option<String>,
    #[serde(rename = "monitorType", default, skip_serializing_if = "Option::is_none")]
    pub monitor_type: Option<monitor_instance_properties::MonitorType>,
    #[serde(rename = "monitorCategory", default, skip_serializing_if = "Option::is_none")]
    pub monitor_category: Option<monitor_instance_properties::MonitorCategory>,
    #[serde(rename = "healthState", default, skip_serializing_if = "Option::is_none")]
    pub health_state: Option<monitor_instance_properties::HealthState>,
    #[serde(rename = "healthStateCategory", default, skip_serializing_if = "Option::is_none")]
    pub health_state_category: Option<monitor_instance_properties::HealthStateCategory>,
    /// History of state transitions within the requested time window.
    #[serde(rename = "healthStateChanges", default, skip_serializing_if = "Vec::is_empty")]
    pub health_state_changes: Vec<HealthStateChange>,
    #[serde(rename = "healthStateChangesStartTime", default, skip_serializing_if = "Option::is_none")]
    pub health_state_changes_start_time: Option<String>,
    #[serde(rename = "healthStateChangesEndTime", default, skip_serializing_if = "Option::is_none")]
    pub health_state_changes_end_time: Option<String>,
    #[serde(rename = "lastHealthStateChangeTime", default, skip_serializing_if = "Option::is_none")]
    pub last_health_state_change_time: Option<String>,
    #[serde(rename = "alertGeneration", default, skip_serializing_if = "Option::is_none")]
    pub alert_generation: Option<monitor_instance_properties::AlertGeneration>,
    /// Untyped on the wire; kept as raw JSON.
    #[serde(rename = "aggregateProperties", default, skip_serializing_if = "Option::is_none")]
    pub aggregate_properties: Option<serde_json::Value>,
    /// Nested monitor instances, forming a tree.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub children: Vec<MonitorInstance>,
}
/// Enumerations used by [`MonitorInstanceProperties`] fields.
pub mod monitor_instance_properties {
    use super::*;
    /// Kind of workload; wire values keep the service's exact casing.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum WorkloadType {
        #[serde(rename = "BaseOS")]
        BaseOs,
        #[serde(rename = "SQL")]
        Sql,
        #[serde(rename = "IIS")]
        Iis,
        Apache,
    }
    /// Structural kind of the monitor.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum MonitorType {
        Aggregate,
        Dependency,
        Unit,
    }
    /// Functional category of the monitor.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum MonitorCategory {
        AvailabilityHealth,
        Configuration,
        EntityHealth,
        PerformanceHealth,
        Security,
    }
    /// Health of the monitor as reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HealthState {
        Error,
        Warning,
        Success,
        Unknown,
        Uninitialized,
    }
    /// Grouping category for the health state.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HealthStateCategory {
        Identity,
        CustomGroup,
    }
    /// Whether alerts are generated from this monitor.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum AlertGeneration {
        Enabled,
        Disabled,
    }
}
/// A single health-state transition observed for a monitor.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HealthStateChange {
    #[serde(rename = "healthState", default, skip_serializing_if = "Option::is_none")]
    pub health_state: Option<health_state_change::HealthState>,
    #[serde(rename = "healthStateChangeTime", default, skip_serializing_if = "Option::is_none")]
    pub health_state_change_time: Option<String>,
}
/// Enumerations used by [`HealthStateChange`] fields.
pub mod health_state_change {
    use super::*;
    /// Health value after the transition.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum HealthState {
        Error,
        Warning,
        Success,
        Unknown,
        Uninitialized,
    }
}
/// Paged collection of [`NotificationSetting`] values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NotificationSettingsCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<NotificationSetting>,
    /// URL of the next page of results, if any.
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A notification-setting resource (proxy resource identity + payload).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NotificationSetting {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub etag: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<NotificationSettingProperties>,
}
/// Payload of a [`NotificationSetting`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NotificationSettingProperties {
    /// Resource IDs of the action groups to notify (per the wire field name).
    #[serde(rename = "actionGroupResourceIds", default, skip_serializing_if = "Vec::is_empty")]
    pub action_group_resource_ids: Vec<String>,
}
/// Paged list of operations supported by the resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
/// A single REST API operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationProperties>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}
/// Human-readable details of an [`Operation`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
}
/// One field-level error inside an [`ErrorResponse`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorFieldContract {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
/// Top-level error payload returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorFieldContract>,
}
/// A resource that carries only the common identity fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
/// Common resource identity fields (id, name, type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Renamed because `type` is a Rust keyword.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
|
#![macro_use]
// Helper macros that forward std::ops trait implementations from a newtype
// wrapper type `$T` (rebuilt via constructor expression `$Tx`) to its inner
// value `self.0`. All generated impls are generic over `F: FullFloat`
// (declared elsewhere in this crate).

/// Binary operator on two wrappers: `$T $op $T -> $T`.
macro_rules! impl_op {
    ($Op:ident, $op:ident, $T:ty, $Tx:expr) => {
        impl<F: FullFloat> $Op<$T> for $T {
            type Output = $T;
            fn $op(self, rhs: $T) -> $T {
                $Tx((self.0).$op(rhs.0))
            }
        }
    };
}
/// Binary operator with a scalar right-hand side: `$T $op F -> $T`.
macro_rules! impl_op_f {
    ($Op:ident, $op:ident, $T:ty, $Tx:expr) => {
        impl<F: FullFloat> $Op<F> for $T {
            type Output = $T;
            fn $op(self, rhs: F) -> $T {
                $Tx((self.0).$op(rhs))
            }
        }
    };
}
/// Unary operator: `$op $T -> $T`.
macro_rules! impl_uop {
    ($Op:ident, $op:ident, $T:ty, $Tx:expr) => {
        impl<F: FullFloat> $Op for $T {
            type Output = $T;
            fn $op(self) -> $T {
                $Tx((self.0).$op())
            }
        }
    };
}
/// Compound-assignment operator with a wrapper rhs: `$T $op= $T`.
/// NOTE(review): `$Tx` is unused here; kept only for call-site symmetry
/// with the other macros.
macro_rules! impl_aop {
    ($Op:ident, $op:ident, $T:ty, $Tx:expr) => {
        impl<F: FullFloat> $Op for $T {
            fn $op(&mut self, rhs: $T) {
                (self.0).$op(rhs.0)
            }
        }
    };
}
/// Compound-assignment operator with a scalar rhs: `$T $op= F`.
/// NOTE(review): `$Tx` is unused here as well.
macro_rules! impl_aop_f {
    ($Op:ident, $op:ident, $T:ty, $Tx:expr) => {
        impl<F: FullFloat> $Op<F> for $T {
            fn $op(&mut self, rhs: F) {
                (self.0).$op(rhs)
            }
        }
    };
}
|
// Modified from: http://www.adammil.net/blog/v125_roguelike_vision_algorithms.html#mycode
use rl_utils::{Area, Coord};
use crate::{utils::Octant, Fov, FovCallbackEnum, FovConfig, Los, VisionShape};
/// A rational slope `y / x`, kept as separate integers so comparisons can be
/// performed with cross-multiplication — no floats, no division.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
struct Slope {
    x: isize,
    y: isize,
}
impl Slope {
    pub fn new(x: isize, y: isize) -> Slope {
        Slope { x, y }
    }
    /// `self > s` by cross-multiplication (`self.y * s.x > self.x * s.y`).
    /// NOTE(review): this assumes non-negative `x` components; a negative `x`
    /// would flip the inequality — confirm callers guarantee this.
    pub fn greater(&self, s: Slope) -> bool {
        self.y * s.x > self.x * s.y
    }
    /// `self >= s` by cross-multiplication.
    pub fn greater_or_equal(&self, s: Slope) -> bool {
        self.y * s.x >= self.x * s.y
    }
    /// `self <= s` by cross-multiplication.
    pub fn less_or_equal(&self, s: Slope) -> bool {
        self.y * s.x <= self.x * s.y
    }
    /// `self < s` by cross-multiplication.
    pub fn less(&self, s: Slope) -> bool {
        self.y * s.x < self.x * s.y
    }
}
/// Converts an `(x, y)` tuple into a [`Slope`]. Note the order: `x` first.
impl From<(isize, isize)> for Slope {
    fn from(t: (isize, isize)) -> Self {
        Self::new(t.0, t.1)
    }
}
/// Tri-state opacity of the previously processed tile while scanning a column.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
enum Opaque {
    /// No tile has been processed yet in this column.
    Uninitialised,
    Transparent,
    Opaque,
}
/// Recursive shadowcasting field-of-view with bevelled wall corners, ported
/// from Adam Milazzo's roguelike vision algorithms article (see link above).
#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
pub struct BevelledWalls<'a, T, Func>
where Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool, {
    /// When true, uses the stricter symmetric visibility test.
    pub symmetric: bool,
    /// Map bounds; points outside are skipped.
    pub area: Area,
    /// Maximum sight distance, in tiles.
    pub radius: usize,
    /// Shape used by the `in_radius` distance test.
    pub vision: VisionShape,
    /// User state threaded through every `callback` invocation.
    pub cb_type: &'a mut T,
    /// Queried with `IsBlocked` and notified with `SetVisible(..)` per tile.
    pub callback: Func,
}
impl<'a, T, Func> BevelledWalls<'a, T, Func> where Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool, {
    /// Scans one octant recursively from column `row` out to `self.radius`.
    ///
    /// `top` and `bottom` bound the slope span that is still lit. Opaque
    /// tiles split the span: the part above the obstruction continues in a
    /// recursive call and `top` is narrowed for the remainder. Slope
    /// comparisons use doubled (quadrupled in symmetric mode) coordinates so
    /// tile corners and bevels can be addressed without fractions.
    fn compute(&mut self, src: Coord, octant: Octant, row: isize, mut top: Slope, bottom: Slope) {
        for x in row..=self.radius as isize {
            // Opacity of the previously visited tile in this column.
            let mut was_opaque = Opaque::Uninitialised;
            // Topmost y in this column that `top` still covers.
            let top_y = if top.x == 1 {
                x as isize
            } else {
                let top_y = ((x * 2 - 1) * top.y + top.x) / (top.x * 2);
                if (self.callback)(self.cb_type, (x, top_y).into(), FovCallbackEnum::IsBlocked) {
                    // Bevel: if the slope clears the blocking tile's corner and
                    // the tile above is transparent, include one more row.
                    if top.greater_or_equal((x * 2, top_y * 2 + 1).into())
                        && !(self.callback)(self.cb_type, (x, top_y + 1).into(), FovCallbackEnum::IsBlocked)
                    {
                        top_y + 1
                    } else {
                        top_y
                    }
                } else {
                    top_y
                }
            };
            // Bottommost y in this column that `bottom` still covers.
            let bottom_y = if bottom.y == 0 {
                0
            } else {
                let bottom_y = ((x * 2 - 1) * bottom.y + bottom.x) / (bottom.x * 2);
                // NOTE(review): the tuple here is (bottom_y * 2 + 1, x * 2) —
                // transposed relative to the analogous top check above, which
                // passes (x * 2, top_y * 2 + 1). The reference C# implementation
                // uses the same argument order for both checks; confirm this
                // transposition is intentional.
                if bottom.greater_or_equal((bottom_y * 2 + 1, x * 2).into())
                    && (self.callback)(self.cb_type, (x, bottom_y).into(), FovCallbackEnum::IsBlocked)
                    && !(self.callback)(self.cb_type, (x, bottom_y + 1).into(), FovCallbackEnum::IsBlocked)
                {
                    bottom_y + 1
                } else {
                    bottom_y
                }
            };
            // Walk the column from top to bottom.
            for y in (bottom_y..=top_y).rev() {
                let point = octant.calc_point(src, (x, y).into());
                if !self.area.point_within(point) {
                    continue;
                } else if !self.vision.in_radius(x as usize, y as usize, self.radius) {
                    continue;
                }
                let is_opaque = (self.callback)(self.cb_type, point, FovCallbackEnum::IsBlocked);
                // Symmetric mode uses the stricter quadrupled-coordinate test at
                // the span edges; asymmetric mode is more permissive there.
                let is_visible = if !self.symmetric {
                    is_opaque
                        || ((y != top_y || top.greater_or_equal((x, y).into()))
                            && (y != bottom_y || bottom.less_or_equal((x, y).into())))
                } else {
                    (y != top_y || top.greater((x * 4 + 1, y * 4 - 1).into()))
                        && (y != bottom_y || bottom.less((x * 4 - 1, y * 4 + 1).into()))
                };
                (self.callback)(self.cb_type, point, FovCallbackEnum::SetVisible(is_visible));
                if x != self.radius as isize {
                    if is_opaque {
                        if was_opaque == Opaque::Transparent {
                            // Transparent -> opaque transition: the span above
                            // the obstruction continues via recursion.
                            let mut nx = x * 2;
                            let ny = y * 2 + 1;
                            if (self.callback)(self.cb_type, (x, y + 1).into(), FovCallbackEnum::IsBlocked) {
                                nx -= 1;
                            }
                            if top.greater((nx, ny).into()) {
                                // NOTE(review): this `let` shadows the `bottom`
                                // parameter. In the reference implementation the
                                // `y == bottom_y` branch updates the outer
                                // `bottom` before `break`ing so the next column
                                // uses it; here the new value is dropped on
                                // `break` — confirm against the reference.
                                let bottom = (x * 2 - 1, y * 2 + 1).into();
                                if y == bottom_y {
                                    break;
                                } else {
                                    self.compute(src, octant, x + 1, top, bottom);
                                }
                            } else if y == bottom_y {
                                return;
                            }
                        }
                        was_opaque = Opaque::Opaque;
                    } else {
                        if was_opaque == Opaque::Opaque {
                            // Opaque -> transparent transition: narrow `top` to
                            // the obstruction's lower edge (bevelled when the
                            // diagonal neighbour is also opaque, asymmetric mode).
                            let nx =
                                if !self.symmetric
                                    && (self.callback)(self.cb_type, (x + 1, y + 1).into(), FovCallbackEnum::IsBlocked)
                                {
                                    (x * 2) + 1
                                } else {
                                    x * 2
                                };
                            let ny = y * 2 + 1;
                            if bottom.greater_or_equal((nx, ny).into()) {
                                return;
                            }
                            top = (nx, ny).into();
                        }
                        was_opaque = Opaque::Transparent;
                    }
                }
            }
            // A column ending in an opaque tile terminates the scan.
            if was_opaque != Opaque::Transparent {
                break;
            }
        }
    }
}
impl<'a, T, Func> FovConfig for BevelledWalls<'a, T, Func> where Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool, {
    /// Builder-style setter for the map bounds.
    fn with_area(mut self, area: Area) -> Self {
        self.area = area;
        self
    }
    /// Builder-style setter for the sight radius.
    fn with_radius(mut self, radius: usize) -> Self {
        self.radius = radius;
        self
    }
    /// Builder-style setter for the radius-test shape.
    fn with_vision_shape(mut self, vision: VisionShape) -> Self {
        self.vision = vision;
        self
    }
}
impl<'a, T, Func> Fov for BevelledWalls<'a, T, Func> where Func: FnMut(&mut T, Coord, FovCallbackEnum) -> bool, {
    /// Computes the field of view around `src` by running the recursive scan
    /// once per octant, starting at column 1 with the full slope span
    /// (top = 1/1 down to bottom = 0/1).
    fn fov(&mut self, src: Coord) {
        for octant in Octant::iterator() {
            self.compute(src, *octant, 1, Slope::new(1, 1), Slope::new(1, 0));
        }
    }
}
|
/// A simple character-grid image, indexed column-first as `data[x][y]`.
pub struct Image {
    pub width: usize,
    pub height: usize,
    // Column-major storage: `data[x][y]` is the cell at column x, row y.
    data: Vec<Vec<char>>
}
impl Image {
    /// Creates a `width` x `height` image with every cell set to `'.'`.
    pub fn new(width: usize, height: usize) -> Image {
        // One inner Vec per column; each column holds `height` cells.
        // (The previous version sized the outer Vec with `height` and the
        // inner ones with `width` — harmless, but backwards.)
        let mut v = Vec::with_capacity(width);
        for _ in 0..width {
            v.push(vec!['.'; height]);
        }
        Image { width, height, data: v }
    }
    /// Sets the cell at column `x`, row `y` to `c`.
    ///
    /// Panics if `x >= width` or `y >= height`.
    pub fn set(&mut self, x: usize, y: usize, c: char) {
        self.data[x][y] = c;
    }
    /// Prints the image to stdout, one row per line.
    pub fn display(&self) {
        // Iterate the full ranges: the previous `0..self.height - 1` /
        // `0..self.width - 1` silently dropped the last row and column
        // (and underflowed for a zero-sized image) — ranges are already
        // exclusive at the upper bound.
        for y in 0..self.height {
            for x in 0..self.width {
                print!("{}", self.data[x][y]);
            }
            println!();
        }
    }
}
|
use std::env;
use std::thread::spawn;
use futures::{Future, Stream};
use ipnetwork::IpNetwork;
use tokio_core::reactor::Core;
use netlink_packet_route::link::nlas::LinkNla;
use rtnetlink::new_connection;
/// Example binary: deletes `<ip_address>` from the network link named
/// `<link_name>` via rtnetlink. Requires root; see `usage()` for details.
fn main() {
    let args: Vec<String> = env::args().collect();
    // Exactly two user arguments expected (program name + 2).
    if args.len() != 3 {
        return usage();
    }
    let link_name = &args[1];
    let ip: IpNetwork = args[2].parse().unwrap_or_else(|_| {
        eprintln!("invalid address");
        std::process::exit(1);
    });
    // Create a netlink connection, and a handle to send requests via this connection
    let (connection, handle) = new_connection().unwrap();
    // The connection we run in its own thread
    spawn(move || Core::new().unwrap().run(connection));
    // Get the list of links
    let links = handle.link().get().execute().collect().wait().unwrap();
    // Find the link whose IfName attribute matches, delete the address from
    // it, and stop at the first match.
    for link in links {
        for nla in link.nlas {
            if let LinkNla::IfName(ref name) = nla {
                if name == link_name {
                    let req = handle
                        .address()
                        .del(link.header.index, ip.ip(), ip.prefix());
                    match req.execute().wait() {
                        Ok(()) => println!("done"),
                        Err(e) => eprintln!("error: {}", e),
                    }
                    return;
                }
            }
        }
    }
    eprintln!("link {} not found", link_name);
}
/// Prints usage instructions for the `del_address` example to stderr.
fn usage() {
    eprintln!(
        "usage:
cargo run --example del_address -- <link_name> <ip_address>
Note that you need to run this program as root. Instead of running cargo as root,
build the example normally:
cd rtnetlink ; cargo build --example del_address
Then find the binary in the target directory:
cd ../target/debug/example ; sudo ./del_address <link_name> <ip_address>"
    );
}
|
use super::types::{BigNum, DoubleBigNum, GroupG1};
use super::ECCurve::big::{BASEBITS, MODBYTES as curve_MODBYTES, NLEN as curve_NLEN};
use super::ECCurve::rom;
/// Byte size of a field element, re-exported from the selected curve.
pub const MODBYTES: usize = curve_MODBYTES;
/// Number of limbs in a `BigNum`, re-exported from the selected curve.
pub const NLEN: usize = curve_NLEN;
/// Bits stored per `BigNum` limb.
pub const BIG_NUM_BITS: usize = BASEBITS;
/// Serialized size of a field-order element, in bytes.
pub const FIELD_ORDER_ELEMENT_SIZE: usize = MODBYTES;
/// Serialized size of a curve-order element, in bytes.
/// All currently supported curves use 32-byte (256-bit) order elements, so the
/// four identical per-feature declarations are collapsed into one `cfg(any(..))`.
#[cfg(any(
    feature = "bls381",
    feature = "bn254",
    feature = "secp256k1",
    feature = "ed25519"
))]
pub const CURVE_ORDER_ELEMENT_SIZE: usize = 32;
// Byte size of element in group G1, 1 extra byte for compression flag
pub const GROUP_G1_SIZE: usize = (2 * MODBYTES + 1) as usize;
/// Field modulus, taken from the curve's ROM constants.
pub const MODULUS: BigNum = BigNum { w: rom::MODULUS };
/// Order of the curve (sub)group, taken from the curve's ROM constants.
pub const CURVE_ORDER: BigNum = BigNum { w: rom::CURVE_ORDER };
/// The field element zero.
pub const FIELD_ELEMENT_ZERO: BigNum = BigNum { w: [0; NLEN] };
lazy_static! {
    /// Generator point of group G1.
    pub static ref GENERATOR_G1: GroupG1 = GroupG1::generator();
    /// Bit length used for Barrett reduction.
    /// NOTE(review): this is `MODULUS.nbits()` while `BARRETT_REDC_U` /
    /// `BARRETT_REDC_V` below use `CURVE_ORDER.nbits()` — confirm the
    /// mismatch is intentional.
    pub static ref BARRETT_REDC_K: usize = MODULUS.nbits();
    /// Barrett reduction constant u = floor(2^(2k) / CURVE_ORDER),
    /// with k = CURVE_ORDER.nbits().
    pub static ref BARRETT_REDC_U: BigNum = {
        let k = CURVE_ORDER.nbits();
        let mut u = DoubleBigNum::new();
        u.w[0] = 1;
        // `u.shl(2*k)` crashes, so perform shl(k) twice
        u.shl(k);
        u.shl(k);
        // div returns floored value
        u.div(&CURVE_ORDER)
    };
    /// Barrett reduction constant v = 2^(k+1), with k = CURVE_ORDER.nbits().
    pub static ref BARRETT_REDC_V: BigNum = {
        let k = CURVE_ORDER.nbits();
        let mut v = BigNum::new_int(1isize);
        v.shl(k+1);
        v
    };
}
#[cfg(any(feature = "bls381", feature = "bn254"))]
pub use crate::types_g2::{GENERATOR_G2, GROUP_G2_SIZE, GROUP_GT_SIZE};
|
use std::{fs, io};
use unicode_segmentation::UnicodeSegmentation;
/// Renames entries in the current directory: any name whose second grapheme
/// cluster is `_` (e.g. `1_foo`) gets a `0` prepended (becoming `01_foo`) —
/// presumably to zero-pad single-digit prefixes so they sort correctly.
fn main() -> std::io::Result<()> {
    // Collect every entry path up front, failing fast on any read error.
    let entries = fs::read_dir(".")?
        .map(|res| res.map(|e| e.path()))
        .collect::<Result<Vec<_>, io::Error>>()?;
    entries.iter().for_each(|e| {
        // NOTE(review): these `unwrap`s panic on paths without a file name or
        // with non-UTF-8 names — acceptable for a one-off script, but known.
        let name = e.file_name().unwrap().to_str().unwrap();
        let first_two = take_first_graphemes(name, 2);
        // "Second grapheme is '_'" == last grapheme of the first two.
        if take_last_grapheme(&first_two) == "_" {
            fs::rename(&name, ["0", name].join("")).unwrap_or_else(|err|{
                println!("Error during rename action: {:?}", err)
            });
        }
    });
    Ok(())
}
/// Returns the first `n` grapheme clusters of `s` as an owned `String`
/// (the whole string if it has fewer than `n` clusters).
pub fn take_first_graphemes(s: &str, n: usize) -> String {
    // Collect straight into a String; the previous Vec<&str> + join("")
    // built an intermediate vector for nothing.
    s.graphemes(true).take(n).collect()
}
pub fn take_last_grapheme(word: &str) -> String {
let s = String::from(word);
let last = s.graphemes(true).last();
String::from(match last {
Some(letter) => letter,
None => "",
})
}
|
/// The compile-time path of this source file (the expansion of `file!()`).
pub const INCLUDED: &str = file!();
|
pub mod dictionary;
pub mod hint_table;
pub mod utils;
use self::dictionary::make_dict;
use self::hint_table::make_hint_table;
use self::utils::{load_new_data, load_new_data_drop};
use crate::Result;
use ez_io::WriteE;
use std::io::{Cursor, Seek, SeekFrom, Write};
// Base dictionary size; the decode dictionary holds DICT_LEN * 2 entries.
const DICT_LEN: usize = 256;
// Number of bits consumed per hint-table lookup.
const HINT_BITS: usize = 10;
/// In case the data is compressed, we need some extra info to decompress it.
#[derive(Clone)]
pub struct CompressedData {
    /// Starts where hdr_offset in PacData starts
    pub data: Vec<u8>,
    /// One entry per compressed region inside `data`.
    pub info: Vec<CompressInfo>,
    /// Total size of all regions once decompressed (used to pre-size output).
    pub total_decompressed_size: usize,
}
/// Location and decompressed size of one compressed region.
#[derive(Clone)]
pub struct CompressInfo {
    /// Offset of the region within [`CompressedData::data`].
    pub offset: usize,
    pub decompressed_size: usize,
}
impl CompressedData {
    /// Decompresses data in .pac files.
    ///
    /// For each [`CompressInfo`] region it rebuilds the decode dictionary and
    /// hint table, then bit-unpacks the payload byte by byte. Regions whose
    /// dictionary collapses to a single byte value (`dict_result <= 255`)
    /// are emitted as that byte repeated `decompressed_size` times.
    pub fn decompress(&self) -> Result<Vec<u8>> {
        // Create Cursor for Input
        let reader = &mut Cursor::new(&self.data);
        // Create vector for output
        let mut out = Vec::with_capacity(self.total_decompressed_size);
        // Create Cursor for output
        let writer = &mut Cursor::new(&mut out);
        // Init Dict and Hints (reused across regions; rebuilt per region)
        let mut dict = [0u16; DICT_LEN * 2];
        let mut hints = [[0u16; 2]; 1 << HINT_BITS];
        // Process the data
        for info in &self.info {
            // Count how many bytes we wrote
            let mut written_bytes = 0usize;
            // Go to location specified by PacInfo
            reader.seek(SeekFrom::Start(
                info.offset as u64, // Lossy
            ))?;
            // Make the dict and values. `pak_k` is the bit buffer, `pak_m`
            // the number of valid bits remaining in it.
            let mut pak_k = 0;
            let mut pak_m = 0;
            let dict_result = make_dict(&mut dict, &mut 256, &mut pak_m, &mut pak_k, reader);
            // Check if data is always the same value
            if dict_result > 255 {
                // Make the hints
                make_hint_table(&dict, &mut hints);
                loop {
                    // decode_rep: refill the bit buffer when short
                    if pak_m < HINT_BITS as u32 {
                        load_new_data(reader, &mut pak_k, &mut pak_m)?;
                    }
                    // test_hint_bits: look up the next HINT_BITS bits
                    pak_m -= HINT_BITS as u32;
                    let hints_index = (pak_k >> (pak_m & 255)) & ((1 << HINT_BITS) - 1);
                    let mut read_value = u32::from(hints[hints_index as usize][0]);
                    pak_m += u32::from(hints[hints_index as usize][1]);
                    // Values > 255 are internal dictionary nodes: walk one bit
                    // at a time until a literal byte (<= 255) is reached.
                    if read_value > 255 {
                        loop {
                            // search_ch_rep
                            if pak_m != 0 {
                                pak_m -= 1;
                            } else {
                                load_new_data_drop(reader, &mut pak_k, &mut pak_m)?;
                            }
                            // test_hbit
                            let bit_test = (pak_k >> (pak_m & 255)) & 1;
                            let index = 2 * read_value - 512 + bit_test;
                            read_value = u32::from(dict[index as usize]);
                            if read_value <= 255 {
                                break;
                            }
                        }
                    }
                    // put_ch
                    writer.write_to_u8(read_value as u8)?;
                    written_bytes += 1;
                    if written_bytes >= info.decompressed_size {
                        break;
                    }
                }
            } else {
                // This part of the data is the same byte repeated, write the output
                writer.write_all(&vec![dict_result as u8; info.decompressed_size])?;
            }
        }
        Ok(out)
    }
}
|
/// HUST OS Lab2 Implementation in Rust
use std::{
thread,
sync::{Mutex, Arc},
};
/// Number of competing buyer threads.
const THREAD_NUM: i32 = 5;
/// Total number of tickets available for sale.
const TICKET_NUM: i32 = 100;
/// OS lab exercise: several threads race to sell a shared pool of tickets,
/// serialised through a mutex-protected counter.
fn main() {
    println!("Welcome to hust os lab in rust");
    // Remaining-ticket counter, shared across all buyer threads.
    let remaining = Arc::new(Mutex::new(TICKET_NUM));
    let handles: Vec<_> = (0..THREAD_NUM)
        .map(|id| {
            let remaining = Arc::clone(&remaining);
            thread::spawn(move || {
                let mut sold = 0;
                loop {
                    // Inner scope bounds the lock guard's lifetime so the
                    // mutex is released before any work between purchases.
                    {
                        let mut left = remaining.lock().unwrap();
                        if *left <= 0 {
                            break;
                        }
                        *left -= 1;
                        sold += 1;
                        println!("Thread {} buy a ticket, remain {} tickets.", id, *left);
                    }
                }
                println!("Thread {} exit with {} tickets", id, sold);
            })
        })
        .collect();
    // Wait for every buyer to finish before reporting the final count.
    for handle in handles {
        handle.join().unwrap();
    }
    println!("Main thread exit, remain {} tickets.", *remaining.lock().unwrap());
}
|
use std::collections::{HashMap, HashSet};
use anyhow::Context;
use web3::{
futures::future::try_join_all,
types::{FilterBuilder, Transaction, TransactionId, U256},
};
use crate::core::Chain;
use crate::ethereum::{
contract::{REGISTER_MEMORY_PAGE_FUNCTION, STATE_TRANSITION_FACT_EVENT},
log::{
BackwardFetchError, BackwardLogFetcher, EitherMetaLog, MemoryPageFactContinuousLog,
MemoryPagesHashesLog, StateTransitionFactLog, StateUpdateLog,
},
state_update::RetrieveStateUpdateError,
transport::EthereumTransport,
};
/// Retrieves the [StateTransitionFactLog] associated with the given [StateUpdateLog].
///
/// Returns `StateTransitionFactNotFound` when no log in the same block
/// matches the state update's block and transaction.
pub async fn retrieve_transition_fact(
    transport: &impl EthereumTransport,
    state_update: StateUpdateLog,
    chain: Chain,
) -> Result<StateTransitionFactLog, RetrieveStateUpdateError> {
    // StateTransitionFactLog and StateUpdateLog are always emitted
    // as pairs. So we query the same block.
    let addresses = crate::ethereum::contract::addresses(chain);
    let filter = FilterBuilder::default()
        .address(vec![addresses.core])
        .topics(
            Some(vec![STATE_TRANSITION_FACT_EVENT.signature()]),
            None,
            None,
            None,
        )
        .block_hash(state_update.origin.block.hash.0)
        .build();
    let logs = transport.logs(filter).await?;
    // Match on both block and transaction so logs emitted by unrelated
    // transactions in the same block are skipped.
    for log in logs {
        let log = StateTransitionFactLog::try_from(log)?;
        if log.origin.block == state_update.origin.block
            && log.origin.transaction == state_update.origin.transaction
        {
            return Ok(log);
        }
    }
    Err(RetrieveStateUpdateError::StateTransitionFactNotFound)
}
/// Retrieves the [MemoryPagesHashesLog] associated with the given [StateTransitionFactLog].
///
/// Scans backwards from `fact` until a hashes log with a matching fact hash
/// is found; reaching genesis maps to `MemoryPageHashesNotFound`.
pub async fn retrieve_mempage_hashes(
    transport: &impl EthereumTransport,
    fact: StateTransitionFactLog,
    chain: Chain,
) -> Result<MemoryPagesHashesLog, RetrieveStateUpdateError> {
    let fact_hash = fact.fact_hash;
    let mut fetcher = BackwardLogFetcher::<StateTransitionFactLog, MemoryPagesHashesLog>::new(
        EitherMetaLog::Left(fact),
        chain,
    );
    loop {
        use RetrieveStateUpdateError::*;
        // Translate fetcher errors into this function's error space.
        let logs = match fetcher.fetch(transport).await {
            Ok(logs) => logs,
            Err(BackwardFetchError::GenesisReached) => return Err(MemoryPageHashesNotFound),
            Err(BackwardFetchError::Reorg) => return Err(Reorg),
            Err(BackwardFetchError::Other(other)) => return Err(Other(other)),
        };
        // Only the Right (MemoryPagesHashesLog) entries can match.
        for log in logs {
            if let EitherMetaLog::Right(mempage_hashes) = log {
                if fact_hash == mempage_hashes.hash {
                    return Ok(mempage_hashes);
                }
            }
        }
    }
}
/// Retrieves the list of [MemoryPageFactContinuousLog] associated with the given [MemoryPagesHashesLog].
///
/// Scans backwards until every hash listed by `mempage_hashes` has been
/// matched; results are returned in the hashes' original order.
pub async fn retrieve_memory_page_logs(
    transport: &impl EthereumTransport,
    mempage_hashes: MemoryPagesHashesLog,
    chain: Chain,
) -> Result<Vec<MemoryPageFactContinuousLog>, RetrieveStateUpdateError> {
    let hashes = mempage_hashes.mempage_hashes.clone();
    // Hashes still unmatched, and matched logs keyed by hash.
    let mut required_hashes = hashes.iter().cloned().collect::<HashSet<_>>();
    let mut found_hashes = HashMap::with_capacity(hashes.len());
    let mut fetcher = BackwardLogFetcher::<MemoryPagesHashesLog, MemoryPageFactContinuousLog>::new(
        EitherMetaLog::Left(mempage_hashes),
        chain,
    );
    loop {
        use RetrieveStateUpdateError::*;
        let logs = match fetcher.fetch(transport).await {
            Ok(logs) => logs,
            Err(BackwardFetchError::GenesisReached) => return Err(MemoryPageLogNotFound),
            Err(BackwardFetchError::Reorg) => return Err(Reorg),
            Err(BackwardFetchError::Other(other)) => return Err(Other(other)),
        };
        for log in logs {
            if let EitherMetaLog::Right(mempage_log) = log {
                // `remove` is true only for still-needed hashes, so the most
                // recent matching log (scanning backwards) wins.
                if required_hashes.remove(&mempage_log.hash) {
                    found_hashes.insert(mempage_log.hash, mempage_log);
                }
            }
        }
        if required_hashes.is_empty() {
            break;
        }
    }
    // Re-emit the found logs in the hashes' original order.
    // NOTE(review): a duplicate hash in `hashes` would make this `expect`
    // panic on the second occurrence — confirm duplicates cannot occur.
    let mempages = hashes
        .into_iter()
        .map(|hash| {
            found_hashes
                .remove(&hash)
                .expect("All required memory pages should have been found")
        })
        .collect::<Vec<_>>();
    Ok(mempages)
}
/// Retrieves and parses the transaction data of the given [MemoryPageFactContinuousLog]'s.
///
/// These can be parsed into a [StateUpdate](crate::ethereum::state_update::StateUpdate).
pub async fn retrieve_mempage_transaction_data(
    transport: &impl EthereumTransport,
    mempages: Vec<MemoryPageFactContinuousLog>,
) -> Result<Vec<Vec<U256>>, RetrieveStateUpdateError> {
    // Fetch all transactions concurrently; try_join_all preserves order.
    let fut = mempages
        .iter()
        .map(|page| transport.transaction(TransactionId::Hash(page.origin.transaction.hash.0)))
        .collect::<Vec<_>>();
    let transactions = try_join_all(fut)
        .await
        .context("failed to retrieve memory page transactions")?;
    let mut data = Vec::with_capacity(mempages.len());
    for tx in transactions {
        // A missing transaction means the log's origin no longer exists.
        let tx = tx.ok_or(RetrieveStateUpdateError::MemoryPageTransactionNotFound)?;
        data.push(decode_mempage_transaction(tx)?);
    }
    Ok(data)
}
/// Decodes the memory-page `values` array (the 2nd ABI token) out of a
/// register-memory-page transaction's input data.
fn decode_mempage_transaction(transaction: Transaction) -> anyhow::Result<Vec<U256>> {
    // The first 4 bytes of data represent the short-signature of the function.
    // These must exist in order to be valid. We should compare the signature as
    // well, but this requires web3 to bump ethabi to v15.
    anyhow::ensure!(
        transaction.input.0.len() >= 4,
        "memory page transaction input has incomplete signature"
    );
    // The mempage data is stored in 'values' (2nd token), which is an array of U256.
    //
    // The complete structure is defined in the mempage json ABI.
    // `decode_input` wants the raw data, excluding the short-signature.
    // The indexing is safe due to the `ensure` above.
    REGISTER_MEMORY_PAGE_FUNCTION
        .decode_input(&transaction.input.0[4..])
        .context("mempage input decoding failed")?
        .get(1)
        .cloned()
        .context("missing values array field")?
        .into_array()
        .context("values field could not be cast to an array")?
        .iter()
        .map(|t| {
            t.clone()
                .into_uint()
                .context("values element could not be cast to U256")
        })
        .collect()
}
|
/// A color from the 256-entry ANSI terminal palette, stored as its raw index.
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AnsiColor(u8);
impl AnsiColor {
    /// Wraps a raw palette index as an `AnsiColor`.
    #[inline(always)]
    pub const fn from_num(color: u8) -> Self {
        Self(color)
    }
    /// Returns the raw palette index this color wraps.
    #[inline(always)]
    pub const fn as_num(self) -> u8 {
        let AnsiColor(index) = self;
        index
    }
}
|
use crate::constants::CONNECTORX_PROTOCOL;
use crate::errors::{ConnectorXError, Result};
use anyhow::anyhow;
use fehler::throws;
use std::convert::TryFrom;
use url::Url;
/// Database kinds connectorx can read from.
#[derive(Debug, Clone)]
pub enum SourceType {
    Postgres,
    SQLite,
    MySQL,
    MsSQL,
    Oracle,
    BigQuery,
    DuckDB,
}
/// A parsed source connection: the database kind, the cleaned connection URL
/// (connectorx-specific query parameters stripped), and the wire protocol
/// to use (defaults to "binary").
#[derive(Debug, Clone)]
pub struct SourceConn {
    pub ty: SourceType,
    pub conn: Url,
    pub proto: String,
}
impl TryFrom<&str> for SourceConn {
    type Error = ConnectorXError;
    /// Parses a connection string into a [`SourceConn`], extracting the
    /// connectorx protocol query parameter and stripping it from the URL.
    fn try_from(conn: &str) -> Result<SourceConn> {
        let old_url = Url::parse(conn).map_err(|e| anyhow!("parse error: {}", e))?;
        // parse connectorx protocol, defaulting to "binary" when absent
        let proto = match old_url.query_pairs().find(|p| p.0 == CONNECTORX_PROTOCOL) {
            // Cow<str> -> String directly; the old `to_owned().to_string()`
            // allocated twice.
            Some((_, proto)) => proto.to_string(),
            None => "binary".to_string(),
        };
        // create url by removing connectorx protocol
        let stripped_query: Vec<(_, _)> = old_url
            .query_pairs()
            .filter(|p| &*p.0 != CONNECTORX_PROTOCOL)
            .collect();
        let mut url = old_url.clone();
        url.set_query(None);
        for (key, value) in stripped_query {
            // &Cow<str> deref-coerces to &str; no intermediate Strings needed.
            url.query_pairs_mut().append_pair(&key, &value);
        }
        // users from sqlalchemy may set engine in connection url (e.g. mssql+pymssql://...)
        // only for compatablility, we don't use the same engine
        // (split() always yields at least one element, so next() cannot be None)
        match url.scheme().split('+').next().unwrap_or_default() {
            "postgres" | "postgresql" => Ok(SourceConn::new(SourceType::Postgres, url, proto)),
            "sqlite" => Ok(SourceConn::new(SourceType::SQLite, url, proto)),
            "mysql" => Ok(SourceConn::new(SourceType::MySQL, url, proto)),
            "mssql" => Ok(SourceConn::new(SourceType::MsSQL, url, proto)),
            "oracle" => Ok(SourceConn::new(SourceType::Oracle, url, proto)),
            "bigquery" => Ok(SourceConn::new(SourceType::BigQuery, url, proto)),
            "duckdb" => Ok(SourceConn::new(SourceType::DuckDB, url, proto)),
            _ => unimplemented!("Connection: {} not supported!", conn),
        }
    }
}
impl SourceConn {
    /// Assembles a [`SourceConn`] from its parts.
    pub fn new(ty: SourceType, conn: Url, proto: String) -> Self {
        Self { ty, conn, proto }
    }
    /// Overrides the wire protocol chosen at parse time.
    pub fn set_protocol(&mut self, protocol: &str) {
        self.proto = protocol.to_string();
    }
}
/// Parses `conn` into a [`SourceConn`], optionally overriding the protocol
/// that was embedded in (or defaulted from) the connection string.
#[throws(ConnectorXError)]
pub fn parse_source(conn: &str, protocol: Option<&str>) -> SourceConn {
    let mut source_conn = SourceConn::try_from(conn)?;
    // An explicitly supplied protocol takes precedence over the parsed one.
    // (`if let` replaces the old `match` with an empty None arm.)
    if let Some(p) = protocol {
        source_conn.set_protocol(p);
    }
    source_conn
}
|
/// Rounds `x` up to the nearest multiple of `divisor`.
///
/// The previous mask-based form `x + (divisor - 1) & !(divisor - 1)` is only
/// correct when `divisor` is a power of two; the div/mul form below matches
/// it exactly for powers of two and is also correct for every other divisor.
///
/// # Panics
/// Panics (in debug builds, via arithmetic checks) when `divisor` is 0 or
/// `x + divisor - 1` overflows `usize` — same boundary behavior class as the
/// original.
pub const fn round_to_multiple_of(divisor: usize, x: usize) -> usize {
    ((x + divisor - 1) / divisor) * divisor
}
|
#![no_std]
#![no_main]
#![feature(asm)]
#![feature(abi_efiapi)]
extern crate rlibc;
extern crate panic_halt;
mod fonts;
mod console;
mod graphics;
mod pci;
mod mouse;
use core::fmt::Write;
use mikan::{FrameBufferConfig, PixelFormat};
use graphics::{write_pixel, write_string, PixelColor};
use console::{Console};
/// Global kernel state: framebuffer configuration plus the text console.
struct Context {
    /// Set exactly once at the top of `kernel_main`, before any drawing.
    fb_config: Option<FrameBufferConfig>,
    console: Console,
}
impl Context {
    /// Returns the framebuffer config.
    /// Panics if called before `kernel_main` has stored it.
    fn fb_config(&self) -> &FrameBufferConfig {
        self.fb_config.as_ref().unwrap()
    }
}
/// Kernel-wide singleton, mutated through `unsafe` from `kernel_main`.
/// The two console colors are (0,0,0) and (255,255,255) — presumably
/// foreground/background; confirm against `Console::new`'s parameter order.
static mut G_CONTEXT: Context = Context {
    fb_config: None,
    console: Console::new(PixelColor { r: 0, b: 0, g: 0 }, PixelColor { r: 255, b: 255, g: 255}),
};
/// Kernel entry point, invoked by the bootloader with the framebuffer
/// description. Scans the PCI bus, prints one line per device found, then
/// parks the CPU in a `hlt` loop — never returns.
#[no_mangle]
extern "efiapi" fn kernel_main(fb_config: FrameBufferConfig) -> ! {
    // SAFETY-ish: `static mut` access; single-threaded at this point and
    // G_CONTEXT is only touched from this entry path.
    unsafe { G_CONTEXT.fb_config = Some(fb_config); }
    unsafe { G_CONTEXT.console.clear(); }
    // Enumerate PCI devices and dump their config-space identifiers.
    let mut bus_scan = pci::ScanPciDevices::new();
    bus_scan.scan_devices().unwrap();
    for i in 0..bus_scan.num_devices {
        let dev = bus_scan.result[i];
        let config = dev.config;
        printk!("{}.{}.{} vend:0x{:x} devid:0x{:x} base:0x{:x} sub:0x{:x} interface:0x{:x}",
            dev.bus, dev.device, dev.function, config.vendor_id, config.device_id, config.base_class, config.sub_class, config.interface);
    }
    // for x in 0..fb_config.horizontal_resolution {
    //     for y in 0..fb_config.vertical_resolution {
    //         write_pixel( x, y, PixelColor { r: 255, g: 255, b: 255 });
    //     }
    // }
    // for x in 0..200 {
    //     for y in 0..100 {
    //         write_pixel( 100+x, 100+y, PixelColor { r: 0, g: 0, b: 255 });
    //     }
    // }
    // write_string("akira developer", 300, 300, PixelColor { r: 255, g: 0, b: 0 });
    // Nothing left to do: halt until the next interrupt, forever.
    loop {
        unsafe {
            asm!("hlt")
        }
    }
}
use definitions::{Expression, LiteralValue, BinaryOperator, UnaryOperator, ColumnDef};
use table::{Table, TableRow, TableHeader, get_column};
use std::cell::Cell;
/// Outcome of evaluating an [`Expression`] against a row.
#[derive(PartialEq, Clone)]
pub enum ExpressionResult {
    /// A concrete literal value.
    Value(LiteralValue),
    /// A resolved column definition (presumably produced when the evaluator
    /// runs in column-def mode — see `with_column_def`).
    ColumnDef(ColumnDef),
    Null,
}
impl ExpressionResult {
    /// Negates a `Value` via `LiteralValue::neg`; any other variant is
    /// returned unchanged (as a clone).
    pub fn neg(&self) -> ExpressionResult {
        match self {
            &ExpressionResult::Value(ref v) => ExpressionResult::Value(v.neg()),
            _ => self.clone(),
        }
    }
}
/// Evaluates SQL expressions against a single table row.
pub struct ExpressionEvaluator<'a, 'b> {
    // FIXME wtf am I doing?!?!?!
    /// The row expressions are evaluated against.
    row: &'a TableRow,
    /// Header describing the row's columns.
    head: &'a TableHeader,
    /// Extra tables supplied via `with_tables`, when provided.
    tables: Option<Vec<&'b Table>>,
    /// Set by `with_column_def`; toggles column-definition resolution.
    get_column_def: bool,
    /// Set by `as_column_alias`; toggles alias handling.
    as_column_alias: bool,
    /// Interior-mutable flag used during evaluation (see the impl below).
    order_pass: Cell<bool>,
}
impl<'a, 'b> ExpressionEvaluator<'a, 'b> {
    /// Creates an evaluator over one `row` and the `head`er describing it.
    /// Joins, column-def mode and alias mode are opted into via the
    /// builder methods below.
    pub fn new(row: &'a TableRow, head: &'a TableHeader) -> ExpressionEvaluator<'a, 'b> {
        ExpressionEvaluator {
            row: row,
            head: head,
            tables: None,
            get_column_def: false,
            as_column_alias: false,
            // Set once the first binary expression has been rebalanced.
            order_pass: Cell::new(false),
        }
    }
    /// Builder: make column references resolve to their `ColumnDef`
    /// instead of the row's data.
    pub fn with_column_def(&'a mut self) -> &mut ExpressionEvaluator<'a, 'b> {
        self.get_column_def = true;
        self
    }
    /// Builder: supply the tables used to resolve table-qualified column
    /// references and cross-table lookups.
    pub fn with_tables(&'a mut self, tables: Vec<&'b Table>) -> &mut ExpressionEvaluator<'a, 'b> {
        self.tables = Some(tables);
        self
    }
    /// Builder: make bare column names evaluate to their ordinal position
    /// in the header rather than to their value.
    pub fn as_column_alias(&'a mut self) -> &mut ExpressionEvaluator<'a, 'b> {
        self.as_column_alias = true;
        self
    }
    /// Rebalances a right-leaning chain of binary operators so that higher
    /// precedence (`ord_val`) binds tighter before evaluation.
    /// NOTE(review): relies on `unwrap_binary_operator` yielding
    /// `BinaryOperator::Null` for non-binary expressions — confirm. The
    /// operand swap in the first branch (`left2`, `left1`) looks deliberate
    /// for the tree rotation but deserves a dedicated test.
    pub fn order_of_operations(&'a self, expr: &Expression) -> Expression {
        let (b1, left1, right1) = expr.unwrap_binary_operator();
        let (b2, left2, right2) = right1.unwrap_binary_operator();
        if b1.ord_val() < b2.ord_val() {
            // b1 binds tighter: rotate so it is evaluated below b2.
            let right2 = self.order_of_operations(&right2);
            let new_expr_child = Expression::BinaryOperator((b1, box left2, box left1));
            let new_expr_parent = Expression::BinaryOperator((b2, box right2, box new_expr_child));
            return new_expr_parent;
        } else if b1.ord_val() == b2.ord_val() && b1 > b2 {
            // Same precedence class: tie-break on the operators' ordering.
            let right2 = self.order_of_operations(&right2);
            let new_expr_child = Expression::BinaryOperator((b1, box left1, box left2));
            let new_expr_parent = Expression::BinaryOperator((b2, box right2, box new_expr_child));
            return new_expr_parent;
        } else if b2 != BinaryOperator::Null {
            // Shape already correct; keep rebalancing down the right spine.
            let right2 = self.order_of_operations(&right2);
            let new_expr_child = Expression::BinaryOperator((b2, box left2, box right2));
            let new_expr_parent = Expression::BinaryOperator((b1, box left1, box new_expr_child));
            return new_expr_parent;
        } else {
            // Right side is not a binary chain: nothing to rebalance.
            return expr.clone();
        }
    }
    /// Evaluates `expr` against the bound row. Binary-operator chains are
    /// precedence-rebalanced exactly once per evaluator; the `order_pass`
    /// cell records that so nested recursive calls skip the rebalance.
    pub fn eval_expr(&'a self, expr: &Expression) -> ExpressionResult {
        match expr {
            &Expression::LiteralValue(ref value) => ExpressionResult::Value(value.clone()),
            &Expression::TableName(..) | &Expression::ColumnName(..) => self.eval_column_name(expr, None, None),
            &Expression::BinaryOperator((b, ref expr1, ref expr2)) => {
                if !self.order_pass.get() {
                    debug!("order_of_op before: {:?}", expr);
                    let expr = self.order_of_operations(expr);
                    debug!("order_of_op after: {:?}", expr);
                    self.order_pass.set(true);
                    self.eval_expr(&expr)
                } else {
                    self.eval_binary_operator(b, &**expr1, &**expr2)
                }
            }
            &Expression::UnaryOperator((u, ref exp)) => self.eval_unary_operator(u, &**exp),
            _ => ExpressionResult::Null,
        }
    }
    /// Evaluates `expr` and coerces the result to `bool`; anything that is
    /// not a boolean literal evaluates to `false`.
    pub fn eval_bool(&'a self, expr: &Expression) -> bool {
        match self.eval_expr(expr) {
            ExpressionResult::Value(value) => {
                match value {
                    LiteralValue::Boolean(b) => b,
                    _ => false,
                }
            }
            _ => false,
        }
    }
    /// Applies one binary operator to its (recursively evaluated) operands.
    /// Non-`Value` operands decay to `LiteralValue::Null` via
    /// `result_to_literal`, so operator impls on `LiteralValue` decide the
    /// null semantics.
    fn eval_binary_operator(&'a self,
                            operator: BinaryOperator,
                            expr1: &Expression,
                            expr2: &Expression) -> ExpressionResult {
        match operator {
            // Comparisons delegate to LiteralValue's ordering helpers.
            BinaryOperator::Less => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left.lt(&right))
            }
            BinaryOperator::LessEq => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left.le(&right))
            }
            BinaryOperator::Greater => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left.gt(&right))
            }
            BinaryOperator::GreaterEq => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left.ge(&right))
            }
            // Bitwise operators use LiteralValue's operator overloads.
            BinaryOperator::LShift => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left << right)
            }
            BinaryOperator::RShift => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left >> right)
            }
            BinaryOperator::BitAnd => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left & right)
            }
            BinaryOperator::BitOr => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left | right)
            }
            // (Not-)equals compares the ExpressionResults directly, so
            // ColumnDef/Null results also take part in equality.
            BinaryOperator::Equals => {
                ExpressionResult::Value(LiteralValue::Boolean(self.eval_expr(expr1) == self.eval_expr(expr2)))
            }
            BinaryOperator::NotEquals => {
                ExpressionResult::Value(LiteralValue::Boolean(self.eval_expr(expr1) != self.eval_expr(expr2)))
            }
            // Logical AND/OR evaluate both sides (no short-circuiting).
            BinaryOperator::And => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(LiteralValue::Boolean(left.to_bool() && right.to_bool()))
            }
            BinaryOperator::Or => {
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(LiteralValue::Boolean(left.to_bool() || right.to_bool()))
            }
            BinaryOperator::Plus => {
                debug!("{:?} + {:?}", expr1, expr2);
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left + right)
            }
            BinaryOperator::Minus => {
                debug!("{:?} - {:?}", expr1, expr2);
                let left = result_to_literal(self.eval_expr(expr1));
                // Subtraction is implemented as addition of the negated right
                // operand; `neg` rewrites the expression tree itself so the
                // negation distributes through nested operators.
                let right = result_to_literal(self.eval_expr(&self.neg(expr2)));
                ExpressionResult::Value(left + right)
            }
            BinaryOperator::Mult => {
                debug!("{:?} * {:?}", expr1, expr2);
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left * right)
            }
            BinaryOperator::Divide => {
                debug!("{:?} / {:?}", expr1, expr2);
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left / right)
            }
            BinaryOperator::Modulo => {
                debug!("{:?} % {:?}", expr1, expr2);
                let left = result_to_literal(self.eval_expr(expr1));
                let right = result_to_literal(self.eval_expr(expr2));
                ExpressionResult::Value(left % right)
            }
            BinaryOperator::Null => ExpressionResult::Null,
        }
    }
    /// Applies a unary operator to its evaluated operand.
    fn eval_unary_operator(&'a self, operator: UnaryOperator, expr: &Expression) -> ExpressionResult {
        debug!("{:?}", expr);
        match operator {
            UnaryOperator::Plus => self.eval_expr(expr),
            UnaryOperator::Minus => self.eval_expr(expr).neg(),
            UnaryOperator::Not => {
                let lit = result_to_literal(self.eval_expr(expr));
                ExpressionResult::Value(LiteralValue::Boolean(!lit.to_bool()))
            }
            UnaryOperator::BitNeg => {
                let val = result_to_literal(self.eval_expr(expr));
                ExpressionResult::Value(LiteralValue::Integer(!val.to_int()))
            }
        }
    }
    /// Resolves a (possibly table-qualified) column reference. `table` and
    /// `offset` carry the matched table and its column offset once a
    /// `TableName` wrapper has been peeled off.
    /// NOTE(review): `self.tables.clone().unwrap()` panics if a qualified
    /// name is evaluated without `with_tables` having been called.
    fn eval_column_name(&'a self, expr: &Expression, table: Option<&Table>, offset: Option<usize>) -> ExpressionResult {
        match expr {
            &Expression::TableName((ref name, ref expr)) => {
                let mut table_opt: Option<&Table> = None;
                // `0us` is the pre-1.0 `usize` literal suffix; this module
                // appears to target an early nightly (see also `box`).
                let mut offset = 0us;
                for table in self.tables.clone().unwrap().into_iter() {
                    if &table.name == name {
                        table_opt = Some(table);
                        break;
                    }
                    // Joined tables' columns are laid out back-to-back in the
                    // row; skip past each non-matching table's columns.
                    offset = offset + table.header.len();
                }
                if !table_opt.is_some() {
                    return ExpressionResult::Null;
                }
                self.eval_column_name(&**expr, table_opt, Some(offset))
            }
            &Expression::ColumnName(ref name) => self.column_data_or_def(name, table, offset),
            _ => ExpressionResult::Null,
        }
    }
    /// Resolves a bare column name into either its `ColumnDef`
    /// (`get_column_def` mode), its header position (`as_column_alias`
    /// mode), or the row's data for that column.
    fn column_data_or_def(&'a self, name: &String, table: Option<&Table>, offset: Option<usize>) -> ExpressionResult {
        if self.get_column_def {
            if let Some(table) = table {
                // We know which table to grab the def from...
                if let Some(column_def) = table.get_column_def_by_name(name) {
                    return ExpressionResult::ColumnDef(column_def.clone());
                }
            } else {
                // FIXME what if there are _other_ columns with the same name
                // further down? First match across tables wins here.
                if let Some(ref tables) = self.tables {
                    for table in tables.iter() {
                        if let Some(column_def) = table.get_column_def_by_name(name) {
                            return ExpressionResult::ColumnDef(column_def.clone());
                        }
                    }
                }
            }
        } else {
            if self.as_column_alias {
                return ExpressionResult::Value(LiteralValue::Integer(
                    // FIXME here I go with those blind unwraps again...
                    // (panics if `name` is not present in the header)
                    self.head.iter().position(|ref cols| &cols.name == name).unwrap() as isize));
            }
            if let Some(table) = table {
                return ExpressionResult::Value(get_column(name, self.row, &table.header, offset));
            } else if let Some(ref tables) = self.tables {
                // Unqualified name with joined tables: find the first table
                // defining the column, accumulating the row offset on the way.
                let mut offset = 0us;
                for table in tables.iter() {
                    if let Some(_) = table.get_column_def_by_name(name) {
                        return ExpressionResult::Value(get_column(name, self.row, &table.header, Some(offset)));
                    }
                    offset += table.header.len();
                }
            } else {
                return ExpressionResult::Value(get_column(name, self.row, self.head, offset));
            }
        }
        ExpressionResult::Null
    }
    /// Structurally negates an expression tree: integer literals flip sign,
    /// binary/unary operators delegate to their own `neg` (used to turn
    /// subtraction into addition in `eval_binary_operator`).
    fn neg(&'a self, expr: &Expression) -> Expression {
        match expr {
            &Expression::LiteralValue(ref lit) => {
                match lit {
                    &LiteralValue::Integer(i) => Expression::LiteralValue(LiteralValue::Integer(-i)),
                    _ => expr.clone()
                }
            }
            &Expression::BinaryOperator((b, ref expr1, ref expr2)) => Expression::BinaryOperator((b.neg(), box self.neg(&**expr1), box self.neg(&**expr2))),
            &Expression::UnaryOperator((u, ref expr)) => Expression::UnaryOperator((u.neg(), expr.clone())),
            _ => expr.clone()
        }
    }
}
/// Unwraps an evaluation result into a literal; non-value results
/// (column defs, nulls) collapse to `LiteralValue::Null`.
pub fn result_to_literal(result: ExpressionResult) -> LiteralValue {
    if let ExpressionResult::Value(v) = result {
        v
    } else {
        LiteralValue::Null
    }
}
/// Extracts the literal from a literal expression; every other expression
/// form yields `LiteralValue::Null`.
pub fn expr_to_literal(expr: &Expression) -> LiteralValue {
    if let &Expression::LiteralValue(ref literal_value) = expr {
        literal_value.clone()
    } else {
        LiteralValue::Null
    }
}
|
use std::io;
/// Entry point: reads a search depth from stdin, evaluates the opening
/// position with alpha-beta minimax and prints the board plus its score.
///
/// # Panics
/// Panics (with a descriptive message) if stdin cannot be read or the
/// input is not an integer.
pub fn run() {
    println!("Search depth: ");
    let mut input = String::new();
    io::stdin()
        .read_line(&mut input)
        .expect("Failed to read line");
    // Previously a bare `unwrap()`; a malformed depth now explains itself.
    let depth = input
        .trim()
        .parse::<i32>()
        .expect("search depth must be an integer");
    let root = Node::new();
    println!(
        "{:?} {}",
        root.board,
        minimax(root, depth, i8::min_value(), i8::max_value())
    );
}
/// Returns the larger of `a` and `b` (delegates to `Ord::max` instead of a
/// hand-rolled comparison).
fn max(a: i8, b: i8) -> i8 {
    a.max(b)
}
/// Returns the smaller of `a` and `b` (delegates to `Ord::min` instead of a
/// hand-rolled comparison).
fn min(a: i8, b: i8) -> i8 {
    a.min(b)
}
/// Alpha-beta pruned minimax. The human maximizes `Node::value` and the
/// computer minimizes it; search stops at `depth` 0 or a terminal position.
fn minimax(node: Node, depth: i32, mut alpha: i8, mut beta: i8) -> i8 {
    if depth == 0 || node.is_terminal() {
        return node.value();
    }
    match node.player {
        Player::HUMAN => {
            // Maximizing side.
            let mut best = i8::min_value();
            for child in node.get_children() {
                best = max(best, minimax(child, depth - 1, alpha, beta));
                alpha = max(alpha, best);
                if alpha >= beta {
                    break; // beta cut-off
                }
            }
            best
        }
        Player::COMPUTER => {
            // Minimizing side.
            let mut best = i8::max_value();
            for child in node.get_children() {
                best = min(best, minimax(child, depth - 1, alpha, beta));
                beta = min(beta, best);
                if alpha >= beta {
                    break; // alpha cut-off
                }
            }
            best
        }
    }
}
/// The two sides of the game.
#[derive(PartialEq, Debug, Copy, Clone)]
enum Player {
    COMPUTER,
    HUMAN,
}
/// Returns the opponent of `player`.
fn next_player(player: Player) -> Player {
    if player == Player::HUMAN {
        Player::COMPUTER
    } else {
        Player::HUMAN
    }
}
/// One game position.
#[derive(Copy, Clone)]
struct Node {
    // Number of moves played to reach this position.
    move_count: i32,
    // Pits 0-5: human side; 6: human store; 7-12: computer side;
    // 13: computer store (see the diagram in get_adjacent_tile_index).
    board: [i8; 14],
    // Side to move.
    player: Player,
}
impl Node {
    /// Index of the human player's store (scoring tile).
    const HUMAN_TILE_INDEX: usize = 6;
    /// Index of the computer's store (scoring tile).
    const COMPUTER_TILE_INDEX: usize = 13;
    /// Opening position: four stones in each of the twelve small pits,
    /// empty stores, human to move.
    pub fn new() -> Node {
        Node {
            move_count: 0,
            board: [4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 0],
            player: Player::HUMAN,
        }
    }
    /// Explicit copy of this node. `Node` is `Copy`, so a bitwise copy
    /// replaces the previous field-by-field rebuild via `Node::new()`.
    pub fn clone(&self) -> Node {
        *self
    }
    /// Store index belonging to the side to move.
    fn current_player_tile_index(&self) -> usize {
        match self.player {
            Player::HUMAN => Node::HUMAN_TILE_INDEX,
            Player::COMPUTER => Node::COMPUTER_TILE_INDEX,
        }
    }
    /// Index of the pit directly opposite `tile_index`.
    fn get_adjacent_tile_index(tile_index: usize) -> usize {
        /*
              6
            7   5
            8   4
            9   3
           10   2
           11   1
           12   0
             13
        */
        12 - tile_index
    }
    /// Capture: the mover's store takes the landing stone plus everything
    /// in the opposite pit; both pits are emptied.
    fn steal_pieces(&mut self, tile_index: usize) {
        let adjacent_index = Node::get_adjacent_tile_index(tile_index);
        let player_index = self.current_player_tile_index();
        self.board[player_index] += 1 + self.board[adjacent_index];
        self.board[adjacent_index] = 0;
        self.board[tile_index] = 0;
    }
    /// Sows the stones from `tile_index` counter-clockwise, skipping the
    /// opponent's store, applying the capture rule on the last stone, and
    /// granting another turn when the last stone lands in the mover's store.
    pub fn move_piece(&mut self, tile_index: usize) {
        let mut pieces = self.board[tile_index];
        self.board[tile_index] = 0;
        let mut index = tile_index;
        while pieces > 0 {
            index += 1;
            // Never sow into the opponent's store.
            if (self.player == Player::COMPUTER && index == Node::HUMAN_TILE_INDEX)
                || (self.player == Player::HUMAN && index == Node::COMPUTER_TILE_INDEX)
            {
                index += 1;
            }
            if index >= 14 {
                index = 0
            }
            // Last stone into an empty non-store pit with a non-empty
            // opposite pit triggers a capture.
            if pieces == 1
                && index != Node::COMPUTER_TILE_INDEX
                && index != Node::HUMAN_TILE_INDEX
                && self.board[index] == 0
                && self.board[Node::get_adjacent_tile_index(index)] != 0
            {
                self.steal_pieces(index);
            } else {
                self.board[index] += 1;
            }
            pieces -= 1;
        }
        // Landing in your own store grants another turn; otherwise switch.
        if self.current_player_tile_index() != index {
            self.player = next_player(self.player);
        }
        self.move_count += 1;
    }
    /// Indices of the mover's non-empty pits (their six small pits).
    fn available_moves(&self) -> Vec<usize> {
        let mut moves: Vec<usize> = Vec::new();
        let player_index = self.current_player_tile_index();
        let tiles = (player_index - 6)..player_index;
        for i in tiles {
            if self.board[i] > 0 {
                moves.push(i);
            }
        }
        moves
    }
    /// All positions reachable in one move by the side to move.
    pub fn get_children(&self) -> Vec<Node> {
        let moves = self.available_moves();
        let mut children: Vec<Node> = Vec::new();
        for m in moves {
            let mut child = self.clone();
            child.move_piece(m);
            children.push(child);
        }
        // Previously returned `children.clone()` — a redundant full copy of
        // the vector; the vector itself is moved out instead.
        children
    }
    /// A position is terminal when the side to move has no legal move.
    pub fn is_terminal(&self) -> bool {
        self.available_moves().is_empty()
    }
    /// Evaluation from the human's perspective: human store minus computer
    /// store (positive favors the maximizing player).
    pub fn value(&self) -> i8 {
        self.board[Node::HUMAN_TILE_INDEX] - self.board[Node::COMPUTER_TILE_INDEX]
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Plays a short scripted game and checks the board after each move:
    /// move 2 lands in the human store (extra turn, player stays HUMAN);
    /// move 5 passes the turn; the computer's move 12 ends in an empty pit
    /// and captures the opposite pit into its store.
    #[test]
    fn game_logic() {
        let mut node = Node::new();
        assert_eq!(node.board, [4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 0]);
        assert_eq!(node.player, Player::HUMAN);
        node.move_piece(2);
        assert_eq!(node.board, [4, 4, 0, 5, 5, 5, 1, 4, 4, 4, 4, 4, 4, 0]);
        assert_eq!(node.player, Player::HUMAN);
        node.move_piece(5);
        assert_eq!(node.board, [4, 4, 0, 5, 5, 0, 2, 5, 5, 5, 5, 4, 4, 0]);
        assert_eq!(node.player, Player::COMPUTER);
        node.move_piece(12);
        assert_eq!(node.board, [5, 5, 0, 5, 5, 0, 2, 5, 5, 5, 0, 4, 0, 7]);
    }
    /// In the opening position the human may move from any of their six pits.
    #[test]
    fn available_moves() {
        let node = Node::new();
        assert_eq!(node.available_moves(), vec![0, 1, 2, 3, 4, 5]);
    }
}
|
use crate::{
math::*,
regressor::{
config::Config,
regressor::Regressor,
},
};
/// Generates a chainable builder setter: `builder_field!(alpha, f64)`
/// expands to `pub fn alpha(mut self, alpha: f64) -> Self` that assigns the
/// field and returns the (moved) builder.
macro_rules! builder_field {
    ($field:ident, $field_type:ty) => {
        pub fn $field(mut self, $field: $field_type) -> Self {
            self.$field = $field;
            self
        }
    };
}
/// Regularization term added to the MSE formula.
#[derive(Copy, Clone, Debug)]
pub enum Penalty {
    L1,
    L2,
    None,
}
impl Penalty {
    /// Gradient contribution of the penalty for a single `weight`, scaled
    /// by the regularization strength `alpha`:
    ///
    /// * `L1` — subgradient of `alpha * |w|`: `±alpha` (`-alpha` at `w == 0`).
    /// * `L2` — derivative of `alpha * w²`: `2 * alpha * w`.
    /// * `None` — no regularization.
    pub fn compute(&self, alpha: f64, weight: f64) -> f64 {
        match self {
            Penalty::None => 0f64,
            Penalty::L2 => 2f64 * alpha * weight,
            Penalty::L1 => {
                let sign = if weight > 0f64 { 1f64 } else { -1f64 };
                alpha * sign
            }
        }
    }
}
/// K-Folds cross-validator.
///
/// Provides train and test index splits over a dataset, dividing it into
/// `n` consecutive folds (without shuffling).
///
/// Each fold is used once as a validation set while the remaining `n - 1`
/// folds form the training set.
///
/// # Examples
/// ```ignore
/// let X = Matrix::read("./data/X.csv")?;
/// let y = Vector::read("./data/y.csv")?;
///
/// let folds = KFold::new(5, &X, &y);
/// for (train, test) in folds {
///     let (x_train, y_train) = train;
///     let (x_test, y_test) = test;
/// }
/// ```
#[derive(Clone)]
pub struct KFold<'a> {
    /// Total number of folds.
    pub n: usize,
    // 1-based index of the fold the iterator will yield next.
    batch: usize,
    // Cached number of observations in `X`.
    rows: usize,
    X: &'a Matrix,
    y: &'a Vector,
}
impl<'a> KFold<'a> {
    /// Creates an `n`-fold splitter over observations `X` and targets `y`,
    /// positioned at the first fold.
    pub fn new(n: usize, X: &'a Matrix, y: &'a Vector) -> Self {
        let rows = X.rows();
        Self {
            n,
            batch: 1,
            rows,
            X,
            y,
        }
    }
}
impl<'a> Iterator for KFold<'a> {
    type Item = ((Matrix, Vector), (Matrix, Vector));
    /// Yields `(train, test)` splits until all `n` folds have been served.
    fn next(&mut self) -> Option<Self::Item> {
        if self.batch > self.n {
            return None;
        }
        // Half-open bounds of the current test fold within the row range.
        let lo = self.rows * (self.batch - 1) / self.n;
        let hi = self.rows * self.batch / self.n;
        // Everything outside [lo, hi) belongs to the training set.
        let mut train_idx: Vec<usize> = (0..lo).collect();
        train_idx.extend(hi..self.rows);
        let test_idx: Vec<usize> = (lo..hi).collect();
        self.batch += 1;
        Some((
            slice(self.X, self.y, &train_idx),
            slice(self.X, self.y, &test_idx),
        ))
    }
}
/// Assess the best alpha coefficient for the given regularization penalty.
///
/// # Arguments
///
/// * `X`: Observation matrix.
/// * `y`: Target vector.
/// * `k`: Number of folds in cross-validation.
/// * `grid`: Candidate alpha values (now a slice — `&Vec<f64>` callers still
///   work via deref coercion).
/// * `penalty`: Regularization kind applied while fitting each fold.
///
/// Returns the alpha from `grid` with the lowest mean cross-validated MSE
/// (`0.0` if `grid` is empty).
///
/// # Examples
/// ```ignore
/// let X = Matrix::read("./data/X.csv")?;
/// let y = Vector::read("./data/y.csv")?;
/// let grid = (0..250)
///     .map(|p| 1e-4 / p as f64)
///     .collect::<Vector>();
///
/// println!("Optimal alpha: {}", assess_alpha(&X, &y, 5, &grid, Penalty::L2));
/// ```
pub fn assess_alpha(X: &Matrix, y: &Vector, k: usize, grid: &[f64], penalty: Penalty) -> f64 {
    let mut alpha = 0f64;
    let mut best_error = f64::MAX;
    // 1-based iteration number of the current best, matching the
    // "Iteration {}" log lines (previously logged the 0-based index).
    let mut best_iteration = 0usize;
    for (i, &p) in grid.iter().enumerate() {
        let mut error = 0f64;
        let folds = KFold::new(k, X, y);
        for (j, (train, test)) in folds.enumerate() {
            let (X, y) = train;
            let model = Config::default()
                .penalty(penalty)
                .alpha(p)
                .to_SGD()
                .fit(X, y);
            let (X, y) = test;
            error += model.mse(&X, &y);
            println!("-- Fold {}", j + 1);
        }
        // Mean validation error across the k folds.
        error /= k as f64;
        if error < best_error {
            best_error = error;
            best_iteration = i + 1;
            alpha = p;
        }
        println!("-- Iteration {}, Error: {}, Alpha: {}", i + 1, error, p);
        println!("-- Best error: {} on iteration {}, Alpha: {}", best_error, best_iteration, alpha);
    }
    alpha
}
|
#![allow(dead_code)]
use winit::{
event,
event::{Event, WindowEvent},
event_loop::{ControlFlow, EventLoop},
window::Window,
};
mod gfx;
mod state;
mod util;
use gfx::prelude::*;
/// Application entry: owns the render/input loop for `window`.
/// Never returns in practice — `event_loop.run` takes over the thread.
async fn run(event_loop: EventLoop<()>, window: Window) {
    env_logger::init();
    let mut world = state::world::World::new();
    // Initialize the gfx context.
    let mut gfx_context = GfxContext::create(&window).await.unwrap();
    let mut world_renderer = WorldRenderer::new(world.id);
    // Start focused by default, assuming the application was executed with the intention of using
    // it straight away.
    let mut window_focused: bool = true;
    event_loop.run(move |event, _, control_flow| {
        // Poll continuously so a redraw is requested every frame rather than
        // waiting for OS events.
        *control_flow = ControlFlow::Poll;
        match event {
            Event::MainEventsCleared => window.request_redraw(),
            Event::RedrawRequested(_) =>
                gfx_context.render(&mut world, &mut world_renderer),
            Event::WindowEvent { event: WindowEvent::Resized(size), .. } =>
                gfx_context.resize(size),
            // Handle requests to close the window (close button or Escape);
            // release the cursor grab on exit so the pointer is usable again.
            Event::WindowEvent { event: WindowEvent::CloseRequested, .. } |
            Event::WindowEvent { event: WindowEvent::KeyboardInput { input: event::KeyboardInput {
                virtual_keycode: Some(event::VirtualKeyCode::Escape),
                state: event::ElementState::Pressed, ..
            }, .. }, .. } => {
                *control_flow = ControlFlow::Exit;
                window.set_cursor_grab(false).unwrap();
                window.set_cursor_visible(true);
            },
            // We track if the window has focus so that we can ignore device events when focus is
            // lost.
            Event::WindowEvent { event: WindowEvent::Focused(b), .. } => window_focused = b,
            // Grab and hide the cursor while it is over the window; release
            // it when it leaves.
            Event::WindowEvent { event: WindowEvent::CursorEntered { .. }, .. } => {
                window.set_cursor_grab(true).unwrap();
                window.set_cursor_visible(false);
            },
            Event::WindowEvent { event: WindowEvent::CursorLeft { .. }, .. } => {
                window.set_cursor_grab(false).unwrap();
                window.set_cursor_visible(true);
            },
            // Ignore all device events if the window does not have focus.
            Event::DeviceEvent { .. } if !window_focused => {}
            _ => {}
        }
    });
}
fn main() {
    // Build the event loop and a window sized for a 720p client area.
    let events = EventLoop::new();
    let win = winit::window::Window::new(&events).unwrap();
    win.set_inner_size(winit::dpi::PhysicalSize::new(1280, 720));
    // Drive the async entry point to completion on this thread.
    futures::executor::block_on(run(events, win));
}
|
// NOTE(review): this looks like svd2rust-generated register API code;
// prefer regenerating from the device SVD over hand-editing these aliases.
#[doc = "Register `SMPR` reader"]
pub type R = crate::R<SMPR_SPEC>;
#[doc = "Register `SMPR` writer"]
pub type W = crate::W<SMPR_SPEC>;
#[doc = "Field `SMP1` reader - Sampling time selection 1 These bits are written by software to select the sampling time that applies to all channels. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type SMP1_R = crate::FieldReader;
#[doc = "Field `SMP1` writer - Sampling time selection 1 These bits are written by software to select the sampling time that applies to all channels. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type SMP1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `SMP2` reader - Sampling time selection 2 These bits are written by software to select the sampling time that applies to all channels. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type SMP2_R = crate::FieldReader;
#[doc = "Field `SMP2` writer - Sampling time selection 2 These bits are written by software to select the sampling time that applies to all channels. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type SMP2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `SMPSEL0` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL0_R = crate::BitReader;
#[doc = "Field `SMPSEL0` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL1` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL1_R = crate::BitReader;
#[doc = "Field `SMPSEL1` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL2` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL2_R = crate::BitReader;
#[doc = "Field `SMPSEL2` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL3` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL3_R = crate::BitReader;
#[doc = "Field `SMPSEL3` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL4` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL4_R = crate::BitReader;
#[doc = "Field `SMPSEL4` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL5` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL5_R = crate::BitReader;
#[doc = "Field `SMPSEL5` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL5_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL6` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL6_R = crate::BitReader;
#[doc = "Field `SMPSEL6` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL7` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL7_R = crate::BitReader;
#[doc = "Field `SMPSEL7` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL8` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL8_R = crate::BitReader;
#[doc = "Field `SMPSEL8` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL8_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL9` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL9_R = crate::BitReader;
#[doc = "Field `SMPSEL9` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL9_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL10` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL10_R = crate::BitReader;
#[doc = "Field `SMPSEL10` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL10_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL11` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL11_R = crate::BitReader;
#[doc = "Field `SMPSEL11` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL11_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SMPSEL12` reader - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL12_R = crate::BitReader;
#[doc = "Field `SMPSEL12` writer - Channel-x sampling time selection These bits are written by software to define which sampling time is used. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing). Refer to for the maximum number of channels."]
pub type SMPSEL12_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL13` reader - channel 13 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL13_R = crate::BitReader;
/// Field `SMPSEL13` writer - channel 13 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL13_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL14` reader - channel 14 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL14_R = crate::BitReader;
/// Field `SMPSEL14` writer - channel 14 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL14_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL15` reader - channel 15 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL15_R = crate::BitReader;
/// Field `SMPSEL15` writer - channel 15 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL15_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL16` reader - channel 16 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL16_R = crate::BitReader;
/// Field `SMPSEL16` writer - channel 16 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL16_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL17` reader - channel 17 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL17_R = crate::BitReader;
/// Field `SMPSEL17` writer - channel 17 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL17_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL18` reader - channel 18 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL18_R = crate::BitReader;
/// Field `SMPSEL18` writer - channel 18 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL18_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL19` reader - channel 19 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL19_R = crate::BitReader;
/// Field `SMPSEL19` writer - channel 19 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL19_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL20` reader - channel 20 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL20_R = crate::BitReader;
/// Field `SMPSEL20` writer - channel 20 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL20_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL21` reader - channel 21 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL21_R = crate::BitReader;
/// Field `SMPSEL21` writer - channel 21 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL21_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
/// Field `SMPSEL22` reader - channel 22 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL22_R = crate::BitReader;
/// Field `SMPSEL22` writer - channel 22 sampling time selection.
/// Software may only modify this bit while ADSTART = 0 (no conversion ongoing).
pub type SMPSEL22_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    /// Bits 0:2 - Sampling time selection 1 (SMP1).
    /// Selects the sampling time shared by all channels mapped to selection 1.
    /// Software may only change this field while ADSTART = 0.
    #[inline(always)]
    pub fn smp1(&self) -> SMP1_R {
        SMP1_R::new((self.bits & 0x07) as u8)
    }
    /// Bits 4:6 - Sampling time selection 2 (SMP2).
    /// Selects the sampling time shared by all channels mapped to selection 2.
    /// Software may only change this field while ADSTART = 0.
    #[inline(always)]
    pub fn smp2(&self) -> SMP2_R {
        SMP2_R::new(((self.bits >> 4) & 0x07) as u8)
    }
    /// Bit 8 - Channel 0 sampling time selection (SMPSEL0).
    #[inline(always)]
    pub fn smpsel0(&self) -> SMPSEL0_R {
        SMPSEL0_R::new(((self.bits >> 8) & 1) == 1)
    }
    /// Bit 9 - Channel 1 sampling time selection (SMPSEL1).
    #[inline(always)]
    pub fn smpsel1(&self) -> SMPSEL1_R {
        SMPSEL1_R::new(((self.bits >> 9) & 1) == 1)
    }
    /// Bit 10 - Channel 2 sampling time selection (SMPSEL2).
    #[inline(always)]
    pub fn smpsel2(&self) -> SMPSEL2_R {
        SMPSEL2_R::new(((self.bits >> 10) & 1) == 1)
    }
    /// Bit 11 - Channel 3 sampling time selection (SMPSEL3).
    #[inline(always)]
    pub fn smpsel3(&self) -> SMPSEL3_R {
        SMPSEL3_R::new(((self.bits >> 11) & 1) == 1)
    }
    /// Bit 12 - Channel 4 sampling time selection (SMPSEL4).
    #[inline(always)]
    pub fn smpsel4(&self) -> SMPSEL4_R {
        SMPSEL4_R::new(((self.bits >> 12) & 1) == 1)
    }
    /// Bit 13 - Channel 5 sampling time selection (SMPSEL5).
    #[inline(always)]
    pub fn smpsel5(&self) -> SMPSEL5_R {
        SMPSEL5_R::new(((self.bits >> 13) & 1) == 1)
    }
    /// Bit 14 - Channel 6 sampling time selection (SMPSEL6).
    #[inline(always)]
    pub fn smpsel6(&self) -> SMPSEL6_R {
        SMPSEL6_R::new(((self.bits >> 14) & 1) == 1)
    }
    /// Bit 15 - Channel 7 sampling time selection (SMPSEL7).
    #[inline(always)]
    pub fn smpsel7(&self) -> SMPSEL7_R {
        SMPSEL7_R::new(((self.bits >> 15) & 1) == 1)
    }
    /// Bit 16 - Channel 8 sampling time selection (SMPSEL8).
    #[inline(always)]
    pub fn smpsel8(&self) -> SMPSEL8_R {
        SMPSEL8_R::new(((self.bits >> 16) & 1) == 1)
    }
    /// Bit 17 - Channel 9 sampling time selection (SMPSEL9).
    #[inline(always)]
    pub fn smpsel9(&self) -> SMPSEL9_R {
        SMPSEL9_R::new(((self.bits >> 17) & 1) == 1)
    }
    /// Bit 18 - Channel 10 sampling time selection (SMPSEL10).
    #[inline(always)]
    pub fn smpsel10(&self) -> SMPSEL10_R {
        SMPSEL10_R::new(((self.bits >> 18) & 1) == 1)
    }
    /// Bit 19 - Channel 11 sampling time selection (SMPSEL11).
    #[inline(always)]
    pub fn smpsel11(&self) -> SMPSEL11_R {
        SMPSEL11_R::new(((self.bits >> 19) & 1) == 1)
    }
    /// Bit 20 - Channel 12 sampling time selection (SMPSEL12).
    #[inline(always)]
    pub fn smpsel12(&self) -> SMPSEL12_R {
        SMPSEL12_R::new(((self.bits >> 20) & 1) == 1)
    }
    /// Bit 21 - Channel 13 sampling time selection (SMPSEL13).
    #[inline(always)]
    pub fn smpsel13(&self) -> SMPSEL13_R {
        SMPSEL13_R::new(((self.bits >> 21) & 1) == 1)
    }
    /// Bit 22 - Channel 14 sampling time selection (SMPSEL14).
    #[inline(always)]
    pub fn smpsel14(&self) -> SMPSEL14_R {
        SMPSEL14_R::new(((self.bits >> 22) & 1) == 1)
    }
    /// Bit 23 - Channel 15 sampling time selection (SMPSEL15).
    #[inline(always)]
    pub fn smpsel15(&self) -> SMPSEL15_R {
        SMPSEL15_R::new(((self.bits >> 23) & 1) == 1)
    }
    /// Bit 24 - Channel 16 sampling time selection (SMPSEL16).
    #[inline(always)]
    pub fn smpsel16(&self) -> SMPSEL16_R {
        SMPSEL16_R::new(((self.bits >> 24) & 1) == 1)
    }
    /// Bit 25 - Channel 17 sampling time selection (SMPSEL17).
    #[inline(always)]
    pub fn smpsel17(&self) -> SMPSEL17_R {
        SMPSEL17_R::new(((self.bits >> 25) & 1) == 1)
    }
    /// Bit 26 - Channel 18 sampling time selection (SMPSEL18).
    #[inline(always)]
    pub fn smpsel18(&self) -> SMPSEL18_R {
        SMPSEL18_R::new(((self.bits >> 26) & 1) == 1)
    }
    /// Bit 27 - Channel 19 sampling time selection (SMPSEL19).
    #[inline(always)]
    pub fn smpsel19(&self) -> SMPSEL19_R {
        SMPSEL19_R::new(((self.bits >> 27) & 1) == 1)
    }
    /// Bit 28 - Channel 20 sampling time selection (SMPSEL20).
    #[inline(always)]
    pub fn smpsel20(&self) -> SMPSEL20_R {
        SMPSEL20_R::new(((self.bits >> 28) & 1) == 1)
    }
    /// Bit 29 - Channel 21 sampling time selection (SMPSEL21).
    #[inline(always)]
    pub fn smpsel21(&self) -> SMPSEL21_R {
        SMPSEL21_R::new(((self.bits >> 29) & 1) == 1)
    }
    /// Bit 30 - Channel 22 sampling time selection (SMPSEL22).
    #[inline(always)]
    pub fn smpsel22(&self) -> SMPSEL22_R {
        SMPSEL22_R::new(((self.bits >> 30) & 1) == 1)
    }
}
impl W {
    /// Bits 0:2 - Sampling time selection 1 (SMP1).
    /// Selects the sampling time shared by all channels mapped to selection 1.
    /// Software may only change this field while ADSTART = 0.
    #[inline(always)]
    #[must_use]
    pub fn smp1(&mut self) -> SMP1_W<SMPR_SPEC, 0> {
        SMP1_W::new(self)
    }
    /// Bits 4:6 - Sampling time selection 2 (SMP2).
    /// Selects the sampling time shared by all channels mapped to selection 2.
    /// Software may only change this field while ADSTART = 0.
    #[inline(always)]
    #[must_use]
    pub fn smp2(&mut self) -> SMP2_W<SMPR_SPEC, 4> {
        SMP2_W::new(self)
    }
    /// Bit 8 - Channel 0 sampling time selection (SMPSEL0).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel0(&mut self) -> SMPSEL0_W<SMPR_SPEC, 8> {
        SMPSEL0_W::new(self)
    }
    /// Bit 9 - Channel 1 sampling time selection (SMPSEL1).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel1(&mut self) -> SMPSEL1_W<SMPR_SPEC, 9> {
        SMPSEL1_W::new(self)
    }
    /// Bit 10 - Channel 2 sampling time selection (SMPSEL2).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel2(&mut self) -> SMPSEL2_W<SMPR_SPEC, 10> {
        SMPSEL2_W::new(self)
    }
    /// Bit 11 - Channel 3 sampling time selection (SMPSEL3).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel3(&mut self) -> SMPSEL3_W<SMPR_SPEC, 11> {
        SMPSEL3_W::new(self)
    }
    /// Bit 12 - Channel 4 sampling time selection (SMPSEL4).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel4(&mut self) -> SMPSEL4_W<SMPR_SPEC, 12> {
        SMPSEL4_W::new(self)
    }
    /// Bit 13 - Channel 5 sampling time selection (SMPSEL5).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel5(&mut self) -> SMPSEL5_W<SMPR_SPEC, 13> {
        SMPSEL5_W::new(self)
    }
    /// Bit 14 - Channel 6 sampling time selection (SMPSEL6).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel6(&mut self) -> SMPSEL6_W<SMPR_SPEC, 14> {
        SMPSEL6_W::new(self)
    }
    /// Bit 15 - Channel 7 sampling time selection (SMPSEL7).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel7(&mut self) -> SMPSEL7_W<SMPR_SPEC, 15> {
        SMPSEL7_W::new(self)
    }
    /// Bit 16 - Channel 8 sampling time selection (SMPSEL8).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel8(&mut self) -> SMPSEL8_W<SMPR_SPEC, 16> {
        SMPSEL8_W::new(self)
    }
    /// Bit 17 - Channel 9 sampling time selection (SMPSEL9).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel9(&mut self) -> SMPSEL9_W<SMPR_SPEC, 17> {
        SMPSEL9_W::new(self)
    }
    /// Bit 18 - Channel 10 sampling time selection (SMPSEL10).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel10(&mut self) -> SMPSEL10_W<SMPR_SPEC, 18> {
        SMPSEL10_W::new(self)
    }
    /// Bit 19 - Channel 11 sampling time selection (SMPSEL11).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel11(&mut self) -> SMPSEL11_W<SMPR_SPEC, 19> {
        SMPSEL11_W::new(self)
    }
    /// Bit 20 - Channel 12 sampling time selection (SMPSEL12).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel12(&mut self) -> SMPSEL12_W<SMPR_SPEC, 20> {
        SMPSEL12_W::new(self)
    }
    /// Bit 21 - Channel 13 sampling time selection (SMPSEL13).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel13(&mut self) -> SMPSEL13_W<SMPR_SPEC, 21> {
        SMPSEL13_W::new(self)
    }
    /// Bit 22 - Channel 14 sampling time selection (SMPSEL14).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel14(&mut self) -> SMPSEL14_W<SMPR_SPEC, 22> {
        SMPSEL14_W::new(self)
    }
    /// Bit 23 - Channel 15 sampling time selection (SMPSEL15).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel15(&mut self) -> SMPSEL15_W<SMPR_SPEC, 23> {
        SMPSEL15_W::new(self)
    }
    /// Bit 24 - Channel 16 sampling time selection (SMPSEL16).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel16(&mut self) -> SMPSEL16_W<SMPR_SPEC, 24> {
        SMPSEL16_W::new(self)
    }
    /// Bit 25 - Channel 17 sampling time selection (SMPSEL17).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel17(&mut self) -> SMPSEL17_W<SMPR_SPEC, 25> {
        SMPSEL17_W::new(self)
    }
    /// Bit 26 - Channel 18 sampling time selection (SMPSEL18).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel18(&mut self) -> SMPSEL18_W<SMPR_SPEC, 26> {
        SMPSEL18_W::new(self)
    }
    /// Bit 27 - Channel 19 sampling time selection (SMPSEL19).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel19(&mut self) -> SMPSEL19_W<SMPR_SPEC, 27> {
        SMPSEL19_W::new(self)
    }
    /// Bit 28 - Channel 20 sampling time selection (SMPSEL20).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel20(&mut self) -> SMPSEL20_W<SMPR_SPEC, 28> {
        SMPSEL20_W::new(self)
    }
    /// Bit 29 - Channel 21 sampling time selection (SMPSEL21).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel21(&mut self) -> SMPSEL21_W<SMPR_SPEC, 29> {
        SMPSEL21_W::new(self)
    }
    /// Bit 30 - Channel 22 sampling time selection (SMPSEL22).
    /// Write only while ADSTART = 0 (no conversion ongoing).
    #[inline(always)]
    #[must_use]
    pub fn smpsel22(&mut self) -> SMPSEL22_W<SMPR_SPEC, 30> {
        SMPSEL22_W::new(self)
    }
    /// Writes raw bits to the register.
    ///
    /// # Safety
    /// The caller must ensure the written bit pattern is valid for this
    /// register (reserved bits kept at their required values).
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
/// ADC sampling time register.
///
/// You can [`read`](crate::generic::Reg::read) this register and get [`smpr::R`](R).
/// You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write),
/// [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using
/// [`smpr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register.
/// See [API](https://docs.rs/svd2rust/#read--modify--write-api).
pub struct SMPR_SPEC;
impl crate::RegisterSpec for SMPR_SPEC {
    // The register is accessed as a 32-bit word.
    type Ux = u32;
}
/// `read()` method returns [`smpr::R`](R) reader structure.
impl crate::Readable for SMPR_SPEC {}
/// `write(|w| ..)` method takes [`smpr::W`](W) writer structure.
impl crate::Writable for SMPR_SPEC {
    // No fields require writing 0 or 1 to be left unmodified.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
/// `reset()` method sets SMPR to value 0.
impl crate::Resettable for SMPR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! Internal implementation details of usbd-hid.
extern crate proc_macro;
extern crate usbd_hid_descriptors;
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::quote;
use syn::punctuated::Punctuated;
use syn::token::Bracket;
use syn::{parse, parse_macro_input, Expr, Fields, ItemStruct};
use syn::{Pat, PatSlice, Result};
use byteorder::{ByteOrder, LittleEndian};
use usbd_hid_descriptors::*;
mod spec;
use spec::*;
mod item;
use item::*;
mod packer;
use packer::{gen_serializer, uses_report_ids};
/// Attribute to generate a HID descriptor & serialization code
///
/// You are expected to provide two inputs to this generator:
///
/// - A struct of named fields (which follows the `gen_hid_descriptor` attribute)
/// - A specially-formatted section describing the properties of the descriptor (this
/// section must be provided as arguments to the `gen_hid_descriptor()` attribute)
///
/// The generated HID descriptor will be available as a `&[u8]` by calling
/// `YourStructType::desc()`. `YourStructType` also now implements `SerializedDescriptor`.
///
/// As long as a descriptor describes only input or output types, and a report ID is
/// not used, the wire format for transmitting and receiving the data described by the
/// descriptor is simply the packed representation of the struct itself.
/// Where report IDs are used anywhere in the descriptor, you must prepend the relevant
/// report ID to the packed representation of the struct prior to transmission.
///
/// If inputs and outputs are mixed within the same HID descriptor, then only the struct
/// fields used in that direction can be present in a payload being transmitted in that
/// direction.
///
/// If report IDs are not used, input (device-to-host) serialization code is generated
/// automatically, and is represented by the implementation of the `AsInputReport` trait.
///
/// # Examples
///
/// - Custom 32-octet array, sent from device to host
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (collection = APPLICATION, usage_page = VENDOR_DEFINED_START, usage = 0x01) = {
/// buff=input;
/// }
/// )]
/// struct CustomInputReport {
/// buff: [u8; 32],
/// }
/// ```
///
/// - Custom input / output, sent in either direction
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (collection = APPLICATION, usage_page = VENDOR_DEFINED_START, usage = 0x01) = {
/// input_buffer=input;
/// output_buffer=output;
/// }
/// )]
/// struct CustomBidirectionalReport {
/// input_buffer: [u8; 32],
/// output_buffer: [u8; 32],
/// }
/// ```
///
/// Because both inputs and outputs are used, the data format when sending / receiving is the
/// 32 bytes in the relevant direction, **NOT** the full 64 bytes contained within the struct.
///
/// - Packed bitfields
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (report_id = 0x01,) = {
/// #[packed_bits 3] f1=input;
/// #[packed_bits 9] f2=input;
/// }
/// )]
/// struct CustomPackedBits {
/// f1: u8,
/// f2: u16,
/// }
/// ```
///
/// Because the `#[packed_bits]` sub-attribute was used, the two input fields specified are
/// interpreted as packed bits. As such, `f1` describes 3 boolean inputs, and `f2` describes
/// 9 boolean inputs. Padding constants are automatically generated.
///
/// The `#[packed_bits <num bits>]` feature is intended to be used for describing button presses.
///
/// - Customizing the settings on a report item
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (collection = APPLICATION, usage_page = VENDOR_DEFINED_START, usage = 0x01) = {
/// (usage_min = X, usage_max = Y) = {
/// #[item_settings data,variable,relative] x=input;
/// #[item_settings data,variable,relative] y=input;
/// };
/// }
/// )]
/// struct CustomCoords {
/// x: i8,
/// y: i8,
/// }
/// ```
///
/// The above example describes a report which sends X & Y co-ordinates. As indicated in
/// the `#[item_settings]` sub-attribute, the individual inputs are described as:
///
/// - Datapoints (`data`) - as opposed to constant
/// - Variable (`variable`) - as opposed to an array
/// - Relative (`relative`) - as opposed to absolute
///
/// # Supported struct types
///
/// The struct following the attribute must consist entirely of named fields, using
/// only types enumerated below, or fixed-size arrays of the types enumerated below.
///
/// - u8 / i8
/// - u16 / i16
/// - u32 / i32
///
/// `LOGICAL_MINIMUM` & `LOGICAL_MAXIMUM` are automatically set in the descriptor, based
/// on the type & whether `#[packed_bits]` was set on the field or not.
///
/// # Descriptor format
///
/// The parameters of the HID descriptor should be provided as arguments to the attribute.
/// The arguments should follow the basic form:
///
/// ```
/// #[gen_hid_descriptor(
/// <collection-spec> OR <item-spec>;
/// <collection-spec> OR <item-spec>;
/// ...
/// <collection-spec> OR <item-spec>
/// )]
/// ```
///
/// ## `collection-spec`:
///
/// ```
/// (parameter = <constant or 0xxxx>, ...) = {
/// <collection-spec> OR <item-spec>;
/// ...
/// }
/// ```
///
/// Note: All collection specs must end in a semicolon, except the top-level one.
///
/// Note: Parameters are a tuple, so make sure you have a trailing comma if you only have one
/// parameter.
///
/// The valid parameters are `collection`, `usage_page`, `usage`, `usage_min`, `usage_max`, and
/// `report_id`. These simply configure parameters that apply to contained items in the report.
/// Use of the `collection` parameter automatically creates a collection feature for all items
/// which are contained within it, and other parameters specified in the same collection-spec
/// apply to the collection, not directly to the elements of the collection (ie: defining a
/// collection + a usage generates a descriptor where the usage is set on the collection, not the
/// items contained within the collection).
///
/// ## `item-spec`:
///
/// ```
/// #[packed_bits <num_items>] #[item_settings <setting>,...] <fieldname>=input OR output;
/// ```
///
/// The two sub-attributes are both optional.
///
/// - `fieldname` refers to the name of a field within the struct. All fields must be specified.
/// - `input` fields are sent in reports from device to host. `output` fields are sent in reports
/// from host to device. This matches the terminology used in the USB & HID specifications.
/// - `packed_bits` configures the field as a set of `num_items` booleans rather than a number.
/// If the number of packed bits is less than the natural bit width of the field, the
/// remaining most-significant bits are set as constants within the report and are not used.
/// `packed_bits` is typically used to implement buttons.
/// - `item_settings` describes settings on the input/output item, as enumerated in section
/// 6.2.2.5 of the [HID specification, version 1.11](https://www.usb.org/sites/default/files/documents/hid1_11.pdf).
/// By default, all items are configured as `(Data,Var,Abs,No Wrap,Linear,Preferred State,No Null Position)`.
///
/// ## Quirks
///
/// By default generated descriptors are such to maximize compatibility. To change this
/// behaviour, you can use a `#[quirks <settings>]` attribute on the relevant input/output
/// item.
/// For now, the only quirk is `#[quirks allow_short]`, which allows global features to be
/// serialized in a 1 byte form. This is disabled by default as the Windows HID parser
/// considers it invalid.
#[proc_macro_attribute]
pub fn gen_hid_descriptor(args: TokenStream, input: TokenStream) -> TokenStream {
    let decl = parse_macro_input!(input as ItemStruct);
    let spec = parse_macro_input!(args as GroupSpec);
    let ident = decl.ident.clone();

    // The generated serializer addresses fields by name, so tuple / unit
    // structs are rejected up front with a spanned compile error. Matching on
    // the fields by reference avoids cloning the entire struct declaration
    // just to inspect it.
    if !matches!(decl.fields, Fields::Named(_)) {
        return parse::Error::new(
            ident.span(),
            "`#[gen_hid_descriptor]` type must name fields",
        )
        .to_compile_error()
        .into();
    }

    // A Serialize impl is only generated when the descriptor does not use
    // report IDs (see uses_report_ids); the spec must be cloned since
    // compile_descriptor consumes it.
    let do_serialize = !uses_report_ids(&Spec::Collection(spec.clone()));

    let (descriptor, fields) = match compile_descriptor(spec, &decl.fields) {
        Ok(d) => d,
        Err(e) => return e.to_compile_error().into(),
    };

    // Re-emit the struct with a packed C layout (so the serialized report
    // matches the descriptor byte-for-byte) plus the descriptor accessor.
    let mut out = quote! {
        #[derive(Debug, Clone, Copy)]
        #[repr(C, packed)]
        #decl

        impl SerializedDescriptor for #ident {
            fn desc() -> &'static[u8] {
                &#descriptor
            }
        }
    };

    if do_serialize {
        let input_serializer = match gen_serializer(fields, MainItemKind::Input) {
            Ok(s) => s,
            Err(e) => return e.to_compile_error().into(),
        };
        out = quote! {
            #out

            impl Serialize for #ident {
                fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
                where
                    S: Serializer,
                {
                    #input_serializer
                }
            }

            impl AsInputReport for #ident {}
        };
    }

    TokenStream::from(out)
}
fn compile_descriptor(
spec: GroupSpec,
fields: &Fields,
) -> Result<(PatSlice, Vec<ReportUnaryField>)> {
let mut compiler = DescCompilation {
..Default::default()
};
let mut elems = Punctuated::new();
if let Err(e) = compiler.emit_group(&mut elems, &spec, fields) {
return Err(e);
};
Ok((
PatSlice {
attrs: vec![],
elems: elems,
bracket_token: Bracket {
span: Span::call_site(),
},
},
compiler.report_fields(),
))
}
/// Tracks the HID "global item" state while emitting descriptor bytes, so
/// that global items (logical min/max, report size/count) are only re-emitted
/// when their value actually changes between fields.
#[derive(Default)]
struct DescCompilation {
    logical_minimum: Option<isize>,
    logical_maximum: Option<isize>,
    report_size: Option<u16>,
    report_count: Option<u16>,
    // Fields encountered so far, in descriptor order; consumed later by the
    // serializer generator.
    processed_fields: Vec<ReportUnaryField>,
}
impl DescCompilation {
fn report_fields(&self) -> Vec<ReportUnaryField> {
self.processed_fields.clone()
}
fn emit(
&self,
elems: &mut Punctuated<Pat, syn::token::Comma>,
prefix: &mut ItemPrefix,
buf: [u8; 4],
signed: bool,
) {
// println!("buf: {:?}", buf);
if buf[1..4] == [0, 0, 0] && !(signed && buf[0] == 255) {
prefix.set_byte_count(1);
elems.push(byte_literal(prefix.0));
elems.push(byte_literal(buf[0]));
} else if buf[2..4] == [0, 0] && !(signed && buf[1] == 255) {
prefix.set_byte_count(2);
elems.push(byte_literal(prefix.0));
elems.push(byte_literal(buf[0]));
elems.push(byte_literal(buf[1]));
} else {
prefix.set_byte_count(3);
elems.push(byte_literal(prefix.0));
elems.push(byte_literal(buf[0]));
elems.push(byte_literal(buf[1]));
elems.push(byte_literal(buf[2]));
elems.push(byte_literal(buf[3]));
}
// println!("emitted {} data bytes", prefix.byte_count());
}
fn emit_item(
&self,
elems: &mut Punctuated<Pat, syn::token::Comma>,
typ: u8,
kind: u8,
num: isize,
signed: bool,
allow_short_form: bool,
) {
let mut prefix = ItemPrefix(0);
prefix.set_tag(kind);
prefix.set_type(typ);
// TODO: Support long tags.
// Section 6.2.2.4: An Input item could have a data size of zero (0)
// bytes. In this case the value of each data bit for the item can be
// assumed to be zero. This is functionally identical to using a item
// tag that specifies a 4-byte data item followed by four zero bytes.
let allow_short = typ == ItemType::Main.into() && kind == MainItemKind::Input.into();
if allow_short_form && allow_short && num == 0 {
prefix.set_byte_count(0);
elems.push(byte_literal(prefix.0));
return;
}
let mut buf = [0; 4];
LittleEndian::write_i32(&mut buf, num as i32);
self.emit(elems, &mut prefix, buf, signed);
}
fn handle_globals(&mut self, elems: &mut Punctuated<Pat, syn::token::Comma>, item: MainItem, quirks: ItemQuirks) {
if self.logical_minimum.is_none()
|| self.logical_minimum.clone().unwrap() != item.logical_minimum
{
self.emit_item(
elems,
ItemType::Global.into(),
GlobalItemKind::LogicalMin.into(),
item.logical_minimum as isize,
true,
quirks.allow_short_form,
);
self.logical_minimum = Some(item.logical_minimum);
}
if self.logical_maximum.is_none()
|| self.logical_maximum.clone().unwrap() != item.logical_maximum
{
self.emit_item(
elems,
ItemType::Global.into(),
GlobalItemKind::LogicalMax.into(),
item.logical_maximum as isize,
true,
quirks.allow_short_form,
);
self.logical_maximum = Some(item.logical_maximum);
}
if self.report_size.is_none() || self.report_size.clone().unwrap() != item.report_size {
self.emit_item(
elems,
ItemType::Global.into(),
GlobalItemKind::ReportSize.into(),
item.report_size as isize,
true,
quirks.allow_short_form,
);
self.report_size = Some(item.report_size);
}
if self.report_count.is_none() || self.report_count.clone().unwrap() != item.report_count {
self.emit_item(
elems,
ItemType::Global.into(),
GlobalItemKind::ReportCount.into(),
item.report_count as isize,
true,
quirks.allow_short_form,
);
self.report_count = Some(item.report_count);
}
}
fn emit_field(
&mut self,
elems: &mut Punctuated<Pat, syn::token::Comma>,
i: &ItemSpec,
item: MainItem,
) {
self.handle_globals(elems, item.clone(), i.quirks);
let item_data = match &i.settings {
Some(s) => s.0 as isize,
None => 0x02, // 0x02 = Data,Var,Abs
};
self.emit_item(
elems,
ItemType::Main.into(),
item.kind.into(),
item_data,
true,
i.quirks.allow_short_form,
);
if let Some(padding) = item.padding_bits {
// Make another item of type constant to carry the remaining bits.
let padding = MainItem {
report_size: 1,
report_count: padding,
..item
};
self.handle_globals(elems, padding.clone(), i.quirks);
let mut const_settings = MainItemSetting { 0: 0 };
const_settings.set_constant(true);
const_settings.set_variable(true);
self.emit_item(
elems,
ItemType::Main.into(),
item.kind.into(),
const_settings.0 as isize,
true,
i.quirks.allow_short_form,
);
}
}
fn emit_group(
&mut self,
elems: &mut Punctuated<Pat, syn::token::Comma>,
spec: &GroupSpec,
fields: &Fields,
) -> Result<()> {
// println!("GROUP: {:?}", spec);
if let Some(usage_page) = spec.usage_page {
self.emit_item(
elems,
ItemType::Global.into(),
GlobalItemKind::UsagePage.into(),
usage_page as isize,
false,
false,
);
}
for usage in &spec.usage {
self.emit_item(
elems,
ItemType::Local.into(),
LocalItemKind::Usage.into(),
*usage as isize,
false,
false,
);
}
if let Some(usage_min) = spec.usage_min {
self.emit_item(
elems,
ItemType::Local.into(),
LocalItemKind::UsageMin.into(),
usage_min as isize,
false,
false,
);
}
if let Some(usage_max) = spec.usage_max {
self.emit_item(
elems,
ItemType::Local.into(),
LocalItemKind::UsageMax.into(),
usage_max as isize,
false,
false,
);
}
if let Some(report_id) = spec.report_id {
self.emit_item(
elems,
ItemType::Global.into(),
GlobalItemKind::ReportID.into(),
report_id as isize,
false,
false,
);
}
if let Some(collection) = spec.collection {
self.emit_item(
elems,
ItemType::Main.into(),
MainItemKind::Collection.into(),
collection as isize,
false,
false,
);
}
if let Some(logical_minimum) = spec.logical_min {
self.emit_item(
elems,
ItemType::Main.into(),
GlobalItemKind::LogicalMin.into(),
logical_minimum as isize,
false,
false,
);
}
for name in spec.clone() {
let f = spec.get(name.clone()).unwrap();
match f {
Spec::MainItem(i) => {
let d = field_decl(fields, name);
match analyze_field(d.clone(), d.ty, i) {
Ok(item) => {
self.processed_fields.push(item.clone());
self.emit_field(elems, i, item.descriptor_item)
}
Err(e) => return Err(e),
}
}
Spec::Collection(g) => {
if let Err(e) = self.emit_group(elems, g, fields) {
return Err(e);
}
}
}
}
if let Some(_) = spec.collection {
// Close collection.
elems.push(byte_literal(0xc0));
}
Ok(())
}
}
/// Wraps a raw byte in a `syn` byte-literal pattern (e.g. `b'\x05'`) so it
/// can be spliced into the generated descriptor slice.
fn byte_literal(lit: u8) -> Pat {
    let literal = syn::ExprLit {
        attrs: Vec::new(),
        lit: syn::Lit::Byte(syn::LitByte::new(lit, Span::call_site())),
    };
    Pat::Lit(syn::PatLit {
        attrs: Vec::new(),
        expr: Box::new(Expr::Lit(literal)),
    })
}
|
//! Demonstrates how to retrieve a StarkNet state update from L1 using event logs.
//!
//! As a high-level overview of the log events:
//! (in chronological order, for a single state update)
//!
//! 1. Multiple LogMemoryPageFactContinuous get emitted by the MemoryPageFactRegistry contract.
//! These logs contain no data themselves, but the transactions they were emitted by do.
//!
//! 2. A LogMemoryPagesHashes is emitted by the GpsStatementVerifier contract.
//! It contains a list of memory pages which can be combined and parsed to
//! form the actual state update information.
//!
//! 3. The core contract emits two logs, LogStateUpdate and LogStateTransitionFact.
//! The latter identifies a specific fact (from 2) and links it to this state update.
//!
//! This example assumes you've identified the fact in (3). It then retrieves the logs from (2)
//! until we find one which matches the one you've identified. This log (2) then contains a list
//! of memory page logs (1) which we then retrieve. Finally, we retrieve these memory page logs'
//! data from their transactions which gets parsed into the state update.
//!
//! Note that these logs are spread over multiple L1 transactions and blocks. This example therefore
//! only searches an L1 block range of `N-10_000` : `N` where `N` is the block containing the state
//! update (3).
use std::str::FromStr;
use clap::Arg;
use pathfinder_lib::{
core::{EthereumBlockHash, StarknetBlockNumber},
ethereum::{
log::{MetaLog, StateUpdateLog},
state_update::StateUpdate,
transport::{EthereumTransport, HttpTransport},
},
};
use web3::types::{H256, U256};
use web3::{transports::Http, types::FilterBuilder, Web3};
#[tokio::main]
async fn main() {
    let (transport, block_hash, block_no) = parse_cli_args();
    // Identify which Ethereum network we are talking to; the contract address
    // and log lookup below depend on the chain.
    let chain = transport
        .chain()
        .await
        .expect("Failed to identify Ethereum network");
    // Get the state update event at the given block.
    let filter = FilterBuilder::default()
        .block_hash(block_hash.0)
        .address(vec![StateUpdateLog::contract_address(chain)])
        .topics(Some(vec![StateUpdateLog::signature()]), None, None, None)
        .build();
    let logs = transport.logs(filter).await.unwrap();
    // A block can contain several state update logs; pick the one whose
    // StarkNet block number matches the requested sequence number.
    let update_log = logs
        .into_iter()
        .map(|log| StateUpdateLog::try_from(log).expect("state update log parsing failed"))
        .find(|log| log.block_number == block_no)
        .expect("state update log not found");
    // Walk back through the memory-page logs (see module docs) and assemble
    // the full state update.
    let state_update = StateUpdate::retrieve(&transport, update_log, chain)
        .await
        .expect("Failed to retrieve the state update");
    println!("State update:\n\n{:#?}", state_update);
}
/// Creates the CLI and parses the resulting arguments.
///
/// All three arguments are mandatory: marking them `required` lets clap print
/// a proper usage error when one is missing, instead of the process panicking
/// in the `expect` calls below.
fn parse_cli_args() -> (HttpTransport, EthereumBlockHash, StarknetBlockNumber) {
    let cli = clap::Command::new("fact-retrieval")
        .about("Retrieves and displays a StarkNet state update fact")
        .after_help("You can use Etherscan to identify a fact hash to retrieve. The fact hash for a state update is emitted as a `LogStateTransitionFact` log.")
        .arg(
            Arg::new("seq-no")
                .long("sequence-number")
                .short('s')
                .takes_value(true)
                .required(true)
                .help("The state update's sequence number.")
                .value_name("INT")
        )
        .arg(
            Arg::new("block")
                .long("block-hash")
                .short('b')
                .takes_value(true)
                .required(true)
                .value_name("HASH")
                .help("The L1 block hash at which the state update occurred.")
        )
        .arg(
            Arg::new("url")
                .long("url")
                .short('u')
                .takes_value(true)
                .required(true)
                .value_name("HTTP(S) URL")
                .long_help(r#"This should point to the HTTP RPC endpoint of your Ethereum entry-point, typically a local Ethereum client or a hosted gateway service such as Infura or Cloudflare.
Examples:
    infura: https://goerli.infura.io/v3/<PROJECT_ID>
    geth:   https://localhost:8545"#));

    let args = cli.get_matches();

    // With `required(true)` set above, clap exits before these can fail.
    let url = args.value_of("url").expect("Ethereum HTTP url is required");
    let block = args.value_of("block").expect("block hash is required");
    let seq_no = args
        .value_of("seq-no")
        .expect("sequence number is required");

    let client = Http::new(url).expect("A valid HTTP URL");
    let block = H256::from_str(block).expect("A valid block hash");
    let block = EthereumBlockHash(block);
    let seq_no = U256::from_dec_str(seq_no).expect("A valid sequence number");
    let seq_no = StarknetBlockNumber(seq_no.as_u64());
    let client = HttpTransport::new(Web3::new(client));
    (client, block, seq_no)
}
|
// Copyright 2021 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use zbus::dbus_proxy;
/// D-Bus proxy for the top-level NetworkManager object: device lookup,
/// connection (de)activation and checkpoint management.
#[dbus_proxy(
    interface = "org.freedesktop.NetworkManager",
    default_service = "org.freedesktop.NetworkManager",
    default_path = "/org/freedesktop/NetworkManager"
)]
trait NetworkManager {
    /// Version property
    #[dbus_proxy(property)]
    fn version(&self) -> zbus::Result<String>;

    /// ActiveConnections property
    #[dbus_proxy(property)]
    fn active_connections(
        &self,
    ) -> zbus::Result<Vec<zvariant::OwnedObjectPath>>;

    /// CheckpointCreate method
    fn checkpoint_create(
        &self,
        devices: &[zvariant::ObjectPath],
        rollback_timeout: u32,
        flags: u32,
    ) -> zbus::Result<zvariant::OwnedObjectPath>;

    /// CheckpointDestroy method
    fn checkpoint_destroy(
        &self,
        checkpoint: &zvariant::ObjectPath,
    ) -> zbus::Result<()>;

    /// CheckpointRollback method
    fn checkpoint_rollback(
        &self,
        checkpoint: &zvariant::ObjectPath,
    ) -> zbus::Result<std::collections::HashMap<String, u32>>;

    /// ActivateConnection method
    fn activate_connection(
        &self,
        connection: &zvariant::ObjectPath,
        device: &zvariant::ObjectPath,
        specific_object: &zvariant::ObjectPath,
    ) -> zbus::Result<zvariant::OwnedObjectPath>;

    /// DeactivateConnection method
    fn deactivate_connection(
        &self,
        active_connection: &zvariant::ObjectPath,
    ) -> zbus::Result<()>;

    /// GetDeviceByIpIface method
    fn get_device_by_ip_iface(
        &self,
        iface: &str,
    ) -> zbus::Result<zvariant::OwnedObjectPath>;

    /// GetAllDevices method
    fn get_all_devices(&self) -> zbus::Result<Vec<zvariant::OwnedObjectPath>>;

    /// CheckpointAdjustRollbackTimeout method
    fn checkpoint_adjust_rollback_timeout(
        &self,
        checkpoint: &zvariant::ObjectPath,
        add_timeout: u32,
    ) -> zbus::Result<()>;
}
/// D-Bus proxy for the NetworkManager Settings object: lookup, creation and
/// enumeration of stored connection profiles.
#[dbus_proxy(
    interface = "org.freedesktop.NetworkManager.Settings",
    default_service = "org.freedesktop.NetworkManager",
    default_path = "/org/freedesktop/NetworkManager/Settings"
)]
trait NetworkManagerSetting {
    /// GetConnectionByUuid method
    fn get_connection_by_uuid(
        &self,
        uuid: &str,
    ) -> zbus::Result<zvariant::OwnedObjectPath>;

    /// AddConnection2 method
    ///
    /// `settings` is a nested map of setting-name -> (key -> value), as used
    /// by NetworkManager's connection profile format.
    fn add_connection2(
        &self,
        settings: std::collections::HashMap<
            &str,
            std::collections::HashMap<&str, zvariant::Value>,
        >,
        flags: u32,
        args: std::collections::HashMap<&str, zvariant::Value>,
    ) -> zbus::Result<(
        zvariant::OwnedObjectPath,
        std::collections::HashMap<String, zvariant::OwnedValue>,
    )>;

    /// ListConnections method
    fn list_connections(&self) -> zbus::Result<Vec<zvariant::OwnedObjectPath>>;
}
|
use core::ops::{Deref, DerefMut};
use core::sync::atomic::{AtomicBool, Ordering};
use crate::uses::*;
use crate::int::apic::LocalApic;
use crate::gdt::{Gdt, Tss};
use crate::int::idt::Idt;
use crate::sched::Registers;
use crate::arch::x64::*;
/// Per-cpu data block reached through the GS segment base.
#[repr(C)]
#[derive(Debug)]
pub struct GsData
{
    // NOTE: these fields have to be first for assembly code
    pub call_rsp: usize,
    pub call_save_rsp: usize,
    pub out_regs: Registers,
    pub last_time: u64,
    pub last_switch_nsec: u64,
    // Local APIC handle; None until set_lapic() is called.
    lapic: Option<LocalApic>,
    pub gdt: Gdt,
    pub tss: Tss,
    pub idt: Idt,
    // True while a GsRef guard to this cpu's data is live (see cpud()).
    other_alive: AtomicBool,
}
impl GsData {
    /// Builds a zeroed per-cpu data block with a fresh GDT/TSS/IDT and no
    /// local APIC configured yet.
    fn new() -> Self {
        let tss = Tss::new();
        let gdt = Gdt::new(&tss);
        GsData {
            call_rsp: 0,
            call_save_rsp: 0,
            out_regs: Registers::zero(),
            last_time: 0,
            last_switch_nsec: 0,
            lapic: None,
            gdt,
            tss,
            idt: Idt::new(),
            other_alive: AtomicBool::new(false),
        }
    }

    /// Returns the local APIC; panics if `set_lapic` has not been called yet.
    pub fn lapic(&mut self) -> &mut LocalApic {
        self.lapic.as_mut().unwrap()
    }

    /// Installs this cpu's local APIC handle.
    pub fn set_lapic(&mut self, lapic: LocalApic) {
        self.lapic = Some(lapic);
    }
}
/// Guard giving access to this cpu's `GsData`. Carries an `IntDisable`
/// (presumably keeping interrupts off while alive — see IntDisable) and
/// clears the data's `other_alive` flag when dropped.
#[derive(Debug)]
pub struct GsRef {
    data: *mut GsData,
    intd: IntDisable,
}
impl Deref for GsRef {
    type Target = GsData;

    /// Borrows the per-cpu data behind the raw pointer; panics if the
    /// pointer is null.
    fn deref(&self) -> &Self::Target {
        unsafe { self.data.as_ref() }.unwrap()
    }
}
impl DerefMut for GsRef {
    /// Mutably borrows the per-cpu data behind the raw pointer; panics if
    /// the pointer is null.
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { self.data.as_mut() }.unwrap()
    }
}
// Dropping the guard marks the per-cpu data as free again, allowing a later
// cpud()/try_cpud() call to succeed.
impl Drop for GsRef {
    fn drop(&mut self) {
        self.other_alive.store(false, Ordering::Release);
    }
}
// panics if another gsref on the same cpu is still alive
pub fn cpud() -> GsRef {
    let intd = IntDisable::new();
    let data = gs_addr() as *mut GsData;

    // Claim the aliveness flag *before* constructing the GsRef: if the guard
    // were built first (as it used to be), an unwinding panic below would run
    // GsRef::drop, store `false`, and wrongly release the guard held by the
    // other, still-live GsRef on this cpu.
    // SAFETY: gs_addr() points at this cpu's GsData (installed by init()),
    // and interrupts are disabled so we cannot migrate off this cpu.
    if unsafe { (*data).other_alive.swap(true, Ordering::AcqRel) } {
        panic!("tried to get multiple gsrefs on the same cpu at the same time");
    }

    GsRef { data, intd }
}
// returns none if another cpud ref on this core exists
pub fn try_cpud() -> Option<GsRef> {
    let intd = IntDisable::new();
    let data = gs_addr() as *mut GsData;

    // Claim the aliveness flag *before* constructing the GsRef. The previous
    // code built the guard first and dropped it on the failure path; that
    // drop stored `false` into `other_alive`, wrongly releasing the guard
    // held by the other, still-live GsRef on this cpu.
    // SAFETY: gs_addr() points at this cpu's GsData (installed by init()),
    // and interrupts are disabled so we cannot migrate off this cpu.
    if unsafe { (*data).other_alive.swap(true, Ordering::AcqRel) } {
        None
    } else {
        Some(GsRef { data, intd })
    }
}
// doesn't check if another cpud ref on this core exists
pub unsafe fn force_cpud() -> GsRef {
    let intd = IntDisable::new();
    let ptr = gs_addr();
    let out = GsRef {
        data: ptr as *mut GsData,
        intd,
    };
    // Unconditionally mark the data as borrowed; any pre-existing guard is
    // simply ignored, which is why this function is unsafe.
    out.other_alive.store(true, Ordering::Release);
    out
}
/// Returns this cpu's processor id, with interrupts disabled around the read.
pub fn prid() -> usize {
    let _intd = IntDisable::new();
    raw_prid()
}
/// The value actually stored at the GS base: a pointer to the real `GsData`
/// plus scratch fields reachable with a plain `gs:` offset (see init()).
#[repr(C)]
#[derive(Debug, Clone, Copy)]
struct GsDataPtr {
    // Address of this cpu's heap-allocated GsData.
    gsdata_addr: usize,
    // Scratch slot used by the syscall handler to stash rip (see init()).
    temp: usize,
    // This cpu's processor id.
    prid: usize,
}
/// Allocates this cpu's per-cpu data and writes its address into both GS base
/// MSRs. Both allocations are leaked on purpose: the per-cpu data lives for
/// the lifetime of the kernel.
pub fn init(prid: usize)
{
    let _lock = crate::AP_ALLOC_LOCK.lock();
    let gsdata_addr = Box::leak(Box::new(GsData::new())) as *mut _ as usize;
    // need this layer of indirection because lea can't be used to get address with gs offset
    // the temp field is used by syscall handler to store rip because there are not enough registers
    let gsptr = GsDataPtr {
        gsdata_addr,
        temp: 0,
        prid,
    };
    let gs_addr = Box::leak(Box::new(gsptr)) as *mut _ as u64;
    wrmsr(GSBASE_MSR, gs_addr);
    wrmsr(GSBASEK_MSR, gs_addr);
}
|
use super::message_destinations::MessageDestinations;
use super::registry::Registry as GenericRegistry;
use crate::data::message::Message;
use crate::data::timetoken::Timetoken;
use crate::data::{pubsub, request, response};
use crate::transport::Service;
use futures_channel::{mpsc, oneshot};
use futures_util::future::{select, Either, FutureExt};
use futures_util::sink::SinkExt;
use futures_util::stream::StreamExt;
use log::{debug, error};
use std::fmt::Debug;
pub(crate) use super::channel::{Rx as ChannelRx, Tx as ChannelTx};
pub(crate) use super::registry::ID as SubscriptionID;
/// Registry mapping subscribe destinations to their listener channel senders.
pub(crate) type Registry = GenericRegistry<pubsub::SubscribeTo, ChannelTx>;
/// One-shot signal sent after the loop's first successful poll.
pub(crate) type ReadyTx = oneshot::Sender<()>;
/// Signal sent when the subscribe loop exits.
pub(crate) type ExitTx = mpsc::Sender<()>;
/// Sender half of the loop's control pipe.
pub(crate) type ControlTx = mpsc::Sender<ControlCommand>;
/// Receiver half of the loop's control pipe.
pub(crate) type ControlRx = mpsc::Receiver<ControlCommand>;
/// One-shot channel used to hand a newly-registered listener its ID.
pub(crate) type SubscriptionIdTx = oneshot::Sender<SubscriptionID>;
/// Commands we pass via the control pipe.
#[derive(Debug)]
pub(crate) enum ControlCommand {
    /// A stream for a channel or channel group is being dropped.
    ///
    /// Only sent from `Subscription` to `SubscribeLoop`.
    Drop(SubscriptionID, pubsub::SubscribeTo),
    /// A stream for a channel or channel group is being created.
    ///
    /// Only sent from `PubNub` to `SubscribeLoop`.
    Add(pubsub::SubscribeTo, ChannelTx, SubscriptionIdTx),
}
/// Everything the subscribe loop needs to run: its control pipe, optional
/// readiness/exit signals, the transport, and the initial listener registry.
#[derive(Debug)]
pub(crate) struct SubscribeLoopParams<TTransport> {
    pub control_rx: ControlRx,
    // Taken (consumed) after the first successful poll.
    pub ready_tx: Option<ReadyTx>,
    // Notified when the loop stops, if present.
    pub exit_tx: Option<ExitTx>,
    pub transport: TTransport,
    pub to: Registry,
}
/// Mutable state owned by the running loop: the listener registry.
#[derive(Debug)]
struct StateData {
    pub to: Registry,
}
/// Implements the subscribe loop, which efficiently polls for new messages.
///
/// Each iteration races a long-poll subscribe request against the control
/// pipe; control commands (add/drop listeners) interrupt the in-flight poll,
/// which is simply dropped and re-issued with the updated destination set.
pub(crate) async fn subscribe_loop<TTransport>(params: SubscribeLoopParams<TTransport>)
where
    TTransport: Service<request::Subscribe, Response = response::Subscribe> + Clone,
    <TTransport as Service<request::Subscribe>>::Error: Debug + 'static,
{
    debug!("Starting subscribe loop");
    #[allow(clippy::unneeded_field_pattern)]
    let SubscribeLoopParams {
        mut control_rx,
        mut ready_tx,
        mut exit_tx,
        transport,
        to,
    } = params;
    let mut state_data = StateData { to };
    // Timetoken of the last delivered batch; the server uses it as a cursor.
    let mut timetoken = Timetoken::default();
    loop {
        // TODO: re-add cache.
        let to: Vec<pubsub::SubscribeTo> = state_data.to.keys().cloned().collect();
        let request = request::Subscribe {
            to,
            timetoken,
            heartbeat: None,
        };
        // Pin both futures locally so they can be polled by `select`.
        let response = transport.call(request);
        let response = response.fuse();
        futures_util::pin_mut!(response);
        let control_rx_recv = control_rx.next();
        futures_util::pin_mut!(control_rx_recv);
        let (messages, next_timetoken) = match select(control_rx_recv, response).await {
            Either::Left((msg, _)) => {
                let outcome = handle_control_command(&mut state_data, msg).await;
                if let ControlOutcome::Terminate = outcome {
                    // Termination requested, break the loop.
                    break;
                }
                // Control signalled we can continue with the polling, however
                // we literally need to `continue` here in order to force rerun
                // the loop from the beginning.
                // We rely on the in-flight request to be properly cleaned up,
                // since their futures are being dropped here.
                continue;
            }
            Either::Right((res, _)) => {
                match res {
                    Ok(v) => v,
                    Err(err) => {
                        // TODO: add some kind of circuit breaker.
                        // Report error and retry - maybe it'd work this time.
                        error!("Transport error while polling: {:?}", err);
                        continue;
                    }
                }
            }
        };
        // Send ready message when the subscribe loop is capable of receiving
        // messages.
        // This is intended to signal the readiness (and the healthiness) of
        // the setup. It is invoked after the `Ok` result from the request
        // future, guaranteeing that Transport was able to perform successfully
        // at least once.
        if let Some(ready_tx) = ready_tx.take() {
            if let Err(err) = ready_tx.send(()) {
                error!("Error sending ready message: {:?}", err);
                break;
            }
        }
        // Save Timetoken for next request
        timetoken = next_timetoken;
        debug!("messages: {:?}", messages);
        debug!("timetoken: {:?}", timetoken);
        // Distribute messages to each listener.
        dispatch_messages(&mut state_data, messages).await;
    }
    debug!("Stopping subscribe loop");
    if let Some(ref mut exit_tx) = exit_tx {
        exit_tx.send(()).await.expect("Unable to send exit message");
    }
}
/// Encodes action to be taken in response to control command.
#[derive(Debug)]
enum ControlOutcome {
    /// Stop the subscribe loop (last listener was dropped).
    Terminate,
    /// Restart the poll with the updated registry.
    CanContinue,
}
/// Handle a control command.
///
/// Registers or unregisters a listener in the registry; returns `Terminate`
/// only when the last listener has been dropped. A closed control pipe
/// (`None`) is treated as a no-op.
async fn handle_control_command(
    state_data: &mut StateData,
    msg: Option<ControlCommand>,
) -> ControlOutcome {
    debug!("Got request: {:?}", msg);
    let request = match msg {
        Some(v) => v,
        None => return ControlOutcome::CanContinue,
    };
    let StateData { to } = state_data;
    match request {
        ControlCommand::Drop(id, destination) => {
            // Log the event.
            debug!(
                "Unregistering the listener at subscribe loop: {:?} {:?}",
                destination, id
            );
            // Unregister specified listener from the registry.
            let (_, _effect) = to
                .unregister(&destination, id)
                .expect("Unable to unregister destination from a subscribe loop");
            // TODO: avoid terminating loop here to avoid special casing.
            if to.is_empty() {
                ControlOutcome::Terminate
            } else {
                ControlOutcome::CanContinue
            }
        }
        ControlCommand::Add(destination, channel_tx, id_tx) => {
            // Log the event.
            debug!("Registering listener at subscribe loop: {:?}", destination);
            // Register the destination listener with the registry.
            let (id, _effect) = to.register(destination, channel_tx);
            // Send Subscription ID.
            id_tx.send(id).expect("Unable to send subscription id");
            ControlOutcome::CanContinue
        }
    }
}
/// Dispatch messages to interested listeners.
///
/// A message may match several destinations (channel, channel group, ...);
/// each matching destination's listeners receive a clone of the message.
async fn dispatch_messages(state_data: &mut StateData, messages: Vec<Message>) {
    // Distribute messages to each listener.
    for message in messages {
        let destinations = MessageDestinations::new(&message);
        for destination in destinations {
            let listeners = state_data.to.get_iter_mut(&destination);
            let listeners = match listeners {
                None => {
                    debug!("No listeners for message");
                    continue;
                }
                Some(v) => v,
            };
            debug!(
                "Delivering to {:?} listeners for {:?}...",
                listeners.size_hint(),
                destination
            );
            // Delivery failures are logged but do not abort the fan-out.
            for channel_tx in listeners {
                if let Err(error) = channel_tx.send(message.clone()).await {
                    error!("Delivery error: {:?}", error);
                }
            }
        }
    }
}
|
mod RrtWeekend;
mod RrtVec3;
mod RrtColor;
mod RrtRay;
mod RrtSphere;
mod RrtHittable;
mod RrtHittableList;
use std::rc::Rc;
use crate::RrtVec3::Vec3;
use crate::RrtRay::Ray;
use crate::RrtHittableList::HittableList;
use crate::RrtSphere::Sphere;
fn main() {
// Image
let aspect_ratio : f64 = 16.0 / 9.0;
let image_width : i64 = 640;
let image_height : i64 = (image_width as f64 / aspect_ratio) as i64;
// World
let mut world : HittableList = HittableList::new();
let vec1 : Vec3 = Vec3::new([0.0, 0.0, -1.0]);
let vec2 : Vec3 = Vec3::new([0.0, -100.5, -1.0]);
/*
* @TODO check if it's an acceptable solution...
*/
world = world.add(Rc::new(Sphere::new(vec1, 0.5)));
world = world.add(Rc::new(Sphere::new(vec2, 100.0)));
// Camera
let viewport_height : f64 = 2.0;
let viewport_width : f64 = aspect_ratio * viewport_height as f64;
let focal_length : f64 = 1.0;
let origin : Vec3 = Vec3::new([0.0, 0.0, 0.0]);
let horizontal : Vec3 = Vec3::new([viewport_width, 0.0, 0.0]);
let vertical : Vec3 = Vec3::new([0.0, viewport_height, 0.0]);
let lower_left_corner : Vec3 = origin - horizontal / 2.0 - vertical / 2.0 - Vec3::new([0.0, 0.0, focal_length]);
// Render
println!("P3\n{} {}\n255\n", image_width, image_height);
for j in (0..image_height).rev() {
eprintln!("Scanlines remaining : {}", j);
for i in 0..image_width {
let u : f64 = i as f64 / (image_width - 1) as f64;
let v : f64 = j as f64 / (image_height - 1) as f64;
let direction : Vec3 = lower_left_corner + Vec3::new([u, u, u]) * horizontal + Vec3::new([v, v, v]) * vertical - origin;
let ray : Ray = Ray::new(origin, direction);
/*
* @TODO check if world.clone is correct ?
*/
let pixel_color : Vec3 = RrtRay::ray_color(ray, world.clone());
RrtColor::write_color(pixel_color);
}
}
eprintln!("Done");
} |
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::is_trait_method;
use rustc_hir as hir;
use rustc_lint::LateContext;
use rustc_span::sym;
use super::FILTER_MAP;
/// lint use of `filter().flat_map()` for `Iterators`
pub(super) fn check<'tcx>(
    cx: &LateContext<'tcx>,
    expr: &'tcx hir::Expr<'_>,
    _filter_args: &'tcx [hir::Expr<'_>],
    _map_args: &'tcx [hir::Expr<'_>],
) {
    // Only lint when the receiver of `.filter().flat_map()` is an Iterator.
    if !is_trait_method(cx, expr, sym::Iterator) {
        return;
    }
    let msg = "called `filter(..).flat_map(..)` on an `Iterator`";
    let hint = "this is more succinctly expressed by calling `.flat_map(..)` \
                and filtering by returning `iter::empty()`";
    span_lint_and_help(cx, FILTER_MAP, expr.span, msg, None, hint);
}
|
use std::fmt::Display;
use hotplot::chart::line::data::ThemeSettings;
use iced::{button, checkbox, container, pick_list, progress_bar, radio, rule, scrollable, slider, text_input, Color};
/// Available UI color themes. The explicit discriminants back
/// `from_discriminant` for persisting the user's selection.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Theme {
    Default = 0,
    Light = 1,
    Dark = 2,
}
impl Display for Theme {
    /// Writes the human-readable theme name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Theme::Default => "Default",
            Theme::Light => "Light",
            Theme::Dark => "Dark",
        };
        write!(f, "{}", name)
    }
}
impl Theme {
    /// Every selectable theme, ordered by discriminant.
    pub const ALL: [Theme; 3] = [Theme::Default, Theme::Light, Theme::Dark];

    /// Maps a stored discriminant back to its theme; `None` for anything
    /// out of range. Relies on `ALL` being indexed by discriminant.
    pub fn from_discriminant(discriminant: u8) -> Option<Self> {
        Self::ALL.get(usize::from(discriminant)).copied()
    }
}
// The starting theme when none has been chosen yet.
impl Default for Theme {
    fn default() -> Theme {
        Theme::Default
    }
}
// Maps the UI theme onto the chart library's theme settings. Default and
// Light share the same light palette; Dark overrides text colors as well.
impl From<Theme> for ThemeSettings {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default | Theme::Light => {
                ThemeSettings {
                    // Light grey chart area on a white padded background.
                    background_color: Color::from_rgb8(211, 211, 211),
                    padded_background_color: Color::WHITE,
                    margined_background_color: Some(Color::from_rgb8(241, 241, 241)),
                    title_color: Color::BLACK,
                    ..Default::default()
                }
            }
            Theme::Dark => {
                ThemeSettings {
                    background_color: Color::from_rgb8(0x36, 0x39, 0x3F),
                    padded_background_color: Color::from_rgb8(0x36, 0x39, 0x3F),
                    margined_background_color: Some(dark::SURFACE),
                    // All text switches to white for contrast on the dark surface.
                    title_color: Color::WHITE,
                    data_description_color: Color::WHITE,
                    x_label_text_color: Color::WHITE,
                    y_label_text_color: Color::WHITE,
                    ..Default::default()
                }
            }
        }
    }
}
// iced widget style-sheet conversions. `Dark` always uses the `dark` module;
// `Default`/`Light` use iced's stock style unless a custom `light` style
// exists for that widget (TextInput, Button, Slider, Checkbox, PickList).
impl From<Theme> for Box<dyn container::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => Default::default(),
            Theme::Dark => dark::Container.into(),
        }
    }
}
impl From<Theme> for Box<dyn radio::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => Default::default(),
            Theme::Dark => dark::Radio.into(),
        }
    }
}
impl From<Theme> for Box<dyn text_input::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => light::TextInput.into(),
            Theme::Dark => dark::TextInput.into(),
        }
    }
}
impl From<Theme> for Box<dyn button::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => light::Button.into(),
            Theme::Dark => dark::Button.into(),
        }
    }
}
impl From<Theme> for Box<dyn scrollable::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => Default::default(),
            Theme::Dark => dark::Scrollable.into(),
        }
    }
}
impl From<Theme> for Box<dyn slider::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => light::Slider.into(),
            Theme::Dark => dark::Slider.into(),
        }
    }
}
impl From<Theme> for Box<dyn progress_bar::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => Default::default(),
            Theme::Dark => dark::ProgressBar.into(),
        }
    }
}
impl From<Theme> for Box<dyn checkbox::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => light::Checkbox.into(),
            Theme::Dark => dark::Checkbox.into(),
        }
    }
}
impl From<Theme> for Box<dyn rule::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => Default::default(),
            Theme::Dark => dark::Rule.into(),
        }
    }
}
impl From<Theme> for Box<dyn pick_list::StyleSheet> {
    fn from(theme: Theme) -> Self {
        match theme {
            Theme::Default => Default::default(),
            Theme::Light => light::PickList.into(),
            Theme::Dark => dark::PickList.into(),
        }
    }
}
mod light {
use iced::{Vector, Background, Color, button, checkbox, container, pick_list, progress_bar, radio, rule, scrollable, slider, text_input};
// Light-theme text input style.
// NOTE(review): `active` uses the accent blue as the input *background* with
// white value text — looks intentional given `value_color`, but confirm.
pub struct TextInput;
impl text_input::StyleSheet for TextInput {
    fn active(&self) -> text_input::Style {
        text_input::Style {
            background: Color::from_rgb(0.11, 0.42, 0.87).into(),
            border_radius: 2.0,
            border_width: 0.0,
            border_color: Color::TRANSPARENT,
        }
    }
    // Focus adds a solid accent border.
    fn focused(&self) -> text_input::Style {
        text_input::Style {
            border_width: 1.0,
            border_color: Color::from_rgb(0.11, 0.42, 0.87),
            ..self.active()
        }
    }
    // Hover shows a faded (30% alpha) accent border.
    fn hovered(&self) -> text_input::Style {
        text_input::Style {
            border_width: 1.0,
            border_color: Color { a: 0.3, ..Color::from_rgb(0.11, 0.42, 0.87) },
            ..self.focused()
        }
    }
    fn placeholder_color(&self) -> Color {
        Color::from_rgb8(0xA0, 0xA0, 0xA0)
    }
    fn value_color(&self) -> Color {
        Color::WHITE
    }
    fn selection_color(&self) -> Color {
        Color::from_rgba8(0xA0, 0xA0, 0xA0, 0.3)
    }
}
pub struct Button;
impl button::StyleSheet for Button {
fn active(&self) -> button::Style {
button::Style {
background: Color::from_rgb(0.11, 0.42, 0.87).into(),
border_radius: 12.0,
shadow_offset: Vector::new(1.0, 1.0),
text_color: Color::from_rgb8(0xEE, 0xEE, 0xEE),
..button::Style::default()
}
}
fn hovered(&self) -> button::Style {
button::Style {
text_color: Color::WHITE,
shadow_offset: Vector::new(1.0, 2.0),
..self.active()
}
}
}
pub struct PickList;
impl pick_list::StyleSheet for PickList {
fn menu(&self) -> pick_list::Menu {
pick_list::Menu {
background: Color::from_rgb(0.11, 0.42, 0.87).into(),
text_color: Color::from_rgb8(0xEE, 0xEE, 0xEE),
..Default::default()
}
}
fn active(&self) -> pick_list::Style {
pick_list::Style {
background: Color::from_rgb(0.11, 0.42, 0.87).into(),
border_radius: 12.0,
text_color: Color::from_rgb8(0xEE, 0xEE, 0xEE),
..Default::default()
}
}
fn hovered(&self) -> pick_list::Style {
pick_list::Style {
background: Color::from_rgb(0.11, 0.42, 0.87).into(),
border_radius: 12.0,
text_color: Color::from_rgb8(0xEE, 0xEE, 0xEE),
..Default::default()
}
}
}
pub struct Slider;
impl slider::StyleSheet for Slider {
fn active(&self) -> slider::Style {
slider::Style {
rail_colors: (Color::from_rgb(0.11, 0.42, 0.87), Color { a: 0.1, ..Color::from_rgb(0.11, 0.42, 0.87) }),
handle: slider::Handle {
shape: slider::HandleShape::Circle { radius: 9.0 },
color: Color::from_rgb(0.11, 0.42, 0.87),
border_width: 0.0,
border_color: Color::TRANSPARENT,
},
}
}
fn hovered(&self) -> slider::Style {
let active = self.active();
slider::Style {
handle: slider::Handle {
color: Color::from_rgb(0.11, 0.42, 0.87),
..active.handle
},
..active
}
}
fn dragging(&self) -> slider::Style {
let active = self.active();
slider::Style {
handle: slider::Handle {
color: Color::from_rgb(0.11, 0.42, 0.87),
..active.handle
},
..active
}
}
}
pub struct Checkbox;
impl checkbox::StyleSheet for Checkbox {
fn active(&self, is_checked: bool) -> checkbox::Style {
checkbox::Style {
background: if is_checked { Color::from_rgb(0.11, 0.42, 0.87).into() } else { Color::from_rgb(0.4, 0.4, 0.4).into() },
checkmark_color: Color::from_rgb8(0xEE, 0xEE, 0xEE).into(),
border_radius: 12.0,
border_width: 1.0,
border_color: Color::BLACK
}
}
fn hovered(&self, is_checked: bool) -> checkbox::Style {
checkbox::Style {
background: if is_checked { Color::from_rgb(0.11, 0.42, 0.87).into() } else { Color::from_rgb(0.4, 0.4, 0.4).into() },
checkmark_color: Color::from_rgb8(0xEE, 0xEE, 0xEE).into(),
border_radius: 12.0,
border_width: 1.0,
border_color: Color::BLACK
}
}
}
}
/// Widget style sheets for the dark theme, built around a small named
/// palette of colors.
mod dark {
    use iced::{Background, Color, button, checkbox, container, pick_list, progress_bar, radio, rule, scrollable, slider, text_input};
    /// Raised-surface background color (#40444B).
    pub const SURFACE: Color = Color::from_rgb(
        0x40 as f32 / 255.0,
        0x44 as f32 / 255.0,
        0x4B as f32 / 255.0,
    );
    /// Focus/highlight accent (#6FFFE9).
    pub const ACCENT: Color = Color::from_rgb(
        0x6F as f32 / 255.0,
        0xFF as f32 / 255.0,
        0xE9 as f32 / 255.0,
    );
    /// Primary interactive color (#7289DA).
    pub const ACTIVE: Color = Color::from_rgb(
        0x72 as f32 / 255.0,
        0x89 as f32 / 255.0,
        0xDA as f32 / 255.0,
    );
    /// Hover variant of `ACTIVE` (#677BC4).
    pub const HOVERED: Color = Color::from_rgb(
        0x67 as f32 / 255.0,
        0x7B as f32 / 255.0,
        0xC4 as f32 / 255.0,
    );
    /// Dark page background with white text.
    pub struct Container;
    impl container::StyleSheet for Container {
        fn style(&self) -> container::Style {
            container::Style {
                background: Color::from_rgb8(0x36, 0x39, 0x3F).into(),
                text_color: Color::WHITE.into(),
                ..container::Style::default()
            }
        }
    }
    pub struct Radio;
    impl radio::StyleSheet for Radio {
        fn active(&self) -> radio::Style {
            radio::Style {
                background: SURFACE.into(),
                dot_color: ACTIVE,
                border_width: 1.0,
                border_color: ACTIVE,
            }
        }
        fn hovered(&self) -> radio::Style {
            radio::Style {
                // Hover fades the surface to half opacity.
                background: Color { a: 0.5, ..SURFACE }.into(),
                ..self.active()
            }
        }
    }
    pub struct TextInput;
    impl text_input::StyleSheet for TextInput {
        fn active(&self) -> text_input::Style {
            text_input::Style {
                background: SURFACE.into(),
                border_radius: 2.0,
                border_width: 0.0,
                border_color: Color::TRANSPARENT,
            }
        }
        fn focused(&self) -> text_input::Style {
            // Focus adds an accent border on top of the active style.
            text_input::Style {
                border_width: 1.0,
                border_color: ACCENT,
                ..self.active()
            }
        }
        fn hovered(&self) -> text_input::Style {
            // Hover is a faded version of the focused border.
            text_input::Style {
                border_width: 1.0,
                border_color: Color { a: 0.3, ..ACCENT },
                ..self.focused()
            }
        }
        fn placeholder_color(&self) -> Color {
            Color::from_rgb(0.4, 0.4, 0.4)
        }
        fn value_color(&self) -> Color {
            Color::WHITE
        }
        fn selection_color(&self) -> Color {
            ACTIVE
        }
    }
    pub struct Button;
    impl button::StyleSheet for Button {
        fn active(&self) -> button::Style {
            button::Style {
                background: ACTIVE.into(),
                border_radius: 3.0,
                text_color: Color::WHITE,
                ..button::Style::default()
            }
        }
        fn hovered(&self) -> button::Style {
            button::Style {
                background: HOVERED.into(),
                text_color: Color::WHITE,
                ..self.active()
            }
        }
        fn pressed(&self) -> button::Style {
            // Pressed adds a white border on top of the hovered style.
            button::Style {
                border_width: 1.0,
                border_color: Color::WHITE,
                ..self.hovered()
            }
        }
    }
    pub struct Scrollable;
    impl scrollable::StyleSheet for Scrollable {
        fn active(&self) -> scrollable::Scrollbar {
            scrollable::Scrollbar {
                background: SURFACE.into(),
                border_radius: 2.0,
                border_width: 0.0,
                border_color: Color::TRANSPARENT,
                scroller: scrollable::Scroller {
                    color: ACTIVE,
                    border_radius: 2.0,
                    border_width: 0.0,
                    border_color: Color::TRANSPARENT,
                },
            }
        }
        fn hovered(&self) -> scrollable::Scrollbar {
            let active = self.active();
            scrollable::Scrollbar {
                background: Color { a: 0.5, ..SURFACE }.into(),
                scroller: scrollable::Scroller {
                    color: HOVERED,
                    ..active.scroller
                },
                ..active
            }
        }
        fn dragging(&self) -> scrollable::Scrollbar {
            // Dragging lightens the scroller further, on top of hover.
            let hovered = self.hovered();
            scrollable::Scrollbar {
                scroller: scrollable::Scroller {
                    color: Color::from_rgb(0.85, 0.85, 0.85),
                    ..hovered.scroller
                },
                ..hovered
            }
        }
    }
    pub struct Slider;
    impl slider::StyleSheet for Slider {
        fn active(&self) -> slider::Style {
            slider::Style {
                rail_colors: (ACTIVE, Color { a: 0.1, ..ACTIVE }),
                handle: slider::Handle {
                    shape: slider::HandleShape::Circle { radius: 9.0 },
                    color: ACTIVE,
                    border_width: 0.0,
                    border_color: Color::TRANSPARENT,
                },
            }
        }
        fn hovered(&self) -> slider::Style {
            let active = self.active();
            slider::Style {
                handle: slider::Handle {
                    color: HOVERED,
                    ..active.handle
                },
                ..active
            }
        }
        fn dragging(&self) -> slider::Style {
            let active = self.active();
            slider::Style {
                handle: slider::Handle {
                    color: Color::from_rgb(0.85, 0.85, 0.85),
                    ..active.handle
                },
                ..active
            }
        }
    }
    pub struct ProgressBar;
    impl progress_bar::StyleSheet for ProgressBar {
        fn style(&self) -> progress_bar::Style {
            progress_bar::Style {
                background: SURFACE.into(),
                bar: ACTIVE.into(),
                border_radius: 10.0,
            }
        }
    }
    pub struct Checkbox;
    impl checkbox::StyleSheet for Checkbox {
        fn active(&self, is_checked: bool) -> checkbox::Style {
            checkbox::Style {
                background: if is_checked { ACTIVE } else { SURFACE }
                    .into(),
                checkmark_color: Color::WHITE,
                border_radius: 2.0,
                border_width: 1.0,
                border_color: ACTIVE,
            }
        }
        fn hovered(&self, is_checked: bool) -> checkbox::Style {
            // Hover slightly fades the fill while keeping the border.
            checkbox::Style {
                background: Color {
                    a: 0.8,
                    ..if is_checked { ACTIVE } else { SURFACE }
                }
                .into(),
                ..self.active(is_checked)
            }
        }
    }
    pub struct Rule;
    impl rule::StyleSheet for Rule {
        fn style(&self) -> rule::Style {
            rule::Style {
                color: SURFACE,
                width: 2,
                radius: 1.0,
                fill_mode: rule::FillMode::Padded(15),
            }
        }
    }
    pub struct PickList;
    impl pick_list::StyleSheet for PickList {
        fn menu(&self) -> pick_list::Menu {
            pick_list::Menu {
                text_color: Color::WHITE.into(),
                background: Background::Color(SURFACE),
                border_width: 1.0,
                border_color: ACTIVE,
                selected_text_color: Color::WHITE.into(),
                selected_background: Background::Color(ACTIVE),
            }
        }
        fn active(&self) -> pick_list::Style {
            pick_list::Style {
                text_color: Color::WHITE.into(),
                background: Background::Color(SURFACE),
                border_width: 1.0,
                border_color: ACTIVE,
                border_radius: 3.0,
                icon_size: 0.7,
            }
        }
        // NOTE(review): hovered is currently identical to active.
        fn hovered(&self) -> pick_list::Style {
            pick_list::Style {
                text_color: Color::WHITE.into(),
                background: Background::Color(SURFACE),
                border_width: 1.0,
                border_color: ACTIVE,
                border_radius: 3.0,
                icon_size: 0.7,
            }
        }
    }
}
use alloc::boxed::Box;
use alloc::vec::Vec;
use core::ffi::c_void;
// Hooks implemented by the Miri interpreter itself; these symbols only
// resolve when the program is executed under Miri.
extern "Rust" {
    /// Number of frames in the current backtrace (used below to size buffers).
    fn miri_backtrace_size(flags: u64) -> usize;
    /// Fills `buf` (of `miri_backtrace_size` length) with opaque frame pointers.
    fn miri_get_backtrace(flags: u64, buf: *mut *mut ());
    /// Resolves one opaque frame pointer to its metadata.
    fn miri_resolve_frame(ptr: *mut (), flags: u64) -> MiriFrame;
    /// Writes the frame's symbol name and file name into the caller's buffers.
    fn miri_resolve_frame_names(ptr: *mut (), flags: u64, name_buf: *mut u8, filename_buf: *mut u8);
}
/// Raw frame metadata returned by `miri_resolve_frame`.
///
/// `repr(C)` so the layout matches what Miri writes across the FFI boundary.
#[repr(C)]
pub struct MiriFrame {
    // Byte length of the symbol name; tells the caller how big a buffer to
    // pass to `miri_resolve_frame_names`.
    pub name_len: usize,
    // Byte length of the file name, likewise.
    pub filename_len: usize,
    pub lineno: u32,
    pub colno: u32,
    pub fn_ptr: *mut c_void,
}
/// Owned counterpart of `MiriFrame`: the name/filename bytes have been
/// copied out of Miri into boxed slices.
#[derive(Clone, Debug)]
pub struct FullMiriFrame {
    pub name: Box<[u8]>,
    pub filename: Box<[u8]>,
    pub lineno: u32,
    pub colno: u32,
    pub fn_ptr: *mut c_void,
}
/// A resolved backtrace frame: the captured raw address plus its metadata.
#[derive(Debug, Clone)]
pub struct Frame {
    // Opaque frame pointer as returned by `miri_get_backtrace`.
    pub addr: *mut c_void,
    // Fully resolved metadata for `addr`.
    pub inner: FullMiriFrame,
}
// SAFETY: Miri guarantees that the returned pointer
// can be used from any thread.
unsafe impl Send for Frame {}
unsafe impl Sync for Frame {}
impl Frame {
    /// Instruction pointer: the raw address captured for this frame.
    pub fn ip(&self) -> *mut c_void {
        self.addr
    }
    /// Stack pointer — not available under Miri, so always null.
    pub fn sp(&self) -> *mut c_void {
        core::ptr::null_mut()
    }
    /// Address of the enclosing function, as resolved by Miri.
    pub fn symbol_address(&self) -> *mut c_void {
        self.inner.fn_ptr
    }
    /// Module base address — not tracked under Miri.
    pub fn module_base_address(&self) -> Option<*mut c_void> {
        None
    }
}
/// Walk the current backtrace, invoking `cb` once per frame until it
/// returns `false`.
pub fn trace<F: FnMut(&super::Frame) -> bool>(cb: F) {
    // SAFETY: Miri guarantees that the backtrace API functions
    // can be called from any thread.
    unsafe { trace_unsynchronized(cb) };
}
/// Resolve an opaque frame pointer (from `miri_get_backtrace`) into a
/// fully-owned `Frame` carrying symbol name, file name and source position.
pub fn resolve_addr(ptr: *mut c_void) -> Frame {
    // SAFETY: Miri will stop execution with an error if this pointer
    // is invalid.
    // NOTE(review): flags = 1 here but 0 in the name query below —
    // presumably an API-version flag; confirm against Miri's docs.
    let frame = unsafe { miri_resolve_frame(ptr as *mut (), 1) };
    // Allocate exactly the buffer sizes Miri reported for this frame.
    let mut name = Vec::with_capacity(frame.name_len);
    let mut filename = Vec::with_capacity(frame.filename_len);
    // SAFETY: name and filename have been allocated with the amount
    // of memory miri has asked for, and miri guarantees it will initialize it
    unsafe {
        miri_resolve_frame_names(ptr as *mut (), 0, name.as_mut_ptr(), filename.as_mut_ptr());
        name.set_len(frame.name_len);
        filename.set_len(frame.filename_len);
    }
    Frame {
        addr: ptr,
        inner: FullMiriFrame {
            name: name.into(),
            filename: filename.into(),
            lineno: frame.lineno,
            colno: frame.colno,
            fn_ptr: frame.fn_ptr,
        },
    }
}
/// Fetch the whole backtrace from Miri, then resolve each frame and feed it
/// to `cb`, stopping early when the callback returns `false`.
///
/// # Safety
/// Caller must only invoke this under Miri, where the `miri_*` hooks exist.
unsafe fn trace_unsynchronized<F: FnMut(&super::Frame) -> bool>(mut cb: F) {
    // Ask Miri how many frames there are, then fetch them all at once.
    // NOTE(review): the size query passes flags = 0 while the fetch passes
    // flags = 1 — presumably an API-version flag; confirm they agree.
    let len = miri_backtrace_size(0);
    let mut frames = Vec::with_capacity(len);
    miri_get_backtrace(1, frames.as_mut_ptr());
    frames.set_len(len);
    for ptr in frames.iter() {
        let frame = resolve_addr(*ptr as *mut c_void);
        if !cb(&super::Frame { inner: frame }) {
            return;
        }
    }
}
|
use abstract_integers::*;
// Defines `BigBounded`, a 256-bit signed secret integer type (macro from
// the `abstract_integers` crate).
abstract_signed_secret_integer!(BigBounded, 256);
// (2^255 - 1) * 2 exceeds the signed 256-bit range, so the arithmetic is
// expected to panic on the bound check — hence `should_panic`.
#[test]
#[should_panic]
fn bounded() {
    println!("BigBounded::max(): {:x}", BigBounded::max());
    let y1 = (BigBounded::pow2(255) - BigBounded::from_literal(1)) * BigBounded::from_literal(2);
    let y2 = BigBounded::from_literal(4);
    let _y3 = y1 + y2;
}
|
//! Soft-link with `config.toml`
//!
//! leetcode-cli will generate a `leetcode.toml` by default,
//! if you want to change it, you can:
//!
//! + Edit leetcode.toml at `~/.leetcode/leetcode.toml` directly
//! + Use `leetcode config` to update it
use crate::{
config::{code::Code, cookies::Cookies, storage::Storage, sys::Sys},
Error,
};
use serde::{Deserialize, Serialize};
use std::{fs, path::Path};
mod code;
mod cookies;
mod storage;
mod sys;
/// Sync with `~/.leetcode/leetcode.toml`
///
/// Top-level configuration for leetcode-cli, deserialized from TOML.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct Config {
    // System section is filled in with defaults and never written back.
    #[serde(default, skip_serializing)]
    pub sys: Sys,
    pub code: Code,
    pub cookies: Cookies,
    pub storage: Storage,
}
impl Config {
    /// Serialize a default config as pretty TOML and write it to `p`.
    fn write_default(p: impl AsRef<Path>) -> Result<(), crate::Error> {
        fs::write(p.as_ref(), toml::ser::to_string_pretty(&Self::default())?)?;
        Ok(())
    }
    /// Locate lc's config file, creating a default one when absent.
    ///
    /// When the existing file fails to parse, a pristine template is written
    /// to `leetcode.tmp.toml` (so the user can diff it against the broken
    /// config) and the parse error is returned.
    pub fn locate() -> Result<Config, crate::Error> {
        let conf = Self::root()?.join("leetcode.toml");
        if !conf.is_file() {
            Self::write_default(&conf)?;
        }
        let s = fs::read_to_string(&conf)?;
        match toml::from_str::<Config>(&s) {
            Ok(config) => Ok(config),
            Err(e) => {
                let tmp = Self::root()?.join("leetcode.tmp.toml");
                Self::write_default(tmp)?;
                Err(e.into())
            }
        }
    }
    /// Get root path of leetcode-cli, creating `~/.leetcode` on first use.
    pub fn root() -> Result<std::path::PathBuf, Error> {
        let dir = dirs::home_dir().ok_or(Error::NoneError)?.join(".leetcode");
        if !dir.is_dir() {
            info!("Generate root dir at {:?}.", &dir);
            fs::DirBuilder::new().recursive(true).create(&dir)?;
        }
        Ok(dir)
    }
    /// Sync new config to config.toml
    ///
    /// Goes through `Self::root()` (like `locate`) so the config directory
    /// is created when missing, instead of failing on a bare home-dir join.
    pub fn sync(&self) -> Result<(), Error> {
        let conf = Self::root()?.join("leetcode.toml");
        fs::write(conf, toml::ser::to_string_pretty(&self)?)?;
        Ok(())
    }
}
|
#[macro_use]
mod utils;
inject_indy_dependencies!();
extern crate indyrs as api;
extern crate indyrs as indy;
use crate::utils::constants::*;
use crate::utils::metrics;
use crate::utils::wallet;
use crate::utils::Setup;
/// Integration tests for the metrics collection API.
mod collect {
    use super::*;
    use std::collections::HashMap;
    use serde_json::Value;
    #[test]
    fn test_metrics_schema() {
        let setup = Setup::empty();
        let config = config(&setup.name);
        wallet::create_wallet(&config, WALLET_CREDENTIALS).unwrap();
        let result_metrics = metrics::collect_metrics().unwrap();
        let metrics_map = serde_json::from_str::<HashMap<String, Value>>(&result_metrics)
            .expect("Top level object should be a dictionary");
        for metrics_set in metrics_map.values() {
            let metrics_set = metrics_set.as_array().expect("Metrics set should be an array");
            for metric in metrics_set.iter() {
                let metrics = metric.as_object().expect("Metrics should be an object");
                // Previously the `contains_key` results were discarded, so
                // the schema was never actually checked — assert them.
                assert!(metrics.contains_key("value"));
                assert!(metrics.contains_key("tags"));
            }
        }
    }
    #[test]
    fn collect_metrics_contains_wallet_service_statistics() {
        let result_metrics = metrics::collect_metrics().unwrap();
        let metrics_map = serde_json::from_str::<HashMap<String, Value>>(&result_metrics).unwrap();
        assert!(metrics_map.contains_key("wallet_count"));
        let wallet_count = metrics_map
            .get("wallet_count")
            .unwrap()
            .as_array()
            .unwrap();
        assert!(wallet_count.contains(&json!({"tags":{"label":"opened"},"value":0})));
        assert!(wallet_count.contains(&json!({"tags":{"label":"opened_ids"},"value":0})));
        assert!(wallet_count.contains(&json!({"tags":{"label":"pending_for_import"},"value":0})));
        assert!(wallet_count.contains(&json!({"tags":{"label":"pending_for_open"},"value":0})));
    }
    #[test]
    fn collect_metrics_contains_thread_pool_service_statistics() {
        let result_metrics = metrics::collect_metrics().unwrap();
        let metrics_map = serde_json::from_str::<HashMap<String, Value>>(&result_metrics).unwrap();
        assert!(metrics_map.contains_key("threadpool_threads_count"));
        let threadpool_threads_count = metrics_map
            .get("threadpool_threads_count")
            .unwrap()
            .as_array()
            .unwrap();
        assert!(threadpool_threads_count.contains(&json!({"tags":{"label":"active"},"value":0})));
        assert!(threadpool_threads_count.contains(&json!({"tags":{"label":"queued"},"value":0})));
        assert!(threadpool_threads_count.contains(&json!({"tags":{"label":"panic"},"value":0})));
    }
    #[test]
    fn collect_metrics_includes_commands_count() {
        let setup = Setup::empty();
        let config = config(&setup.name);
        wallet::create_wallet(&config, WALLET_CREDENTIALS).unwrap();
        let result_metrics = metrics::collect_metrics().unwrap();
        let metrics_map = serde_json::from_str::<HashMap<String, Value>>(&result_metrics).unwrap();
        assert!(metrics_map.contains_key("commands_count"));
        let commands_count = metrics_map
            .get("commands_count")
            .unwrap()
            .as_array()
            .unwrap();
        assert!(commands_count.contains(&json!({"tags":{"command": "pairwise_command_pairwise_exists", "stage": "executed"} ,"value": 0})));
        assert!(commands_count.contains(&json!({"tags":{"command": "pairwise_command_pairwise_exists", "stage": "queued"} ,"value": 0})));
        assert!(commands_count.contains(&json!({"tags":{"command": "payments_command_build_set_txn_fees_req_ack", "stage": "executed"} ,"value": 0})));
        assert!(commands_count.contains(&json!({"tags":{"command": "payments_command_build_set_txn_fees_req_ack", "stage": "queued"} ,"value": 0})));
    }
    #[test]
    fn collect_metrics_includes_commands_duration_ms() {
        let setup = Setup::empty();
        let config = config(&setup.name);
        wallet::create_wallet(&config, WALLET_CREDENTIALS).unwrap();
        let result_metrics = metrics::collect_metrics().unwrap();
        let metrics_map = serde_json::from_str::<HashMap<String, Value>>(&result_metrics).unwrap();
        assert!(metrics_map.contains_key("commands_duration_ms"));
        let commands_duration_ms = metrics_map
            .get("commands_duration_ms")
            .unwrap()
            .as_array()
            .unwrap();
        assert!(commands_duration_ms.contains(&json!({"tags":{"command": "pairwise_command_pairwise_exists", "stage": "executed"} ,"value": 0})));
        assert!(commands_duration_ms.contains(&json!({"tags":{"command": "pairwise_command_pairwise_exists", "stage": "queued"} ,"value": 0})));
        assert!(commands_duration_ms.contains(&json!({"tags":{"command": "payments_command_build_set_txn_fees_req_ack", "stage": "executed"} ,"value": 0})));
        assert!(commands_duration_ms.contains(&json!({"tags":{"command": "payments_command_build_set_txn_fees_req_ack", "stage": "queued"} ,"value": 0})));
    }
    #[test]
    fn collect_metrics_includes_commands_duration_ms_bucket() {
        let setup = Setup::empty();
        let config = config(&setup.name);
        wallet::create_wallet(&config, WALLET_CREDENTIALS).unwrap();
        let result_metrics = metrics::collect_metrics().unwrap();
        let metrics_map = serde_json::from_str::<HashMap<String, Value>>(&result_metrics).unwrap();
        assert!(metrics_map.contains_key("commands_duration_ms_bucket"));
        let commands_duration_ms_bucket = metrics_map
            .get("commands_duration_ms_bucket")
            .unwrap()
            .as_array()
            .unwrap();
        assert!(commands_duration_ms_bucket.contains(&json!({"tags":{"command": "pairwise_command_pairwise_exists", "stage": "executed"} ,"value": 0})));
        assert!(commands_duration_ms_bucket.contains(&json!({"tags":{"command": "pairwise_command_pairwise_exists", "stage": "queued"} ,"value": 0})));
        assert!(commands_duration_ms_bucket.contains(&json!({"tags":{"command": "payments_command_build_set_txn_fees_req_ack", "stage": "executed"} ,"value": 0})));
        assert!(commands_duration_ms_bucket.contains(&json!({"tags":{"command": "payments_command_build_set_txn_fees_req_ack", "stage": "queued"} ,"value": 0})));
    }
    /// Minimal wallet config JSON: just an id.
    fn config(name: &str) -> String {
        json!({ "id": name }).to_string()
    }
}
|
/// Read one line from stdin and parse it as `T`; panics on parse failure.
fn read<T: std::str::FromStr>() -> T {
    let mut line = String::new();
    std::io::stdin().read_line(&mut line).ok();
    line.trim().parse().ok().unwrap()
}
/// Number of decimal digits in `u` (zero counts as one digit).
fn cnt(mut u: u64) -> i32 {
    let mut digits = 1;
    while u >= 10 {
        u /= 10;
        digits += 1;
    }
    digits
}
/// Return the smaller cofactor's partner: `n / d` where `d` is the largest
/// divisor of `n` with `d * d <= n`. For a prime `n` this is `n` itself;
/// for `n == 0` it is `0` (matching the previous behavior).
fn solve(n: u64) -> u64 {
    // Integer square root seeded from the float sqrt. The f64 result can be
    // off by one for large n (u64 -> f64 loses precision above 2^53), so
    // nudge s until s*s <= n < (s+1)^2. checked_mul guards u64 overflow.
    let mut s = (n as f64).sqrt() as u64;
    while s > 0 && s.checked_mul(s).map_or(true, |sq| sq > n) {
        s -= 1;
    }
    while (s + 1).checked_mul(s + 1).map_or(false, |sq| sq <= n) {
        s += 1;
    }
    // Scan divisors downward from the square root; the first hit is the
    // largest divisor not exceeding sqrt(n). For n >= 1 this always
    // terminates at d == 1.
    for d in (1..=s).rev() {
        if n % d == 0 {
            return n / d;
        }
    }
    // Only reachable for n == 0 (empty divisor range).
    s
}
/// Read `n`, then print the digit count of its balanced-factor cofactor.
fn main() {
    let n = read::<u64>();
    let answer = cnt(solve(n));
    println!("{}", answer);
}
|
use shrev::*;
use specs::*;
use types::*;
use protocol::client::Command;
use protocol::server::{PlayerFlag, PlayerRespawn, PlayerType};
use protocol::{to_bytes, FlagCode, ServerPacket, Upgrades as ProtocolUpgrades};
use websocket::OwnedMessage;
/// System that handles chat commands ("flag", "respawn") sent by clients.
pub struct CommandHandler {
    // Reader id for consuming (connection, command) events; registered in
    // `System::setup`, hence the Option.
    reader: Option<ReaderId<(ConnectionId, Command)>>,
}
/// Resources and component storages accessed by `CommandHandler`.
#[derive(SystemData)]
pub struct CommandHandlerData<'a> {
    // Incoming (connection, command) events.
    channel: Read<'a, EventChannel<(ConnectionId, Command)>>,
    // Connection registry: maps connections to players and broadcasts packets.
    conns: Read<'a, Connections>,
    planes: WriteStorage<'a, Plane>,
    flags: WriteStorage<'a, Flag>,
    isspec: WriteStorage<'a, IsSpectating>,
    isdead: WriteStorage<'a, IsDead>,
    pos: WriteStorage<'a, Position>,
    rot: WriteStorage<'a, Rotation>,
    vel: WriteStorage<'a, Velocity>,
    health: WriteStorage<'a, Health>,
    energy: WriteStorage<'a, Energy>,
}
impl CommandHandler {
    /// Create the handler; the channel reader is registered later in `setup`.
    pub fn new() -> Self {
        Self { reader: None }
    }
}
impl<'a> System<'a> for CommandHandler {
    type SystemData = CommandHandlerData<'a>;
    fn setup(&mut self, res: &mut Resources) {
        Self::SystemData::setup(res);
        // Subscribe to the command channel so `run` only sees events
        // published since the previous tick.
        self.reader = Some(
            res.fetch_mut::<EventChannel<(ConnectionId, Command)>>()
                .register_reader(),
        );
    }
    fn run(&mut self, mut data: Self::SystemData) {
        for evt in data.channel.read(self.reader.as_mut().unwrap()) {
            // Ignore commands from connections with no attached player.
            let player = match data.conns.0[&evt.0].player {
                Some(p) => p,
                None => continue,
            };
            let packet;
            if evt.1.com == "flag" {
                // Unrecognized flag strings fall back to the UN flag.
                let flag = Flag::from_str(&evt.1.data).unwrap_or(FlagCode::UnitedNations);
                packet = ServerPacket::PlayerFlag(PlayerFlag {
                    id: player,
                    flag: flag,
                });
                *data.flags.get_mut(player).unwrap() = flag;
            } else if evt.1.com == "respawn" {
                // Argument is the numeric plane type; malformed or unknown
                // values silently drop the command.
                let num = match evt.1.data.parse() {
                    Ok(n) => n,
                    Err(_) => continue,
                };
                let ty = match Plane::try_from(num) {
                    Some(n) => n,
                    None => continue,
                };
                // Reset physics state and restore full health/energy before
                // switching to the requested plane.
                *data.pos.get_mut(player).unwrap() = Position::default();
                *data.vel.get_mut(player).unwrap() = Velocity::default();
                *data.rot.get_mut(player).unwrap() = Rotation::default();
                *data.health.get_mut(player).unwrap() = Health::new(1.0);
                *data.energy.get_mut(player).unwrap() = Energy::new(1.0);
                *data.planes.get_mut(player).unwrap() = ty;
                // Respawning clears spectating/dead markers.
                data.isspec.remove(player);
                data.isdead.remove(player);
                data.conns.send_to_all(OwnedMessage::Binary(
                    to_bytes(&ServerPacket::PlayerRespawn(PlayerRespawn {
                        id: player,
                        pos: *data.pos.get(player).unwrap(),
                        rot: *data.rot.get(player).unwrap(),
                        upgrades: ProtocolUpgrades::default(),
                    })).unwrap(),
                ));
                packet = ServerPacket::PlayerType(PlayerType { id: player, ty: ty });
            } else {
                // Unknown command: nothing to broadcast.
                continue;
            }
            // Broadcast the resulting packet (flag change or plane type).
            data.conns
                .send_to_all(OwnedMessage::Binary(to_bytes(&packet).unwrap()));
        }
    }
}
use dispatch::SystemInfo;
use handlers::OnCloseHandler;
impl SystemInfo for CommandHandler {
    type Dependencies = OnCloseHandler;
    fn new() -> Self {
        // Resolves to the inherent `CommandHandler::new` (inherent methods
        // take precedence over trait methods) — not a recursive call.
        Self::new()
    }
    fn name() -> &'static str {
        // NOTE(review): `line!()` expands to the line number, so the name
        // is "<module>::<line>" rather than the type name — confirm this
        // is the intended identifier scheme.
        concat!(module_path!(), "::", line!())
    }
}
|
mod account;
mod ldap;
mod models;
pub use self::ldap::login as ldap_login;
pub use self::models::{Account, AccountWithId};
use self::{account::AccountRole, ldap::Ldap};
use crate::{database::Database, Server};
use account::AccountType;
pub use account::{get_user_by_name, get_user_by_token, setup_root, SALT};
use anyhow::{anyhow, bail};
use chrono::Local;
use error::{ErrorBadRequest, ErrorInternalServerError, ErrorUnauthorized};
use log::{debug, info, warn};
use rand::{distributions::Alphanumeric, Rng};
use serde::{Deserialize, Serialize};
use spa_server::re_export::{
error::{self, ErrorForbidden},
get, post,
web::{self, Query},
HttpRequest, HttpResponse, Identity, Responder, Result,
};
use std::collections::{HashMap, VecDeque};
use tokio::sync::Mutex;
/// Shared state for the authentication endpoints.
pub(crate) struct AuthContext {
    // Outstanding (nonce, opaque) pairs issued in 401 challenges; entries
    // are single-use and the list is capped at 256 (see `unauthorized`).
    nonce_list: Mutex<VecDeque<(String, String)>>,
    // Shared LDAP client handle.
    ldap: Mutex<Ldap>,
}
impl AuthContext {
    /// Build a fresh context with an empty nonce list and a new LDAP handle.
    pub async fn new() -> anyhow::Result<AuthContext> {
        Ok(AuthContext {
            nonce_list: Mutex::new(VecDeque::new()),
            ldap: Mutex::new(Ldap::new()),
        })
    }
}
/// Fields parsed from an HTTP Digest `Authorization` header
/// (RFC 7616 terminology).
#[derive(Default, Debug)]
struct Authorization {
    username: String,
    realm: String,
    // Server-issued nonce echoed back by the client.
    nonce: String,
    // Request URI the client computed its digest over.
    uri: String,
    // Quality of protection; only "auth" is accepted by `login`.
    qop: String,
    // Nonce use count.
    nc: String,
    // Client-generated nonce.
    cnonce: String,
    // The digest value itself.
    response: String,
    // Server-issued opaque token echoed back by the client.
    opaque: String,
}
/// JSON body returned to a successfully authenticated client.
#[derive(Serialize)]
struct UserContext {
    username: String,
    // Session token for subsequent API calls.
    token: String,
    role: String,
    // Account type as stored in the database (internal or LDAP).
    r#type: String,
}
/// Scheme marker for HTTP Digest authentication headers.
const DIGEST_MARK: &str = "Digest";
/// Parse an HTTP `Authorization: Digest ...` header into its fields.
///
/// Unknown key/value parts are logged and skipped. Errors when the header
/// does not use the Digest scheme or a part has no `=` separator.
fn parse_auth<S: AsRef<str>>(auth: S) -> anyhow::Result<Authorization> {
    let auth = auth.as_ref();
    // `strip_prefix` instead of `split_at`: the header is attacker-controlled
    // and `split_at(DIGEST_MARK.len())` panics on inputs shorter than the
    // marker (or split on a non-ASCII char boundary) instead of erroring.
    let content = auth
        .strip_prefix(DIGEST_MARK)
        .ok_or_else(|| anyhow!("only support digest authorization"))?
        .trim();
    let mut result = Authorization::default();
    for c in content.split(',') {
        let c = c.trim();
        // `find` returns a byte offset of the ASCII '=', so `split_at` here
        // is always on a char boundary.
        let i = c
            .find('=')
            .ok_or_else(|| anyhow!("invalid part of authorization: {}", c))?;
        let (k, v) = c.split_at(i);
        let v = v.trim_start_matches('=').trim_matches('"');
        match k {
            "username" => result.username = v.to_string(),
            "realm" => result.realm = v.to_string(),
            "nonce" => result.nonce = v.to_string(),
            "uri" => result.uri = v.to_string(),
            "qop" => result.qop = v.to_string(),
            "nc" => result.nc = v.to_string(),
            "cnonce" => result.cnonce = v.to_string(),
            "response" => result.response = v.to_string(),
            "opaque" => result.opaque = v.to_string(),
            _ => {
                warn!("unknown authorization part: {}", c);
                continue;
            }
        }
    }
    Ok(result)
}
/// Reply with `401 Unauthorized` carrying a fresh Digest challenge.
///
/// Generates a new (nonce, opaque) pair, advertises it in the
/// `WWW-Authenticate` header and remembers it in the context so a follow-up
/// request can be validated. At most 256 challenges are kept outstanding.
async fn unauthorized(data: &web::Data<Server>, msg: impl Into<String>) -> Result<HttpResponse> {
    let nonce = rand_str(32);
    let opaque = rand_str(32);
    // The global SALT doubles as the Digest realm value.
    let www_authenticate = format!(
        r#"Digest realm="{}",qop="auth",nonce="{}",opaque="{}""#,
        &SALT.lock().await,
        nonce,
        opaque
    );
    {
        // Evict the oldest challenges once the cap is reached.
        let mut nonce_list = data.auth_context.nonce_list.lock().await;
        while nonce_list.len() >= 256 {
            nonce_list.pop_front();
        }
        nonce_list.push_back((nonce, opaque));
    }
    Ok(HttpResponse::Unauthorized()
        .append_header(("WWW-Authenticate", www_authenticate))
        .body(msg.into()))
}
/// Digest-authentication login endpoint.
///
/// Flow: (1) a session that already maps to a user with a token returns it
/// immediately; (2) otherwise the `Authorization` header is validated —
/// URI prefix, single-use nonce/opaque, qop, account type and digest
/// response — and on success the token is refreshed, the session is
/// remembered and an optional `?redirect=` query is honored; (3) any
/// failure re-challenges with 401.
#[get("/login")]
pub async fn login(
    req: HttpRequest,
    id: Identity,
    data: web::Data<Server>,
) -> Result<impl Responder> {
    let conn = &*data.database.lock().await;
    // Fast path: an existing session whose account still holds a token.
    if let Ok(user) = check(&id, conn) {
        if let Some(token) = user.token {
            return Ok(HttpResponse::Ok().json(UserContext {
                username: user.username,
                role: user.role,
                r#type: user.type_,
                token,
            }));
        }
    }
    if let Some(auth) = req.headers().get("Authorization") {
        // A malformed header re-challenges instead of erroring out.
        let auth = match parse_auth(auth.to_str().map_err(|e| ErrorBadRequest(e))?) {
            Ok(a) => a,
            Err(e) => return unauthorized(&data, format!("{:?}", e)).await,
        };
        debug!("get auth: {:?}", auth);
        if !auth.uri.starts_with("/auth/login") {
            return unauthorized(&data, "authorization uri not match").await;
        }
        // Each issued (nonce, opaque) pair is single-use: remove it from
        // the outstanding list when matched.
        let mut found_nonce = false;
        {
            let mut nonce_list = data.auth_context.nonce_list.lock().await;
            let mut index = nonce_list.len().saturating_sub(1);
            for (nonce, opaque) in nonce_list.iter().rev() {
                if nonce == &auth.nonce || opaque == &auth.opaque {
                    found_nonce = true;
                    nonce_list.remove(index);
                    break;
                }
                index = index.saturating_sub(1);
            }
        }
        if !found_nonce {
            return unauthorized(&data, "invalid nonce or opaque").await;
        }
        if auth.qop != "auth" {
            return unauthorized(&data, "only support qop = auth").await;
        }
        // Only internal accounts may log in via Digest; LDAP accounts go
        // through the LDAP flow.
        let mut user = match get_user_by_name(conn, &auth.username).map_err(|e| {
            ErrorInternalServerError(format!("get user failed from database: {:?}", e))
        })? {
            Some(u) => {
                if u.type_ != AccountType::Internal.as_ref() {
                    return unauthorized(&data, "invalid login type").await;
                }
                u
            }
            None => {
                return unauthorized(&data, "invalid username or password").await;
            }
        };
        // RFC 7616 response check: the stored password acts as HA1,
        // HA2 = md5(method:uri), response = md5(HA1:nonce:nc:cnonce:qop:HA2).
        let ha1 = &user.password;
        let ha2 = md5::compute(format!("{}:{}", req.method().to_string(), req.uri()));
        let password = md5::compute(format!(
            "{}:{}:{}:{}:{}:{:x}",
            ha1, auth.nonce, auth.nc, auth.cnonce, auth.qop, ha2
        ));
        if format!("{:x}", password) != auth.response {
            warn!(
                "remote: {} user: {} wrong username or password",
                req.connection_info().remote_addr().unwrap_or("<unknown>"),
                auth.username
            );
            return unauthorized(&data, "invalid username or password").await;
        }
        info!(
            "remote: {} user: {} login ok",
            req.connection_info().remote_addr().unwrap_or("<unknown>"),
            &auth.username
        );
        // Successful login: record the time, mint a fresh 64-char token and
        // persist, then remember the session.
        user.last_login(Local::now().to_string())
            .token(rand_str(64))
            .update(conn)
            .map_err(|e| ErrorInternalServerError(e))?;
        id.remember(auth.username.clone());
        // Optional `?redirect=<url>` sends the client elsewhere after login.
        let query_string = req.query_string();
        if !query_string.is_empty() {
            let query = Query::<HashMap<String, String>>::from_query(query_string)?;
            if let Some(redirect_url) = query.get("redirect") {
                return Ok(HttpResponse::TemporaryRedirect()
                    .append_header(("Location", &**redirect_url))
                    .finish());
            }
        }
        return Ok(HttpResponse::Ok().json(UserContext {
            username: user.display_name,
            token: user.token.unwrap(),
            role: user.role,
            r#type: user.type_,
        }));
    }
    // No Authorization header at all: issue a fresh challenge.
    unauthorized(&data, "cancelled").await
}
/// Generate a random alphanumeric string of length `num`.
fn rand_str(num: usize) -> String {
    let chars = rand::thread_rng().sample_iter(&Alphanumeric).take(num);
    chars.map(char::from).collect()
}
/// Drop the session identity; the stored token is left untouched.
#[get("/logout")]
pub(crate) async fn logout(id: Identity) -> Result<impl Responder> {
    id.forget();
    Ok(HttpResponse::Ok())
}
/// Update an existing account's password (and optionally email).
///
/// LDAP-backed accounts cannot be modified here; only the root user or the
/// account's own user may apply the change.
#[post("/modify")]
pub(crate) async fn modify(
    id: Identity,
    data: web::Data<Server>,
    info: web::Json<NewAccount>,
) -> Result<HttpResponse> {
    let db = data.database.lock().await;
    // Caller must be logged in.
    let op_account = check(&id, &db)?;
    let new_account = info.into_inner();
    // Target account must already exist.
    let found_account = get_user_by_name(&db, &new_account.username)
        .map_err(|e| ErrorInternalServerError(e))?
        .ok_or(ErrorBadRequest(format!(
            "no such user: [{}]",
            &new_account.username
        )))?;
    if found_account.type_ == AccountType::Ldap.as_ref() {
        return Err(ErrorForbidden("can not modify LDAP user"));
    }
    // Authorization: root can modify anyone; others only themselves.
    if op_account.role == AccountRole::Root.as_ref() || op_account.username == new_account.username
    {
        db::update_account(&db, new_account).map_err(|e| ErrorInternalServerError(e))?;
        return Ok(HttpResponse::Ok().finish());
    }
    Err(ErrorForbidden(
        "only the root user or oneself can change the password",
    ))
}
/// Return the current session's user info, redirecting to the login page
/// when no session exists.
#[get("who")]
async fn who(id: Identity, data: web::Data<Server>) -> Result<impl Responder> {
    // No identity cookie: bounce through login and come back here.
    let name = match id.identity() {
        Some(user) => user,
        None => {
            return Ok(HttpResponse::MovedPermanently()
                .append_header(("Location", "/auth/login?redirect=/auth/who"))
                .finish());
        }
    };
    let account = get_user_by_name(&*data.database.lock().await, name)
        .map_err(|e| ErrorInternalServerError(e))?;
    if let Some(account) = account {
        Ok(HttpResponse::Ok().json(UserContext {
            username: account.username,
            role: account.role,
            r#type: account.type_,
            // An account without a token means the session expired server-side.
            token: match account.token {
                Some(tk) => tk,
                None => return unauthorized(&data, "session timeout").await,
            },
        }))
    } else {
        unauthorized(&data, "no such user").await
    }
}
pub(crate) fn check(id: &Identity, db: &Database) -> Result<Account> {
if let Some(id) = id.identity() {
return Ok(get_user_by_name(db, id)
.map_err(|e| ErrorInternalServerError(e))?
.ok_or(ErrorBadRequest("invalid session"))?);
}
Err(ErrorUnauthorized("You need login first"))
}
/// Create a new internal user account, when registration is enabled.
#[post("create")]
async fn create(
    new_account: web::Json<NewAccount>,
    data: web::Data<Server>,
) -> Result<impl Responder> {
    let new_account = new_account.into_inner();
    let cfg = data.config.read().await;
    if !cfg.registry.can_create_account {
        return Err(ErrorBadRequest("Account creation has been disabled"));
    }
    // New accounts are always internal with the plain user role.
    let mut account = Account::new(
        new_account.username,
        AccountType::Internal.as_ref(),
        AccountRole::User.as_ref(),
    );
    // Encode the password with the global salt before storing
    // (see `Account::encoded_password`).
    account
        .encoded_password(new_account.password)
        .salt(SALT.lock().await.clone());
    if let Some(email) = new_account.email {
        account.email(email);
    }
    let db = data.database.lock().await;
    db::create_account(&*db, &account)
        .map_err(|e| ErrorBadRequest(format!("create account failed: {:?}", e)))?;
    info!("created new account {}", account.username);
    Ok(HttpResponse::Ok())
}
/// Thin diesel helpers for the `accounts` table.
mod db {
    use super::{models::Account, NewAccount};
    use crate::database::{schema::accounts::dsl::*, Database};
    use anyhow::{bail, Result};
    use diesel::{associations::HasTable, dsl::count_star, prelude::*};
    /// Insert `account`, failing when the username is already taken.
    ///
    /// NOTE(review): the existence check and insert are separate statements,
    /// so a concurrent insert could slip between them — confirm a unique
    /// constraint backs this up at the schema level.
    pub(super) fn create_account(db: &Database, account: &Account) -> Result<()> {
        let count = accounts
            .filter(username.eq(&account.username))
            .select(count_star())
            .first::<i64>(&db.connection)?;
        if count != 0 {
            bail!("account {} already exists", &account.username);
        }
        diesel::insert_into(accounts)
            .values(account)
            .execute(&db.connection)?;
        Ok(())
    }
    /// Update the password (and email, when provided) of the row matching
    /// `new_account.username`.
    pub(super) fn update_account(db: &Database, new_account: NewAccount) -> Result<()> {
        match new_account.email.as_ref() {
            Some(e) => diesel::update(accounts::table())
                .filter(username.eq(new_account.username))
                .set((password.eq(new_account.password), email.eq(e)))
                .execute(&db.connection)?,
            None => diesel::update(accounts::table())
                .filter(username.eq(new_account.username))
                .set(password.eq(new_account.password))
                .execute(&db.connection)?,
        };
        Ok(())
    }
}
/// Request body shared by the `create` and `modify` endpoints.
#[derive(Deserialize)]
pub(crate) struct NewAccount {
    username: String,
    password: String,
    email: Option<String>,
}
|
// Copyright 2015 Ted Mielczarek. See the COPYRIGHT
// file at the top-level directory of this distribution.
//! A collection for storing data associated with a range of values.
use std::cmp::Ordering;
use std::iter::FromIterator;
use std::slice::Iter;
/// The value type for the endpoints of ranges.
pub type Addr = u64;
/// Entries are indexed by a `Range` of `Addr`s.
///
/// The start of the range is inclusive, the end is exclusive.
pub type Range = (Addr, Addr);
/// Entry type, a tuple of `Range` and `T`.
pub type Entry<T> = (Range, T);
/// A `RangeMap` stores values of `T` that map to `Range`s.
#[derive(Debug, PartialEq)]
pub struct RangeMap<T> {
    /// Entries are stored in a sorted list internally.
    entries: Vec<Entry<T>>,
}
/// Order an address against an entry's half-open range `[start, end)`:
/// `Equal` when the address falls inside it, `Greater` when the range lies
/// above the address, `Less` when it lies below.
fn compare_address_to_entry<T>(addr: Addr, entry: &Entry<T>) -> Ordering {
    let (start, end) = entry.0;
    if addr < start {
        Ordering::Greater
    } else if addr >= end {
        Ordering::Less
    } else {
        Ordering::Equal
    }
}
impl<T> RangeMap<T> {
/// Create a new, empty `RangeMap`.
pub fn new() -> RangeMap<T> {
RangeMap::<T> { entries: Vec::new() }
}
/// Create a `RangeMap` with `entries`.
pub fn from(mut entries : Vec<Entry<T>>) -> RangeMap<T> {
entries.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
RangeMap::<T> { entries: entries }
}
/// Returns the number of entries in the `RangeMap`.
pub fn len(&self) -> usize { self.entries.len() }
/// Insert `value` in `range`.
pub fn insert(&mut self, range : Range, value : T) -> Result<(),()> {
match self.entries.binary_search_by(|&(ref r, ref _v)| r.cmp(&range)) {
Ok(_) => Err(()),
Err(index) => {
self.entries.insert(index, (range, value));
Ok(())
}
}
}
/// Find an entry whose `Range` encompasses `addr`.
pub fn lookup(&self, addr : Addr) -> Option<&T> {
if let Ok(index) = self.entries.binary_search_by(|ref entry| compare_address_to_entry(addr, entry)) {
let ((_, _), ref value) = self.entries[index];
Some(value)
} else {
None
}
}
/// Return an iterator over the entries of the `RangeMap`.
pub fn iter(&self) -> Iter<Entry<T>> {
self.entries.iter()
}
}
/// Consuming iteration yields owned `(Range, T)` entries in sorted order.
impl<T> IntoIterator for RangeMap<T> {
    type Item = Entry<T>;
    type IntoIter = <Vec<Entry<T>> as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.entries.into_iter()
    }
}
/// Collect `(Range, T)` entries into a `RangeMap`; sorting is delegated to
/// `RangeMap::from`.
impl<T> FromIterator<Entry<T>> for RangeMap<T> {
    fn from_iter<U>(iterator: U) -> RangeMap<T>
    where
        U: IntoIterator<Item = Entry<T>>,
    {
        let collected: Vec<Entry<T>> = iterator.into_iter().collect();
        RangeMap::from(collected)
    }
}
/// A `RangeMap` is cloneable whenever its values are.
impl<T: Clone> Clone for RangeMap<T> {
    fn clone(&self) -> RangeMap<T> {
        let entries = self.entries.clone();
        RangeMap { entries }
    }
}
#[test]
fn test_range_map() {
    let mut map = RangeMap::<u32>::new();
    // Populate out of order; the map keeps entries sorted internally.
    map.insert((7, 10), 2).unwrap();
    map.insert((0, 4), 1).unwrap();
    map.insert((15, 16), 3).unwrap();
    assert_eq!(map.len(), 3);
    // Addresses inside a range resolve to that range's value...
    for &(addr, expected) in [(7, 2), (9, 2), (0, 1), (3, 1), (15, 3)].iter() {
        assert_eq!(map.lookup(addr), Some(&expected));
    }
    // ...while range ends are exclusive and gaps are empty.
    for &addr in [4, 6, 10, 16].iter() {
        assert_eq!(map.lookup(addr), None);
    }
    let items: Vec<_> = map.into_iter().collect();
    assert_eq!(items, vec![((0, 4), 1), ((7, 10), 2), ((15, 16), 3)]);
}
#[test]
fn test_clone() {
    let mut original = RangeMap::<u32>::new();
    original.insert((7, 10), 2).unwrap();
    original.insert((0, 4), 1).unwrap();
    original.insert((15, 16), 3).unwrap();
    assert_eq!(original.len(), 3);
    // A clone must contain exactly the same entries, still in sorted order.
    let copied = original.clone();
    let items: Vec<_> = copied.into_iter().collect();
    assert_eq!(items, vec![((0, 4), 1), ((7, 10), 2), ((15, 16), 3)]);
}
#[test]
fn test_from_iter() {
    let source = vec![((10, 20), 1), ((5, 6), 2), ((20, 22), 3), ((8, 10), 4)];
    // Collecting sorts entries by range start.
    let map: RangeMap<u32> = source.into_iter().collect();
    assert_eq!(map.len(), 4);
    let items: Vec<_> = map.into_iter().collect();
    assert_eq!(
        items,
        vec![((5, 6), 2), ((8, 10), 4), ((10, 20), 1), ((20, 22), 3)]
    );
}
|
use std::io::{self, Read};
use std::collections::HashMap;
/// A grammar rule: a set of alternatives, a literal string, or a sequence of
/// rule numbers.
#[derive(Debug, Clone)]
enum Rule {
    Disjunction(Vec<Rule>),
    Literal(String),
    Sequence(Vec<usize>),
}
/// Parse one rule body: `a | b` becomes a disjunction, a quoted token a
/// literal, and anything else a space-separated list of rule numbers.
fn parse_rule(rule_string: &str) -> Rule {
    if rule_string.contains('|') {
        Rule::Disjunction(rule_string.split(" | ").map(parse_rule).collect())
    } else if rule_string.contains('"') {
        // Strip the surrounding quote characters.
        Rule::Literal(rule_string[1..rule_string.len() - 1].to_string())
    } else {
        let numbers = rule_string
            .split(' ')
            .map(|token| token.parse::<usize>().unwrap())
            .collect();
        Rule::Sequence(numbers)
    }
}
/// Look up `rule_number` and match it against the start of `string`.
fn match_rule_number(rules: &HashMap<usize, Rule>, string: &str, rule_number: usize) -> Vec<usize> {
    match_rule(rules, string, &rules[&rule_number])
}
// for each way in which the rule matches, return the length of the substrings
// that it matches
fn match_rule(rules: &HashMap<usize, Rule>, string: &str, rule: &Rule) -> Vec<usize> {
    match rule {
        // Any alternative may match; gather all of their prefix lengths.
        Rule::Disjunction(alternatives) => alternatives
            .iter()
            .flat_map(|alternative| match_rule(rules, string, alternative))
            .collect(),
        // A literal matches exactly itself, or nothing.
        Rule::Literal(pattern) => {
            if string.starts_with(pattern) {
                vec![pattern.len()]
            } else {
                Vec::new()
            }
        }
        // A sequence threads every partial match through the next sub-rule.
        Rule::Sequence(rule_numbers) => {
            let mut offsets = vec![0];
            for rule_number in rule_numbers {
                offsets = offsets
                    .into_iter()
                    .flat_map(|offset| {
                        match_rule_number(rules, &string[offset..], *rule_number)
                            .into_iter()
                            .map(move |matched| offset + matched)
                    })
                    .collect();
            }
            offsets
        }
    }
}
/// True when rule 0 can match the entire string.
fn matches_zero(rules: &HashMap<usize, Rule>, string: &str) -> bool {
    match_rule_number(rules, string, 0)
        .iter()
        .any(|&offset| offset == string.len())
}
/// Read the puzzle input (rule section, blank line, message section) from
/// stdin and print how many messages match rule 0 — first with the rules as
/// given, then with rules 8 and 11 replaced by recursive variants.
fn main() {
    let mut input : String = String::new();
    io::stdin().read_to_string(&mut input).unwrap();
    // The input has two blank-line-separated sections: rules, then messages.
    let sections : Vec<&str> = input.split("\n\n").collect();
    // Each rule line is "<number>: <body>".
    let mut rules : HashMap<usize, Rule> = sections[0].split("\n").map(|rule| {
        let mut iter = rule.split(": ");
        let number = iter.next().unwrap().parse::<usize>().unwrap();
        let body = parse_rule(iter.next().unwrap());
        return (number, body);
    }).collect();
    println!("{}", sections[1].split("\n").filter(|c| matches_zero(&rules, c)).count());
    // Part two: rules 8 and 11 become self-recursive
    // (8: 42 | 42 8 and 11: 42 31 | 42 11 31) and the messages are re-counted.
    rules.insert(8, Rule::Disjunction(vec!(Rule::Sequence(vec!(42)), Rule::Sequence(vec!(42, 8)))));
    rules.insert(11, Rule::Disjunction(vec!(Rule::Sequence(vec!(42, 31)), Rule::Sequence(vec!(42, 11, 31)))));
    println!("{}", sections[1].split("\n").filter(|c| matches_zero(&rules, c)).count());
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::sync::Arc;
use parking_lot::RwLock;
use node_chain::DBTransaction;
use node_consensus_base::support::ConsensusSupport;
use primitives::errors::CommonResult;
use primitives::{codec, Hash};
use crate::get_hotstuff_authorities;
use crate::proof::Proof;
use crate::protocol::{MessageType, Node, Proposal, QC};
// Keys under which consensus state is persisted via `ConsensusSupport`.
const DB_KEY_VIEW: &[u8] = b"view";
const DB_KEY_PREPARE_QC: &[u8] = b"prepare_qc";
const DB_KEY_LOCKED_QC: &[u8] = b"locked_qc";
const DB_KEY_NODES: &[u8] = b"nodes";
const DB_KEY_PROPOSALS: &[u8] = b"proposals";
// Individual proposal records are stored under `proposal_<block_hash>`.
const DB_KEY_PROPOSAL_PREFIX: &[u8] = b"proposal_";
/// In-memory cache (guarded by `RwLock`s) plus persistence glue for the
/// HotStuff consensus state.
pub struct Storage<S>
where
    S: ConsensusSupport,
{
    /// Commit QC from the latest block proof (the genesis QC before any commit).
    base_commit_qc: RwLock<QC>,
    /// Hash of the currently confirmed block.
    base_block_hash: RwLock<Hash>,
    /// Current view number; `refresh` keeps it at least `base_commit_qc.view + 1`.
    view: RwLock<u64>,
    /// Latest prepare QC; `refresh` ensures it is not older than the commit QC.
    prepare_qc: RwLock<QC>,
    /// Latest locked QC; `refresh` ensures it is not older than the commit QC.
    locked_qc: RwLock<QC>,
    /// Known nodes (block_hash -> Node), pruned to children of the confirmed block.
    nodes: RwLock<HashMap<Hash, Node>>,
    /// Proposal index (block_hash -> Node), pruned the same way as `nodes`.
    proposals: RwLock<HashMap<Hash, Node>>,
    /// Backend used for persistence and chain queries.
    support: Arc<S>,
}
impl<S> Storage<S>
where
    S: ConsensusSupport,
{
    /// Create a `Storage` seeded with the genesis QC, then load (and repair)
    /// the persisted consensus state via `refresh`.
    pub fn new(support: Arc<S>) -> CommonResult<Self> {
        let genesis_qc = Self::get_genesis_qc(&support)?;
        let this = Self {
            base_commit_qc: RwLock::new(genesis_qc.clone()),
            base_block_hash: RwLock::new(Hash(Default::default())),
            view: RwLock::new(0),
            prepare_qc: RwLock::new(genesis_qc.clone()),
            locked_qc: RwLock::new(genesis_qc),
            nodes: RwLock::new(HashMap::new()),
            proposals: RwLock::new(HashMap::new()),
            support,
        };
        this.refresh()?;
        Ok(this)
    }
    /// Reload consensus state from the DB and the chain, fix up any value
    /// that lags behind the latest commit QC, then publish everything into
    /// the in-memory `RwLock`s.
    pub fn refresh(&self) -> CommonResult<()> {
        let proof = self.get_proof()?;
        let genesis_qc = Self::get_genesis_qc(&self.support)?;
        // Base commit QC: from the latest block proof, or genesis when no
        // block has been confirmed yet.
        let base_commit_qc = match proof {
            Some(proof) => proof.commit_qc,
            None => genesis_qc.clone(),
        };
        let current_state = self.support.get_current_state();
        let base_block_hash = current_state.confirmed_block_hash.clone();
        // init view
        // and fix if needed: the view must be beyond the committed view.
        let mut view: u64 = self
            .support
            .get_consensus_data(DB_KEY_VIEW)?
            .unwrap_or_default();
        if view < base_commit_qc.view + 1 {
            view = base_commit_qc.view + 1;
            self.commit_consensus_data(|transaction| {
                self.support
                    .put_consensus_data(transaction, DB_KEY_VIEW, view)?;
                Ok(())
            })?;
        }
        // init prepare qc
        // and fix if needed: it may never be older than the commit QC.
        let mut prepare_qc: QC = self
            .support
            .get_consensus_data(DB_KEY_PREPARE_QC)?
            .unwrap_or_else(|| genesis_qc.clone());
        if prepare_qc.view <= base_commit_qc.view {
            prepare_qc = base_commit_qc.clone();
            self.commit_consensus_data(|transaction| {
                self.support
                    .put_consensus_data(transaction, DB_KEY_PREPARE_QC, &prepare_qc)?;
                Ok(())
            })?;
        }
        // init locked qc
        // and fix if needed: same rule as the prepare QC.
        let mut locked_qc: QC = self
            .support
            .get_consensus_data(DB_KEY_LOCKED_QC)?
            .unwrap_or(genesis_qc);
        if locked_qc.view <= base_commit_qc.view {
            locked_qc = base_commit_qc.clone();
            self.commit_consensus_data(|transaction| {
                self.support
                    .put_consensus_data(transaction, DB_KEY_LOCKED_QC, &locked_qc)?;
                Ok(())
            })?;
        }
        // init nodes
        // and fix if needed: drop nodes that no longer extend the confirmed block.
        let mut nodes = {
            let nodes: Vec<(Hash, Node)> = self
                .support
                .get_consensus_data(DB_KEY_NODES)?
                .unwrap_or_default();
            nodes.into_iter().collect::<HashMap<_, _>>()
        };
        let to_remove_key = nodes
            .iter()
            .filter(|(_, v)| v.parent_hash != base_block_hash)
            .map(|(k, _)| k.clone())
            .collect::<Vec<_>>();
        for k in &to_remove_key {
            nodes.remove(k);
        }
        if !to_remove_key.is_empty() {
            let nodes_vec = nodes.iter().collect::<Vec<_>>();
            self.commit_consensus_data(|transaction| {
                self.support
                    .put_consensus_data(transaction, DB_KEY_NODES, &nodes_vec)?;
                Ok(())
            })?;
        }
        // init proposals
        // and fix if needed: drop stale proposals and their per-hash records.
        let mut proposals = {
            let proposals: Vec<(Hash, Node)> = self
                .support
                .get_consensus_data(DB_KEY_PROPOSALS)?
                .unwrap_or_default();
            proposals.into_iter().collect::<HashMap<_, _>>()
        };
        let to_remove_key = proposals
            .iter()
            .filter(|(_, v)| v.parent_hash != base_block_hash)
            .map(|(k, _)| k.clone())
            .collect::<Vec<_>>();
        for k in &to_remove_key {
            let proposal_key = [DB_KEY_PROPOSAL_PREFIX, &k.0].concat();
            self.commit_consensus_data(|transaction| {
                self.support
                    .delete_consensus_data(transaction, &proposal_key)?;
                Ok(())
            })?;
            proposals.remove(k);
        }
        if !to_remove_key.is_empty() {
            // BUGFIX: this branch previously re-saved `nodes` under
            // DB_KEY_NODES (copy-paste from the block above); it must persist
            // the pruned proposals index under DB_KEY_PROPOSALS.
            let proposals_vec = proposals.iter().collect::<Vec<_>>();
            self.commit_consensus_data(|transaction| {
                self.support
                    .put_consensus_data(transaction, DB_KEY_PROPOSALS, &proposals_vec)?;
                Ok(())
            })?;
        }
        // Publish the repaired state into the in-memory locks.
        (*self.base_commit_qc.write()) = base_commit_qc;
        (*self.base_block_hash.write()) = base_block_hash;
        (*self.view.write()) = view;
        (*self.prepare_qc.write()) = prepare_qc;
        (*self.locked_qc.write()) = locked_qc;
        (*self.nodes.write()) = nodes;
        (*self.proposals.write()) = proposals;
        Ok(())
    }
    /// Commit QC the current rounds build on.
    pub fn get_base_commit_qc(&self) -> QC {
        self.base_commit_qc.read().clone()
    }
    /// Current view number.
    pub fn get_view(&self) -> u64 {
        *self.view.read()
    }
    /// Persist and then publish a new view number.
    pub fn update_view(&self, view: u64) -> CommonResult<()> {
        self.commit_consensus_data(|transaction| {
            self.support
                .put_consensus_data(transaction, DB_KEY_VIEW, view)?;
            Ok(())
        })?;
        *self.view.write() = view;
        Ok(())
    }
    pub fn get_prepare_qc(&self) -> QC {
        self.prepare_qc.read().clone()
    }
    /// Persist and then publish a new prepare QC.
    pub fn update_prepare_qc(&self, prepare_qc: QC) -> CommonResult<()> {
        // Pass by reference (as `refresh` does) instead of cloning.
        self.commit_consensus_data(|transaction| {
            self.support
                .put_consensus_data(transaction, DB_KEY_PREPARE_QC, &prepare_qc)?;
            Ok(())
        })?;
        *self.prepare_qc.write() = prepare_qc;
        Ok(())
    }
    pub fn get_locked_qc(&self) -> QC {
        self.locked_qc.read().clone()
    }
    /// Persist and then publish a new locked QC.
    pub fn update_locked_qc(&self, locked_qc: QC) -> CommonResult<()> {
        // Pass by reference (as `refresh` does) instead of cloning.
        self.commit_consensus_data(|transaction| {
            self.support
                .put_consensus_data(transaction, DB_KEY_LOCKED_QC, &locked_qc)?;
            Ok(())
        })?;
        *self.locked_qc.write() = locked_qc;
        Ok(())
    }
    /// Load the proposal stored for `block_hash`, if any.
    pub fn get_proposal(&self, block_hash: &Hash) -> CommonResult<Option<Proposal>> {
        self.get_proposal_using(block_hash, |x| x.clone())
    }
    /// Load the proposal for `block_hash` and map it through `using`, so the
    /// caller can avoid cloning the whole proposal.
    pub fn get_proposal_using<T, F: Fn(&Option<Proposal>) -> T>(
        &self,
        block_hash: &Hash,
        using: F,
    ) -> CommonResult<T> {
        let proposal_key = [DB_KEY_PROPOSAL_PREFIX, &block_hash.0].concat();
        let proposal: Option<Proposal> = self.support.get_consensus_data(&proposal_key)?;
        Ok(using(&proposal))
    }
    /// Store a proposal: update the in-memory index, persist the index, and
    /// persist the proposal itself under its per-hash key.
    pub fn put_proposal(&self, proposal: Proposal) -> CommonResult<()> {
        let proposal_key = [DB_KEY_PROPOSAL_PREFIX, &proposal.block_hash.0].concat();
        // save proposals (the block_hash -> Node index)
        let node = Node {
            block_hash: proposal.block_hash.clone(),
            parent_hash: proposal.parent_hash.clone(),
        };
        (*self.proposals.write()).insert(node.block_hash.clone(), node);
        let proposals_vec = self
            .proposals
            .read()
            .iter()
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect::<Vec<_>>();
        self.commit_consensus_data(|transaction| {
            self.support
                .put_consensus_data(transaction, DB_KEY_PROPOSALS, &proposals_vec)?;
            Ok(())
        })?;
        // save proposal
        self.commit_consensus_data(|transaction| {
            self.support
                .put_consensus_data(transaction, &proposal_key, &proposal)?;
            Ok(())
        })?;
        Ok(())
    }
    /// Build the implicit QC for the genesis block: view 0, `Decide`, led by
    /// the first authority, with an empty signature.
    fn get_genesis_qc(support: &Arc<S>) -> CommonResult<QC> {
        let genesis_hash = support.get_current_state().genesis_hash.clone();
        let genesis_authorities = get_hotstuff_authorities(&support, &0)?;
        let genesis_leader_address = genesis_authorities.members[0].0.clone();
        Ok(QC {
            message_type: MessageType::Decide,
            view: 0,
            node: Node {
                block_hash: genesis_hash,
                parent_hash: Hash(vec![]),
            },
            leader_address: genesis_leader_address,
            sig: vec![],
        })
    }
    /// Decode the proof attached to the currently confirmed block; `None`
    /// while still at genesis (`confirmed_number == 0`).
    fn get_proof(&self) -> CommonResult<Option<Proof>> {
        let current_state = self.support.get_current_state();
        let confirmed_number = current_state.confirmed_number;
        let proof = match confirmed_number {
            0 => None,
            _ => {
                // BUGFIX: this line was corrupted to `¤t_state` (the
                // `&curren` of `&current_state` eaten as an HTML entity).
                let confirmed_block_hash = &current_state.confirmed_block_hash;
                let proof = self
                    .support
                    .get_proof(confirmed_block_hash)?
                    .ok_or_else(|| {
                        node_consensus_base::errors::ErrorKind::Data(format!(
                            "Missing proof: block_hash: {}",
                            confirmed_block_hash
                        ))
                    })?;
                let data = proof.data;
                let proof: Proof = codec::decode(&mut &data[..]).map_err(|_| {
                    node_consensus_base::errors::ErrorKind::Data("Decode proof error".to_string())
                })?;
                Some(proof)
            }
        };
        Ok(proof)
    }
    /// Run `op` inside a fresh `DBTransaction` and commit it atomically.
    fn commit_consensus_data<OP: Fn(&mut DBTransaction) -> CommonResult<()>>(
        &self,
        op: OP,
    ) -> CommonResult<()> {
        let mut transaction = DBTransaction::new();
        op(&mut transaction)?;
        self.support.commit_consensus_data(transaction)
    }
}
|
extern crate pretty_env_logger;
use std::env;
use std::fs::File;
use std::net::ToSocketAddrs;
use anyhow::{anyhow, Result};
use async_dup::Mutex;
use async_std::net::TcpStream;
use async_std::task;
use crate::config::IrcConfig;
use crate::ctcp::{ClientInfoCtcpResponse, CtcpEvent, FingerCtcpResponse, PingCtcpResponse, SourceCtcpResponse, TimeCtcpResponse, UserInfoCtcpResponse, VersionCtcpResponse};
use crate::irc_handler::IrcHandler;
use crate::irc_state::IrcState;
use crate::privmsg::{GeoIpPrivMsgEvent, Iai55Chan, PrivMsgEvent};
mod ctcp;
mod irc_ext;
mod geoip_response;
mod privmsg;
mod irc_handler;
mod irc_state;
mod config;
fn main() -> Result<()> {
task::block_on(async {
if env::var_os("RUST_LOG").is_none() {
env::set_var("RUST_LOG", "jomp16_bot_own=debug");
}
pretty_env_logger::init();
let config: IrcConfig = serde_yaml::from_reader(File::open("config.yml")?)?;
if config.servers.len() == 0 {
return Err(anyhow!("No servers!"));
}
let mut futures = vec![];
for server in config.servers {
futures.push(task::spawn(async move {
for socket_addr in format!("{}:{}", &server.hostname, server.port).to_socket_addrs().unwrap() {
let stream_result = TcpStream::connect(socket_addr).await;
match stream_result {
Ok(stream) => {
let irc_state = &mut IrcState { ..Default::default() };
if server.sasl.enabled {
irc_state.cap_requested.push("sasl".to_string());
}
let mut privmsg_plugins: Vec<Box<dyn PrivMsgEvent>> = vec![];
let mut ctcp_plugins: Vec<Box<dyn CtcpEvent>> = vec![];
for plugin in &server.privmsg_plugins {
match plugin.as_str() {
"geoip" => privmsg_plugins.push(Box::new(GeoIpPrivMsgEvent { ..Default::default() })),
"iai_55chan" => privmsg_plugins.push(Box::new(Iai55Chan {})),
_ => log::warn!("Unknown plugin: {}", plugin),
}
}
for plugin in &server.ctcp.enabled {
match plugin.as_str() {
"CLIENTINFO" => ctcp_plugins.push(Box::new(ClientInfoCtcpResponse { available_ctcp: server.ctcp.enabled.clone() })),
"FINGER" => ctcp_plugins.push(Box::new(FingerCtcpResponse {})),
"PING" => ctcp_plugins.push(Box::new(PingCtcpResponse {})),
"SOURCE" => ctcp_plugins.push(Box::new(SourceCtcpResponse {})),
"TIME" => ctcp_plugins.push(Box::new(TimeCtcpResponse {})),
"VERSION" => ctcp_plugins.push(Box::new(VersionCtcpResponse {})),
"USERINFO" => ctcp_plugins.push(Box::new(UserInfoCtcpResponse {})),
_ => log::warn!("Unknown CTCP plugin: {}", plugin),
}
}
let mut handler = IrcHandler {
server: &mut server.clone(),
irc_state,
ctcp_event: &ctcp_plugins,
privmsg_event: &privmsg_plugins,
};
if server.use_tls {
let stream = async_native_tls::connect(&server.hostname, stream).await.unwrap();
let mut stream = &Mutex::new(stream);
handler.handle(stream, &mut stream).await;
} else {
let mut stream = &Mutex::new(stream);
handler.handle(stream, &mut stream).await;
}
}
Err(e) => {
log::error!("{:}", e);
continue;
}
}
}
}));
}
futures::future::join_all(futures).await;
Ok(())
})
}
|
use tokio::{fs, io};
use url::Url;
use super::consts::{
http::HTTP_CLIENT,
paths::{NODE_DIST_URL, NODE_VERSION_INDEX_URL, SUMCHECK_FILE_NAME, TMP_DIR_PATH},
};
use super::types::GeneralError;
use super::utils::os::get_os_node_file_name;
#[cfg(test)]
mod tests;
/// Fetch the Node.js version index and parse it as JSON.
///
/// # Errors
/// Returns the underlying `reqwest` error for network failures; an HTTP
/// error status is returned as an error via `error_for_status` instead of
/// panicking (the previous `assert!`).
pub async fn get_dist_index() -> reqwest::Result<serde_json::Value> {
    let json_response = HTTP_CLIENT
        .get(NODE_VERSION_INDEX_URL)
        .send()
        .await?
        .error_for_status()?;
    let json_response: serde_json::Value = json_response.json().await?;
    Ok(json_response)
}
/// Download the checksum file for `node_version` and return its text.
///
/// # Errors
/// Network failures are propagated; an HTTP error status is returned as an
/// error via `error_for_status` instead of panicking (the previous
/// `assert!` on the status).
pub async fn get_sumcheck_file(node_version: &str) -> reqwest::Result<String> {
    let hashmap_url = format!("{}/{}/{}", NODE_DIST_URL, node_version, SUMCHECK_FILE_NAME);
    // Guard against a malformed constant/version producing an invalid URL.
    assert!(
        Url::parse(&hashmap_url).is_ok(),
        "({}) is not a valid URL",
        hashmap_url
    );
    let response = HTTP_CLIENT
        .get(&hashmap_url)
        .send()
        .await?
        .error_for_status()?;
    response.text().await
}
/// Download the Node.js archive for `node_version` into `TMP_DIR_PATH` and
/// return the path of the written file.
pub async fn save_remote_file(node_version: &str) -> Result<String, GeneralError> {
    let filename = get_os_node_file_name(node_version);
    // BUGFIX: the format string had lost its `{filename}` placeholder (and
    // the `/` after the dist URL), leaving an unused named argument and a
    // bogus URL. The layout mirrors the URL built in `get_sumcheck_file`.
    let url = format!(
        "{dist_url}/{version}/{filename}",
        dist_url = NODE_DIST_URL,
        version = node_version,
        filename = filename.as_ref().unwrap(),
    );
    assert!(Url::parse(&url).is_ok(), "({}) is not a valid URL", url);
    let package = HTTP_CLIENT.get(&url).send().await?;
    let package = package.bytes().await?;
    let mut package = package.as_ref();
    // Make sure the scratch directory exists before writing into it.
    if fs::metadata(TMP_DIR_PATH.to_owned()).await.is_err() {
        fs::create_dir_all(TMP_DIR_PATH.to_owned()).await?;
    }
    let filepath = TMP_DIR_PATH.join(filename.as_ref().unwrap());
    let mut file = fs::File::create(&filepath).await?;
    let _ = io::copy(&mut package, &mut file).await?;
    let filepath = filepath.to_str().unwrap().to_string();
    Ok(filepath)
}
|
extern crate getopts;
use std::env;
use getopts::Options;
mod lfsr;
mod feistel;
mod gcd;
mod hash;
/// Entry point: register the CLI flags, then run every requested demo.
fn main() {
    println!("Welcome to CryptoPlayground");
    let args: Vec<String> = env::args().collect();
    let mut opts = Options::new();
    // Register the flags data-driven; the strings match the originals exactly.
    let flags: [(&str, &str, &str); 5] = [
        ("h", "help", "Print this helptext"),
        ("", "feistel", "Start Feistel encryption"),
        ("", "lfsr", "Start LFSR"),
        ("", "hash", "Create a hash"),
        ("", "gcd", "Calculate the greatest common divisor of two numbers"),
    ];
    for &(short, long, desc) in flags.iter() {
        opts.optflag(short, long, desc);
    }
    let matches = opts
        .parse(&args[1..])
        .unwrap_or_else(|f| panic!("Error: {}", f));
    if matches.opt_present("h") {
        print!("{}", opts.usage(""));
    }
    if matches.opt_present("feistel") {
        feistel::encrypt();
    }
    if matches.opt_present("lfsr") {
        lfsr::lfsr(15);
    }
    if matches.opt_present("gcd") {
        gcd::calculate_gcd(3220, 70);
    }
    if matches.opt_present("hash") {
        hash::fnv1a(String::from("Foobar"));
    }
}
|
/// Demo: print the numbers up to 255, then report the parity of 5.
fn main() {
    print_numbers_to(255);
    println!("Is 5 an even number? {}", is_even(5));
}
/// Print the integers 1 through `to_num` inclusive, one per line.
///
/// BUGFIX: the exclusive range `1..to_num` stopped one short of `to_num`
/// despite the function's name (and `1..256` is unrepresentable in `u8`,
/// so `to_num == 255` could never be printed); `1..=to_num` includes it.
fn print_numbers_to(to_num: u8) {
    for num in 1..=to_num {
        println!("{}", num);
    }
}
/// True when `num` is divisible by two.
fn is_even(num: u8) -> bool {
    num % 2 == 0
}
/*
A simple HTTP server that serves static content from a given directory,
built on [rotor] and [rotor-http].
It creates a number of rotor server threads, all listening on the same
port (via [libc::SO_REUSEPORT]). These are state machines performing
non-blocking network I/O on top of [mio]. The HTTP requests are parsed
and responses emitted on these threads.
Files are read sequentially in a thread pool. You might think they
would be read on the I/O loop, but no: async file I/O is hard, and mio
is only for network I/O.
[rotor]: https://github.com/tailhook/rotor
[rotor-http]: https://github.com/tailhook/rotor-http
[libc::SO_REUSEPORT]: https://lwn.net/Articles/542629/
[mio]: https://github.com/carllerche/mio
*/
// Non-blocking I/O.
//
// https://github.com/carllerche/mio
extern crate mio;
// rotor, a library for building state machines on top of mio, along
// with an HTTP implementation, and its stream abstraction.
//
// https://medium.com/@paulcolomiets/async-io-in-rust-part-iii-cbfd10f17203
extern crate rotor;
extern crate rotor_http;
extern crate rotor_stream;
// A simple library for dealing with command line arguments
//
// https://github.com/kbknapp/clap-rs
extern crate clap;
// A basic thread pool.
//
// http://frewsxcv.github.io/rust-threadpool/threadpool/
extern crate threadpool;
// Extensions to the standard networking types.
//
// This is an official nursery crate that contains networking features
// that aren't in std. We're using in for [TcpBuilder].
// https://doc.rust-lang.org/net2-rs/net2/index.html
//
// [TcpBuilder]: https://doc.rust-lang.org/net2-rs/net2/struct.TcpBuilder.html
extern crate net2;
// Bindings to the C library.
//
// We need it for `setsockopt` and `SO_REUSEPORT`.
//
// http://doc.rust-lang.org/libc/index.html
extern crate libc;
// The error_type! macro to avoid boilerplate trait
// impls for error handling.
#[macro_use]
extern crate error_type;
// Some deprecated time types that rotor needs, Duration,
// and SteadyTime, that rotor needs.
extern crate time;
use clap::App;
use mio::tcp::TcpListener;
use rotor::Scope;
use rotor_http::header::ContentLength;
use rotor_http::server::{RecvMode, Server, Head, Response, Parser, Context};
use rotor_http::status::StatusCode;
use rotor_http::uri::RequestUri;
use rotor_stream::{Deadline, Accept, Stream};
use std::error::Error as StdError;
use std::fs::File;
use std::io::{self, Read};
use std::net::SocketAddr;
use std::os::unix::io::AsRawFd;
use std::path::{Path, PathBuf};
use std::sync::mpsc::{self, Receiver};
use std::thread;
use threadpool::ThreadPool;
use time::Duration;
fn main() {
    // All fallible work lives in `run`; `main` only reports a failure.
    // Every error type converts into our custom `Error` (see the
    // `error_type!` invocation at the bottom of this file), so `try!` works
    // uniformly throughout the crate.
    match run() {
        Ok(()) => {}
        Err(e) => println!("error: {}", e.description()),
    }
}
/// Spawn `num_server_threads` listener threads and block collecting their
/// exit results over a channel.
fn run() -> Result<(), Error> {
    // Create the configuration from the command line arguments. It
    // includes the IP address and port to listen on, the path to use
    // as the HTTP server's root directory, and the file I/O thread
    // pool.
    let config = try!(parse_config_from_cmdline());
    let (tx, rx) = mpsc::channel::<Result<(), Error>>();
    // Create multiple threads all listening on the same address and
    // port, and sharing a thread pool for their file I/O.
    // TODO: This needs to report panicks.
    for _ in 0..config.num_server_threads {
        let tx = tx.clone();
        let config = config.clone();
        thread::spawn(move || {
            let r = run_server(config);
            // It would be very strange for this send to fail,
            // but there's nothing we can do if it does.
            tx.send(r).unwrap();
        });
    }
    // Wait for each thread to exit and report the result. Note that
    // there's no way for the server threads to exit successfully,
    // so normally this will block forever.
    for i in 0..config.num_server_threads {
        match rx.recv() {
            Ok(Ok(())) => {
                println!("thread {} exited successfully", i);
            }
            Ok(Err(e)) => {
                println!("thread {} exited with error: {}", i, e.description());
            }
            Err(e) => {
                // This will happen if some threads panicked.
                println!("thread {} disappeared: {:?}", i, e.description());
            }
        }
    }
    Ok(())
}
// The configuration object, created from command line options
#[derive(Clone)]
struct Config {
    /// Address all listener threads bind to (shared via SO_REUSEPORT).
    addr: SocketAddr,
    /// Directory served as the HTTP root.
    root_dir: PathBuf,
    /// Shared pool on which blocking file reads are performed.
    thread_pool: ThreadPool,
    /// Number of rotor event-loop threads to spawn.
    num_server_threads: u16,
}
/// Parse the CLI arguments into a `Config` and print the effective settings.
///
/// Defaults: root ".", addr "127.0.0.1:4000", 4 server threads, 100 file
/// I/O threads.
fn parse_config_from_cmdline() -> Result<Config, Error> {
    let matches = App::new("basic-http-server")
        .version("0.1")
        .about("A basic HTTP file server")
        .args_from_usage(
            "[ROOT] 'Sets the root dir (default \".\")'
             -a --addr=[ADDR] 'Sets the IP:PORT combination (default \"127.0.0.1:4000\")'
             -t --threads=[THREADS] 'Sets the number of server threads (default 4)'
             --file-threads=[FILE-THREADS] 'Sets the number of threads in the file I/O thread pool (default 100)'")
        .get_matches();
    let default_server_threads = 4;
    let default_file_threads = 100;
    let addr = matches.value_of("ADDR").unwrap_or("127.0.0.1:4000");
    let root_dir = matches.value_of("ROOT").unwrap_or(".");
    let num_server_threads = match matches.value_of("THREADS") {
        Some(t) => { try!(t.parse()) }
        None => default_server_threads
    };
    let num_file_threads = match matches.value_of("FILE-THREADS") {
        Some(t) => { try!(t.parse()) }
        None => default_file_threads
    };
    // Display the configuration to be helpful
    println!("addr: {}", addr);
    println!("root dir: {:?}", root_dir);
    println!("server threads: {}", num_server_threads);
    println!("file threads: {}", num_file_threads);
    println!("");
    Ok(Config {
        addr: try!(addr.parse()),
        root_dir: PathBuf::from(root_dir),
        thread_pool: ThreadPool::new(num_file_threads),
        num_server_threads: num_server_threads,
    })
}
// Run a single HTTP server forever.
/// Bind the configured address (with SO_REUSEPORT set) and run a rotor
/// event loop accepting HTTP connections; only returns on error.
fn run_server(config: Config) -> Result<(), Error> {
    let Config {
        addr, root_dir, thread_pool, ..
    } = config;
    // Our custom server context
    let context = ServerContext {
        root_dir: root_dir,
        thread_pool: thread_pool,
    };
    // Build the listener by hand so SO_REUSEPORT can be set before bind.
    let sock = try!(net2::TcpBuilder::new_v4());
    set_reuse_port(&sock);
    try!(sock.bind(&addr));
    let listener = try!(sock.listen(4096));
    let listener = try!(TcpListener::from_listener(listener, &addr));
    let config = rotor::Config::new();
    let event_loop = try!(rotor::Loop::new(&config));
    let mut loop_inst = event_loop.instantiate(context);
    // Every accepted connection becomes a Parser<RequestState> machine.
    loop_inst.add_machine_with(|scope| {
        Accept::<Stream<Parser<RequestState, _>>, _>::new(listener, scope)
    }).unwrap();
    println!("listening on {}", addr);
    try!(loop_inst.run());
    Ok(())
}
// The ServerContext, implementing the rotor-http Context,
// and RequestState, implementing the rotor-http Server.
//
// RequestState is a state machine that lasts for the lifecycle of a
// single request. All RequestStates have access to the shared
// ServerContext.
/// Shared, per-event-loop context available to every request state machine.
struct ServerContext {
    /// HTTP root directory (copied from `Config`).
    root_dir: PathBuf,
    /// Pool on which blocking file reads run.
    thread_pool: ThreadPool
}
impl Context for ServerContext { }
/// Per-request state machine.
enum RequestState {
    /// Headers received; no decision made yet.
    Init,
    /// Body buffered; the saved `Head` is consumed when responding.
    ReadyToRespond(Head),
    /// File I/O in flight; `DataMsg`s arrive on the receiver. The bool
    /// records whether response headers have been written yet.
    WaitingForData(Receiver<DataMsg>, bool /* headers_sent */)
}
// Messages sent from the file I/O thread back to the state machine
// Messages sent from the file I/O thread back to the state machine
enum DataMsg {
    /// File did not exist — respond 404.
    NotFound,
    /// File opened; payload is its length in bytes (used for Content-Length).
    Header(u64),
    /// One chunk of file contents.
    Data(Vec<u8>),
    /// All data sent; finish the response.
    Done,
    /// Open or read failed for another reason.
    IoError(std::io::Error),
}
impl Server for RequestState {
    type Context = ServerContext;
    /// Accept every request: buffer up to 1024 bytes of body and allow 10
    /// seconds before timing out.
    fn headers_received(_head: &Head, _scope: &mut Scope<Self::Context>)
        -> Result<(Self, RecvMode, Deadline), StatusCode> {
        Ok((RequestState::Init, RecvMode::Buffered(1024),
            Deadline::now() + Duration::seconds(10)))
    }
    /// Stash the request head until the (buffered) body has arrived.
    fn request_start(self, head: Head, _response: &mut Response,
                     _scope: &mut Scope<Self::Context>)
        -> Option<Self> {
        Some(RequestState::ReadyToRespond(head))
    }
    /// Kick off the file read on the thread pool and transition to
    /// `WaitingForData`; the response itself is written in `wakeup`.
    fn request_received(self, _data: &[u8], response: &mut Response,
                        scope: &mut Scope<Self::Context>)
        -> Option<Self> {
        // Now that the request is received, prepare the response.
        let head = if let RequestState::ReadyToRespond(head) = self {
            head
        } else {
            unreachable!()
        };
        // NOTE(review): a request path we refuse to map yields a 500 here
        // rather than 400/404 — confirm this is intended.
        let path = if let Some(path) = local_path_for_request(head, &scope.root_dir) {
            path
        } else {
            internal_server_error(response);
            return None;
        };
        // We're going to do the file I/O in another thread.
        // This channel will transmit info from the I/O thread to the
        // rotor machine.
        let (tx, rx) = mpsc::channel();
        // This rotor Notifier will trigger a wakeup when data is
        // ready, upon which the response will be written in `wakeup`.
        let notifier = scope.notifier();
        scope.thread_pool.execute(move || {
            match File::open(path) {
                Ok(mut file) => {
                    let mut buf = Vec::new();
                    match file.read_to_end(&mut buf) {
                        Ok(_) => {
                            // Length first (for Content-Length), then the
                            // data, then a completion marker.
                            tx.send(DataMsg::Header(buf.len() as u64)).unwrap();
                            tx.send(DataMsg::Data(buf)).unwrap();
                            tx.send(DataMsg::Done).unwrap();
                        }
                        Err(e) => {
                            tx.send(DataMsg::IoError(e)).unwrap();
                        }
                    }
                }
                Err(e) => {
                    match e.kind() {
                        io::ErrorKind::NotFound => {
                            tx.send(DataMsg::NotFound).unwrap();
                        }
                        _ => {
                            tx.send(DataMsg::IoError(e)).unwrap();
                        }
                    }
                }
            }
            notifier.wakeup().unwrap();
        });
        Some(RequestState::WaitingForData(rx, false))
    }
    /// Drain the channel and write the HTTP response in reaction to the
    /// messages sent by the file I/O thread.
    fn wakeup(self, response: &mut Response, _scope: &mut Scope<Self::Context>)
        -> Option<Self> {
        let mut state = self;
        loop {
            state = match state {
                RequestState::WaitingForData(rx, headers_sent) => {
                    match rx.try_recv() {
                        Ok(DataMsg::NotFound) => {
                            response.status(StatusCode::NotFound);
                            response.add_header(ContentLength(0)).unwrap();
                            response.done_headers().unwrap();
                            response.done();
                            return None;
                        }
                        Ok(DataMsg::Header(length)) => {
                            response.status(StatusCode::Ok);
                            response.add_header(ContentLength(length)).unwrap();
                            response.done_headers().unwrap();
                            RequestState::WaitingForData(rx, true)
                        }
                        Ok(DataMsg::Data(buf)) => {
                            assert!(headers_sent);
                            response.write_body(&buf);
                            RequestState::WaitingForData(rx, headers_sent)
                        }
                        Ok(DataMsg::Done) => {
                            assert!(headers_sent);
                            response.done();
                            return None;
                        }
                        Err(mpsc::TryRecvError::Empty) => {
                            // No message yet; stay parked until the next wakeup.
                            return Some(RequestState::WaitingForData(rx, headers_sent));
                        }
                        Ok(DataMsg::IoError(_)) |
                        Err(mpsc::TryRecvError::Disconnected) => {
                            if headers_sent {
                                // We've already said this isn't an
                                // error by sending successful
                                // headers. Just give up.
                                response.done();
                                return None;
                            } else {
                                internal_server_error(response);
                                return None;
                            }
                        }
                    }
                }
                _ => {
                    unreachable!()
                }
            }
        }
    }
    // I don't know what to do with these yet.
    fn request_chunk(self, _chunk: &[u8], _response: &mut Response,
                     _scope: &mut Scope<Self::Context>)
        -> Option<Self> {
        unimplemented!()
    }
    fn request_end(self, _response: &mut Response,
                   _scope: &mut Scope<Self::Context>)
        -> Option<Self> {
        unimplemented!()
    }
    fn timeout(self, _response: &mut Response, _scope: &mut Scope<Self::Context>)
        -> Option<(Self, Deadline)> {
        unimplemented!()
    }
}
// Map the request URI to a file path under `root_dir`.
//
// Returns `None` for non-absolute-path URIs, for paths not starting with
// '/', and for paths containing a `..` component (path traversal).
fn local_path_for_request(head: Head, root_dir: &Path) -> Option<PathBuf> {
    let request_path = match head.uri {
        RequestUri::AbsolutePath(p) => p,
        _ => {
            return None;
        }
    };
    // Trim off the url parameters starting with '?'
    let end = request_path.find('?').unwrap_or(request_path.len());
    let request_path = &request_path[0..end];
    // SECURITY FIX: reject any ".." component — previously a request like
    // /../../etc/passwd was appended under root_dir unchecked, letting
    // clients read files outside the server's root directory.
    if request_path.split('/').any(|component| component == "..") {
        return None;
    }
    // Append the requested path to the root directory
    let mut path = root_dir.to_owned();
    if request_path.starts_with('/') {
        path.push(&request_path[1..]);
    } else {
        return None;
    }
    // Maybe turn directory requests into index.html requests
    if request_path.ends_with('/') {
        path.push("index.html");
    }
    Some(path)
}
// Enable SO_REUSEPORT so multiple server threads can bind the same address.
// Panics (via assert) if setsockopt fails.
fn set_reuse_port(sock: &net2::TcpBuilder) {
    let one = 1i32;
    // Replace the magic `4` with the actual size of the option value.
    let len = std::mem::size_of::<libc::c_int>() as libc::socklen_t;
    // SAFETY: `sock` wraps a valid open socket fd, and we pass a pointer to
    // a live `c_int` together with its exact size.
    unsafe {
        assert!(libc::setsockopt(
            sock.as_raw_fd(), libc::SOL_SOCKET,
            libc::SO_REUSEPORT,
            &one as *const libc::c_int as *const libc::c_void, len) == 0);
    }
}
fn internal_server_error(response: &mut Response) {
response.status(StatusCode::InternalServerError);
response.add_header(ContentLength(0)).unwrap();
response.done_headers().unwrap();
response.done();
}
// The custom Error type that encapsulates all the possible errors
// that can occur in this crate. This macro defines it and
// automatically creates Display, Error, and From implementations for
// all the variants.
// The macro generates Display/Error impls plus a `From` conversion for each
// variant, so `try!` can promote io, address-parse, boxed-std, and int-parse
// errors into `Error` anywhere in this crate.
error_type! {
    #[derive(Debug)]
    enum Error {
        Io(io::Error) { },
        AddrParse(std::net::AddrParseError) { },
        Std(Box<StdError + Send + Sync>) {
            desc (e) e.description();
        },
        ParseInt(std::num::ParseIntError) { },
    }
}
|
#![feature(plugin)]
#![plugin(rocket_codegen)]
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
extern crate rocket;
extern crate postgres;
extern crate chrono;
extern crate biscuit;
#[cfg(test)]
extern crate reqwest;
use std::env;
use std::fs::File;
use std::io::Read;
use std::sync::mpsc::Sender;
mod log;
mod node;
use log::Log;
use node::{NodeType, NodeConfig};
use node::http_input_node::HttpInputNode;
use node::stdout_output_node::StdoutOutputNode;
use node::postgres_output_node::PostgresOutputNode;
use node::start_node::StartNode;
use node::Node;
/// Entry point: reads the pipeline configuration from the path given as the
/// first CLI argument and starts the pipeline.
fn main() {
    // Skip the program name; the first free argument is the config path.
    let mut args = env::args().skip(1);
    match args.next() {
        Some(pipeline_path) => {
            let config = read_pipeline_config(&pipeline_path).unwrap();
            start_pipeline(&config);
        }
        None => println!("Please specify pipeline configuration path to read from"),
    }
}
/// Loads the pipeline configuration JSON at `path` and deserializes it.
/// All I/O and parse errors are stringified via their `Debug` form.
fn read_pipeline_config(path: &str) -> Result<NodeConfig, String> {
    let config_json = std::fs::read_to_string(path).map_err(|e| format!("{:?}", e))?;
    serde_json::from_str(config_json.as_ref()).map_err(|e| format!("{:?}", e))
}
/// Builds the configured node chain and then starts the `StartNode`,
/// returning the start node's input channel.
fn start_pipeline(config: &NodeConfig) -> Option<Sender<Log>> {
    // NOTE(review): the sender produced by `chain_pipeline` is discarded and
    // `StartNode` is started without it — confirm the chain is wired up via
    // side effects inside the node constructors.
    let _ = chain_pipeline(config);
    Some(
        StartNode {}
            .start()
            .map_err(|e| format!("{:?}", e))
            .unwrap(),
    )
}
/// Recursively builds the pipeline from the tail forwards: the innermost
/// `next` config is started first so its sender can be handed to its parent.
fn chain_pipeline(node_config: &NodeConfig) -> Option<Sender<Log>> {
    let next = if let Some(ref next_config) = node_config.next {
        chain_pipeline(next_config)
    } else {
        None
    };
    // Errors from starting a node are stringified and then dropped via
    // `.ok()`, so a failed node silently yields `None` to its parent.
    match node_config.node {
        NodeType::StdoutOutputNode => {
            StdoutOutputNode::new(node_config.conf.clone(), next)
                .start()
                .map_err(|e| format!("{:?}", e))
                .ok()
        }
        NodeType::HttpInputNode => {
            HttpInputNode::new(node_config.conf.clone(), next)
                .start()
                .map_err(|e| format!("{:?}", e))
                .ok()
        }
        NodeType::PostgresOutputNode => {
            PostgresOutputNode::new(node_config.conf.clone(), next)
                .start()
                .map_err(|e| format!("{:?}", e))
                .ok()
        }
    }
}
|
#![feature(rust_2018_preview)]
#![feature(uniform_paths)]
extern crate getopts;
use getopts::Options;
use std::env;
use std::iter::Peekable;
use std::mem::swap;
use std::str::Chars;
mod char_class;
use char_class::CharClass;
/// Abstract syntax tree for the extended regex dialect: literals plus
/// `*`/`\+`/`\?` repetition, complement `\~`, concatenation, intersection
/// `\&` and union `\|`.
#[derive(Clone, Debug)]
enum AST {
    /// The empty language (matches nothing).
    Empty,
    /// The universal language (matches every string); printed as `.*`.
    Universe,
    /// The empty string.
    Epsilon,
    /// One character drawn from the class.
    Literal(CharClass),
    /// Zero or more repetitions.
    Star(Box<AST>),
    /// One or more repetitions.
    Plus(Box<AST>),
    /// Zero or one occurrence.
    Question(Box<AST>),
    /// Language complement.
    Not(Box<AST>),
    /// Concatenation.
    Seq(Box<AST>, Box<AST>),
    /// Intersection.
    And(Box<AST>, Box<AST>),
    /// Union.
    Or(Box<AST>, Box<AST>),
}
/// A lexer token: a plain character, a backslash-escaped character, or a
/// `[...]` character class.
#[derive(Clone, Debug, Eq, PartialEq)]
enum Token {
    Bare(char),
    Quoted(char),
    Class(CharClass),
}
/// Lazy tokenizer over a borrowed character stream.
struct TokenSeq<'a> {
    chars: &'a mut Chars<'a>,
}
impl<'a> Iterator for TokenSeq<'a> {
    type Item = Token;
    /// Produces the next token: bare characters, backslash-quoted
    /// characters, or `[...]` classes.
    ///
    /// # Panics
    /// On a trailing backslash or a malformed character class.
    fn next(&mut self) -> Option<Token> {
        match self.chars.next() {
            None => None,
            Some('\\') => match self.chars.next() {
                None => {
                    panic!("Trailing backslash");
                }
                Some(c) => Some(Token::Quoted(c)),
            },
            Some('[') => match char_class::parse_char_class(&mut self.chars) {
                Ok(cls) => Some(Token::Class(cls)),
                Err(msg) => {
                    // BUG FIX: `panic!(msg)` with a non-literal `String` is
                    // deprecated and a hard error in Rust 2021; format it.
                    panic!("{}", msg);
                }
            },
            Some(c) => Some(Token::Bare(c)),
        }
    }
}
/// Parses one "literal": a single atom (character, `.`, class, or a
/// `\(`-group) followed by any number of postfix operators
/// (`*`, `\+`, `\?`, `\~`).
fn parse_regular_expression_literal(tokens: &mut Peekable<TokenSeq>) -> Box<AST> {
    let token = tokens.peek().cloned();
    let mut a = match token {
        None => {
            // End of input: an empty literal is the empty string.
            return Box::new(AST::Epsilon);
        }
        Some(Token::Bare('*')) => {
            // grep(1) says this is OK
            tokens.next();
            Box::new(AST::Literal(CharClass::from_char('*')))
        }
        Some(Token::Bare('.')) => {
            tokens.next();
            // '.' matches any character: the complement of the empty class.
            Box::new(AST::Literal(CharClass::new().complement()))
        }
        Some(Token::Bare(c)) => {
            tokens.next();
            Box::new(AST::Literal(CharClass::from_char(c)))
        }
        Some(Token::Quoted(c)) if c == ')' => {
            panic!("Unmatched \\)");
        }
        Some(Token::Quoted(c)) if c == '+' || c == '?' => {
            // grep(1) says this is OK
            tokens.next();
            Box::new(AST::Literal(CharClass::from_char(c)))
        }
        Some(Token::Quoted(c)) if c == '&' || c == '|' => {
            // A binary operator with no left operand is a syntax error.
            panic!();
        }
        Some(Token::Quoted('(')) => {
            tokens.next();
            let b = parse_regular_expression_root(tokens);
            // A group opened with \( must be closed by \).
            assert_eq!(tokens.next(), Some(Token::Quoted(')')));
            b
        }
        Some(Token::Quoted(c)) => {
            tokens.next();
            Box::new(AST::Literal(CharClass::from_char(c)))
        }
        Some(Token::Class(cls)) => {
            tokens.next();
            Box::new(AST::Literal(cls))
        }
    };
    // Fold any run of postfix operators onto the atom, left to right.
    loop {
        match tokens.peek() {
            Some(Token::Bare('*')) => {
                a = Box::new(AST::Star(a));
            }
            Some(Token::Quoted('+')) => {
                a = Box::new(AST::Plus(a));
            }
            Some(Token::Quoted('?')) => {
                a = Box::new(AST::Question(a));
            }
            Some(Token::Quoted('~')) => {
                a = Box::new(AST::Not(a));
            }
            _ => {
                break;
            }
        }
        tokens.next();
    }
    return a;
}
/// Parses a whole expression with precedence: concatenation binds tightest,
/// then intersection `\&`, then union `\|`.
fn parse_regular_expression_root(tokens: &mut Peekable<TokenSeq>) -> Box<AST> {
    // Folds `b` into `a` under `op`, treating a missing operand as epsilon.
    fn bump(a: &mut Option<Box<AST>>, b: &mut Option<Box<AST>>, op: fn(Box<AST>, Box<AST>) -> AST) {
        let mut a1 = None;
        let mut b1 = None;
        swap(&mut a1, a);
        swap(&mut b1, b);
        let b2 = b1.unwrap_or(Box::new(AST::Epsilon));
        a1 = Some(match a1 {
            None => b2,
            Some(a2) => Box::new(op(a2, b2)),
        });
        swap(a, &mut a1);
    }
    // Accumulators per precedence level: `a` for \|, `b` for \&, `c` for Seq.
    let mut a = None;
    let mut b = None;
    let mut c = None;
    loop {
        match tokens.peek() {
            None | Some(Token::Quoted(')')) => {
                break;
            }
            Some(Token::Quoted('|')) => {
                // Close out the tighter levels before starting a new \| arm.
                bump(&mut b, &mut c, AST::And);
                bump(&mut a, &mut b, AST::Or);
                tokens.next();
            }
            Some(Token::Quoted('&')) => {
                bump(&mut b, &mut c, AST::And);
                tokens.next();
            }
            Some(_) => {
                let d = parse_regular_expression_literal(tokens);
                bump(&mut c, &mut Some(d), AST::Seq);
            }
        }
    }
    // Flush whatever remains at each precedence level.
    bump(&mut b, &mut c, AST::And);
    bump(&mut a, &mut b, AST::Or);
    return a.unwrap();
}
/// Tokenizes `input`, parses it from the root rule, and asserts that every
/// token was consumed.
fn parse_regular_expression(input: &str) -> Box<AST> {
    let mut chars = input.chars();
    let mut tokens = TokenSeq { chars: &mut chars }.peekable();
    let ast = parse_regular_expression_root(&mut tokens);
    assert!(tokens.peek().is_none());
    ast
}
/// Renders an AST back into source syntax. Returns the string plus its
/// precedence level (0 = postfix operand … 3 = `\|`); `paren` wraps a child
/// in `\( \)` whenever its level is looser than the context allows.
fn format_regular_expression(ast: &Box<AST>) -> (String, i8) {
    fn paren(s: String, x: i8, y: i8) -> String {
        if x <= y {
            s
        } else {
            "\\(".to_string() + s.as_str() + "\\)"
        }
    }
    match &**ast {
        // The empty set has no literal syntax; `0&1` (two disjoint
        // single-character literals intersected) denotes it.
        AST::Empty => ("0&1".to_string(), 2),
        AST::Universe => (".*".to_string(), 1),
        AST::Epsilon => ("".to_string(), 1),
        AST::Literal(cls) => (cls.to_string(), 0),
        AST::Star(a) => {
            let (s, x) = format_regular_expression(&a);
            (paren(s, x, 0) + "*", 0)
        }
        AST::Plus(a) => {
            let (s, x) = format_regular_expression(&a);
            (paren(s, x, 0) + "\\+", 0)
        }
        AST::Question(a) => {
            let (s, x) = format_regular_expression(&a);
            (paren(s, x, 0) + "\\?", 0)
        }
        AST::Not(a) => {
            let (s, x) = format_regular_expression(&a);
            (paren(s, x, 0) + "\\~", 0)
        }
        AST::Seq(a, b) => {
            let (s, x) = format_regular_expression(&a);
            let (t, y) = format_regular_expression(&b);
            (paren(s, x, 1) + paren(t, y, 1).as_str(), 1)
        }
        AST::And(a, b) => {
            let (s, x) = format_regular_expression(&a);
            let (t, y) = format_regular_expression(&b);
            (paren(s, x, 2) + "\\&" + paren(t, y, 2).as_str(), 2)
        }
        AST::Or(a, b) => {
            let (s, x) = format_regular_expression(&a);
            let (t, y) = format_regular_expression(&b);
            (paren(s, x, 3) + "\\|" + paren(t, y, 3).as_str(), 3)
        }
    }
}
/// Conservatively decides whether `ast` matches no string at all.
/// `Some(true)`/`Some(false)` when decidable, `None` when unknown.
fn is_empty_set(ast: &Box<AST>) -> Option<bool> {
    match &**ast {
        AST::Empty => Some(true),
        AST::Universe => Some(false),
        AST::Epsilon => Some(false),
        AST::Literal(cls) => Some(cls.is_empty()),
        // `a*` and `a?` always contain the empty string, so never empty.
        // (Unused bindings replaced with `_` to silence warnings.)
        AST::Star(_) => Some(false),
        AST::Plus(a) => is_empty_set(a),
        AST::Question(_) => Some(false),
        AST::Not(a) => is_universe(a),
        AST::Seq(a, b) => match (is_empty_set(a), is_empty_set(b)) {
            (Some(true), _) => Some(true),
            (_, Some(true)) => Some(true),
            (Some(false), Some(false)) => Some(false),
            _ => None,
        },
        AST::And(a, b) => match (is_empty_set(a), is_empty_set(b)) {
            (Some(true), _) => Some(true),
            (_, Some(true)) => Some(true),
            _ => None,
        },
        AST::Or(a, b) => match (is_empty_set(a), is_empty_set(b)) {
            (Some(true), Some(true)) => Some(true),
            (_, Some(false)) => Some(false),
            (Some(false), _) => Some(false),
            _ => None,
        },
    }
}
/// Conservatively decides whether `ast` matches every string.
/// `Some(true)`/`Some(false)` when decidable, `None` when unknown.
fn is_universe(ast: &Box<AST>) -> Option<bool> {
    match &**ast {
        AST::Empty => Some(false),
        AST::Universe => Some(true),
        AST::Epsilon => Some(false),
        // A one-character literal never matches e.g. the empty string.
        // (Unused bindings replaced with `_` to silence warnings.)
        AST::Literal(_) => Some(false),
        AST::Star(_) => None,
        AST::Plus(_) => None,
        AST::Question(_) => None,
        AST::Not(a) => is_empty_set(a),
        AST::Seq(a, b) => match (is_universe(a), is_universe(b)) {
            (Some(true), Some(true)) => Some(true),
            _ => None,
        },
        AST::And(a, b) => match (is_universe(a), is_universe(b)) {
            (Some(true), Some(true)) => Some(true),
            (Some(false), _) => Some(false),
            (_, Some(false)) => Some(false),
            _ => None,
        },
        AST::Or(a, b) => match (is_universe(a), is_universe(b)) {
            (Some(true), _) => Some(true),
            (_, Some(true)) => Some(true),
            _ => None,
        },
    }
}
/// Conservatively decides whether `ast` matches exactly the empty string
/// (i.e. its language is `{ε}`). `None` when undecidable.
fn is_epsilon(ast: &Box<AST>) -> Option<bool> {
    match &**ast {
        AST::Empty => Some(false),
        AST::Universe => Some(false),
        AST::Epsilon => Some(true),
        // (Unused bindings replaced with `_` to silence warnings.)
        AST::Literal(_) => Some(false),
        // `a*` is exactly {ε} iff `a` is empty or itself exactly {ε}.
        AST::Star(a) => match (is_empty_set(a), is_epsilon(a)) {
            (Some(true), _) => Some(true),
            (_, Some(true)) => Some(true),
            (Some(false), Some(false)) => Some(false),
            _ => None,
        },
        AST::Plus(a) => is_epsilon(a),
        AST::Question(a) => match (is_empty_set(a), is_epsilon(a)) {
            (Some(true), _) => Some(true),
            (_, Some(true)) => Some(true),
            (Some(false), Some(false)) => Some(false),
            _ => None,
        },
        // The complement of a language is never decidably {ε} here.
        AST::Not(_) => None,
        AST::Seq(a, b) => match (is_epsilon(a), is_epsilon(b)) {
            (Some(true), Some(true)) => Some(true),
            (Some(false), _) => Some(false),
            (_, Some(false)) => Some(false),
            _ => None,
        },
        AST::And(a, b) => match (is_epsilon(a), is_epsilon(b)) {
            (Some(true), Some(true)) => Some(true),
            _ => None,
        },
        AST::Or(a, b) => match (is_epsilon(a), is_epsilon(b)) {
            (Some(true), Some(true)) => Some(true),
            _ => None,
        },
    }
}
fn simplify_regular_expression(ast: Box<AST>) -> Box<AST> {
if is_empty_set(&ast) == Some(true) {
return Box::new(AST::Empty);
}
if is_universe(&ast) == Some(true) {
return Box::new(AST::Universe);
}
match *ast {
AST::Empty => Box::new(AST::Empty),
AST::Universe => Box::new(AST::Universe),
AST::Epsilon => Box::new(AST::Epsilon),
AST::Literal(cls) => Box::new(AST::Literal(cls)),
AST::Star(a) => Box::new(AST::Star(simplify_regular_expression(a))),
AST::Plus(a) => Box::new(AST::Plus(simplify_regular_expression(a))),
AST::Question(a) => Box::new(AST::Question(simplify_regular_expression(a))),
AST::Not(a) => Box::new(AST::Not(simplify_regular_expression(a))),
AST::Seq(a, b) => {
if is_empty_set(&a) == Some(true) || is_empty_set(&b) == Some(true) {
Box::new(AST::Empty)
} else if is_universe(&a) == Some(true) {
Box::new(AST::Universe)
} else if is_epsilon(&a) == Some(true) {
simplify_regular_expression(b)
} else if is_epsilon(&b) == Some(true) {
simplify_regular_expression(a)
} else {
Box::new(AST::Seq(
simplify_regular_expression(a),
simplify_regular_expression(b),
))
}
}
AST::And(a, b) => {
if is_empty_set(&a) == Some(true) || is_empty_set(&b) == Some(true) {
Box::new(AST::Empty)
} else if is_universe(&a) == Some(true) {
simplify_regular_expression(b)
} else if is_universe(&b) == Some(true) {
simplify_regular_expression(a)
} else {
Box::new(AST::And(
simplify_regular_expression(a),
simplify_regular_expression(b),
))
}
}
AST::Or(a, b) => {
if is_universe(&a) == Some(true) || is_universe(&b) == Some(true) {
Box::new(AST::Universe)
} else if is_empty_set(&a) == Some(true) {
simplify_regular_expression(b)
} else if is_empty_set(&b) == Some(true) {
simplify_regular_expression(a)
} else {
Box::new(AST::Or(
simplify_regular_expression(a),
simplify_regular_expression(b),
))
}
}
}
}
/// Nullability: true iff the empty string belongs to the language.
/// Unlike the `is_*` helpers above, this is exact, not conservative.
fn contains_epsilon_as_element(ast: &Box<AST>) -> bool {
    match &**ast {
        AST::Empty => false,
        AST::Universe => true,
        AST::Epsilon => true,
        AST::Literal(_) => false,
        AST::Star(_) => true,
        AST::Plus(a) => contains_epsilon_as_element(a),
        AST::Question(_) => true,
        // ε is in the complement exactly when it is not in the operand.
        AST::Not(a) => !contains_epsilon_as_element(a),
        AST::Seq(a, b) => contains_epsilon_as_element(a) && contains_epsilon_as_element(b),
        AST::And(a, b) => contains_epsilon_as_element(a) && contains_epsilon_as_element(b),
        AST::Or(a, b) => contains_epsilon_as_element(a) || contains_epsilon_as_element(b),
    }
}
/// Brzozowski derivative: returns an AST matching `{ s | c·s ∈ L(ast) }`.
fn differentiate_regular_expression(ast: Box<AST>, c: char) -> Box<AST> {
    match *ast {
        AST::Empty => Box::new(AST::Empty),
        // d(Σ*) = Σ*
        AST::Universe => Box::new(AST::Universe),
        AST::Epsilon => Box::new(AST::Empty),
        AST::Literal(cls) => {
            if cls.contains(c) {
                Box::new(AST::Epsilon)
            } else {
                Box::new(AST::Empty)
            }
        }
        // d(a*) = d(a)·a*
        AST::Star(a) => {
            let da = differentiate_regular_expression(a.clone(), c);
            Box::new(AST::Seq(da, Box::new(AST::Star(a))))
        }
        // d(a+) = d(a)·a*  (a+ = a·a*)
        AST::Plus(a) => {
            let da = differentiate_regular_expression(a.clone(), c);
            Box::new(AST::Seq(da, Box::new(AST::Star(a))))
        }
        // d(a?) = d(a), since d(ε) = ∅ contributes nothing.
        AST::Question(a) => differentiate_regular_expression(a, c),
        AST::Not(a) => Box::new(AST::Not(differentiate_regular_expression(a, c))),
        // d(a·b) = d(a)·b  ∪  d(b) when ε ∈ a
        AST::Seq(a, b) => {
            let has_epsilon = contains_epsilon_as_element(&a);
            let da = differentiate_regular_expression(a, c);
            if has_epsilon {
                let db = differentiate_regular_expression(b.clone(), c);
                let da_b = Box::new(AST::Seq(da, b));
                Box::new(AST::Or(da_b, db))
            } else {
                Box::new(AST::Seq(da, b))
            }
        }
        // Derivatives distribute over intersection and union.
        AST::And(a, b) => {
            let da = differentiate_regular_expression(a, c);
            let db = differentiate_regular_expression(b, c);
            Box::new(AST::And(da, db))
        }
        AST::Or(a, b) => {
            let da = differentiate_regular_expression(a, c);
            let db = differentiate_regular_expression(b, c);
            Box::new(AST::Or(da, db))
        }
    }
}
/// Matches `text` against `ast`, anchored at both ends, by repeatedly
/// taking Brzozowski derivatives and finally testing nullability.
fn match_regular_expression(ast: &Box<AST>, text: &str) -> bool {
    let mut ast: Box<AST> = ast.clone();
    for c in text.chars() {
        ast = differentiate_regular_expression(ast, c);
        // Simplify between steps to keep the AST from growing unboundedly;
        // the intermediate regex is also printed for tracing.
        ast = simplify_regular_expression(ast);
        let (s, _) = format_regular_expression(&ast);
        println!("d({}): {}", c, s);
    }
    return contains_epsilon_as_element(&ast);
}
/// Parses `input`, echoes the parsed form, matches it against `text`,
/// and returns whether the match succeeded.
fn do_work(input: &str, text: &str) -> bool {
    println!("input regexp text: {}", input);
    let ast = parse_regular_expression(input);
    let (formatted, _) = format_regular_expression(&ast);
    println!("parsed text: {}", formatted);
    let matched = match_regular_expression(&ast, text);
    println!("match: {}", matched);
    matched
}
/// Prints the getopts-generated usage text for this program.
fn print_usage(program: &str, opts: Options) {
    print!("{}", opts.usage(&format!("Usage: {} REGEX [options]", program)));
}
/// CLI entry point: expects exactly two free arguments, `REGEX` and `TEXT`.
fn main() {
    let args: Vec<String> = env::args().collect();
    let program = args[0].clone();
    let mut opts = Options::new();
    opts.optflag("h", "help", "");
    let matches = match opts.parse(&args[1..]) {
        Ok(m) => m,
        Err(f) => {
            print_usage(&program, opts);
            // BUG FIX: `panic!(f.to_string())` panics with a non-literal,
            // which is deprecated and rejected in Rust 2021; format instead.
            panic!("{}", f)
        }
    };
    if matches.opt_present("h") {
        print_usage(&program, opts);
        return;
    }
    if matches.free.len() != 2 {
        print_usage(&program, opts);
        // Give the bare panic a message so the failure is self-explanatory.
        panic!("expected exactly two arguments: REGEX TEXT");
    }
    // Borrow instead of cloning; `do_work` only needs `&str`.
    let pattern = &matches.free[0];
    let text = &matches.free[1];
    do_work(pattern, text);
}
#[test]
fn it_works() {
    // End-to-end checks: classes, negated classes, star, concatenation,
    // complement (\~), and the \& / \| precedence hierarchy.
    assert!(do_work("", ""));
    assert!(do_work("[abc]", "a"));
    assert!(!do_work("[abc]", "z"));
    assert!(!do_work("[^abc]", "a"));
    assert!(do_work("[^abc]", "z"));
    assert!(do_work("a*", ""));
    assert!(do_work("a*", "aaa"));
    assert!(!do_work("a*", "b"));
    assert!(!do_work("a*", "baa"));
    assert!(do_work("aaabbb", "aaabbb"));
    assert!(!do_work("aaabbb", "aaabb"));
    assert!(!do_work("aaabbb", "aaabbbb"));
    assert!(do_work("\\(regexp\\)\\~", "rege"));
    assert!(do_work("\\(regexp\\)\\~", "regex"));
    assert!(!do_work("\\(regexp\\)\\~", "regexp"));
    assert!(do_work("\\(regexp\\)\\~", "regexpr"));
    assert!(do_work("\\(regexp\\)\\~", "regexpre"));
    assert!(!do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "a"));
    assert!(!do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "b"));
    assert!(!do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "c"));
    assert!(do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "d"));
    assert!(do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "e"));
    assert!(!do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "f"));
    assert!(!do_work("a\\&b\\&c\\|d\\|e\\|f\\&g", "g"));
}
|
mod parser;
mod program;
use std::io::{Read, Write};
use std::error::Error;
use std::fmt;
/// Parses `code` as a Brainfuck program and executes it against the given
/// input/output streams, propagating parse failures as `BFError`.
pub fn run<R: Read, W:Write>(code: Vec<u8>, input: &mut R, output: &mut W) -> Result<(), BFError> {
    let program = parser::parse(code)?;
    // NOTE(review): any value returned by `program.run` is discarded —
    // confirm execution cannot fail, or propagate its result here.
    program.run(input, output);
    Ok(())
}
/// Error raised while parsing or executing a Brainfuck program.
#[derive(Debug, Clone)]
pub struct BFError {
    // Human-readable description of what went wrong.
    message: String,
}
impl Error for BFError {
    // NOTE(review): `Error::description` is deprecated in modern Rust in
    // favour of `Display`; kept as-is for compatibility with this codebase.
    fn description(&self) -> &str {
        self.message.as_str()
    }
}
impl fmt::Display for BFError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "BFError: {}", self.message)
}
} |
extern mod rsfml;
use std::hashmap::HashMap;
pub use rsfml::graphics::texture::Texture;
/// Caches GC-managed `Texture`s by file path (pre-1.0 Rust dialect:
/// `~str` owned strings, `@T` managed pointers).
pub struct TextureCache {
    // Map from texture file path to the shared, managed texture.
    textures: HashMap<~str, @Texture>
}
impl TextureCache {
    /// Creates an empty cache.
    pub fn new() -> TextureCache {
        TextureCache {
            textures: HashMap::new()
        }
    }
    /// Loads the texture at `path` and stores it in the cache.
    /// Fails the task if the file cannot be loaded.
    pub fn load(& mut self, path:~str) -> @Texture {
        // NOTE(review): despite the cache map, this always reloads from disk
        // and overwrites any existing entry — a cache lookup before loading
        // was probably intended; confirm.
        let s:@Texture = match(Texture::new_from_file(path)) {
            Some(t) => @t,
            None => fail!("fugg no texture :DDD") // TODO use failsafe texture
        };
        self.textures.insert(path, s);
        return s;
    }
}
|
//! The Styx type system.
use prelude::*;
use expr::{Expr};
use lexer::{HasSpan, Span};
use procedures::Proc;
use symbols::{Sym, Symbol, SymbolTree};
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::fmt::{self, Debug, Display, Formatter, Write};
use std::marker::PhantomData;
use std::sync::RwLock;
use rand::{Rng, SeedableRng, StdRng};
/// Implements `Symbol` for the given type by delegating `parts` and
/// `full_name` to its `symbol` field.
/// (The previous comment incorrectly said this implements `Hash`.)
macro_rules! derive_symbol {
    ( $ty: ident ) => (
        impl<'cx> Symbol for $ty<'cx> {
            fn parts(&self) -> &[u64] {
                self.symbol.parts()
            }
            fn full_name(&self) -> &str {
                self.symbol.full_name()
            }
        }
    )
}
/// Implements `Debug` for the given type, by using the type's implementation
/// of `Display`.
macro_rules! debug_from_display {
    ( $ty: ident ) => (
        impl<'cx> Debug for $ty<'cx> {
            // Debug output is identical to Display output by construction.
            fn fmt(&self, f: &mut Formatter) -> fmt::Result {
                Display::fmt(self, f)
            }
        }
    );
}
/// A `SymbolTree` of `Member`s.
pub type MemberTree<'cx> = SymbolTree<Member<'cx>>;
/// A Styx member.
#[derive(Debug, Clone)]
pub enum Member<'cx> {
    /// A [`Ty`]. Cheap to clone: the variant holds only a reference.
    Type(&'cx Ty<'cx>),
    /// A [`Fun`]. Cheap to clone for the same reason.
    Function(&'cx Fun<'cx>)
}
impl<'cx> Member<'cx> {
    /// Returns a reference to the underlying type.
    ///
    /// # Errors
    /// The member is not a type.
    pub fn as_type(&self) -> Option<&'cx Ty<'cx>> {
        // Bindings are `&`-references (Copy), so matching `*self` moves nothing.
        if let Member::Type(ty) = *self { Some(ty) } else { None }
    }
    /// Returns a reference to the underlying function.
    ///
    /// # Errors
    /// The member is not a function.
    pub fn as_function(&self) -> Option<&'cx Fun<'cx>> {
        if let Member::Function(fun) = *self { Some(fun) } else { None }
    }
}
// Allow `&Ty` / `&Fun` references to be converted into `Member` via `.into()`.
impl<'cx> From<&'cx Ty<'cx>> for Member<'cx> {
    fn from(this: &'cx Ty<'cx>) -> Self { Member::Type(this) }
}
impl<'cx> From<&'cx Fun<'cx>> for Member<'cx> {
    fn from(this: &'cx Fun<'cx>) -> Self { Member::Function(this) }
}
// //==========================================================================//
// // TYPES //
// //==========================================================================//
/// Indicates that this object has a Styx representation.
pub trait Typed<'cx> {
    /// Gets the type of this expression.
    fn ty(&self) -> &'cx Ty<'cx>;
    /// Returns a boolean indicating whether this type is empty (it has a null size).
    fn is_empty(&self) -> bool {
        // Only a raw type of size 0 counts as empty here; tuples whose
        // members sum to zero are not detected by this shortcut.
        match &self.ty().def {
            &TyDef::Raw(0) => true,
            _ => false
        }
    }
}
/// Represents a generic argument.
pub struct Generic<'cx> {
    // Name of the generic parameter (e.g. "T").
    name: String,
    // The concept the parameter must satisfy.
    concept: &'cx Concept<'cx>
}
impl<'cx> Display for Generic<'cx> {
    /// Formats as `<concept> <name>`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{} {}", self.concept, self.name)
    }
}
/// Defines the generic parameters whose type definition is known in a certain context.
pub enum TyParameters<'tp, 'cx> {
    /// Two merged type parameter maps.
    /// The raw pointers stand in for `'tp`-bounded references (see `merged`);
    /// the `PhantomData` carries the lifetime.
    Siblings(*const TyParameters<'tp, 'cx>, *const TyParameters<'tp, 'cx>, PhantomData<&'tp ()>),
    /// A single type parameter map.
    Orphan(HashMap<u64, &'cx Ty<'cx>>)
}
// SAFETY(review): the `Siblings` pointers are only ever created from
// references outliving the merged value ('tp), so sending/sharing across
// threads is claimed sound — confirm no dangling use is possible.
unsafe impl<'tp, 'cx> Send for TyParameters<'tp, 'cx> {}
unsafe impl<'tp, 'cx> Sync for TyParameters<'tp, 'cx> {}
impl<'cx> Default for TyParameters<'static, 'cx> {
    /// An empty, orphan parameter map.
    fn default() -> Self {
        TyParameters::Orphan(HashMap::default())
    }
}
impl<'tp, 'cx> TyParameters<'tp, 'cx> {
    /// Creates a new type parameter map.
    pub fn new(map: HashMap<u64, &'cx Ty<'cx>>) -> TyParameters<'static, 'cx> {
        TyParameters::Orphan(map)
    }
    /// Merges two type parameter maps together.
    ///
    /// The result borrows both inputs as raw pointers rather than copying;
    /// the returned `'a` lifetime prevents it from outliving either.
    pub fn merged<'a, 'otp>(&'a self, with: &'a TyParameters<'otp, 'cx>) -> TyParameters<'a, 'cx> {
        TyParameters::Siblings(self, with, PhantomData)
    }
    /// Defines the type matching the specified type argument.
    ///
    /// # Panics
    /// If called on a merged (`Siblings`) map.
    pub fn define(&mut self, gentyid: u64, ty: &'cx Ty<'cx>) {
        if let &mut TyParameters::Orphan(ref mut map) = self {
            map.insert(gentyid, ty);
        } else {
            panic!("Cannot define types in a merged type parameters map.");
        }
    }
    /// Resolves a type, given its type id.
    pub fn resolve(&self, gentyid: u64) -> Option<&'cx Ty<'cx>> {
        match self {
            // SAFETY(review): the pointers were created from references that
            // outlive `self` (see `merged`), so dereferencing is claimed
            // sound. The left map takes precedence over the right.
            &TyParameters::Siblings(a, b, _) => unsafe {
                (*a).resolve(gentyid).or_else(|| (*b).resolve(gentyid))
            },
            &TyParameters::Orphan(ref map) => match map.get(&gentyid) {
                Some(ty) => Some(ty),
                None => None
            }
        }
    }
}
impl<'tp, 'cx> Hash for TyParameters<'tp, 'cx> {
    // Hashes every contained type; merged maps hash both halves in order.
    // NOTE(review): `HashMap::values` iterates in unspecified order, so this
    // hash is not stable across equal maps — confirm that is acceptable.
    fn hash<H: Hasher>(&self, f: &mut H) {
        match self {
            // SAFETY(review): same pointer-validity argument as in `resolve`.
            &TyParameters::Siblings(a, b, _) => unsafe {
                (*a).hash(f);
                (*b).hash(f);
            },
            &TyParameters::Orphan(ref map) => {
                for ty in map.values() {
                    ty.hash(f);
                }
            }
        }
    }
}
impl<'tp, 'cx> Display for TyParameters<'tp, 'cx> {
    /// Writes the parameter list as `<A, B, C>`. Empty maps and merged
    /// (`Siblings`) maps produce no output.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        if let &TyParameters::Orphan(ref map) = self {
            let mut values = map.values();
            let len = values.len();
            if len == 0 {
                return Ok(())
            }
            f.write_char('<')?;
            for _ in 0..len-1 {
                write!(f, "{}, ", values.next().unwrap())?;
            }
            write!(f, "{}", values.next().unwrap())?;
            // BUG FIX: the opening '<' was never matched — the list printed
            // as "<A, B" with no closing '>'.
            f.write_char('>')
        } else {
            Ok(())
        }
    }
}
/// A `Ty`'s definition.
#[derive(Hash)]
pub enum TyDef<'cx> {
    /// A placeholder for a generic type.
    Generic,
    /// A type whose data is implemented as raw bytes.
    Raw(u16),
    /// A tuple built on top of other types.
    Tuple(Vec<&'cx Ty<'cx>>, TyParameters<'cx, 'cx>),
    /// A type whose type parameters are known.
    Refined(&'cx Ty<'cx>, TyParameters<'cx, 'cx>)
}
lazy_static! {
    // RNG used to hand out random `Ty::id` values; falls back to a fixed
    // seed when OS entropy is unavailable.
    static ref RAND: RwLock<StdRng> = {
        RwLock::new(StdRng::new().unwrap_or_else(|_| StdRng::from_seed(&[0x42, 0x43, 0x44])))
    };
}
/// The Styx type, which has a name, and inner types.
///
/// Styx types are represented very simply as tuples of inner types,
/// and may take parameters. Those parameters define generic types,
/// whose size changes.
pub struct Ty<'cx> {
    // Random per-instance identity; equality compares this, not structure.
    id: u64,
    // Where the type was declared.
    span: Span,
    // Fully-qualified symbol/name.
    symbol: Sym,
    // Generic parameter placeholder types declared by this type.
    generic_parameters: Vec<Ty<'cx>>,
    // Structural definition (generic / raw / tuple / refined).
    def: TyDef<'cx>
}
impl<'cx> PartialEq for Ty<'cx> {
    // Identity comparison via the random `id` — two structurally identical
    // but separately constructed types are NOT equal.
    fn eq(&self, other: &Ty<'cx>) -> bool {
        self.id == other.id
    }
}
impl<'cx> Eq for Ty<'cx> {}
impl<'cx> Ty<'cx> {
    /// Creates a new type, given its symbol and definition.
    pub fn new(symbol: Sym, span: Span, def: TyDef<'cx>) -> Self {
        // Each instance receives a random id; `PartialEq` compares identity.
        Ty { symbol, def, span, id: (*RAND.write().unwrap()).next_u64(), generic_parameters: Vec::new() }
    }
    /// Creates a new generic parameter type.
    pub fn generic(symbol: Sym, span: Span) -> Self {
        Self::new(symbol, span, TyDef::Generic)
    }
    /// Creates a new raw type.
    pub fn raw(symbol: Sym, span: Span, size: u16) -> Self {
        Self::new(symbol, span, TyDef::Raw(size))
    }
    /// Creates a new tuple type.
    pub fn tuple(symbol: Sym, span: Span, types: Vec<&'cx Ty<'cx>>) -> Self {
        Self::new(symbol, span, TyDef::Tuple(types, TyParameters::default()))
    }
    /// Creates a new tuple type with type arguments.
    pub fn tuple_with_args(symbol: Sym, span: Span, types: Vec<&'cx Ty<'cx>>, args: TyParameters<'cx, 'cx>) -> Self {
        Self::new(symbol, span, TyDef::Tuple(types, args))
    }
    /// Creates a new refined type.
    pub fn refined(symbol: Sym, span: Span, ty: &'cx Ty<'cx>, parameters: TyParameters<'cx, 'cx>) -> Self {
        Self::new(symbol, span, TyDef::Refined(ty, parameters))
    }
    /// Creates a new refined type with no type parameters given.
    pub fn alias(symbol: Sym, span: Span, ty: &'cx Ty<'cx>) -> Self {
        Self::new(symbol, span, TyDef::Refined(ty, TyParameters::default()))
    }
    /// Returns a type identical to this one, with its generic parameters changed
    /// to the given vector.
    pub fn with_generic_parameters(mut self, params: Vec<Ty<'cx>>) -> Self {
        self.generic_parameters = params;
        self
    }
    /// Returns whether this type represents a known type.
    pub fn is_known(&self) -> bool {
        let a = self as *const _ as *const ();
        let b = Self::unknown() as *const _ as *const ();
        // BUG FIX: this previously required `a == b`, which is only true for
        // the `<Unknown>` sentinel itself — the exact opposite of the intent.
        // A known type must NOT be the sentinel, and all of its component
        // types must be known (cf. `Fun::is_known`, which uses `!=`).
        a != b &&
        match &self.def {
            &TyDef::Generic | &TyDef::Raw(_) => true,
            &TyDef::Tuple(ref tys, _) => tys.iter().all(|t| t.is_known()),
            &TyDef::Refined(ty, _) => ty.is_known()
        }
    }
    /// Returns a slice containing all generic parameters of this type.
    pub fn generic_parameters(&self) -> &[Ty<'cx>] {
        &self.generic_parameters
    }
    /// Returns the name of the type.
    pub fn name(&self) -> &str {
        self.symbol.as_ref()
    }
    /// Returns the size in bytes occupied by the type.
    pub fn size(&self) -> Option<u16> {
        self.inner_size(&TyParameters::default())
    }
    /// Computes the size with `args` supplying definitions for generic
    /// parameters; `None` when a generic cannot be resolved.
    fn inner_size<'tp>(&self, args: &TyParameters<'tp, 'cx>) -> Option<u16> {
        match &self.def {
            &TyDef::Raw(size) => Some(size),
            &TyDef::Generic => match args.resolve(self.id) {
                Some(param) => param.inner_size(args),
                None => None
            },
            &TyDef::Tuple(ref tys, ref p) => {
                // A tuple's size is the sum of its members' sizes.
                let mut sum = 0;
                let merged = p.merged(args);
                for ty in tys {
                    sum += match ty.inner_size(&merged) {
                        Some(size) => size,
                        None => return None
                    };
                }
                Some(sum)
            },
            &TyDef::Refined(ty, ref p) => ty.inner_size(&p.merged(args))
        }
    }
}
impl<'cx> Display for Ty<'cx> {
    /// Writes the type's name, plus its argument list for tuple types.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.symbol)?;
        match &self.def {
            // NOTE(review): only Tuple arguments are printed; Refined type
            // parameters are omitted — confirm this is intentional.
            &TyDef::Tuple(_, ref args) => write!(f, "{}", args),
            _ => Ok(())
        }
    }
}
impl<'cx> HasSpan for Ty<'cx> {
    /// The source span where the type was declared.
    fn span(&self) -> Span { self.span }
}
impl<'cx> Hash for Ty<'cx> {
    // Hashes symbol + definition; the random `id` is deliberately excluded
    // from the hash even though `eq` compares it (equal ids imply the same
    // instance, so the Hash/Eq contract still holds).
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.symbol.hash(state);
        self.def.hash(state);
    }
}
// Debug delegates to Display; Symbol delegates to the `symbol` field.
debug_from_display!(Ty);
derive_symbol!(Ty);
impl<'cx> Ty<'cx> {
    /// Returns the global unknown type, which represents types that could not be resolved.
    pub fn unknown() -> &'cx Ty<'cx> {
        static_type! {
            UNKNOWN_TY, Ty::raw(Sym::from("<Unknown>"), span!(), 0)
        }
        // SAFETY(review): casts the 'static sentinel reference down to 'cx.
        // Sound as long as shortening the lifetime is valid for `Ty`'s
        // variance — confirm.
        unsafe { &*(&*UNKNOWN_TY as *const _ as *const () as *const _) }
    }
    /// Returns the global void type.
    pub fn void() -> &'cx Ty<'cx> {
        static_type! {
            VOID_TY, Ty::raw(Sym::from("System.Void"), span!(), 0)
        }
        unsafe { &*(&*VOID_TY as *const _ as *const () as *const _) }
    }
    /// Returns the global expression type.
    pub fn expression() -> &'cx Ty<'cx> {
        static_type! {
            EXPR_TY, Ty::raw(Sym::from("Styx.Expression"), span!(), 0)
                .with_generic_parameters(vec!(Ty::generic(Sym::from("T"), span!())))
        }
        unsafe { &*(&*EXPR_TY as *const _ as *const () as *const _) }
    }
    /// Returns the global quote type.
    pub fn quote() -> &'cx Ty<'cx> {
        static_type! {
            QUOTE_TY, Ty::raw(Sym::from("Styx.Quote"), span!(), 0)
                .with_generic_parameters(vec!(Ty::generic(Sym::from("T"), span!())))
        }
        unsafe { &*(&*QUOTE_TY as *const _ as *const () as *const _) }
    }
    /// Returns the global pointer type for the given architecture.
    pub fn pointer(arch: Architecture) -> &'cx Ty<'cx> {
        // 4-byte pointer for 32-bit targets, 8-byte for x86-64.
        static_type! {
            PTR32_TY, Ty::raw(Sym::from("Styx.Pointer"), span!(), 4)
                .with_generic_parameters(vec!(Ty::generic(Sym::from("T"), span!())))
        }
        static_type! {
            PTR64_TY, Ty::raw(Sym::from("Styx.Pointer"), span!(), 8)
                .with_generic_parameters(vec!(Ty::generic(Sym::from("T"), span!())))
        }
        unsafe {
            match arch {
                Architecture::X86_64 => &*(&*PTR64_TY as *const _ as *const () as *const _),
                _ => &*(&*PTR32_TY as *const _ as *const () as *const _),
            }
        }
    }
}
// //==========================================================================//
// // FUNCTIONS //
// //==========================================================================//
/// Defines a function parameter.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Parameter<'cx> {
    // Parameter name as written in the declaration.
    name: String,
    // Declared type of the parameter.
    ty: &'cx Ty<'cx>
}
impl<'cx> Parameter<'cx> {
    /// Creates a new parameter, given its name and type constraint.
    pub fn new(name: String, ty: &'cx Ty<'cx>) -> Parameter<'cx> {
        Parameter { ty, name }
    }
    /// Returns the string that corresponds to the parameter's name.
    pub fn name(&self) -> &str {
        self.name.as_str()
    }
}
impl<'cx> Typed<'cx> for Parameter<'cx> {
    #[inline]
    fn ty(&self) -> &'cx Ty<'cx> {
        self.ty
    }
}
impl<'cx> Display for Parameter<'cx> {
    /// Formats as `<type> <name>`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{} {}", self.ty, self.name)
    }
}
debug_from_display!(Parameter);
/// Represents a user-defined function.
pub struct Fun<'cx> {
    // Fully-qualified name; `Sym::default()` for anonymous functions.
    symbol: Sym,
    // Declaration span.
    span: Span,
    // The function body; also determines the function's type (see `Typed`).
    body: Expr<'cx>,
    // Generic parameter placeholder types.
    generic_parameters: Vec<Ty<'cx>>,
    // Declared parameters, in order.
    parameters: Vec<Parameter<'cx>>,
    // Purity flag computed once from the body at construction time.
    is_pure: bool
}
impl<'cx> Fun<'cx> {
    /// Creates a new function, given its symbol, parameters, and body.
    pub fn new(symbol: Sym, span: Span, parameters: Vec<Parameter<'cx>>, body: Expr<'cx>) -> Self {
        Fun { symbol, is_pure: Self::compute_is_pure(&body), parameters, body, span, generic_parameters: Vec::new() }
    }
    /// Creates a new anonymous function, given its body.
    /// Its span is the body's span, and it takes no parameters.
    pub fn anonymous(body: Expr<'cx>) -> Self {
        Fun { symbol: Sym::default(), is_pure: Self::compute_is_pure(&body), parameters: Vec::new(), span: body.span(), body, generic_parameters: Vec::new() }
    }
    /// Returns a function identical to this one, with its generic parameters changed
    /// to the given vector.
    pub fn with_generic_parameters(mut self, params: Vec<Ty<'cx>>) -> Self {
        self.generic_parameters = params;
        self
    }
    /// Returns a boolean indicating whether the function can be compiled.
    pub fn verify<'a>(&self, diagnostics: Diags, generics: &TyParameters<'a, 'cx>) -> bool {
        use visitor::Verifier;
        let mut verifier = Verifier::new(diagnostics, generics);
        verifier.visit(&self.body);
        // TODO: Verify type parameters; body
        verifier.is_valid()
    }
    /// Returns the name of the function.
    pub fn name(&self) -> &str {
        self.symbol.as_ref()
    }
    /// Returns the slice containing all parameters that this function takes.
    pub fn parameters(&self) -> &[Parameter<'cx>] {
        &self.parameters
    }
    /// Returns the body of the function.
    pub fn body(&self) -> &Expr<'cx> {
        &self.body
    }
    /// Returns a boolean indicating whether the function is pure.
    pub fn is_pure(&self) -> bool {
        self.is_pure
    }
    /// Returns a boolean indicating whether the function represents an expression.
    pub fn is_anonymous(&self) -> bool {
        self.symbol == Sym::default() && self.parameters.is_empty()
    }
    /// Returns the global unknown function, which represents a function that could not be
    /// resolved.
    pub fn unknown() -> &'cx Fun<'cx> {
        // SAFETY(review): shortens the sentinel's 'static lifetime to 'cx —
        // sound only if `Fun`'s variance tolerates it; confirm.
        unsafe {
            &*(&*UNKNOWN_FUN as *const Fun<'static> as *const () as *const Fun<'cx>)
        }
    }
    /// Returns whether this function represents a known function.
    pub fn is_known(&self) -> bool {
        // Known = not the `<Unknown>` sentinel, and every parameter type known.
        self as *const _ as *const () != &*UNKNOWN_FUN as *const _ as *const () &&
        self.parameters.iter().all(|param| param.ty().is_known())
    }
    // Purity analysis: returns false as soon as any visited child is impure,
    // or when a native call expression targets an impure function.
    fn compute_is_pure(expr: &Expr<'cx>) -> bool {
        use expr::{BuiltIns, Call, Expression};
        use visitor::{ClosureVisitor, Visitor};
        ClosureVisitor::<bool>::new(&|visitor: &mut ClosureVisitor<bool>, expr: &Expr| -> bool {
            let children: Vec<bool> = visitor.visit_children(expr);
            if children.iter().any(|b| !*b) {
                return false
            }
            // Non-call expressions are treated as pure here.
            if expr.expr_ty() != BuiltIns::call() {
                return true
            }
            match expr.data() {
                &Expression::Native(ref native) => unsafe {
                    let call = Call::from_native(native);
                    call.target().is_pure()
                },
                _ => true
            }
        }).visit(expr)
    }
}
}
impl<'cx> Display for Fun<'cx> {
    /// Formats the function as `name(param, param, …)`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.symbol)?;
        f.write_char('(')?;
        for (index, param) in self.parameters.iter().enumerate() {
            if index > 0 {
                f.write_str(", ")?;
            }
            write!(f, "{}", param)?;
        }
        f.write_char(')')
    }
}
impl<'cx> Typed<'cx> for Fun<'cx> {
    /// A function's type is the type of its body expression.
    #[inline]
    fn ty(&self) -> &'cx Ty<'cx> {
        Typed::ty(&self.body)
    }
}
impl<'cx> PartialEq for Fun<'cx> {
    // Functions are identified by name + parameter list; the body is
    // deliberately ignored (overload-set style identity).
    fn eq(&self, other: &Fun<'cx>) -> bool {
        self.name() == other.name() && self.parameters == other.parameters
    }
}
impl<'cx> Eq for Fun<'cx> {}
impl<'cx> HasSpan for Fun<'cx> {
    /// The source span covering the function's declaration.
    fn span(&self) -> Span {
        self.span
    }
}
impl<'cx> Hash for Fun<'cx> {
    /// Hashes exactly the data `PartialEq::eq` compares (symbol + parameters).
    ///
    /// BUG FIX: the hash previously also mixed in `self.ty()` (the body's
    /// type), which `eq` ignores — two equal functions with different body
    /// types would hash differently, violating the `Hash`/`Eq` contract
    /// that hashed collections rely on.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.symbol.hash(state);
        self.parameters.hash(state);
    }
}
// SAFETY(review): claimed thread-safe because a `Fun` is immutable after
// construction — confirm `Expr` has no un-synchronized interior mutability.
unsafe impl<'cx> Sync for Fun<'cx> {}
unsafe impl<'cx> Send for Fun<'cx> {}
debug_from_display!(Fun);
derive_symbol!(Fun);
lazy_static! {
    // Sentinel returned by `Fun::unknown` for unresolved functions.
    static ref UNKNOWN_FUN: Fun<'static> = {
        Fun::new(Sym::from("<Unknown>"), span!(), Vec::new(), Expr::block(Vec::new(), span!()))
    };
}
// //==========================================================================//
// // CONCEPTS //
// //==========================================================================//
/// A fact known or required of an `Expr`.
pub struct Axiom<'cx> {
    // Human-readable form of the axiom, used by `Display`.
    repr: String,
    // Predicate encoding the axiom; not consulted anywhere visible yet.
    #[allow(dead_code)]
    predicate: Proc<'cx>
}
impl<'cx> Axiom<'cx> {
    /// Creates a new `Axiom`, given its string representation and the predicate
    /// it must satisfy.
    pub fn new<S: ToString>(repr: &S, predicate: Proc<'cx>) -> Self {
        Axiom { repr: repr.to_string(), predicate }
    }
    /// Returns a string representation of the axiom.
    pub fn representation(&self) -> &str {
        self.repr.as_str()
    }
    /// Returns whether the given type verifies the current axiom.
    ///
    /// Not implemented yet: currently always panics.
    pub fn verify<'tcx>(&self, _ty: &Ty<'tcx>) -> bool {
        unimplemented!()
    }
    /// Combines multiple axioms into a single axiom.
    ///
    /// Not implemented yet: the combined representation is assembled, but the
    /// combined predicate body is still TODO, so this currently always panics.
    pub fn combine(axioms: &[Axiom<'cx>]) -> Self {
        // make representation and expressions
        let mut repr = String::new();
        //let parameters = Vec::new();
        //let mut body = Vec::new();
        for axiom in axioms {
            // NOTE(review): representations are concatenated with no
            // separator — confirm that is the intended display form.
            repr.push_str(axiom.representation());
            //body.push(Call::new(axiom, ));
        }
        // build body
        unimplemented!()
        //let mut builder = Builder::new();
        //Axiom { repr, body }
    }
}
impl<'cx> Display for Axiom<'cx> {
    /// An axiom displays as its stored string representation.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.write_str(self.representation())
    }
}
debug_from_display!(Axiom);
/// The definition of a type that satisfies one or many `Axiom`s.
pub struct Concept<'cx> {
    // The concept's name.
    name: Sym,
    // All constituent axioms folded into one (see `Axiom::combine`).
    axiom: Axiom<'cx>
}
impl<'cx> Concept<'cx> {
    /// Creates a new concept, given its name and the axioms it must satisfy.
    ///
    /// NOTE(review): `Axiom::combine` is currently `unimplemented!`, so this
    /// constructor panics at runtime.
    pub fn new(name: Sym, axioms: &[Axiom<'cx>]) -> Self {
        Concept { name, axiom: Axiom::combine(axioms) }
    }
    /// Returns whether the given type matches the current concept.
    pub fn verify<'tcx>(&self, ty: &Ty<'tcx>) -> bool {
        self.axiom.verify(ty)
    }
}
impl<'cx> Display for Concept<'cx> {
    /// Displays as `name: axiom`.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}: {}", self.name, self.axiom)
    }
}
debug_from_display!(Concept);
|
use std::collections::HashSet;
use std::env;
use std::process::Command;
use walkdir::WalkDir;
use builtin::*;
use common::LOGGER;
/// Parsed representation of a single shell command invocation: program
/// name, arguments, optional redirections and pipe/wait flags.
#[derive(Clone, Debug)]
pub struct CommandStore {
    /// Program name (first word of the command line).
    pub name: String,
    /// Positional arguments passed to the program.
    pub args: Vec<String>,
    /// Path for stdin redirection, if any.
    pub stdin: Option<String>,
    /// Path for stdout redirection, if any.
    pub stdout: Option<String>,
    /// Path for stderr redirection, if any.
    pub stderr: Option<String>,
    /// Whether this command reads its input from a pipe.
    pub pipe_in: bool,
    /// Whether this command writes its output to a pipe.
    pub pipe_out: bool,
    /// Whether the shell should wait for the command (foreground execution).
    pub wait: bool,
}
impl CommandStore {
    /// Creates an empty command: no name, no args, no redirections, no
    /// pipes, foreground (`wait = true`) execution.
    pub fn new() -> CommandStore {
        CommandStore {
            name: String::new(),
            args: Vec::new(),
            stdin: None,
            stdout: None,
            stderr: None,
            pipe_in: false,
            pipe_out: false,
            wait: true,
        }
    }
    /// Sets the program name.
    pub fn add_name(&mut self, s: &str) {
        self.name = s.to_owned();
    }
    /// Appends one argument.
    pub fn add_arg(&mut self, s: &str) {
        self.args.push(s.to_owned())
    }
    /// Sets the stdin redirection target.
    pub fn add_stdin(&mut self, s: &str) {
        self.stdin = Some(s.to_owned());
    }
    /// Sets the stdout redirection target.
    pub fn add_stdout(&mut self, s: &str) {
        self.stdout = Some(s.to_owned());
    }
    /// Sets the stderr redirection target.
    pub fn add_stderr(&mut self, s: &str) {
        self.stderr = Some(s.to_owned());
    }
}
/// `Default` mirrors `new()` (clippy: `new_without_default`). Note that
/// `wait` defaults to `true`, unlike what a derived `Default` would give.
impl Default for CommandStore {
    fn default() -> Self {
        CommandStore::new()
    }
}
// Lifetime 'a is only ever instantiated with 'static in practice.
pub struct CommandList<'a> {
    // Executable names found on $PATH (populated once at construction).
    commands_in_path: HashSet<String>,
    // Executable paths found under the current working directory.
    commands_in_wd: HashSet<String>,
    // Names of the shell's built-in commands.
    commands_builtin: HashSet<&'a str>,
}
// TODO(review): could executability be decided from file permissions
// instead of treating every regular file as a command?
impl<'a> CommandList<'a> {
    /// Builds the command list. The $PATH scan happens only once, here;
    /// the working-directory set starts empty and is refreshed via
    /// `upd_wd_commands`.
    pub fn new() -> CommandList<'a> {
        let mut path_cmds = HashSet::new();
        // Collect the names of every regular file directly inside each
        // $PATH entry (depth exactly 1).
        if let Some(paths) = env::var_os("PATH") {
            for path in env::split_paths(&paths) {
                let dirname = path.to_str().unwrap().to_owned();
                for entry in WalkDir::new(&dirname).min_depth(1).max_depth(1) {
                    let e = ok_or_continue!(entry);
                    let fname = match e.file_name().to_os_string().into_string() {
                        Ok(s) => s,
                        Err(_) => panic!("Error in into_string"),
                    };
                    let fdata = ok_or_continue!(e.metadata());
                    if fdata.is_file() {
                        path_cmds.insert(fname);
                    }
                }
            }
        }
        let builtin_cmds: HashSet<&str> = BUILTIN_CMD.iter().cloned().collect();
        CommandList {
            commands_in_path: path_cmds,
            commands_in_wd: HashSet::new(),
            commands_builtin: builtin_cmds,
        }
    }
    /// Rescans `wd` recursively and replaces the working-directory
    /// command set with the regular files found there.
    pub fn upd_wd_commands(&mut self, wd: &str) {
        let mut wd_cmd = HashSet::new();
        for entry in WalkDir::new(wd).min_depth(1) {
            let e = match entry {
                Ok(e) => e,
                Err(err) => {
                    error!(LOGGER, "error in upd_wd, {:?}", err);
                    break;
                }
            };
            let fpath = e.path().to_str().unwrap().to_owned();
            let fdata = e.metadata().ok().unwrap();
            if fdata.is_file() {
                wd_cmd.insert(fpath);
            }
        }
        self.commands_in_wd = wd_cmd;
    }
    /// Executes each parsed command in order: built-ins run in-process,
    /// known external commands are spawned, anything else is reported.
    pub fn execute_command(&self, cmds: Vec<CommandStore>) {
        for storecm in cmds {
            trace!(LOGGER, "cmd: {:?}", storecm);
            let name = &*storecm.name;
            // Built-ins take precedence over external binaries.
            if self.commands_builtin.contains(name) {
                exec_builtin(&storecm);
                continue;
            }
            let ok = self.commands_in_path.contains(name) || self.commands_in_wd.contains(name);
            if ok {
                let mut cmd = Command::new(name);
                if !storecm.args.is_empty() {
                    cmd.args(&storecm.args);
                }
                // NOTE(review): the child is spawned but never waited on,
                // even when `storecm.wait` is true — confirm intended.
                cmd.spawn().expect("failed to execute process");
            } else {
                debug!(LOGGER, "Fugu: Unknown command '{}'", name)
            }
        }
    }
}
|
/// Placeholder entry point for the planner binary.
pub fn main() {
    println!("planner")
}
|
use io::{outportb,inportb};
/* This structure is what is on the stack at the time the interrupt is received by the
* rust code. This is accomplished through some magic x86 calling conventions */
#[derive(Copy)]
#[repr(C, packed)]
pub struct Registers {
ds: u32,
edi: u32, esi: u32, ebp: u32, esp: u32, ebx: u32, edx: u32, ecx: u32, eax: u32,
int_num: u32, err_code: u32,
eip: u32, cs: u32, eflags: u32, useresp: u32, ss: u32
}
/// Fallback handler installed in every IRQ slot: just logs the vector.
/// `regs` is taken by value (a copy of the stack frame), so a handler
/// cannot modify the interrupted context through it.
fn interrupt_unhandled(regs: Registers) {
    print!("unhandled interrupt: {}\n", regs.int_num);
}
// Dispatch table for the 16 PIC IRQs (vectors 32..48).
// NOTE(review): access to this `static mut` is unsynchronized; safe only
// while handler registration cannot race with interrupt delivery.
static mut interrupt_table: [fn(Registers); 16] = [interrupt_unhandled; 16];
/* General interrupt handler. If it's >= 32, then it's an external interrupt (like
 * the keyboard). If it isn't, then it's an exception (like divide by zero) */
#[no_mangle] //need no_mangle because this is called from assembly code
pub unsafe extern "C" fn interrupt_handler(regs: Registers)
{
    if regs.int_num >= 32 {
        // External IRQ: dispatch, then acknowledge the PIC(s).
        let irq: usize = regs.int_num as usize - 32;
        interrupt_table[irq](regs);
        if regs.int_num >= 40 {
            // Vector came through the slave PIC: it needs its own EOI first.
            outportb(0xA0, 0x20);
        }
        // End-of-interrupt to the master PIC.
        outportb(0x20, 0x20);
    } else {
        // CPU exception: report and halt.
        print!("EXCEPTION {}", regs.int_num);
        asm!("hlt");
    }
}
/// Installs `handler` for IRQ `index`.
/// NOTE(review): `index` is not validated; values >= 16 panic on the
/// array bounds check, which in kernel context is fatal.
pub fn register_handler(index: usize, handler: fn(Registers))
{
    unsafe {
        interrupt_table[index] = handler;
    }
}
/// Enables maskable interrupts (x86 `sti`).
pub fn sti()
{
    unsafe{ asm!("sti") };
}
/// Disables maskable interrupts (x86 `cli`).
pub fn cli()
{
    unsafe{ asm!("cli") };
}
|
#![feature(core_intrinsics)]
#![feature(asm)]
#![no_std]
#![no_main]
// #[macro_use]
// extern crate lazy_static;
mod arch;
mod bsp;
mod drivers;
mod interface;
mod memory;
mod panic;
mod runtime_init;
/// Board entry point: initialises the board support package, then parks
/// the core in an infinite loop (this function never returns).
pub fn bootloader_entry() -> ! {
    bsp::init();
    loop {}
}
|
use khonsu_tools::{
anyhow,
code_coverage::{self, CodeCoverage},
};
use structopt::StructOpt;
// CLI of the xtask helper binary. Plain `//` comments are used on purpose:
// `///` doc comments would become structopt help text and change the
// binary's --help output.
#[derive(StructOpt, Debug)]
pub enum Commands {
    GenerateCodeCoverageReport {
        // When set, installs the coverage tooling before running.
        #[structopt(long = "install-dependencies")]
        install_dependencies: bool,
    },
}
// Parses CLI arguments and dispatches to the requested task.
fn main() -> anyhow::Result<()> {
    let command = Commands::from_args();
    match command {
        Commands::GenerateCodeCoverageReport {
            install_dependencies,
        } => CodeCoverage::<CoverageConfig>::execute(install_dependencies),
    }
}
/// Coverage configuration: paths the report should skip.
struct CoverageConfig;
impl code_coverage::Config for CoverageConfig {
    /// Glob patterns excluded from the coverage report (example code only).
    fn ignore_paths() -> Vec<String> {
        ["circulate/examples/*", "bonsaidb/examples/*"]
            .iter()
            .map(|&p| String::from(p))
            .collect()
    }
}
|
//! BookCrossing commands.
mod cluster;
mod extract;
pub use cluster::Cluster;
pub use extract::Extract;
|
use std::borrow::Borrow;
use std::fmt::{Debug, Formatter, Error};
use std::rc::Rc;
use std::cmp::Ordering::*;
use Node::*;
use Color::*;
/// A persistent, immutable red-black tree.
///
/// "Mutating" operations return a fresh map that structurally shares
/// unchanged subtrees with the original via `Rc`.
pub struct RbMap<K, V> {
    root: Node<K, V>,
}
impl<K, V> RbMap<K, V> {
    /// Creates a new, empty map.
    pub fn new() -> RbMap<K, V> {
        RbMap { root: Leaf }
    }
    /// Inserts a key-value pair into the map.
    /// Returns the updated map and the previous element with the given key,
    /// if any. The returned reference borrows from `self`, not from the
    /// newly returned map.
    pub fn insert<'a>(&'a self, k: K, v: V) -> (RbMap<K, V>, Option<&'a (K, V)>) where K: Ord {
        let (node, prev) = self.root.insert(k, v);
        (RbMap { root: node }, prev.map(|x| &**x))
    }
    /// Inserts a key-value pair destructively.
    /// Returns the previous element with the given key, if any.
    pub fn insert_in_place<'a>(&'a mut self, k: K, v: V) -> Option<Rc<(K, V)>> where K: Ord {
        // Clone the evicted entry's Rc inside an inner scope so the borrow
        // of the old root ends before `self.root` is reassigned.
        let (node, prev) = {
            let (node, prev) = self.root.insert(k, v);
            (node, prev.cloned())
        };
        self.root = node;
        prev
    }
    /// Looks up the given key in the map.
    pub fn get<'a, Q: ?Sized + Ord>(&'a self, k: &Q) -> Option<&'a V> where K: Borrow<Q> {
        self.root.lookup(k)
    }
    /// Removes a key from the map, if it exists.
    /// Returns the new map and the removed element.
    ///
    /// FIXME: Actually implement this — `Node::remove` currently always
    /// panics, so this method is unusable as-is.
    pub fn remove<'a, Q: ?Sized + Ord>(&'a self, k: &Q) -> (RbMap<K, V>, Option<&'a (K, V)>) where K: Borrow<Q> {
        if let Some((node, prev)) = self.root.remove(k) {
            (RbMap { root: node }, Some(prev))
        } else {
            (self.clone(), None)
        }
    }
    /// Iterates by reference over all the elements in the map, in key order.
    pub fn iter<'a>(&'a self) -> RbMapIter<'a, K, V> {
        RbMapIter { nodes: vec![IterNode::Node(&self.root)] }
    }
}
/// The empty map, equivalent to `RbMap::new()` (clippy: `new_without_default`).
impl<K, V> Default for RbMap<K, V> {
    fn default() -> Self {
        RbMap::new()
    }
}
impl<K, V> Clone for RbMap<K, V> {
    /// Cheap clone: shares the entire tree through `Rc` reference counts.
    fn clone(&self) -> RbMap<K, V> {
        RbMap { root: self.root.clone() }
    }
}
impl<K: Debug, V: Debug> Debug for RbMap<K, V> {
    /// Formats as `{k: v, k: v, }` in key order.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        // `?` replaces the long-deprecated `try!` macro.
        write!(f, "{{")?;
        for &(ref key, ref value) in self.iter() {
            // TODO: For extra prettiness, get rid of last comma.
            write!(f, "{:?}: {:?}, ", key, value)?;
        }
        write!(f, "}}")?;
        Ok(())
    }
}
enum IterNode<'a, K: 'a, V: 'a> {
Node(&'a Node<K, V>),
Item(&'a (K, V)),
}
/// An iterator over the entries of an RbMap.
pub struct RbMapIter<'a, K: 'a, V: 'a> {
nodes: Vec<IterNode<'a K, V>>,
}
impl<'a, K, V> Iterator for RbMapIter<'a, K, V> {
    type Item = &'a (K, V);
    /// In-order traversal using the explicit stack: each branch is expanded
    /// into (right, item, left) — pushed in reverse so left pops first.
    fn next(&mut self) -> Option<&'a (K, V)> {
        loop {
            let n = self.nodes.pop();
            match n {
                Some(IterNode::Node(&Branch(_, ref l, ref m, ref r))) => {
                    self.nodes.push(IterNode::Node(&**r));
                    self.nodes.push(IterNode::Item(&**m));
                    self.nodes.push(IterNode::Node(&**l));
                }
                // Leaves contribute nothing; keep popping.
                Some(IterNode::Node(&Leaf)) => (),
                Some(IterNode::Item(x)) => return Some(x),
                None => return None,
            }
        }
    }
}
/// Node colour for red-black balancing.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum Color {
    Red,
    Black,
}
/// Tree node: an interior branch (colour, left, entry, right) or a leaf.
#[derive(Debug)]
enum Node<K, V> {
    Branch(Color, Rc<Node<K, V>>, Rc<(K, V)>, Rc<Node<K, V>>),
    Leaf
}
impl<K, V> Clone for Node<K, V> {
    /// Shallow clone: children and entry are shared via `Rc::clone`.
    fn clone(&self) -> Node<K, V> {
        match *self {
            Branch(c, ref l, ref m, ref r) => Branch(c, l.clone(), m.clone(), r.clone()),
            Leaf => Leaf
        }
    }
}
/*
impl<K, V> Node<K, V> {
    fn into_quasi(self) -> Quasi<K, V> {
        match self {
            Node::Red(a, b, c) => Quasi::Red(a, b, c),
            Node::Black(a, b, c) => Quasi::Black(a, b, c),
            Node::Leaf => Quasi::BLeaf,
        }
    }
}
enum Quasi<K, V> {
    NBlack(Rc<Node<K, V>>, (K, V), Rc<Node<K, V>>),
    Red(Rc<Node<K, V>>, (K, V), Rc<Node<K, V>>),
    Black(Rc<Node<K, V>>, (K, V), Rc<Node<K, V>>),
    BBlack(Rc<Node<K, V>>, (K, V), Rc<Node<K, V>>),
    BLeaf,
    BBLeaf,
}
fn red<K, V>(l: Rc<Node<K, V>>, m: Rc<(K, V)>, r: Rc<Node<K, V>>) -> Rc<Node<K, V>> {
    Rc::new(Branch(Red, l, m, r))
}
*/
/// Convenience constructor for a black branch.
fn black<K, V>(l: Rc<Node<K, V>>, m: Rc<(K, V)>, r: Rc<Node<K, V>>) -> Rc<Node<K, V>> {
    Rc::new(Branch(Black, l, m, r))
}
// XXX: allocates a fresh Rc for every leaf; a shared sentinel would avoid this.
fn leaf<K, V>() -> Rc<Node<K, V>> {
    Rc::new(Leaf)
}
/// Okasaki-style red-black rebalance: if a red child of `t` has a red
/// child of its own (any of the four configurations), rotate so the middle
/// key becomes a red root with two black children; otherwise return `t`
/// unchanged.
///
/// NOTE(review): the input colour is matched with `_` and the rotated root
/// is always `Red`; Okasaki's `balance` assumes a black input, so confirm
/// a red input can never reach a rotation here.
fn balance<K, V>(t: Node<K, V>) -> Node<K, V> {
    // With manual derefs, because Rc.
    match t {
        Branch(_, ref l, ref m, ref r) => {
            // Left child is red. Note the shadowing: inside this block `l`
            // is re-bound to the left child's own *left* subtree.
            if let Branch(Red, ref l, ref lm, ref lr) = **l {
                // Left-left red grandchild (a x b).
                if let Branch(Red, ref a, ref x, ref b) = **l {
                    return Branch(Red, black(a.clone(), x.clone(), b.clone()), lm.clone(), black(lr.clone(), m.clone(), r.clone()));
                }
                // Left-right red grandchild (b y c).
                if let Branch(Red, ref b, ref y, ref c) = **lr {
                    return Branch(Red, black(l.clone(), lm.clone(), b.clone()), y.clone(), black(c.clone(), m.clone(), r.clone()));
                }
            }
            // Right child is red; `r` is likewise shadowed with the right
            // child's own *right* subtree.
            if let Branch(Red, ref rl, ref rm, ref r) = **r {
                // Right-left red grandchild (b y c).
                if let Branch(Red, ref b, ref y, ref c) = **rl {
                    return Branch(Red, black(l.clone(), m.clone(), b.clone()), y.clone(), black(c.clone(), rm.clone(), r.clone()));
                }
                // Right-right red grandchild (c z d).
                if let Branch(Red, ref c, ref z, ref d) = **r {
                    return Branch(Red, black(l.clone(), m.clone(), rl.clone()), rm.clone(), black(c.clone(), z.clone(), d.clone()));
                }
            }
        }
        _ => ()
    }
    t
}
impl<K, V> Node<K, V> {
    /// Recursive insert. Returns the new subtree plus the replaced entry
    /// (if the key already existed). New nodes start red; `balance` repairs
    /// red-red violations on the way back up.
    fn insert(&self, k: K, v: V) -> (Node<K, V>, Option<&Rc<(K, V)>>) where K: Ord {
        match *self {
            Branch(c, ref l, ref m, ref r) => match k.cmp(&m.0) {
                Less => {
                    let (node, prev) = l.insert(k, v);
                    (balance(Branch(c, Rc::new(node), m.clone(), r.clone())), prev)
                }
                Greater => {
                    let (node, prev) = r.insert(k, v);
                    (balance(Branch(c, l.clone(), m.clone(), Rc::new(node))), prev)
                }
                Equal => {
                    // Same key: replace the entry, hand back the old Rc.
                    (Branch(c, l.clone(), Rc::new((k, v)), r.clone()), Some(m))
                }
            },
            Leaf => (Branch(Red, leaf(), Rc::new((k, v)), leaf()), None)
        }
    }
    /// Standard binary-search-tree lookup by borrowed key.
    fn lookup<'a, Q: ?Sized + Ord>(&'a self, k: &Q) -> Option<&'a V> where K: Borrow<Q> {
        match *self {
            Branch(_, ref l, ref m, ref r) => match k.cmp(Borrow::borrow(&m.0)) {
                Less => l.lookup(k),
                Greater => r.lookup(k),
                Equal => Some(&m.1),
            },
            Leaf => None,
        }
    }
    /// Unimplemented: always panics (see the FIXME on `RbMap::remove`).
    fn remove<'a, Q: ?Sized + Ord>(&'a self, _k: &Q) -> Option<(Node<K, V>, &'a (K, V))> where K: Borrow<Q> {
        panic!()
    }
}
#[cfg(test)]
impl<K, V> Node<K, V> {
    /// Leaves count as black; among branches, only `Branch(Black, ..)`.
    fn is_black(&self) -> bool {
        match *self {
            Branch(Black, _, _, _) | Leaf => true,
            _ => false,
        }
    }
    /// Asserts the red-black invariants below this node and returns the
    /// black-depth (black nodes accumulated from the root, carried in `x`):
    /// both subtrees must have equal black-depth, and a red node must have
    /// black children — the latter only enforced once `x > 0`, i.e. after
    /// at least one black ancestor has been passed.
    fn check_depth(&self, x: usize) -> usize {
        match *self {
            Branch(c, ref l, _, ref r) => {
                let diff = if c == Black { 1 } else { 0 };
                let ld = l.check_depth(x+diff);
                let rd = r.check_depth(x+diff);
                assert_eq!(ld, rd);
                if c == Red && x > 0 {
                    assert!(l.is_black());
                    assert!(r.is_black());
                }
                ld
            }
            Leaf => x
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Local matches-style assertion: succeeds iff `$e` matches pattern `$p`.
    macro_rules! assert_matches {
        ($e: expr, $p: pat) => (assert!(if let $p = $e { true } else { false }));
    }
    // Exercises insert/get/iter on a handful of keys, re-checking the
    // red-black invariants after every insertion.
    #[test]
    fn basic() {
        let m = RbMap::new();
        assert_eq!(m.root.check_depth(0), 0);
        let (m, x) = m.insert(1, 2);
        assert_eq!(x, None);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), vec![(1, 2)]);
        m.root.check_depth(0);
        let (m, x) = m.insert(2, 3);
        assert_eq!(x, None);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), vec![(1, 2), (2, 3)]);
        m.root.check_depth(0);
        let (m, x) = m.insert(4, 2);
        assert_eq!(x, None);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), vec![(1, 2), (2, 3), (4, 2)]);
        m.root.check_depth(0);
        let (m, x) = m.insert(3, 10);
        assert_eq!(x, None);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), vec![(1, 2), (2, 3), (3, 10), (4, 2)]);
        assert_matches!(m.get(&1), Some(&2));
        assert_matches!(m.get(&3), Some(&10));
        assert_matches!(m.get(&30), None);
        m.root.check_depth(0);
        // Re-inserting an existing key yields the replaced entry.
        let (m, x) = m.insert(1, 12);
        assert!(if let Some(&(1, 2)) = x { true } else { false });
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), vec![(1, 12), (2, 3), (3, 10), (4, 2)]);
        m.root.check_depth(0);
        let (m, x) = m.insert(-100, 0);
        assert_eq!(x, None);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), vec![(-100, 0), (1, 12), (2, 3), (3, 10), (4, 2)]);
        m.root.check_depth(0);
        assert_matches!(m.get(&-100), Some(&0));
        assert_matches!(m.get(&4), Some(&2));
        assert_matches!(m.get(&5), None);
    }
    // Ascending bulk insert.
    #[test]
    fn range() {
        let m = (0..100).fold(RbMap::new(), |x, v| x.insert(v, v*2).0);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), (0..100).map(|v| (v, v*2)).collect::<Vec<_>>());
        assert_matches!(m.get(&3), Some(&6));
        assert_matches!(m.get(&30), Some(&60));
        assert_matches!(m.get(&300), None);
    }
    // Descending bulk insert (stresses the opposite rotation cases).
    #[test]
    fn rev_range() {
        let m = (0..100).rev().fold(RbMap::new(), |x, v| x.insert(v, v*2).0);
        assert_eq!(m.iter().cloned().collect::<Vec<_>>(), (0..100).map(|v| (v, v*2)).collect::<Vec<_>>());
    }
}
|
mod short_msg_kat_224;
mod short_msg_kat_256;
mod short_msg_kat_384;
mod short_msg_kat_512;
mod short_msg_kat_shake128;
mod short_msg_kat_shake256;
|
#![feature(box_patterns)]
#![feature(str_escape)]
#![feature(slice_patterns)]
extern crate runic;
extern crate winit;
extern crate futures;
extern crate toml;
#[macro_use]
extern crate json;
extern crate mio;
extern crate regex;
#[cfg(target_os="windows")]
extern crate mio_named_pipes;
// txd: a text editor🖳
mod buffer;
mod mode;
mod res;
mod app;
mod movement;
mod lsp;
//mod fs_util;
use runic::*;
use winit::*;
use app::*;
use std::error::Error;
/// Errors raised while loading or validating the editor configuration.
#[derive(Debug)]
enum ConfigError {
    // Underlying parse failure from the config deserializer.
    Parse(Box<Error>),
    // A required config key was absent.
    Missing(&'static str),
    // A config key was present but its value was unusable.
    Invalid(&'static str)
}
impl Error for ConfigError {
    // NOTE(review): `description`/`cause` are the pre-1.27 `Error` API
    // (superseded by `Display` + `source`); kept as-is for compatibility
    // with the rest of this crate.
    fn description(&self) -> &str {
        match self {
            &ConfigError::Parse(_) => "parse error",
            &ConfigError::Missing(_) => "incomplete config",
            &ConfigError::Invalid(_) => "invalid config"
        }
    }
    // Only parse errors carry an underlying cause.
    fn cause(&self) -> Option<&Error> {
        match self {
            &ConfigError::Parse(ref e) => Some(e.as_ref()),
            _ => None
        }
    }
}
use std::fmt::*;
impl Display for ConfigError {
fn fmt(&self, f: &mut Formatter) -> Result {
match self {
&ConfigError::Parse(ref e) => write!(f, "parse error: {}", e),
&ConfigError::Missing(v) => write!(f, "missing config value \"{}\"", v),
&ConfigError::Invalid(v) => write!(f, "invalid config value \"{}\"", v)
}
}
}
/// Entry point: initialises the runtime, opens a 1280x640 window titled
/// "txd", creates the render context, and hands control to the app loop.
fn main() {
    runic::init();
    let mut evl = EventsLoop::new();
    let mut window = WindowBuilder::new().with_dimensions(1280, 640).with_title("txd").build(&evl).expect("create window!");
    let mut rx = RenderContext::new(&mut window).expect("create render context!");
    let mut app = TxdApp::init(&mut rx);
    app.run(&mut rx, &mut evl);
}
|
#![allow(clippy::unreadable_literal)]
//! Solarized
//! <https://ethanschoonover.com/solarized/>
use iced::color;
use crate::gui::styles::types::custom_palette::{CustomPalette, PaletteExtension};
use crate::gui::styles::types::palette::Palette;
/// Solarized light (Day style)
pub(in crate::gui::styles) fn solarized_light() -> CustomPalette {
    CustomPalette {
        palette: Palette {
            primary: color!(0xfdf6e3), // base3
            secondary: color!(0x859900), // green
            outgoing: color!(0x268bd2), // blue
            buttons: color!(0x93a1a1), // base1
            text_headers: color!(0xfdf6e3), // base3
            text_body: color!(0x002b36), // base03
        },
        extension: PaletteExtension {
            // NOTE(review): light uses 0.9 alpha here while the dark
            // variant below uses full alpha — confirm intentional.
            starred: color!(0xb58900, 0.9), // yellow
            chart_badge_alpha: 0.75,
            round_borders_alpha: 0.35,
            round_containers_alpha: 0.15,
        },
    }
}
/// Solarized dark (Night style)
pub(in crate::gui::styles) fn solarized_dark() -> CustomPalette {
    CustomPalette {
        palette: Palette {
            primary: color!(0x002b36), // base03
            secondary: color!(0x859900), // green
            outgoing: color!(0x268bd2), // blue
            buttons: color!(0x586e75), // base01
            text_headers: color!(0x002b36), // base03
            text_body: color!(0xeee8d5), // base2
        },
        extension: PaletteExtension {
            starred: color!(0xb58900), // yellow
            chart_badge_alpha: 0.25,
            round_borders_alpha: 0.15,
            round_containers_alpha: 0.08,
        },
    }
}
|
use super::utils::*;
use std::io::{Error, ErrorKind};
use winapi::shared::minwindef::{DWORD, HMODULE};
use winapi::um::{
winnt::HANDLE,
psapi::{GetModuleInformation, MODULEINFO, EnumProcessModules, GetModuleBaseNameA},
};
/// A module (DLL/EXE image) located inside a Windows process.
#[derive(Debug)]
pub struct Module {
    /// Base address of the module image.
    pub base: usize,
    /// Size of the module image in bytes (`MODULEINFO::SizeOfImage`).
    pub size: usize,
}
impl Module {
    /// Converts a module-relative offset into an absolute address.
    fn fix_offset(&self, offset: usize) -> usize {
        (self.base as usize) + offset
    }
    /// Reinterprets the memory at `base + offset` as a `&T`.
    ///
    /// # Safety
    /// The address must be mapped and hold a valid, properly aligned `T`
    /// for the returned lifetime. NOTE(review): this dereferences the
    /// pointer directly, so it only works when the module is mapped into
    /// the *current* process, not through a remote process handle.
    pub unsafe fn read<T>(&self, offset: usize) -> &T {
        &*(self.fix_offset(offset) as *const T)
    }
    /// Writes `value` at `base + offset`.
    ///
    /// # Safety
    /// Same addressing requirements as `read`, plus the page must be
    /// writable.
    pub unsafe fn write<T>(&mut self, offset: usize, value: T) {
        *(self.fix_offset(offset) as *mut T) = value;
    }
    /// Finds the module named `dll_name` among the modules of
    /// `process_handle`, returning its base address and image size.
    pub fn find_in_process(process_handle: HANDLE, dll_name: &str) -> Result<Module, Error> {
        let mut size_needed: DWORD = 0;
        // First call with an empty buffer just to learn the required size.
        let result = unsafe { EnumProcessModules(process_handle, std::ptr::null_mut(), 0, &mut size_needed) };
        if result == 0 {
            return Err(Error::last_os_error());
        }
        let handle_size = std::mem::size_of::<HMODULE>() as u32;
        let module_count = size_needed / handle_size;
        let mut modules: Vec<HMODULE> = vec![std::ptr::null_mut(); module_count as usize];
        // Second call fills the now correctly-sized handle buffer.
        let result = unsafe {
            EnumProcessModules(
                process_handle,
                modules.as_mut_ptr(),
                module_count * handle_size,
                &mut size_needed,
            )
        };
        if result == 0 {
            return Err(Error::new(ErrorKind::Other, "failed to enumerate process modules"));
        }
        // NOTE(review): base names longer than the 50-byte buffer are
        // truncated by GetModuleBaseNameA and would never match `dll_name`.
        const MODULE_NAME_LEN: usize = 50;
        let mut module_name_buf: [i8; MODULE_NAME_LEN] = [0; MODULE_NAME_LEN];
        for module_handle in modules {
            let read_len = unsafe {
                GetModuleBaseNameA(
                    process_handle,
                    module_handle,
                    &mut module_name_buf[0],
                    MODULE_NAME_LEN as DWORD,
                )
            };
            if read_len == 0 {
                // Name lookup failed for this module; skip it.
                continue;
            }
            let cur_mod_name = std::str::from_utf8(&realign_unchecked(&module_name_buf)[..read_len as usize])
                .map_err(|_| Error::new(ErrorKind::Other, "failed to convert string"))?;
            let cur_mod_info = Self::get_module_info(process_handle, module_handle)?;
            if cur_mod_name == dll_name {
                return Ok(Module {
                    base: module_handle as usize,
                    size: cur_mod_info.SizeOfImage as usize,
                });
            }
        }
        return Err(Error::new(ErrorKind::Other, "failed to find module"));
    }
    /// Thin wrapper around `GetModuleInformation` for one module handle.
    fn get_module_info(process_handle: HANDLE, module_handle: HMODULE) -> Result<MODULEINFO, Error> {
        let mut result = MODULEINFO {
            EntryPoint: std::ptr::null_mut(),
            SizeOfImage: 0,
            lpBaseOfDll: std::ptr::null_mut(),
        };
        let success = unsafe {
            GetModuleInformation(
                process_handle,
                module_handle,
                &mut result,
                std::mem::size_of::<MODULEINFO>() as u32,
            )
        };
        if success == 0 {
            Err(Error::last_os_error())
        } else {
            Ok(result)
        }
    }
}
|
use ring::{agreement, rand};
/// Smoke test for `ring`: constructs the system RNG and logs it.
///
/// The X25519 key-agreement portion is commented out because, per the
/// note below, it breaks the WASM build.
pub fn example() -> Result<(), Box<dyn std::error::Error>> {
    let rng = rand::SystemRandom::new();
    info!("rng {:?}", rng);
    // This next line breaks WASM:
    // let my_private_key = agreement::EphemeralPrivateKey::generate(&agreement::X25519, &rng).unwrap();
    //
    // info!("SC: {:?}", my_private_key);
    //
    // // Make `my_public_key` a byte slice containing my public key. In a real
    // // application, this would be sent to the peer in an encoded protocol
    // // message.
    // let my_public_key = my_private_key.compute_public_key().unwrap();
    //
    // info!("PK!! {:?}", my_public_key);
    Ok(())
}
|
use crate::fast_buf::ConsumeBuf;
use crate::fast_buf::FastBuf;
use crate::limit::LimitWrite;
use crate::mpsc::{Receiver, Sender};
use crate::server::{DriveExternal, SyncDriveExternal};
use crate::AsyncRead;
use crate::Error;
use futures_util::future::poll_fn;
use futures_util::ready;
use std::fmt;
use std::io;
use std::io::Read;
use std::mem;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Send some body data to a remote peer.
///
/// Obtained either via a [`client::SendRequest`] or a [`server::SendResponse`].
///
/// [`client::SendRequest`]: client/struct.SendRequest.html
/// [`server::SendResponse`]: server/struct.SendResponse.html
pub struct SendStream {
    // Channel feeding (chunk, end-flag) pairs to the connection driver.
    tx_body: Sender<(Vec<u8>, bool)>,
    // Enforces content-length / chunked-transfer framing.
    limit: LimitWrite,
    // Set once the final chunk has been sent; further sends are an error.
    ended: bool,
    // Present on the server side, where each send must also drive the connection.
    drive_external: Option<SyncDriveExternal>,
}
impl SendStream {
    pub(crate) fn new(
        tx_body: Sender<(Vec<u8>, bool)>,
        limit: LimitWrite,
        ended: bool,
        drive_external: Option<SyncDriveExternal>,
    ) -> Self {
        SendStream {
            tx_body,
            limit,
            ended,
            drive_external,
        }
    }
    /// Send one chunk of data. Use `end_of_body` to signal end of data.
    ///
    /// When the body is constrained by a `content-length` header, this will only accept
    /// the amount of bytes specified in the header. If there is too much data, the
    /// function will error with a `Error::User`.
    ///
    /// For `transfer-encoding: chunked`, call to this function corresponds to one "chunk".
    pub async fn send_data(&mut self, data: &[u8], end_of_body: bool) -> Result<(), Error> {
        let data = Data::Shared(data);
        self.do_send(data, end_of_body).await?;
        Ok(())
    }
    /// Send one chunk of data. Use `end_of_body` to signal end of data.
    ///
    /// This is an optimization which together with a `content-length` shortcuts
    /// some unnecessary copying of data.
    ///
    /// When the body is constrained by a `content-length` header, this will only accept
    /// the amount of bytes specified in the header. If there is too much data, the
    /// function will error with a `Error::User`.
    ///
    /// For `transfer-encoding: chunked`, call to this function corresponds to one "chunk".
    pub async fn send_data_owned(&mut self, data: Vec<u8>, end_of_body: bool) -> Result<(), Error> {
        let data = Data::Owned(data);
        self.do_send(data, end_of_body).await?;
        Ok(())
    }
    // Common send path: drive the connection, enqueue the chunk, drive again.
    async fn do_send(&mut self, mut data: Data<'_>, end_of_body: bool) -> Result<(), Error> {
        trace!("Send len={} end_of_body={}", data.len(), end_of_body);
        poll_fn(|cx| self.poll_drive_server(cx)).await?;
        poll_fn(|cx| Pin::new(&mut *self).poll_send_data(cx, &mut data, end_of_body)).await?;
        poll_fn(|cx| self.poll_drive_server(cx)).await?;
        // If content is ended, we effectively "flush", by keep doing poll_drive_external
        // until we have driven all content through. This is only needed when we have
        // drive_external (server side), since it means we are "driving" the connection
        // from this very send action.
        if self.ended && self.drive_external.is_some() {
            while !self.tx_body.is_empty() {
                poll_fn(|cx| self.poll_drive_server(cx)).await?;
            }
        }
        Ok(())
    }
    // Drives the server-side connection when present; no-op on the client side.
    fn poll_drive_server(&mut self, cx: &mut Context) -> Poll<Result<(), io::Error>> {
        if let Some(drive_external) = &self.drive_external {
            drive_external.poll_drive_external(cx)
        } else {
            Ok(()).into()
        }
    }
    /// Send some body data.
    ///
    /// `end` controls whether this is the last body chunk to send. It's an error
    /// to send more data after `end` is `true`.
    fn poll_send_data(
        self: Pin<&mut Self>,
        cx: &mut Context,
        data: &mut Data,
        end: bool,
    ) -> Poll<Result<(), Error>> {
        let this = self.get_mut();
        if this.ended && end && data.is_empty() {
            // this is a noop
            return Ok(()).into();
        }
        if this.ended {
            warn!("Body data is not expected");
            return Err(Error::User("Body data is not expected".into())).into();
        }
        // Wait until the body channel has capacity; a closed channel means
        // the peer hung up.
        if !ready!(Pin::new(&this.tx_body).poll_ready(cx, true)) {
            return Err(
                io::Error::new(io::ErrorKind::ConnectionAborted, "Connection closed").into(),
            )
            .into();
        }
        let to_send = if data.is_owned() && this.limit.can_write_entire_vec() {
            // This is an optmization when sending owned data. We can pass the
            // Vec<u8> straight into the this.tx_body.send without copying the
            // data into a FastBuf first.
            let data = data.take_owned();
            // so limit counters are correct
            this.limit.accept_entire_vec(&data);
            data
        } else {
            // This branch handles shared data as well as chunked body transfer.
            let capacity = data.len() + this.limit.overhead();
            let mut chunk = FastBuf::with_capacity(capacity);
            this.limit.write(&data[..], &mut chunk)?;
            if end {
                this.ended = true;
                this.limit.finish(&mut chunk)?;
            }
            chunk.into_vec()
        };
        let sent = this.tx_body.send((to_send, end));
        if !sent {
            return Err(
                io::Error::new(io::ErrorKind::ConnectionAborted, "Connection closed").into(),
            )
            .into();
        }
        Ok(()).into()
    }
}
/// Receives a body from the remote peer.
///
/// Obtained from either a [`client::ResponseFuture`] or [`server::Connection`].
///
/// [`client::ResponseFuture`]: client/struct.ResponseFuture.html
/// [`server::Connection`]: server/struct.Connection.html
pub struct RecvStream {
    // Channel delivering body chunks (or I/O errors) from the connection driver.
    rx_body: Receiver<io::Result<Vec<u8>>>,
    // Partially-consumed chunk waiting to be read out.
    ready: Option<ConsumeBuf>,
    // Set once the channel closes; subsequent reads return 0.
    ended: bool,
    // Present on the server side, where reading must also drive the connection.
    drive_external: Option<SyncDriveExternal>,
}
impl RecvStream {
    pub(crate) fn new(
        rx_body: Receiver<io::Result<Vec<u8>>>,
        ended: bool,
        drive_external: Option<SyncDriveExternal>,
    ) -> Self {
        RecvStream {
            rx_body,
            ready: None,
            ended,
            drive_external,
        }
    }
    /// Read some body data into a given buffer.
    ///
    /// Ends when returned size is `0`.
    pub async fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> {
        Ok(poll_fn(move |cx| Pin::new(&mut *self).poll_read(cx, buf)).await?)
    }
    /// Returns `true` if there is no more data to receive.
    ///
    /// Specifically any further call to `read` will result in `0` bytes read.
    pub fn is_end_stream(&self) -> bool {
        self.ended
    }
    // Drives the server-side connection when present; no-op on the client side.
    fn poll_drive_server(&mut self, cx: &mut Context) -> Poll<Result<(), io::Error>> {
        if let Some(drive_external) = &self.drive_external {
            drive_external.poll_drive_external(cx)
        } else {
            Ok(()).into()
        }
    }
    #[doc(hidden)]
    /// Poll for some body data.
    fn poll_body_data(
        self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let this = self.get_mut();
        if this.ended {
            return Ok(0).into();
        }
        loop {
            // First ship out ready data already received.
            if let Some(ready) = &mut this.ready {
                let amt = (&ready[..]).read(buf)?;
                ready.consume(amt);
                if ready.is_empty() {
                    this.ready = None;
                }
                return Ok(amt).into();
            }
            // invariant: Should be no ready bytes if we're here.
            assert!(this.ready.is_none());
            match ready!(Pin::new(&this.rx_body).poll_recv(cx, true)) {
                None => {
                    // Channel is closed which indicates end of body.
                    this.ended = true;
                    return Ok(0).into();
                }
                Some(v) => {
                    // nested io::Error
                    let v = v?;
                    this.ready = Some(ConsumeBuf::new(v));
                }
            }
        }
    }
}
impl AsyncRead for RecvStream {
    /// Drives the connection (server side) then reads buffered body data.
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let this = self.get_mut();
        // can't poll data with an empty buffer
        assert!(!buf.is_empty(), "poll_read with empty buf");
        ready!(this.poll_drive_server(cx))?;
        Pin::new(this).poll_body_data(cx, buf)
    }
}
impl fmt::Debug for SendStream {
    /// Opaque debug form: just the type name, no internal state.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("SendStream")
    }
}
impl fmt::Debug for RecvStream {
    /// Opaque debug form: just the type name, no internal state.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("RecvStream")
    }
}
/// Body data handed to `poll_send_data`: either borrowed from the caller
/// or owned. `Empty` is the placeholder left behind by `take_owned`.
enum Data<'a> {
    Shared(&'a [u8]),
    Owned(Vec<u8>),
    Empty,
}
impl<'a> Data<'a> {
    /// True only for the `Owned` variant.
    fn is_owned(&self) -> bool {
        matches!(self, Data::Owned(_))
    }
    /// True when there are no body bytes (`Empty` is always empty).
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Number of body bytes (0 for `Empty`).
    pub fn len(&self) -> usize {
        match self {
            Data::Shared(v) => v.len(),
            Data::Owned(v) => v.len(),
            Data::Empty => 0,
        }
    }
    /// Moves the owned buffer out, leaving `Data::Empty` behind.
    ///
    /// # Panics
    /// Panics if `self` is not `Data::Owned`; in that case `self` is left
    /// unchanged (callers check `is_owned()` first).
    pub fn take_owned(&mut self) -> Vec<u8> {
        match self {
            Data::Owned(_) => match mem::replace(self, Data::Empty) {
                Data::Owned(v) => v,
                // We just matched Owned above and replace can't race.
                _ => unreachable!(),
            },
            _ => panic!("Can't take_owned"),
        }
    }
}
impl<'a> std::ops::Deref for Data<'a> {
    type Target = [u8];
    /// Views the body bytes as a slice.
    ///
    /// # Panics
    /// Panics on `Data::Empty`, which holds no bytes to borrow.
    fn deref(&self) -> &Self::Target {
        match self {
            Data::Shared(v) => &v[..],
            Data::Owned(v) => &v[..],
            Data::Empty => panic!("Can't deref a Data::Empty"),
        }
    }
}
/// Check if kind indicates the other side closed the connection.
pub(crate) fn is_closed_kind(kind: io::ErrorKind) -> bool {
    matches!(
        kind,
        io::ErrorKind::UnexpectedEof
            | io::ErrorKind::ConnectionReset
            | io::ErrorKind::ConnectionAborted
    )
}
|
// NOTE(review): svd2rust-generated register accessor for MACHWF2R.
// Prefer regenerating from the SVD over editing this file by hand.
#[doc = "Register `MACHWF2R` reader"]
pub type R = crate::R<MACHWF2R_SPEC>;
#[doc = "Field `RXQCNT` reader - Number of MTL Receive Queues"]
pub type RXQCNT_R = crate::FieldReader;
#[doc = "Field `TXQCNT` reader - Number of MTL Transmit Queues"]
pub type TXQCNT_R = crate::FieldReader;
#[doc = "Field `RXCHCNT` reader - Number of DMA Receive Channels"]
pub type RXCHCNT_R = crate::FieldReader;
#[doc = "Field `TXCHCNT` reader - Number of DMA Transmit Channels"]
pub type TXCHCNT_R = crate::FieldReader;
#[doc = "Field `PPSOUTNUM` reader - Number of PPS Outputs"]
pub type PPSOUTNUM_R = crate::FieldReader;
#[doc = "Field `AUXSNAPNUM` reader - Number of Auxiliary Snapshot Inputs"]
pub type AUXSNAPNUM_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:3 - Number of MTL Receive Queues"]
    #[inline(always)]
    pub fn rxqcnt(&self) -> RXQCNT_R {
        RXQCNT_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 6:9 - Number of MTL Transmit Queues"]
    #[inline(always)]
    pub fn txqcnt(&self) -> TXQCNT_R {
        TXQCNT_R::new(((self.bits >> 6) & 0x0f) as u8)
    }
    #[doc = "Bits 12:15 - Number of DMA Receive Channels"]
    #[inline(always)]
    pub fn rxchcnt(&self) -> RXCHCNT_R {
        RXCHCNT_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bits 18:21 - Number of DMA Transmit Channels"]
    #[inline(always)]
    pub fn txchcnt(&self) -> TXCHCNT_R {
        TXCHCNT_R::new(((self.bits >> 18) & 0x0f) as u8)
    }
    #[doc = "Bits 24:26 - Number of PPS Outputs"]
    #[inline(always)]
    pub fn ppsoutnum(&self) -> PPSOUTNUM_R {
        PPSOUTNUM_R::new(((self.bits >> 24) & 7) as u8)
    }
    #[doc = "Bits 28:30 - Number of Auxiliary Snapshot Inputs"]
    #[inline(always)]
    pub fn auxsnapnum(&self) -> AUXSNAPNUM_R {
        AUXSNAPNUM_R::new(((self.bits >> 28) & 7) as u8)
    }
}
#[doc = "HW feature 2 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`machwf2r::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MACHWF2R_SPEC;
impl crate::RegisterSpec for MACHWF2R_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`machwf2r::R`](R) reader structure"]
impl crate::Readable for MACHWF2R_SPEC {}
#[doc = "`reset()` method sets MACHWF2R to value 0x4100_0000"]
impl crate::Resettable for MACHWF2R_SPEC {
    const RESET_VALUE: Self::Ux = 0x4100_0000;
}
|
use actix_web::{web, Error, HttpRequest, HttpResponse};
use bytes::BytesMut;
use chrono::Local;
use futures::StreamExt;
use std::fs::OpenOptions;
use std::io::prelude::*;
use std::path::Path;
use std::thread;
/// Collects an error-report POST body and appends it to a per-site,
/// per-day log file under `errors/`.
///
/// The raw query string is used verbatim as the site name.
pub async fn collect_post(
    req: HttpRequest,
    mut payload: web::Payload,
) -> Result<HttpResponse, Error> {
    let site = req.query_string().to_string();
    // payload is a stream of Bytes objects
    let mut body = BytesMut::new();
    while let Some(chunk) = payload.next().await {
        let chunk = chunk?;
        // limit max size of in-memory payload
        // if (body.len() + chunk.len()) > MAX_SIZE {
        //     return Err(error::ErrorBadRequest("overflow"));
        // }
        // NOTE(review): the size limit above is disabled, so the body is
        // buffered in memory without bound — confirm this is acceptable.
        body.extend_from_slice(&chunk);
    }
    // Write the data asynchronously.
    // NOTE(review): this spawns one OS thread per request; a blocking
    // pool would be cheaper under load.
    thread::spawn(move || {
        let file_path = format!("errors/{}_{}.log", site, Local::today().format("%Y%m%d"));
        append_content(Path::new(&file_path), &body);
    });
    Ok(HttpResponse::Ok()
        .content_type("application/json")
        .body("{}"))
}
/// Appends `content`, followed by a ",\n" record separator, to the file at
/// `file_path`, creating the file first when it does not exist.
///
/// Failures are logged to stderr rather than panicking, because this runs on
/// a detached worker thread where a panic would be silently lost.
fn append_content(file_path: &Path, content: &[u8]) {
    // `append` already implies write access, so the original
    // `.read(true).write(true)` flags were unnecessary.
    let file = OpenOptions::new().create(true).append(true).open(file_path);
    match file {
        Ok(mut stream) => {
            let written = stream
                .write_all(content)
                .and_then(|_| stream.write_all(b",\n"));
            if let Err(err) = written {
                eprintln!("failed to write {:?}: {:?}", file_path, err);
            }
        }
        Err(err) => {
            // Errors go to stderr (the original used stdout).
            eprintln!("failed to open {:?}: {:?}", file_path, err);
        }
    }
}
|
use dotenv::dotenv;
use messaging::{Publisher, PublisherOptions};
use std::env;
fn main() {
dotenv().ok();
let broker_address = env::var("RABBITMQ_URL").expect("'RABBITMQ_URL' environment variable");
let publisher_options = PublisherOptions::new(broker_address,
"zoee.topic".to_string(),
"zoee.rust.test".to_string());
let publisher = Publisher::new(publisher_options);
let message = b"Testing publish";
publisher.publish(message.to_vec());
} |
use crate::format::problem::Objective::{MinimizeCost, MinimizeUnassignedJobs};
use crate::format::problem::*;
use crate::format::{CoordIndex, Location};
use crate::format_time;
use crate::helpers::ToLocation;
use std::sync::Arc;
use vrp_core::models::common::Profile as CoreProfile;
use vrp_core::models::problem::{ActivityCost, SimpleActivityCost, TransportCost};
/// Builds a `JobPlace` at `location` with unit duration and no time windows.
pub fn create_job_place(location: Vec<f64>) -> JobPlace {
    JobPlace { duration: 1., times: None, location: location.to_loc() }
}
/// Builds an untagged, unordered `JobTask` with unit demand at `location`.
pub fn create_task(location: Vec<f64>) -> JobTask {
    let places = vec![create_job_place(location)];
    JobTask { places, demand: Some(vec![1]), tag: None, order: None }
}
/// Builds an empty `Job` skeleton carrying only the given id; every task
/// list, skill set and value is left unset for the caller to fill in.
pub fn create_job(id: &str) -> Job {
    Job {
        id: String::from(id),
        pickups: None,
        deliveries: None,
        replacements: None,
        services: None,
        skills: None,
        value: None,
    }
}
/// Builds a job with a single delivery task at `location`.
pub fn create_delivery_job(id: &str, location: Vec<f64>) -> Job {
    // `location` is owned and used exactly once — the clone was redundant.
    Job { deliveries: Some(vec![create_task(location)]), ..create_job(id) }
}
pub fn create_delivery_job_with_order(id: &str, location: Vec<f64>, order: i32) -> Job {
Job {
deliveries: Some(vec![JobTask {
places: vec![create_job_place(location)],
demand: Some(vec![1]),
tag: None,
order: Some(order),
}]),
..create_job(id)
}
}
/// Builds a delivery job constrained by the given skill requirements.
pub fn create_delivery_job_with_skills(id: &str, location: Vec<f64>, skills: JobSkills) -> Job {
    let mut job = create_delivery_job(id, location);
    job.skills = Some(skills);
    job
}
/// Builds a delivery job whose single task has the given multi-dimensional demand.
pub fn create_delivery_job_with_demand(id: &str, location: Vec<f64>, demand: Vec<i32>) -> Job {
    let task = JobTask { demand: Some(demand), ..create_task(location) };
    Job { deliveries: Some(vec![task]), ..create_job(id) }
}
/// Builds a delivery job whose single place uses a custom service `duration`.
pub fn create_delivery_job_with_duration(id: &str, location: Vec<f64>, duration: f64) -> Job {
    let place = JobPlace { duration, ..create_job_place(location) };
    Job {
        deliveries: Some(vec![JobTask {
            places: vec![place],
            demand: Some(vec![1]),
            tag: None,
            order: None,
        }]),
        ..create_job(id)
    }
}
/// Builds a delivery job with the given time windows and service `duration`.
pub fn create_delivery_job_with_times(id: &str, location: Vec<f64>, times: Vec<(i32, i32)>, duration: f64) -> Job {
    Job {
        deliveries: Some(vec![JobTask {
            // Fixed: the borrow `&times` had been mangled into the HTML
            // entity "×" (`&times;`), which is not valid Rust.
            places: vec![JobPlace { duration, times: convert_times(&times), ..create_job_place(location) }],
            demand: Some(vec![1]),
            tag: None,
            order: None,
        }]),
        ..create_job(id)
    }
}
/// Builds a delivery job with an associated business `value`.
pub fn create_delivery_job_with_value(id: &str, location: Vec<f64>, value: f64) -> Job {
    // `location` is moved into the single task — the clone was redundant.
    Job { deliveries: Some(vec![create_task(location)]), value: Some(value), ..create_job(id) }
}
/// Builds a job with a single pickup task at `location`.
pub fn create_pickup_job(id: &str, location: Vec<f64>) -> Job {
    // `location` is owned and used exactly once — the clone was redundant.
    Job { pickups: Some(vec![create_task(location)]), ..create_job(id) }
}
/// Builds a pickup job whose single task has the given multi-dimensional demand.
pub fn create_pickup_job_with_demand(id: &str, location: Vec<f64>, demand: Vec<i32>) -> Job {
    let task = JobTask { demand: Some(demand), ..create_task(location) };
    Job { pickups: Some(vec![task]), ..create_job(id) }
}
/// Builds a job with a single replacement task at `location`.
pub fn create_replacement_job(id: &str, location: Vec<f64>) -> Job {
    // `location` is owned and used exactly once — the clone was redundant.
    Job { replacements: Some(vec![create_task(location)]), ..create_job(id) }
}
/// Builds a job with a single service task (no demand) at `location`.
pub fn create_service_job(id: &str, location: Vec<f64>) -> Job {
    // `location` is used once — no clone needed; services carry no demand.
    Job { services: Some(vec![JobTask { demand: None, ..create_task(location) }]), ..create_job(id) }
}
/// Builds a paired pickup ("p1") + delivery ("d1") job with unit demand.
pub fn create_pickup_delivery_job(id: &str, pickup_location: Vec<f64>, delivery_location: Vec<f64>) -> Job {
    // Both location vectors are moved into their tasks — clones were redundant.
    Job {
        pickups: Some(vec![JobTask { tag: Some("p1".to_string()), ..create_task(pickup_location) }]),
        deliveries: Some(vec![JobTask { tag: Some("d1".to_string()), ..create_task(delivery_location) }]),
        ..create_job(id)
    }
}
/// Builds a pickup ("p1") + delivery ("d1") pair sharing `demand`, where each
/// leg is described as `(location, duration, time_windows)`.
pub fn create_pickup_delivery_job_with_params(
    id: &str,
    demand: Vec<i32>,
    pickup: (Vec<f64>, f64, Vec<(i32, i32)>),
    delivery: (Vec<f64>, f64, Vec<(i32, i32)>),
) -> Job {
    // Destructure first so the owned parts can be moved instead of cloned.
    let (p_loc, p_duration, p_times) = pickup;
    let (d_loc, d_duration, d_times) = delivery;
    Job {
        pickups: Some(vec![JobTask {
            places: vec![JobPlace {
                duration: p_duration,
                times: convert_times(&p_times),
                ..create_job_place(p_loc)
            }],
            // `demand` is shared by both legs: clone once, move the second use.
            demand: Some(demand.clone()),
            tag: Some("p1".to_string()),
            order: None,
        }]),
        deliveries: Some(vec![JobTask {
            places: vec![JobPlace {
                duration: d_duration,
                times: convert_times(&d_times),
                ..create_job_place(d_loc)
            }],
            demand: Some(demand),
            tag: Some("d1".to_string()),
            order: None,
        }]),
        ..create_job(id)
    }
}
/// Builds a delivery job whose place refers to a matrix `index` rather than
/// explicit coordinates.
pub fn create_delivery_job_with_index(id: &str, index: usize) -> Job {
    let place = JobPlace { times: None, location: Location::Reference { index }, duration: 1. };
    Job {
        deliveries: Some(vec![JobTask { places: vec![place], demand: Some(vec![1]), tag: None, order: None }]),
        ..create_job(id)
    }
}
/// Builds a job with multiple pickups and deliveries; each entry is
/// `((x, y), duration, demand)`. Tags are generated as `p1, p2, …` / `d1, …`,
/// and an empty task list becomes `None`.
pub fn create_multi_job(
    id: &str,
    pickups: Vec<((f64, f64), f64, Vec<i32>)>,
    deliveries: Vec<((f64, f64), f64, Vec<i32>)>,
) -> Job {
    let create_tasks = |tasks: Vec<((f64, f64), f64, Vec<i32>)>, prefix: &str| {
        let tasks: Vec<_> = tasks
            .into_iter()
            .enumerate()
            .map(|(idx, ((x, y), duration, demand))| JobTask {
                places: vec![JobPlace { duration, ..create_job_place(vec![x, y]) }],
                demand: Some(demand),
                tag: Some(format!("{}{}", prefix, idx + 1)),
                order: None,
            })
            .collect();
        if tasks.is_empty() {
            None
        } else {
            Some(tasks)
        }
    };
    Job { pickups: create_tasks(pickups, "p"), deliveries: create_tasks(deliveries, "d"), ..create_job(id) }
}
/// Builds the default round-trip shift starting and ending at the origin.
pub fn create_default_vehicle_shift() -> VehicleShift {
    let origin = (0., 0.);
    create_default_vehicle_shift_with_locations(origin, origin)
}
/// Builds an open-ended shift: starts at the origin at t=0 with no end,
/// dispatch, breaks or reloads.
pub fn create_default_open_vehicle_shift() -> VehicleShift {
    let start = ShiftStart { earliest: format_time(0.), latest: None, location: vec![0., 0.].to_loc() };
    VehicleShift { start, end: None, dispatch: None, breaks: None, reloads: None }
}
/// Builds a shift that begins at `start` at t=0 and must end at `end` no
/// later than t=1000.
pub fn create_default_vehicle_shift_with_locations(start: (f64, f64), end: (f64, f64)) -> VehicleShift {
    let begin = ShiftStart { earliest: format_time(0.), latest: None, location: vec![start.0, start.1].to_loc() };
    let finish = ShiftEnd {
        earliest: None,
        latest: format_time(1000.).to_string(),
        location: vec![end.0, end.1].to_loc(),
    };
    VehicleShift { start: begin, end: Some(finish), dispatch: None, breaks: None, reloads: None }
}
/// Builds default vehicle costs: fixed cost 10, unit distance and time rates.
pub fn create_default_vehicle_costs() -> VehicleCosts {
    VehicleCosts { time: 1., distance: 1., fixed: Some(10.) }
}
pub fn create_default_vehicle_profile() -> VehicleProfile {
VehicleProfile { matrix: "car".to_string(), scale: None }
}
/// Builds a routing profile referring to the matrix with the given `name`.
pub fn create_vehicle_profile_with_name(name: &str) -> VehicleProfile {
    VehicleProfile { scale: None, matrix: name.to_string() }
}
/// Builds the default vehicle type, identified as "my_vehicle".
pub fn create_default_vehicle_type() -> VehicleType {
    create_default_vehicle("my_vehicle")
}
/// Builds a vehicle type with the given id and a default capacity of 10.
pub fn create_default_vehicle(id: &str) -> VehicleType {
    create_vehicle_with_capacity(id, vec![10])
}
/// Builds a vehicle type containing one concrete vehicle (`<id>_1`) with
/// default costs, profile and shift, and the supplied `capacity`.
pub fn create_vehicle_with_capacity(id: &str, capacity: Vec<i32>) -> VehicleType {
    let vehicle_ids = vec![format!("{}_1", id)];
    VehicleType {
        type_id: id.to_string(),
        vehicle_ids,
        profile: create_default_vehicle_profile(),
        costs: create_default_vehicle_costs(),
        shifts: vec![create_default_vehicle_shift()],
        capacity,
        skills: None,
        limits: None,
    }
}
/// Builds the matrix profile list containing only the default "car" profile.
pub fn create_default_matrix_profiles() -> Vec<MatrixProfile> {
    let car = MatrixProfile { name: "car".to_string(), speed: None };
    vec![car]
}
/// Builds a two-level objective: minimize unassigned jobs first, then cost.
pub fn create_min_jobs_cost_objective() -> Option<Vec<Vec<Objective>>> {
    let primary = vec![MinimizeUnassignedJobs { breaks: None }];
    let secondary = vec![MinimizeCost];
    Some(vec![primary, secondary])
}
/// Builds a problem with no jobs, no vehicles and default objectives.
pub fn create_empty_problem() -> Problem {
    let plan = Plan { jobs: vec![], relations: None };
    let fleet = Fleet { vehicles: vec![], profiles: vec![] };
    Problem { plan, fleet, objectives: None }
}
/// Returns stub cost models for tests: a transport model that reports a
/// constant 42 for every duration/distance query, plus the default activity
/// cost model.
pub fn get_costs() -> (Arc<dyn TransportCost + Send + Sync>, Arc<dyn ActivityCost + Send + Sync>) {
    // Local stub so tests never depend on a real routing matrix.
    struct ExampleTransportCost {}
    impl TransportCost for ExampleTransportCost {
        fn duration(&self, _: &CoreProfile, _: usize, _: usize, _: f64) -> f64 {
            42.
        }
        fn distance(&self, _: &CoreProfile, _: usize, _: usize, _: f64) -> f64 {
            42.
        }
    }
    (Arc::new(ExampleTransportCost {}), Arc::new(SimpleActivityCost::default()))
}
/// Builds a square routing matrix from row-major `data`, using the same
/// values for both travel times and distances.
///
/// # Panics
/// Panics if `data.len()` is not a perfect square.
pub fn create_matrix(data: Vec<i64>) -> Matrix {
    let size = (data.len() as f64).sqrt() as i32;
    assert_eq!((size * size) as usize, data.len());
    Matrix {
        profile: Some("car".to_owned()),
        timestamp: None,
        travel_times: data.clone(),
        // The last use can take ownership: one clone instead of two.
        distances: data,
        error_codes: None,
    }
}
/// Builds a matrix whose entries are the rounded euclidean distances between
/// every pair of unique locations used by `problem` (same value serves as
/// both travel time and distance).
pub fn create_matrix_from_problem(problem: &Problem) -> Matrix {
    let unique = CoordIndex::new(problem).unique();
    let mut data = Vec::with_capacity(unique.len() * unique.len());
    for origin in unique.iter() {
        let (o_lat, o_lng) = origin.to_lat_lng();
        for destination in unique.iter() {
            let (d_lat, d_lng) = destination.to_lat_lng();
            let distance = ((o_lat - d_lat).powf(2.) + (o_lng - d_lng).powf(2.)).sqrt();
            data.push(distance.round() as i64);
        }
    }
    create_matrix(data)
}
/// Converts a list of string slices into owned `String`s.
pub fn to_strings(data: Vec<&str>) -> Vec<String> {
    data.into_iter().map(String::from).collect()
}
/// Builds a `JobSkills` requirement matching only vehicles that have all of
/// the given skills.
pub fn all_of_skills(skills: Vec<String>) -> JobSkills {
    JobSkills { one_of: None, none_of: None, all_of: Some(skills) }
}
/// Converts `(start, end)` second pairs into formatted time-window strings;
/// an empty list becomes `None` so the field can be omitted entirely.
///
/// Takes a slice instead of `&Vec<_>` (clippy::ptr_arg); existing `&times`
/// call sites coerce automatically.
fn convert_times(times: &[(i32, i32)]) -> Option<Vec<Vec<String>>> {
    if times.is_empty() {
        None
    } else {
        Some(times.iter().map(|tw| vec![format_time(tw.0 as f64), format_time(tw.1 as f64)]).collect())
    }
}
|
use super::winit;
use crate::input;
/// Window events surfaced from the underlying winit event loop, normalized
/// for consumption by the rest of the crate.
pub(crate) enum Event {
    /// The user asked to close the window.
    CloseRequested,
    /// The window was resized to the given logical size.
    Resized(winit::dpi::LogicalSize),
    /// Keyboard/mouse/focus activity translated into the crate's input model.
    Input(input::Event),
    /// The cursor moved to the given logical position.
    CursorMoved(winit::dpi::LogicalPosition),
}
/// Thin wrapper owning the platform event queue (`winit::EventsLoop`).
pub struct EventLoop(winit::EventsLoop);
impl EventLoop {
pub fn new() -> Self {
Self(winit::EventsLoop::new())
}
pub(super) fn raw(&self) -> &winit::EventsLoop {
&self.0
}
pub(crate) fn poll<F>(&mut self, mut f: F)
where
F: FnMut(Event),
{
self.0.poll_events(|event| {
match event {
winit::Event::WindowEvent { event, .. } => match event {
winit::WindowEvent::KeyboardInput {
input:
winit::KeyboardInput {
state,
virtual_keycode: Some(key_code),
..
},
..
} => {
f(Event::Input(input::Event::KeyboardInput {
state,
key_code,
}));
}
winit::WindowEvent::MouseInput {
state, button, ..
} => f(Event::Input(input::Event::MouseInput {
state,
button,
})),
winit::WindowEvent::MouseWheel { delta, .. } => match delta
{
winit::MouseScrollDelta::LineDelta(x, y) => {
f(Event::Input(input::Event::MouseWheel {
delta_x: x,
delta_y: y,
}))
}
_ => {}
},
winit::WindowEvent::ReceivedCharacter(codepoint) => {
f(Event::Input(input::Event::TextInput {
character: codepoint,
}))
}
winit::WindowEvent::CursorMoved { position, .. } => {
f(Event::CursorMoved(position))
}
winit::WindowEvent::CursorEntered { .. } => {
f(Event::Input(input::Event::CursorEntered))
}
winit::WindowEvent::CursorLeft { .. } => {
f(Event::Input(input::Event::CursorLeft))
}
winit::WindowEvent::CloseRequested { .. } => {
f(Event::CloseRequested)
}
winit::WindowEvent::Resized(logical_size) => {
f(Event::Resized(logical_size))
}
winit::WindowEvent::Focused(focus) => {
f(Event::Input(if focus == true {
input::Event::WindowFocused
} else {
input::Event::WindowUnfocused
}))
}
winit::WindowEvent::Moved(
winit::dpi::LogicalPosition { x, y },
) => f(Event::Input(input::Event::WindowMoved {
x: x as f32,
y: y as f32,
})),
_ => {}
},
_ => (),
};
});
}
}
|
use crate::{ChannelReceiver, ChannelSender};
use crate::message::SlackPayload;
use tokio::task::JoinHandle;
/// Provides a background worker task that sends the messages generated by the
/// layer.
///
/// Runs until a [`WorkerMessage::Shutdown`] is received or the channel closes.
pub(crate) async fn worker(mut rx: ChannelReceiver) {
    let client = reqwest::Client::new();
    while let Some(message) = rx.recv().await {
        match message {
            WorkerMessage::Data(payload) => {
                let webhook_url = payload.webhook_url().to_string();
                // Fixed message: this call SERIALIZES the payload; the
                // original expect text said "deserialize".
                let payload =
                    serde_json::to_string(&payload).expect("failed to serialize slack payload, this is a bug");
                // Delivery failures are logged but never abort the worker,
                // so one bad webhook cannot stop later messages.
                match client.post(webhook_url).body(payload).send().await {
                    Ok(res) => {
                        tracing::debug!(?res);
                    }
                    Err(e) => {
                        tracing::error!(?e);
                    }
                };
            }
            WorkerMessage::Shutdown => {
                break;
            }
        }
    }
}
/// This worker manages a background async task that schedules the network requests to send traces
/// to the Slack on the running tokio runtime.
///
/// Ensure to invoke `.startup()` before, and `.teardown()` after, your application code runs. This
/// is required to ensure proper initialization and shutdown.
///
/// `tracing-layer-slack` synchronously generates payloads to send to the Slack API using the
/// tracing events from the global subscriber. However, all network requests are offloaded onto
/// an unbuffered channel and processed by a provided future acting as an asynchronous worker.
pub struct SlackBackgroundWorker {
    // Sender half used to push payloads and the shutdown signal to the worker.
    pub(crate) sender: ChannelSender,
    // Handle to the spawned worker task; awaited during shutdown.
    pub(crate) handle: JoinHandle<()>,
}
impl SlackBackgroundWorker {
    /// Initiate the worker's shutdown sequence.
    ///
    /// Without invoking`.teardown()`, your application may exit before all Slack messages can be
    /// sent.
    pub async fn shutdown(self) {
        // NOTE(review): both unwraps panic if the worker task has already
        // exited or panicked — presumably acceptable at teardown; confirm.
        self.sender.send(WorkerMessage::Shutdown).unwrap();
        self.handle.await.unwrap();
    }
}
/// Control messages accepted by the background worker channel.
#[derive(Debug)]
pub(crate) enum WorkerMessage {
    /// A payload to POST to its Slack webhook URL.
    Data(SlackPayload),
    /// Stop the worker loop.
    Shutdown,
}
|
//! Read Query Builder returned by InfluxDbQuery::raw_read_query
//!
//! Can only be instantiated by using InfluxDbQuery::raw_read_query
use crate::error::InfluxDbError;
use crate::query::{InfluxDbQuery, QueryType, ValidQuery};
pub struct InfluxDbReadQuery {
    // Raw query strings; joined with ';' when the query is built.
    queries: Vec<String>,
}
impl InfluxDbReadQuery {
    /// Creates a new [`InfluxDbReadQuery`] seeded with a single query string.
    pub fn new<S>(query: S) -> Self
    where
        S: ToString,
    {
        let queries = vec![query.to_string()];
        InfluxDbReadQuery { queries }
    }
    /// Adds another query to the [`InfluxDbReadQuery`], returning `self`
    /// for chaining.
    #[allow(clippy::should_implement_trait)]
    pub fn add<S>(mut self, query: S) -> Self
    where
        S: ToString,
    {
        let rendered = query.to_string();
        self.queries.push(rendered);
        self
    }
}
impl InfluxDbQuery for InfluxDbReadQuery {
    // Joins all stored queries with ';' into a single query string; building
    // a read query cannot currently fail.
    fn build(&self) -> Result<ValidQuery, InfluxDbError> {
        Ok(ValidQuery(self.queries.join(";")))
    }
    fn get_type(&self) -> QueryType {
        QueryType::ReadQuery
    }
}
#[cfg(test)]
mod tests {
    use crate::query::{InfluxDbQuery, QueryType};
    // A single-query builder renders just that query.
    #[test]
    fn test_read_builder_single_query() {
        let query = InfluxDbQuery::raw_read_query("SELECT * FROM aachen").build();
        assert_eq!(query.unwrap(), "SELECT * FROM aachen");
    }
    // Added queries are joined with ';' in insertion order.
    #[test]
    fn test_read_builder_multi_query() {
        let query = InfluxDbQuery::raw_read_query("SELECT * FROM aachen")
            .add("SELECT * FROM cologne")
            .build();
        assert_eq!(query.unwrap(), "SELECT * FROM aachen;SELECT * FROM cologne");
    }
    // Read queries must report the ReadQuery type.
    #[test]
    fn test_correct_query_type() {
        let query = InfluxDbQuery::raw_read_query("SELECT * FROM aachen");
        assert_eq!(query.get_type(), QueryType::ReadQuery);
    }
}
|
use std::prelude::v1::*;
mod aggregate_context;
mod blend_context;
mod filter_context;
pub use aggregate_context::AggregateContext;
pub use blend_context::BlendContext;
pub use filter_context::FilterContext;
|
use std::fmt::{self, Display};
use hyper::header;
use hyper;
/// Typed representation of the `X-API-Key` HTTP request header.
#[derive(Clone)]
pub struct XApiKey {
    // The API key value carried by the header.
    key: String,
}
impl XApiKey {
    /// Constructs an `XApiKey` header carrying the given key value.
    pub fn key(k: String) -> XApiKey {
        XApiKey { key: k }
    }
}
impl Display for XApiKey {
    // Renders only the key value, which is exactly the header's wire format.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.key)
    }
}
impl header::Header for XApiKey {
    // Self = XApiKey
    fn header_name() -> &'static str {
        "X-API-Key"
    }
    // NOTE(review): the raw header value is discarded and an empty key is
    // always returned — incoming-header parsing appears unimplemented;
    // confirm this is intentional (e.g. the header is only ever sent).
    fn parse_header(_: &header::Raw) -> hyper::Result<Self> {
        Ok(XApiKey { key: String::new() })
    }
    fn fmt_header(&self, f: &mut header::Formatter) -> fmt::Result {
        f.fmt_line(self)
    }
}
|
impl Solution {
pub fn search_matrix(matrix: Vec<Vec<i32>>, target: i32) -> bool {
let (n,m) = (matrix.len(),matrix[0].len());
for i in 0..n{
for j in 0..m{
if matrix[i][j] == target{
return true;
}else if matrix[i][j] > target{
return false;
}
}
}
false
}
} |
use clap::ArgMatches;
use std::fs::File;
use std::io::{self, Write};
use std::str::FromStr;
use svm_codec::api::json;
/// Builds the clap definition for the `tx` subcommand: a required input JSON
/// file, a required binary output file, and the transaction type to encode.
pub fn clap_app_tx() -> clap::App<'static, 'static> {
    use clap::*;
    SubCommand::with_name("tx")
        .about("Low-level API to craft transactions from JSON specification files")
        .arg(
            Arg::with_name("input")
                .help("Reads JSON-formatted transactions from this file")
                .short("i")
                .long("input")
                .required(true)
                .takes_value(true),
        )
        .arg(
            Arg::with_name("output")
                .help("Writes the binary output to this file")
                .short("o")
                .long("output")
                .required(true)
                .takes_value(true),
        )
        .arg(
            Arg::with_name("tx-type")
                .help("The type of input transaction")
                .long("tx-type")
                .required(true)
                .takes_value(true)
                // Values are restricted here, so `subcmd_tx` can treat any
                // other string as unreachable.
                .possible_values(&["spawn", "deploy", "call"]),
        )
}
/// Handles the `tx` subcommand: reads a JSON transaction spec, encodes it to
/// binary according to `--tx-type`, and writes the result to the output file.
///
/// # Errors
/// Returns an error when the input file cannot be read, the JSON is invalid,
/// or the output file cannot be written.
pub fn subcmd_tx(args: &ArgMatches) -> anyhow::Result<()> {
    // The unwraps below are safe: clap marks these args `required(true)` and
    // restricts `tx-type` to the listed values.
    let action = match args.value_of("tx-type").unwrap() {
        "spawn" => Action::Spawn,
        "call" => Action::Call,
        "deploy" => Action::Deploy,
        _ => unreachable!(),
    };
    let input_path = args.value_of("input").unwrap();
    let input_s = std::fs::read_to_string(input_path)?;
    // Propagate malformed-JSON errors to the caller instead of panicking,
    // since the input file contents are user-supplied.
    let bytes = match action {
        Action::Call => json::encode_call_raw(&input_s)
            .map_err(|e| anyhow::anyhow!("invalid JSON: {:?}", e))?,
        Action::Deploy => json::deploy_template(&input_s)
            .map_err(|e| anyhow::anyhow!("invalid JSON: {:?}", e))?,
        Action::Spawn => json::encode_spawn(&input_s)
            .map_err(|e| anyhow::anyhow!("invalid JSON: {:?}", e))?,
    };
    let mut file = File::create(args.value_of("output").unwrap())?;
    file.write_all(&bytes)?;
    Ok(())
}
/// The transaction kind selected via `--tx-type`.
enum Action {
    Spawn,
    Deploy,
    Call,
}
|
use gdnative::prelude::*;
/// Game-wide state node shared across scenes.
#[derive(NativeClass)]
#[inherit(Node)]
pub struct Globals {
    // Total enemies killed in the current run.
    kills: u16,
    // Index of the stage currently being played.
    current_stage: u16,
}
#[methods]
impl Globals {
    /// Godot-invoked constructor; starts with zeroed counters.
    fn new(_owner: &Node) -> Self {
        Globals {
            kills: 0,
            // NOTE(review): `new` starts at stage 0 while `reset` sets stage
            // 1 — confirm whether this asymmetry is intentional.
            current_stage: 0
        }
    }
    #[export]
    fn _ready(&self, _owner: &Node) {
        godot_print!("hello, world.");
    }
    /// Clears the kill counter and rewinds to the first stage.
    pub fn reset(&mut self) {
        self.kills = 0;
        self.current_stage = 1;
    }
    /// Returns the current kill count.
    pub fn kills(&self, _owner: &Node) -> u16 {
        self.kills
    }
    /// Bumps the kill counter by one.
    pub fn increment_kills(&mut self) {
        self.kills += 1;
    }
}
/// Returns true when `pattern` and the space-separated words of `s` form a
/// bijection: each pattern character maps to exactly one word and each word
/// maps back to exactly one character.
///
/// Single-pass rewrite: the original cloned the `Option` lookup (a no-op)
/// and needed a second pass over a `HashSet` to verify the mapping was
/// injective; tracking both directions at once removes that pass.
pub fn word_pattern(pattern: String, s: String) -> bool {
    use std::collections::HashMap;
    let words: Vec<&str> = s.split(' ').collect();
    // Note: `pattern.len()` is a byte count, matching the original's
    // assumption that pattern characters are ASCII.
    if pattern.len() != words.len() {
        return false;
    }
    let mut char_to_word: HashMap<char, &str> = HashMap::new();
    let mut word_to_char: HashMap<&str, char> = HashMap::new();
    for (c, word) in pattern.chars().zip(words) {
        // `entry` inserts on first sight and returns the existing mapping
        // otherwise; any disagreement in either direction breaks the bijection.
        if *char_to_word.entry(c).or_insert(word) != word {
            return false;
        }
        if *word_to_char.entry(word).or_insert(c) != c {
            return false;
        }
    }
    true
}
use std::rc::Rc;
use std::cell::RefCell;
use std::borrow::Borrow;
use gtk::prelude::*;
use crate::app::*;
/// Main application window, built from the Glade UI description.
pub struct AppWindow {
    // Top-level GTK window; widget handles are captured by signal closures.
    window: gtk::Window,
}
impl AppWindow {
    /// Builds the window from the embedded Glade file, seeds the entry
    /// completion lists from the recently-used values in `state`, and wires
    /// the open/save button handlers (which capture clones of `state`).
    pub fn new(state: Rc<RefCell<AppState>>, app: App) -> AppWindow {
        let glade_str = include_str!("app_window.glade");
        let builder = gtk::Builder::from_string(glade_str);
        let window: gtk::Window = builder.object("app_window").unwrap();
        let camera_entry: gtk::Entry = builder.object("camera_entry").unwrap();
        let film_entry: gtk::Entry = builder.object("film_entry").unwrap();
        let iso_entry: gtk::Entry = builder.object("iso_entry").unwrap();
        let author_entry: gtk::Entry = builder.object("author_entry").unwrap();
        let comment_buffer: gtk::TextBuffer = builder.object("comment_buffer").unwrap();
        // NOTE(review): `file_index_checkbox` is looked up but never used in
        // this constructor — confirm whether it should affect save behavior.
        let file_index_checkbox: gtk::CheckButton = builder.object("file_index_checkbox").unwrap();
        let open_button: gtk::Button = builder.object("open_button").unwrap();
        let save_button: gtk::Button = builder.object("save_button").unwrap();
        let files_list_store: gtk::ListStore = builder.object("files_list_store").unwrap();
        let camera_completion_list: gtk::ListStore = builder.object("camera_completion_list").unwrap();
        let film_completion_list: gtk::ListStore = builder.object("film_completion_list").unwrap();
        let iso_completion_list: gtk::ListStore = builder.object("iso_completion_list").unwrap();
        let author_completion_list: gtk::ListStore = builder.object("author_completion_list").unwrap();
        // Populate each completion list from the recently-used values.
        for camera in RefCell::borrow(&state).recent_cameras.iter() {
            camera_completion_list.set(&camera_completion_list.append(), &[(0, &camera)]);
        }
        for film in RefCell::borrow(&state).recent_films.iter() {
            film_completion_list.set(&film_completion_list.append(), &[(0, &film)]);
        }
        for iso in RefCell::borrow(&state).recent_isos.iter() {
            iso_completion_list.set(&iso_completion_list.append(), &[(0, &iso)]);
        }
        for author in RefCell::borrow(&state).recent_authors.iter() {
            author_completion_list.set(&author_completion_list.append(), &[(0, &author)]);
        }
        // Clones moved into the "open" closure below.
        let window_clone = window.clone();
        let state_clone = Rc::clone(&state);
        let files_list_store_clone = files_list_store.clone();
        open_button.connect_clicked(move |_| {
            // Let the user pick one or more JPEG files; selections replace
            // the current file list in both the state and the list store.
            let dialog = gtk::FileChooserDialog::with_buttons::<gtk::Window>(Some("Select files"),
                Some(window_clone.borrow()),
                gtk::FileChooserAction::Open,
                &[("_Cancel", gtk::ResponseType::Cancel), ("_Open", gtk::ResponseType::Accept)]);
            let filter = gtk::FileFilter::new();
            filter.add_pattern("*.jpg");
            filter.add_pattern("*.JPG");
            filter.add_pattern("*.jpeg");
            filter.add_pattern("*.JPEG");
            dialog.set_filter(&filter);
            dialog.set_select_multiple(true);
            if dialog.run() == gtk::ResponseType::Accept {
                files_list_store_clone.clear();
                let files = dialog.filenames();
                for pathbuf in files.iter() {
                    state_clone.borrow_mut().files.push(pathbuf.clone().to_path_buf());
                    files_list_store_clone.set(&files_list_store_clone.append(), &[(0,
                        &pathbuf.clone().file_name().unwrap().to_str())]);
                }
                unsafe {
                    dialog.destroy();
                }
            } else {
                unsafe {
                    dialog.destroy();
                }
            }
        });
        let state_clone = Rc::clone(&state);
        save_button.connect_clicked(move |_| {
            // Copy the current widget contents into the shared state, then
            // delegate the actual write to the application logic.
            let state = &mut state_clone.borrow_mut();
            state.camera = Some(camera_entry.text().as_str().to_string());
            state.film = Some(film_entry.text().as_str().to_string());
            state.iso = Some(iso_entry.text().as_str().to_string());
            state.author = Some(author_entry.text().as_str().to_string());
            state.comment = comment_buffer.text(&comment_buffer.start_iter(),
                &comment_buffer.end_iter(), false).map(|s| { s.as_str().to_string() });
            // NOTE(review): the save result is ignored — failures are
            // invisible to the user; consider surfacing them.
            let result = app.save(state);
        });
        AppWindow {
            window
        }
    }
    /// Sets the application name/wmclass, hooks window close to quit the GTK
    /// main loop, and shows the window with all its children.
    pub fn show(&self) {
        glib::set_application_name("Film Taggy");
        self.window.set_wmclass("Film Taggy", "Film Taggy");
        self.window.connect_delete_event(|_, _| { gtk::main_quit(); Inhibit(false) });
        self.window.show_all();
    }
}
|
use std::io::{Read, Write};
#[derive(Debug, PartialEq)] // to be able to test somewhat sensibly
/// A single Brainfuck instruction; `Loop` holds the bracketed sub-program.
pub enum Command {
    /// `+` — increment the current cell.
    Increment,
    /// `-` — decrement the current cell.
    Decrement,
    /// `,` — read one byte of input into the current cell.
    Input,
    /// `.` — write the current cell to the output.
    Output,
    /// `<` — move the data pointer one cell left.
    Left,
    /// `>` — move the data pointer one cell right.
    Right,
    /// `[...]` — run the sub-program while the current cell is non-zero.
    Loop(Program),
}
/// A parsed Brainfuck program: an ordered list of commands.
#[derive(Debug, PartialEq)]
pub struct Program {
    commands: Vec<Command>,
}
impl Program {
    /// Wraps a command list into a runnable program.
    pub fn new(commands: Vec<Command>) -> Program {
        Program { commands }
    }
    /// Runs the program against a fresh tape, reading from `input` and
    /// writing to `output`.
    pub fn run<R: Read, W: Write>(&self, input: &mut R, output: &mut W) {
        let mut array = Array::new();
        self.run_internal(&mut array, input, output);
    }
    // Executes every command once against the shared tape.
    fn run_internal<R: Read, W: Write>(&self, array: &mut Array, input: &mut R, output: &mut W) {
        for command in &self.commands {
            match *command {
                Command::Increment => {
                    // Brainfuck cells wrap on overflow (255 + 1 == 0); the
                    // original `value + 1` panicked in debug builds.
                    let value = array.get_value();
                    array.set_value(value.wrapping_add(1));
                }
                Command::Decrement => {
                    let value = array.get_value();
                    array.set_value(value.wrapping_sub(1));
                }
                Command::Output => {
                    // `write_all` guarantees the byte is written; plain
                    // `write` may legally write zero bytes.
                    output
                        .write_all(&[array.get_value(); 1])
                        .expect("Failed to write");
                }
                Command::Input => {
                    // On EOF the buffer stays zeroed, so the cell becomes 0.
                    let mut i = [0; 1];
                    input.read(&mut i).expect("Failed to read");
                    array.set_value(i[0]);
                }
                Command::Left => {
                    array.left();
                }
                Command::Right => {
                    array.right();
                }
                Command::Loop(ref program) => {
                    program.loop_internal(array, input, output);
                }
            }
        }
    }
    // Runs the loop body repeatedly until the current cell reads zero
    // (including zero iterations when it is already zero).
    fn loop_internal<R: Read, W: Write>(&self, array: &mut Array, input: &mut R, output: &mut W) {
        while array.get_value() != 0 {
            self.run_internal(array, input, output);
        }
    }
}
/// The Brainfuck tape: a growable array of byte cells plus the data pointer.
struct Array {
    data: Vec<u8>,
    data_pointer: usize,
}
impl Array {
    /// Creates a tape holding one zeroed cell; capacity is preallocated so
    /// typical programs grow without reallocating.
    fn new() -> Array {
        let mut data = Vec::with_capacity(30_000);
        data.push(0);
        Array { data, data_pointer: 0 }
    }
    /// Moves the pointer right, appending a zeroed cell when it would step
    /// past the current end of the tape.
    fn right(&mut self) {
        if self.data_pointer + 1 == self.data.len() {
            self.data.push(0);
        }
        self.data_pointer += 1;
    }
    /// Moves the pointer left; at the leftmost cell this is a no-op.
    fn left(&mut self) {
        self.data_pointer = self.data_pointer.saturating_sub(1);
    }
    /// Reads the cell under the pointer.
    fn get_value(&self) -> u8 {
        self.data[self.data_pointer]
    }
    /// Writes the cell under the pointer.
    fn set_value(&mut self, value: u8) {
        self.data[self.data_pointer] = value;
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // `++.` — two increments leave 2 in cell 0, which is then printed.
    #[test]
    fn test_increment() {
        let commands = vec![Command::Increment, Command::Increment, Command::Output];
        let program = Program::new(commands);
        let mut input = "".as_bytes();
        let mut output = Vec::new();
        program.run(&mut input, &mut output);
        assert_eq!(1, output.len());
        assert_eq!(2, output[0]);
    }
    // `+-.` — increment then decrement cancels back to 0.
    #[test]
    fn test_decrement() {
        let commands = vec![Command::Increment, Command::Decrement, Command::Output];
        let program = Program::new(commands);
        let mut input = "".as_bytes();
        let mut output = Vec::new();
        program.run(&mut input, &mut output);
        assert_eq!(1, output.len());
        assert_eq!(0, output[0]);
    }
    // `,.` — echoes one byte of input straight to output.
    #[test]
    fn test_input() {
        let commands = vec![Command::Input, Command::Output];
        let program = Program::new(commands);
        let mut input = "q".as_bytes();
        let mut output = Vec::new();
        program.run(&mut input, &mut output);
        assert_eq!(1, output.len());
        assert_eq!('q', output[0] as char);
    }
    // Moves across three cells, incrementing as it goes, then walks back
    // printing: verifies pointer movement and per-cell isolation.
    #[test]
    fn test_left_and_right() {
        let commands = vec![
            Command::Right,
            Command::Increment,
            Command::Right,
            Command::Increment,
            Command::Output,
            Command::Left,
            Command::Increment,
            Command::Output,
            Command::Left,
            Command::Left,
            Command::Output,
        ];
        let program = Program::new(commands);
        let mut input = "q".as_bytes();
        let mut output = Vec::new();
        program.run(&mut input, &mut output);
        assert_eq!(3, output.len());
        assert_eq!(1, output[0]);
        assert_eq!(2, output[1]);
        assert_eq!(0, output[2]);
    }
    // A loop whose guard cell is already 0 must not execute its body.
    #[test]
    fn test_skip_loop() {
        let loop_commands = vec![Command::Increment];
        let loopp = Command::Loop(Program::new(loop_commands));
        let commands = vec![loopp, Command::Output];
        let program = Program::new(commands);
        let mut input = "".as_bytes();
        let mut output = Vec::new();
        program.run(&mut input, &mut output);
        assert_eq!(1, output.len());
        assert_eq!(0, output[0]);
    }
    // The classic move-value idiom: drains cell 0 into cell 1, expecting 4.
    #[test]
    fn test_loop() {
        let loop_commands = vec![Command::Decrement, Command::Right, Command::Increment, Command::Left];
        let loopp = Command::Loop(Program::new(loop_commands));
        // ++>++<[->+<]>.
        let commands = vec![Command::Increment, Command::Increment, Command::Right, Command::Increment, Command::Increment, Command::Left, loopp, Command::Right, Command::Output];
        let program = Program::new(commands);
        let mut input = "".as_bytes();
        let mut output = Vec::new();
        program.run(&mut input, &mut output);
        assert_eq!(1, output.len());
        assert_eq!(4, output[0]);
    }
}
|
use crate::measurement::Measurement;
use std::fmt;
use std::ops::{Add, Sub, Mul, Div, Neg};
/// A calculator value: a number constrained to be positive, a plain number,
/// or a measurement with its own arithmetic.
pub enum Value {
    PosNumber(f64),
    Number(f64),
    Measurement(Measurement)
}
impl fmt::Display for Value {
    /// Formats the wrapped number or measurement using its own display form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Value::PosNumber(x) | Value::Number(x) => write!(f, "{}", x),
            Value::Measurement(x) => write!(f, "{}", x),
        }
    }
}
impl Neg for Value {
    type Output = Self;
    // Negation preserves the variant; note a negated `PosNumber` stays a
    // `PosNumber` even though its payload is now negative.
    fn neg(self) -> Self {
        match self {
            Value::PosNumber(x) => Value::PosNumber(-x),
            Value::Number(x) => Value::Number(-x),
            Value::Measurement(x) => Value::Measurement(-x),
        }
    }
}
impl Add for Value {
    type Output = Self;
    /// Adds two values. Mixing a plain number with a measurement promotes
    /// the result to a measurement; only pos + pos stays a `PosNumber`.
    fn add(self, _rhs: Value) -> Value {
        match (self, _rhs) {
            (Value::PosNumber(x), Value::PosNumber(y)) => Value::PosNumber(x + y),
            (Value::PosNumber(x), Value::Number(y))
            | (Value::Number(x), Value::PosNumber(y))
            | (Value::Number(x), Value::Number(y)) => Value::Number(x + y),
            (Value::PosNumber(x), Value::Measurement(y))
            | (Value::Number(x), Value::Measurement(y)) => Value::Measurement(y + x),
            (Value::Measurement(x), Value::PosNumber(y))
            | (Value::Measurement(x), Value::Number(y)) => Value::Measurement(x + y),
            (Value::Measurement(x), Value::Measurement(y)) => Value::Measurement(x + y),
        }
    }
}
impl Sub for Value {
    type Output = Self;
    /// Subtracts `_rhs`. A measurement on the right is negated and then
    /// shifted by the left operand (`-y + x`), matching the Add promotion.
    fn sub(self, _rhs: Value) -> Value {
        match (self, _rhs) {
            (Value::PosNumber(x), Value::PosNumber(y)) => Value::PosNumber(x - y),
            (Value::PosNumber(x), Value::Number(y))
            | (Value::Number(x), Value::PosNumber(y))
            | (Value::Number(x), Value::Number(y)) => Value::Number(x - y),
            (Value::PosNumber(x), Value::Measurement(y))
            | (Value::Number(x), Value::Measurement(y)) => Value::Measurement(-y + x),
            (Value::Measurement(x), Value::PosNumber(y))
            | (Value::Measurement(x), Value::Number(y)) => Value::Measurement(x - y),
            (Value::Measurement(x), Value::Measurement(y)) => Value::Measurement(x - y),
        }
    }
}
impl Mul for Value {
    type Output = Self;
    /// Multiplies two values with the same promotion rules as addition:
    /// a measurement on either side yields a measurement.
    fn mul(self, _rhs: Value) -> Value {
        match (self, _rhs) {
            (Value::PosNumber(x), Value::PosNumber(y)) => Value::PosNumber(x * y),
            (Value::PosNumber(x), Value::Number(y))
            | (Value::Number(x), Value::PosNumber(y))
            | (Value::Number(x), Value::Number(y)) => Value::Number(x * y),
            (Value::PosNumber(x), Value::Measurement(y))
            | (Value::Number(x), Value::Measurement(y)) => Value::Measurement(y * x),
            (Value::Measurement(x), Value::PosNumber(y))
            | (Value::Measurement(x), Value::Number(y)) => Value::Measurement(x * y),
            (Value::Measurement(x), Value::Measurement(y)) => Value::Measurement(x * y),
        }
    }
}
impl Div for Value {
type Output = Self;
fn div(self, _rhs: Value) -> Value {
match self {
Value::PosNumber(x) => {
match _rhs {
Value::PosNumber(y) => Value::PosNumber(x/y),
Value::Number(y) => Value::Number(x/y),
Value::Measurement(y) => Value::Measurement(Measurement::new(x, 0.0) /y),
}
},
Value::Number(x) => {
match _rhs {
Value::PosNumber(y) => Value::Number(x/y),
Value::Number(y) => Value::Number(x/y),
Value::Measurement(y) => Value::Measurement(Measurement::new(x, 0.0)/y),
}
},
Value::Measurement(x) => {
match _rhs {
Value::PosNumber(y) => Value::Measurement(x/y),
Value::Number(y) => Value::Measurement(x/y),
Value::Measurement(y) => Value::Measurement(x/y),
}
}
}
}
} |
// This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Transaction Payment Module
//!
//! This module provides the basic logic needed to pay the absolute minimum amount needed for a
//! transaction to be included. This includes:
//! - _weight fee_: A fee proportional to amount of weight a transaction consumes.
//! - _length fee_: A fee proportional to the encoded length of the transaction.
//! - _tip_: An optional tip. Tip increases the priority of the transaction, giving it a higher
//! chance to be included by the transaction queue.
//!
//! Additionally, this module allows one to configure:
//! - The mapping between one unit of weight to one unit of fee via [`Trait::WeightToFee`].
//! - A means of updating the fee for the next block, via defining a multiplier, based on the
//! final state of the chain at the end of the previous block. This can be configured via
//! [`Trait::FeeMultiplierUpdate`]
#![cfg_attr(not(feature = "std"), no_std)]
use codec::{Decode, Encode};
use frame_support::{
decl_module, decl_storage,
dispatch::DispatchResult,
traits::{Currency, ExistenceRequirement, Get, Imbalance, OnUnbalanced, WithdrawReason},
weights::{
DispatchInfo, GetDispatchInfo, Pays, PostDispatchInfo, Weight, WeightToFeeCoefficient,
WeightToFeePolynomial,
},
};
use pallet_transaction_payment_rpc_runtime_api::RuntimeDispatchInfo;
use sp_runtime::{
traits::{
Convert, DispatchInfoOf, Dispatchable, PostDispatchInfoOf, SaturatedConversion, Saturating,
SignedExtension, Zero,
},
transaction_validity::{
InvalidTransaction, TransactionPriority, TransactionValidity, TransactionValidityError,
ValidTransaction,
},
FixedPointNumber, FixedPointOperand, FixedU128, Perquintill, RuntimeDebug,
};
use sp_std::prelude::*;
/// Fee multiplier.
pub type Multiplier = FixedU128;
// Balance type of the `Currency` configured for this runtime's accounts.
type BalanceOf<T> =
    <<T as Trait>::Currency as Currency<<T as frame_system::Trait>::AccountId>>::Balance;
// Negative imbalance (i.e. withdrawn funds) type of the configured `Currency`.
type NegativeImbalanceOf<T> =
    <<T as Trait>::Currency as Currency<<T as frame_system::Trait>::AccountId>>::NegativeImbalance;
/// A struct to update the weight multiplier per block. It implements `Convert<Multiplier,
/// Multiplier>`, meaning that it can convert the previous multiplier to the next one. This should
/// be called on `on_finalize` of a block, prior to potentially cleaning the weight data from the
/// system module.
///
/// given:
/// s = previous block weight
/// s'= ideal block weight
/// m = maximum block weight
/// diff = (s - s')/m
/// v = 0.00001
/// t1 = (v * diff)
/// t2 = (v * diff)^2 / 2
/// then:
/// next_multiplier = prev_multiplier * (1 + t1 + t2)
///
/// Where `(s', v)` must be given as the `Get` implementation of the `T` generic type. Moreover, `M`
/// must provide the minimum allowed value for the multiplier. Note that a runtime should ensure
/// with tests that the combination of this `M` and `V` is not such that the multiplier can drop to
/// zero and never recover.
///
/// note that `s'` is interpreted as a portion in the _normal transaction_ capacity of the block.
/// For example, given `s' == 0.25` and `AvailableBlockRatio = 0.75`, then the target fullness is
/// _0.25 of the normal capacity_ and _0.1875 of the entire block_.
///
/// This implementation implies the bound:
/// - `v ≤ p / (k * (s − s'))`
/// - or, solving for `p`: `p >= v * k * (s - s')`
///
/// where `p` is the amount of change over `k` blocks.
///
/// Hence:
/// - in a fully congested chain: `p >= v * k * (1 - s')`.
/// - in an empty chain: `p >= v * k * (-s')`.
///
/// For example, when all blocks are full and there are 28800 blocks per day (default in
/// `substrate-node`) and v == 0.00001, s' == 0.1875, we'd have:
///
/// p >= 0.00001 * 28800 * 0.8125
/// p >= 0.234
///
/// Meaning that fees can change by around ~23% per day, given extreme congestion.
///
/// More info can be found at:
/// https://w3f-research.readthedocs.io/en/latest/polkadot/Token%20Economics.html
pub struct TargetedFeeAdjustment<T, S, V, M>(sp_std::marker::PhantomData<(T, S, V, M)>);
/// Something that can convert the current multiplier to the next one.
///
/// Besides the conversion itself (via the `Convert` supertrait), implementors expose the
/// parameters of the adjustment so that `integrity_test` can sanity-check them.
pub trait MultiplierUpdate: Convert<Multiplier, Multiplier> {
    /// Minimum multiplier
    fn min() -> Multiplier;
    /// Target block saturation level
    fn target() -> Perquintill;
    /// Variability factor
    fn variability() -> Multiplier;
}
impl MultiplierUpdate for () {
fn min() -> Multiplier {
Default::default()
}
fn target() -> Perquintill {
Default::default()
}
fn variability() -> Multiplier {
Default::default()
}
}
// `TargetedFeeAdjustment` simply forwards its parameters from the `Get`
// implementations it is configured with: `M` = minimum, `S` = target
// saturation, `V` = variability.
impl<T, S, V, M> MultiplierUpdate for TargetedFeeAdjustment<T, S, V, M>
where
    T: frame_system::Trait,
    S: Get<Perquintill>,
    V: Get<Multiplier>,
    M: Get<Multiplier>,
{
    fn min() -> Multiplier {
        M::get()
    }
    fn target() -> Perquintill {
        S::get()
    }
    fn variability() -> Multiplier {
        V::get()
    }
}
impl<T, S, V, M> Convert<Multiplier, Multiplier> for TargetedFeeAdjustment<T, S, V, M>
where
    T: frame_system::Trait,
    S: Get<Perquintill>,
    V: Get<Multiplier>,
    M: Get<Multiplier>,
{
    /// Derive the next multiplier from `previous`, based on how full the last
    /// block's normal-class weight was relative to the configured target.
    fn convert(previous: Multiplier) -> Multiplier {
        // Defensive only: the stored multiplier should already be at least the
        // floor, but clamp anyway — any value below it would be stale and could
        // never recover.
        let floor = M::get();
        let current = previous.max(floor);
        // The fullness ratio is measured against the normal-class budget only.
        let weight_budget = <T as frame_system::Trait>::AvailableBlockRatio::get() *
            <T as frame_system::Trait>::MaximumBlockWeight::get();
        let consumed = <frame_system::Module<T>>::block_weight()
            .get(frame_support::weights::DispatchClass::Normal)
            .min(weight_budget);
        let target_ratio = S::get();
        let variability = V::get();
        let target = (target_ratio * weight_budget) as u128;
        let actual = consumed as u128;
        // Above target pushes fees up; below target pulls them down.
        let above_target = actual >= target;
        let gap = actual.max(target) - actual.min(target);
        // Defensive only: a test case assures that the maximum weight diff fits
        // in a `Multiplier` without any saturation.
        let diff = Multiplier::saturating_from_rational(gap, weight_budget.max(1));
        // Second-order term of the Taylor expansion: (v * diff)^2 / 2,
        // evaluated as (v^2 / 2) * diff^2.
        let diff_squared = diff.saturating_mul(diff);
        let half_v_squared = variability.saturating_mul(variability) /
            Multiplier::saturating_from_integer(2);
        let first_term = variability.saturating_mul(diff);
        let second_term = half_v_squared.saturating_mul(diff_squared);
        if above_target {
            let excess = first_term.saturating_add(second_term).saturating_mul(current);
            current.saturating_add(excess).max(floor)
        } else {
            // Defensive-only: first_term > second_term. Safe subtraction.
            let shortfall = first_term.saturating_sub(second_term).saturating_mul(current);
            current.saturating_sub(shortfall).max(floor)
        }
    }
}
/// Storage releases of the module.
///
/// Used by on-chain migration logic to detect which storage format is present.
#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)]
enum Releases {
    /// Original version of the module.
    V1Ancient,
    /// One that bumps the usage to FixedU128 from FixedI128.
    V2,
}
impl Default for Releases {
fn default() -> Self {
Releases::V1Ancient
}
}
/// Configuration trait of this module.
pub trait Trait: frame_system::Trait {
    /// The currency type in which fees will be paid.
    type Currency: Currency<Self::AccountId> + Send + Sync;
    /// Handler for the unbalanced reduction when taking transaction fees. This is either one or
    /// two separate imbalances, the first is the transaction fee paid, the second is the tip paid,
    /// if any.
    type OnTransactionPayment: OnUnbalanced<NegativeImbalanceOf<Self>>;
    /// The fee to be paid for making a transaction; the per-byte portion.
    type TransactionByteFee: Get<BalanceOf<Self>>;
    /// Convert a weight value into a deductible fee based on the currency type.
    type WeightToFee: WeightToFeePolynomial<Balance = BalanceOf<Self>>;
    /// Update the multiplier of the next block, based on the previous block's weight.
    type FeeMultiplierUpdate: MultiplierUpdate;
}
decl_storage! {
    trait Store for Module<T: Trait> as TransactionPayment {
        // The multiplier applied to the weight portion of the fee for the next
        // block. Updated in `on_finalize`; defaults to 1 (no adjustment).
        pub NextFeeMultiplier get(fn next_fee_multiplier): Multiplier = Multiplier::saturating_from_integer(1);
        // Storage-format version marker used by migrations; genesis builds start at `V2`.
        StorageVersion build(|_: &GenesisConfig| Releases::V2): Releases;
    }
}
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        /// The fee to be paid for making a transaction; the per-byte portion.
        const TransactionByteFee: BalanceOf<T> = T::TransactionByteFee::get();
        /// The polynomial that is applied in order to derive fee from weight.
        const WeightToFee: Vec<WeightToFeeCoefficient<BalanceOf<T>>> =
            T::WeightToFee::polynomial().to_vec();
        // Roll the fee multiplier forward at the end of every block, before the
        // system module potentially clears the block weight data.
        fn on_finalize() {
            NextFeeMultiplier::mutate(|fm| {
                *fm = T::FeeMultiplierUpdate::convert(*fm);
            });
        }
        // Static sanity checks on the runtime configuration; run by `try-runtime`
        // style tooling, never on-chain.
        fn integrity_test() {
            // given weight == u64, we build multipliers from `diff` of two weight values, which can
            // at most be MaximumBlockWeight. Make sure that this can fit in a multiplier without
            // loss.
            use sp_std::convert::TryInto;
            assert!(
                <Multiplier as sp_runtime::traits::Bounded>::max_value() >=
                    Multiplier::checked_from_integer(
                        <T as frame_system::Trait>::MaximumBlockWeight::get().try_into().unwrap()
                    ).unwrap(),
            );
            // This is the minimum value of the multiplier. Make sure that if we collapse to this
            // value, we can recover with a reasonable amount of traffic. For this test we assert
            // that if we collapse to minimum, the trend will be positive with a weight value
            // which is 1% more than the target.
            let min_value = T::FeeMultiplierUpdate::min();
            let mut target =
                T::FeeMultiplierUpdate::target() *
                (T::AvailableBlockRatio::get() * T::MaximumBlockWeight::get());
            // add 1 percent;
            let addition = target / 100;
            if addition == 0 {
                // this is most likely because in a test setup we set everything to ().
                return;
            }
            target += addition;
            sp_io::TestExternalities::new_empty().execute_with(|| {
                <frame_system::Module<T>>::set_block_limits(target, 0);
                let next = T::FeeMultiplierUpdate::convert(min_value);
                assert!(next > min_value, "The minimum bound of the multiplier is too low. When \
                    block saturation is more than target by 1% and multiplier is minimal then \
                    the multiplier doesn't increase."
                );
            })
        }
    }
}
impl<T: Trait> Module<T>
where
    BalanceOf<T>: FixedPointOperand,
{
    /// Query the data that we know about the fee of a given `call`.
    ///
    /// This module is not and cannot be aware of the internals of a signed extension, e.g. a
    /// tip: the extrinsic is treated as an opaque encoded value, and only its weight, encoded
    /// length, the runtime's extrinsic base weight and the current fee multiplier are taken
    /// into account.
    ///
    /// All dispatchables must be annotated with weight and will have some fee info. This
    /// function always returns.
    pub fn query_info<Extrinsic: GetDispatchInfo>(
        unchecked_extrinsic: Extrinsic,
        len: u32,
    ) -> RuntimeDispatchInfo<BalanceOf<T>>
    where
        T: Send + Sync,
        BalanceOf<T>: Send + Sync,
        T::Call: Dispatchable<Info = DispatchInfo>,
    {
        // NOTE: teaching this about `ChargeTransactionPayment` (and hence the tip) would
        // require knowing its index inside `Extra`, or dry-running the extrinsic and diffing
        // the sender's balance — far too much hassle for very little potential gain.
        let dispatch_info = unchecked_extrinsic.get_dispatch_info();
        let partial_fee = Self::compute_fee(len, &dispatch_info, 0u32.into());
        RuntimeDispatchInfo {
            weight: dispatch_info.weight,
            class: dispatch_info.class,
            partial_fee,
        }
    }
    /// Compute the final fee value for a particular transaction.
    ///
    /// The final fee is composed of:
    ///   - `base_fee`: the minimum amount a user pays for a transaction, declared as a base
    ///     _weight_ in the runtime and converted to a fee using `WeightToFee`;
    ///   - `len_fee`: the amount paid for the encoded length (in bytes) of the transaction;
    ///   - `weight_fee`: computed from the weight (execution time) of the transaction;
    ///   - `targeted_fee_adjustment`: a multiplier tuning the final fee based on the
    ///     congestion of the network;
    ///   - (optional) `tip`: added on top; only signed transactions can have a tip.
    ///
    /// The base fee and adjusted weight and length fees constitute the _inclusion fee,_ which
    /// is the minimum fee for a transaction to be included in a block.
    ///
    /// ```ignore
    /// inclusion_fee = base_fee + len_fee + [targeted_fee_adjustment * weight_fee];
    /// final_fee = inclusion_fee + tip;
    /// ```
    pub fn compute_fee(len: u32, info: &DispatchInfoOf<T::Call>, tip: BalanceOf<T>) -> BalanceOf<T>
    where
        T::Call: Dispatchable<Info = DispatchInfo>,
    {
        Self::compute_fee_raw(len, info.weight, tip, info.pays_fee)
    }
    /// Compute the actual post-dispatch fee for a particular transaction.
    ///
    /// Identical to `compute_fee` with the only difference that the post-dispatch corrected
    /// weight is used for the weight fee calculation.
    pub fn compute_actual_fee(
        len: u32,
        info: &DispatchInfoOf<T::Call>,
        post_info: &PostDispatchInfoOf<T::Call>,
        tip: BalanceOf<T>,
    ) -> BalanceOf<T>
    where
        T::Call: Dispatchable<Info = DispatchInfo, PostInfo = PostDispatchInfo>,
    {
        let actual_weight = post_info.calc_actual_weight(info);
        let pays = post_info.pays_fee(info);
        Self::compute_fee_raw(len, actual_weight, tip, pays)
    }
    /// Shared implementation of `compute_fee` and `compute_actual_fee`.
    fn compute_fee_raw(
        len: u32,
        weight: Weight,
        tip: BalanceOf<T>,
        pays_fee: Pays,
    ) -> BalanceOf<T> {
        // A fee-exempt dispatch only ever pays the (optional) tip.
        if pays_fee != Pays::Yes {
            return tip;
        }
        // Length fee: per-byte price times encoded length. Not adjusted by the multiplier.
        let fixed_len_fee = T::TransactionByteFee::get().saturating_mul(<BalanceOf<T>>::from(len));
        // Weight fee: the adjustable portion, scaled by the current congestion multiplier.
        let adjusted_weight_fee =
            Self::next_fee_multiplier().saturating_mul_int(Self::weight_to_fee(weight));
        // Base fee: flat cost derived from the runtime's extrinsic base weight.
        let base_fee = Self::weight_to_fee(T::ExtrinsicBaseWeight::get());
        base_fee
            .saturating_add(fixed_len_fee)
            .saturating_add(adjusted_weight_fee)
            .saturating_add(tip)
    }
    /// Convert `weight` into a fee, first capping it at the runtime-defined maximum block
    /// weight — otherwise the result would be the `Bounded` maximum of the balance type,
    /// which is not desired.
    fn weight_to_fee(weight: Weight) -> BalanceOf<T> {
        let cap = <T as frame_system::Trait>::MaximumBlockWeight::get();
        T::WeightToFee::calc(&weight.min(cap))
    }
}
impl<T> Convert<Weight, BalanceOf<T>> for Module<T>
where
    T: Trait,
    BalanceOf<T>: FixedPointOperand,
{
    /// Compute the fee for the specified weight.
    ///
    /// The result is already adjusted by the per-block fee adjustment factor and is therefore
    /// the share that the weight contributes to the overall fee of a transaction. It is mainly
    /// for informational purposes and not used in the actual fee calculation.
    fn convert(weight: Weight) -> BalanceOf<T> {
        let unadjusted = Self::weight_to_fee(weight);
        NextFeeMultiplier::get().saturating_mul_int(unadjusted)
    }
}
/// Require the transactor pay for themselves and maybe include a tip to gain additional priority
/// in the queue.
///
/// The wrapped balance is the tip, SCALE-encoded in compact form.
#[derive(Encode, Decode, Clone, Eq, PartialEq)]
pub struct ChargeTransactionPayment<T: Trait + Send + Sync>(#[codec(compact)] BalanceOf<T>);
impl<T: Trait + Send + Sync> ChargeTransactionPayment<T>
where
    T::Call: Dispatchable<Info = DispatchInfo, PostInfo = PostDispatchInfo>,
    BalanceOf<T>: Send + Sync + FixedPointOperand,
{
    /// utility constructor. Used only in client/factory code.
    pub fn from(fee: BalanceOf<T>) -> Self {
        Self(fee)
    }
    /// Withdraw the pre-dispatch (worst-case) fee from `who`.
    ///
    /// Returns the fee charged and, unless the fee was zero, the resulting negative imbalance.
    /// Fails with `InvalidTransaction::Payment` when the account cannot cover the fee.
    fn withdraw_fee(
        &self,
        who: &T::AccountId,
        info: &DispatchInfoOf<T::Call>,
        len: usize,
    ) -> Result<(BalanceOf<T>, Option<NegativeImbalanceOf<T>>), TransactionValidityError> {
        let tip = self.0;
        let fee = Module::<T>::compute_fee(len as u32, info, tip);
        // Do not touch the currency module at all when there is nothing to pay.
        if fee.is_zero() {
            return Ok((fee, None))
        }
        // Record the tip as an additional withdraw reason when one is present.
        let reason = if tip.is_zero() {
            WithdrawReason::TransactionPayment.into()
        } else {
            WithdrawReason::TransactionPayment | WithdrawReason::Tip
        };
        T::Currency::withdraw(who, fee, reason, ExistenceRequirement::KeepAlive)
            .map(|imbalance| (fee, Some(imbalance)))
            .map_err(|_| InvalidTransaction::Payment.into())
    }
    /// Get an appropriate priority for a transaction with the given length and info.
    ///
    /// This optimises on `fee/weight` and `fee/length` — whichever is consuming more of the
    /// corresponding maximum limit.
    ///
    /// For example, if a transaction consumed 1/4th of the block length and half of the weight,
    /// its final priority is `fee * min(2, 4) = fee * 2`. If it consumed `1/4th` of the block
    /// length and the entire block weight `(1/1)`, its priority is `fee * min(1, 4) = fee * 1`.
    /// This means that the transaction which consumes more resources (either length or weight)
    /// with the same `fee` ends up having lower priority.
    fn get_priority(
        len: usize,
        info: &DispatchInfoOf<T::Call>,
        final_fee: BalanceOf<T>,
    ) -> TransactionPriority {
        // How many times the consumed weight / length fits in the respective block limit
        // (`max(1)` guards against division by zero).
        let weight_headroom = T::MaximumBlockWeight::get() / info.weight.max(1);
        let len_headroom = T::MaximumBlockLength::get() as u64 / (len as u64).max(1);
        // Scale the fee by the headroom of the *scarcer* resource.
        let coefficient: BalanceOf<T> =
            weight_headroom.min(len_headroom).saturated_into::<BalanceOf<T>>();
        final_fee.saturating_mul(coefficient).saturated_into::<TransactionPriority>()
    }
}
// Manual `Debug`: in `no_std` builds the inner balance type may not be
// formattable, so the implementation degrades to a no-op there.
impl<T: Trait + Send + Sync> sp_std::fmt::Debug for ChargeTransactionPayment<T> {
    #[cfg(feature = "std")]
    fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
        write!(f, "ChargeTransactionPayment<{:?}>", self.0)
    }
    #[cfg(not(feature = "std"))]
    fn fmt(&self, _: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
        Ok(())
    }
}
impl<T: Trait + Send + Sync> SignedExtension for ChargeTransactionPayment<T>
where
    BalanceOf<T>: Send + Sync + From<u64> + FixedPointOperand,
    T::Call: Dispatchable<Info = DispatchInfo, PostInfo = PostDispatchInfo>,
{
    const IDENTIFIER: &'static str = "ChargeTransactionPayment";
    type AccountId = T::AccountId;
    type Call = T::Call;
    type AdditionalSigned = ();
    // Data passed from `pre_dispatch` to `post_dispatch`:
    // (tip, payer, withdrawn imbalance if any, fee charged up front).
    type Pre = (BalanceOf<T>, Self::AccountId, Option<NegativeImbalanceOf<T>>, BalanceOf<T>);
    fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> {
        Ok(())
    }
    // Charges the worst-case fee and derives the transaction priority from it.
    fn validate(
        &self,
        who: &Self::AccountId,
        _call: &Self::Call,
        info: &DispatchInfoOf<Self::Call>,
        len: usize,
    ) -> TransactionValidity {
        let (fee, _) = self.withdraw_fee(who, info, len)?;
        Ok(ValidTransaction { priority: Self::get_priority(len, info, fee), ..Default::default() })
    }
    // Charges the worst-case fee and stashes the data needed to refund the
    // difference in `post_dispatch`.
    fn pre_dispatch(
        self,
        who: &Self::AccountId,
        _call: &Self::Call,
        info: &DispatchInfoOf<Self::Call>,
        len: usize,
    ) -> Result<Self::Pre, TransactionValidityError> {
        let (fee, imbalance) = self.withdraw_fee(who, info, len)?;
        Ok((self.0, who.clone(), imbalance, fee))
    }
    // Refunds the difference between the pre-charged fee and the actual
    // post-dispatch fee, then hands the final imbalances to `OnTransactionPayment`.
    fn post_dispatch(
        pre: Self::Pre,
        info: &DispatchInfoOf<Self::Call>,
        post_info: &PostDispatchInfoOf<Self::Call>,
        len: usize,
        _result: &DispatchResult,
    ) -> Result<(), TransactionValidityError> {
        let (tip, who, imbalance, fee) = pre;
        if let Some(payed) = imbalance {
            let actual_fee = Module::<T>::compute_actual_fee(len as u32, info, post_info, tip);
            let refund = fee.saturating_sub(actual_fee);
            let actual_payment = match T::Currency::deposit_into_existing(&who, refund) {
                Ok(refund_imbalance) => {
                    // The refund cannot be larger than the up front payed max weight.
                    // `PostDispatchInfo::calc_unspent` guards against such a case.
                    match payed.offset(refund_imbalance) {
                        Ok(actual_payment) => actual_payment,
                        Err(_) => return Err(InvalidTransaction::Payment.into()),
                    }
                },
                // We do not recreate the account using the refund. The up front payment
                // is gone in that case.
                Err(_) => payed,
            };
            // Split the tip off the total payment; both parts go to the configured handler.
            let imbalances = actual_payment.split(tip);
            T::OnTransactionPayment::on_unbalanceds(
                Some(imbalances.0).into_iter().chain(Some(imbalances.1)),
            );
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
use super::*;
use codec::Encode;
use frame_support::{
impl_outer_dispatch, impl_outer_event, impl_outer_origin, parameter_types,
weights::{
DispatchClass, DispatchInfo, GetDispatchInfo, PostDispatchInfo, Weight,
WeightToFeeCoefficient, WeightToFeeCoefficients, WeightToFeePolynomial,
},
};
use pallet_balances::Call as BalancesCall;
use pallet_transaction_payment_rpc_runtime_api::RuntimeDispatchInfo;
use smallvec::smallvec;
use sp_core::H256;
use sp_runtime::{
testing::{Header, TestXt},
traits::{BlakeTwo256, IdentityLookup},
Perbill,
};
use std::cell::RefCell;
// A transfer call reused by most tests below.
const CALL: &<Runtime as frame_system::Trait>::Call =
    &Call::Balances(BalancesCall::transfer(2, 69));
impl_outer_dispatch! {
    pub enum Call for Runtime where origin: Origin {
        pallet_balances::Balances,
        frame_system::System,
    }
}
impl_outer_event! {
    pub enum Event for Runtime {
        system<T>,
        pallet_balances<T>,
    }
}
// The mock runtime aggregating system, balances and this pallet.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Runtime;
use frame_system as system;
impl_outer_origin! {
    pub enum Origin for Runtime {}
}
thread_local! {
    // Per-test knob for the extrinsic base weight, set through `ExtBuilder`.
    static EXTRINSIC_BASE_WEIGHT: RefCell<u64> = RefCell::new(0);
}
// Adapter exposing the thread-local base weight via the `Get` trait.
pub struct ExtrinsicBaseWeight;
impl Get<u64> for ExtrinsicBaseWeight {
    fn get() -> u64 {
        EXTRINSIC_BASE_WEIGHT.with(|v| *v.borrow())
    }
}
parameter_types! {
    pub const BlockHashCount: u64 = 250;
    pub const MaximumBlockWeight: Weight = 1024;
    pub const MaximumBlockLength: u32 = 2 * 1024;
    pub const AvailableBlockRatio: Perbill = Perbill::one();
}
// Minimal system config for the mock runtime; only the weight/length limits and
// the mutable `ExtrinsicBaseWeight` matter for these tests.
impl frame_system::Trait for Runtime {
    type BaseCallFilter = ();
    type Origin = Origin;
    type Index = u64;
    type BlockNumber = u64;
    type Call = Call;
    type Hash = H256;
    type Hashing = BlakeTwo256;
    type AccountId = u64;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type Event = Event;
    type BlockHashCount = BlockHashCount;
    type MaximumBlockWeight = MaximumBlockWeight;
    type DbWeight = ();
    type BlockExecutionWeight = ();
    type ExtrinsicBaseWeight = ExtrinsicBaseWeight;
    type MaximumExtrinsicWeight = MaximumBlockWeight;
    type MaximumBlockLength = MaximumBlockLength;
    type AvailableBlockRatio = AvailableBlockRatio;
    type Version = ();
    type PalletInfo = ();
    type AccountData = pallet_balances::AccountData<u64>;
    type OnNewAccount = ();
    type OnKilledAccount = ();
    type SystemWeightInfo = ();
}
parameter_types! {
    pub const ExistentialDeposit: u64 = 1;
}
// Balances config: non-zero existential deposit so accounts can be reaped
// (exercised by `refund_does_not_recreate_account`).
impl pallet_balances::Trait for Runtime {
    type Balance = u64;
    type Event = Event;
    type DustRemoval = ();
    type ExistentialDeposit = ExistentialDeposit;
    type AccountStore = System;
    type MaxLocks = ();
    type WeightInfo = ();
}
thread_local! {
    // Per-test knobs for the byte fee and the linear weight-to-fee coefficient.
    static TRANSACTION_BYTE_FEE: RefCell<u64> = RefCell::new(1);
    static WEIGHT_TO_FEE: RefCell<u64> = RefCell::new(1);
}
pub struct TransactionByteFee;
impl Get<u64> for TransactionByteFee {
    fn get() -> u64 {
        TRANSACTION_BYTE_FEE.with(|v| *v.borrow())
    }
}
// Linear weight-to-fee polynomial: fee = WEIGHT_TO_FEE * weight.
pub struct WeightToFee;
impl WeightToFeePolynomial for WeightToFee {
    type Balance = u64;
    fn polynomial() -> WeightToFeeCoefficients<Self::Balance> {
        smallvec![WeightToFeeCoefficient {
            degree: 1,
            coeff_frac: Perbill::zero(),
            coeff_integer: WEIGHT_TO_FEE.with(|v| *v.borrow()),
            negative: false,
        }]
    }
}
impl Trait for Runtime {
    type Currency = pallet_balances::Module<Runtime>;
    type OnTransactionPayment = ();
    type TransactionByteFee = TransactionByteFee;
    type WeightToFee = WeightToFee;
    type FeeMultiplierUpdate = ();
}
// Convenience aliases used throughout the tests.
type Balances = pallet_balances::Module<Runtime>;
type System = frame_system::Module<Runtime>;
type TransactionPayment = Module<Runtime>;
// Builder for test externalities: configures the fee knobs and initial balances.
pub struct ExtBuilder {
    // Initial balances are `10 * factor, 20 * factor, ...` for accounts 1..=6;
    // a factor of 0 means no endowed accounts at all.
    balance_factor: u64,
    base_weight: u64,
    byte_fee: u64,
    weight_to_fee: u64,
}
impl Default for ExtBuilder {
    fn default() -> Self {
        Self { balance_factor: 1, base_weight: 0, byte_fee: 1, weight_to_fee: 1 }
    }
}
impl ExtBuilder {
    pub fn base_weight(mut self, base_weight: u64) -> Self {
        self.base_weight = base_weight;
        self
    }
    pub fn byte_fee(mut self, byte_fee: u64) -> Self {
        self.byte_fee = byte_fee;
        self
    }
    pub fn weight_fee(mut self, weight_to_fee: u64) -> Self {
        self.weight_to_fee = weight_to_fee;
        self
    }
    pub fn balance_factor(mut self, factor: u64) -> Self {
        self.balance_factor = factor;
        self
    }
    // Push the configured knobs into the thread-local statics read by the runtime.
    fn set_constants(&self) {
        EXTRINSIC_BASE_WEIGHT.with(|v| *v.borrow_mut() = self.base_weight);
        TRANSACTION_BYTE_FEE.with(|v| *v.borrow_mut() = self.byte_fee);
        WEIGHT_TO_FEE.with(|v| *v.borrow_mut() = self.weight_to_fee);
    }
    pub fn build(self) -> sp_io::TestExternalities {
        self.set_constants();
        let mut t = frame_system::GenesisConfig::default().build_storage::<Runtime>().unwrap();
        pallet_balances::GenesisConfig::<Runtime> {
            balances: if self.balance_factor > 0 {
                vec![
                    (1, 10 * self.balance_factor),
                    (2, 20 * self.balance_factor),
                    (3, 30 * self.balance_factor),
                    (4, 40 * self.balance_factor),
                    (5, 50 * self.balance_factor),
                    (6, 60 * self.balance_factor),
                ]
            } else {
                vec![]
            },
        }
        .assimilate_storage(&mut t)
        .unwrap();
        t.into()
    }
}
/// create a transaction info struct from weight. Handy to avoid building the whole struct.
pub fn info_from_weight(w: Weight) -> DispatchInfo {
    // pays_fee: Pays::Yes -- class: DispatchClass::Normal
    DispatchInfo { weight: w, ..Default::default() }
}
// Post-dispatch info reporting a corrected actual weight.
fn post_info_from_weight(w: Weight) -> PostDispatchInfo {
    PostDispatchInfo { actual_weight: Some(w), pays_fee: Default::default() }
}
// Post-dispatch info overriding only the `pays_fee` flag.
fn post_info_from_pays(p: Pays) -> PostDispatchInfo {
    PostDispatchInfo { actual_weight: None, pays_fee: p }
}
// Post-dispatch info with no corrections at all (keeps the pre-dispatch weight).
fn default_post_info() -> PostDispatchInfo {
    PostDispatchInfo { actual_weight: None, pays_fee: Default::default() }
}
// Fees are charged up front in `pre_dispatch` and the unspent weight portion is
// refunded in `post_dispatch`.
#[test]
fn signed_extension_transaction_payment_work() {
    ExtBuilder::default().balance_factor(10).base_weight(5).build().execute_with(|| {
        let len = 10;
        let pre = ChargeTransactionPayment::<Runtime>::from(0)
            .pre_dispatch(&1, CALL, &info_from_weight(5), len)
            .unwrap();
        // 5 base + 5 weight + 10 len, no tip.
        assert_eq!(Balances::free_balance(1), 100 - 5 - 5 - 10);
        assert!(ChargeTransactionPayment::<Runtime>::post_dispatch(
            pre,
            &info_from_weight(5),
            &default_post_info(),
            len,
            &Ok(())
        )
        .is_ok());
        // No weight correction reported, so nothing is refunded.
        assert_eq!(Balances::free_balance(1), 100 - 5 - 5 - 10);
        let pre = ChargeTransactionPayment::<Runtime>::from(5 /* tipped */)
            .pre_dispatch(&2, CALL, &info_from_weight(100), len)
            .unwrap();
        assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5);
        assert!(ChargeTransactionPayment::<Runtime>::post_dispatch(
            pre,
            &info_from_weight(100),
            &post_info_from_weight(50),
            len,
            &Ok(())
        )
        .is_ok());
        // Actual weight 50 < pre-charged 100, so 50 units of weight fee come back.
        assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 50 - 5);
    });
}
// The weight-fee refund is scaled by the current fee multiplier, same as the charge.
#[test]
fn signed_extension_transaction_payment_multiplied_refund_works() {
    ExtBuilder::default().balance_factor(10).base_weight(5).build().execute_with(|| {
        let len = 10;
        NextFeeMultiplier::put(Multiplier::saturating_from_rational(3, 2));
        let pre = ChargeTransactionPayment::<Runtime>::from(5 /* tipped */)
            .pre_dispatch(&2, CALL, &info_from_weight(100), len)
            .unwrap();
        // 5 base fee, 10 byte fee, 3/2 * 100 weight fee, 5 tip
        assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 150 - 5);
        assert!(ChargeTransactionPayment::<Runtime>::post_dispatch(
            pre,
            &info_from_weight(100),
            &post_info_from_weight(50),
            len,
            &Ok(())
        )
        .is_ok());
        // 75 (3/2 of the returned 50 units of weight) is refunded
        assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 75 - 5);
    });
}
// An over-large declared weight is capped at `MaximumBlockWeight` before fee conversion.
#[test]
fn signed_extension_transaction_payment_is_bounded() {
    ExtBuilder::default().balance_factor(1000).byte_fee(0).build().execute_with(|| {
        // maximum weight possible
        assert!(ChargeTransactionPayment::<Runtime>::from(0)
            .pre_dispatch(&1, CALL, &info_from_weight(Weight::max_value()), 10)
            .is_ok());
        // fee will be proportional to what is the actual maximum weight in the runtime.
        assert_eq!(
            Balances::free_balance(&1),
            (10000 - <Runtime as frame_system::Trait>::MaximumBlockWeight::get()) as u64
        );
    });
}
// A `Pays::No` dispatch validates even with a zero balance; a `Pays::Yes` one
// from a penniless account is rejected.
#[test]
fn signed_extension_allows_free_transactions() {
    ExtBuilder::default().base_weight(100).balance_factor(0).build().execute_with(|| {
        // 1 ain't have a penny.
        assert_eq!(Balances::free_balance(1), 0);
        let len = 100;
        // This is a completely free (and thus wholly insecure/DoS-ridden) transaction.
        let operational_transaction =
            DispatchInfo { weight: 0, class: DispatchClass::Operational, pays_fee: Pays::No };
        assert!(ChargeTransactionPayment::<Runtime>::from(0)
            .validate(&1, CALL, &operational_transaction, len)
            .is_ok());
        // like an InsecureFreeNormal
        let free_transaction =
            DispatchInfo { weight: 0, class: DispatchClass::Normal, pays_fee: Pays::Yes };
        assert!(ChargeTransactionPayment::<Runtime>::from(0)
            .validate(&1, CALL, &free_transaction, len)
            .is_err());
    });
}
// The length fee is *not* scaled by the multiplier; only the weight fee is.
#[test]
fn signed_ext_length_fee_is_also_updated_per_congestion() {
    ExtBuilder::default().base_weight(5).balance_factor(10).build().execute_with(|| {
        // all fees should be x1.5
        NextFeeMultiplier::put(Multiplier::saturating_from_rational(3, 2));
        let len = 10;
        assert!(ChargeTransactionPayment::<Runtime>::from(10) // tipped
            .pre_dispatch(&1, CALL, &info_from_weight(3), len)
            .is_ok());
        assert_eq!(
            Balances::free_balance(1),
            100 // original
            - 10 // tip
            - 5 // base
            - 10 // len
            - (3 * 3 / 2) // adjusted weight
        );
    })
}
// `query_info` reports weight, class and a partial fee (no tip) consistent with
// `compute_fee` under the current multiplier.
#[test]
fn query_info_works() {
    let call = Call::Balances(BalancesCall::transfer(2, 69));
    let origin = 111111;
    let extra = ();
    let xt = TestXt::new(call, Some((origin, extra)));
    let info = xt.get_dispatch_info();
    let ext = xt.encode();
    let len = ext.len() as u32;
    ExtBuilder::default().base_weight(5).weight_fee(2).build().execute_with(|| {
        // all fees should be x1.5
        NextFeeMultiplier::put(Multiplier::saturating_from_rational(3, 2));
        assert_eq!(
            TransactionPayment::query_info(xt, len),
            RuntimeDispatchInfo {
                weight: info.weight,
                class: info.class,
                partial_fee: 5 * 2 /* base * weight_fee */
                    + len as u64 /* len * 1 */
                    + info.weight.min(MaximumBlockWeight::get()) as u64 * 2 * 3 / 2 /* weight */
            },
        );
    });
}
// `compute_fee` with the default (identity) multiplier: each component adds up linearly.
#[test]
fn compute_fee_works_without_multiplier() {
    ExtBuilder::default().base_weight(100).byte_fee(10).balance_factor(0).build().execute_with(
        || {
            // Next fee multiplier is one, i.e. the default / no adjustment.
            assert_eq!(NextFeeMultiplier::get(), Multiplier::one());
            // Tip only, no fees works
            let dispatch_info = DispatchInfo {
                weight: 0,
                class: DispatchClass::Operational,
                pays_fee: Pays::No,
            };
            assert_eq!(Module::<Runtime>::compute_fee(0, &dispatch_info, 10), 10);
            // No tip, only base fee works
            let dispatch_info = DispatchInfo {
                weight: 0,
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            assert_eq!(Module::<Runtime>::compute_fee(0, &dispatch_info, 0), 100);
            // Tip + base fee works
            assert_eq!(Module::<Runtime>::compute_fee(0, &dispatch_info, 69), 169);
            // Len (byte fee) + base fee works
            assert_eq!(Module::<Runtime>::compute_fee(42, &dispatch_info, 0), 520);
            // Weight fee + base fee works
            let dispatch_info = DispatchInfo {
                weight: 1000,
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            assert_eq!(Module::<Runtime>::compute_fee(0, &dispatch_info, 0), 1100);
        },
    );
}
// A multiplier > 1 scales only the weight component; base, length and tip are untouched.
#[test]
fn compute_fee_works_with_multiplier() {
    ExtBuilder::default().base_weight(100).byte_fee(10).balance_factor(0).build().execute_with(
        || {
            // Add a next fee multiplier. Fees will be x3/2.
            NextFeeMultiplier::put(Multiplier::saturating_from_rational(3, 2));
            // Base fee is unaffected by multiplier
            let dispatch_info = DispatchInfo {
                weight: 0,
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            assert_eq!(Module::<Runtime>::compute_fee(0, &dispatch_info, 0), 100);
            // Everything works together :)
            let dispatch_info = DispatchInfo {
                weight: 123,
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            // 123 weight, 456 length, 100 base
            assert_eq!(
                Module::<Runtime>::compute_fee(456, &dispatch_info, 789),
                100 + (3 * 123 / 2) + 4560 + 789,
            );
        },
    );
}
// A multiplier < 1 ("negative" adjustment) discounts the weight component only.
#[test]
fn compute_fee_works_with_negative_multiplier() {
    ExtBuilder::default().base_weight(100).byte_fee(10).balance_factor(0).build().execute_with(
        || {
            // Add a next fee multiplier. All fees will be x1/2.
            NextFeeMultiplier::put(Multiplier::saturating_from_rational(1, 2));
            // Base fee is unaffected by multiplier.
            let dispatch_info = DispatchInfo {
                weight: 0,
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            assert_eq!(Module::<Runtime>::compute_fee(0, &dispatch_info, 0), 100);
            // Everything works together.
            let dispatch_info = DispatchInfo {
                weight: 123,
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            // 123 weight, 456 length, 100 base
            assert_eq!(
                Module::<Runtime>::compute_fee(456, &dispatch_info, 789),
                100 + (123 / 2) + 4560 + 789,
            );
        },
    );
}
// With maxed-out inputs the saturating arithmetic pins the fee at `u64::max_value()`.
#[test]
fn compute_fee_does_not_overflow() {
    ExtBuilder::default().base_weight(100).byte_fee(10).balance_factor(0).build().execute_with(
        || {
            // Overflow is handled
            let dispatch_info = DispatchInfo {
                weight: Weight::max_value(),
                class: DispatchClass::Operational,
                pays_fee: Pays::Yes,
            };
            assert_eq!(
                Module::<Runtime>::compute_fee(
                    <u32>::max_value(),
                    &dispatch_info,
                    <u64>::max_value()
                ),
                <u64>::max_value()
            );
        },
    );
}
#[test]
fn refund_does_not_recreate_account() {
ExtBuilder::default().balance_factor(10).base_weight(5).build().execute_with(|| {
// So events are emitted
System::set_block_number(10);
let len = 10;
let pre = ChargeTransactionPayment::<Runtime>::from(5 /* tipped */)
.pre_dispatch(&2, CALL, &info_from_weight(100), len)
.unwrap();
assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5);
// kill the account between pre and post dispatch
assert!(Balances::transfer(Some(2).into(), 3, Balances::free_balance(2)).is_ok());
assert_eq!(Balances::free_balance(2), 0);
assert!(ChargeTransactionPayment::<Runtime>::post_dispatch(
pre,
&info_from_weight(100),
&post_info_from_weight(50),
len,
&Ok(())
)
.is_ok());
assert_eq!(Balances::free_balance(2), 0);
// Transfer Event
assert!(System::events().iter().any(|event| {
event.event == Event::pallet_balances(pallet_balances::RawEvent::Transfer(2, 3, 80))
}));
// Killed Event
assert!(System::events()
.iter()
.any(|event| { event.event == Event::system(system::RawEvent::KilledAccount(2)) }));
});
}
#[test]
fn actual_weight_higher_than_max_refunds_nothing() {
ExtBuilder::default().balance_factor(10).base_weight(5).build().execute_with(|| {
let len = 10;
let pre = ChargeTransactionPayment::<Runtime>::from(5 /* tipped */)
.pre_dispatch(&2, CALL, &info_from_weight(100), len)
.unwrap();
assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5);
assert!(ChargeTransactionPayment::<Runtime>::post_dispatch(
pre,
&info_from_weight(100),
&post_info_from_weight(101),
len,
&Ok(())
)
.is_ok());
assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5);
});
}
#[test]
fn zero_transfer_on_free_transaction() {
    ExtBuilder::default().balance_factor(10).base_weight(5).build().execute_with(|| {
        // So events are emitted
        System::set_block_number(10);
        let len = 10;
        // `Pays::No` with a zero tip: the extension must charge nothing.
        let dispatch_info =
            DispatchInfo { weight: 100, pays_fee: Pays::No, class: DispatchClass::Normal };
        // Account 69 holds no funds; the free transaction must still succeed.
        let user = 69;
        let pre = ChargeTransactionPayment::<Runtime>::from(0)
            .pre_dispatch(&user, CALL, &dispatch_info, len)
            .unwrap();
        assert_eq!(Balances::total_balance(&user), 0);
        assert!(ChargeTransactionPayment::<Runtime>::post_dispatch(
            pre,
            &dispatch_info,
            &default_post_info(),
            len,
            &Ok(())
        )
        .is_ok());
        // Balance untouched on both sides of the dispatch.
        assert_eq!(Balances::total_balance(&user), 0);
        // No events for such a scenario
        assert_eq!(System::events().len(), 0);
    });
}
#[test]
fn refund_consistent_with_actual_weight() {
    ExtBuilder::default().balance_factor(10).base_weight(7).build().execute_with(|| {
        let info = info_from_weight(100);
        let post_info = post_info_from_weight(33);
        let prev_balance = Balances::free_balance(2);
        let len = 10;
        let tip = 5;
        // Use a non-trivial fee multiplier (5/4) so the test catches the
        // refund path applying it differently from the charge path.
        NextFeeMultiplier::put(Multiplier::saturating_from_rational(5, 4));
        let pre = ChargeTransactionPayment::<Runtime>::from(tip)
            .pre_dispatch(&2, CALL, &info, len)
            .unwrap();
        ChargeTransactionPayment::<Runtime>::post_dispatch(
            pre,
            &info,
            &post_info,
            len,
            &Ok(()),
        )
        .unwrap();
        // What was actually deducted must equal the recomputed actual fee.
        let refund_based_fee = prev_balance - Balances::free_balance(2);
        let actual_fee =
            Module::<Runtime>::compute_actual_fee(len as u32, &info, &post_info, tip);
        // 33 weight, 10 length, 7 base, 5 tip
        assert_eq!(actual_fee, 7 + 10 + (33 * 5 / 4) + 5);
        assert_eq!(refund_based_fee, actual_fee);
    });
}
#[test]
fn post_info_can_change_pays_fee() {
    ExtBuilder::default().balance_factor(10).base_weight(7).build().execute_with(|| {
        let info = info_from_weight(100);
        // Post-dispatch info downgrades the fee to `Pays::No`; only the tip
        // should end up being paid.
        let post_info = post_info_from_pays(Pays::No);
        let prev_balance = Balances::free_balance(2);
        let len = 10;
        let tip = 5;
        NextFeeMultiplier::put(Multiplier::saturating_from_rational(5, 4));
        let pre = ChargeTransactionPayment::<Runtime>::from(tip)
            .pre_dispatch(&2, CALL, &info, len)
            .unwrap();
        ChargeTransactionPayment::<Runtime>::post_dispatch(
            pre,
            &info,
            &post_info,
            len,
            &Ok(()),
        )
        .unwrap();
        // The deducted amount must match the recomputed actual fee.
        let refund_based_fee = prev_balance - Balances::free_balance(2);
        let actual_fee =
            Module::<Runtime>::compute_actual_fee(len as u32, &info, &post_info, tip);
        // Only 5 tip is paid
        assert_eq!(actual_fee, 5);
        assert_eq!(refund_based_fee, actual_fee);
    });
}
}
|
use std::fs;
use std::error::Error;
use std::f32::consts::PI;
/// Reads the Day 10 puzzle input and parses it into a character grid
/// (outer `Vec` = rows, inner `Vec` = columns).
fn parse_input() -> Result<Vec<Vec<char>>, Box<dyn Error>> {
    let input = fs::read_to_string("./src/level10/input.txt")?;
    // `split('\n')` (char pattern) over `split("\n")`: identical behavior,
    // idiomatic for single-character separators (clippy: single_char_pattern).
    let input: Vec<Vec<char>> = input.trim().split('\n').map(|l| l.chars().collect()).collect();
    Ok(input)
}
pub fn part1() -> Result<((usize,usize)), Box<dyn Error>>{
let input = parse_input()?;
let mut max = 0;
let mut pos = (0,0);
for (i,l) in input.iter().enumerate() {
for (j, &val) in l.iter().enumerate() {
if val == '#' {
let mut angles: Vec<f32> = vec![];
for (m,l) in input.iter().enumerate() {
for (n, &val) in l.iter().enumerate() {
if val == '#' && !(i == m && j == n){
let h: f32 = m as f32 -i as f32;
let w: f32 = n as f32 -j as f32;
let angle = h.atan2(w);
if !angles.contains(&angle) {
angles.push(angle);
}
}
}
}
if angles.len() > max {
max = angles.len();
pos = (i,j);
}
max = std::cmp::max(max, angles.len());
}
}
}
Ok((pos))
}
/// Day 10, part 2: a laser at the part-1 station rotates clockwise and
/// vaporizes one asteroid per angle per sweep; prints the answer for the
/// 200th vaporized asteroid (`col * 100 + row`).
pub fn part2() -> Result<(), Box<dyn Error>> {
    let input = parse_input()?;
    // Best monitoring-station position (row, col) from part 1.
    let pos = part1()?;
    // One entry per other asteroid: (angle, distance, row, col).
    let mut angles: Vec<(f32, f32, usize, usize)> = vec![];
    for (i,l) in input.iter().enumerate() {
        for (j, &val) in l.iter().enumerate() {
            if val == '#' && !(i == pos.0 && j == pos.1){
                let h: f32 = i as f32 - pos.0 as f32;
                let w: f32 = j as f32 - pos.1 as f32;
                let angle = h.atan2(w);
                angles.push((angle, (h*h + w*w).sqrt(), i, j));
            }
        }
    }
    // Lexicographic sort on the tuple: by angle, then nearest-first on ties.
    angles.sort_by(|a, b| a.partial_cmp(b).unwrap());
    // Advance to the first angle >= -PI/2: with rows growing downward that is
    // the laser pointing straight "up" — presumably the puzzle's start
    // direction; TODO confirm against the puzzle statement.
    let mut i = 0;
    while angles[i].0 < -PI/2f32 {
        i += 1;
    }
    let mut last = angles.remove(i);
    let mut cnt = 1;
    let mut deleted = vec![];
    loop {
        if angles.len() == 0 {
            break;
        }
        // Asteroids sharing the exact same angle (bitwise f32 equality) as the
        // one just vaporized are blocked until the next sweep: skip past them.
        if angles[i % angles.len()].0 == last.0 && angles.len() != 1 {
            i += 1
        } else {
            // Wrap the cursor, vaporize, and record the order.
            i = i % angles.len();
            last = angles.remove(i % angles.len());
            deleted.push(last);
            cnt += 1;
            if cnt == 200 {
                break;
            }
        }
    }
    // Interactive step-through visualization of the vaporization order.
    draw(input, deleted, pos);
    // Answer convention: x (column) * 100 + y (row).
    println!("{:?}", last.3 * 100 + last.2);
    Ok(())
}
/// Step-through visualization of the vaporization order: for each destroyed
/// asteroid, redraws the grid with the destroyed cell highlighted red and the
/// station green, waits for Enter, then clears the screen for the next frame.
fn draw (input: Vec<Vec<char>>, deleted: Vec<(f32, f32, usize, usize)>, pos: (usize, usize)) {
    for (_, _, i2, j2) in deleted {
        for (i,l) in input.iter().enumerate() {
            for (j, val) in l.iter().enumerate() {
                if i == i2 && j == j2 {
                    // red background: the asteroid destroyed this frame
                    print!("\x1b[41m")
                } else if i == pos.0 && j == pos.1 {
                    // green background: the monitoring station
                    print!("\x1b[42m")
                }
                print!("{}", val);
                // reset colors after every cell
                print!("\x1b[0m")
            }
            println!();
        }
        // Pause until the user presses Enter. The read result is deliberately
        // discarded (`let _ =`) — this is best-effort interactive stepping;
        // the original silently ignored the `Result`, triggering an
        // unused-must-use warning.
        let _ = std::io::stdin().read_line(&mut String::new());
        // ANSI clear-screen before the next frame.
        print!("\x1b[2J");
    }
}
|
//! Program counter
#[cfg(cortex_m)]
use core::arch::asm;
/// Reads the program counter (PC) register.
///
/// NOTE(review): on ARM, reading PC yields the address of the current
/// instruction plus a pipeline offset — confirm the expected bias for the
/// target core before relying on the exact value.
#[cfg(cortex_m)]
#[inline]
pub fn read() -> u32 {
    let r;
    // SAFETY: moving PC into a general-purpose register has no memory or
    // stack effects and preserves flags, as declared in the options.
    unsafe { asm!("mov {}, pc", out(reg) r, options(nomem, nostack, preserves_flags)) };
    r
}
/// Writes `bits` to the program counter (PC) register.
///
/// # Safety
///
/// Writing to PC is an unconditional branch to `bits`: the caller must
/// guarantee that `bits` is a valid instruction address to continue
/// execution from (including any architecture-required alignment/Thumb-bit
/// conventions — confirm for the target core).
#[cfg(cortex_m)]
#[inline]
pub unsafe fn write(bits: u32) {
    asm!("mov pc, {}", in(reg) bits, options(nomem, nostack, preserves_flags));
}
|
use yew::prelude::*;
/// Site footer component; stateless, so both `Message` and `Properties`
/// are the unit type.
pub struct Footer;
impl Component for Footer {
    type Message = ();
    type Properties = ();
    fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
        Footer
    }
    // The footer has no state, so it never needs to re-render: always `false`.
    fn update(&mut self, _: Self::Message) -> ShouldRender {
        false
    }
}
// Renders the static footer markup with a link to the site's source.
// NOTE(review): this uses an older yew API where `Renderable` is a trait
// separate from `Component` and attributes take trailing commas — confirm
// the pinned yew version before modernizing.
impl Renderable<Footer> for Footer {
    fn view(&self) -> Html<Self> {
        html!{
            <footer class="Footer",>
                { "The source for this site is available " }
                <a href="https://github.com/g-s-k/gsk-space",>{ "here" }</a>
                { "." }
            </footer>
        }
    }
}
|
/*! A silly grammar */
use Symbol;
/// Context/state type threaded through the generated `Parser`.
pub struct Foo;
// Toy grammar in the rusty_peg DSL. The macro generates a `Parser` type and
// one unit symbol struct per nonterminal (`Hi`, `Ho`, `HiOrHo`, ...):
// - `Hi`/`Ho` match literal tokens and yield 1 / 2;
// - `HiOrHo` is ordered choice; `Sum`/`Sum1` form a right-recursive
//   "+"-separated sum where each level computes `x + y*10`;
// - `HiHo` matches the sequence `Hi Ho`; `Rep` matches `HiOrHo` repeated.
rusty_peg! {
    parser Parser<'input>: Foo {
        Hi: u32 = ("Hi") => 1;
        Ho: u32 = "Ho" => 2;
        HiOrHo: u32 = (Hi / Ho);
        Sum: u32 = (Sum1 / HiOrHo);
        Sum1: u32 = (<x:HiOrHo> "+" <y:Sum>) => {x + y*10};
        HiHo: () = (Hi Ho) => ();
        Rep: Vec<u32> = {HiOrHo};
    }
}
/// Test helper: parses a prefix of `text` with `symbol` and returns the
/// parsed value, panicking (via `unwrap`) if the parse fails.
fn should_parse_prefix<'input,P:?Sized>(
    symbol: &P,
    text: &'input str)
    -> P::Output
    where P: Symbol<'input,Parser<'input>>
{
    let mut parser = Parser::new(Foo);
    // `.1` is the parsed value; `.0` is presumably the consumed length or
    // remaining input — confirm against rusty_peg's `parse_prefix` docs.
    symbol.parse_prefix(&mut parser, text).unwrap().1
}
#[test]
fn parse_hi_from_hi() {
    assert_eq!(1, should_parse_prefix(&Hi, "Hi"));
}
// `Hi` must not accept "Ho": the helper's `unwrap()` panics on parse failure.
#[test]
#[should_panic]
fn parse_hi_from_ho() {
    assert_eq!(2, should_parse_prefix(&Hi, "Ho"));
}
#[test]
fn parse_hiorho_from_hi() {
    assert_eq!(1, should_parse_prefix(&HiOrHo, "Hi"));
}
#[test]
fn parse_hiorho_from_ho() {
    assert_eq!(2, should_parse_prefix(&HiOrHo, "Ho"));
}
#[test]
fn parse_hiho_from_ho() {
    assert_eq!((), should_parse_prefix(&HiHo, "Hi Ho"));
}
// Right recursion in Sum1 (`x + y*10`):
// 1 + 10*(2 + 10*(2 + 10*1)) = 1221.
#[test]
fn parse_sum_from_ho() {
    assert_eq!(1221, should_parse_prefix(&Sum, "Hi + Ho + Ho + Hi"));
}
#[test]
fn parse_repeat() {
    assert_eq!(vec![1, 2, 2, 1, 2], should_parse_prefix(&Rep, "Hi Ho Ho Hi Ho"));
}
|
pub mod error;
pub mod proto;
pub mod relays;
|
#[cfg(feature = "ffi")]
#[macro_use]
pub mod ctypes;
#[cfg(any(
feature = "bls_bls12381",
feature = "ed25519",
feature = "ed25519_asm",
feature = "ecdh_secp256k1",
feature = "ecdh_secp256k1_native",
feature = "ecdh_secp256k1_asm",
feature = "ecdsa_secp256k1",
feature = "ecdsa_secp256k1_native",
feature = "ecdsa_secp256k1_asm",
feature = "x25519",
feature = "x25519_asm",
feature = "wasm"
))]
#[macro_use]
pub mod macros;
#[cfg(feature = "logger")]
#[macro_use]
pub mod logger;
#[cfg(any(feature = "cl", feature = "cl_native"))]
pub mod commitment;
|
// svd2rust-generated register interface for NDAT1 (FDCAN "New Data 1").
// Each of the 32 ND<k> flags gets a one-bit reader alias and a writer alias;
// the writer aliases differ only in the bit offset `O` baked into the type.
#[doc = "Register `NDAT1` reader"]
pub type R = crate::R<NDAT1_SPEC>;
#[doc = "Register `NDAT1` writer"]
pub type W = crate::W<NDAT1_SPEC>;
#[doc = "Field `ND0` reader - New data"]
pub type ND0_R = crate::BitReader;
#[doc = "Field `ND0` writer - New data"]
pub type ND0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND1` reader - New data"]
pub type ND1_R = crate::BitReader;
#[doc = "Field `ND1` writer - New data"]
pub type ND1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND2` reader - New data"]
pub type ND2_R = crate::BitReader;
#[doc = "Field `ND2` writer - New data"]
pub type ND2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND3` reader - New data"]
pub type ND3_R = crate::BitReader;
#[doc = "Field `ND3` writer - New data"]
pub type ND3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND4` reader - New data"]
pub type ND4_R = crate::BitReader;
#[doc = "Field `ND4` writer - New data"]
pub type ND4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND5` reader - New data"]
pub type ND5_R = crate::BitReader;
#[doc = "Field `ND5` writer - New data"]
pub type ND5_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND6` reader - New data"]
pub type ND6_R = crate::BitReader;
#[doc = "Field `ND6` writer - New data"]
pub type ND6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND7` reader - New data"]
pub type ND7_R = crate::BitReader;
#[doc = "Field `ND7` writer - New data"]
pub type ND7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND8` reader - New data"]
pub type ND8_R = crate::BitReader;
#[doc = "Field `ND8` writer - New data"]
pub type ND8_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND9` reader - New data"]
pub type ND9_R = crate::BitReader;
#[doc = "Field `ND9` writer - New data"]
pub type ND9_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND10` reader - New data"]
pub type ND10_R = crate::BitReader;
#[doc = "Field `ND10` writer - New data"]
pub type ND10_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND11` reader - New data"]
pub type ND11_R = crate::BitReader;
#[doc = "Field `ND11` writer - New data"]
pub type ND11_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND12` reader - New data"]
pub type ND12_R = crate::BitReader;
#[doc = "Field `ND12` writer - New data"]
pub type ND12_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND13` reader - New data"]
pub type ND13_R = crate::BitReader;
#[doc = "Field `ND13` writer - New data"]
pub type ND13_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND14` reader - New data"]
pub type ND14_R = crate::BitReader;
#[doc = "Field `ND14` writer - New data"]
pub type ND14_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND15` reader - New data"]
pub type ND15_R = crate::BitReader;
#[doc = "Field `ND15` writer - New data"]
pub type ND15_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND16` reader - New data"]
pub type ND16_R = crate::BitReader;
#[doc = "Field `ND16` writer - New data"]
pub type ND16_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND17` reader - New data"]
pub type ND17_R = crate::BitReader;
#[doc = "Field `ND17` writer - New data"]
pub type ND17_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND18` reader - New data"]
pub type ND18_R = crate::BitReader;
#[doc = "Field `ND18` writer - New data"]
pub type ND18_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND19` reader - New data"]
pub type ND19_R = crate::BitReader;
#[doc = "Field `ND19` writer - New data"]
pub type ND19_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND20` reader - New data"]
pub type ND20_R = crate::BitReader;
#[doc = "Field `ND20` writer - New data"]
pub type ND20_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND21` reader - New data"]
pub type ND21_R = crate::BitReader;
#[doc = "Field `ND21` writer - New data"]
pub type ND21_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND22` reader - New data"]
pub type ND22_R = crate::BitReader;
#[doc = "Field `ND22` writer - New data"]
pub type ND22_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND23` reader - New data"]
pub type ND23_R = crate::BitReader;
#[doc = "Field `ND23` writer - New data"]
pub type ND23_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND24` reader - New data"]
pub type ND24_R = crate::BitReader;
#[doc = "Field `ND24` writer - New data"]
pub type ND24_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND25` reader - New data"]
pub type ND25_R = crate::BitReader;
#[doc = "Field `ND25` writer - New data"]
pub type ND25_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND26` reader - New data"]
pub type ND26_R = crate::BitReader;
#[doc = "Field `ND26` writer - New data"]
pub type ND26_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND27` reader - New data"]
pub type ND27_R = crate::BitReader;
#[doc = "Field `ND27` writer - New data"]
pub type ND27_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND28` reader - New data"]
pub type ND28_R = crate::BitReader;
#[doc = "Field `ND28` writer - New data"]
pub type ND28_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND29` reader - New data"]
pub type ND29_R = crate::BitReader;
#[doc = "Field `ND29` writer - New data"]
pub type ND29_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND30` reader - New data"]
pub type ND30_R = crate::BitReader;
#[doc = "Field `ND30` writer - New data"]
pub type ND30_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ND31` reader - New data"]
pub type ND31_R = crate::BitReader;
#[doc = "Field `ND31` writer - New data"]
pub type ND31_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // svd2rust-generated field accessors: `nd<k>()` extracts bit k of the
    // captured register value.
    #[doc = "Bit 0 - New data"]
    #[inline(always)]
    pub fn nd0(&self) -> ND0_R {
        ND0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - New data"]
    #[inline(always)]
    pub fn nd1(&self) -> ND1_R {
        ND1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - New data"]
    #[inline(always)]
    pub fn nd2(&self) -> ND2_R {
        ND2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - New data"]
    #[inline(always)]
    pub fn nd3(&self) -> ND3_R {
        ND3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - New data"]
    #[inline(always)]
    pub fn nd4(&self) -> ND4_R {
        ND4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - New data"]
    #[inline(always)]
    pub fn nd5(&self) -> ND5_R {
        ND5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - New data"]
    #[inline(always)]
    pub fn nd6(&self) -> ND6_R {
        ND6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - New data"]
    #[inline(always)]
    pub fn nd7(&self) -> ND7_R {
        ND7_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - New data"]
    #[inline(always)]
    pub fn nd8(&self) -> ND8_R {
        ND8_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - New data"]
    #[inline(always)]
    pub fn nd9(&self) -> ND9_R {
        ND9_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - New data"]
    #[inline(always)]
    pub fn nd10(&self) -> ND10_R {
        ND10_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - New data"]
    #[inline(always)]
    pub fn nd11(&self) -> ND11_R {
        ND11_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - New data"]
    #[inline(always)]
    pub fn nd12(&self) -> ND12_R {
        ND12_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - New data"]
    #[inline(always)]
    pub fn nd13(&self) -> ND13_R {
        ND13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - New data"]
    #[inline(always)]
    pub fn nd14(&self) -> ND14_R {
        ND14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - New data"]
    #[inline(always)]
    pub fn nd15(&self) -> ND15_R {
        ND15_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - New data"]
    #[inline(always)]
    pub fn nd16(&self) -> ND16_R {
        ND16_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - New data"]
    #[inline(always)]
    pub fn nd17(&self) -> ND17_R {
        ND17_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - New data"]
    #[inline(always)]
    pub fn nd18(&self) -> ND18_R {
        ND18_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - New data"]
    #[inline(always)]
    pub fn nd19(&self) -> ND19_R {
        ND19_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - New data"]
    #[inline(always)]
    pub fn nd20(&self) -> ND20_R {
        ND20_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - New data"]
    #[inline(always)]
    pub fn nd21(&self) -> ND21_R {
        ND21_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - New data"]
    #[inline(always)]
    pub fn nd22(&self) -> ND22_R {
        ND22_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - New data"]
    #[inline(always)]
    pub fn nd23(&self) -> ND23_R {
        ND23_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - New data"]
    #[inline(always)]
    pub fn nd24(&self) -> ND24_R {
        ND24_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - New data"]
    #[inline(always)]
    pub fn nd25(&self) -> ND25_R {
        ND25_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - New data"]
    #[inline(always)]
    pub fn nd26(&self) -> ND26_R {
        ND26_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - New data"]
    #[inline(always)]
    pub fn nd27(&self) -> ND27_R {
        ND27_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - New data"]
    #[inline(always)]
    pub fn nd28(&self) -> ND28_R {
        ND28_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - New data"]
    #[inline(always)]
    pub fn nd29(&self) -> ND29_R {
        ND29_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - New data"]
    #[inline(always)]
    pub fn nd30(&self) -> ND30_R {
        ND30_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - New data"]
    #[inline(always)]
    pub fn nd31(&self) -> ND31_R {
        ND31_R::new(((self.bits >> 31) & 1) != 0)
    }
}
impl W {
    // svd2rust-generated field writers: `nd<k>()` returns a one-bit proxy
    // targeting bit k; `bits()` writes the whole register raw.
    #[doc = "Bit 0 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd0(&mut self) -> ND0_W<NDAT1_SPEC, 0> {
        ND0_W::new(self)
    }
    #[doc = "Bit 1 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd1(&mut self) -> ND1_W<NDAT1_SPEC, 1> {
        ND1_W::new(self)
    }
    #[doc = "Bit 2 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd2(&mut self) -> ND2_W<NDAT1_SPEC, 2> {
        ND2_W::new(self)
    }
    #[doc = "Bit 3 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd3(&mut self) -> ND3_W<NDAT1_SPEC, 3> {
        ND3_W::new(self)
    }
    #[doc = "Bit 4 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd4(&mut self) -> ND4_W<NDAT1_SPEC, 4> {
        ND4_W::new(self)
    }
    #[doc = "Bit 5 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd5(&mut self) -> ND5_W<NDAT1_SPEC, 5> {
        ND5_W::new(self)
    }
    #[doc = "Bit 6 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd6(&mut self) -> ND6_W<NDAT1_SPEC, 6> {
        ND6_W::new(self)
    }
    #[doc = "Bit 7 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd7(&mut self) -> ND7_W<NDAT1_SPEC, 7> {
        ND7_W::new(self)
    }
    #[doc = "Bit 8 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd8(&mut self) -> ND8_W<NDAT1_SPEC, 8> {
        ND8_W::new(self)
    }
    #[doc = "Bit 9 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd9(&mut self) -> ND9_W<NDAT1_SPEC, 9> {
        ND9_W::new(self)
    }
    #[doc = "Bit 10 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd10(&mut self) -> ND10_W<NDAT1_SPEC, 10> {
        ND10_W::new(self)
    }
    #[doc = "Bit 11 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd11(&mut self) -> ND11_W<NDAT1_SPEC, 11> {
        ND11_W::new(self)
    }
    #[doc = "Bit 12 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd12(&mut self) -> ND12_W<NDAT1_SPEC, 12> {
        ND12_W::new(self)
    }
    #[doc = "Bit 13 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd13(&mut self) -> ND13_W<NDAT1_SPEC, 13> {
        ND13_W::new(self)
    }
    #[doc = "Bit 14 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd14(&mut self) -> ND14_W<NDAT1_SPEC, 14> {
        ND14_W::new(self)
    }
    #[doc = "Bit 15 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd15(&mut self) -> ND15_W<NDAT1_SPEC, 15> {
        ND15_W::new(self)
    }
    #[doc = "Bit 16 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd16(&mut self) -> ND16_W<NDAT1_SPEC, 16> {
        ND16_W::new(self)
    }
    #[doc = "Bit 17 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd17(&mut self) -> ND17_W<NDAT1_SPEC, 17> {
        ND17_W::new(self)
    }
    #[doc = "Bit 18 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd18(&mut self) -> ND18_W<NDAT1_SPEC, 18> {
        ND18_W::new(self)
    }
    #[doc = "Bit 19 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd19(&mut self) -> ND19_W<NDAT1_SPEC, 19> {
        ND19_W::new(self)
    }
    #[doc = "Bit 20 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd20(&mut self) -> ND20_W<NDAT1_SPEC, 20> {
        ND20_W::new(self)
    }
    #[doc = "Bit 21 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd21(&mut self) -> ND21_W<NDAT1_SPEC, 21> {
        ND21_W::new(self)
    }
    #[doc = "Bit 22 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd22(&mut self) -> ND22_W<NDAT1_SPEC, 22> {
        ND22_W::new(self)
    }
    #[doc = "Bit 23 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd23(&mut self) -> ND23_W<NDAT1_SPEC, 23> {
        ND23_W::new(self)
    }
    #[doc = "Bit 24 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd24(&mut self) -> ND24_W<NDAT1_SPEC, 24> {
        ND24_W::new(self)
    }
    #[doc = "Bit 25 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd25(&mut self) -> ND25_W<NDAT1_SPEC, 25> {
        ND25_W::new(self)
    }
    #[doc = "Bit 26 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd26(&mut self) -> ND26_W<NDAT1_SPEC, 26> {
        ND26_W::new(self)
    }
    #[doc = "Bit 27 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd27(&mut self) -> ND27_W<NDAT1_SPEC, 27> {
        ND27_W::new(self)
    }
    #[doc = "Bit 28 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd28(&mut self) -> ND28_W<NDAT1_SPEC, 28> {
        ND28_W::new(self)
    }
    #[doc = "Bit 29 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd29(&mut self) -> ND29_W<NDAT1_SPEC, 29> {
        ND29_W::new(self)
    }
    #[doc = "Bit 30 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd30(&mut self) -> ND30_W<NDAT1_SPEC, 30> {
        ND30_W::new(self)
    }
    #[doc = "Bit 31 - New data"]
    #[inline(always)]
    #[must_use]
    pub fn nd31(&mut self) -> ND31_W<NDAT1_SPEC, 31> {
        ND31_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Marker type tying the generic `R`/`W` wrappers to this register; the trait
// impls below encode its width, reset value, and write semantics
// (svd2rust convention).
#[doc = "FDCAN New Data 1 Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ndat1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ndat1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct NDAT1_SPEC;
impl crate::RegisterSpec for NDAT1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ndat1::R`](R) reader structure"]
impl crate::Readable for NDAT1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ndat1::W`](W) writer structure"]
impl crate::Writable for NDAT1_SPEC {
    // Both bitmaps zero: presumably no write-1-to-clear / write-0-to-clear
    // fields in this register (generated value — confirm against the SVD).
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets NDAT1 to value 0"]
impl crate::Resettable for NDAT1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/*
Lists the private clouds, similar to:
az vmware private-cloud list --query [].id
az extension documentation:
https://docs.microsoft.com/cli/azure/ext/vmware/vmware/private-cloud?view=azure-cli-latest#ext_vmware_az_vmware_private_cloud_list
API documentation:
https://docs.microsoft.com/rest/api/vmware/privateclouds/list
cargo run --package azure_mgmt_vmware --example private_cloud_list
*/
use azure_identity::token_credentials::AzureCliCredential;
use azure_mgmt_vmware::operations::private_clouds;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Authenticate with the Azure CLI's cached login and list every AVS
    // private cloud in the CLI's current subscription.
    let http_client = azure_core::new_http_client();
    let token_credential = AzureCliCredential {};
    let subscription_id = &AzureCliCredential::get_subscription()?;
    let config = &azure_mgmt_vmware::config(http_client, Box::new(token_credential)).build();
    let clouds = private_clouds::list_in_subscription(config, subscription_id).await?;
    println!("# of private clouds {}", clouds.value.len());
    for cloud in &clouds.value {
        // `id` is the full ARM resource ID of the private cloud.
        println!("{:?}", cloud.tracked_resource.resource.id);
    }
    Ok(())
}
|
use crate::num::Real;
use std::ops::Sub;
/// Frequency ratio between two adjacent semitones in twelve-tone equal
/// temperament: 2^(1/12).
pub fn note_ratio() -> Real {
    Real::powf(2.0, 1.0 / 12.0)
}
/// The twelve pitch classes of the chromatic scale.
///
/// The discriminant is the semitone offset from C, which `Sub` relies on.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Key {
    C = 0,
    Cs = 1,
    D = 2,
    Ds = 3,
    E = 4,
    F = 5,
    Fs = 6,
    G = 7,
    Gs = 8,
    A = 9,
    As = 10,
    B = 11,
}
impl Key {
    /// Number of pitch classes in an octave.
    pub const TOTAL: usize = 12;
    // Enharmonic spellings: in equal temperament each flat (and E#/B#/Cb/Fb)
    // is the same pitch class as a sharp/natural, so they are plain aliases.
    #[allow(non_upper_case_globals)]
    pub const Db: Self = Key::Cs;
    #[allow(non_upper_case_globals)]
    pub const Eb: Self = Key::Ds;
    #[allow(non_upper_case_globals)]
    pub const Fb: Self = Key::E;
    #[allow(non_upper_case_globals)]
    pub const Es: Self = Key::F;
    #[allow(non_upper_case_globals)]
    pub const Gb: Self = Key::Fs;
    #[allow(non_upper_case_globals)]
    pub const Ab: Self = Key::Gs;
    #[allow(non_upper_case_globals)]
    pub const Bb: Self = Key::As;
    #[allow(non_upper_case_globals)]
    pub const Cb: Self = Key::B;
    #[allow(non_upper_case_globals)]
    pub const Bs: Self = Key::C;
}
impl Sub for Key {
    type Output = i8;
    /// Signed distance in semitones from `other` up to `self`
    /// (always within -11..=11).
    fn sub(self, other: Self) -> Self::Output {
        // Discriminants are 0..=11 (`#[repr(u8)]`), so the u8→i8 cast is
        // lossless and the subtraction cannot overflow.
        let lhs = self as u8 as i8;
        let rhs = other as u8 as i8;
        lhs - rhs
    }
}
/// A concrete pitch: a pitch class plus an octave number.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Pitch {
    // Pitch class within the octave.
    pub key: Key,
    // Octave number; `freq` anchors its reference A in octave 5.
    pub octave: u32,
}
impl Pitch {
    /// Frequency of this pitch, given `a5` — the reference frequency of the
    /// A in octave 5 (presumably concert A, e.g. 440.0; confirm the crate's
    /// octave-numbering convention).
    pub fn freq(self, a5: Real) -> Real {
        let a5_note = Pitch { key: Key::A, octave: 5 };
        // Whole octaves double the frequency; each remaining semitone scales
        // by 2^(1/12) (`note_ratio`).
        let octaves = self.octave as i32 - a5_note.octave as i32;
        let notes = (self.key - a5_note.key) as i32;
        a5 * Real::powi(2.0, octaves) * note_ratio().powi(notes)
    }
}
impl Sub for Pitch {
    type Output = i32;
    /// Signed distance between two pitches, in semitones.
    fn sub(self, other: Self) -> Self::Output {
        let per_octave = Key::TOTAL as i32;
        let octave_delta = self.octave as i32 - other.octave as i32;
        let key_delta = (self.key - other.key) as i32;
        octave_delta * per_octave + key_delta
    }
}
|
use std::ptr;
use std::ops::{Deref, DerefMut};
use std::ffi::CString;
use raw;
use super::effect_param;
/// Owning wrapper around a raw OBS graphics effect pointer.
pub struct Effect {
    // Null after `forget()`; `Drop` checks for null before cleanup.
    raw: *mut raw::Effect
}
impl Effect {
    /// Wraps a raw effect pointer.
    ///
    /// # Safety
    /// `raw` must be a valid effect pointer that remains usable for the
    /// lifetime of the returned value.
    pub unsafe fn from_raw(raw: *mut raw::Effect) -> Effect {
        Effect {
            raw: raw
        }
    }
    /// Looks up an effect parameter by name; `None` if no such parameter.
    ///
    /// Panics if `name` contains an interior NUL byte (CString conversion).
    pub fn param_by_name(&mut self, name: &str) -> Option<effect_param::RefMut> {
        let name = CString::new(name).expect("str contains null");
        unsafe {
            let param = raw::gs_effect_get_param_by_name(self.raw, name.as_ptr());
            effect_param::RefMut::from_raw(param)
        }
    }
    // Nulls the pointer so `Drop` becomes a no-op; used by `RefMut`, which
    // borrows rather than owns the underlying effect.
    unsafe fn forget(&mut self) {
        self.raw = ptr::null_mut();
    }
    /// Returns the underlying raw pointer without transferring ownership.
    pub fn as_raw(&self) -> *mut raw::Effect {
        self.raw
    }
}
impl Drop for Effect {
    fn drop(&mut self) {
        if !self.raw.is_null() {
            // NOTE(review): the destroy call is commented out, so dropping an
            // owned Effect currently frees nothing — confirm whether OBS owns
            // the effect (making this correct) or whether this leaks.
            // raw::rust_gs_effect_destroy(self.raw);
        }
    }
}
/// Non-owning, mutable handle to an `Effect`: derefs to `Effect` but never
/// destroys the underlying resource.
pub struct RefMut {
    inner: Effect
}
impl RefMut {
    /// Wraps a raw pointer, returning `None` for null.
    ///
    /// # Safety
    /// `raw` must be valid for the lifetime of the returned handle; the
    /// handle does not take ownership.
    pub unsafe fn from_raw(raw: *const raw::Effect) -> Option<RefMut> {
        if raw.is_null() {
            return None;
        }
        Some(RefMut {
            inner: Effect::from_raw(raw as *mut _)
        })
    }
}
impl Deref for RefMut {
    type Target = Effect;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl DerefMut for RefMut {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl Drop for RefMut {
    fn drop(&mut self) {
        // Null out the inner pointer so `Effect::drop` does not treat this
        // borrowed effect as owned.
        unsafe {
            self.inner.forget();
        }
    }
}
|
extern crate csv_multithread;
#[macro_use]
extern crate criterion;
extern crate itertools;
use std::fs;
use std::time::Duration;
use itertools::Itertools;
use criterion::*;
use csv_multithread::*;
use std::process::Command;
/// Returns a factory that builds the external C++ reference benchmark
/// command, invoked as `./target/cpp_version <filename>`.
fn cpp_version(filename: &'static str) -> impl Fn() -> Command {
    move || {
        let mut cmd = Command::new("./target/cpp_version");
        cmd.args(&[filename]);
        cmd
    }
}
fn file_size(c: &mut Criterion) {
c.bench(
"mutex",
ParameterizedBenchmark::new(
"filesize",
|b, file| { b.iter(|| mutex(format!("{}.paf", file).as_str(), 256, 4));},
(1..=6).map(|x| x*2)
)
.sample_size(40)
.warm_up_time(Duration::new(2, 0))
.throughput(|file| Throughput::Bytes(fs::metadata(format!("{}.paf", file)).unwrap().len() as u32))
);
c.bench(
"messsage",
ParameterizedBenchmark::new(
"filesize",
|b, file| { b.iter(|| mutex(format!("{}.paf", file).as_str(), 256, 4));},
(1..=6).map(|x| x*2)
)
.sample_size(40)
.warm_up_time(Duration::new(2, 0))
.throughput(|file| Throughput::Bytes(fs::metadata(format!("{}.paf", file)).unwrap().len() as u32))
);
}
/// Benchmarks both implementations on `2.paf` while varying the buffer size
/// (powers of two, 2..=4096), with the thread count fixed at 4.
fn buff_size(c: &mut Criterion) {
    c.bench(
        "mutex",
        ParameterizedBenchmark::new(
            "buffsize",
            |b, size| { b.iter(|| mutex("2.paf", *size, 4));},
            (1..=12).map(|x| (2 as usize).pow(x))
        )
        .sample_size(40)
        .warm_up_time(Duration::new(2, 0))
        // Throughput = input file size, so Criterion reports bytes/second.
        .throughput(|_| Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
    );
    c.bench(
        "messsage",
        ParameterizedBenchmark::new(
            "buffsize",
            |b, size| { b.iter(|| message("2.paf", *size, 4));},
            (1..=12).map(|x| (2 as usize).pow(x))
        )
        .sample_size(40)
        .warm_up_time(Duration::new(2, 0))
        .throughput(|_| Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
    );
}
/// Benchmarks both implementations on `2.paf` while varying the thread count
/// (2..=24 in steps of 2), with the buffer size fixed at 256.
fn nb_thread(c: &mut Criterion) {
    c.bench(
        "mutex",
        ParameterizedBenchmark::new(
            "nbthread",
            |b, thread| { b.iter(|| mutex("2.paf", 256, *thread));},
            (1..=12).map(|x| x*2)
        )
        .sample_size(40)
        .warm_up_time(Duration::new(2, 0))
        // Throughput = input file size, so Criterion reports bytes/second.
        .throughput(|_| Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
    );
    c.bench(
        "messsage",
        ParameterizedBenchmark::new(
            "nbthread",
            |b, thread| { b.iter(|| message("2.paf", 256, *thread));},
            (1..=12).map(|x| x*2)
        )
        .sample_size(40)
        .warm_up_time(Duration::new(2, 0))
        .throughput(|_| Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
    );
}
fn buffsize_nb_thread(c: &mut Criterion) {
c.bench(
"buffsize-nbthread",
ParameterizedBenchmark::new(
"mutex",
|b, param| { b.iter(|| message("2.paf", param.0, param.1));},
(1..=12).map(|x| (2 as usize).pow(x)).cartesian_product((1..=12).map(|x| x*2))
)
.sample_size(40)
.warm_up_time(Duration::new(2, 0))
.throughput(|_| Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
);
c.bench(
"buffsize-nbthread",
ParameterizedBenchmark::new(
"messsage",
|b, param| { b.iter(|| message("2.paf", param.0, param.1));},
(1..=12).map(|x| (2 as usize).pow(x)).cartesian_product((1..=12).map(|x| x*2))
)
.sample_size(40)
.warm_up_time(Duration::new(2, 0))
.throughput(|_| Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
);
}
fn compare(c: &mut Criterion) {
let mut command = Command::new("./target/cpp_version");
command.arg("2.paf");
c.bench(
"compare",
Benchmark::new("mutex", |b| { b.iter(|| mutex("2.paf", 1024, 14));})
.sample_size(100)
.warm_up_time(Duration::new(2, 0))
.throughput(Throughput::Bytes(fs::metadata("2.paf").unwrap().len() as u32))
.with_program("cpp", command)
.with_function("message", |b| {b.iter(|| message("2.paf", 1024, 14))})
.with_function("basic", |b| { b.iter(|| basic("2.paf"))})
);
}
// Register every benchmark group and generate the benchmark `main` entry.
criterion_group!(benches, file_size, buff_size, nb_thread, buffsize_nb_thread, compare);
criterion_main!(benches);
|
use std::cmp::*;
/// Stack with O(1) `top`/`get_min` reads; the cached minimum is recomputed
/// in O(n) on `pop`, but only when the minimum element leaves the stack.
struct MinStack {
    stack: Vec<i32>,
    // Cached minimum of `stack`; meaningless while the stack is empty
    // (the first `push` reinitializes it).
    m: i32,
}
impl MinStack {
    /// Creates an empty stack.
    fn new() -> Self {
        Self {
            stack: vec![],
            m: 0,
        }
    }
    /// Pushes `x`, updating the cached minimum.
    fn push(&mut self, x: i32) {
        // `is_empty()` instead of `len() == 0`, and a single expression
        // instead of branch-assignment (idiom cleanup).
        self.m = if self.stack.is_empty() { x } else { min(self.m, x) };
        self.stack.push(x);
    }
    /// Removes the top element; panics if the stack is empty.
    fn pop(&mut self) {
        let x = self.stack.pop().unwrap();
        // Only rescan when the departing element was the cached minimum.
        // `Iterator::min` replaces the original manual indexed scan.
        if x <= self.m {
            if let Some(&mm) = self.stack.iter().min() {
                self.m = mm;
            }
        }
    }
    /// Returns the top element; panics if the stack is empty.
    fn top(&self) -> i32 {
        *self.stack.last().unwrap()
    }
    /// Returns the current minimum; stale/undefined when the stack is empty.
    fn get_min(&self) -> i32 {
        self.m
    }
}
/// Check if input is a valid domain label
pub fn is_label(input: &str) -> bool {
    let mut rest = input.chars();
    // must be non-empty and begin with an alphanumeric character
    match rest.next() {
        Some(c) if c.is_ascii_alphanumeric() => {}
        _ => return false,
    }
    // index (within the remaining chars) of the final character; a dash
    // there is invalid, since labels must end alphanumerically
    let last = input.len() - 2.min(input.len());
    rest.enumerate()
        .all(|(i, c)| c.is_ascii_alphanumeric() || (c == '-' && i != last))
}
/// Check the local part of an email address (before @)
pub fn is_email_local(input: &str) -> bool {
    let mut rest = input.chars();
    // at least one character is required
    let first = match rest.next() {
        Some(c) => c,
        None => return false,
    };
    // index (within the remaining chars) of the final character
    let last = input.len() - 2.min(input.len());
    match first {
        ' ' => false,
        '.' => false,
        '"' => {
            // quoted form: the body may additionally contain the "quoted"
            // specials, and the string must close with a double quote
            input.len() > 1
                && rest.enumerate().all(|(i, c)| {
                    if i == last {
                        c == '"'
                    } else {
                        is_combined(c) || is_quoted(c)
                    }
                })
        }
        // unquoted form: dots are allowed anywhere except the very end
        _ => rest
            .enumerate()
            .all(|(i, c)| is_combined(c) || (c == '.' && i != last)),
    }
}
// these characters can be anywhere in the expression
// [[:alnum:]!#$%&'*+/=?^_`{|}~-]
fn is_global(c: char) -> bool {
    c.is_ascii_alphanumeric() || "-!#$%&'*+/=?^_`{|}~".contains(c)
}
fn is_non_ascii(c: char) -> bool {
    // anything outside the 7-bit ASCII range may also appear unquoted
    !c.is_ascii()
}
fn is_quoted(c: char) -> bool {
    // ["(),\\:;<>@\[\]. ]
    "\". (),\\:;<>@[]".contains(c)
}
fn is_combined(c: char) -> bool {
    is_global(c) || is_non_ascii(c)
}
#[cfg(test)]
mod test {
    use super::*;
    // Labels accept alphanumeric edges with dashes (even doubled) inside.
    #[test]
    fn is_label_correct() {
        for l in &["a", "ab", "a-b", "a--b", "0Z"] {
            assert!(is_label(l));
        }
    }
    // Labels reject: empty, bare/leading/trailing dash, non-alphanumerics.
    #[test]
    fn is_label_incorrect() {
        for l in &["", "-", "a-", "-b", "$"] {
            assert!(!is_label(l));
        }
    }
    // Local parts accept dotted atoms, non-ASCII, specials like `$`, and
    // quoted forms containing spaces and otherwise-forbidden characters.
    #[test]
    fn is_email_local_correct() {
        for l in &[
            "a",
            "ab",
            "a.b",
            "a\u{0080}",
            "$",
            "\"\"\"",
            "\"a b\"",
            "\" \"",
            "\"a<>@\"",
        ] {
            assert!(is_email_local(l));
        }
    }
    // Local parts reject: empty, edge spaces/dots, DEL (0x7f), and
    // unterminated or stray quotes.
    #[test]
    fn is_email_local_incorrect() {
        for l in &["", " a", "a ", "a.", ".b", "a\x7f", "\"", "\"a", "a\""] {
            assert!(!is_email_local(l));
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.