text stringlengths 8 4.13M |
|---|
//! Linear operator
use crate::solver::LinAlg;
// NOTE(review): the MathJax setup below loads polyfill.io, whose CDN changed
// hands and served malicious code in 2024 — consider self-hosting the
// polyfill or dropping that <script> tag from the rendered docs.
/// Linear operator trait.
///
/// <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
/// <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-svg.js"></script>
///
/// Expresses a linear operator \\(K: \mathbb{R}^n \to \mathbb{R}^m\\) (or a matrix \\(K \in \mathbb{R}^{m \times n}\\)).
pub trait Operator<L: LinAlg>
{
    /// Size of \\(K\\).
    ///
    /// Returns a tuple of \\(m\\) and \\(n\\).
    fn size(&self) -> (usize, usize);
    /// Calculate \\(\alpha K x + \beta y\\).
    ///
    /// * `alpha` is a scalar \\(\alpha\\).
    /// * `x` is a vector \\(x\\).
    ///   The length of `x` shall be \\(n\\).
    /// * `beta` is a scalar \\(\beta\\).
    /// * `y` is a vector \\(y\\) before entry,
    ///   \\(\alpha K x + \beta y\\) on exit.
    ///   The length of `y` shall be \\(m\\).
    fn op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl);
    /// Calculate \\(\alpha K^T x + \beta y\\).
    ///
    /// * `alpha` is a scalar \\(\alpha\\).
    /// * `x` is a vector \\(x\\).
    ///   The length of `x` shall be \\(m\\).
    /// * `beta` is a scalar \\(\beta\\).
    /// * `y` is a vector \\(y\\) before entry,
    ///   \\(\alpha K^T x + \beta y\\) on exit.
    ///   The length of `y` shall be \\(n\\).
    ///
    /// The calculation shall be equivalent to the general reference implementation shown below.
    /// ```
    /// # use num_traits::{Zero, One};
    /// # use totsu_core::solver::{SliceLike, Operator};
    /// # use totsu_core::{LinAlgEx, splitm, splitm_mut};
    /// # struct OpRef<L>(std::marker::PhantomData<L>);
    /// impl<L: LinAlgEx> Operator<L> for OpRef<L>
    /// {
    /// #   fn size(&self) -> (usize, usize) {(0, 0)}
    /// #   fn op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl) {}
    /// #   fn absadd_cols(&self, tau: &mut L::Sl) {}
    /// #   fn absadd_rows(&self, sigma: &mut L::Sl) {}
    ///     fn trans_op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl)
    ///     {
    ///         let f0 = L::F::zero();
    ///         let f1 = L::F::one();
    ///         let (m, n) = self.size();
    ///
    ///         let mut col_v = std::vec![f0; m];
    ///         let mut row_v = std::vec![f0; n];
    ///         let mut col = L::Sl::new_mut(&mut col_v);
    ///         let mut row = L::Sl::new_mut(&mut row_v);
    ///
    ///         for c in 0.. n {
    ///             row.set(c, f1);
    ///             self.op(f1, &row, f0, &mut col);
    ///             row.set(c, f0);
    ///
    ///             splitm_mut!(y, (_y_done; c), (yc; 1));
    ///             L::transform_ge(true, m, 1, alpha, &col, x, beta, &mut yc);
    ///         }
    ///     }
    /// }
    /// ```
    fn trans_op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl);
    /// Calculate \\(\left[ \tau_j + \sum_{i=0}^{m-1}|K_{ij}| \right]_{j=0,...,n-1}\\).
    ///
    /// * `tau` is a vector \\(\tau\\) before entry,
    ///   \\(\left[ \tau_j + \sum_{i=0}^{m-1}|K_{ij}| \right]_{j=0,...,n-1}\\) on exit.
    ///   The length of `tau` shall be \\(n\\).
    ///
    /// The calculation shall be equivalent to the general reference implementation shown below.
    /// ```
    /// # use num_traits::{Zero, One};
    /// # use totsu_core::solver::{SliceLike, LinAlg, Operator};
    /// # struct OpRef<L>(std::marker::PhantomData<L>);
    /// impl<L: LinAlg> Operator<L> for OpRef<L>
    /// {
    /// #   fn size(&self) -> (usize, usize) {(0, 0)}
    /// #   fn op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl) {}
    /// #   fn trans_op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl) {}
    /// #   fn absadd_rows(&self, sigma: &mut L::Sl) {}
    ///     fn absadd_cols(&self, tau: &mut L::Sl)
    ///     {
    ///         let f0 = L::F::zero();
    ///         let f1 = L::F::one();
    ///         let (m, n) = self.size();
    ///
    ///         let mut col_v = std::vec![f0; m];
    ///         let mut row_v = std::vec![f0; n];
    ///         let mut col = L::Sl::new_mut(&mut col_v);
    ///         let mut row = L::Sl::new_mut(&mut row_v);
    ///
    ///         for c in 0.. tau.len() {
    ///             row.set(c, f1);
    ///             self.op(f1, &row, f0, &mut col);
    ///             row.set(c, f0);
    ///
    ///             let val_tau = tau.get(c) + L::abssum(&col, 1);
    ///             tau.set(c, val_tau);
    ///         }
    ///     }
    /// }
    /// ```
    fn absadd_cols(&self, tau: &mut L::Sl);
    /// Calculate \\(\left[ \sigma_i + \sum_{j=0}^{n-1}|K_{ij}| \right]_{i=0,...,m-1}\\).
    ///
    /// * `sigma` is a vector \\(\sigma\\) before entry,
    ///   \\(\left[ \sigma_i + \sum_{j=0}^{n-1}|K_{ij}| \right]_{i=0,...,m-1}\\) on exit.
    ///   The length of `sigma` shall be \\(m\\).
    ///
    /// The calculation shall be equivalent to the general reference implementation shown below.
    /// ```
    /// # use num_traits::{Zero, One};
    /// # use totsu_core::solver::{SliceLike, LinAlg, Operator};
    /// # struct OpRef<L>(std::marker::PhantomData<L>);
    /// impl<L: LinAlg> Operator<L> for OpRef<L>
    /// {
    /// #   fn size(&self) -> (usize, usize) {(0, 0)}
    /// #   fn op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl) {}
    /// #   fn trans_op(&self, alpha: L::F, x: &L::Sl, beta: L::F, y: &mut L::Sl) {}
    /// #   fn absadd_cols(&self, tau: &mut L::Sl) {}
    ///     fn absadd_rows(&self, sigma: &mut L::Sl)
    ///     {
    ///         let f0 = L::F::zero();
    ///         let f1 = L::F::one();
    ///         let (m, n) = self.size();
    ///
    ///         let mut col_v = std::vec![f0; m];
    ///         let mut row_v = std::vec![f0; n];
    ///         let mut col = L::Sl::new_mut(&mut col_v);
    ///         let mut row = L::Sl::new_mut(&mut row_v);
    ///
    ///         for r in 0.. sigma.len() {
    ///             col.set(r, f1);
    ///             self.trans_op(f1, &col, f0, &mut row);
    ///             col.set(r, f0);
    ///
    ///             let val_sigma = sigma.get(r) + L::abssum(&row, 1);
    ///             sigma.set(r, val_sigma);
    ///         }
    ///     }
    /// }
    /// ```
    fn absadd_rows(&self, sigma: &mut L::Sl);
}
|
struct Solution;
impl Solution {
    /// Returns the perimeter of the island in `grid` (1 = land, 0 = water).
    ///
    /// Every land cell contributes 4 edges; each edge shared with the land
    /// cell above or to the left is counted twice, so subtract 2 per shared
    /// edge.
    pub fn island_perimeter(grid: Vec<Vec<i32>>) -> i32 {
        let mut perimeter = 0;
        for r in 0..grid.len() {
            for c in 0..grid[0].len() {
                if grid[r][c] == 1 {
                    perimeter += 4;
                    // Shared edge with the land cell above cancels two sides.
                    if r > 0 && grid[r - 1][c] == 1 {
                        perimeter -= 2;
                    }
                    // Same cancellation for the land cell to the left.
                    if c > 0 && grid[r][c - 1] == 1 {
                        perimeter -= 2;
                    }
                }
            }
        }
        perimeter
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_island_perimeter() {
        let grid = vec![
            vec![0, 1, 0, 0],
            vec![1, 1, 1, 0],
            vec![0, 1, 0, 0],
            vec![1, 1, 0, 0],
        ];
        assert_eq!(Solution::island_perimeter(grid), 16);
    }
    #[test]
    fn test_single_cell_island() {
        // Smallest possible island: one cell with all four edges exposed.
        assert_eq!(Solution::island_perimeter(vec![vec![1]]), 4);
    }
    #[test]
    fn test_all_water() {
        // No land at all: the perimeter must be zero.
        assert_eq!(Solution::island_perimeter(vec![vec![0, 0], vec![0, 0]]), 0);
    }
}
|
/**
 * Common constants for glfw. You shouldn't have to access these directly as each module
 * publicly exports them.
 */
use core::libc::c_int;
// NOTE(review): this file uses pre-1.0 Rust idioms (`pub static` for constants
// and `core::libc`); a modern binding would use `pub const` and the `libc`
// crate. Left unchanged to match the dialect the rest of the file targets.
/* GLFW version */
pub static VERSION_MAJOR : c_int = 3;
pub static VERSION_MINOR : c_int = 0;
pub static VERSION_REVISION : c_int = 0;
/* Not actually defined in GLFW, but very useful */
pub static FALSE : c_int = 0;
pub static TRUE : c_int = 1;
/* Key and button state/action definitions */
pub static RELEASE : c_int = 0;
pub static PRESS : c_int = 1;
pub static REPEAT : c_int = 2;
/* Printable keys (values match their ASCII codes) */
pub static KEY_SPACE : c_int = 32;
pub static KEY_APOSTROPHE : c_int = 39;
pub static KEY_COMMA : c_int = 44;
pub static KEY_MINUS : c_int = 45;
pub static KEY_PERIOD : c_int = 46;
pub static KEY_SLASH : c_int = 47;
pub static KEY_0 : c_int = 48;
pub static KEY_1 : c_int = 49;
pub static KEY_2 : c_int = 50;
pub static KEY_3 : c_int = 51;
pub static KEY_4 : c_int = 52;
pub static KEY_5 : c_int = 53;
pub static KEY_6 : c_int = 54;
pub static KEY_7 : c_int = 55;
pub static KEY_8 : c_int = 56;
pub static KEY_9 : c_int = 57;
pub static KEY_SEMICOLON : c_int = 59;
pub static KEY_EQUAL : c_int = 61;
pub static KEY_A : c_int = 65;
pub static KEY_B : c_int = 66;
pub static KEY_C : c_int = 67;
pub static KEY_D : c_int = 68;
pub static KEY_E : c_int = 69;
pub static KEY_F : c_int = 70;
pub static KEY_G : c_int = 71;
pub static KEY_H : c_int = 72;
pub static KEY_I : c_int = 73;
pub static KEY_J : c_int = 74;
pub static KEY_K : c_int = 75;
pub static KEY_L : c_int = 76;
pub static KEY_M : c_int = 77;
pub static KEY_N : c_int = 78;
pub static KEY_O : c_int = 79;
pub static KEY_P : c_int = 80;
pub static KEY_Q : c_int = 81;
pub static KEY_R : c_int = 82;
pub static KEY_S : c_int = 83;
pub static KEY_T : c_int = 84;
pub static KEY_U : c_int = 85;
pub static KEY_V : c_int = 86;
pub static KEY_W : c_int = 87;
pub static KEY_X : c_int = 88;
pub static KEY_Y : c_int = 89;
pub static KEY_Z : c_int = 90;
pub static KEY_LEFT_BRACKET : c_int = 91;
pub static KEY_BACKSLASH : c_int = 92;
pub static KEY_RIGHT_BRACKET : c_int = 93;
pub static KEY_GRAVE_ACCENT : c_int = 96;
pub static KEY_WORLD_1 : c_int = 161;
pub static KEY_WORLD_2 : c_int = 162;
/* Function keys */
pub static KEY_ESCAPE : c_int = 256;
pub static KEY_ENTER : c_int = 257;
pub static KEY_TAB : c_int = 258;
pub static KEY_BACKSPACE : c_int = 259;
pub static KEY_INSERT : c_int = 260;
pub static KEY_DELETE : c_int = 261;
pub static KEY_RIGHT : c_int = 262;
pub static KEY_LEFT : c_int = 263;
pub static KEY_DOWN : c_int = 264;
pub static KEY_UP : c_int = 265;
pub static KEY_PAGE_UP : c_int = 266;
pub static KEY_PAGE_DOWN : c_int = 267;
pub static KEY_HOME : c_int = 268;
pub static KEY_END : c_int = 269;
pub static KEY_CAPS_LOCK : c_int = 280;
pub static KEY_SCROLL_LOCK : c_int = 281;
pub static KEY_NUM_LOCK : c_int = 282;
pub static KEY_PRINT_SCREEN : c_int = 283;
pub static KEY_PAUSE : c_int = 284;
pub static KEY_F1 : c_int = 290;
pub static KEY_F2 : c_int = 291;
pub static KEY_F3 : c_int = 292;
pub static KEY_F4 : c_int = 293;
pub static KEY_F5 : c_int = 294;
pub static KEY_F6 : c_int = 295;
pub static KEY_F7 : c_int = 296;
pub static KEY_F8 : c_int = 297;
pub static KEY_F9 : c_int = 298;
pub static KEY_F10 : c_int = 299;
pub static KEY_F11 : c_int = 300;
pub static KEY_F12 : c_int = 301;
pub static KEY_F13 : c_int = 302;
pub static KEY_F14 : c_int = 303;
pub static KEY_F15 : c_int = 304;
pub static KEY_F16 : c_int = 305;
pub static KEY_F17 : c_int = 306;
pub static KEY_F18 : c_int = 307;
pub static KEY_F19 : c_int = 308;
pub static KEY_F20 : c_int = 309;
pub static KEY_F21 : c_int = 310;
pub static KEY_F22 : c_int = 311;
pub static KEY_F23 : c_int = 312;
pub static KEY_F24 : c_int = 313;
pub static KEY_F25 : c_int = 314;
pub static KEY_KP_0 : c_int = 320;
pub static KEY_KP_1 : c_int = 321;
pub static KEY_KP_2 : c_int = 322;
pub static KEY_KP_3 : c_int = 323;
pub static KEY_KP_4 : c_int = 324;
pub static KEY_KP_5 : c_int = 325;
pub static KEY_KP_6 : c_int = 326;
pub static KEY_KP_7 : c_int = 327;
pub static KEY_KP_8 : c_int = 328;
pub static KEY_KP_9 : c_int = 329;
pub static KEY_KP_DECIMAL : c_int = 330;
pub static KEY_KP_DIVIDE : c_int = 331;
pub static KEY_KP_MULTIPLY : c_int = 332;
pub static KEY_KP_SUBTRACT : c_int = 333;
pub static KEY_KP_ADD : c_int = 334;
pub static KEY_KP_ENTER : c_int = 335;
pub static KEY_KP_EQUAL : c_int = 336;
pub static KEY_LEFT_SHIFT : c_int = 340;
pub static KEY_LEFT_CONTROL : c_int = 341;
pub static KEY_LEFT_ALT : c_int = 342;
pub static KEY_LEFT_SUPER : c_int = 343;
pub static KEY_RIGHT_SHIFT : c_int = 344;
pub static KEY_RIGHT_CONTROL : c_int = 345;
pub static KEY_RIGHT_ALT : c_int = 346;
pub static KEY_RIGHT_SUPER : c_int = 347;
pub static KEY_MENU : c_int = 348;
pub static KEY_LAST : c_int = KEY_MENU;
/* Mouse buttons */
pub static MOUSE_BUTTON_1 : c_int = 0;
pub static MOUSE_BUTTON_2 : c_int = 1;
pub static MOUSE_BUTTON_3 : c_int = 2;
pub static MOUSE_BUTTON_4 : c_int = 3;
pub static MOUSE_BUTTON_5 : c_int = 4;
pub static MOUSE_BUTTON_6 : c_int = 5;
pub static MOUSE_BUTTON_7 : c_int = 6;
pub static MOUSE_BUTTON_8 : c_int = 7;
pub static MOUSE_BUTTON_LEFT : c_int = MOUSE_BUTTON_1;
pub static MOUSE_BUTTON_RIGHT : c_int = MOUSE_BUTTON_2;
pub static MOUSE_BUTTON_MIDDLE : c_int = MOUSE_BUTTON_3;
pub static MOUSE_BUTTON_LAST : c_int = MOUSE_BUTTON_8;
/* Joysticks */
pub static JOYSTICK_1 : c_int = 0;
pub static JOYSTICK_2 : c_int = 1;
pub static JOYSTICK_3 : c_int = 2;
pub static JOYSTICK_4 : c_int = 3;
pub static JOYSTICK_5 : c_int = 4;
pub static JOYSTICK_6 : c_int = 5;
pub static JOYSTICK_7 : c_int = 6;
pub static JOYSTICK_8 : c_int = 7;
pub static JOYSTICK_9 : c_int = 8;
pub static JOYSTICK_10 : c_int = 9;
pub static JOYSTICK_11 : c_int = 10;
pub static JOYSTICK_12 : c_int = 11;
pub static JOYSTICK_13 : c_int = 12;
pub static JOYSTICK_14 : c_int = 13;
pub static JOYSTICK_15 : c_int = 14;
pub static JOYSTICK_16 : c_int = 15;
pub static JOYSTICK_LAST : c_int = JOYSTICK_16;
/* glfwGetWindowParam tokens */
pub static FOCUSED : c_int = 0x00020001;
pub static ICONIFIED : c_int = 0x00020002;
pub static CONTEXT_REVISION : c_int = 0x00020004;
/* glfwWindowHint tokens */
pub static RED_BITS : c_int = 0x00021000;
pub static GREEN_BITS : c_int = 0x00021001;
pub static BLUE_BITS : c_int = 0x00021002;
pub static ALPHA_BITS : c_int = 0x00021003;
pub static DEPTH_BITS : c_int = 0x00021004;
pub static STENCIL_BITS : c_int = 0x00021005;
pub static ACCUM_RED_BITS : c_int = 0x00021006;
pub static ACCUM_GREEN_BITS : c_int = 0x00021007;
pub static ACCUM_BLUE_BITS : c_int = 0x00021008;
pub static ACCUM_ALPHA_BITS : c_int = 0x00021009;
pub static AUX_BUFFERS : c_int = 0x0002100A;
pub static STEREO : c_int = 0x0002100B;
pub static SAMPLES : c_int = 0x0002100C;
pub static SRGB_CAPABLE : c_int = 0x0002100D;
/* Used with both glfwGetWindowParam and glfwWindowHint */
pub static CLIENT_API : c_int = 0x00022000;
pub static CONTEXT_VERSION_MAJOR : c_int = 0x00022001;
pub static CONTEXT_VERSION_MINOR : c_int = 0x00022002;
pub static CONTEXT_ROBUSTNESS : c_int = 0x00022003;
pub static OPENGL_FORWARD_COMPAT : c_int = 0x00022004;
pub static OPENGL_DEBUG_CONTEXT : c_int = 0x00022005;
pub static OPENGL_PROFILE : c_int = 0x00022006;
pub static RESIZABLE : c_int = 0x00022007;
pub static VISIBLE : c_int = 0x00022008;
pub static UNDECORATED : c_int = 0x00022009;
/* GLFW_CLIENT_API tokens */
pub static OPENGL_API : c_int = 0x00000001;
pub static OPENGL_ES_API : c_int = 0x00000002;
/* GLFW_CONTEXT_ROBUSTNESS mode tokens */
pub static NO_ROBUSTNESS : c_int = 0x00000000;
pub static NO_RESET_NOTIFICATION : c_int = 0x00000001;
pub static LOSE_CONTEXT_ON_RESET : c_int = 0x00000002;
/* GLFW_OPENGL_PROFILE bit tokens */
pub static OPENGL_NO_PROFILE : c_int = 0x00000000;
pub static OPENGL_CORE_PROFILE : c_int = 0x00000001;
pub static OPENGL_COMPAT_PROFILE : c_int = 0x00000002;
/* glfwGetInputMode/glfwSetInputMode tokens */
pub static CURSOR_MODE : c_int = 0x00030001;
pub static STICKY_KEYS : c_int = 0x00030002;
pub static STICKY_MOUSE_BUTTONS : c_int = 0x00030003;
/* GLFW_CURSOR_MODE values */
pub static CURSOR_NORMAL : c_int = 0x00040001;
pub static CURSOR_HIDDEN : c_int = 0x00040002;
pub static CURSOR_CAPTURED : c_int = 0x00040003;
/* glfwGetJoystickParam tokens */
pub static PRESENT : c_int = 0x00050001;
pub static AXES : c_int = 0x00050002;
pub static BUTTONS : c_int = 0x00050003;
/* glfwGetError/glfwErrorString tokens */
pub static NOT_INITIALIZED : c_int = 0x00070001;
pub static NO_CURRENT_CONTEXT : c_int = 0x00070002;
pub static INVALID_ENUM : c_int = 0x00070003;
pub static INVALID_VALUE : c_int = 0x00070004;
pub static OUT_OF_MEMORY : c_int = 0x00070005;
pub static API_UNAVAILABLE : c_int = 0x00070006;
pub static VERSION_UNAVAILABLE : c_int = 0x00070007;
pub static PLATFORM_ERROR : c_int = 0x00070008;
pub static FORMAT_UNAVAILABLE : c_int = 0x00070009;
/* Gamma ramps */
pub static GAMMA_RAMP_SIZE : c_int = 256;
/* Monitor event tokens */
pub static CONNECTED : c_int = 0x00061000;
pub static DISCONNECTED : c_int = 0x00061001;
use crypto::PublicKey;
use assets::AssetBundle;
use transactions::components::service::SERVICE_ID;
use error::{Error, ErrorKind};
/// Transaction ID of the `delete_assets` message within this service.
pub const DELETE_ASSETS_ID: u16 = 400;
evo_message! {
    /// `delete_assets` transaction.
    ///
    /// NOTE(review): presumably deletes the listed `assets` owned by
    /// `pub_key` — confirm against the service's execute handler. `seed`
    /// distinguishes otherwise-identical transactions.
    struct DeleteAssets {
        const TYPE = SERVICE_ID;
        const ID = DELETE_ASSETS_ID;
        pub_key: &PublicKey,
        assets: Vec<AssetBundle>,
        seed: u64,
    }
}
/// Builder-style FFI wrapper that accumulates the data needed to construct
/// a `DeleteAssets` transaction message.
#[derive(Clone, Debug)]
pub struct DeleteAssetsWrapper {
    public_key: PublicKey,
    assets: Vec<AssetBundle>,
    seed: u64,
}
impl DeleteAssetsWrapper {
    /// Creates a wrapper with no assets queued yet.
    pub fn new(public_key: &PublicKey, seed: u64) -> Self {
        DeleteAssetsWrapper {
            public_key: *public_key,
            assets: Vec::new(),
            // Field shorthand replaces the redundant `seed: seed`.
            seed,
        }
    }
    /// Reborrows a raw pointer handed over an FFI boundary.
    ///
    /// Returns an error instead of dereferencing null. The caller must
    /// guarantee that a non-null `wrapper` points to a live, properly
    /// aligned `DeleteAssetsWrapper` that is not aliased for `'a`.
    pub fn from_ptr<'a>(
        wrapper: *mut DeleteAssetsWrapper,
    ) -> Result<&'a mut DeleteAssetsWrapper, Error> {
        if wrapper.is_null() {
            return Err(Error::new(ErrorKind::Text(
                "wrapper isn't initialized".to_string(),
            )));
        }
        // SAFETY: null was ruled out above; validity and aliasing are the
        // caller's contract (see doc comment).
        Ok(unsafe { &mut *wrapper })
    }
    /// Queues one more asset for deletion.
    pub fn add_asset(&mut self, asset: AssetBundle) {
        self.assets.push(asset);
    }
    /// Builds the final `DeleteAssets` message from the collected state.
    /// (Note: unrelated to `Option::unwrap`; it never panics.)
    pub fn unwrap(&self) -> DeleteAssets {
        DeleteAssets::new(&self.public_key, self.assets.clone(), self.seed)
    }
}
|
use std::collections::HashMap;
pub fn solve_v1() -> i32 {
let data = super::load_file("day6.txt");
let groups: Vec<&str> = data.split("\n\n").collect();
let mut count = 0;
for group in groups {
let mut chars: HashMap<u8, i32> = HashMap::new();
let answers = group.as_bytes();
for &answer in answers {
if answer != b'\n' {
let count = chars.entry(answer).or_insert(0);
*count += 1;
}
}
count += chars.len() as i32;
}
count
}
/// Day 6 part 2: for each group, count the questions to which *everyone*
/// answered "yes", and sum over all groups.
pub fn solve_v2() -> i32 {
    let data = super::load_file("day6.txt");
    let mut count = 0;
    for group in data.split("\n\n") {
        let mut tally: HashMap<u8, i32> = HashMap::new();
        // Count members as non-empty lines. The previous separator-based
        // count ('\n' occurrences + 1) over-counted any group carrying a
        // trailing newline (typically the last group of a file ending in
        // '\n'), which made its unanimous answers impossible to match.
        let mut members = 0;
        for line in group.lines() {
            if line.is_empty() {
                continue;
            }
            members += 1;
            for &answer in line.as_bytes() {
                *tally.entry(answer).or_insert(0) += 1;
            }
        }
        // A question is unanimous when every member answered it.
        count += tally.values().filter(|&&v| v == members).count() as i32;
    }
    count
}
|
use std::{fmt::Debug, ops::Index};
/// A Span is the information of a piece of source code inside a file.
///
/// `Span`s are only meaningful when indexing the file it is originated from.
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Span {
    /// The start index (in bytes or other meaningful item index)
    /// in the file of this span
    pub idx: usize,
    /// The length of the span
    pub len: usize,
}
/// A zero-width span at the start of the file, used as a placeholder.
pub const DUMMY_SPAN: Span = Span { idx: 0, len: 0 };
impl Span {
    /// First index covered by the span (inclusive).
    pub const fn start(&self) -> usize {
        self.idx
    }
    /// One past the last index covered by the span (exclusive).
    pub const fn end(&self) -> usize {
        self.idx + self.len
    }
    /// Builds a span from a start index and a length.
    pub const fn new(idx: usize, len: usize) -> Span {
        Span { idx, len }
    }
    /// Builds a span from two indices, normalizing their order so the
    /// arguments may be given in either order.
    pub fn new_idx(lo: usize, hi: usize) -> Span {
        let (lo, hi) = if lo > hi { (hi, lo) } else { (lo, hi) };
        Span { idx: lo, len: hi - lo }
    }
    /// A zero-width span denoting end-of-file.
    pub const fn eof() -> Span {
        Span {
            // `usize::MAX` replaces the deprecated `usize::max_value()`.
            idx: usize::MAX,
            len: 0,
        }
    }
}
impl std::ops::Add for Span {
    type Output = Span;
    /// Union-like merge: the smallest span covering both operands.
    fn add(self, rhs: Self) -> Self::Output {
        let start = std::cmp::min(self.start(), rhs.start());
        let end = std::cmp::max(self.end(), rhs.end());
        Span::new_idx(start, end)
    }
}
impl std::ops::AddAssign for Span {
    fn add_assign(&mut self, rhs: Self) {
        *self = *self + rhs
    }
}
impl Debug for Span {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Rendered as a half-open range `[start, end)`.
        write!(f, "[{}, {})", self.idx, self.idx + self.len)
    }
}
impl Default for Span {
    fn default() -> Self {
        DUMMY_SPAN
    }
}
impl<T> Index<Span> for Vec<T> {
    type Output = [T];
    /// Lets a `Span` index directly into a vector: `&v[span]`.
    fn index(&self, index: Span) -> &Self::Output {
        &self[index.idx..(index.idx + index.len)]
    }
}
impl From<logos::Span> for Span {
    /// Converts a `logos` lexer span (a `Range<usize>`) into our `Span`.
    fn from(s: logos::Span) -> Self {
        Span::new_idx(s.start, s.end)
    }
}
|
use std::convert::TryFrom;
use std::time::Instant;
fn main() {
    // Run both variants on the same image so the printed timings can be
    // compared side by side.
    let img_path = "flower.jpeg";
    invert_colours_threaded(img_path);
    invert_colours(img_path);
}
// Inverts every colour channel of the image at `img_path` using a pool of
// scoped threads, times the inversion, and writes the result to
// "inverted_t.png".
fn invert_colours_threaded (img_path: &str) {
    let img = image::open(img_path).unwrap().to_rgb();
    let img_width = img.width();
    let img_height = img.height();
    let num_threads = 5;
    let num_threads_u = usize::try_from(num_threads).unwrap();
    // Convert image into a vector of pixels (flat buffer of channel bytes)
    let mut pixel_values = img.into_vec();
    // Determine an approximately equal sized chunk that will be assigned to each thread.
    // NOTE(review): integer division truncates, so `chunks_mut` may yield one
    // extra (smaller) trailing chunk; the pool just queues it after the first
    // five. Also, `size_chunk` would be 0 (and `chunks_mut` would panic) for a
    // buffer smaller than `num_threads` bytes — confirm inputs are real images.
    let size_chunk = pixel_values.len() / (num_threads_u);
    // Create thread pool of 5 threads, each for a given scope
    let mut pool = scoped_threadpool::Pool::new(num_threads);
    let now = Instant::now();
    // Each scoped thread can reference things outside of closure
    pool.scoped(|scope| {
        // Assign each (non-overlapping) chunk to a thread in pool
        for chunk in pixel_values.chunks_mut(size_chunk) {
            scope.execute(move || {
                // Inversion of an 8-bit channel is 255 - value.
                for pixel in chunk.iter_mut() {
                    *pixel = 255 - *pixel;
                }
            })
        }
    });
    let elapsed = now.elapsed();
    println!("Time elapsed for colour inversion (threaded): {:?}", elapsed);
    let _ = image::save_buffer("inverted_t.png", &pixel_values, img_width, img_height, image::ColorType::Rgb8).unwrap();
}
fn invert_colours(img_path: &str) {
let img = image::open(img_path).unwrap().to_rgb();
let img_width = img.width();
let img_height = img.height();
let mut pixel_values = img.into_vec();
let now = Instant::now();
// Iterate through entire pixel vector sequentially to invert colour value
for i in 0 .. pixel_values.len() {
let tmp = pixel_values[i];
pixel_values[i] = 255 - tmp;
}
let elapsed = now.elapsed();
println!("Time elapsed for colour inversion (non-threaded): {:?}", elapsed);
let _ = image::save_buffer("inverted.png", &pixel_values, img_width, img_height, image::ColorType::Rgb8).unwrap();
} |
use std::io::{TcpListener, TcpStream, Acceptor, Listener};
// Per-connection loop (pre-1.0 Rust std::io API): dump whatever the client
// sends and answer every successful read with a WebSocket upgrade response.
fn handle(mut stream: TcpStream) {
    println!("New client {}", stream.peer_name());
    let mut buf = [0u8, ..4096];
    loop {
        match stream.read(buf) {
            Ok(0) => break,
            Ok(n) => {
                println!("{}", std::str::from_utf8(buf.slice(0, n)));
                // NOTE(review): a real handshake must derive the accept value
                // from the client's Sec-WebSocket-Key header (RFC 6455), not a
                // hard-coded placeholder — confirm this is a stub on purpose.
                let key = "keeeeey";
                let proto = "chat";
                let response = upgrade_response(key, proto);
                println!("{}", response);
                stream.write(response.as_bytes()).unwrap();
            },
            Err(_) => break
        };
    }
}
/// Formats an HTTP "101 Switching Protocols" response that upgrades the
/// connection to WebSocket, echoing the given accept key and subprotocol.
///
/// NOTE(review): RFC 6455 requires `Sec-WebSocket-Accept` to be the
/// base64-encoded SHA-1 of the client key concatenated with the WebSocket
/// GUID; the caller currently passes a raw key — confirm.
fn upgrade_response<'a>(key: &'a str, protocol: &'a str) -> String {
    let mut response = String::new();
    response.push_str("HTTP/1.1 101 Switching Protocols\r\n");
    response.push_str("Upgrade: websocket\r\n");
    response.push_str("Connection: Upgrade\r\n");
    response.push_str("Sec-WebSocket-Accept: ");
    response.push_str(key);
    response.push_str("\r\nSec-WebSocket-Protocol: ");
    response.push_str(protocol);
    response.push_str("\r\n\r\n");
    response
}
fn main() {
    // Pre-1.0 Rust networking: bind, turn the listener into an acceptor,
    // and spawn a green task per incoming connection.
    let listener = TcpListener::bind("127.0.0.1", 8080);
    let mut acceptor = listener.listen();
    for stream in acceptor.incoming() {
        match stream {
            Err(e) => { panic!("fuck me running! {}", e) }
            Ok(stream) => spawn(proc() {
                handle(stream)
            })
        }
    }
    // NOTE(review): unreachable while `incoming()` loops forever.
    drop(acceptor);
}
|
#[macro_use]
extern crate lazy_static;
extern crate parking_lot;
extern crate smallvec;
extern crate uuid;
mod btree;
mod fs;
mod id;
|
extern crate libc;
use std::ptr;
use std::ffi::CStr;
use std::ffi::CString;
use std::thread::sleep;
use std::time::Duration;
use libc::c_int;
use libc::c_void;
mod ffi;
pub use self::ffi::VslTransaction;
pub use self::ffi::VslReason;
pub use self::ffi::VslType;
/// VsmType allows you to specify the location of the shared memory, as well
/// as its type
pub enum VsmType<'a> {
    /// A Varnish instance is running with this name
    Active(&'a str),
    /// Use an abandoned vsm file
    Stale(&'a str),
    /// Use the default vsm location
    Default
}
/// Describe the different types of data you can retrieve using `VsmData::stats`
// Mapped from the single-character semantics field of the C API
// ('b', 'c', 'g'; anything else becomes `Unknown`).
pub enum Semantics {
    /// Bitmap value
    Bitmap,
    /// Counter, never decreases
    Counter,
    /// Gauge, goes up and down
    Gauge,
    /// Couldn't make out the type, but, well, here you go
    Unknown
}
/// One item retrieved from the VSM
pub struct Stats {
    /// Name constructed using the type, possibly the ident of the C entry,
    /// suffixed by the C description name
    pub name: String,
    /// Value of that item
    pub value: u64,
    /// Semantics (bitmap/counter/gauge) of that item
    pub semantics: Semantics
}
/// VSMData objects allow you to connect to the Varnish Shared Memory space
/// and iterate over all exposed counters
pub struct VsmData {
    // Opaque handle returned by VSM_New; owned by this struct and released
    // in Drop via VSM_Delete.
    vsm: *const c_void,
}
impl VsmData {
    /// Creates a new VsmData object and maybe points it to a specific
    /// vsm file.
    pub fn new(t: VsmType) -> Result<VsmData, &str> {
        let p;
        unsafe {
            p = ffi::VSM_New();
        }
        if p.is_null() {
            return Err("VSL_New return an empty pointer");
        }
        // Pass the equivalent of varnishstat's -n/-N argument down to the C
        // library. The temporary CString lives until the end of the whole
        // statement, so the pointer stays valid during the VSC_Arg call.
        match t {
            VsmType::Active(s) => unsafe {
                assert!(ffi::VSC_Arg(p, 'n' as c_int,
                                     CString::new(s).unwrap().
                                     as_ptr()) > 0);
            },
            VsmType::Stale(s) => unsafe {
                assert!(ffi::VSC_Arg(p, 'N' as c_int,
                                     CString::new(s).unwrap().
                                     as_ptr()) > 0);
            },
            VsmType::Default => {},
        }
        let mut vd = VsmData { vsm: p };
        if vd.open() == 0 {
            Ok(vd)
        } else {
            let message = vd.error();
            vd.reset_error();
            Err(message)
        }
    }
    /// Actually connects the object to the log. It may fail for various
    /// reasons, but you'll have to look at the error string to know which
    /// one.
    fn open(&mut self) -> c_int {
        assert!(!self.vsm.is_null());
        unsafe {
            ffi::VSM_Open(self.vsm)
        }
    }
    /// Returns the last error recorded by the C library, or "No error".
    // NOTE(review): the 'static lifetime on the returned &str is suspect —
    // the buffer is owned by the C side and could be invalidated by later
    // VSM calls; confirm against libvarnishapi's ownership rules.
    fn error(&self) -> &'static str {
        assert!(!self.vsm.is_null());
        let s;
        unsafe {
            s = ffi::VSM_Error(self.vsm);
        }
        if s.is_null() {
            "No error"
        } else {
            unsafe {
                CStr::from_ptr(s).to_str().unwrap()
                //CStr::from_ptr(s).to_string_lossy().into_owned()
            }
        }
    }
    /// Clears the C library's recorded error state.
    fn reset_error(&mut self) {
        assert!(!self.vsm.is_null());
        unsafe { ffi::VSM_ResetError(self.vsm); };
    }
    /// Calls provided callback on each stat item returned
    pub fn stat_iter<F>(&self, cb: F)
        where F: FnMut(&ffi::VsmEntry) -> bool {
        assert!(!self.vsm.is_null());
        // The closure is smuggled through the C API as an opaque pointer;
        // `stat_bounce::<F>` casts it back on the other side.
        let cb = &cb as *const _ as *const c_void;
        unsafe {
            ffi::VSC_Iter(self.vsm,
                          ptr::null(),
                          ffi::stat_bounce::<F>,
                          &cb as *const _ as *mut c_void);
        }
    }
    /// returns a vector of owned `Stats` (segfaults for the moment...)
    pub fn stats(&self) -> Vec<Stats> {
        assert!(!self.vsm.is_null());
        let mut v = Vec::new();
        self.stat_iter(
            |e: &ffi::VsmEntry| -> bool {
                // Build "type[.ident].name" from the C entry.
                let mut name = e.t.to_string();
                if !e.ident.is_empty() {
                    name = name + "." + e.ident;
                }
                name = name + "." + e.desc.name;
                println!("{}: {}", name, e.value);
                v.push(Stats {
                    name: name,
                    value: e.value,
                    semantics: match e.desc.semantics {
                        'b' => Semantics::Bitmap,
                        'c' => Semantics::Counter,
                        'g' => Semantics::Gauge,
                        _ => Semantics::Unknown,
                    }
                });
                // Returning true asks the C iterator to keep going.
                true
            }
        );
        v
    }
    /// Reads the log and call provided callback on each `&VslTransaction`
    /// slice returned.
    pub fn log_iter<F>(&self, cb: F)
        where F: FnMut(& [&VslTransaction]) -> bool {
        // TODO: replace assert with errors
        let cb = &cb as *const _ as *const c_void;
        let vsl = unsafe { ffi::VSL_New() };
        assert!(!vsl.is_null());
        let cur = unsafe { ffi::VSL_CursorVSM(vsl, self.vsm, 3) };
        assert!(!cur.is_null());
        let vslq = unsafe { ffi::VSLQ_New(vsl, ptr::null(), 1, ptr::null()) };
        assert!(!vslq.is_null());
        unsafe { ffi::VSLQ_SetCursor(vslq, &cur) };
        loop {
            // Dispatch: 1 = records handled, 0 = log momentarily empty
            // (back off briefly), anything else = stop.
            match unsafe {ffi::VSLQ_Dispatch(vslq,
                                             ffi::log_bounce::<F>,
                                             &cb as *const _ as *mut c_void)} {
                1 => { continue; }
                0 => {
                    sleep(Duration::from_millis(10));
                    continue;
                }
                _ => { break; }
            }
        }
        unsafe { ffi::VSLQ_Delete(&vslq) };
        // NOTE(review): VSL_DeleteCursor is handed `vslq`, not `cur` — this
        // looks like the wrong pointer; confirm against the varnishapi docs.
        unsafe { ffi::VSL_DeleteCursor(vslq) };
        unsafe { ffi::VSL_Delete(vsl) };
    }
    /// Example of iter_log use.
    pub fn log(&self) {
        self.log_iter( |pt| -> bool {
            for t in pt {
                println!("=> vxid: {}", t.vxid);
                println!("=> vxid_parent: {}", t.vxid_parent);
                println!("=> type: {}", t.typ);
                println!("=> reason: {}", t.reason);
                for c in *t {
                    println!("{:8}\t{:8}\t{}", c.get_stag(),
                             c.get_ntag(),
                             c.get_string());
                }
            }
            true
        }
        );
    }
    /// Return the file location being used.
    pub fn name(&self) -> String {
        unsafe {
            let s = ffi::VSM_Name(self.vsm);
            CStr::from_ptr(s).to_string_lossy().into_owned()
        }
    }
    /// Check if the VsmData object is open
    pub fn is_open(&self) -> bool {
        assert!(!self.vsm.is_null());
        unsafe { ffi::VSM_IsOpen(self.vsm).is_positive() }
    }
    /// Check if the Varnish instance dropped the VSM
    pub fn is_abandoned(&self) -> bool {
        assert!(!self.vsm.is_null());
        unsafe { ffi::VSM_Abandoned(self.vsm).is_positive() }
    }
    /// Close the VSM connection
    pub fn close(&mut self) {
        assert!(!self.vsm.is_null());
        unsafe { ffi::VSM_Close(self.vsm); };
    }
}
impl Drop for VsmData {
    // Releases the underlying VSM handle when the wrapper goes away.
    fn drop(&mut self) {
        unsafe { ffi::VSM_Delete(self.vsm);}
    }
}
|
use diesel::prelude::*;
use sl_lib::models::*;
use sl_lib::*; // delete it later and use filter for tera with rocket later instead
use sl_lib::crud::create_user;
use sl_lib::custom::{str_from_stdin, none_if_empty_or_string};
use console::Style;
// use console::{style, Style};
// pub struct User {
// pub id: i32,
// pub first_name: String,
// pub last_name: String,
// pub email: String,
// pub password: String,
// pub avatar: Option<String>,
// pub youtube_channel: Option<String>,
// }
/// Interactively prompts on stdin for the fields of a new user and inserts
/// the resulting `User` row into the database, echoing the assigned id.
pub fn write() {
    let cyan = Style::new().cyan();
    let bold = Style::new().bold();
    use schema::users::dsl::users;
    let connection = init_pool().get().unwrap();
    println!("{}", bold.apply_to("What is your [First Name]?"));
    let user_first_name = str_from_stdin();
    println!("{}", bold.apply_to("What is your [Last Name]?"));
    let user_last_name = str_from_stdin();
    println!("{}", bold.apply_to("What is your [E-Mail]?"));
    let user_email = str_from_stdin();
    println!("{}", cyan.apply_to("Then, what [Password] you want to have?"));
    let user_password = str_from_stdin();
    // Optional fields: empty input becomes None.
    println!("What do you want for your profile image?{}", bold.apply_to("(Not Required)"));
    let user_avatar = none_if_empty_or_string(str_from_stdin());
    println!("What is your youtube channel id?{}", bold.apply_to("(Not Required)"));
    let user_youtube_channel = none_if_empty_or_string(str_from_stdin());
    let new_user = create_user(
        user_first_name,
        user_last_name,
        user_email,
        user_password,
        user_avatar,
        user_youtube_channel,
    );
    let user = diesel::insert_into(users)
        .values(new_user)
        .get_result::<User>(&*connection)
        // Fixed copy-pasted message: this inserts a user, not posts.
        .expect("Error inserting user");
    println!("Save User {} {} with id {}", user.first_name, user.last_name, user.id);
}
|
pub mod geometry;
pub mod lightning;
pub mod system;
pub mod frame;
pub mod rendering;
|
use crate::crypto::utils;
use std::vec::Vec;
/// ChaCha20 stream cipher state (RFC 8439).
pub struct ChaCha20 {
    key: [u8; 32],
    nonce: [u8; 12],
    // 32-bit block counter, incremented once per generated keystream block.
    block_count: u32,
    current_state: [u32; 16],
    // The former `modulus: u64` field is gone: addition modulo 2^32 is
    // exactly `u32::wrapping_add`, so no explicit modulus is needed.
}
impl ChaCha20 {
    /// Builds the initial 16-word state laid out per RFC 8439 section 2.3:
    /// four constant words, eight key words, the block counter, and three
    /// nonce words (all little-endian).
    fn create_state(key: [u8; 32], nonce: [u8; 12], block_count: u32) -> [u32; 16] {
        let mut new_state: [u32; 16] = [
            0x61707865, 0x3320646E, 0x79622D32, 0x6B206574,
            0, 0, 0, 0,
            0, 0, 0, 0,
            block_count,
            utils::bytes_to_word(&nonce[0..4]),
            utils::bytes_to_word(&nonce[4..8]),
            utils::bytes_to_word(&nonce[8..12])
        ];
        // Words 4..12 hold the 256-bit key, four bytes per word.
        for i in 0..8 {
            let index = i * 4;
            new_state[4 + i] = utils::bytes_to_word(&key[index..index + 4]);
        }
        new_state
    }
    /// Applies one quarter-round to the state words at `a`, `b`, `c`, `d`
    /// and returns the updated copy of the state.
    fn update_state(&self, mut working_state: [u32; 16], a: usize, b: usize, c: usize, d: usize) -> [u32; 16] {
        let [qa, qb, qc, qd] = self.quarter_round(working_state[a], working_state[b], working_state[c], working_state[d]);
        working_state[a] = qa;
        working_state[b] = qb;
        working_state[c] = qc;
        working_state[d] = qd;
        working_state
    }
    /// The ChaCha quarter-round (RFC 8439 section 2.1): add / xor / rotate.
    fn quarter_round(&self, mut a: u32, mut b: u32, mut c: u32, mut d: u32) -> [u32; 4] {
        a = a.wrapping_add(b);
        d = (d ^ a).rotate_left(16);
        c = c.wrapping_add(d);
        b = (b ^ c).rotate_left(12);
        a = a.wrapping_add(b);
        d = (d ^ a).rotate_left(8);
        c = c.wrapping_add(d);
        b = (b ^ c).rotate_left(7);
        [a, b, c, d]
    }
    /// Produces the next 64-byte keystream block and advances the counter.
    pub fn chacha_block(&mut self) -> [u8; 64] {
        self.current_state = Self::create_state(self.key, self.nonce, self.block_count);
        let mut working_state = self.current_state;
        // 10 double rounds = 20 rounds total.
        for _ in 0..10 {
            // Column round
            working_state = self.update_state(working_state, 0, 4, 8, 12);
            working_state = self.update_state(working_state, 1, 5, 9, 13);
            working_state = self.update_state(working_state, 2, 6, 10, 14);
            working_state = self.update_state(working_state, 3, 7, 11, 15);
            // Diagonal round
            working_state = self.update_state(working_state, 0, 5, 10, 15);
            working_state = self.update_state(working_state, 1, 6, 11, 12);
            working_state = self.update_state(working_state, 2, 7, 8, 13);
            working_state = self.update_state(working_state, 3, 4, 9, 14);
        }
        // RFC 8439 adds the original state to the permuted state modulo
        // 2^32 — on u32 that is exactly `wrapping_add`, replacing the old
        // widen-to-u64 / explicit-modulus arithmetic.
        for i in 0..16 {
            self.current_state[i] = self.current_state[i].wrapping_add(working_state[i]);
        }
        // Serialize the 16 words little-endian into the 64-byte keystream.
        let mut keystream: [u8; 64] = [0; 64];
        for i in 0..16 {
            keystream[i * 4..i * 4 + 4].copy_from_slice(&self.current_state[i].to_le_bytes());
        }
        self.block_count += 1;
        keystream
    }
    /// XORs `plaintext` with the keystream; encryption and decryption are
    /// the same operation.
    pub fn encrypt_stream(&mut self, plaintext: Vec<u8>) -> Vec<u8> {
        let mut ciphertext: Vec<u8> = Vec::new();
        let mut keystream: [u8; 64] = [0; 64];
        for i in 0..plaintext.len() {
            // Refresh the keystream at each 64-byte block boundary.
            if i % 64 == 0 {
                keystream = self.chacha_block();
            }
            ciphertext.push(plaintext[i] ^ keystream[i % 64]);
        }
        ciphertext
    }
    /// Creates a cipher instance with the block counter at zero.
    pub fn new(key: [u8; 32], nonce: [u8; 12]) -> ChaCha20 {
        ChaCha20 {
            key,
            nonce,
            block_count: 0,
            current_state: ChaCha20::create_state(key, nonce, 0),
        }
    }
}
|
use derive_more::From;
#[derive(Clone, Debug)]
pub struct Foo;
#[derive(Clone, Debug)]
pub struct Bar;
// Using #[derive(From)] from derive_more saves us implementing the
// following by hand for each variant:
//
// impl From<T> for Val {
//     fn from(input: T) -> Self {
//         Val::T(T)
//     }
// }
//
// where T can be Foo or Bar
#[derive(Clone, Debug, From)]
pub enum Val {
    Foo(Foo),
    Bar(Bar),
}
#[derive(Clone, Debug)]
pub struct Rec {
val: Val,
}
impl Rec {
    /// Create a record holding `Val::Foo(Foo)`.
    pub fn new() -> Self {
        Rec { val: Foo.into() }
    }
    /// Store `val`, accepting anything convertible into a [`Val`]
    /// (`Foo`, `Bar`, or a `Val` itself via the blanket `Into` impl).
    pub fn set_val<V>(&mut self, val: V)
    where
        V: Into<Val>,
    {
        self.val = val.into();
    }
}
/// Demonstrate the `Into<Val>` conversions accepted by `Rec::set_val`.
fn main() {
    let mut rec = Rec::new();
    println!("{:?}", &rec);
    // Each payload is converted to `Val` on the way into `set_val`.
    for val in vec![Val::from(Bar), Val::from(Foo), Val::Bar(Bar)] {
        rec.set_val(val);
        println!("{:?}", &rec);
    }
}
|
use anyhow::Result;
mod arguments;
mod commands;
mod configuration;
mod error;
/// Entry point: delegate to the argument parser/dispatcher; any error it
/// returns is converted into `anyhow`'s report type by `?`.
fn main() -> Result<()> {
    arguments::run()?;
    Ok(())
}
|
use std::sync::Arc;
use vulkano::device::Queue;
use vulkano::format::ClearValue;
use vulkano::format::Format;
use vulkano::framebuffer::RenderPass;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::{
AttachmentDescription, LoadOp, PassDependencyDescription, PassDescription, RenderPassDesc,
RenderPassDescClearValues, StoreOp,
};
use vulkano::image::ImageLayout;
use vulkano::sync::AccessFlagBits;
use vulkano::sync::PipelineStages;
/// Plain-data description of a render pass: the attachment, subpass and
/// dependency lists that back the `RenderPassDesc` implementation below.
struct DefaultRenderPassDesc {
    pub attachments: Vec<AttachmentDescription>,
    pub subpasses: Vec<PassDescription>,
    pub dependencies: Vec<PassDependencyDescription>,
}
// SAFETY: counts and per-index accessors are all backed by the same vectors,
// so the description the trait observes is internally consistent.
unsafe impl RenderPassDesc for DefaultRenderPassDesc {
    /// Total number of attachments.
    #[inline]
    fn num_attachments(&self) -> usize {
        self.attachments.len()
    }
    /// Attachment `id`, or `None` when `id` is out of range.
    #[inline]
    fn attachment_desc(&self, id: usize) -> Option<AttachmentDescription> {
        // `get` performs the bounds check the hand-written `if` used to do.
        self.attachments.get(id).cloned()
    }
    /// Total number of subpasses.
    #[inline]
    fn num_subpasses(&self) -> usize {
        self.subpasses.len()
    }
    /// Subpass `id`, or `None` when `id` is out of range.
    #[inline]
    fn subpass_desc(&self, id: usize) -> Option<PassDescription> {
        self.subpasses.get(id).cloned()
    }
    /// Total number of subpass dependencies.
    #[inline]
    fn num_dependencies(&self) -> usize {
        self.dependencies.len()
    }
    /// Dependency `id`, or `None` when `id` is out of range.
    #[inline]
    fn dependency_desc(&self, id: usize) -> Option<PassDependencyDescription> {
        self.dependencies.get(id).cloned()
    }
}
// SAFETY: vulkano requires the clear values to match the attachments that
// use `LoadOp::Clear`; see the FIXME below.
unsafe impl RenderPassDescClearValues<Vec<ClearValue>> for DefaultRenderPassDesc {
    /// Pass the caller-supplied clear values straight through.
    ///
    /// Uses an explicit `dyn` trait object; the bare `Box<Iterator<...>>`
    /// form is deprecated (edition 2018's `bare_trait_objects` lint).
    fn convert_clear_values(&self, values: Vec<ClearValue>) -> Box<dyn Iterator<Item = ClearValue>> {
        // FIXME: safety checks — validate count/types of `values` against
        // the attachment list before handing them to vulkano.
        Box::new(values.into_iter())
    }
}
/// Build the two-subpass deferred-shading render pass:
///
/// * subpass 0 renders geometry into the G-buffer (position, normal,
///   albedo + depth);
/// * subpass 1 composes the final image, reading the G-buffer as input
///   attachments and writing to the swapchain image.
///
/// # Panics
/// Panics if the Vulkan render pass cannot be created.
pub fn build_render_pass(
    gfx_queue: &Arc<Queue>,
    final_output_format: Format,
) -> Arc<dyn RenderPassAbstract + Send + Sync> {
    // The three G-buffer targets share every property except the format.
    let gbuffer_attachment = |format: Format| AttachmentDescription {
        format,
        samples: 1,
        load: LoadOp::Clear,
        store: StoreOp::DontCare,
        stencil_load: LoadOp::DontCare,
        stencil_store: StoreOp::DontCare,
        initial_layout: ImageLayout::Undefined,
        final_layout: ImageLayout::ColorAttachmentOptimal,
    };
    let attachments = vec![
        // 0: Position
        gbuffer_attachment(Format::R16G16B16A16Sfloat),
        // 1: Normal
        gbuffer_attachment(Format::R16G16B16A16Sfloat),
        // 2: Albedo
        gbuffer_attachment(Format::R8G8B8A8Unorm),
        // 3: Final color, stored and presented to the swapchain.
        AttachmentDescription {
            format: final_output_format,
            samples: 1,
            load: LoadOp::Clear,
            store: StoreOp::Store,
            stencil_load: LoadOp::Clear,
            stencil_store: StoreOp::Store,
            initial_layout: ImageLayout::Undefined,
            final_layout: ImageLayout::PresentSrc,
        },
        // 4: Depth
        AttachmentDescription {
            format: Format::D16Unorm,
            samples: 1,
            load: LoadOp::Clear,
            store: StoreOp::Store,
            stencil_load: LoadOp::Clear,
            stencil_store: StoreOp::Store,
            initial_layout: ImageLayout::Undefined,
            final_layout: ImageLayout::DepthStencilAttachmentOptimal,
        },
    ];
    let subpasses = vec![
        // Subpass 0: deferred rendering into the G-buffer.
        PassDescription {
            color_attachments: vec![
                (0, ImageLayout::ColorAttachmentOptimal), // Position
                (1, ImageLayout::ColorAttachmentOptimal), // Normal
                (2, ImageLayout::ColorAttachmentOptimal), // Albedo
            ],
            depth_stencil: Some((4, ImageLayout::DepthStencilAttachmentOptimal)),
            input_attachments: Vec::new(),
            resolve_attachments: Vec::new(),
            preserve_attachments: Vec::new(),
        },
        // Subpass 1: composition, sampling the G-buffer as input attachments.
        PassDescription {
            color_attachments: vec![(3, ImageLayout::ColorAttachmentOptimal)],
            depth_stencil: None,
            input_attachments: vec![
                (0, ImageLayout::ShaderReadOnlyOptimal), // Position
                (1, ImageLayout::ShaderReadOnlyOptimal), // Normal
                (2, ImageLayout::ShaderReadOnlyOptimal), // Albedo
            ],
            resolve_attachments: Vec::new(),
            preserve_attachments: Vec::new(),
        },
    ];
    let dependencies = vec![
        // External -> subpass 0: earlier work must finish before the
        // G-buffer color writes start.
        PassDependencyDescription {
            source_subpass: vk::SUBPASS_EXTERNAL as usize,
            destination_subpass: 0,
            source_stages: PipelineStages {
                bottom_of_pipe: true,
                ..PipelineStages::none()
            },
            destination_stages: PipelineStages {
                color_attachment_output: true,
                ..PipelineStages::none()
            },
            source_access: AccessFlagBits {
                memory_read: true,
                ..AccessFlagBits::none()
            },
            destination_access: AccessFlagBits {
                color_attachment_read: true,
                color_attachment_write: true,
                ..AccessFlagBits::none()
            },
            by_region: true,
        },
        // Subpass 0 -> subpass 1: G-buffer writes visible before the
        // composition fragment shader reads them.
        PassDependencyDescription {
            source_subpass: 0,
            destination_subpass: 1,
            source_stages: PipelineStages {
                color_attachment_output: true,
                ..PipelineStages::none()
            },
            destination_stages: PipelineStages {
                fragment_shader: true,
                ..PipelineStages::none()
            },
            source_access: AccessFlagBits {
                color_attachment_write: true,
                ..AccessFlagBits::none()
            },
            destination_access: AccessFlagBits {
                shader_read: true,
                ..AccessFlagBits::none()
            },
            by_region: true,
        },
        // Subpass 0 -> external.
        // NOTE(review): the source here is subpass 0, not the final
        // composition subpass (1) — confirm that is intended.
        PassDependencyDescription {
            source_subpass: 0,
            destination_subpass: vk::SUBPASS_EXTERNAL as usize,
            source_stages: PipelineStages {
                color_attachment_output: true,
                ..PipelineStages::none()
            },
            destination_stages: PipelineStages {
                bottom_of_pipe: true,
                ..PipelineStages::none()
            },
            source_access: AccessFlagBits {
                color_attachment_read: true,
                color_attachment_write: true,
                ..AccessFlagBits::none()
            },
            destination_access: AccessFlagBits {
                memory_read: true,
                ..AccessFlagBits::none()
            },
            by_region: true,
        },
    ];
    let render_pass_description = DefaultRenderPassDesc {
        attachments,
        subpasses,
        dependencies,
    };
    Arc::new(RenderPass::new(gfx_queue.device().clone(), render_pass_description).unwrap())
}
|
use std::{
cmp::{max, min},
fmt::Display,
sync::Arc,
sync::{
atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering},
Mutex,
},
task::Poll,
thread,
time::Duration,
};
use futures::FutureExt;
use futures_timer::Delay;
use prost::Message;
use rand::Rng;
use tokio::{
runtime::Builder,
sync::{
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
oneshot::{channel, Receiver, Sender},
},
};
use tokio_stream::{Stream, StreamExt};
use tonic::{transport::Channel, Code, Request, Response, Status};
/// Randomized election timeout (600–800 ms) so peers rarely tie.
fn election_timeout() -> Duration {
    Duration::from_millis(rand::thread_rng().gen_range(600, 800))
}
/// Fixed leader heartbeat period (50 ms), well below the election timeout.
fn heartbeat_timeout() -> Duration {
    Duration::from_millis(50)
}
use super::persister::*;
use crate::preclude::*;
// use crate::rpc::raft_service::*;
/// Message delivered to the state machine over the apply channel.
#[derive(Message, Clone)]
pub struct ApplyMsg {
    /// True for ordinary log commands; false for snapshot installs
    /// (see `become_leader` / `handle_install_snapshot`).
    #[prost(bool, tag = "1")]
    pub command_valid: bool,
    /// Serialized command payload (or the snapshot bytes when
    /// `command_valid` is false).
    #[prost(bytes, tag = "2")]
    pub command: Vec<u8>,
    /// Log index the command was committed at (0 for snapshots).
    #[prost(uint64, tag = "3")]
    pub command_index: u64,
}
/// On-disk image of the Raft state that must survive restarts
/// (see Figure 2 of the Raft paper).
#[derive(Message, Clone)]
pub struct Persistent {
    /// Latest term this peer has seen.
    #[prost(uint64, tag = "1")]
    pub current_term: u64,
    /// Peer voted for in `current_term`; -1 encodes "no vote"
    /// (see `persist`'s `map_or(-1, ...)`).
    #[prost(int32, tag = "2")]
    pub voted_for: i32,
    /// In-memory log entries (entries before the snapshot are dropped).
    #[prost(message, repeated, tag = "3")]
    pub log: Vec<LogEntry>,
    /// Index of the last entry covered by the snapshot.
    #[prost(uint64, tag = "4")]
    pub last_included_index: u64,
    /// Term of the last entry covered by the snapshot.
    #[prost(uint64, tag = "5")]
    pub last_included_term: u64,
}
/// The three protocol roles a Raft peer can be in.
#[derive(PartialEq, Clone)]
enum RaftRole {
    Follower,
    Candidate,
    Leader,
}
// A single RaftInner peer.
// A single RaftInner peer: all protocol state plus the channels and
// timers that drive it (the peer itself is polled as a `Stream`).
struct RaftInner {
    // RPC end points of all peers
    peers: Vec<RaftRpcClient<Channel>>,
    // Object to hold this peer's persisted state
    persister: Arc<dyn Persister>,
    // this peer's index into peers[]
    me: usize,
    // Persistent state on all servers.
    // Updated on stable storage (via `persist`) before responding to RPCs.
    current_term: Arc<AtomicU64>,
    voted_for: Option<usize>,
    log: Vec<LogEntry>,
    // Auxiliary state: current role, plus a shared flag so spawned
    // replication tasks can observe leadership loss.
    role: RaftRole,
    is_leader: Arc<AtomicBool>,
    // Volatile state on all servers
    commit_index: Arc<AtomicU64>,
    last_applied: Arc<AtomicU64>,
    // Volatile state on leader; reinitialized after election.
    // Each entry is an Arc so per-peer RPC tasks can update it.
    next_index: Vec<Arc<AtomicU64>>,
    match_index: Vec<Arc<AtomicU64>>,
    // Persistent state added for snapshot support: index/term of the
    // last entry compacted into the snapshot.
    last_included_index: Arc<AtomicU64>,
    last_included_term: Arc<AtomicU64>,
    // Size in bytes of the last persisted state (updated by `persist`).
    raft_state_size: Arc<AtomicU64>,
    // Event channel back into this peer, and the apply channel out to
    // the state machine.
    sender: UnboundedSender<RaftEvent>,
    apply_ch: UnboundedSender<ApplyMsg>,
    // Read-only (linearizable read) request bookkeeping.
    read_only: super::read_only::ReadOnly,
    // Stream plumbing: incoming events plus the election/apply timers.
    receiver: UnboundedReceiver<RaftEvent>,
    timeout: Delay,
    apply_msg_delay: Delay,
}
impl Display for RaftInner {
    /// One-line status summary used throughout the log output.
    /// Role names are padded so the log columns line up.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let role = match self.role {
            RaftRole::Follower => "Follower ",
            RaftRole::Candidate => "Candidate",
            RaftRole::Leader => "Leader ",
        };
        write!(
            f,
            "[{} {}] [Term {}] [Snap {} {}] [Log {} {}] [Commit {} {}]",
            role,
            self.me,
            self.current_term.load(Ordering::SeqCst),
            self.last_included_index.load(Ordering::SeqCst),
            self.last_included_term.load(Ordering::SeqCst),
            self.log.len(),
            // Term of the last in-memory entry, 0 when the log is empty.
            self.log.last().map_or(0, |v| v.term),
            self.commit_index.load(Ordering::SeqCst),
            self.last_applied.load(Ordering::SeqCst),
        )
    }
}
impl RaftInner {
    /// Create a Raft peer.
    ///
    /// `peers` holds RPC clients for every server (including this one);
    /// `peers[me]` is this server. `persister` stores — and initially
    /// supplies — the durable state; `apply_ch` is where committed
    /// commands are delivered. Returns the peer plus a sender for
    /// injecting [`RaftEvent`]s. Must return quickly.
    pub fn new(
        peers: Vec<RaftRpcClient<Channel>>,
        me: usize,
        persister: Arc<dyn Persister>,
        apply_ch: UnboundedSender<ApplyMsg>,
    ) -> (RaftInner, UnboundedSender<RaftEvent>) {
        let raft_state = persister.raft_state();
        let peers_num = peers.len();
        let (sender, receiver) = unbounded_channel();
        let mut rf = RaftInner {
            peers,
            persister,
            me,
            current_term: Arc::new(AtomicU64::new(0)),
            voted_for: None,
            log: Vec::new(),
            is_leader: Arc::new(AtomicBool::new(false)),
            role: RaftRole::Follower,
            commit_index: Arc::new(AtomicU64::new(0)),
            last_applied: Arc::new(AtomicU64::new(0)),
            last_included_index: Arc::new(AtomicU64::new(0)),
            last_included_term: Arc::new(AtomicU64::new(0)),
            raft_state_size: Arc::new(AtomicU64::new(0)),
            // One shared counter per peer for replication progress.
            next_index: (0..peers_num).map(|_| Arc::new(AtomicU64::new(0))).collect(),
            match_index: (0..peers_num).map(|_| Arc::new(AtomicU64::new(0))).collect(),
            sender: sender.clone(),
            apply_ch,
            read_only: super::read_only::ReadOnly::new(),
            receiver,
            timeout: Delay::new(election_timeout()),
            apply_msg_delay: Delay::new(heartbeat_timeout()),
        };
        // Recover term/vote/log state persisted before a crash, if any.
        rf.restore(&raft_state);
        debug!("{} Started!", rf);
        (rf, sender)
    }
/// Serialize term, vote, log and snapshot markers, record the encoded
/// size in `raft_state_size`, and hand the bytes to the persister
/// (Figure 2 of the Raft paper lists what must be durable).
fn persist(&mut self) {
    let per = Persistent {
        current_term: self.current_term.load(Ordering::SeqCst),
        // `None` is encoded as -1 (protobuf has no optional int here).
        voted_for: self.voted_for.map_or(-1, |v| v as i32),
        log: self.log.clone(),
        last_included_index: self.last_included_index.load(Ordering::SeqCst),
        last_included_term: self.last_included_term.load(Ordering::SeqCst),
    };
    let mut buf = Vec::new();
    per.encode(&mut buf).unwrap();
    self.raft_state_size
        .store(buf.len() as u64, Ordering::SeqCst);
    self.persister.save_raft_state(buf);
}
/// Atomically save both the serialized Raft state and `snapshot`
/// to stable storage.
///
/// NOTE(review): unlike `persist`, this does not refresh
/// `raft_state_size` after encoding — confirm whether that is intended.
fn persist_with_snapshot(&mut self, snapshot: Vec<u8>) {
    let mut data = Vec::new();
    let per = Persistent {
        current_term: self.current_term.load(Ordering::SeqCst),
        // `None` is encoded as -1.
        voted_for: self.voted_for.map_or(-1, |v| v as i32),
        log: self.log.clone(),
        last_included_index: self.last_included_index.load(Ordering::SeqCst),
        last_included_term: self.last_included_term.load(Ordering::SeqCst),
    };
    per.encode(&mut data).unwrap();
    self.persister.save_state_and_snapshot(data, snapshot);
}
/// Restore previously persisted state.
///
/// # Panics
/// Panics if `data` is non-empty but cannot be decoded as [`Persistent`].
fn restore(&mut self, data: &[u8]) {
    if data.is_empty() {
        // bootstrap without any state?
        return;
    }
    match Persistent::decode(data) {
        Ok(o) => {
            self.current_term = Arc::new(AtomicU64::new(o.current_term));
            // BUG FIX: the condition was inverted (`o.voted_for < 0 =>
            // Some(...)`), which turned the -1 "no vote" sentinel into a
            // huge usize and discarded real votes. A vote exists only
            // when the persisted value is non-negative.
            self.voted_for = if o.voted_for >= 0 {
                Some(o.voted_for as usize)
            } else {
                None
            };
            self.log = o.log;
            self.last_included_index = Arc::new(AtomicU64::new(o.last_included_index));
            self.last_included_term = Arc::new(AtomicU64::new(o.last_included_term));
            // Everything up to the snapshot boundary is committed/applied.
            self.commit_index = Arc::new(AtomicU64::new(o.last_included_index));
            self.last_applied = Arc::new(AtomicU64::new(o.last_included_index));
        }
        Err(e) => {
            panic!("{:?}", e);
        }
    }
}
/// Push every committed-but-unapplied entry onto the apply channel,
/// advancing `last_applied` one entry at a time.
fn send_apply_msg(&mut self) {
    while !self.apply_ch.is_closed()
        && self.last_applied.load(Ordering::SeqCst) < self.commit_index.load(Ordering::SeqCst)
    {
        // Offset of the next entry within the in-memory log (entries up
        // to last_included_index live only in the snapshot).
        let index = (self.last_applied.load(Ordering::SeqCst)
            - self.last_included_index.load(Ordering::SeqCst)) as usize;
        let msg = ApplyMsg {
            command_valid: true,
            command: self.log[index].command.to_owned(),
            // Raft log indices are 1-based, hence the +1.
            command_index: self.last_applied.load(Ordering::SeqCst) + 1,
        };
        self.apply_ch.send(msg).expect("Unable send ApplyMsg");
        self.last_applied.fetch_add(1, Ordering::SeqCst);
        if self.is_leader.load(Ordering::SeqCst) {
            info!(
                "{} Apply command: [ApplyMsg {} Term {}]",
                self,
                self.last_applied.load(Ordering::SeqCst),
                self.log[index].term
            );
        }
    }
}
/// Append `command` to the leader's log.
///
/// Returns the `(index, term)` assigned to the new entry, or
/// `KvError::NotLeader` when this peer is not the leader.
fn start(&mut self, command: &[u8]) -> Result<(u64, u64)> {
    // Next 1-based log index, accounting for snapshot compaction.
    let index = self.log.len() as u64 + self.last_included_index.load(Ordering::SeqCst) + 1;
    let term = self.current_term.load(Ordering::SeqCst);
    if self.is_leader.load(Ordering::SeqCst) {
        self.log.push(LogEntry {
            command: command.to_owned(),
            index,
            term,
        });
        info!(
            "{} Receive a Command! Append to [log {} {}]",
            self, index, term
        );
        // Store into the shared counter instead of replacing the Arc:
        // replacing it would silently detach any clone already handed to
        // an in-flight replication task. (`index` is already u64, so the
        // old `index as u64` cast was a no-op and is dropped.)
        self.match_index[self.me].store(index, Ordering::SeqCst);
        self.persist();
        Ok((index, term))
    } else {
        Err(KvError::NotLeader)
    }
}
/// Begin a linearizable read-only request at the current commit index.
///
/// Only succeeds on a leader that has committed an entry in its own
/// term; the heartbeat round doubles as the quorum check for the read.
fn start_read_only(&mut self, command: &[u8]) -> Result<(u64, u64)> {
    let commit_index = self.commit_index.load(Ordering::SeqCst);
    if self.is_leader.load(Ordering::SeqCst) {
        // NOTE(review): `term(commit_index)` returns None when the index
        // is outside the log bounds, making this unwrap panic — confirm
        // the invariant commit_index <= last_index always holds here.
        if self.term(commit_index).unwrap() == self.current_term.load(Ordering::SeqCst) {
            self.read_only.add_request(commit_index, command.to_owned());
            self.send_heart_beat_all();
            Ok((commit_index, self.last_term()))
        } else {
            Err(KvError::StringError(
                "New Leader without commit".to_string(),
            ))
        }
    } else {
        Err(KvError::NotLeader)
    }
}
/// Deliver the buffered read-only results `msgs` to the state machine,
/// all tagged with the commit index `index` they were served at.
fn apply_read_only(&mut self, index: u64, msgs: Vec<Vec<u8>>) {
    if self.apply_ch.is_closed() {
        return;
    }
    for command in msgs {
        let msg = ApplyMsg {
            command_valid: true,
            command,
            command_index: index,
        };
        self.apply_ch.send(msg).expect("Unable send ApplyMsg");
    }
}
/// `(index, term)` of the last log entry; with an empty in-memory log
/// both values fall back to the snapshot markers.
fn get_last_log_info(&self) -> (u64, u64) {
    let index = self.last_included_index.load(Ordering::SeqCst) + self.log.len() as u64;
    let term = self
        .log
        .last()
        .map_or(self.last_included_term.load(Ordering::SeqCst), |e| e.term);
    (index, term)
}
/// Transition to leader for `term` and reinitialize leader-only state.
fn become_leader(&mut self, term: u64) {
    self.current_term.store(term, Ordering::SeqCst);
    self.role = RaftRole::Leader;
    self.is_leader.store(true, Ordering::SeqCst);
    let (index, _term) = self.get_last_log_info();
    // NOTE(review): replacing the Arcs (rather than `store`ing into
    // them) detaches any clones still held by replication tasks from a
    // previous leadership — confirm this is intended.
    for i in 0..self.peers.len() {
        self.next_index[i] = Arc::new(AtomicU64::new(index + 1));
        self.match_index[i] = Arc::new(AtomicU64::new(0));
    }
    self.match_index[self.me] = Arc::new(AtomicU64::new(index));
    self.persist();
    // Restart apply progress from the snapshot boundary and replay the
    // snapshot to the state machine (command_valid = false marks it).
    let last_included_index = self.last_included_index.load(Ordering::SeqCst);
    self.commit_index
        .store(last_included_index, Ordering::SeqCst);
    self.last_applied
        .store(last_included_index, Ordering::SeqCst);
    let snapshot = self.persister.snapshot();
    let msg = ApplyMsg {
        command_valid: false,
        command: snapshot,
        command_index: 0,
    };
    self.apply_ch.send(msg).expect("Unable send ApplyMsg");
    info!("{} Become Leader", self);
}
/// Step down to follower at `term` and persist the new term.
fn become_follower(&mut self, term: u64) {
    self.current_term.store(term, Ordering::SeqCst);
    // NOTE: `voted_for` is deliberately left unchanged here; callers
    // decide whether to grant a vote for the new term.
    self.is_leader.store(false, Ordering::SeqCst);
    self.role = RaftRole::Follower;
    self.persist();
    debug!("{} Become Follower", self);
}
/// Start an election: bump the term, vote for self, persist, then
/// solicit votes from every peer.
fn become_candidate(&mut self) {
    self.current_term.fetch_add(1, Ordering::SeqCst);
    self.is_leader.store(false, Ordering::SeqCst);
    self.role = RaftRole::Candidate;
    self.voted_for = Some(self.me);
    self.persist();
    debug!("{} Become Candidate", self);
    self.send_request_vote_all();
}
/// Leader-side: advance `commit_index` to the highest index replicated
/// on a majority of peers, provided that entry belongs to the current
/// term (Raft's commit rule), then apply newly committed entries.
fn update_commit_index(&mut self) {
    let last_included_index = self.last_included_index.load(Ordering::SeqCst);
    let last_included_term = self.last_included_term.load(Ordering::SeqCst);
    // Our own match index always reflects the full local log.
    self.match_index[self.me].store(
        self.log.len() as u64 + last_included_index,
        Ordering::SeqCst,
    );
    let mut match_index_all: Vec<u64> = self
        .match_index
        .iter()
        .map(|v| v.load(Ordering::SeqCst))
        .collect();
    match_index_all.sort_unstable();
    // After sorting, the middle element is replicated on a majority.
    let match_n = match_index_all[self.peers.len() / 2];
    if match_n > self.commit_index.load(Ordering::SeqCst)
        && (match_n == last_included_index
            || self
                .log
                .get((match_n - last_included_index - 1) as usize)
                .map_or(last_included_term, |v| v.term)
                == self.current_term.load(Ordering::SeqCst))
    {
        debug!("{} Update commit index: {}", self, match_n);
        self.commit_index.store(match_n, Ordering::SeqCst);
    }
    self.send_apply_msg();
}
}
impl RaftInner {
    /// Index of the last log entry, or the snapshot boundary when the
    /// in-memory log is empty.
    fn last_index(&self) -> u64 {
        match self.log.last() {
            Some(entry) => entry.index,
            None => self.last_included_index.load(Ordering::SeqCst),
        }
    }
    /// Term of the last log entry, or the snapshot term when the
    /// in-memory log is empty.
    fn last_term(&self) -> u64 {
        match self.log.last() {
            Some(entry) => entry.term,
            None => self.last_included_term.load(Ordering::SeqCst),
        }
    }
    /// Term of entry `idx`; `None` when the entry was compacted into the
    /// snapshot or lies beyond the end of the log.
    fn term(&self, idx: u64) -> Option<u64> {
        let snap_index = self.last_included_index.load(Ordering::SeqCst);
        if idx == snap_index {
            Some(self.last_included_term.load(Ordering::SeqCst))
        } else if idx > snap_index && idx <= self.last_index() {
            Some(self.log[(idx - snap_index - 1) as usize].term)
        } else {
            None
        }
    }
    /// Whether entry `idx` exists and carries exactly `term`.
    fn match_term(&self, idx: u64, term: u64) -> bool {
        self.term(idx) == Some(term)
    }
    /// Election restriction (§5.4.1): a candidate's log is up to date if
    /// its (term, index) pair is lexicographically >= ours.
    fn is_up_to_date(&self, last_index: u64, term: u64) -> bool {
        (term, last_index) >= (self.last_term(), self.last_index())
    }
}
impl RaftInner {
    /// Fire a RequestVote RPC at `server`; the reply (or RPC error)
    /// arrives on the returned oneshot receiver.
    fn send_request_vote(
        &self,
        server: usize,
        args: RequestVoteArgs,
    ) -> Receiver<Result<RequestVoteReply>> {
        let mut client = self.peers[server].clone();
        let (tx, rx) = channel::<Result<RequestVoteReply>>();
        tokio::spawn(async move {
            let res = client
                .request_vote(Request::new(args))
                .await
                .map(|resp| resp.into_inner())
                .map_err(KvError::Rpc);
            // The receiver may already be dropped; ignore the send error.
            let _ = tx.send(res);
        });
        rx
    }
/// Decide whether to grant a vote (Raft §5.2/§5.4.1).
///
/// NOTE: `current_term` is read once up front; after `become_follower`
/// bumps the stored term, the comparisons below still use the pre-bump
/// value (args.term >= old term always holds on that path).
fn handle_request_vote(&mut self, args: RequestVoteArgs) -> RequestVoteReply {
    let current_term = self.current_term.load(Ordering::SeqCst);
    if current_term < args.term {
        // Newer term: clear our vote and step down before deciding.
        self.voted_for = None;
        self.become_follower(args.term);
    }
    if args.term < current_term {
        debug!("{} Handle {}, Vote false due to older term", self, args);
        RequestVoteReply {
            term: current_term,
            vote_granted: false,
        }
    } else if self.voted_for.is_some() && self.voted_for != Some(args.candidate_id as usize) {
        // Already voted for someone else in this term.
        debug!(
            "{} Handle {}, Vote false due to already vote for {}",
            self,
            args,
            self.voted_for.unwrap()
        );
        RequestVoteReply {
            term: args.term,
            vote_granted: false,
        }
    } else if !self.is_up_to_date(args.last_log_index, args.last_log_term) {
        // Election restriction: candidate's log must be at least as new.
        debug!("{} Handle {}, Vote false due to older log", self, args);
        RequestVoteReply {
            term: args.term,
            vote_granted: false,
        }
    } else {
        info!("{} Handle {}, Vote true", self, args);
        self.voted_for = Some(args.candidate_id as usize);
        RequestVoteReply {
            term: args.term,
            vote_granted: true,
        }
    }
}
/// Candidate: request votes from every other peer and, on a majority,
/// send `BecomeLeader` back through the event channel.
fn send_request_vote_all(&mut self) {
    // Start at 1: we always vote for ourselves.
    let vote_count = Arc::new(AtomicUsize::new(1));
    let (last_log_index, last_log_term) = self.get_last_log_info();
    let args = RequestVoteArgs {
        term: self.current_term.load(Ordering::SeqCst),
        candidate_id: self.me as i32,
        last_log_index,
        last_log_term,
    };
    info!("{} Send {} to ALL RaftNode", self, args);
    // Cleared once a majority is reached so late replies cannot trigger
    // a second BecomeLeader event.
    let is_candidate = Arc::new(AtomicBool::new(true));
    for server in 0..self.peers.len() {
        if server != self.me {
            let args = args.clone();
            let tx = self.sender.clone();
            let peers_num = self.peers.len();
            let is_candidate = is_candidate.clone();
            let term = self.current_term.load(Ordering::SeqCst);
            let vote_count = vote_count.clone();
            let rx = self.send_request_vote(server, args);
            tokio::spawn(async move {
                if let Ok(reply) = rx.await {
                    if let Ok(reply) = reply {
                        if is_candidate.load(Ordering::SeqCst) {
                            debug!(
                                "Get one {}, current {}, total {}",
                                reply,
                                vote_count.load(Ordering::SeqCst),
                                peers_num
                            );
                            if reply.term > term {
                                // A peer is ahead of us: abandon the election.
                                tx.send(RaftEvent::BecomeFollower(reply.term)).unwrap();
                            } else if reply.vote_granted {
                                // NOTE(review): Relaxed increment followed by a
                                // SeqCst read — confirm the mixed orderings are
                                // intentional.
                                vote_count.fetch_add(1, Ordering::Relaxed);
                                if vote_count.load(Ordering::SeqCst) > peers_num / 2 {
                                    is_candidate.store(false, Ordering::SeqCst);
                                    tx.send(RaftEvent::BecomeLeader(reply.term)).unwrap();
                                }
                            }
                        }
                    }
                }
            });
        }
    }
}
}
impl RaftInner {
    /// Fire an AppendEntries RPC at `server`; the reply (or RPC error)
    /// arrives on the returned oneshot receiver.
    fn send_append_entries(
        &self,
        server: usize,
        args: AppendEntriesArgs,
    ) -> Receiver<Result<AppendEntriesReply>> {
        let mut client = self.peers[server].clone();
        let (tx, rx) = channel::<Result<AppendEntriesReply>>();
        tokio::spawn(async move {
            let res = client
                .append_entries(Request::new(args))
                .await
                .map(|resp| resp.into_inner())
                .map_err(KvError::Rpc);
            // The receiver may already be dropped; ignore the send error.
            let _ = tx.send(res);
        });
        rx
    }
/// Follower-side AppendEntries handler: log-consistency check, conflict
/// back-off hints, log truncate/append, and commit-index advance.
fn handle_append_entries(&mut self, args: AppendEntriesArgs) -> AppendEntriesReply {
    let last_included_index = self.last_included_index.load(Ordering::SeqCst);
    let last_included_term = self.last_included_term.load(Ordering::SeqCst);
    let current_term = self.current_term.load(Ordering::SeqCst);
    if current_term < args.term {
        // Newer term: recognize the sender as leader and step down.
        self.voted_for = Some(args.leader_id as usize);
        self.become_follower(args.term);
        debug!(
            "{} Become Follower. New Leader id: {}",
            self, args.leader_id
        );
    }
    if args.term < current_term {
        // Stale leader.
        debug!("{} Handle {}, Success false due to older term", self, args);
        AppendEntriesReply {
            term: current_term,
            success: false,
            conflict_log_index: 0,
            conflict_log_term: 0,
        }
    } else if args.prev_log_index < last_included_index
        || args.prev_log_index == last_included_index
            && args.prev_log_term != last_included_term
    {
        // prev entry falls inside (or conflicts with) our snapshot; point
        // the leader at the snapshot boundary.
        debug!(
            "{} Handle {}, Success false due to Snapshot not match",
            self, args
        );
        AppendEntriesReply {
            term: current_term,
            success: false,
            conflict_log_term: last_included_term,
            conflict_log_index: last_included_index,
        }
    } else if args.prev_log_index > self.last_index()
        || args.prev_log_index > last_included_index
            && !self.match_term(args.prev_log_index, args.prev_log_term)
    {
        // Log gap or term mismatch at prev_log_index: report the first
        // index of the conflicting term so the leader can back off fast.
        debug!(
            "{} Handle {}, Success false due to log not match",
            self, args
        );
        let conflict_log_term = self.term(args.prev_log_index).unwrap_or(last_included_term);
        AppendEntriesReply {
            term: self.current_term.load(Ordering::SeqCst),
            success: false,
            conflict_log_term,
            conflict_log_index: self
                .log
                .iter()
                .filter(|v| v.term == conflict_log_term)
                .take(1)
                .next()
                .map_or(last_included_index, |v| v.index),
        }
    } else {
        // Consistent: drop everything after prev_log_index, append the
        // leader's entries, persist, then advance commit_index.
        debug!("{} Handle {}, Success true", self, args);
        self.log
            .truncate((args.prev_log_index - last_included_index) as usize);
        self.log.extend(args.entries);
        self.persist();
        if args.leader_commit > self.commit_index.load(Ordering::SeqCst) {
            self.commit_index
                .store(min(args.leader_commit, self.last_index()), Ordering::SeqCst);
        }
        AppendEntriesReply {
            term: self.current_term.load(Ordering::SeqCst),
            success: true,
            conflict_log_index: 0,
            conflict_log_term: 0,
        }
    }
}
/// Leader: replicate to every follower. A follower whose next entry is
/// already compacted into our snapshot gets an InstallSnapshot RPC
/// instead of AppendEntries.
fn send_append_entries_all(&mut self) {
    debug!("{} Send append entries to ALL RaftNode", self);
    let term = self.current_term.load(Ordering::SeqCst);
    for server in 0..self.peers.len() {
        if server != self.me {
            // Shared handles the reply task uses to record progress.
            let match_index = self.match_index[server].clone();
            let next_index = self.next_index[server].clone();
            let tx = self.sender.clone();
            let is_leader = self.is_leader.clone();
            let last_included_index = self.last_included_index.load(Ordering::SeqCst);
            let last_included_term = self.last_included_term.load(Ordering::SeqCst);
            let prev_log_index = max(1, self.next_index[server].load(Ordering::SeqCst)) - 1;
            if prev_log_index < last_included_index {
                // Follower is behind the snapshot boundary: ship the snapshot.
                let args = InstallSnapshotArgs {
                    term: self.current_term.load(Ordering::SeqCst),
                    leader_id: self.me as i32,
                    last_included_index,
                    last_included_term,
                    offset: 0,
                    data: self.persister.snapshot(),
                    done: true,
                };
                debug!("{} Send RaftNode {} {} ", self, server, args);
                let rx = self.send_install_snapshot(server, args);
                tokio::spawn(async move {
                    if let Ok(Ok(reply)) = rx.await {
                        if reply.term > term {
                            tx.send(RaftEvent::BecomeFollower(reply.term)).unwrap();
                        } else {
                            match_index.store(last_included_index, Ordering::SeqCst);
                            next_index.store(last_included_index + 1, Ordering::SeqCst);
                        }
                    }
                });
            } else {
                // prev entry is either the snapshot boundary or in the log.
                let prev_log_term = if prev_log_index == last_included_index {
                    last_included_term
                } else {
                    self.log[(prev_log_index - last_included_index - 1) as usize].term
                };
                // Send everything from next_index through the end of the log.
                let upper_log_index = last_included_index + self.log.len() as u64;
                let entries = if prev_log_index < upper_log_index {
                    self.log[((prev_log_index - last_included_index) as usize)
                        ..((upper_log_index - last_included_index) as usize)]
                        .to_vec()
                } else {
                    Vec::new()
                };
                let args = AppendEntriesArgs {
                    term: self.current_term.load(Ordering::SeqCst),
                    leader_id: self.me as i32,
                    prev_log_index,
                    prev_log_term,
                    entries,
                    leader_commit: self.commit_index.load(Ordering::SeqCst),
                };
                debug!("{} Send RaftNode {} {} ", self, server, args);
                let rx = self.send_append_entries(server, args);
                tokio::spawn(async move {
                    if let Ok(Ok(reply)) = rx.await {
                        if is_leader.load(Ordering::SeqCst) {
                            if !reply.success && reply.term > term {
                                is_leader.store(false, Ordering::SeqCst);
                                tx.send(RaftEvent::BecomeFollower(reply.term)).unwrap();
                            } else if reply.success {
                                match_index.store(upper_log_index, Ordering::SeqCst);
                                next_index.store(upper_log_index + 1, Ordering::SeqCst);
                            } else {
                                // Log mismatch: back next_index up to the
                                // follower's first conflicting index.
                                next_index.store(reply.conflict_log_index, Ordering::SeqCst);
                            }
                        }
                    }
                });
            }
            // BUG FIX: update_commit_index() was previously invoked twice
            // per AppendEntries peer (once inside the `else` arm and once
            // after it); the duplicate call was pure redundancy — the
            // second call ran against unchanged state.
            self.update_commit_index();
        }
    }
}
}
impl RaftInner {
    /// Fire a HeartBeat RPC at `server`; the reply (or RPC error)
    /// arrives on the returned oneshot receiver.
    fn send_heart_beat(
        &self,
        server: usize,
        args: HeartBeatArgs,
    ) -> Receiver<Result<HeartBeatReply>> {
        let mut client = self.peers[server].clone();
        let (tx, rx) = channel::<Result<HeartBeatReply>>();
        tokio::spawn(async move {
            let res = client
                .heart_beat(Request::new(args))
                .await
                .map(|resp| resp.into_inner())
                .map_err(KvError::Rpc);
            // The receiver may already be dropped; ignore the send error.
            let _ = tx.send(res);
        });
        rx
    }
/// Handle a leader heartbeat: adopt a newer term if presented, and
/// report success only when the sender's term is current and our commit
/// index has caught up to the leader's.
fn handle_heart_beat(&mut self, args: HeartBeatArgs) -> HeartBeatReply {
    let current_term = self.current_term.load(Ordering::SeqCst);
    if current_term < args.term {
        self.voted_for = Some(args.leader_id as usize);
        self.become_follower(args.term);
        debug!(
            "{} Become Follower. New Leader id: {}",
            self, args.leader_id
        );
    }
    let commit_index = self.commit_index.load(Ordering::SeqCst);
    // Single predicate replaces the two branches that differed only in
    // the `success` flag.
    let success = args.term >= current_term && commit_index >= args.leader_commit;
    HeartBeatReply {
        term: current_term,
        success,
        commit_index,
    }
}
/// Leader: heartbeat every peer; once a majority acknowledges, emit
/// `ReadOnlyCommit` so pending read-only requests can be served.
fn send_heart_beat_all(&mut self) {
    debug!("{} Send append entries to ALL RaftNode", self);
    // Start at 1: the leader counts itself.
    let beat_count = Arc::new(AtomicUsize::new(1));
    let term = self.current_term.load(Ordering::SeqCst);
    let args = HeartBeatArgs {
        term,
        leader_id: self.me as i32,
        leader_commit: self.commit_index.load(Ordering::SeqCst),
    };
    for server in 0..self.peers.len() {
        if server != self.me {
            let args = args.clone();
            let tx = self.sender.clone();
            let peers_num = self.peers.len();
            let beat_count = beat_count.clone();
            let rx = self.send_heart_beat(server, args);
            tokio::spawn(async move {
                if let Ok(reply) = rx.await {
                    if let Ok(reply) = reply {
                        if reply.term > term {
                            // A peer is ahead of us: step down.
                            tx.send(RaftEvent::BecomeFollower(reply.term)).unwrap();
                        } else if reply.success {
                            // NOTE(review): once the majority threshold is
                            // crossed, every later reply also sends a
                            // ReadOnlyCommit — confirm duplicates are harmless.
                            beat_count.fetch_add(1, Ordering::Relaxed);
                            if beat_count.load(Ordering::SeqCst) > peers_num / 2 {
                                tx.send(RaftEvent::ReadOnlyCommit(reply.commit_index))
                                    .unwrap();
                            }
                        }
                    }
                }
            });
        }
    }
}
}
impl RaftInner {
    /// Fire an InstallSnapshot RPC at `server`; the reply (or RPC error)
    /// arrives on the returned oneshot receiver.
    fn send_install_snapshot(
        &self,
        server: usize,
        args: InstallSnapshotArgs,
    ) -> Receiver<Result<InstallSnapshotReply>> {
        let mut client = self.peers[server].clone();
        let (tx, rx) = channel::<Result<InstallSnapshotReply>>();
        tokio::spawn(async move {
            let res = client
                .install_snapshot(Request::new(args))
                .await
                .map(|resp| resp.into_inner())
                .map_err(KvError::Rpc);
            // The receiver may already be dropped; ignore the send error.
            let _ = tx.send(res);
        });
        rx
    }
/// Follower-side InstallSnapshot handler: discard compacted log entries,
/// persist state + snapshot together, and forward the snapshot to the
/// state machine (marked with `command_valid: false`).
fn handle_install_snapshot(&mut self, args: InstallSnapshotArgs) -> InstallSnapshotReply {
    let current_term = self.current_term.load(Ordering::SeqCst);
    if current_term < args.term {
        self.voted_for = Some(args.leader_id as usize);
        self.become_follower(args.term);
        debug!(
            "{} Become Follower. New Leader id: {}",
            self, args.leader_id
        );
    }
    let last_included_index = self.last_included_index.load(Ordering::SeqCst);
    // Only accept a same-term snapshot that moves our boundary forward.
    if args.term == current_term && args.last_included_index > last_included_index {
        // Drop every entry now covered by the snapshot (capped at our
        // log length in case the snapshot extends past it).
        let range = min(
            self.log.len(),
            (args.last_included_index - last_included_index) as usize,
        );
        self.log.drain(..range);
        self.last_included_index
            .store(args.last_included_index, Ordering::SeqCst);
        self.last_included_term
            .store(args.last_included_term, Ordering::SeqCst);
        self.persist_with_snapshot(args.data.clone());
        // Commit/apply progress can only move forward, hence fetch_max.
        self.commit_index
            .fetch_max(args.last_included_index, Ordering::SeqCst);
        self.last_applied
            .fetch_max(args.last_included_index, Ordering::SeqCst);
        let msg = ApplyMsg {
            command_valid: false,
            command: args.data,
            command_index: 0,
        };
        self.apply_ch.send(msg).expect("Unable send ApplyMsg");
    }
    InstallSnapshotReply { term: current_term }
}
}
/// Events processed by the `RaftInner` stream loop: inbound RPCs (each
/// carrying a oneshot for its reply), role transitions, and client
/// commands.
#[derive(Debug)]
enum RaftEvent {
    RequestVote(RequestVoteArgs, Sender<RequestVoteReply>),
    AppendEntries(AppendEntriesArgs, Sender<AppendEntriesReply>),
    HeartBeat(HeartBeatArgs, Sender<HeartBeatReply>),
    InstallSnapshot(InstallSnapshotArgs, Sender<InstallSnapshotReply>),
    /// Won the election at the given term.
    BecomeLeader(u64),
    /// Observed a higher term; step down.
    BecomeFollower(u64),
    /// A heartbeat quorum confirmed reads up to this commit index.
    ReadOnlyCommit(u64),
    StartCommand(Vec<u8>, Sender<Result<(u64, u64)>>),
    StartReadOnly(Vec<u8>, Sender<Result<(u64, u64)>>),
    /// Snapshot bytes plus the last applied index they cover.
    StartSnapshot(Vec<u8>, u64),
    Shutdown,
}
/// Event loop of the Raft node. Each `poll_next` call services at most one
/// ready timer or one queued `RaftEvent`; the stream terminates
/// (`Poll::Ready(None)`) only upon `RaftEvent::Shutdown`.
impl Stream for RaftInner {
    type Item = ();
    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<Option<Self::Item>> {
        trace!("{} poll event!", self);
        // 1) Main timer: heartbeat cadence on a leader, election timeout otherwise.
        match self.timeout.poll_unpin(cx) {
            Poll::Ready(()) => {
                return {
                    trace!("{} poll timeout ready!", self);
                    if self.is_leader.load(Ordering::SeqCst) {
                        // Leader: broadcast AppendEntries and re-arm the heartbeat.
                        self.timeout.reset(heartbeat_timeout());
                        self.send_append_entries_all();
                        Poll::Ready(Some(()))
                    } else {
                        // Follower/candidate: no leader contact within the
                        // timeout — start a new election.
                        trace!("{} loss Leader connection", self);
                        self.timeout.reset(election_timeout());
                        self.become_candidate();
                        Poll::Ready(Some(()))
                    }
                };
            }
            Poll::Pending => {}
        };
        // 2) Periodic delivery of committed entries to the apply channel.
        match self.apply_msg_delay.poll_unpin(cx) {
            Poll::Ready(()) => {
                trace!("{} poll Apply Msg ready!", self);
                self.apply_msg_delay.reset(heartbeat_timeout());
                self.send_apply_msg();
                return Poll::Ready(Some(()));
            }
            Poll::Pending => {}
        };
        // 3) Queued events from RPC handlers and the public RaftNode API.
        match self.receiver.poll_recv(cx) {
            Poll::Ready(Some(event)) => match event {
                RaftEvent::RequestVote(args, tx) => {
                    let reply = self.handle_request_vote(args);
                    // Granting a vote counts as contact with a viable peer,
                    // so the election timer is pushed back.
                    if reply.vote_granted {
                        self.timeout.reset(election_timeout());
                    }
                    let _ = tx.send(reply);
                    Poll::Ready(Some(()))
                }
                RaftEvent::AppendEntries(args, tx) => {
                    let current_term = args.term;
                    let reply = self.handle_append_entries(args);
                    // Contact from a current-term leader resets the election timer.
                    if reply.success || reply.term == current_term {
                        self.timeout.reset(election_timeout());
                    }
                    let _ = tx.send(reply);
                    Poll::Ready(Some(()))
                }
                RaftEvent::HeartBeat(args, tx) => {
                    let current_term = args.term;
                    let reply = self.handle_heart_beat(args);
                    if reply.success || reply.term == current_term {
                        self.timeout.reset(election_timeout());
                    }
                    let _ = tx.send(reply);
                    Poll::Ready(Some(()))
                }
                RaftEvent::InstallSnapshot(args, tx) => {
                    let reply = self.handle_install_snapshot(args);
                    let _ = tx.send(reply);
                    Poll::Ready(Some(()))
                }
                RaftEvent::BecomeLeader(term) => {
                    self.become_leader(term);
                    self.timeout.reset(heartbeat_timeout());
                    // Starts an empty entry upon taking leadership — presumably
                    // a no-op so earlier-term entries become committable.
                    // NOTE(review): this `unwrap` panics if `start` fails right
                    // after become_leader; confirm that cannot happen.
                    self.start(&Vec::new()).unwrap();
                    self.send_append_entries_all();
                    Poll::Ready(Some(()))
                }
                RaftEvent::BecomeFollower(term) => {
                    self.become_follower(term);
                    self.timeout.reset(election_timeout());
                    Poll::Ready(Some(()))
                }
                RaftEvent::ReadOnlyCommit(commit) => {
                    // Release the read-only requests that became safe to
                    // answer at this commit index.
                    let msgs = self.read_only.pop_requests(commit);
                    self.apply_read_only(commit, msgs);
                    Poll::Ready(Some(()))
                }
                RaftEvent::StartCommand(command, tx) => {
                    debug!("{} Exexutor -- Receive command!", self);
                    let _ = tx.send(self.start(&command));
                    Poll::Ready(Some(()))
                }
                RaftEvent::StartReadOnly(command, tx) => {
                    debug!("{} Exexutor -- Receive command!", self);
                    let _ = tx.send(self.start_read_only(&command));
                    Poll::Ready(Some(()))
                }
                RaftEvent::StartSnapshot(snapshot, last_applied) => {
                    // Number of log entries the snapshot supersedes.
                    let snapshot_len =
                        (last_applied - self.last_included_index.load(Ordering::SeqCst)) as usize;
                    if snapshot_len > 0 {
                        info!("{} Exexutor -- Receive Snapshot!", self);
                        self.last_included_index
                            .store(last_applied, Ordering::SeqCst);
                        self.last_included_term
                            .store(self.log[snapshot_len - 1].term, Ordering::SeqCst);
                        // Drop the log prefix now covered by the snapshot.
                        self.log.drain(..snapshot_len);
                        self.persist_with_snapshot(snapshot);
                        info!("{} Exexutor -- Finish Snapshot!", self);
                    }
                    Poll::Ready(Some(()))
                }
                RaftEvent::Shutdown => Poll::Ready(None),
            },
            // NOTE(review): a closed event channel yields Ready(Some(())),
            // which keeps the loop spinning; confirm Shutdown is always sent
            // before the last sender is dropped.
            Poll::Ready(None) => Poll::Ready(Some(())),
            Poll::Pending => Poll::Pending,
        }
    }
    /// Effectively unbounded stream: no useful size estimate.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, None)
    }
}
/// RaftNode can replicate log entries to all raft nodes
///
/// Cloneable public handle around the `RaftInner` executor thread; all state
/// below is shared with the executor through `Arc`s.
#[derive(Clone)]
pub struct RaftNode {
    // Your code here.
    // Join handle of the dedicated executor thread spawned in `new`.
    handle: Arc<Mutex<thread::JoinHandle<()>>>,
    // Index of this peer.
    me: usize,
    // Channel into the executor's event loop.
    sender: UnboundedSender<RaftEvent>,
    // Mirrors of the executor's state, readable without messaging it.
    term: Arc<AtomicU64>,
    is_leader: Arc<AtomicBool>,
    raft_state_size: Arc<AtomicU64>,
    commit_index: Arc<AtomicU64>,
    last_applied: Arc<AtomicU64>,
}
impl RaftNode {
    /// Create a new raft service.
    ///
    /// Spawns a dedicated OS thread running a multi-threaded tokio runtime
    /// that drives the `RaftInner` event stream until `Shutdown` is received.
    pub fn new(
        peers: Vec<RaftRpcClient<Channel>>,
        me: usize,
        persister: Arc<dyn Persister>,
        apply_ch: UnboundedSender<ApplyMsg>,
    ) -> RaftNode {
        let (mut raft, sender) = RaftInner::new(peers, me, persister, apply_ch);
        // Shared atomics let the accessors below read state without
        // round-tripping through the executor.
        let term = raft.current_term.clone();
        let is_leader = raft.is_leader.clone();
        let raft_state_size = raft.raft_state_size.clone();
        let commit_index = raft.commit_index.clone();
        let last_applied = raft.last_applied.clone();
        let threaded_rt = Builder::new_multi_thread().enable_all().build().unwrap();
        let handle = thread::Builder::new()
            .name(format!("RaftNode-{}", me))
            .spawn(move || {
                threaded_rt.block_on(async move {
                    debug!("Enter main executor!");
                    while raft.next().await.is_some() {
                        trace!("get event");
                    }
                    debug!("Leave main executor!");
                })
            })
            .unwrap();
        RaftNode {
            handle: Arc::new(Mutex::new(handle)),
            me,
            sender,
            term,
            is_leader,
            raft_state_size,
            commit_index,
            last_applied,
        }
    }
    /// start a command that can encode to Bytes
    ///
    /// Returns the `(index, term)` the command was appended at, or
    /// `KvError::NotLeader` when this node is not the leader.
    pub fn start<M>(&self, command: &M) -> Result<(u64, u64)>
    where
        M: Message,
    {
        if self.is_leader() {
            let mut buf = vec![];
            command.encode(&mut buf).unwrap();
            // A single-threaded runtime is enough to await one oneshot reply;
            // building a multi-thread runtime per call was pure overhead.
            let threaded_rt = Builder::new_current_thread().build().unwrap();
            let (tx, rx) = channel();
            let sender = self.sender.clone();
            // Block on a helper thread so this stays safe even when the
            // caller already runs inside an async runtime (where block_on
            // on the current thread would panic).
            let handle = thread::spawn(move || {
                sender
                    .send(RaftEvent::StartCommand(buf, tx))
                    .expect("Unable to send start command to RaftExecutor");
                threaded_rt.block_on(async { rx.await }).unwrap()
            });
            let response = handle.join().unwrap();
            debug!(
                "RaftNode {} -- Start a Command, response with: {:?}",
                self.me, response
            );
            response
        } else {
            debug!("RaftNode {} -- Start a Command but in Not Leader", self.me);
            Err(KvError::NotLeader)
        }
    }
    /// start a command that can encode to Bytes
    ///
    /// Read-only variant of `start`: the request is served without being
    /// appended to the log; same reply convention.
    pub fn start_read_only<M>(&self, command: &M) -> Result<(u64, u64)>
    where
        M: Message,
    {
        if self.is_leader() {
            let mut buf = vec![];
            command.encode(&mut buf).unwrap();
            // See `start`: current-thread runtime suffices for one oneshot.
            let threaded_rt = Builder::new_current_thread().build().unwrap();
            let (tx, rx) = channel();
            let sender = self.sender.clone();
            let handle = thread::spawn(move || {
                sender
                    .send(RaftEvent::StartReadOnly(buf, tx))
                    .expect("Unable to send start ReadOnly to RaftExecutor");
                threaded_rt.block_on(async { rx.await }).unwrap()
            });
            let response = handle.join().unwrap();
            debug!(
                "RaftNode {} -- Start a ReadOnly, response with: {:?}",
                self.me, response
            );
            response
        } else {
            debug!("RaftNode {} -- Start a ReadOnly but in Not Leader", self.me);
            Err(KvError::NotLeader)
        }
    }
    /// receive a snapshot and save it.
    ///
    /// Fire-and-forget: the executor compacts the log asynchronously.
    pub fn start_snapshot(&self, snapshot: Vec<u8>, last_applied: u64) {
        self.sender
            .send(RaftEvent::StartSnapshot(snapshot, last_applied))
            .expect("Unable to send start Snapshot to RaftExecutor");
        debug!("RaftNode {} -- Start a Snapshot", self.me,);
    }
    /// The current term of this peer.
    pub fn term(&self) -> u64 {
        self.term.load(Ordering::SeqCst)
    }
    /// Whether this peer believes it is the leader.
    pub fn is_leader(&self) -> bool {
        self.is_leader.load(Ordering::SeqCst)
    }
    /// Shutdown this node
    pub fn kill(&self) {
        // Ignore the error: the executor may already be gone.
        let _ = self.sender.send(RaftEvent::Shutdown);
    }
}
/// gRPC surface of the node: every RPC is forwarded to the executor through
/// the event channel and the reply awaited on a oneshot. A dropped reply
/// sender (executor shut down) maps to `Code::Cancelled`.
#[tonic::async_trait]
impl RaftRpc for RaftNode {
    async fn request_vote(
        &self,
        args: Request<RequestVoteArgs>,
    ) -> std::result::Result<Response<RequestVoteReply>, Status> {
        let (tx, rx) = channel();
        let event = RaftEvent::RequestVote(args.into_inner(), tx);
        // `send` only needs `&self` (see `kill`), so no clone of the sender.
        let _ = self.sender.send(event);
        rx.await
            .map(Response::new)
            .map_err(|e| Status::new(Code::Cancelled, e.to_string()))
    }
    async fn append_entries(
        &self,
        args: Request<AppendEntriesArgs>,
    ) -> std::result::Result<Response<AppendEntriesReply>, Status> {
        let (tx, rx) = channel();
        let event = RaftEvent::AppendEntries(args.into_inner(), tx);
        let _ = self.sender.send(event);
        rx.await
            .map(Response::new)
            .map_err(|e| Status::new(Code::Cancelled, e.to_string()))
    }
    async fn heart_beat(
        &self,
        args: Request<HeartBeatArgs>,
    ) -> std::result::Result<Response<HeartBeatReply>, Status> {
        let (tx, rx) = channel();
        let event = RaftEvent::HeartBeat(args.into_inner(), tx);
        let _ = self.sender.send(event);
        rx.await
            .map(Response::new)
            .map_err(|e| Status::new(Code::Cancelled, e.to_string()))
    }
    async fn install_snapshot(
        &self,
        args: Request<InstallSnapshotArgs>,
    ) -> std::result::Result<Response<InstallSnapshotReply>, Status> {
        let (tx, rx) = channel();
        let event = RaftEvent::InstallSnapshot(args.into_inner(), tx);
        let _ = self.sender.send(event);
        rx.await
            .map(Response::new)
            .map_err(|e| Status::new(Code::Cancelled, e.to_string()))
    }
}
|
// Exercise 1.2.
// Translate the following expression into prefix form
// (/ (+ 5 4 (- 2 (- 3 (+ 6 (/ 4 3)))))
//    (* 3 (- 6 2) (- 2 7)))

/// Evaluates the exercise expression
/// (5 + 4 + (2 - (3 - (6 + 4/3)))) / (3 * (6 - 2) * (2 - 7)) = -23/90.
fn expression_value() -> f64 {
    (5.0 + 4.0 + (2.0 - (3.0 - (6.0 + 4.0 / 3.0)))) / (3.0 * (6.0 - 2.0) * (2.0 - 7.0))
}

fn main() {
    println!("{}", expression_value());
}
use crate::pcapng::blocks::common::opts_from_slice;
use crate::errors::PcapError;
use byteorder::{ByteOrder, ReadBytesExt};
use crate::pcapng::{UnknownOption, CustomUtf8Option, CustomBinaryOption};
use std::borrow::Cow;
use derive_into_owned::IntoOwned;
/// The Interface Statistics Block contains the capture statistics for a given interface and it is optional.
#[derive(Clone, Debug, IntoOwned)]
pub struct InterfaceStatisticsBlock<'a> {
    /// Specifies the interface these statistics refers to.
    /// The correct interface will be the one whose Interface Description Block (within the current Section of the file)
    /// is identified by same number of this field.
    pub interface_id: u32,
    /// Time this statistics refers to.
    /// The format of the timestamp is the same already defined in the Enhanced Packet Block.
    /// The length of a unit of time is specified by the 'if_tsresol' option of the Interface Description Block referenced by this packet.
    pub timestamp: u64,
    /// Options
    /// Parsed isb_* / comment / custom options; may borrow from the input slice.
    pub options: Vec<InterfaceStatisticsOption<'a>>
}
impl<'a> InterfaceStatisticsBlock<'a> {
    /// Parses an Interface Statistics Block body from `slice`.
    ///
    /// Expects at least 12 bytes (4-byte interface id + 8-byte timestamp)
    /// followed by the options list; `B` selects the section's endianness.
    /// Returns the unconsumed remainder of the slice and the parsed block.
    pub fn from_slice<B: ByteOrder>(mut slice: &'a [u8]) -> Result<(&'a [u8], Self), PcapError> {
        if slice.len() < 12 {
            return Err(PcapError::InvalidField("InterfaceStatisticsBlock: block length < 12"));
        }
        // read_u32 already yields a u32; the former `as u32` cast was redundant.
        let interface_id = slice.read_u32::<B>()?;
        let timestamp = slice.read_u64::<B>()?;
        let (slice, options) = InterfaceStatisticsOption::from_slice::<B>(slice)?;
        let block = InterfaceStatisticsBlock {
            interface_id,
            timestamp,
            options,
        };
        Ok((slice, block))
    }
}
/// One option of an Interface Statistics Block; variants mirror the pcapng
/// option codes handled in `from_slice`.
#[derive(Clone, Debug, IntoOwned)]
pub enum InterfaceStatisticsOption<'a> {
    /// The opt_comment option is a UTF-8 string containing human-readable comment text
    /// that is associated to the current block.
    Comment(Cow<'a, str>),
    /// The isb_starttime option specifies the time the capture started.
    IsbStartTime(u64),
    /// The isb_endtime option specifies the time the capture ended.
    IsbEndTime(u64),
    /// The isb_ifrecv option specifies the 64-bit unsigned integer number of packets received from the physical interface
    /// starting from the beginning of the capture.
    IsbIfRecv(u64),
    /// The isb_ifdrop option specifies the 64-bit unsigned integer number of packets dropped by the interface
    /// due to lack of resources starting from the beginning of the capture.
    IsbIfDrop(u64),
    /// The isb_filteraccept option specifies the 64-bit unsigned integer number of packets accepted
    /// by filter starting from the beginning of the capture.
    IsbFilterAccept(u64),
    /// The isb_osdrop option specifies the 64-bit unsigned integer number of packets dropped
    /// by the operating system starting from the beginning of the capture.
    IsbOsDrop(u64),
    /// The isb_usrdeliv option specifies the 64-bit unsigned integer number of packets delivered
    /// to the user starting from the beginning of the capture.
    IsbUsrDeliv(u64),
    /// Custom option containing binary octets in the Custom Data portion
    CustomBinary(CustomBinaryOption<'a>),
    /// Custom option containing a UTF-8 string in the Custom Data portion
    CustomUtf8(CustomUtf8Option<'a>),
    /// Unknown option
    Unknown(UnknownOption<'a>)
}
impl<'a> InterfaceStatisticsOption<'a> {
    /// Parses the option list of an Interface Statistics Block.
    ///
    /// Option codes: 1 = opt_comment, 2..=8 = the isb_* statistics,
    /// 2988/19372 = custom UTF-8, 2989/19373 = custom binary; anything
    /// else is preserved as `Unknown`.
    fn from_slice<B:ByteOrder>(slice: &'a[u8]) -> Result<(&'a [u8], Vec<Self>), PcapError> {
        opts_from_slice::<B, _, _>(slice, |mut slice, code, length| {
            let opt = match code {
                1 => InterfaceStatisticsOption::Comment(Cow::Borrowed(std::str::from_utf8(slice)?)),
                2 => InterfaceStatisticsOption::IsbStartTime(slice.read_u64::<B>()?),
                3 => InterfaceStatisticsOption::IsbEndTime(slice.read_u64::<B>()?),
                4 => InterfaceStatisticsOption::IsbIfRecv(slice.read_u64::<B>()?),
                5 => InterfaceStatisticsOption::IsbIfDrop(slice.read_u64::<B>()?),
                6 => InterfaceStatisticsOption::IsbFilterAccept(slice.read_u64::<B>()?),
                7 => InterfaceStatisticsOption::IsbOsDrop(slice.read_u64::<B>()?),
                8 => InterfaceStatisticsOption::IsbUsrDeliv(slice.read_u64::<B>()?),
                2988 | 19372 => InterfaceStatisticsOption::CustomUtf8(CustomUtf8Option::from_slice::<B>(code, slice)?),
                2989 | 19373 => InterfaceStatisticsOption::CustomBinary(CustomBinaryOption::from_slice::<B>(code, slice)?),
                _ => InterfaceStatisticsOption::Unknown(UnknownOption::new(code, length, slice))
            };
            Ok(opt)
        })
    }
}
|
use std::pin::Pin;
use std::sync::Arc;
use dbus::channel::Sender;
use dbus::arg;
use std::future::Future;
use std::marker::PhantomData;
use crate::{Context, MethodErr, IfaceBuilder,stdimpl};
use crate::ifacedesc::Registry;
use std::collections::{BTreeMap, HashSet};
use std::any::Any;
use std::fmt;
// Registry slots reserved for the two standard interfaces registered in
// `Crossroads::new`; `introspectable()` / `properties()` rely on these positions.
const INTROSPECTABLE: usize = 0;
const PROPERTIES: usize = 1;
/// Opaque handle to a registered interface, tagged with the object data type `T`.
#[derive(Debug, Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)]
pub struct IfaceToken<T: Send + 'static>(usize, PhantomData<&'static T>);
/// One object on the bus: the set of interfaces it implements (registry
/// indexes) plus its type-erased user data.
#[derive(Debug)]
struct Object {
    ifaces: HashSet<usize>,
    data: Box<dyn Any + Send + 'static>
}
/// Callback that hands a boxed future to the user's executor.
pub type BoxedSpawn = Box<dyn Fn(Pin<Box<dyn Future<Output = ()> + Send + 'static>>) + Send + 'static>;
/// Sender + spawner pair installed via `Crossroads::set_async_support`.
struct AsyncSupport {
    sender: Arc<dyn Sender + Send + Sync + 'static>,
    spawner: BoxedSpawn,
}
impl fmt::Debug for AsyncSupport {
    // Neither field is Debug; print a placeholder.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "AsyncSupport") }
}
/// Central dispatcher: maps D-Bus paths to objects, owns the interface
/// registry, and optionally holds async support for spawned methods.
#[derive(Debug)]
pub struct Crossroads {
    // Sorted map so `get_children` can range-scan descendants of a path.
    map: BTreeMap<dbus::Path<'static>, Object>,
    registry: Registry,
    add_standard_ifaces: bool,
    async_support: Option<AsyncSupport>,
}
impl Crossroads {
    /// Creates a new instance with the two standard interfaces
    /// (Introspectable, Properties) pre-registered in the reserved slots.
    pub fn new() -> Crossroads {
        let mut cr = Crossroads {
            map: Default::default(),
            registry: Default::default(),
            add_standard_ifaces: true,
            async_support: None,
        };
        let t0 = stdimpl::introspectable(&mut cr);
        let t1 = stdimpl::properties(&mut cr);
        // `introspectable()` / `properties()` below assume these indexes.
        debug_assert_eq!(t0.0, INTROSPECTABLE);
        debug_assert_eq!(t1.0, PROPERTIES);
        cr
    }
    /// Controls whether `insert` automatically adds the standard interfaces.
    pub fn set_add_standard_ifaces(&mut self, enable: bool) {
        self.add_standard_ifaces = enable;
    }
    /// Registers an interface description built by `f`; the returned token
    /// can be passed to `insert` for objects whose data type is `T`.
    pub fn register<T, N, F>(&mut self, name: N, f: F) -> IfaceToken<T>
    where T: Send + 'static, N: Into<dbus::strings::Interface<'static>>,
    F: FnOnce(&mut IfaceBuilder<T>)
    {
        let iface = IfaceBuilder::build(Some(name.into()), f);
        let x = self.registry.push(iface);
        IfaceToken(x, PhantomData)
    }
    /// Mutable access to the data stored with the object at `name`;
    /// `None` if the path is absent or the stored type is not `D`.
    pub fn data_mut<D: Any + Send + 'static>(&mut self, name: &dbus::Path<'static>) -> Option<&mut D> {
        let obj = self.map.get_mut(name)?;
        obj.data.downcast_mut()
    }
    /// Inserts an object at path `name` implementing the given interfaces.
    /// When enabled, Introspectable is always added and Properties is added
    /// if any of the interfaces declares properties.
    pub fn insert<'z, D, I, N>(&mut self, name: N, ifaces: I, data: D)
    where D: Any + Send + 'static, N: Into<dbus::Path<'static>>, I: IntoIterator<Item = &'z IfaceToken<D>>
    {
        let ifaces = ifaces.into_iter().map(|x| x.0);
        let mut ifaces: HashSet<usize> = std::iter::FromIterator::from_iter(ifaces);
        if self.add_standard_ifaces {
            ifaces.insert(INTROSPECTABLE);
            if ifaces.iter().any(|u| self.registry().has_props(*u)) {
                ifaces.insert(PROPERTIES);
            }
        }
        self.map.insert(name.into(), Object { ifaces, data: Box::new(data)});
    }
    /// Resolves (path, interface) to a registry token, producing the proper
    /// D-Bus error if the path is unknown or the interface not implemented.
    pub (crate) fn find_iface_token(&self,
        path: &dbus::Path<'static>,
        interface: Option<&dbus::strings::Interface<'static>>)
    -> Result<usize, MethodErr> {
        let obj = self.map.get(path).ok_or_else(|| MethodErr::no_path(path))?;
        self.registry.find_token(interface, &obj.ifaces)
    }
    /// Mutable access to the interface registry.
    pub (crate) fn registry(&mut self) -> &mut Registry { &mut self.registry }
    /// Registry plus the interface set of an existing object.
    /// Panics if `path` is not in the map.
    pub (crate) fn registry_and_ifaces(&self, path: &dbus::Path<'static>)
    -> (&Registry, &HashSet<usize>) {
        let obj = self.map.get(path).unwrap();
        (&self.registry, &obj.ifaces)
    }
    /// Names (relative to `path`) of registered paths below `path`, found by
    /// range-scanning the sorted map from just past `path`.
    pub (crate) fn get_children(&self, path: &dbus::Path<'static>) -> Vec<&str> {
        use std::ops::Bound;
        let mut range = self.map.range((Bound::Excluded(path), Bound::Unbounded));
        let p2 = path.as_bytes();
        let mut r = vec!();
        while let Some((c, _)) = range.next() {
            // Past the subtree: keys no longer share the prefix.
            if !c.as_bytes().starts_with(p2) { break; }
            let csub: &str = &c[p2.len()..];
            // Skip keys where the prefix match is not at a '/' boundary.
            // NOTE(review): deeper descendants ("a/b") are also pushed, not
            // only direct children — confirm callers expect that.
            if csub.len() == 0 || csub.as_bytes()[0] != b'/' { continue; }
            r.push(&csub[1..]);
        };
        r
    }
    /// Runs an async method: attaches the sender to the context (so replies
    /// can still be flushed when the context is dropped) and hands the
    /// future to the configured spawner. Panics if async support is unset.
    pub (crate) fn run_async_method<F, R>(&mut self, mut ctx: Context, f: F)
    where F: FnOnce(Context, &mut Crossroads) -> R,
    R: Future<Output=()> + Send + 'static
    {
        let sender = self.async_support.as_ref().expect("Async support not set").sender.clone();
        ctx.set_on_drop(sender);
        let future = f(ctx, self);
        let spawner = &self.async_support.as_ref().expect("Async support not set").spawner;
        let boxed = Box::pin(async move { future.await });
        (spawner)(boxed)
    }
    /// Dispatches an incoming method-call message to the registered handler
    /// and flushes any reply over `conn`. `Err(())` if the message cannot
    /// form a context (e.g. is not a method call).
    pub fn handle_message<S: dbus::channel::Sender>(&mut self, message: dbus::Message, conn: &S) -> Result<(), ()> {
        let mut ctx = Context::new(message).ok_or(())?;
        // The callback is temporarily *taken out* of the registry so it can
        // be invoked while `self` is borrowed mutably; it must be given back.
        let (itoken, mut cb) = ctx.check(|ctx| {
            let itoken = self.find_iface_token(ctx.path(), ctx.interface())?;
            let cb = self.registry.take_method(itoken, ctx.method())?;
            Ok((itoken, cb))
        })?;
        // No failure paths before method is given back!
        let methodname = ctx.method().clone();
        let ctx = cb(ctx, self);
        self.registry.give_method(itoken, &methodname, cb);
        if let Some(mut ctx) = ctx { ctx.flush_messages(conn) } else { Ok(()) }
    }
    /// Token of the built-in Introspectable interface.
    pub fn introspectable<T: Send + 'static>(&self) -> IfaceToken<T> { IfaceToken(INTROSPECTABLE, PhantomData) }
    /// Token of the built-in Properties interface.
    pub fn properties<T: Send + 'static>(&self) -> IfaceToken<T> { IfaceToken(PROPERTIES, PhantomData) }
    /// Spawns the future of an async method; on completion its output (or
    /// error) is appended to the reply and flushed. If async support is
    /// missing, a failure reply is recorded and the context returned.
    pub fn spawn_method<OA: arg::AppendAll, F>(&self, mut ctx: Context, f: F) -> Result<PhantomData<OA>, Context>
    where F: Future<Output=Result<OA, MethodErr>> + Send + 'static {
        let support = match self.async_support.as_ref() {
            Some(x) => x,
            None => {
                let _ = ctx.check::<(),_>(|_| Err(MethodErr::failed(&"Async support not set")));
                return Err(ctx);
            }
        };
        let sender = support.sender.clone();
        let boxed = Box::pin(async move {
            let r = f.await;
            if let Ok(oa) = ctx.check(|_| {Ok(r?) }) {
                ctx.do_reply(|mut msg| oa.append(&mut arg::IterAppend::new(&mut msg)));
            }
            let _ = ctx.flush_messages(&*sender);
            ()
        });
        (support.spawner)(boxed);
        Ok(PhantomData)
    }
    /// Installs or removes async support, returning the previous setting.
    pub fn set_async_support(&mut self, x: Option<(Arc<dyn Sender + Send + Sync + 'static>, BoxedSpawn)>) -> Option<(Arc<dyn Sender + Send + Sync + 'static>, BoxedSpawn)> {
        let a = self.async_support.take();
        self.async_support = x.map(|x| AsyncSupport {
            sender: x.0,
            spawner: x.1
        });
        a.map(|x| (x.sender, x.spawner))
    }
}
|
/// Register `CFGR` reader.
pub type R = crate::R<CFGR_SPEC>;
/// Register `CFGR` writer.
pub type W = crate::W<CFGR_SPEC>;
/// Field `SW` reader - system clock switch; hardware forces HSISYS on
/// Stop/Standby/Shutdown exit or on HSE failure while HSE is selected.
pub type SW_R = crate::FieldReader;
/// Field `SW` writer - system clock switch (3 bits).
pub type SW_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
/// Field `SWS` reader - system clock switch status (hardware-controlled).
pub type SWS_R = crate::FieldReader;
/// Field `HPRE` reader - AHB prescaler (SYSCLK -> HCLK division).
pub type HPRE_R = crate::FieldReader;
/// Field `HPRE` writer - AHB prescaler (4 bits).
pub type HPRE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
/// Field `PPRE` reader - APB prescaler (HCLK -> PCLK division).
pub type PPRE_R = crate::FieldReader;
/// Field `PPRE` writer - APB prescaler (3 bits).
pub type PPRE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
/// Field `MCO2SEL` reader - MCO2 clock source selector.
pub type MCO2SEL_R = crate::FieldReader;
/// Field `MCO2SEL` writer - MCO2 clock source selector (4 bits).
pub type MCO2SEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
/// Field `MCO2PRE` reader - MCO2 output prescaler; set before enabling MCO2.
pub type MCO2PRE_R = crate::FieldReader;
/// Field `MCO2PRE` writer - MCO2 output prescaler (4 bits).
pub type MCO2PRE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
/// Field `MCOSEL` reader - MCO clock source selector.
pub type MCOSEL_R = crate::FieldReader;
/// Field `MCOSEL` writer - MCO clock source selector (4 bits).
pub type MCOSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
/// Field `MCOPRE` reader - MCO output prescaler; set before enabling MCO.
pub type MCOPRE_R = crate::FieldReader;
/// Field `MCOPRE` writer - MCO output prescaler (4 bits).
pub type MCOPRE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
impl R {
    /// Bits 0:2 - SW: system clock switch (software-set; hardware may force HSISYS).
    #[inline(always)]
    pub fn sw(&self) -> SW_R {
        SW_R::new((self.bits & 0x07) as u8)
    }
    /// Bits 3:5 - SWS: system clock switch status (hardware-controlled).
    #[inline(always)]
    pub fn sws(&self) -> SWS_R {
        SWS_R::new(((self.bits >> 3) & 0x07) as u8)
    }
    /// Bits 8:11 - HPRE: AHB prescaler (SYSCLK -> HCLK division factor).
    #[inline(always)]
    pub fn hpre(&self) -> HPRE_R {
        HPRE_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    /// Bits 12:14 - PPRE: APB prescaler (HCLK -> PCLK division factor).
    #[inline(always)]
    pub fn ppre(&self) -> PPRE_R {
        PPRE_R::new(((self.bits >> 12) & 0x07) as u8)
    }
    /// Bits 16:19 - MCO2SEL: MCO2 clock source selector.
    #[inline(always)]
    pub fn mco2sel(&self) -> MCO2SEL_R {
        MCO2SEL_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
    /// Bits 20:23 - MCO2PRE: MCO2 output prescaler.
    #[inline(always)]
    pub fn mco2pre(&self) -> MCO2PRE_R {
        MCO2PRE_R::new(((self.bits >> 20) & 0x0f) as u8)
    }
    /// Bits 24:27 - MCOSEL: MCO clock source selector.
    #[inline(always)]
    pub fn mcosel(&self) -> MCOSEL_R {
        MCOSEL_R::new(((self.bits >> 24) & 0x0f) as u8)
    }
    /// Bits 28:31 - MCOPRE: MCO output prescaler.
    #[inline(always)]
    pub fn mcopre(&self) -> MCOPRE_R {
        MCOPRE_R::new(((self.bits >> 28) & 0x0f) as u8)
    }
}
impl W {
    /// Bits 0:2 - SW: system clock switch.
    #[inline(always)]
    #[must_use]
    pub fn sw(&mut self) -> SW_W<CFGR_SPEC, 0> {
        SW_W::new(self)
    }
    /// Bits 8:11 - HPRE: AHB prescaler.
    #[inline(always)]
    #[must_use]
    pub fn hpre(&mut self) -> HPRE_W<CFGR_SPEC, 8> {
        HPRE_W::new(self)
    }
    /// Bits 12:14 - PPRE: APB prescaler.
    #[inline(always)]
    #[must_use]
    pub fn ppre(&mut self) -> PPRE_W<CFGR_SPEC, 12> {
        PPRE_W::new(self)
    }
    /// Bits 16:19 - MCO2SEL: MCO2 clock source selector.
    #[inline(always)]
    #[must_use]
    pub fn mco2sel(&mut self) -> MCO2SEL_W<CFGR_SPEC, 16> {
        MCO2SEL_W::new(self)
    }
    /// Bits 20:23 - MCO2PRE: MCO2 output prescaler; set before enabling MCO2.
    #[inline(always)]
    #[must_use]
    pub fn mco2pre(&mut self) -> MCO2PRE_W<CFGR_SPEC, 20> {
        MCO2PRE_W::new(self)
    }
    /// Bits 24:27 - MCOSEL: MCO clock source selector.
    #[inline(always)]
    #[must_use]
    pub fn mcosel(&mut self) -> MCOSEL_W<CFGR_SPEC, 24> {
        MCOSEL_W::new(self)
    }
    /// Bits 28:31 - MCOPRE: MCO output prescaler; set before enabling MCO.
    #[inline(always)]
    #[must_use]
    pub fn mcopre(&mut self) -> MCOPRE_W<CFGR_SPEC, 28> {
        MCOPRE_W::new(self)
    }
    /// Writes raw bits to the register.
    ///
    /// # Safety
    /// The caller must ensure the value is valid for every CFGR field.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
/// RCC clock configuration register.
///
/// Read with [`read`](crate::generic::Reg::read) (yields [`R`]); write with
/// [`write`](crate::generic::Reg::write) / [`write_with_zero`](crate::generic::Reg::write_with_zero)
/// or [`modify`](crate::generic::Reg::modify) (via [`W`]); clear with
/// [`reset`](crate::generic::Reg::reset).
pub struct CFGR_SPEC;
impl crate::RegisterSpec for CFGR_SPEC {
    type Ux = u32;
}
/// `read()` returns the [`R`] reader structure.
impl crate::Readable for CFGR_SPEC {}
/// `write(|w| ..)` takes the [`W`] writer structure.
impl crate::Writable for CFGR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
/// `reset()` sets CFGR to 0.
impl crate::Resettable for CFGR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use slab::*;
use std::ops::{Deref, DerefMut};
/// Stable handle to a node in a `List`; wraps the underlying slab key.
#[derive(Copy, Clone)]
pub struct NodeId(usize);
/// A list node: the stored item plus slab keys of its neighbours.
#[derive(Copy, Clone)]
pub struct Node<T> {
    item: T,
    next: Option<usize>,
    prev: Option<usize>,
}
impl<T> Deref for Node<T> {
    type Target = T;
    /// Borrows the stored item.
    fn deref(&self) -> &T {
        let Self { item, .. } = self;
        item
    }
}
impl<T> DerefMut for Node<T> {
    /// Mutably borrows the stored item.
    fn deref_mut(&mut self) -> &mut T {
        let Self { item, .. } = self;
        item
    }
}
impl<T> Node<T> {
pub fn into_inner(self) -> T {
self.item
}
pub fn next(&self) -> Option<NodeId> {
Some(NodeId(self.next?))
}
pub fn prev(&self) -> Option<NodeId> {
Some(NodeId(self.prev?))
}
}
/// Doubly-linked list backed by a `Slab`, giving stable `NodeId` handles.
#[derive(Clone)]
pub struct List<T> {
    inner: Slab<Node<T>>,
    // Slab key of the head node, if the list is non-empty.
    init: Option<usize>,
    // Slab key of the tail node, if the list is non-empty.
    last: Option<usize>,
}
impl<T> Default for List<T> {
    /// An empty list with no backing allocation.
    fn default() -> Self {
        Self {
            inner: Slab::new(),
            init: None,
            last: None,
        }
    }
}
impl<T> List<T> {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self::default()
    }
    /// Creates an empty list with capacity for `cap` nodes.
    pub fn with_capacity(cap: usize) -> Self {
        List {
            inner: Slab::with_capacity(cap),
            ..Self::default()
        }
    }
    /// Borrows the node behind `id`, if it is still present.
    pub fn get(&self, id: NodeId) -> Option<&Node<T>> {
        self.inner.get(id.0)
    }
    /// Mutably borrows the node behind `id`, if it is still present.
    pub fn get_mut(&mut self, id: NodeId) -> Option<&mut Node<T>> {
        self.inner.get_mut(id.0)
    }
    /// Reserves capacity for at least `additional` more nodes.
    pub fn reserve(&mut self, additional: usize) {
        self.inner.reserve(additional);
    }
    /// Reserves capacity for exactly `additional` more nodes.
    pub fn reserve_exact(&mut self, additional: usize) {
        self.inner.reserve_exact(additional);
    }
    /// Inserts `item` at the front of the list and returns its id.
    pub fn push_front(&mut self, item: T) -> NodeId {
        let vacant = self.inner.vacant_entry();
        let key = vacant.key();
        // The previous head (if any) becomes the new node's successor.
        let next = self.init.replace(key);
        vacant.insert(Node {
            item,
            next,
            prev: None,
        });
        if let Some(ix) = next {
            debug_assert!(self.inner.contains(ix));
            // SAFETY: `ix` came from `self.init`, which always refers to a
            // live slab entry while the list invariants hold.
            let next = unsafe { self.inner.get_unchecked_mut(ix) };
            let old_prev = next.prev.replace(key);
            debug_assert_eq!(old_prev, None);
        } else {
            // BUGFIX: the list was empty, so the new node is also the tail.
            // Previously `last` stayed `None`, breaking `pop_back` /
            // `cursor_back` after a `push_front` into an empty list.
            self.last = Some(key);
        }
        NodeId(key)
    }
    /// Inserts `item` at the back of the list and returns its id.
    pub fn push_back(&mut self, item: T) -> NodeId {
        let vacant = self.inner.vacant_entry();
        let key = vacant.key();
        // The previous tail (if any) becomes the new node's predecessor.
        let prev = self.last.replace(key);
        vacant.insert(Node {
            item,
            prev,
            next: None,
        });
        if let Some(ix) = prev {
            debug_assert!(self.inner.contains(ix));
            // SAFETY: `ix` came from `self.last`, which always refers to a
            // live slab entry while the list invariants hold.
            let prev = unsafe { self.inner.get_unchecked_mut(ix) };
            let old_next = prev.next.replace(key);
            debug_assert_eq!(old_next, None);
        } else {
            // BUGFIX: the list was empty, so the new node is also the head.
            // Previously `init` stayed `None`, breaking `pop_front` /
            // `cursor_front` after a `push_back` into an empty list.
            self.init = Some(key);
        }
        NodeId(key)
    }
    /// Removes the node behind `id`, relinking its neighbours.
    /// Returns `None` when `id` is stale.
    pub fn remove(&mut self, id: NodeId) -> Option<Node<T>> {
        if !self.inner.contains(id.0) {
            return None;
        }
        let node = self.inner.remove(id.0);
        if let Some(prev_ix) = node.prev {
            debug_assert!(self.inner.contains(prev_ix));
            // SAFETY: neighbour links always point at live slab entries
            // while the list invariants hold.
            let prev = unsafe { self.inner.get_unchecked_mut(prev_ix) };
            debug_assert_eq!(prev.next, Some(id.0));
            prev.next = node.next;
        } else {
            // Removed the head: advance `init`.
            self.init = node.next;
        }
        if let Some(next_ix) = node.next {
            debug_assert!(self.inner.contains(next_ix));
            // SAFETY: as above.
            let next = unsafe { self.inner.get_unchecked_mut(next_ix) };
            debug_assert_eq!(next.prev, Some(id.0));
            next.prev = node.prev;
        } else {
            // Removed the tail: retreat `last`.
            self.last = node.prev;
        }
        Some(node)
    }
    /// Id of the first node, if any.
    pub fn init(&self) -> Option<NodeId> {
        Some(NodeId(self.init?))
    }
    /// Id of the last node, if any.
    pub fn last(&self) -> Option<NodeId> {
        Some(NodeId(self.last?))
    }
    /// Removes and returns the first node.
    pub fn pop_front(&mut self) -> Option<Node<T>> {
        self.remove(self.init()?)
    }
    /// Removes and returns the last node.
    pub fn pop_back(&mut self) -> Option<Node<T>> {
        self.remove(self.last()?)
    }
    /// Whether `id` still refers to a node in this list.
    pub fn contains(&self, id: NodeId) -> bool {
        self.inner.contains(id.0)
    }
    /// Read-only cursor positioned at the head.
    pub fn cursor_front(&self) -> Cursor<T> {
        Cursor {
            current: self.init,
            backing: self,
        }
    }
    /// Read-only cursor positioned at the tail.
    pub fn cursor_back(&self) -> Cursor<T> {
        Cursor {
            current: self.last,
            backing: self,
        }
    }
    /// Read-only cursor positioned at `id` (which may be stale).
    pub fn cursor_at(&self, id: NodeId) -> Cursor<T> {
        Cursor {
            current: Some(id.0),
            backing: self,
        }
    }
    /// Mutating cursor positioned at the head.
    pub fn cursor_front_mut(&mut self) -> CursorMut<T> {
        CursorMut {
            current: self.init,
            backing: self,
        }
    }
    /// Mutating cursor positioned at the tail.
    pub fn cursor_back_mut(&mut self) -> CursorMut<T> {
        CursorMut {
            current: self.last,
            backing: self,
        }
    }
    /// Mutating cursor positioned at `id` (which may be stale).
    pub fn cursor_at_mut(&mut self, id: NodeId) -> CursorMut<T> {
        CursorMut {
            current: Some(id.0),
            backing: self,
        }
    }
}
/// A read-only cursor over a [`List`], movable in both directions.
#[derive(Copy, Clone)]
pub struct Cursor<'a, T> {
    /// Slab key of the node the cursor currently points at.
    current: Option<usize>,
    backing: &'a List<T>,
}
impl<'a, T> Cursor<'a, T> {
    /// The node the cursor points at, if it still exists.
    pub fn current(&self) -> Option<&'a Node<T>> {
        let ix = self.current?;
        self.backing.inner.get(ix)
    }
    /// Advances towards the back; returns `false` when already at the end.
    pub fn try_next(&mut self) -> bool {
        match self.current().and_then(|node| node.next) {
            Some(ix) => {
                self.current = Some(ix);
                true
            }
            None => false,
        }
    }
    /// Steps towards the front; returns `false` when already at the start.
    pub fn try_prev(&mut self) -> bool {
        match self.current().and_then(|node| node.prev) {
            Some(ix) => {
                self.current = Some(ix);
                true
            }
            None => false,
        }
    }
}
/// A cursor over a [`List`] that can hand out mutable node references.
/// Not `Copy`/`Clone`: it holds a unique borrow of the list.
pub struct CursorMut<'a, T> {
    /// Slab key of the node the cursor currently points at.
    current: Option<usize>,
    backing: &'a mut List<T>,
}
impl<'a, T> CursorMut<'a, T> {
    /// Mutable access to the node the cursor points at, if it still exists.
    pub fn current(&mut self) -> Option<&mut Node<T>> {
        let ix = self.current?;
        self.backing.inner.get_mut(ix)
    }
    /// Advances towards the back; returns `false` when already at the end.
    pub fn try_next(&mut self) -> bool {
        match self.current().and_then(|node| node.next) {
            Some(ix) => {
                self.current = Some(ix);
                true
            }
            None => false,
        }
    }
    /// Steps towards the front; returns `false` when already at the start.
    pub fn try_prev(&mut self) -> bool {
        match self.current().and_then(|node| node.prev) {
            Some(ix) => {
                self.current = Some(ix);
                true
            }
            None => false,
        }
    }
}
|
mod array;
mod hash_map;
mod hash_set;
mod queue;
mod raw_array;
mod small_array;
mod string;
mod wstring;
pub use array::*;
pub use hash_map::*;
pub use hash_set::*;
pub use queue::Queue;
pub(crate) use raw_array::*;
pub use small_array::*;
pub use string::*;
pub use wstring::*;
|
use std::thread;
use std::sync::{Arc, Mutex};
use substring::Substring;
/// The two kinds of nucleic acid a strand can represent.
pub enum NucleicAcid {
    /// Deoxyribonucleic acid — complements of 'A' are written as 'T'.
    DNA,
    /// Ribonucleic acid — complements of 'A' are written as 'U'.
    RNA
}
/// A (sub)sequence of bases plus bookkeeping for parallel transcription.
pub struct Strand {
    /// The base sequence, e.g. "ATCG".
    pub bases: String,
    /// Position of this substrand within the parent strand; used to
    /// reassemble transcription results in order.
    pub index: usize,
    /// When true, 'A' transcribes to 'T' (DNA); otherwise to 'U' (RNA).
    pub is_dna: bool,
}
/// Transcription settings supplied by the caller.
pub struct Options {
    /// Chunk size used when splitting a strand into parallel substrands.
    pub seq_len: usize,
    /// Whether the output should be DNA ('T') or RNA ('U').
    pub is_dna: bool,
}
/// Extracts the gene between the first promotor-derived start site and the
/// first terminator-derived stop site, then transcribes it in parallel
/// substrands.
///
/// # Panics
/// Panics (with a descriptive message) when no start or stop site can be
/// found in `dna`.
pub fn transcribe_sequence(dna: String, opts: Options) -> String {
    let start_sites: Vec<usize> = find_start_sites(&dna);
    let params: StopSiteParams = StopSiteParams {
        len: 8,
        // Downstream window searched for the hairpin palindrome.
        t_region_len: 4 * 8,
        stop_seq: "TTTT",
        stop_seq_len: 4
    };
    let stop_sites: Vec<usize> = find_stop_sites(&dna, params);
    // Fail loudly and descriptively instead of an opaque `index out of
    // bounds` panic when either site list is empty.
    let start = *start_sites.first().expect("no transcription start site found");
    let stop = *stop_sites.first().expect("no transcription stop site found");
    let gene: String = String::from(dna.substring(start, stop));
    let target: String = transcribe(Strand {
        bases: gene.clone(),
        index: 0,
        is_dna: true,
    });
    println!("{:?}", start_sites);
    println!("{:?}", stop_sites);
    println!("5`-> 3`: {}", gene);
    println!("3`-> 5`: {}", target);
    let substrands: Vec<Strand> = to_subs(target, opts);
    transcribe_strands(substrands)
}
/// Scans `dna` for promotor pairs: a -35-box-like hexamer (TTGACA
/// consensus) followed 23 bases downstream by a -10-box-like hexamer
/// (TATATT consensus). Each hit yields a predicted transcription start
/// site further downstream.
fn find_start_sites(dna: &String) -> Vec<usize> {
    let minus35: Vec<char> = vec!['T','T','G','A','C','A'];
    let minus10: Vec<char> = vec!['T','A','T','A','T','T'];
    let mut start_sites: Vec<usize> = vec![];
    let count = dna.chars().count();
    // Guard: `count - 6` underflows (panics) on input shorter than a hexamer.
    if count < 6 {
        return start_sites;
    }
    let last_start = count - 6;
    let mut i = 0;
    while i <= last_start {
        let one = dna.substring(i, i + 6);
        // The -10 element is looked for 23 bases downstream of the -35 element.
        let two = dna.substring(i + 23, i + 6 + 23);
        if is_promotor(one, &minus35) && is_promotor(two, &minus10) {
            let start_site = i + 25 + 10;
            start_sites.push(start_site);
        }
        i += 1;
    }
    start_sites
}
/// Fuzzy-matches `substr` against a consensus sequence.
///
/// Returns true when more than three positions match; scanning aborts
/// early once a fifth mismatch is seen. Characters beyond the consensus
/// length are ignored (the old indexing version panicked on them).
fn is_promotor(substr: &str, consensus: &[char]) -> bool {
    let mut matches: u32 = 0;
    let mut mismatches: u32 = 0;
    // Zip instead of `consensus[index]` so a too-long `substr` cannot panic.
    for (ch, &expected) in substr.chars().zip(consensus.iter()) {
        if ch == expected {
            matches += 1;
        } else if mismatches <= 3 {
            mismatches += 1;
        } else {
            return false;
        }
    }
    matches > 3
}
/// Tuning knobs for terminator (stop-site) detection.
struct StopSiteParams<'a> {
    /// Length of the GC-rich region tested by `is_terminator`.
    len: usize,
    /// Length of the downstream window searched for the palindrome.
    t_region_len: usize,
    /// Poly-T run that marks the actual stop (e.g. "TTTT").
    stop_seq: &'a str,
    /// Length of `stop_seq`, supplied separately by the caller.
    stop_seq_len: usize
}
/// Scans `dna` for terminator-like stop sites: a GC-rich region, followed
/// by its reverse complement (hairpin palindrome), followed by the poly-T
/// stop sequence.
fn find_stop_sites(dna: &String, params: StopSiteParams) -> Vec<usize> {
    let mut i: usize = 0;
    // NOTE(review): underflows (panics) when `dna` is shorter than
    // `params.len` — confirm inputs are always long enough.
    let last_start: usize = dna.chars().count() - params.len;
    let mut stop_sites: Vec<usize> = vec![];
    while i <= last_start {
        let n = i + params.len;
        let first_region: &str = dna.substring(i, n);
        if is_terminator(&first_region) {
            let second_region: &str = dna.substring(n, i + params.t_region_len);
            let palindrome: String = make_palindrome(&first_region);
            if let Some(t) = second_region.find(&palindrome) {
                // NOTE(review): `t` is relative to `second_region`, but the
                // end bound `i + params.t_region_len` is an absolute index
                // into `dna`; `substring` clamps, so this over-extends
                // rather than panics — verify the intended window.
                let third_region: &str = second_region.substring(t, i + params.t_region_len);
                if let Some(j) = third_region.find(params.stop_seq) {
                    // Stop site = end of the stop sequence in absolute coords.
                    let stop_site: usize = n + t + j + params.stop_seq_len;
                    stop_sites.push(stop_site);
                }
            }
        }
        i += 1;
    }
    stop_sites
}
/// Builds the reverse complement of `str`: complement each base via
/// `transcribe` (DNA mode), then reverse the result.
fn make_palindrome(str: &str) -> String {
    let strand = Strand {
        bases: String::from(str),
        index: 0,
        is_dna: true
    };
    let complement = transcribe(strand);
    complement.chars().rev().collect()
}
/// Reports whether `sequence` is GC-rich enough to act as a terminator:
/// more than six C+G in total, with more than two of each.
fn is_terminator(sequence: &str) -> bool {
    let cs = sequence.chars().filter(|&c| c == 'C').count() as u32;
    let gs = sequence.chars().filter(|&c| c == 'G').count() as u32;
    cs + gs > 6 && cs > 2 && gs > 2
}
/// Splits `strand` into `seq_len`-sized chunks, tagging each with its
/// ordinal index so parallel transcription can reassemble them in order.
/// The final chunk may be shorter than `seq_len`.
fn to_subs(strand: String, opts: Options) -> Vec<Strand> {
    let strand_len: usize = strand.chars().count();
    let mut substrands: Vec<Strand> = Vec::new();
    // Guard: a zero chunk size would previously loop forever.
    if opts.seq_len == 0 {
        return substrands;
    }
    let mut index: usize = 0;
    // `<` (not `<=`): the old bound emitted a spurious empty trailing
    // substrand whenever `seq_len` divided the length evenly.
    while index * opts.seq_len < strand_len {
        substrands.push(Strand {
            bases: String::from(strand.substring(
                index * opts.seq_len,
                index * opts.seq_len + opts.seq_len)),
            index,
            is_dna: opts.is_dna
        });
        index += 1;
    }
    substrands
}
/// Transcribes every substrand on its own thread and joins the results
/// back together in `index` order.
fn transcribe_strands(strands: Vec<Strand>) -> String {
    let mut handles = vec![];
    let slots = vec![String::from(""); strands.len()];
    let arc: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(slots));
    for strand in strands {
        let clone = Arc::clone(&arc);
        let handle = thread::spawn(move || {
            let i: usize = strand.index;
            // Transcribe BEFORE taking the lock: the old code held the
            // mutex for the whole computation, serializing every thread
            // and defeating the point of spawning them.
            let transcript = transcribe(strand);
            let mut total = clone.lock().unwrap();
            total[i] = transcript;
        });
        handles.push(handle);
    }
    for handle in handles {
        handle.join().unwrap();
    }
    arc.lock().unwrap().join("")
}
/// Produces the complementary strand of `strand.bases`: 'A' pairs with
/// 'T' (DNA) or 'U' (RNA), 'T' pairs with 'A', and 'C'/'G' swap.
///
/// # Panics
/// Panics on any character other than A, T, C or G.
fn transcribe(strand: Strand) -> String {
    // 'A' pairs with thymine in DNA output, uracil in RNA output.
    let var_base = if strand.is_dna { "T" } else { "U" };
    // One output char per input char (all bases are ASCII).
    let mut transcript = String::with_capacity(strand.bases.len());
    for base in strand.bases.chars() {
        transcript += match base {
            'A' => var_base,
            'T' => "A",
            'C' => "G",
            'G' => "C",
            _ => panic!("Detected unknown nucleotide, either you are an alien or this is a mistake!"),
        };
    }
    transcript
}
|
use chrono::prelude::*;
/// A notification as returned by the GitHub notifications API.
#[derive(Debug, Deserialize)]
pub struct Notification {
    /// Why the notification was sent, e.g. "review_requested".
    pub reason: String,
    pub subject: Subject,
    pub repository: Repository,
}
/// The item the notification is about (pull request, issue, ...).
#[derive(Debug, Deserialize)]
pub struct Subject {
    /// Subject kind, e.g. "PullRequest". Renamed because `type` is a
    /// Rust keyword.
    #[serde(rename = "type")]
    pub _type: String,
    pub title: String,
    pub url: String,
}
/// Minimal repository descriptor — only the name is needed here.
#[derive(Debug, Deserialize, Clone)]
pub struct Repository {
    pub name: String,
}
/// A review request distilled from a notification
/// (see [`ReviewRequest::from_notification`]).
#[derive(Debug, Clone)]
pub struct ReviewRequest {
    pub pr_title: String,
    pub repository: String,
    pub url: String,
}
/// A pull request (subset of the API fields this tool uses).
#[derive(Deserialize, Debug, Clone)]
pub struct PullRequest {
    pub number: i64,
    pub title: String,
    pub html_url: String,
    pub created_at: DateTime<Local>,
    /// Set once the PR was merged; `None` otherwise.
    pub merged_at: Option<DateTime<Local>>,
    /// Set once the PR was closed; `None` while open.
    pub closed_at: Option<DateTime<Local>>,
    // Private; the repository name is exposed through `repo()`.
    base: PullRequestBase,
}
/// The base branch object of a pull request; only used to reach the
/// target repository.
#[derive(Deserialize, Debug, Clone)]
struct PullRequestBase {
    repo: Repository,
}
impl PullRequest {
    /// A PR counts as open when it has been neither merged nor closed.
    pub fn is_open(&self) -> bool {
        !(self.merged_at.is_some() || self.closed_at.is_some())
    }
    /// Name of the repository the PR targets.
    pub fn repo(&self) -> &str {
        self.base.repo.name.as_str()
    }
}
impl ReviewRequest {
    /// Distills a notification into a review request; yields `None` for
    /// anything that is not a review request on a pull request.
    pub fn from_notification(n: Notification) -> Option<ReviewRequest> {
        match (n.reason.as_str(), n.subject._type.as_str()) {
            ("review_requested", "PullRequest") => Some(ReviewRequest {
                pr_title: n.subject.title,
                repository: n.repository.name,
                url: n.subject.url,
            }),
            _ => None,
        }
    }
}
|
use actix_web::{get, App, HttpServer, HttpRequest, middleware, HttpResponse, web};
use actix_files::NamedFile;
use std::path::PathBuf;
use std::io::Error;
use actix_web::http::ContentEncoding;
use serde::Deserialize;
use std::fs;
/// Serves static files from `public/static`.
///
/// The user-supplied path is rejected if it contains any `..` component,
/// so a crafted URL cannot escape the static root (path traversal).
#[get("/{filename:.*}")]
async fn index(req: HttpRequest) -> Result<NamedFile, Error> {
    // `join` instead of a hard-coded `\` separator keeps this portable
    // across Windows and Unix.
    let mut path: PathBuf = PathBuf::from("public").join("static");
    // Parsing a string into PathBuf is infallible, so unwrap is safe here.
    let file: PathBuf = req.match_info().query("filename").parse().unwrap();
    if file
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir))
    {
        return Err(Error::new(std::io::ErrorKind::NotFound, "not found"));
    }
    path.push(file);
    Ok(NamedFile::open(path)?)
}
/// Query-string operands for the `/dynamic` endpoint (`?a=..&b=..`).
#[derive(Deserialize)]
struct Operands {
    pub a: i64,
    pub b: i64
}
/*
#[get("/dynamic")]
async fn dynamic(operands: web::Query<Operands>) -> HttpResponse {
let mut data = fs::read_to_string(r"public\test.html")
.expect("File not found!");
data = data.replace("{{}}", &format!("{}", operands.a + operands.b));
return HttpResponse::Ok().body(data);
}
*/
/// Adds the two query operands and reports the sum.
///
/// Uses `checked_add` so an overflowing pair of operands yields a 400
/// response instead of a panic (debug builds) or silent wrap (release).
#[get("/dynamic")]
async fn dynamic(operands: web::Query<Operands>) -> HttpResponse {
    match operands.a.checked_add(operands.b) {
        Some(sum) => HttpResponse::Ok().body(format!("The result of a + b is: {}", sum)),
        None => HttpResponse::BadRequest().body("operands overflow"),
    }
}
/// Boots the HTTP server on 127.0.0.1:8080 with gzip compression and a
/// single worker thread.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(||
        // NOTE(review): `dynamic` is registered before the catch-all file
        // route — presumably so the fixed route wins matching; confirm.
        App::new().wrap(middleware::Compress::new(ContentEncoding::Gzip))
            .service(dynamic)
            .service(index))
        .bind("127.0.0.1:8080")?
        .workers(1)
        .run()
        .await
}
use super::*;
/// ECS system that plays sound effects and music in response to `Sound` events.
pub struct SoundSystem {
    /// Reader cursor into the shared `Sound` event channel.
    reader: ReaderId<Sound>,
}
impl SoundSystem {
    /// Creates the system from a pre-registered event-channel reader.
    pub fn new(reader: ReaderId<Sound>) -> Self {
        // Field-init shorthand (was the redundant `reader: reader`).
        SoundSystem { reader }
    }
}
impl<'a> System<'a> for SoundSystem {
    type SystemData = (
        ReadStorage<'a, ThreadPin<SoundData>>,
        ReadStorage<'a, CharacterMarker>,
        ReadStorage<'a, MultyLazer>,
        WriteStorage<'a, SoundPlacement>,
        ReadExpect<'a, PreloadedSounds>,
        Write<'a, EventChannel<Sound>>,
        Write<'a, LoopSound>,
        ReadExpect<'a, ThreadPin<MusicData<'static>>>,
        Write<'a, Music>,
        Read<'a, AppState>,
    );
    /// Per-frame audio pass:
    /// 1) drains queued `Sound` events onto free SDL mixer channels with
    ///    distance-based volume attenuation,
    /// 2) starts/stops the looping lazer sound for the player character,
    /// 3) switches between battle and menu music based on `AppState`.
    fn run(&mut self, data: Self::SystemData) {
        let (
            sounds,
            character_markers,
            multy_lazers,
            mut sound_placements,
            preloaded_sounds,
            sounds_channel,
            mut loop_sound,
            music_data,
            mut music,
            app_state,
        ) = data;
        // 1) Sound effects: each sound owns a channel range in its
        // `SoundPlacement`; use the first idle channel, rate-limited by
        // `gap` since the last playback.
        for s in sounds_channel.read(&mut self.reader) {
            let sound = &sounds.get(s.0).unwrap().0;
            let position = s.1;
            let placement = sound_placements.get_mut(s.0).unwrap();
            for i in placement.start..placement.end {
                let current_channel = sdl2::mixer::Channel(i as i32);
                if !current_channel.is_playing()
                    && Instant::now().duration_since(placement.last_upd)
                        >= placement.gap
                {
                    placement.last_upd = Instant::now();
                    current_channel.play(sound, 0).unwrap();
                    let n = position.coords.norm();
                    // let smooth = 1.0; // more value less depend on l
                    // Logarithmic volume falloff with distance; full
                    // volume while the source norm is below 10.
                    let l = 1.0 + n;
                    let mut fade = 1.0 / (l.ln());
                    if n < 10f32 {
                        fade = 1.0;
                    }
                    current_channel
                        .set_volume((EFFECT_MAX_VOLUME as f32 * fade) as i32);
                    break;
                }
            }
        }
        // 2) Looping lazer: start an endless loop (-1) while the player's
        // lazer is active, halt it as soon as it is not.
        for (lazer, _character) in (&multy_lazers, &character_markers).join() {
            if lazer.active() {
                if loop_sound.player_lazer_channel.is_none() {
                    let channel = sdl2::mixer::Channel::all()
                        .play(
                            &sounds.get(preloaded_sounds.lazer).unwrap().0,
                            -1,
                        )
                        .unwrap();
                    music.menu_play = false; // hacky
                    loop_sound.player_lazer_channel = Some(channel);
                }
            } else {
                if let Some(lazer) = loop_sound.player_lazer_channel {
                    lazer.halt();
                    loop_sound.player_lazer_channel = None;
                }
            }
        }
        // 3) Music: pick a random battle track when entering play; fall
        // back to the looping menu track on menu/dead screens.
        match *app_state {
            AppState::Play(_) => {
                if music.current_battle.is_none() {
                    let mut rng = thread_rng();
                    let music_id =
                        rng.gen_range(0, music_data.battle_music.len());
                    sdl2::mixer::Music::halt();
                    music.menu_play = false;
                    music_data.battle_music[music_id].play(-1).unwrap();
                    music.current_battle = Some(music_id);
                }
            }
            AppState::Menu | AppState::DeadScreen => {
                loop_sound.player_lazer_channel = None; // hacky
                if let Some(_music_id) = music.current_battle {
                    sdl2::mixer::Music::halt();
                    music.current_battle = None;
                }
                if !music.menu_play {
                    music_data.menu_music.play(-1).unwrap();
                    music.menu_play = true;
                }
            }
            AppState::ScoreTable => {}
        }
    }
}
|
extern crate reqwest;
extern crate serde;
extern crate serde_json;
use warheads::api;
/// `Api` implementation that talks to the remote warheads service over HTTP.
pub struct HttpApi {}
impl HttpApi {
    /// Issues the launch POST with the password appended as the
    /// `launchCode` query value.
    fn post(client: &mut reqwest::Client, password: &str) -> reqwest::Result<reqwest::Response> {
        let url = format!(
            "http://gitland.azurewebsites.net:80/api/warheads/launch?launchCode={}",
            password
        );
        client.post(&url).send()
    }
}
impl api::Api for HttpApi {
fn launch(password: &str) -> Result<api::LaunchResponse, String> {
let mut client = match reqwest::Client::new() {
Ok(client) => client,
Err(_) => return Err("error creating client".to_string()),
};
let mut request_response: reqwest::Response = match Self::post(&mut client, password) {
Ok(response) => response,
Err(_) => return Err("error sending post request".to_string()),
};
let launch_response: api::LaunchResponse = match request_response.json() {
Ok(response) => response,
Err(_) => return Err("error deserialising post response".to_string()),
};
Ok(launch_response)
}
}
#[cfg(test)]
pub mod test {
    use warheads::api::Api;
    use user_password_provider::UserPasswordProvider;
    use user_password_provider::test::MockUserPasswordProvider;
    /// Smoke test: launching with a mock-derived password succeeds.
    /// NOTE(review): this hits the real HTTP endpoint — network-dependent
    /// and flaky offline.
    #[test]
    fn test_launch_call() {
        let password = MockUserPasswordProvider::get_password();
        let api_password = ::api_password_generator::build_password(&password);
        assert!(::warheads::http_api::HttpApi::launch(&api_password).is_ok())
    }
}
|
use clap::CommandFactory;
use clap_complete::{generate_to, Shell};
use std::env;
use std::io::Error;
use std::process;
include!("src/cli.rs");
/// Build script: generates shell completions into `$OUT_DIR` and exports
/// build metadata (rustc version, commit id, target tuple) as env vars
/// readable via `env!` at compile time.
fn main() -> Result<(), Error> {
    let outdir = match env::var_os("OUT_DIR") {
        None => return Err(Error::new(std::io::ErrorKind::Other, "no $OUT_DIR!")),
        Some(outdir) => outdir,
    };
    let mut app = ProgramOptions::command();
    // The first two calls borrow `outdir`; the last can consume it.
    generate_to(Shell::Bash, &mut app, "thumbs", &outdir)?;
    generate_to(Shell::Zsh, &mut app, "thumbs", &outdir)?;
    generate_to(Shell::Fish, &mut app, "thumbs", outdir)?;
    if let Some(v) = version_check::Version::read() {
        println!("cargo:rustc-env=BUILD_RUSTC={}", v)
    }
    // Prefer the git hash; fall back to a CI-provided BUILD_ID.
    if let Some(hash) = get_commit_hash().or_else(|| env::var("BUILD_ID").ok()) {
        println!("cargo:rustc-env=BUILD_ID={}", hash);
    }
    println!(
        "cargo:rustc-env=BUILD_INFO={}-{}-{}-{}",
        env::var("CARGO_CFG_TARGET_ARCH").unwrap(),
        env::var("CARGO_CFG_TARGET_VENDOR").unwrap(),
        env::var("CARGO_CFG_TARGET_OS").unwrap(),
        env::var("CARGO_CFG_TARGET_ENV").unwrap(),
    );
    Ok(())
}
/// Returns the abbreviated HEAD commit hash, or `None` when git is
/// unavailable or the command fails.
fn get_commit_hash() -> Option<String> {
    process::Command::new("git")
        .args(&["rev-parse", "--short", "HEAD"])
        .output()
        .ok()
        .and_then(|r| {
            if r.status.success() {
                // `git` terminates its output with a newline; trim it so
                // the value embedded in `cargo:rustc-env=BUILD_ID=` is a
                // clean hash rather than a corrupted directive.
                String::from_utf8(r.stdout)
                    .ok()
                    .map(|s| s.trim().to_string())
            } else {
                None
            }
        })
}
|
mod compute_constraints;
mod linear_program;
pub use compute_constraints::compute_constraints;
//use crate::log;
use crate::neighborhood::AgentNeighborhood;
use crate::vec2::Vec2;
use itertools::izip;
#[allow(clippy::too_many_arguments)]
/// Computes a collision-avoiding velocity for every agent from its ORCA
/// half-plane constraints.
///
/// All slices are parallel arrays indexed by agent. Per agent the solver
/// falls back through three stages:
/// 1. solve the linear program around the desired velocity,
/// 2. otherwise retry with the speed bound raised to `maximum_speeds`,
/// 3. otherwise keep the desired direction but slow to 90% of the
///    desired speed.
pub fn orca_navigator(
    positions: &[Vec2],
    directions: &[Vec2],
    desired_velocities: &[Vec2],
    radii: &[f64],
    maximum_speeds: &[f64],
    neighborhoods: &[AgentNeighborhood],
    time_horizon: f64,
    dt: f64,
) -> Vec<Vec2> {
    izip!(
        compute_constraints(
            positions,
            desired_velocities,
            radii,
            neighborhoods,
            time_horizon,
            dt
        ),
        directions,
        desired_velocities,
        maximum_speeds,
    )
    .map(
        |(orca_constraints, &direction, &desired_velocity, &maximum_speed)| {
            let desired_speed = desired_velocity.norm();
            // A (near-)zero desired velocity has no usable direction;
            // fall back to the agent's current heading.
            let desired_direction = if desired_speed < f64::EPSILON {
                direction
            } else {
                desired_velocity / desired_speed
            };
            // log!(
            //     "$ Finding valid velocity close to {} respecting {:#?}",
            //     desired_direction.normalize_to(desired_speed),
            //     orca_constraints
            // );
            match linear_program::solve_linear_program(
                &desired_direction,
                desired_speed,
                &orca_constraints,
                true,
            ) {
                Some(corrected_velocity) => {
                    //log!("$$ 1st solve worked -> {}", corrected_velocity);
                    corrected_velocity
                }
                // No solution, let's try to accelerate
                None => {
                    match linear_program::solve_linear_program(
                        &desired_direction,
                        maximum_speed,
                        &orca_constraints,
                        false,
                    ) {
                        Some(corrected_velocity) => {
                            //log!("$$ 2nd solve worked -> {}", corrected_velocity);
                            corrected_velocity
                        }
                        // No solution, let's continue on our merry way
                        None => {
                            //log!("$$ No solution found");
                            desired_direction.normalize_to(0.9 * desired_speed)
                        }
                    }
                }
            }
        },
    )
    .collect()
}
#[cfg(test)]
mod tests {
    use super::super::agent::Agent;
    use super::super::agents::Agents;
    use super::*;
    /// Two stationary agents: expects both output velocities to stay zero.
    #[test]
    fn test_orca_navigator_no_movement() {
        let mut agents = Agents::new();
        agents.create_agent(Agent::new().position(1.0, 0.0));
        agents.create_agent(Agent::new().position(2.0, 0.0));
        let updated_velocities: Vec<Vec2> = orca_navigator(
            agents.get_positions(),
            agents.get_directions(),
            agents.get_velocities(),
            agents.get_radii(),
            agents.get_maximum_speeds(),
            &AgentNeighborhood::compute_agents_neighborhood(
                agents.get_positions(),
                agents.get_velocities(),
                agents.get_radii(),
            ),
            10.,
            0.5,
        );
        itertools::assert_equal(
            updated_velocities,
            vec![Vec2::new(0.0, 0.0), Vec2::new(0.0, 0.0)],
        )
    }
    /// Agents moving apart: expects the diverging velocities to pass
    /// through unchanged.
    #[test]
    fn test_orca_navigator_diverging_movement() {
        let mut agents = Agents::new();
        agents.create_agent(Agent::new().position(1.0, 0.0).velocity(0.0, -1.0));
        agents.create_agent(Agent::new().position(2.0, 0.0).velocity(0.0, 1.0));
        let updated_velocities: Vec<Vec2> = orca_navigator(
            agents.get_positions(),
            agents.get_directions(),
            agents.get_velocities(),
            agents.get_radii(),
            agents.get_maximum_speeds(),
            &AgentNeighborhood::compute_agents_neighborhood(
                agents.get_positions(),
                agents.get_velocities(),
                agents.get_radii(),
            ),
            10.,
            0.5,
        );
        itertools::assert_equal(
            updated_velocities,
            vec![Vec2::new(0.0, -1.0), Vec2::new(0.0, 1.0)],
        )
    }
    /// Head-on agents: only checks the solver runs without panicking.
    #[test]
    fn test_orca_navigator_converging_movement() {
        let mut agents = Agents::new();
        agents.create_agent(Agent::new().position(0.0, 0.0).velocity(1.0, 0.0));
        agents.create_agent(Agent::new().position(1.0, 0.0).velocity(-1.0, 0.0));
        orca_navigator(
            agents.get_positions(),
            agents.get_directions(),
            agents.get_velocities(),
            agents.get_radii(),
            agents.get_maximum_speeds(),
            &AgentNeighborhood::compute_agents_neighborhood(
                agents.get_positions(),
                agents.get_velocities(),
                agents.get_radii(),
            ),
            10.,
            0.5,
        );
        // No assertion, let's just make sure everything works fine
    }
}
|
// Copyright 2020 EinsteinDB Project Authors. Licensed under Apache-2.0.
// TODO: remove following line
#![allow(dead_code)]
use super::changer::Changer;
use crate::evioletabftpb::{ConfChangeSingle, ConfChangeType, ConfState};
use crate::tracker::ProgressTracker;
use crate::Result;
/// Translates a conf state into 1) a slice of operations creating first the config that
/// will become the outgoing one, and then the incoming one, and b) another slice that,
/// when applied to the config resulted from 1), represents the ConfState.
fn to_conf_change_single(cs: &ConfState) -> (Vec<ConfChangeSingle>, Vec<ConfChangeSingle>) {
// Example to follow along this code:
// voters=(1 2 3) learners=(5) outgoing=(1 2 4 6) learners_next=(4)
//
// This means that before entering the joint config, the configuration
// had voters (1 2 4 6) and perhaps some learners that are already gone.
// The new set of voters is (1 2 3), i.e. (1 2) were kept around, and (4 6)
// are no longer voters; however 4 is poised to become a learner upon leaving
// the joint state.
// We can't tell whether 5 was a learner before entering the joint config,
// but it doesn't matter (we'll pretend that it wasn't).
//
// The code below will construct
// outgoing = add 1; add 2; add 4; add 6
// incoming = remove 1; remove 2; remove 4; remove 6
// add 1; add 2; add 3;
// add-learner 5;
// add-learner 4;
//
// So, when starting with an empty config, after applying 'outgoing' we have
//
// quorum=(1 2 4 6)
//
// From which we enter a joint state via 'incoming'
//
// quorum=(1 2 3)&&(1 2 4 6) learners=(5) learners_next=(4)
//
// as desired.
let mut incoming = Vec::new();
let mut outgoing = Vec::new();
for id in cs.get_voters_outgoing() {
// If there are outgoing voters, first add them one by one so that the
// (non-joint) config has them all.
outgoing.push(violetabft_proto::new_conf_change_single(
*id,
ConfChangeType::AddNode,
));
}
// We're done constructing the outgoing slice, now on to the incoming one
// (which will apply on top of the config created by the outgoing slice).
// First, we'll remove all of the outgoing voters.
for id in cs.get_voters_outgoing() {
incoming.push(violetabft_proto::new_conf_change_single(
*id,
ConfChangeType::RemoveNode,
));
}
// Then we'll add the incoming voters and learners.
for id in cs.get_voters() {
incoming.push(violetabft_proto::new_conf_change_single(
*id,
ConfChangeType::AddNode,
));
}
for id in cs.get_learners() {
incoming.push(violetabft_proto::new_conf_change_single(
*id,
ConfChangeType::AddLearnerNode,
));
}
// Same for LearnersNext; these are nodes we want to be learners but which
// are currently voters in the outgoing config.
for id in cs.get_learners_next() {
incoming.push(violetabft_proto::new_conf_change_single(
*id,
ConfChangeType::AddLearnerNode,
));
}
(outgoing, incoming)
}
/// Restore takes a Changer (which must represent an empty configuration), and runs a
/// sequence of changes enacting the configuration described in the ConfState.
///
/// TODO(jay) find a way to only take `ProgressMap` instead of a whole tracker.
pub fn restore(tracker: &mut ProgressTracker, next_idx: u64, cs: &ConfState) -> Result<()> {
    let (outgoing, incoming) = to_conf_change_single(cs);
    if outgoing.is_empty() {
        // No previous (outgoing) config: apply each incoming change as a
        // plain simple change — no joint consensus required.
        for cc in incoming {
            let (cfg, changes) = Changer::new(tracker).simple(&[cc])?;
            tracker.apply_conf(cfg, changes, next_idx);
        }
        return Ok(());
    }
    // Re-create the outgoing config first, one simple change at a time...
    for cc in outgoing {
        let (cfg, changes) = Changer::new(tracker).simple(&[cc])?;
        tracker.apply_conf(cfg, changes, next_idx);
    }
    // ...then enter the joint configuration with the incoming changes.
    let (cfg, changes) = Changer::new(tracker).enter_joint(cs.auto_leave, &incoming)?;
    tracker.apply_conf(cfg, changes, next_idx);
    Ok(())
}
|
/// An enum to represent all characters in the HangulJamoExtendedB block.
///
/// Variants cover U+D7B0..=U+D7C6 (jungseong, medial vowels) and
/// U+D7CB..=U+D7FB (jongseong, final consonants); code points in the gap
/// U+D7C7..=U+D7CA have no variant here.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum HangulJamoExtendedB {
    /// \u{d7b0}: 'ힰ'
    HangulJungseongODashYeo,
    /// \u{d7b1}: 'ힱ'
    HangulJungseongODashODashI,
    /// \u{d7b2}: 'ힲ'
    HangulJungseongYoDashA,
    /// \u{d7b3}: 'ힳ'
    HangulJungseongYoDashAe,
    /// \u{d7b4}: 'ힴ'
    HangulJungseongYoDashEo,
    /// \u{d7b5}: 'ힵ'
    HangulJungseongUDashYeo,
    /// \u{d7b6}: 'ힶ'
    HangulJungseongUDashIDashI,
    /// \u{d7b7}: 'ힷ'
    HangulJungseongYuDashAe,
    /// \u{d7b8}: 'ힸ'
    HangulJungseongYuDashO,
    /// \u{d7b9}: 'ힹ'
    HangulJungseongEuDashA,
    /// \u{d7ba}: 'ힺ'
    HangulJungseongEuDashEo,
    /// \u{d7bb}: 'ힻ'
    HangulJungseongEuDashE,
    /// \u{d7bc}: 'ힼ'
    HangulJungseongEuDashO,
    /// \u{d7bd}: 'ힽ'
    HangulJungseongIDashYaDashO,
    /// \u{d7be}: 'ힾ'
    HangulJungseongIDashYae,
    /// \u{d7bf}: 'ힿ'
    HangulJungseongIDashYeo,
    /// \u{d7c0}: 'ퟀ'
    HangulJungseongIDashYe,
    /// \u{d7c1}: 'ퟁ'
    HangulJungseongIDashODashI,
    /// \u{d7c2}: 'ퟂ'
    HangulJungseongIDashYo,
    /// \u{d7c3}: 'ퟃ'
    HangulJungseongIDashYu,
    /// \u{d7c4}: 'ퟄ'
    HangulJungseongIDashI,
    /// \u{d7c5}: 'ퟅ'
    HangulJungseongAraeaDashA,
    /// \u{d7c6}: 'ퟆ'
    HangulJungseongAraeaDashE,
    /// \u{d7cb}: 'ퟋ'
    HangulJongseongNieunDashRieul,
    /// \u{d7cc}: 'ퟌ'
    HangulJongseongNieunDashChieuch,
    /// \u{d7cd}: 'ퟍ'
    HangulJongseongSsangtikeut,
    /// \u{d7ce}: 'ퟎ'
    HangulJongseongSsangtikeutDashPieup,
    /// \u{d7cf}: 'ퟏ'
    HangulJongseongTikeutDashPieup,
    /// \u{d7d0}: 'ퟐ'
    HangulJongseongTikeutDashSios,
    /// \u{d7d1}: 'ퟑ'
    HangulJongseongTikeutDashSiosDashKiyeok,
    /// \u{d7d2}: 'ퟒ'
    HangulJongseongTikeutDashCieuc,
    /// \u{d7d3}: 'ퟓ'
    HangulJongseongTikeutDashChieuch,
    /// \u{d7d4}: 'ퟔ'
    HangulJongseongTikeutDashThieuth,
    /// \u{d7d5}: 'ퟕ'
    HangulJongseongRieulDashSsangkiyeok,
    /// \u{d7d6}: 'ퟖ'
    HangulJongseongRieulDashKiyeokDashHieuh,
    /// \u{d7d7}: 'ퟗ'
    HangulJongseongSsangrieulDashKhieukh,
    /// \u{d7d8}: 'ퟘ'
    HangulJongseongRieulDashMieumDashHieuh,
    /// \u{d7d9}: 'ퟙ'
    HangulJongseongRieulDashPieupDashTikeut,
    /// \u{d7da}: 'ퟚ'
    HangulJongseongRieulDashPieupDashPhieuph,
    /// \u{d7db}: 'ퟛ'
    HangulJongseongRieulDashYesieung,
    /// \u{d7dc}: 'ퟜ'
    HangulJongseongRieulDashYeorinhieuhDashHieuh,
    /// \u{d7dd}: 'ퟝ'
    HangulJongseongKapyeounrieul,
    /// \u{d7de}: 'ퟞ'
    HangulJongseongMieumDashNieun,
    /// \u{d7df}: 'ퟟ'
    HangulJongseongMieumDashSsangnieun,
    /// \u{d7e0}: 'ퟠ'
    HangulJongseongSsangmieum,
    /// \u{d7e1}: 'ퟡ'
    HangulJongseongMieumDashPieupDashSios,
    /// \u{d7e2}: 'ퟢ'
    HangulJongseongMieumDashCieuc,
    /// \u{d7e3}: 'ퟣ'
    HangulJongseongPieupDashTikeut,
    /// \u{d7e4}: 'ퟤ'
    HangulJongseongPieupDashRieulDashPhieuph,
    /// \u{d7e5}: 'ퟥ'
    HangulJongseongPieupDashMieum,
    /// \u{d7e6}: 'ퟦ'
    HangulJongseongSsangpieup,
    /// \u{d7e7}: 'ퟧ'
    HangulJongseongPieupDashSiosDashTikeut,
    /// \u{d7e8}: 'ퟨ'
    HangulJongseongPieupDashCieuc,
    /// \u{d7e9}: 'ퟩ'
    HangulJongseongPieupDashChieuch,
    /// \u{d7ea}: 'ퟪ'
    HangulJongseongSiosDashMieum,
    /// \u{d7eb}: 'ퟫ'
    HangulJongseongSiosDashKapyeounpieup,
    /// \u{d7ec}: 'ퟬ'
    HangulJongseongSsangsiosDashKiyeok,
    /// \u{d7ed}: 'ퟭ'
    HangulJongseongSsangsiosDashTikeut,
    /// \u{d7ee}: 'ퟮ'
    HangulJongseongSiosDashPansios,
    /// \u{d7ef}: 'ퟯ'
    HangulJongseongSiosDashCieuc,
    /// \u{d7f0}: 'ퟰ'
    HangulJongseongSiosDashChieuch,
    /// \u{d7f1}: 'ퟱ'
    HangulJongseongSiosDashThieuth,
    /// \u{d7f2}: 'ퟲ'
    HangulJongseongSiosDashHieuh,
    /// \u{d7f3}: 'ퟳ'
    HangulJongseongPansiosDashPieup,
    /// \u{d7f4}: 'ퟴ'
    HangulJongseongPansiosDashKapyeounpieup,
    /// \u{d7f5}: 'ퟵ'
    HangulJongseongYesieungDashMieum,
    /// \u{d7f6}: 'ퟶ'
    HangulJongseongYesieungDashHieuh,
    /// \u{d7f7}: 'ퟷ'
    HangulJongseongCieucDashPieup,
    /// \u{d7f8}: 'ퟸ'
    HangulJongseongCieucDashSsangpieup,
    /// \u{d7f9}: 'ퟹ'
    HangulJongseongSsangcieuc,
    /// \u{d7fa}: 'ퟺ'
    HangulJongseongPhieuphDashSios,
    /// \u{d7fb}: 'ퟻ'
    HangulJongseongPhieuphDashThieuth,
}
/// Conversion to the corresponding Unicode scalar value.
///
/// Implemented as `From<HangulJamoExtendedB> for char` instead of a
/// manual `Into` impl (clippy: `from_over_into`); the standard blanket
/// impl keeps `.into()` working for all existing callers and additionally
/// enables `char::from(value)`.
impl From<HangulJamoExtendedB> for char {
    fn from(value: HangulJamoExtendedB) -> char {
        match value {
            HangulJamoExtendedB::HangulJungseongODashYeo => 'ힰ',
            HangulJamoExtendedB::HangulJungseongODashODashI => 'ힱ',
            HangulJamoExtendedB::HangulJungseongYoDashA => 'ힲ',
            HangulJamoExtendedB::HangulJungseongYoDashAe => 'ힳ',
            HangulJamoExtendedB::HangulJungseongYoDashEo => 'ힴ',
            HangulJamoExtendedB::HangulJungseongUDashYeo => 'ힵ',
            HangulJamoExtendedB::HangulJungseongUDashIDashI => 'ힶ',
            HangulJamoExtendedB::HangulJungseongYuDashAe => 'ힷ',
            HangulJamoExtendedB::HangulJungseongYuDashO => 'ힸ',
            HangulJamoExtendedB::HangulJungseongEuDashA => 'ힹ',
            HangulJamoExtendedB::HangulJungseongEuDashEo => 'ힺ',
            HangulJamoExtendedB::HangulJungseongEuDashE => 'ힻ',
            HangulJamoExtendedB::HangulJungseongEuDashO => 'ힼ',
            HangulJamoExtendedB::HangulJungseongIDashYaDashO => 'ힽ',
            HangulJamoExtendedB::HangulJungseongIDashYae => 'ힾ',
            HangulJamoExtendedB::HangulJungseongIDashYeo => 'ힿ',
            HangulJamoExtendedB::HangulJungseongIDashYe => 'ퟀ',
            HangulJamoExtendedB::HangulJungseongIDashODashI => 'ퟁ',
            HangulJamoExtendedB::HangulJungseongIDashYo => 'ퟂ',
            HangulJamoExtendedB::HangulJungseongIDashYu => 'ퟃ',
            HangulJamoExtendedB::HangulJungseongIDashI => 'ퟄ',
            HangulJamoExtendedB::HangulJungseongAraeaDashA => 'ퟅ',
            HangulJamoExtendedB::HangulJungseongAraeaDashE => 'ퟆ',
            HangulJamoExtendedB::HangulJongseongNieunDashRieul => 'ퟋ',
            HangulJamoExtendedB::HangulJongseongNieunDashChieuch => 'ퟌ',
            HangulJamoExtendedB::HangulJongseongSsangtikeut => 'ퟍ',
            HangulJamoExtendedB::HangulJongseongSsangtikeutDashPieup => 'ퟎ',
            HangulJamoExtendedB::HangulJongseongTikeutDashPieup => 'ퟏ',
            HangulJamoExtendedB::HangulJongseongTikeutDashSios => 'ퟐ',
            HangulJamoExtendedB::HangulJongseongTikeutDashSiosDashKiyeok => 'ퟑ',
            HangulJamoExtendedB::HangulJongseongTikeutDashCieuc => 'ퟒ',
            HangulJamoExtendedB::HangulJongseongTikeutDashChieuch => 'ퟓ',
            HangulJamoExtendedB::HangulJongseongTikeutDashThieuth => 'ퟔ',
            HangulJamoExtendedB::HangulJongseongRieulDashSsangkiyeok => 'ퟕ',
            HangulJamoExtendedB::HangulJongseongRieulDashKiyeokDashHieuh => 'ퟖ',
            HangulJamoExtendedB::HangulJongseongSsangrieulDashKhieukh => 'ퟗ',
            HangulJamoExtendedB::HangulJongseongRieulDashMieumDashHieuh => 'ퟘ',
            HangulJamoExtendedB::HangulJongseongRieulDashPieupDashTikeut => 'ퟙ',
            HangulJamoExtendedB::HangulJongseongRieulDashPieupDashPhieuph => 'ퟚ',
            HangulJamoExtendedB::HangulJongseongRieulDashYesieung => 'ퟛ',
            HangulJamoExtendedB::HangulJongseongRieulDashYeorinhieuhDashHieuh => 'ퟜ',
            HangulJamoExtendedB::HangulJongseongKapyeounrieul => 'ퟝ',
            HangulJamoExtendedB::HangulJongseongMieumDashNieun => 'ퟞ',
            HangulJamoExtendedB::HangulJongseongMieumDashSsangnieun => 'ퟟ',
            HangulJamoExtendedB::HangulJongseongSsangmieum => 'ퟠ',
            HangulJamoExtendedB::HangulJongseongMieumDashPieupDashSios => 'ퟡ',
            HangulJamoExtendedB::HangulJongseongMieumDashCieuc => 'ퟢ',
            HangulJamoExtendedB::HangulJongseongPieupDashTikeut => 'ퟣ',
            HangulJamoExtendedB::HangulJongseongPieupDashRieulDashPhieuph => 'ퟤ',
            HangulJamoExtendedB::HangulJongseongPieupDashMieum => 'ퟥ',
            HangulJamoExtendedB::HangulJongseongSsangpieup => 'ퟦ',
            HangulJamoExtendedB::HangulJongseongPieupDashSiosDashTikeut => 'ퟧ',
            HangulJamoExtendedB::HangulJongseongPieupDashCieuc => 'ퟨ',
            HangulJamoExtendedB::HangulJongseongPieupDashChieuch => 'ퟩ',
            HangulJamoExtendedB::HangulJongseongSiosDashMieum => 'ퟪ',
            HangulJamoExtendedB::HangulJongseongSiosDashKapyeounpieup => 'ퟫ',
            HangulJamoExtendedB::HangulJongseongSsangsiosDashKiyeok => 'ퟬ',
            HangulJamoExtendedB::HangulJongseongSsangsiosDashTikeut => 'ퟭ',
            HangulJamoExtendedB::HangulJongseongSiosDashPansios => 'ퟮ',
            HangulJamoExtendedB::HangulJongseongSiosDashCieuc => 'ퟯ',
            HangulJamoExtendedB::HangulJongseongSiosDashChieuch => 'ퟰ',
            HangulJamoExtendedB::HangulJongseongSiosDashThieuth => 'ퟱ',
            HangulJamoExtendedB::HangulJongseongSiosDashHieuh => 'ퟲ',
            HangulJamoExtendedB::HangulJongseongPansiosDashPieup => 'ퟳ',
            HangulJamoExtendedB::HangulJongseongPansiosDashKapyeounpieup => 'ퟴ',
            HangulJamoExtendedB::HangulJongseongYesieungDashMieum => 'ퟵ',
            HangulJamoExtendedB::HangulJongseongYesieungDashHieuh => 'ퟶ',
            HangulJamoExtendedB::HangulJongseongCieucDashPieup => 'ퟷ',
            HangulJamoExtendedB::HangulJongseongCieucDashSsangpieup => 'ퟸ',
            HangulJamoExtendedB::HangulJongseongSsangcieuc => 'ퟹ',
            HangulJamoExtendedB::HangulJongseongPhieuphDashSios => 'ퟺ',
            HangulJamoExtendedB::HangulJongseongPhieuphDashThieuth => 'ퟻ',
        }
    }
}
// Inverse of the char conversion: accepts only code points belonging to the
// Hangul Jamo Extended-B block. Note the gap between the last Jungseong
// ('ퟆ') and the first Jongseong ('ퟋ') arms -- code points in between are
// rejected like any other character, with `Err(())`.
impl std::convert::TryFrom<char> for HangulJamoExtendedB {
type Error = ();
fn try_from(c: char) -> Result<Self, Self::Error> {
match c {
'ힰ' => Ok(HangulJamoExtendedB::HangulJungseongODashYeo),
'ힱ' => Ok(HangulJamoExtendedB::HangulJungseongODashODashI),
'ힲ' => Ok(HangulJamoExtendedB::HangulJungseongYoDashA),
'ힳ' => Ok(HangulJamoExtendedB::HangulJungseongYoDashAe),
'ힴ' => Ok(HangulJamoExtendedB::HangulJungseongYoDashEo),
'ힵ' => Ok(HangulJamoExtendedB::HangulJungseongUDashYeo),
'ힶ' => Ok(HangulJamoExtendedB::HangulJungseongUDashIDashI),
'ힷ' => Ok(HangulJamoExtendedB::HangulJungseongYuDashAe),
'ힸ' => Ok(HangulJamoExtendedB::HangulJungseongYuDashO),
'ힹ' => Ok(HangulJamoExtendedB::HangulJungseongEuDashA),
'ힺ' => Ok(HangulJamoExtendedB::HangulJungseongEuDashEo),
'ힻ' => Ok(HangulJamoExtendedB::HangulJungseongEuDashE),
'ힼ' => Ok(HangulJamoExtendedB::HangulJungseongEuDashO),
'ힽ' => Ok(HangulJamoExtendedB::HangulJungseongIDashYaDashO),
'ힾ' => Ok(HangulJamoExtendedB::HangulJungseongIDashYae),
'ힿ' => Ok(HangulJamoExtendedB::HangulJungseongIDashYeo),
'ퟀ' => Ok(HangulJamoExtendedB::HangulJungseongIDashYe),
'ퟁ' => Ok(HangulJamoExtendedB::HangulJungseongIDashODashI),
'ퟂ' => Ok(HangulJamoExtendedB::HangulJungseongIDashYo),
'ퟃ' => Ok(HangulJamoExtendedB::HangulJungseongIDashYu),
'ퟄ' => Ok(HangulJamoExtendedB::HangulJungseongIDashI),
'ퟅ' => Ok(HangulJamoExtendedB::HangulJungseongAraeaDashA),
'ퟆ' => Ok(HangulJamoExtendedB::HangulJungseongAraeaDashE),
// Jongseong (final consonant) range starts here, after the gap.
'ퟋ' => Ok(HangulJamoExtendedB::HangulJongseongNieunDashRieul),
'ퟌ' => Ok(HangulJamoExtendedB::HangulJongseongNieunDashChieuch),
'ퟍ' => Ok(HangulJamoExtendedB::HangulJongseongSsangtikeut),
'ퟎ' => Ok(HangulJamoExtendedB::HangulJongseongSsangtikeutDashPieup),
'ퟏ' => Ok(HangulJamoExtendedB::HangulJongseongTikeutDashPieup),
'ퟐ' => Ok(HangulJamoExtendedB::HangulJongseongTikeutDashSios),
'ퟑ' => Ok(HangulJamoExtendedB::HangulJongseongTikeutDashSiosDashKiyeok),
'ퟒ' => Ok(HangulJamoExtendedB::HangulJongseongTikeutDashCieuc),
'ퟓ' => Ok(HangulJamoExtendedB::HangulJongseongTikeutDashChieuch),
'ퟔ' => Ok(HangulJamoExtendedB::HangulJongseongTikeutDashThieuth),
'ퟕ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashSsangkiyeok),
'ퟖ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashKiyeokDashHieuh),
'ퟗ' => Ok(HangulJamoExtendedB::HangulJongseongSsangrieulDashKhieukh),
'ퟘ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashMieumDashHieuh),
'ퟙ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashPieupDashTikeut),
'ퟚ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashPieupDashPhieuph),
'ퟛ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashYesieung),
'ퟜ' => Ok(HangulJamoExtendedB::HangulJongseongRieulDashYeorinhieuhDashHieuh),
'ퟝ' => Ok(HangulJamoExtendedB::HangulJongseongKapyeounrieul),
'ퟞ' => Ok(HangulJamoExtendedB::HangulJongseongMieumDashNieun),
'ퟟ' => Ok(HangulJamoExtendedB::HangulJongseongMieumDashSsangnieun),
'ퟠ' => Ok(HangulJamoExtendedB::HangulJongseongSsangmieum),
'ퟡ' => Ok(HangulJamoExtendedB::HangulJongseongMieumDashPieupDashSios),
'ퟢ' => Ok(HangulJamoExtendedB::HangulJongseongMieumDashCieuc),
'ퟣ' => Ok(HangulJamoExtendedB::HangulJongseongPieupDashTikeut),
'ퟤ' => Ok(HangulJamoExtendedB::HangulJongseongPieupDashRieulDashPhieuph),
'ퟥ' => Ok(HangulJamoExtendedB::HangulJongseongPieupDashMieum),
'ퟦ' => Ok(HangulJamoExtendedB::HangulJongseongSsangpieup),
'ퟧ' => Ok(HangulJamoExtendedB::HangulJongseongPieupDashSiosDashTikeut),
'ퟨ' => Ok(HangulJamoExtendedB::HangulJongseongPieupDashCieuc),
'ퟩ' => Ok(HangulJamoExtendedB::HangulJongseongPieupDashChieuch),
'ퟪ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashMieum),
'ퟫ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashKapyeounpieup),
'ퟬ' => Ok(HangulJamoExtendedB::HangulJongseongSsangsiosDashKiyeok),
'ퟭ' => Ok(HangulJamoExtendedB::HangulJongseongSsangsiosDashTikeut),
'ퟮ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashPansios),
'ퟯ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashCieuc),
'ퟰ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashChieuch),
'ퟱ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashThieuth),
'ퟲ' => Ok(HangulJamoExtendedB::HangulJongseongSiosDashHieuh),
'ퟳ' => Ok(HangulJamoExtendedB::HangulJongseongPansiosDashPieup),
'ퟴ' => Ok(HangulJamoExtendedB::HangulJongseongPansiosDashKapyeounpieup),
'ퟵ' => Ok(HangulJamoExtendedB::HangulJongseongYesieungDashMieum),
'ퟶ' => Ok(HangulJamoExtendedB::HangulJongseongYesieungDashHieuh),
'ퟷ' => Ok(HangulJamoExtendedB::HangulJongseongCieucDashPieup),
'ퟸ' => Ok(HangulJamoExtendedB::HangulJongseongCieucDashSsangpieup),
'ퟹ' => Ok(HangulJamoExtendedB::HangulJongseongSsangcieuc),
'ퟺ' => Ok(HangulJamoExtendedB::HangulJongseongPhieuphDashSios),
'ퟻ' => Ok(HangulJamoExtendedB::HangulJongseongPhieuphDashThieuth),
_ => Err(()),
}
}
}
impl Into<u32> for HangulJamoExtendedB {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
/// Attempts to interpret a `u32` code point as a character of this block.
impl std::convert::TryFrom<u32> for HangulJamoExtendedB {
type Error = ();
fn try_from(u: u32) -> Result<Self, Self::Error> {
// First require a valid Unicode scalar value, then defer to the
// `char` conversion for the block-membership check.
char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
}
}
// Steps through the block in code-point order. Yields `None` as soon as the
// successor code point is not a member of the block (including at the
// interior gap handled by `TryFrom<char>`), which terminates iteration.
impl Iterator for HangulJamoExtendedB {
type Item = Self;
fn next(&mut self) -> Option<Self> {
use std::convert::TryFrom;
let code: u32 = (*self).into();
Self::try_from(code + 1).ok()
}
}
impl HangulJamoExtendedB {
/// The character with the lowest index in this unicode block
pub fn new() -> Self {
HangulJamoExtendedB::HangulJungseongODashYeo
}
/// The character's name, in sentence case
// NOTE(review): relies on the variant's `Debug` output matching the
// `HangulJamoExtendedB{Variant}` shape that `string_morph` then
// sentence-cases -- confirm if the derive output ever changes.
pub fn name(&self) -> String {
let s = std::format!("HangulJamoExtendedB{:#?}", self);
string_morph::to_sentence_case(&s)
}
}
|
use actix_web::{error, http, HttpResponse};
// Errors surfaced while fetching an entry; `#[fail]` (failure crate)
// supplies the `Display` messages. The `ResponseError` impl maps each
// variant to an HTTP status code.
#[derive(Fail, Debug)]
pub enum FetchError {
// Backing-store failure -> 500 Internal Server Error.
#[fail(display = "internal error")]
StoreError,
// Lookup succeeded but no matching entry -> 404 Not Found.
#[fail(display = "entry not found")]
NotFoundError,
// The request carried no id at all -> 400 Bad Request.
#[fail(display = "no id provided")]
NoProvidedIdError,
// An id was present but could not be parsed -> 422 Unprocessable Entity.
#[fail(display = "unparseable id")]
IDParsingError,
}
/// Maps each `FetchError` variant onto the HTTP status code actix-web
/// should answer with; the response carries no body.
impl error::ResponseError for FetchError {
fn error_response(&self) -> HttpResponse {
// Arms listed in enum-declaration order for easier auditing; the
// response construction is hoisted out of the match.
let status = match *self {
FetchError::StoreError => http::StatusCode::INTERNAL_SERVER_ERROR,
FetchError::NotFoundError => http::StatusCode::NOT_FOUND,
FetchError::NoProvidedIdError => http::StatusCode::BAD_REQUEST,
FetchError::IDParsingError => http::StatusCode::UNPROCESSABLE_ENTITY,
};
HttpResponse::new(status)
}
}
|
// NOTE(review): svd2rust-style generated register accessor -- prefer fixing
// the SVD/generator over hand-editing this block.
#[doc = "Reader of register RCRC"]
pub type R = crate::R<u32, super::RCRC>;
#[doc = "Reader of field `RCR`"]
pub type RCR_R = crate::R<u16, u16>;
impl R {
#[doc = "Bits 0:15 - RX CRC register"]
#[inline(always)]
pub fn rcr(&self) -> RCR_R {
// Mask off the low 16 bits of the 32-bit register value.
RCR_R::new((self.bits & 0xffff) as u16)
}
}
|
/*
* A simple concurrency example in Rust using std::arc
*
* picked up bits from: https://github.com/mozilla/rust/issues/3562
* This program does foo.
*/
// Crate linkage metadata
#[ link(name = "spawnARC",
vers = "0.1.2",
author = "smadhueagle") ];
#[pkg(id = "spawnARC", vers = "0.1.2")];
// Additional metadata attributes
#[ desc = "A spawn with ARC example in rust"];
#[ license = "MIT" ];
#[crate_type = "bin"];
extern mod std;
extern mod rustils;
extern mod extra;
use extra::arc;
use rustils::randutils;
use std::task::spawn;
// this function just waits looping indefinitely
// using this to test what the cost is for creating
// a large no. of tasks in rust & passing a large data
// array for distributed processing.
// NOTE(review): pre-1.0 Rust (circa 0.x: `int`, `~[int]`, `fmt!`) -- this
// does not compile on modern Rust and is kept as a historical example only.
// Prints the element of `arr` at index `pid`, then spins forever so the
// task stays alive for the task-creation-cost measurement described above.
fn spawn_test_003(pid: int, arr: &~[int]){
println(fmt!("child: %d, val = %d", pid, arr[pid]));
loop {};
}
// NOTE(review): pre-1.0 Rust (`do spawn`, `range`, old `extra::arc`) --
// historical example; will not build on any modern toolchain.
fn main(){
// variable 'a' simulates the large data object we want our
// rust tasks to process.
let a: ~[int] = randutils::gen_rand_vec_int(100000, [0, 25000]);
// we create an atomic reference counter to share immutable 'a'
// across all tasks without copying heavy data.
let a_ref = arc::Arc::new(a);
// One task per element: each clone is only a refcount bump on the ARC,
// not a copy of the 100000-element vector. Because every task loops
// forever (see spawn_test_003), this program intentionally never exits.
for taskid in range(0, 100000) {
let a_ref_copy = a_ref.clone();
do spawn {
spawn_test_003(taskid, a_ref_copy.get());
}
}
}
|
use handlegraph::handle::{Handle, NodeId};
use nalgebra_glm as glm;
use crate::{
geometry::{Point, Rect},
universe::Node,
view::View,
};
/// Where a label is anchored.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum LabelPos {
/// Fixed world-space point, with an optional offset direction.
World {
point: Point,
offset: Option<Point>,
},
/// Attached to a graph node handle; when `offset` is `None` it is
/// derived from the node's geometry (see `LabelPos::offset`).
Handle {
handle: Handle,
offset: Option<Point>,
},
}
impl LabelPos {
/// Returns the label's offset direction, if any.
///
/// `World` labels return their stored offset verbatim. `Handle` labels
/// without an explicit offset derive one from the node geometry: the
/// normalized vector perpendicular (rotated +90 degrees) to the node's
/// start->end segment.
pub fn offset(&self, nodes: &[Node]) -> Option<Point> {
match *self {
LabelPos::World { offset, .. } => offset,
LabelPos::Handle { handle, offset } => {
// if the offset field isn't set, we use the handle
// orientation and node position to figure it out
if let Some(offset) = offset {
return Some(offset);
}
// NOTE(review): assumes node ids are 1-based and dense so
// `id - 1` indexes `nodes`; an out-of-range id panics here.
let id = handle.id();
let ix = id.0 - 1;
let node = nodes[ix as usize];
let start_p = node.p0;
let end_p = node.p1;
let start_v = glm::vec2(start_p.x, start_p.y);
let end_v = glm::vec2(end_p.x, end_p.y);
let del = end_v - start_v;
let rot_del =
glm::rotate_vec2(&del, std::f32::consts::PI / 2.0);
let rot_del_norm = rot_del.normalize();
let offset = Point::new(rot_del_norm[0], rot_del_norm[1]);
Some(offset)
}
}
}
/// The world point if this is a `World` label, `None` for `Handle`.
pub fn try_world(&self) -> Option<Point> {
if let LabelPos::World { point, .. } = *self {
Some(point)
} else {
None
}
}
/// Resolves the label's world position; `Handle` labels resolve to the
/// node's center (same 1-based indexing caveat as `offset`).
pub fn world(&self, nodes: &[Node]) -> Point {
match *self {
LabelPos::World { point, .. } => point,
LabelPos::Handle { handle, .. } => {
let id = handle.id();
let ix = id.0 - 1;
let node = nodes[ix as usize];
node.center()
}
}
}
/// Text anchor derived from the offset direction; center-aligned when
/// there is no offset.
// NOTE(review): the normalize-and-threshold logic below duplicates the
// free function `offset_align` -- consider delegating to it.
pub fn anchor(&self, nodes: &[Node]) -> egui::Align2 {
let dir = if let Some(offset) = self.offset(nodes) {
offset
} else {
return egui::Align2::CENTER_CENTER;
};
let norm = dir / dir.length();
let align_for = |v: f32| {
if v > 0.67 {
egui::Align::Max
} else if v < -0.67 {
egui::Align::Min
} else {
egui::Align::Center
}
};
let hor_align = align_for(norm.x);
let ver_align = align_for(norm.y);
egui::Align2([hor_align, ver_align])
}
}
/// Converts an offset direction into an egui text anchor: a normalized
/// component beyond +/-0.67 snaps to `Max`/`Min`, anything else centers.
pub fn offset_align(dir: &Point) -> egui::Align2 {
let unit = *dir / dir.length();
let axis_align = |component: f32| {
if component > 0.67 {
egui::Align::Max
} else if component < -0.67 {
egui::Align::Min
} else {
egui::Align::Center
}
};
egui::Align2([axis_align(unit.x), axis_align(unit.y)])
}
/// Draws `text` near the given node, aligned per `anchor_dir`; returns the
/// painted rect, or `None` when the node id is out of range or the text is
/// far offscreen.
pub fn draw_text_at_node_anchor(
ctx: &egui::CtxRef,
node_positions: &[Node],
view: View,
node: NodeId,
screen_offset: Point,
anchor_dir: Point,
text: &str,
) -> Option<Rect> {
// Node ids are 1-based; a missing entry short-circuits to `None`.
let node = node_positions.get((node.0 - 1) as usize)?;
draw_text_at_aligned_world_point_offset(
ctx,
view,
node.center(),
screen_offset,
anchor_dir,
text,
)
}
/// Convenience wrapper: draws `text` at a world point with no screen
/// offset (and, transitively, the centered anchor).
pub fn draw_text_at_world_point(
ctx: &egui::CtxRef,
view: View,
world: Point,
text: &str,
) -> Option<Rect> {
let no_offset = Point::ZERO;
draw_text_at_world_point_offset(ctx, view, world, no_offset, text)
}
/// Draws `text` at the given node's center plus `screen_offset`,
/// center-aligned; `None` when the node id is out of range or the text is
/// far offscreen.
pub fn draw_text_at_node(
ctx: &egui::CtxRef,
node_positions: &[Node],
view: View,
node: NodeId,
screen_offset: Point,
text: &str,
) -> Option<Rect> {
// Node ids are 1-based; a missing entry short-circuits to `None`.
let node = node_positions.get((node.0 - 1) as usize)?;
draw_text_at_world_point_offset(ctx, view, node.center(), screen_offset, text)
}
/// Draws `text` at a world point shifted by `screen_offset`, using the
/// centered (zero) anchor direction.
pub fn draw_text_at_world_point_offset(
ctx: &egui::CtxRef,
view: View,
world: Point,
screen_offset: Point,
text: &str,
) -> Option<Rect> {
let centered_anchor = Point::ZERO;
draw_text_at_aligned_world_point_offset(
ctx,
view,
world,
screen_offset,
centered_anchor,
text,
)
}
/// The background layer all draw helpers in this module paint into.
fn painter_layer() -> egui::LayerId {
let layer_id = egui::Id::new("gui_text_background");
egui::LayerId::new(egui::Order::Background, layer_id)
}
/// Strokes a small gray circle marking the world-space `point`.
pub fn draw_point_world(ctx: &egui::CtxRef, view: View, point: Point) {
let screen_rect = ctx.input().screen_rect();
// Screen origin is the window center, so shift by half the window size
// (same convention as draw_circle_world / draw_rect_world below).
let offset = Point::new(screen_rect.width(), screen_rect.height()) / 2.0;
let mut s = view.world_point_to_screen(point);
s += offset;
let painter = ctx.layer_painter(painter_layer());
let stroke = egui::Stroke::new(2.0, egui::Color32::from_rgb(128, 128, 128));
// BUG FIX: previously stroked at the raw world coordinate
// (`point.into()`), discarding the screen position `s` computed above;
// every sibling draw_* helper strokes at the transformed screen point.
painter.circle_stroke(s.into(), 2.0, stroke);
}
/// Strokes a circle of `radius` (screen units) centered on the world point
/// `origin`; `color` defaults to mid gray when `None`.
pub fn draw_circle_world(
ctx: &egui::CtxRef,
view: View,
origin: Point,
radius: f32,
color: Option<rgb::RGBA<f32>>,
) {
let screen_rect = ctx.input().screen_rect();
// Radius is interpreted in screen space, not scaled by the view.
let screen_radius = radius;
// Project to screen space; the origin is the window center.
let half_dims = Point::new(screen_rect.width(), screen_rect.height()) / 2.0;
let mut center = view.world_point_to_screen(origin);
center += half_dims;
let painter = ctx.layer_painter(painter_layer());
// Convert the float RGBA (0..1) to egui's u8 color; gray fallback.
let stroke_color = match color {
Some(c) => egui::Color32::from_rgba_unmultiplied(
(c.r * 255.0) as u8,
(c.g * 255.0) as u8,
(c.b * 255.0) as u8,
(c.a * 255.0) as u8,
),
None => egui::Color32::from_rgb(128, 128, 128),
};
let stroke = egui::Stroke::new(2.0, stroke_color);
painter.circle_stroke(center.into(), screen_radius, stroke);
}
/// Strokes the outline of a world-space rectangle; `color` defaults to mid
/// gray when `None`.
pub fn draw_rect_world(
ctx: &egui::CtxRef,
view: View,
rect: Rect,
color: Option<rgb::RGBA<f32>>,
) {
let screen_rect = ctx.input().screen_rect();
// Project both corners to screen space; the origin is the window center.
let half_dims = Point::new(screen_rect.width(), screen_rect.height()) / 2.0;
let mut min_corner = view.world_point_to_screen(rect.min());
let mut max_corner = view.world_point_to_screen(rect.max());
min_corner += half_dims;
max_corner += half_dims;
let painter = ctx.layer_painter(painter_layer());
// Convert the float RGBA (0..1) to egui's u8 color; gray fallback.
let stroke_color = match color {
Some(c) => egui::Color32::from_rgba_unmultiplied(
(c.r * 255.0) as u8,
(c.g * 255.0) as u8,
(c.b * 255.0) as u8,
(c.a * 255.0) as u8,
),
None => egui::Color32::from_rgb(128, 128, 128),
};
let stroke = egui::Stroke::new(2.0, stroke_color);
painter.rect_stroke(Rect::new(min_corner, max_corner).into(), 0.0, stroke);
}
/// Strokes any rect convertible to `egui::Rect` in mid gray on the shared
/// background layer.
pub fn draw_rect<R: Into<egui::Rect>>(ctx: &egui::CtxRef, rect: R) {
let gray = egui::Color32::from_rgb(128, 128, 128);
ctx.layer_painter(painter_layer())
.rect_stroke(rect.into(), 0.0, egui::Stroke::new(2.0, gray));
}
/// Draws `text` at a world point, shifted by `screen_offset` and aligned
/// per `anchor_dir` (see `offset_align`); returns the painted rect, or
/// `None` when the position lies far outside the screen.
pub fn draw_text_at_aligned_world_point_offset(
ctx: &egui::CtxRef,
view: View,
world: Point,
screen_offset: Point,
anchor_dir: Point,
text: &str,
) -> Option<Rect> {
let screen_rect = ctx.input().screen_rect();
let painter = ctx.layer_painter(painter_layer());
// Screen origin is the window center, so shift by half the window size.
let screen_pos = view.world_point_to_screen(world);
let dims = Point::new(screen_rect.width(), screen_rect.height());
let mut screen_pos = screen_pos + dims / 2.0;
screen_pos += screen_offset;
// hacky way to ensure that the text is only being rendered when
// (more or less) on the screen, without being cut off if the
// center of the text is just outside the visible area
if screen_pos.x > -screen_rect.width()
&& screen_pos.x < 2.0 * screen_rect.width()
&& screen_pos.y > -screen_rect.height()
&& screen_pos.y < 2.0 * screen_rect.height()
{
let align = offset_align(&anchor_dir);
let rect = painter.text(
screen_pos.into(),
align,
text,
// egui::TextStyle::Body,
egui::TextStyle::Button,
ctx.style().visuals.text_color(),
);
return Some(rect.into());
}
None
}
|
use std::fmt;
use std::io::Write;
////////// From syntax.ml /////////
// Abstract Syntax
// Variable names
pub type Name = String;
// Types
// Types of the object language: base types and right-nested function types.
// NOTE(review): `Clone`/`PartialEq` are implemented manually below and
// match what `#[derive]` would generate.
#[derive(Debug)]
pub enum Type {
Int,
Bool,
/// Function type `t1 -> t2`.
Arrow(Box<Type>, Box<Type>),
}
/// Deep copy; equivalent to what `#[derive(Clone)]` would produce.
impl Clone for Type {
fn clone(&self) -> Type {
match *self {
Type::Int => Type::Int,
Type::Bool => Type::Bool,
// Cloning the boxes clones the boxed subtypes recursively.
Type::Arrow(ref from, ref to) => Type::Arrow(from.clone(), to.clone()),
}
}
}
/// Structural equality; equivalent to what `#[derive(PartialEq)]` would
/// produce.
impl PartialEq for Type {
fn eq(&self, other: &Type) -> bool {
// Match on both sides at once instead of nesting matches per variant.
match (self, other) {
(&Type::Int, &Type::Int) => true,
(&Type::Bool, &Type::Bool) => true,
(&Type::Arrow(ref a1, ref r1), &Type::Arrow(ref a2, ref r2)) => {
// Box comparison delegates to the boxed `Type`s.
a1 == a2 && r1 == r2
}
_ => false,
}
}
}
/// Pretty-prints the type with minimal parentheses (see `rec_fmt_type`).
impl fmt::Display for Type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// `-1` is below every binding strength, so the top level is never
// parenthesized.
f.write_str(&rec_fmt_type(self, -1))
}
}
// Recursively formats `t`, parenthesizing whenever the construct's binding
// strength does not exceed the surrounding precedence `n`. `->` is
// right-associative: the left operand is rendered at a higher precedence
// than the right.
fn rec_fmt_type(t: &Type, n: i8) -> String {
let (strength, rendered) = match *t {
Type::Int => (2, String::from("int")),
Type::Bool => (2, String::from("bool")),
Type::Arrow(ref lhs, ref rhs) => {
(1, format!("{} -> {}", rec_fmt_type(lhs, 1), rec_fmt_type(rhs, 0)))
}
};
if strength > n {
rendered
} else {
format!("({})", rendered)
}
}
// Expressions
// Expressions of the object language.
#[derive(Debug)]
pub enum Expr {
/// Variable reference.
Var(Name),
/// Integer literal.
Int(i64),
/// Boolean literal.
Bool(bool),
Times(Box<Expr>, Box<Expr>),
Plus(Box<Expr>, Box<Expr>),
Minus(Box<Expr>, Box<Expr>),
Equal(Box<Expr>, Box<Expr>),
Less(Box<Expr>, Box<Expr>),
/// `if e1 then e2 else e3`.
If(Box<Expr>, Box<Expr>, Box<Expr>),
/// Function literal `fn f(x : t1) : t2 is e`: function name, parameter
/// name, parameter type, return type, body.
Fn(Name, Name, Type, Type, Box<Expr>),
/// Application `e1 e2`.
Apply(Box<Expr>, Box<Expr>),
}
/// Pretty-prints the expression with minimal parentheses (see
/// `rec_fmt_expr`).
impl fmt::Display for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// `-1` is below every binding strength, so the top level is never
// parenthesized.
f.write_str(&rec_fmt_expr(self, -1))
}
}
// Recursively formats `t`; `(m, str)` is the construct's binding strength
// and its rendering, and the result is parenthesized whenever `m` does not
// exceed the surrounding precedence `n`.
fn rec_fmt_expr(t: &Expr, n: i8) -> String {
let (m, str) = match *t {
Expr::Int(i) => (7, i.to_string()),
Expr::Bool(b) => (7, if b { "true".to_string() } else { "false".to_string() }),
Expr::Var(ref name) => (7, name.to_string()),
Expr::Apply(ref e1, ref e2) => (6, format!("{} {}", rec_fmt_expr(e1, 5), rec_fmt_expr(e2, 6))),
Expr::Times(ref e1, ref e2) => (5, format!("{} * {}", rec_fmt_expr(e1, 4), rec_fmt_expr(e2, 5))),
Expr::Plus(ref e1, ref e2) => (4, format!("{} + {}", rec_fmt_expr(e1, 3), rec_fmt_expr(e2, 4))),
Expr::Minus(ref e1, ref e2) => (4, format!("{} - {}", rec_fmt_expr(e1, 3), rec_fmt_expr(e2, 4))),
Expr::Equal(ref e1, ref e2) => (3, format!("{} = {}", rec_fmt_expr(e1, 3), rec_fmt_expr(e2, 3))),
Expr::Less(ref e1, ref e2) => (3, format!("{} < {}", rec_fmt_expr(e1, 3), rec_fmt_expr(e2, 3))),
Expr::If(ref e1, ref e2, ref e3) => (2, format!("if {} then {} else {}",
rec_fmt_expr(e1, 3),
rec_fmt_expr(e2, 3),
rec_fmt_expr(e3, 3))),
// BUG FIX: the arguments were previously passed as (f, t1, x, t2),
// rendering the parameter type where the name belongs (e.g.
// "fn f(int : x)"); the format string requires (f, x, t1, t2) to
// produce "fn f(x : t1) : t2 is e". The redundant `.to_string()`
// calls are dropped -- `format!` accepts any `Display` value.
Expr::Fn(ref f, ref x, ref t1, ref t2, ref e) => (1, format!("fn {}({} : {}) : {} is {}",
f,
x,
t1,
t2,
rec_fmt_expr(e, 0))),
};
if m > n {
str
} else {
format!("({})", &str)
}
}
// Toplevel commands
// A toplevel input is either a bare expression to evaluate or a definition
// binding a name to an expression.
#[derive(Debug)]
pub enum TopLevelCmd {
Expr(Expr),
Def(Name, Expr)
}
////////// end syntax //////////////////
|
use std::io::prelude::*;
use std::net::{TcpStream};
use std::error::Error;
use ssh2::{Session, ErrorCode, ExtendedData};
use serde::Deserialize;
use std::collections::HashMap;
use log::{error, warn, info};
use crate::authenticator::Authenticator;
use crate::resolver::Resolver;
// Top-level configuration: the targets to run against plus named task
// lists collected from any remaining top-level keys.
#[derive(Debug, Deserialize)]
pub struct Config {
pub targets: Vec<Target>,
// `flatten` captures every other top-level key as a named task list.
#[serde(flatten)]
tasks: HashMap<String, Vec<Task>>,
}
// A host to connect to over SSH and the tasks to run there.
#[derive(Debug, Deserialize)]
pub struct Target {
pub host: String,
// NOTE(review): `ip` is not read by `connect`, which uses the resolver
// or falls back to `host` -- confirm whether this field is still needed.
ip: Option<String>,
// SSH port; `connect` defaults to 22 when absent.
port: Option<u16>,
user: String,
// Literal password, or the sentinel "bitwarden" to fetch one from the
// authenticator; `None` falls back to SSH-agent auth (see `connect`).
password: Option<String>,
pub tasks: Vec<Task>,
}
// One remote command and how to judge its outcome (see `Task::run`).
#[derive(Debug, Deserialize)]
pub struct Task {
command: String,
// Exit status that counts as success.
expected_result: i32,
// On a status mismatch: `true` -> `State::Failed`, else `State::Warning`.
stop_on_error: bool,
}
// Outcome of running a task (produced by `Task::run`).
#[derive(Debug)]
pub enum State {
Failed,
Warning,
Ok,
}
impl Target {
/// Opens an authenticated SSH session to this target.
///
/// Address selection: the optional resolver is consulted first and
/// falls back to the configured host name on failure. Authentication:
/// a literal password, the sentinel value "bitwarden" (password
/// fetched from the authenticator), or SSH-agent auth when no password
/// is configured. All failures are mapped to boxed `ssh2::Error`s with
/// distinct negative session codes.
pub fn connect(&self, authenticator: &Option<impl Authenticator>, resolver: &Option<impl Resolver>) -> Result<Session, Box<dyn Error>> {
// try resolver
let address = match resolver {
// always fall back to dns name
Some(resolver) => {
match resolver.get(&self.host) {
Ok(ip) => {
info!("found ip address: {}", ip);
ip
},
Err(e) => {
error!("unable to resolve ip: {}", e);
self.host.to_string()
}
}
},
None => self.host.to_string(),
};
// Open SSH Session to Address
match TcpStream::connect(format!("{}:{}", address, self.port.unwrap_or(22u16)).as_str()) {
Ok(tcp) => {
match Session::new() {
Ok(mut session) => {
// NOTE(review): ssh2 timeouts are in milliseconds, so
// this would be 150 s -- confirm that is intended.
session.set_timeout(150000);
session.set_tcp_stream(tcp);
session.handshake()?;
match &self.password {
Some(password) => {
match password.as_str() {
// Sentinel: pull the real password from the
// configured authenticator instead.
"bitwarden" => {
match authenticator {
Some(a) => session.userauth_password(&self.user, a.get(&self.host, &self.user)?)?,
None => return Err(Box::new(ssh2::Error::new(ssh2::ErrorCode::Session(-18), "authentication method not available")))
}
},
_ => session.userauth_password(&self.user, password)?,
}
},
// No password configured: try the SSH agent.
None => session.userauth_agent(&self.user)?,
}
Ok(session)
},
Err(e) => {
error!("Connection Error: {}", e);
return Err(Box::new(ssh2::Error::new(ssh2::ErrorCode::Session(-26), "Connection Error")))
}
}
},
Err(e) => {
error!("Connection Error: {} {}", format!("{}:{}", address, self.port.unwrap_or(22u16)).as_str(), e);
return Err(Box::new(ssh2::Error::new(ErrorCode::Session(-9), "Connection Error")))
}
}
}
}
impl Task {
pub fn run(&self, session: &Session) -> Result<State, Box<dyn Error>> {
// Run command in session
let mut channel = session.channel_session()?;
// Add stderr stream to normal output
channel.handle_extended_data(ExtendedData::Merge)?;
channel.exec(&self.command)?;
let mut buffer = String::new();
// catch session timeout
match channel.read_to_string(&mut buffer) {
Err(_e) => {
return Err(Box::new(ssh2::Error::new(ErrorCode::Session(-23), "Data Read Error/Timeout")))
},
_ => ()
}
channel.wait_close()?;
// write output to logfile
info!("{}", buffer);
match channel.exit_status() {
Ok(r) => {
if self.expected_result == r {
return Ok(State::Ok);
} else {
if self.stop_on_error == true {
error!("expected result {} but recieved {}", self.expected_result, r);
return Ok(State::Failed);
} else {
warn!("expected result {} but recieved {}", self.expected_result, r);
return Ok(State::Warning);
}
}
},
Err(e) => {
error!("{}", e);
return Err(Box::new(e));
}
}
}
} |
//! Traits which expose underlying winit crate's types
mod window;
pub use window::*;
|
use std::{
rc::Rc,
sync::{
atomic::{self, AtomicBool, AtomicU16, Ordering},
Arc,
},
};
use anyhow::Context as _;
use gba::{Button, ButtonSet};
use glutin::{
event::{ElementState, ModifiersState, VirtualKeyCode, WindowEvent},
PossiblyCurrent, WindowedContext,
};
use parking_lot::Mutex;
use pyrite::{config::Config, GbaHandle};
use crate::{
glutil::{
self, AttribPtrType, Buffer, BufferTarget, BufferUsage, DrawMode, InternalTextureFormat,
PixelDataType, Program, Shader, ShaderType, Texture, TextureFormat, UnlinkedProgram,
VertexArray,
},
pyrite_window::PyriteWindow,
};
// Main emulator window: owns the GL context, the framebuffer shared with
// the GBA thread, and the GL objects used to blit it to the screen.
pub struct GbaWindow {
// NOTE(review): presumably `None` only transiently while the context is
// taken/replaced through `context_mut_opt` -- confirm with PyriteWindow.
context: Option<WindowedContext<PossiblyCurrent>>,
gl: Rc<glow::Context>,
gba: GbaHandle,
// Local button state; mirrored into `buttons_u16` for the GBA thread.
buttons: ButtonSet,
buttons_u16: Arc<AtomicU16>,
// Raised by the GBA thread when `screen` holds a fresh frame.
screen_ready: Arc<AtomicBool>,
// 240x160 frame buffer shared with the emulator thread (u16 pixels,
// uploaded as 1555 -- see `render`).
screen: Arc<Mutex<[u16; 240 * 160]>>,
screen_texture: Texture,
screen_vbo: Buffer,
screen_vao: VertexArray,
screen_shader: Program,
modifiers: ModifiersState,
// One-shot flags consumed by `wants_exit` / `wants_debugger`.
wants_exit: bool,
wants_debugger: bool,
config: Arc<Config>,
}
impl GbaWindow {
/// Builds the window state: wires the frame/button channels shared with
/// the GBA thread and sets up the GL objects (texture, VBO, VAO,
/// shader) used to draw the emulator screen as a fullscreen quad.
pub fn new(
config: Arc<Config>,
context: WindowedContext<PossiblyCurrent>,
gba: GbaHandle,
) -> anyhow::Result<GbaWindow> {
let gl = Rc::new(unsafe {
glow::Context::from_loader_function(|s| context.get_proc_address(s) as *const _)
});
// Frame handoff: the GBA thread fills `buffer` and raises
// `buffer_ready`; `render` consumes both.
let buffer_ready = Arc::new(AtomicBool::new(true));
let buffer = Arc::new(Mutex::new([0; 240 * 160]));
let gba_buffer_ready = Arc::clone(&buffer_ready);
let gba_buffer = Arc::clone(&buffer);
let buttons_u16 = Arc::new(AtomicU16::new(ButtonSet::default().into()));
let buttons_u16_gba = buttons_u16.clone();
// Runs on the GBA side once per emulated frame: pick up the latest
// button state, and publish the frame unless paused (Release pairs
// with the Acquire compare_exchange in `render`).
gba.on_frame(move |gba, state| {
let buttons = buttons_u16_gba.load(atomic::Ordering::Acquire);
gba.set_buttons(ButtonSet::from(buttons));
if !state.paused {
let mut screen = gba_buffer.lock();
screen.copy_from_slice(gba.video().screen());
drop(screen);
gba_buffer_ready.store(true, Ordering::Release);
}
});
// NOTE(review): created with `TextureFormat::Bgra` here but updated
// with `TextureFormat::Rgba` in `render` -- confirm which is intended.
let screen_texture = Texture::builder()
.width(240)
.height(160)
.internal_format(InternalTextureFormat::Rgb)
.format(TextureFormat::Bgra)
.build::<[u16]>(&gl, PixelDataType::UnsignedShort1555Rev, None)
.map_err(anyhow::Error::msg)
.context("error while creating screen texture")?;
// Device Coordinates (left, right ,top, bottom):
const DL: f32 = -1.0;
const DR: f32 = 1.0;
const DT: f32 = 1.0;
const DB: f32 = -1.0;
// Texture Coordinates (left, right, top, bottom):
const TL: f32 = 0.0;
const TR: f32 = 1.0;
const TT: f32 = 0.0;
const TB: f32 = 1.0;
// Two triangles forming a fullscreen quad; each vertex is
// (position.xy, texcoord.uv) -- matches the attrib pointers below.
let vertices: &[f32] = &[
DL, DT, TL, TT, // left, top
DR, DT, TR, TT, // right, top
DL, DB, TL, TB, // left, bottom
DL, DB, TL, TB, // left, bottom
DR, DB, TR, TB, // right, bottom
DR, DT, TR, TT, // right, top
];
let screen_vbo = Buffer::from_slice(
&gl,
BufferTarget::ArrayBuffer,
vertices,
BufferUsage::StaticDraw,
)
.map_err(anyhow::Error::msg)
.context("error creating screen vertex buffer object")?;
let vertex_shader =
Shader::new(&gl, ShaderType::Vertex, include_str!("../shaders/gba.vert"))
.map_err(anyhow::Error::msg)
.context("error creating/compiling vertex shader")?;
let fragment_shader = Shader::new(
&gl,
ShaderType::Fragment,
include_str!("../shaders/gba.frag"),
)
.map_err(anyhow::Error::msg)
.context("error creating/compiling fragmenet shader")?;
let screen_shader = UnlinkedProgram::new(&gl, &[vertex_shader, fragment_shader])
.map_err(anyhow::Error::msg)
.context("error creating shader program")?;
// Frag-data location must be bound before linking.
screen_shader.bind_frag_data_location(0, "out_color");
let screen_shader = screen_shader
.link()
.map_err(anyhow::Error::msg)
.context("error linking shader program")?;
screen_shader.bind();
// Point the "tex" sampler at texture unit 0 (see `render`).
glutil::uniform_1_i32(
&gl,
screen_shader
.uniform_location("tex")
.expect("no tex uniform"),
0,
);
let screen_vao = VertexArray::new(&gl)
.map_err(anyhow::Error::msg)
.context("error creating vertex array")?;
// Describe the interleaved (pos.xy, uv) layout of `vertices`:
// stride of 4 floats, texcoords offset by 2 floats.
screen_vao.with(|vao| {
let sz_float = std::mem::size_of::<f32>() as i32;
let pos = screen_shader
.get_attrib_location("in_position")
.expect("no in_position attribute");
let tex = screen_shader
.get_attrib_location("in_texcoord")
.expect("no in_texcoord attribute");
vao.vertex_attrib_pointer_f32(pos, 2, AttribPtrType::Float, false, 4 * sz_float, 0);
vao.vertex_attrib_pointer_f32(
tex,
2,
AttribPtrType::Float,
false,
4 * sz_float,
2 * sz_float,
);
vao.enable_attrib(pos);
vao.enable_attrib(tex);
});
Ok(GbaWindow {
context: Some(context),
gl,
gba,
buttons: ButtonSet::default(),
buttons_u16,
screen_ready: buffer_ready,
screen: buffer,
screen_texture,
screen_vbo,
screen_vao,
screen_shader,
modifiers: ModifiersState::default(),
wants_exit: false,
wants_debugger: false,
config,
})
}
/// Handles a key event: hotkeys (Esc quit, Ctrl+P pause, Ctrl+R reset,
/// Ctrl+D debugger) and GBA button presses/releases.
fn on_keyboard_input(&mut self, input: glutin::event::KeyboardInput) {
let pressed = input.state == ElementState::Pressed;
match input.virtual_keycode {
Some(VirtualKeyCode::Escape) if pressed => self.wants_exit = true,
Some(VirtualKeyCode::P) if pressed && self.modifiers.ctrl() => self
.gba
.after_frame(|_, state| state.paused = !state.paused),
Some(VirtualKeyCode::R) if pressed && self.modifiers.ctrl() => {
let boot_from_bios = self.config.gba.boot_from_bios.unwrap_or(true);
self.gba
.after_frame(move |gba, _| gba.reset(boot_from_bios))
}
Some(VirtualKeyCode::D) if pressed && self.modifiers.ctrl() => {
self.wants_debugger = true
}
Some(keycode) => {
// Not a hotkey: update the local button set and publish it
// for the GBA thread to pick up on its next frame.
if let Some(button) = keycode_to_button(keycode) {
self.buttons.set_pressed(button, pressed);
self.buttons_u16
.store(self.buttons.into(), Ordering::Relaxed);
}
}
_ => {}
}
}
/// Returns and clears the exit request flag (one-shot).
pub fn wants_exit(&mut self) -> bool {
std::mem::replace(&mut self.wants_exit, false)
}
/// Returns and clears the debugger request flag (one-shot).
pub fn wants_debugger(&mut self) -> bool {
std::mem::replace(&mut self.wants_debugger, false)
}
}
impl PyriteWindow for GbaWindow {
fn on_window_event(&mut self, event: WindowEvent) {
if let WindowEvent::KeyboardInput { input, .. } = event {
self.on_keyboard_input(input)
}
}
/// Clears the screen, uploads the latest GBA frame if one is pending,
/// draws the fullscreen quad and requests another redraw.
fn render(&mut self) {
glutil::clear(&self.gl, 0.5, 0.2, 0.5);
// Claim the pending frame, if any (Acquire pairs with the Release
// store made by the GBA thread's on_frame callback).
if self
.screen_ready
.compare_exchange(true, false, Ordering::Acquire, Ordering::Relaxed)
.is_ok()
{
let screen = self.screen.lock();
// NOTE(review): uploads as `Rgba` while the texture was created
// with `Bgra` in `new` -- confirm which format is intended.
self.screen_texture.update(
None,
TextureFormat::Rgba,
PixelDataType::UnsignedShort1555Rev,
&screen[..],
);
}
self.screen_vbo.bind();
self.screen_vao.bind();
self.screen_shader.bind();
// Texture unit 0 matches the "tex" uniform bound in `new`.
self.screen_texture.bind(0);
// 6 vertices = the two triangles of the fullscreen quad.
glutil::draw_arrays(&self.gl, DrawMode::Triangles, 0, 6);
self.request_redraw();
}
fn context_mut_opt(&mut self) -> &mut Option<WindowedContext<PossiblyCurrent>> {
&mut self.context
}
fn context_opt(&self) -> &Option<WindowedContext<PossiblyCurrent>> {
&self.context
}
fn modifiers_mut(&mut self) -> &mut ModifiersState {
&mut self.modifiers
}
fn gl(&self) -> &glow::Context {
&self.gl
}
}
/// Keyboard mapping for GBA input: arrows = D-pad, Z/X = A/B, A/S =
/// shoulder L/R, Return/Backspace = Start/Select; anything else is ignored.
fn keycode_to_button(keycode: VirtualKeyCode) -> Option<Button> {
let button = match keycode {
VirtualKeyCode::Up => Button::Up,
VirtualKeyCode::Down => Button::Down,
VirtualKeyCode::Left => Button::Left,
VirtualKeyCode::Right => Button::Right,
VirtualKeyCode::Z => Button::A,
VirtualKeyCode::X => Button::B,
VirtualKeyCode::A => Button::L,
VirtualKeyCode::S => Button::R,
VirtualKeyCode::Return => Button::Start,
VirtualKeyCode::Back => Button::Select,
_ => return None,
};
Some(button)
}
|
mod batch;
mod cloud_table;
mod continuation_token;
pub mod de;
pub mod paginated_response;
pub mod table_client;
mod table_entity;
pub use batch::*;
pub use cloud_table::*;
pub use continuation_token::ContinuationToken;
pub use paginated_response::PaginatedResponse;
pub use table_client::*;
pub use table_entity::*;
|
use bevy::prelude::*;
use super::{is_path_empty, Piece, PieceColor, PieceType};
/// Spawns a rook entity at `position`: a parent holding the board
/// transform and the `Piece` data, with the visual mesh as a child.
pub fn spawn_rook(
commands: &mut Commands,
material: Handle<StandardMaterial>,
piece_color: PieceColor,
position: (u8, u8),
asset_server: &AssetServer,
) {
// Mesh5 of the chess kit is the rook model.
let mesh: Handle<Mesh> = asset_server.load("models/chess_kit/pieces.glb#Mesh5/Primitive0");
commands
// Spawn parent entity
.spawn_bundle(PbrBundle {
// Board squares map 1:1 onto world X/Z; Y stays on the board plane.
transform: Transform::from_translation(Vec3::new(
position.0 as f32,
0.,
position.1 as f32,
)),
..Default::default()
})
.insert(Piece {
color: piece_color,
piece_type: PieceType::Rook,
x: position.0,
y: position.1,
})
.with_children(|parent| {
parent.spawn_bundle(PbrBundle {
mesh,
material,
transform: {
// Model-local correction: nudge the mesh onto the square
// and scale it down to board size.
let mut transform = Transform::from_translation(Vec3::new(-0.1, 0., 1.8));
transform.apply_non_uniform_scale(Vec3::new(0.2, 0.2, 0.2));
transform
},
..Default::default()
});
});
}
/// A rook move is legal when no piece blocks the path and the move stays on
/// exactly one rank or one file (so it actually moves).
pub fn is_rook_move_valid(
current_position: (u8, u8),
target_position: (u8, u8),
pieces: &Query<&Piece>,
) -> bool {
let (current_x, current_y) = current_position;
let (target_x, target_y) = target_position;
// Path check first, matching the original evaluation order.
if !is_path_empty((current_x, current_y), target_position, pieces) {
return false;
}
let same_file = current_x == target_x;
let same_rank = current_y == target_y;
// Exactly one coordinate must change (XOR).
same_file != same_rank
}
|
use std::{ fs };
/// AoC 2020 day 6 part 2: for each blank-line-separated group, count the
/// questions answered "yes" by *every* person in the group, and print the
/// sum over all groups.
pub fn main() -> Option<bool> {
use std::collections::HashSet;
let file_contents = match fs::read_to_string(
"./inputs/2020-12-06-aoc-01-input.txt"
) {
Ok(c) => c,
Err(e) => panic!("{:?}", e)
};
let total_sum: usize = file_contents
.split("\n\n")
.map(|block| {
// One character set per person (non-empty line) in the group.
// Using sets replaces the previous O(p^2) remove-by-position scan
// and also avoids over-counting a letter that appears more than
// once on a single line.
let mut people = block
.split('\n')
.filter(|line| !line.is_empty())
.map(|line| line.chars().collect::<HashSet<char>>());
// Seed with the first person's answers; an empty group counts 0.
let first = match people.next() {
Some(set) => set,
None => return 0,
};
// Intersect the remaining people's answers in place.
people
.fold(first, |mut common, answers| {
common.retain(|c| answers.contains(c));
common
})
.len()
})
.sum();
println!("The total sum is: {}", total_sum);
Some(true)
}
|
// `Iter`-like structure over the chars of a `&str`, kept as a `Vec<char>`
// so that code elsewhere stays simple.
pub struct Consumer {
// Remaining input, pre-split into chars for O(1) indexed access.
queue: Vec<char>,
// Index of the next unread char; only ever moves forward.
pos: usize,
}
impl Consumer {
/// Wraps `s`, pre-splitting it into chars.
pub fn new(s: &str) -> Self {
Self {
queue: s.chars().collect(),
pos: 0,
}
}
// Consume and return the next char as a `String`.
pub fn next(&mut self) -> Option<String> {
self.next_char().map(|c| c.to_string())
}
// Inner function for `next` and `next_n`:
// consume and return the next char if one remains.
pub fn next_char(&mut self) -> Option<char> {
let res = self.queue.get(self.pos).copied();
if res.is_some() {
self.pos += 1;
}
res
}
// Consume and return the next `n` chars as a `String`; `None` when
// fewer than `n` remain (chars read before hitting the end stay
// consumed, matching the previous behavior).
pub fn next_n(&mut self, n: usize) -> Option<String> {
let mut out = String::with_capacity(n);
for _ in 0..n {
out.push(self.next_char()?);
}
Some(out)
}
// Return the string from `self.pos` up to (not including) the next
// space/tab; `None` only when already at end of input.
pub fn next_until_space(&mut self) -> Option<String> {
// Preserve the original contract: end-of-input yields `None`, an
// immediate space yields `Some("")`.
self.peek_char()?;
let mut out = String::new();
while let Some(c) = self.peek_char() {
if c == ' ' || c == '\t' {
break;
}
// Advance via `next_char` instead of `next`, avoiding the
// per-char `String` allocation the old code made and discarded.
self.next_char();
out.push(c);
}
Some(out)
}
// Return (without consuming) the next char as a `String`.
pub fn peek(&self) -> Option<String> {
self.peek_char().map(|c| c.to_string())
}
// Inner function for mainly `peek` and `peek_n`:
// return (without consuming) the next char, if any.
pub fn peek_char(&self) -> Option<char> {
self.queue.get(self.pos).copied()
}
// Return (without consuming) the next `n` chars as a `String`; `None`
// when fewer than `n` remain.
pub fn peek_n(&self, n: usize) -> Option<String> {
let end = self.pos.checked_add(n)?;
self.queue.get(self.pos..end).map(|cs| cs.iter().collect())
}
// Parse a decimal `usize` at the cursor, consuming all consecutive
// digits; `None` (consuming nothing) when the next char is not a digit.
pub fn to_usize(&mut self) -> Option<usize> {
// check whether the first char is a digit
match self.peek_char() {
Some('0'..='9') => {}
_ => return None,
}
let mut result: usize = 0;
while let Some(c) = self.peek_char() {
match c.to_digit(10) {
Some(d) => {
self.next_char();
// Overflow behavior is unchanged: debug builds panic on
// numbers larger than usize::MAX.
result = result * 10 + d as usize;
}
None => break,
}
}
Some(result)
}
// Skip consecutive spaces and tabs.
pub fn skip_space(&mut self) {
while let Some(c) = self.peek_char() {
if c == ' ' || c == '\t' {
self.next_char();
} else {
break;
}
}
}
// Example: read URL
// ```rust
// let mut consumer = Consumer::new("http://example.com/rust");
// let protocol = consumer.next_until("://").unwrap(); // == "http"
// consumer.next_n(3);
// let host = consumer.next_until("/").unwrap(); // == "example.com"
// let path = consumer.next_until_space().unwrap(); // == "/rust"
//
// assert_eq!(&protocol, "http");
// assert_eq!(&host, "example.com");
// assert_eq!(&path, "/rust");
// ```
// Consume chars up to (not including) the next occurrence of `s` and
// return them; `None` (consuming nothing) when `s` never occurs.
pub fn next_until(&mut self, s: &str) -> Option<String> {
// BUG FIX: the previous version byte-sliced a peeked `String` at a
// char index (`&peeked[n..]`), which panics on multi-byte input.
// Compare char slices instead, which is also allocation-free.
let needle: Vec<char> = s.chars().collect();
let mut skipped = 0;
loop {
let start = self.pos + skipped;
let end = start + needle.len();
if end > self.queue.len() {
return None;
}
if self.queue[start..end] == needle[..] {
let res: String = self.queue[self.pos..start].iter().collect();
self.pos = start;
return Some(res);
}
skipped += 1;
}
}
// Consume chars while `f` returns true and return them (possibly
// empty); always `Some`. (The unused counter the old version carried
// has been removed.)
pub fn next_while(&mut self, f: Box<dyn Fn(char) -> bool>) -> Option<String> {
let mut ret = String::new();
while let Some(c) = self.peek_char() {
if !f(c) {
break;
}
self.next_char();
ret.push(c);
}
Some(ret)
}
}
#[cfg(test)]
mod test {
    use super::*;

    // End-to-end check: split a URL into protocol, host and path.
    #[test]
    fn test_next_until() {
        let url = "https://ja.wikipedia.org/wiki/Uniform_Resource_Locator";
        let mut consumer = Consumer::new(url);

        let protocol = consumer.next_until("://").expect("protocol");
        consumer.next_n("://".len());
        let host = consumer.next_until("/").expect("host");
        let path = consumer.next_until_space().expect("path");

        assert_eq!(protocol, "https");
        assert_eq!(host, "ja.wikipedia.org");
        assert_eq!(path, "/wiki/Uniform_Resource_Locator");
    }
}
|
use std::path::PathBuf;
use std::sync::{atomic::AtomicBool, Arc};
use anyhow::Result;
use filter::FilteredItem;
use icon::{Icon, IconKind};
use jsonrpc_core::Params;
use matcher::MatchType;
use parking_lot::Mutex;
use serde::Deserialize;
use crate::stdio_server::{
rpc::{Call, MethodCall, Notification},
types::ProviderId,
};
/// Fallback width of the display window when the caller does not provide one.
const DEFAULT_DISPLAY_WINWIDTH: u64 = 100;
/// Fallback height of the preview window when the caller does not provide one.
const DEFAULT_PREVIEW_WINHEIGHT: u64 = 30;
/// This type represents the scale of filtering source.
#[derive(Debug, Clone)]
pub enum Scale {
    /// We do not know the exact total number of source items.
    Indefinite,
    /// Large scale.
    ///
    /// The number of total source items is already known, but that's
    /// too many for the synchronous filtering.
    Large(usize),
    /// Small scale, in which case we do not have to use the dynamic filtering.
    Small { total: usize, lines: Vec<String> },
    /// Unknown scale, but the cache exists.
    Cache { total: usize, path: PathBuf },
}
impl Default for Scale {
fn default() -> Self {
Self::Indefinite
}
}
impl Scale {
    /// Known total number of source items, if any.
    pub fn total(&self) -> Option<usize> {
        // Exhaustive match: only `Indefinite` has no known total.
        match self {
            Self::Indefinite => None,
            Self::Large(total) => Some(*total),
            Self::Small { total, .. } => Some(*total),
            Self::Cache { total, .. } => Some(*total),
        }
    }

    /// First `n` lines of the source as filtered items, when they are
    /// available without running the source command.
    pub fn initial_lines(&self, n: usize) -> Option<Vec<FilteredItem>> {
        match self {
            Self::Small { lines, .. } => {
                let items = lines.iter().take(n).map(|s| s.as_str().into()).collect();
                Some(items)
            }
            Self::Cache { path, .. } => utility::read_first_lines(path, n)
                .ok()
                .map(|lines_iter| lines_iter.map(Into::into).collect::<Vec<_>>()),
            _ => None,
        }
    }
}
/// Outcome of a synchronous filter run.
#[derive(Clone, Debug)]
pub struct SyncFilterResults {
    /// Total number of items that matched the query.
    pub total: usize,
    /// The top matches, decorated for display.
    pub decorated_lines: printer::DecoratedLines,
}
/// Per-session state shared by the stdio-server providers.
#[derive(Debug, Clone)]
pub struct SessionContext {
    /// Which provider (files, grep, tags, ...) this session serves.
    pub provider_id: ProviderId,
    /// Working directory used when executing the source command (see `execute`).
    pub cwd: PathBuf,
    /// When true, ignore any existing cache.
    pub no_cache: bool,
    /// Path of the source buffer (`source_fpath` on the wire).
    pub start_buffer_path: PathBuf,
    /// Width of the display window (falls back to `DEFAULT_DISPLAY_WINWIDTH`).
    pub display_winwidth: u64,
    /// Height of the preview window (falls back to `DEFAULT_PREVIEW_WINHEIGHT`).
    pub preview_winheight: u64,
    /// Icon decoration mode, derived from the provider id.
    pub icon: Icon,
    /// Which part of a line the matcher matches against (tag name, file path, ...).
    pub match_type: MatchType,
    /// Extra scoring bonuses, derived from the provider id.
    pub match_bonuses: Vec<matcher::Bonus>,
    /// Scale of the filtering source; starts out `Indefinite`.
    pub scale: Arc<Mutex<Scale>>,
    /// Shell command producing the source items, if any.
    pub source_cmd: Option<String>,
    /// Vim 'runtimepath', when provided by the client.
    pub runtimepath: Option<String>,
    /// Whether the session is still running.
    // NOTE(review): Mutex<AtomicBool> is doubly guarded; either the Mutex or
    // the atomic alone would suffice — confirm before simplifying.
    pub is_running: Arc<Mutex<AtomicBool>>,
}
impl SessionContext {
    /// Executes the command `cmd` and returns the raw bytes of stdout.
    ///
    /// The command runs with `self.cwd` as its working directory.
    pub fn execute(&self, cmd: &str) -> Result<Vec<u8>> {
        let out = utility::execute_at(cmd, Some(&self.cwd))?;
        Ok(out.stdout)
    }
    /// Size for fulfilling the preview window.
    ///
    /// The larger of the provider's own preview size and half of the
    /// preview window height.
    pub fn sensible_preview_size(&self) -> usize {
        std::cmp::max(
            self.provider_id.get_preview_size(),
            (self.preview_winheight / 2) as usize,
        )
    }
    /// Builds the fuzzy matcher used by `sync_filter_source_item`.
    // NOTE(review): passes an empty bonus list instead of
    // `self.match_bonuses` (see the TODO) — confirm that is intended.
    fn fuzzy_matcher(&self) -> matcher::Matcher {
        matcher::Matcher::with_bonuses(
            matcher::FuzzyAlgorithm::Fzy,
            self.match_type,
            Vec::new(), // TODO: bonuses
        )
    }
    /// Filters `lines` against `query` synchronously and decorates the top
    /// matches for display.
    pub fn sync_filter_source_item<'a>(
        &self,
        query: &str,
        lines: impl Iterator<Item = &'a str>,
    ) -> Result<SyncFilterResults> {
        let ranked = filter::par_filter(
            query,
            lines.map(Into::into).collect(),
            &self.fuzzy_matcher(),
        );
        let total = ranked.len();
        // Take the first 200 entries and add an icon to each of them.
        let decorated_lines = printer::decorate_lines(
            ranked.iter().take(200).cloned().collect(),
            self.display_winwidth as usize,
            self.icon,
        );
        Ok(SyncFilterResults {
            total,
            decorated_lines,
        })
    }
    /// Builds a `SessionContext` from RPC `params`.
    ///
    /// Panics when the params cannot be deserialized into the expected shape.
    fn from_params(params: Params) -> Self {
        // Wire format of the params payload; optional fields fall back to
        // defaults below.
        #[derive(Deserialize)]
        struct InnerParams {
            provider_id: ProviderId,
            cwd: PathBuf,
            no_cache: bool,
            source_fpath: PathBuf,
            display_winwidth: Option<u64>,
            preview_winheight: Option<u64>,
            source_cmd: Option<String>,
            runtimepath: Option<String>,
            enable_icon: Option<bool>,
        }
        let InnerParams {
            provider_id,
            cwd,
            no_cache,
            source_fpath,
            display_winwidth,
            preview_winheight,
            source_cmd,
            runtimepath,
            enable_icon,
        } = params
            .parse()
            .expect("Failed to deserialize SessionContext");
        // Provider-specific matching strategy.
        let match_type = match provider_id.as_str() {
            "tags" | "proj_tags" => MatchType::TagName,
            "grep" | "grep2" => MatchType::IgnoreFilePath,
            _ => MatchType::Full,
        };
        // Icons are opt-in and provider-specific.
        let icon = if enable_icon.unwrap_or(false) {
            match provider_id.as_str() {
                "tags" | "proj_tags" => Icon::Enabled(IconKind::ProjTags),
                "grep" | "grep2" => Icon::Enabled(IconKind::Grep),
                "files" => Icon::Enabled(IconKind::File),
                _ => Icon::Null,
            }
        } else {
            Icon::Null
        };
        // File-oriented providers score file-name matches higher.
        let match_bonuses = match provider_id.as_str() {
            "files" | "git_files" | "filer" => vec![matcher::Bonus::FileName],
            _ => vec![],
        };
        Self {
            provider_id,
            cwd,
            no_cache,
            start_buffer_path: source_fpath,
            display_winwidth: display_winwidth.unwrap_or(DEFAULT_DISPLAY_WINWIDTH),
            preview_winheight: preview_winheight.unwrap_or(DEFAULT_PREVIEW_WINHEIGHT),
            source_cmd,
            runtimepath,
            match_type,
            match_bonuses,
            icon,
            scale: Arc::new(Mutex::new(Scale::Indefinite)),
            is_running: Arc::new(Mutex::new(true.into())),
        }
    }
}
// A session context can be built from any incoming RPC call shape; all three
// conversions funnel into `SessionContext::from_params`.
impl From<MethodCall> for SessionContext {
    fn from(method_call: MethodCall) -> Self {
        Self::from_params(method_call.params)
    }
}
impl From<Notification> for SessionContext {
    fn from(notification: Notification) -> Self {
        Self::from_params(notification.params)
    }
}
impl From<Call> for SessionContext {
    fn from(call: Call) -> Self {
        tracing::debug!(?call, "Creating a new SessionContext from given call");
        match call {
            Call::MethodCall(method_call) => method_call.into(),
            Call::Notification(notification) => notification.into(),
        }
    }
}
|
use crate::domain_block_processor::{
DomainBlockProcessor, PendingConsensusBlocks, ReceiptsChecker,
};
use crate::{DomainParentChain, ExecutionReceiptFor, TransactionFor};
use domain_block_preprocessor::runtime_api_full::RuntimeApiFull;
use domain_block_preprocessor::{DomainBlockPreprocessor, PreprocessResult};
use domain_runtime_primitives::{DomainCoreApi, InherentExtrinsicApi};
use sc_client_api::{AuxStore, BlockBackend, Finalizer, StateBackendFor};
use sc_consensus::{BlockImport, BlockImportParams, ForkChoiceStrategy, StateAction};
use sp_api::{NumberFor, ProvideRuntimeApi};
use sp_blockchain::{HeaderBackend, HeaderMetadata};
use sp_consensus::BlockOrigin;
use sp_core::traits::CodeExecutor;
use sp_domains::{DomainId, DomainsApi, InvalidReceipt, ReceiptValidity};
use sp_keystore::KeystorePtr;
use sp_messenger::MessengerApi;
use sp_runtime::traits::{Block as BlockT, HashFor, Zero};
use sp_runtime::Digest;
use std::sync::Arc;
/// Receipts checker specialized to this crate's types, with the consensus
/// chain acting as the parent chain.
type DomainReceiptsChecker<Block, CBlock, Client, CClient, Backend, E> = ReceiptsChecker<
    Block,
    Client,
    CBlock,
    CClient,
    Backend,
    E,
    DomainParentChain<Block, CBlock, CClient>,
    CBlock,
>;
/// Drives the processing of imported consensus blocks into domain blocks.
pub(crate) struct BundleProcessor<Block, CBlock, Client, CClient, Backend, E, BI>
where
    Block: BlockT,
    CBlock: BlockT,
{
    /// The domain this processor works for.
    domain_id: DomainId,
    /// Client handle for the consensus chain.
    consensus_client: Arc<CClient>,
    /// Client handle for the domain chain.
    client: Arc<Client>,
    /// Domain chain backend.
    backend: Arc<Backend>,
    // NOTE(review): the keystore is stored but not used within this file —
    // confirm its role at the call sites.
    keystore: KeystorePtr,
    /// Checks state transitions of receipts (slated for removal; see the TODO
    /// in `process_bundles_at`).
    domain_receipts_checker: DomainReceiptsChecker<Block, CBlock, Client, CClient, Backend, E>,
    /// Turns a consensus block into the extrinsics to execute on the domain.
    domain_block_preprocessor: DomainBlockPreprocessor<
        Block,
        CBlock,
        Client,
        CClient,
        RuntimeApiFull<Client>,
        ReceiptValidator<Client>,
    >,
    /// Builds/imports domain blocks and records processed consensus blocks.
    domain_block_processor: DomainBlockProcessor<Block, CBlock, Client, CClient, Backend, BI>,
}
// Manual impl: `#[derive(Clone)]` would add `Clone` bounds on every type
// parameter (including `E` and `BI`); the fields provide their own `Clone`.
impl<Block, CBlock, Client, CClient, Backend, E, BI> Clone
    for BundleProcessor<Block, CBlock, Client, CClient, Backend, E, BI>
where
    Block: BlockT,
    CBlock: BlockT,
{
    fn clone(&self) -> Self {
        Self {
            domain_id: self.domain_id,
            consensus_client: self.consensus_client.clone(),
            client: self.client.clone(),
            backend: self.backend.clone(),
            keystore: self.keystore.clone(),
            domain_receipts_checker: self.domain_receipts_checker.clone(),
            domain_block_preprocessor: self.domain_block_preprocessor.clone(),
            domain_block_processor: self.domain_block_processor.clone(),
        }
    }
}
/// Validates execution receipts against the locally stored ones.
struct ReceiptValidator<Client> {
    // Client used to read locally stored receipts from the aux store.
    client: Arc<Client>,
}
impl<Client> Clone for ReceiptValidator<Client> {
fn clone(&self) -> Self {
Self {
client: self.client.clone(),
}
}
}
impl<Client> ReceiptValidator<Client> {
    /// Wraps a client handle for receipt validation.
    pub fn new(client: Arc<Client>) -> Self {
        Self { client }
    }
}
impl<Block, CBlock, Client> domain_block_preprocessor::ValidateReceipt<Block, CBlock>
    for ReceiptValidator<Client>
where
    Block: BlockT,
    CBlock: BlockT,
    Client: AuxStore,
{
    /// Validates `receipt` by comparing its invalid-bundles set against the
    /// locally produced receipt for the same consensus block.
    fn validate_receipt(
        &self,
        receipt: &ExecutionReceiptFor<Block, CBlock>,
    ) -> sp_blockchain::Result<ReceiptValidity> {
        // Skip genesis receipt as it has been already verified by the consensus chain.
        if receipt.domain_block_number.is_zero() {
            return Ok(ReceiptValidity::Valid);
        }
        let consensus_block_hash = receipt.consensus_block_hash;
        // The locally produced receipt is the reference; not having one for
        // this consensus block is a backend error.
        let local_receipt = crate::aux_schema::load_execution_receipt::<_, Block, CBlock>(
            &*self.client,
            consensus_block_hash,
        )?
        .ok_or_else(|| {
            sp_blockchain::Error::Backend(format!(
                "Receipt for consensus block {consensus_block_hash} not found"
            ))
        })?;
        if local_receipt.invalid_bundles != receipt.invalid_bundles {
            // TODO: Generate fraud proof
            return Ok(ReceiptValidity::Invalid(InvalidReceipt::InvalidBundles));
        }
        Ok(ReceiptValidity::Valid)
    }
}
impl<Block, CBlock, Client, CClient, Backend, E, BI>
    BundleProcessor<Block, CBlock, Client, CClient, Backend, E, BI>
where
    Block: BlockT,
    CBlock: BlockT,
    NumberFor<CBlock>: From<NumberFor<Block>> + Into<NumberFor<Block>>,
    CBlock::Hash: From<Block::Hash>,
    Client: HeaderBackend<Block>
        + BlockBackend<Block>
        + AuxStore
        + ProvideRuntimeApi<Block>
        + Finalizer<Block, Backend>
        + 'static,
    Client::Api: DomainCoreApi<Block>
        + MessengerApi<Block, NumberFor<Block>>
        + InherentExtrinsicApi<Block>
        + sp_block_builder::BlockBuilder<Block>
        + sp_api::ApiExt<Block, StateBackend = StateBackendFor<Backend, Block>>,
    for<'b> &'b BI: BlockImport<
        Block,
        Transaction = sp_api::TransactionFor<Client, Block>,
        Error = sp_consensus::Error,
    >,
    CClient: HeaderBackend<CBlock>
        + HeaderMetadata<CBlock, Error = sp_blockchain::Error>
        + BlockBackend<CBlock>
        + ProvideRuntimeApi<CBlock>
        + 'static,
    CClient::Api: DomainsApi<CBlock, NumberFor<Block>, Block::Hash> + 'static,
    Backend: sc_client_api::Backend<Block> + 'static,
    TransactionFor<Backend, Block>: sp_trie::HashDBT<HashFor<Block>, sp_trie::DBValue>,
    E: CodeExecutor,
{
    /// Assembles a bundle processor from its collaborating components;
    /// the preprocessor is built here from the same client handles.
    pub(crate) fn new(
        domain_id: DomainId,
        consensus_client: Arc<CClient>,
        client: Arc<Client>,
        backend: Arc<Backend>,
        keystore: KeystorePtr,
        domain_receipts_checker: DomainReceiptsChecker<Block, CBlock, Client, CClient, Backend, E>,
        domain_block_processor: DomainBlockProcessor<Block, CBlock, Client, CClient, Backend, BI>,
    ) -> Self {
        let domain_block_preprocessor = DomainBlockPreprocessor::new(
            domain_id,
            client.clone(),
            consensus_client.clone(),
            RuntimeApiFull::new(client.clone()),
            ReceiptValidator::new(client.clone()),
        );
        Self {
            domain_id,
            consensus_client,
            client,
            backend,
            keystore,
            domain_receipts_checker,
            domain_block_preprocessor,
            domain_block_processor,
        }
    }
    /// Processes every pending imported consensus block up to and including
    /// `consensus_block_info`, building domain blocks on top of one another,
    /// then re-points the best domain block when the consensus tip changed.
    // TODO: Handle the returned error properly, ref to https://github.com/subspace/subspace/pull/695#discussion_r926721185
    pub(crate) async fn process_bundles(
        self,
        consensus_block_info: (CBlock::Hash, NumberFor<CBlock>, bool),
    ) -> sp_blockchain::Result<()> {
        let (consensus_block_hash, consensus_block_number, is_new_best) = consensus_block_info;
        tracing::debug!(
            "Processing consensus block #{consensus_block_number},{consensus_block_hash}"
        );
        let maybe_pending_consensus_blocks = self
            .domain_block_processor
            .pending_imported_consensus_blocks(consensus_block_hash, consensus_block_number)?;
        if let Some(PendingConsensusBlocks {
            initial_parent,
            consensus_imports,
        }) = maybe_pending_consensus_blocks
        {
            tracing::trace!(
                ?initial_parent,
                ?consensus_imports,
                "Pending consensus blocks to process"
            );
            // Each processed consensus block (that produced a domain block)
            // becomes the parent for the next one.
            let mut domain_parent = initial_parent;
            for consensus_info in consensus_imports {
                if let Some(next_domain_parent) = self
                    .process_bundles_at((consensus_info.hash, consensus_info.number), domain_parent)
                    .await?
                {
                    domain_parent = next_domain_parent;
                }
            }
            // The domain branch driving from the best consensus branch should also be the best domain branch even
            // if it is not the longest domain branch. Thus re-import the tip of the best domain branch to make it
            // the new best block if it isn't.
            //
            // Note: this may cause the best domain fork switch to a shorter fork or in some case the best domain
            // block become the ancestor block of the current best block.
            let domain_tip = domain_parent.0;
            if is_new_best && self.client.info().best_hash != domain_tip {
                let header = self.client.header(domain_tip)?.ok_or_else(|| {
                    sp_blockchain::Error::Backend(format!("Header for #{:?} not found", domain_tip))
                })?;
                let block_import_params = {
                    // Re-import of an already-imported block, forced to be
                    // the new best; the state is already present.
                    let mut import_block = BlockImportParams::new(BlockOrigin::Own, header);
                    import_block.import_existing = true;
                    import_block.fork_choice = Some(ForkChoiceStrategy::Custom(true));
                    import_block.state_action = StateAction::Skip;
                    import_block
                };
                self.domain_block_processor
                    .import_domain_block(block_import_params)
                    .await?;
                assert_eq!(domain_tip, self.client.info().best_hash);
            }
        }
        Ok(())
    }
    /// Builds (and imports) one domain block from one consensus block on top
    /// of `parent_info`; returns the new block's (hash, number), or `None`
    /// when the consensus block carried nothing for this domain.
    async fn process_bundles_at(
        &self,
        consensus_block_info: (CBlock::Hash, NumberFor<CBlock>),
        parent_info: (Block::Hash, NumberFor<Block>),
    ) -> sp_blockchain::Result<Option<(Block::Hash, NumberFor<Block>)>> {
        let (consensus_block_hash, consensus_block_number) = consensus_block_info;
        let (parent_hash, parent_number) = parent_info;
        tracing::debug!(
            "Building a new domain block from consensus block #{consensus_block_number},{consensus_block_hash} \
            on top of parent block #{parent_number},{parent_hash}"
        );
        let head_receipt_number = self
            .consensus_client
            .runtime_api()
            .head_receipt_number(consensus_block_hash, self.domain_id)?
            .into();
        let Some(PreprocessResult {
            extrinsics,
            extrinsics_roots,
            invalid_bundles,
        }) = self
            .domain_block_preprocessor
            .preprocess_consensus_block(consensus_block_hash, parent_hash)?
        else {
            tracing::debug!(
                "Skip building new domain block, no bundles and runtime upgrade for this domain \
                in consensus block #{consensus_block_number:?},{consensus_block_hash}"
            );
            // Still record the consensus block as processed so it is not
            // picked up again.
            self.domain_block_processor.on_consensus_block_processed(
                consensus_block_hash,
                None,
                head_receipt_number,
            )?;
            return Ok(None);
        };
        let domain_block_result = self
            .domain_block_processor
            .process_domain_block(
                (consensus_block_hash, consensus_block_number),
                (parent_hash, parent_number),
                extrinsics,
                invalid_bundles,
                extrinsics_roots,
                Digest::default(),
            )
            .await?;
        assert!(
            domain_block_result.header_number > head_receipt_number,
            "Domain chain number must larger than the head number of the receipt chain \
            (which is maintained on the consensus chain) by at least 1"
        );
        let built_block_info = (
            domain_block_result.header_hash,
            domain_block_result.header_number,
        );
        self.domain_block_processor.on_consensus_block_processed(
            consensus_block_hash,
            Some(domain_block_result),
            head_receipt_number,
        )?;
        // TODO: Remove as ReceiptsChecker has been superseded by ReceiptValidator in block-preprocessor.
        self.domain_receipts_checker
            .check_state_transition(consensus_block_hash)?;
        Ok(Some(built_block_info))
    }
}
|
extern crate utils;
use std::env;
use std::io::{self, BufReader};
use std::io::prelude::*;
use std::fs::File;
use utils::*;
/// Parsed puzzle input: earliest departure timestamp plus the bus schedule,
/// where `None` marks an `x` (out-of-service) entry.
#[derive(Debug)]
struct Input {
    earliest_ts: u64,
    bus_ids: Vec<Option<u64>>
}

/// Part 1: `wait * bus_id` for the bus with the shortest wait at or after
/// `earliest_ts`. Returns 0 when no bus is in service.
fn part1(input: &Input) -> u64 {
    input
        .bus_ids
        .iter()
        .flatten()
        .map(|&bus_id| {
            // Minutes until this bus next departs at or after earliest_ts.
            // The outer modulo handles exact departures: previously a bus
            // leaving exactly at earliest_ts got a wait of `bus_id`, not 0.
            let wait = (bus_id - input.earliest_ts % bus_id) % bus_id;
            (wait, bus_id)
        })
        // Ties keep the first bus, like the original strict `<` comparison.
        .min_by_key(|&(wait, _)| wait)
        .map(|(wait, bus_id)| wait * bus_id)
        .unwrap_or(0)
}

/// Part 2: smallest `t` such that every in-service bus at offset `i`
/// departs at `t + i` (a CRT-style sieve).
///
/// Fixes the original fall-through: when the schedule ended in an `x`
/// entry, the loop never hit its early return and the function returned 0.
fn part2(input: &Input) -> u64 {
    let mut t: u64 = 0;
    // Combined period of all constraints satisfied so far.
    let mut step: u64 = 1;
    for (i, bus) in input.bus_ids.iter().enumerate() {
        if let Some(bus_id) = *bus {
            // Advance t in steps of the current period until this bus fits.
            while (t + i as u64) % bus_id != 0 {
                t += step;
            }
            // New combined period: lcm(step, bus_id). Using the gcd keeps
            // this correct even for non-coprime ids.
            step = step / gcd(step, bus_id) * bus_id;
        }
    }
    t
}

// Greatest common divisor (Euclid), helper for the lcm in part2.
fn gcd(a: u64, b: u64) -> u64 {
    if b == 0 { a } else { gcd(b, a % b) }
}
/// Entry point: reads the input file named on the command line and prints
/// both answers, timing the whole run via `measure`.
fn main() {
    measure(|| {
        let input = input().expect("Input failed");
        println!("Part1: {}", part1(&input));
        println!("Part2: {}", part2(&input));
    });
}
/// Parses the two-line puzzle input: a timestamp, then a comma-separated
/// schedule where non-numeric fields (`x`) become `None`.
///
/// Panics on a malformed/short input; I/O errors are propagated.
fn read_input<R: Read>(reader: BufReader<R>) -> io::Result<Input> {
    let mut lines = reader.lines();
    let earliest_ts = lines.next().unwrap()?.parse::<u64>().unwrap();
    let bus_ids = lines
        .next()
        .unwrap()?
        .split(',')
        .map(|field| field.parse::<u64>().ok())
        .collect();
    Ok(Input { earliest_ts, bus_ids })
}
/// Opens the file named by the first CLI argument and parses it.
///
/// Panics when no argument is given; I/O errors are propagated.
fn input() -> io::Result<Input> {
    // `nth(1)` replaces the `skip(1).next()` anti-pattern (clippy: iter_skip_next).
    let path = env::args().nth(1).expect("No input file given");
    let f = File::open(path)?;
    read_input(BufReader::new(f))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sample schedule from the puzzle statement.
    const INPUT: &'static str =
        "939
7,13,x,x,59,x,31,19";
    // Builds an Input from an inline string, trimming per-line indentation.
    fn as_input(s: &str) -> Input {
        read_input(BufReader::new(s.split('\n').map(|s| s.trim()).collect::<Vec<_>>().join("\n").as_bytes())).unwrap()
    }
    #[test]
    fn test_part1() {
        assert_eq!(part1(&as_input(INPUT)), 295);
    }
    #[test]
    fn test_part2() {
        // Puzzle statement examples; earliest_ts is irrelevant for part 2.
        assert_eq!(part2(&as_input(INPUT)), 1068781);
        assert_eq!(part2(&as_input("0\n17,x,13,19")), 3417);
        assert_eq!(part2(&as_input("0\n67,7,59,61")), 754018);
        assert_eq!(part2(&as_input("0\n67,x,7,59,61")), 779210);
        assert_eq!(part2(&as_input("0\n67,7,x,59,61")), 1261476);
        assert_eq!(part2(&as_input("0\n1789,37,47,1889")), 1202161486);
    }
}
|
use super::datasource::{ReaderBuilder, ReaderError};
use super::stream::{FilterStream, GroupByStream, InMemoryStream, LimitStream, LogFileStream, MapStream, RecordStream};
use crate::common;
use crate::common::types::{DataSource, Tuple, Value, VariableName, Variables};
use crate::execution::stream::ProjectionStream;
use crate::syntax::ast::{PathExpr, PathSegment};
use chrono::Timelike;
use hashbrown::HashMap;
use ordered_float::OrderedFloat;
use pdatastructs::hyperloglog::HyperLogLog;
use std::collections::VecDeque;
use std::io;
use std::result;
use tdigest::TDigest;
/// Result alias for formula/expression evaluation.
pub(crate) type EvaluateResult<T> = result::Result<T, EvaluateError>;
/// Error produced while evaluating a formula.
#[derive(Fail, PartialEq, Eq, Debug)]
pub(crate) enum EvaluateError {
    /// An underlying expression failed to evaluate.
    #[fail(display = "{}", _0)]
    Expression(#[cause] ExpressionError),
}
// Allows `?` on expression results inside evaluation code.
impl From<ExpressionError> for EvaluateError {
    fn from(err: ExpressionError) -> EvaluateError {
        EvaluateError::Expression(err)
    }
}
/// Result alias for stream construction.
pub(crate) type CreateStreamResult<T> = result::Result<T, CreateStreamError>;
/// Error produced while constructing a record stream.
// NOTE(review): the From impls below discard the underlying error details;
// only the category survives.
#[derive(Fail, PartialEq, Eq, Debug)]
pub enum CreateStreamError {
    #[fail(display = "Io Error")]
    Io,
    #[fail(display = "Reader Error")]
    Reader,
    #[fail(display = "Stream Error")]
    Stream,
}
// Categorize lower-level failures; the original error value is dropped.
impl From<io::Error> for CreateStreamError {
    fn from(_: io::Error) -> CreateStreamError {
        CreateStreamError::Io
    }
}
impl From<ReaderError> for CreateStreamError {
    fn from(_: ReaderError) -> CreateStreamError {
        CreateStreamError::Reader
    }
}
impl From<StreamError> for CreateStreamError {
    fn from(_: StreamError) -> CreateStreamError {
        CreateStreamError::Stream
    }
}
/// Result alias for pulling records from a stream.
pub(crate) type StreamResult<T> = result::Result<T, StreamError>;
/// Error produced while a stream is running.
#[derive(Fail, PartialEq, Eq, Debug)]
pub(crate) enum StreamError {
    /// Failure while (re-)creating an underlying stream.
    #[fail(display = "{}", _0)]
    Get(#[cause] CreateStreamError),
    /// A record failed to evaluate.
    #[fail(display = "{}", _0)]
    Evaluate(#[cause] EvaluateError),
    /// An expression failed to evaluate.
    #[fail(display = "{}", _0)]
    Expression(#[cause] ExpressionError),
    #[fail(display = "Reader Error")]
    Reader,
    #[fail(display = "Aggregate Error")]
    Aggregate,
}
// Wrap causes that carry detail; Reader/Aggregate errors are collapsed to a
// category like in CreateStreamError above.
impl From<CreateStreamError> for StreamError {
    fn from(err: CreateStreamError) -> StreamError {
        StreamError::Get(err)
    }
}
impl From<EvaluateError> for StreamError {
    fn from(err: EvaluateError) -> StreamError {
        StreamError::Evaluate(err)
    }
}
impl From<ExpressionError> for StreamError {
    fn from(err: ExpressionError) -> StreamError {
        StreamError::Expression(err)
    }
}
impl From<ReaderError> for StreamError {
    fn from(_: ReaderError) -> StreamError {
        StreamError::Reader
    }
}
impl From<AggregateError> for StreamError {
    fn from(_: AggregateError) -> StreamError {
        StreamError::Aggregate
    }
}
/// Result alias for evaluating a single expression.
pub(crate) type ExpressionResult<T> = result::Result<T, ExpressionError>;
/// Error produced while evaluating an expression or builtin function.
#[derive(Fail, PartialEq, Eq, Debug)]
pub(crate) enum ExpressionError {
    #[fail(display = "Key Not Found")]
    KeyNotFound,
    /// Wrong argument count or argument type for a builtin function.
    #[fail(display = "Invalid Arguments")]
    InvalidArguments,
    #[fail(display = "Unknown Function")]
    UnknownFunction,
    /// `*` used where a scalar expression is required.
    #[fail(display = "Invalid Star")]
    InvalidStar,
    /// A CASE-style branch was taken with no else-expression.
    #[fail(display = "Missing Else")]
    MissingElse,
    #[fail(display = "Type Mismatch")]
    TypeMismatch,
    #[fail(display = "{}", _0)]
    ParseTimeInterval(#[cause] common::types::ParseTimeIntervalError),
    #[fail(display = "TimeInterval Not Supported Yet")]
    TimeIntervalNotSupported,
    /// A time interval of length zero cannot bucket anything.
    #[fail(display = "Zero TimeInterval")]
    TimeIntervalZero,
    #[fail(display = "DatePartUnit Not Supported Yet")]
    DatePartUnitNotSupported,
    #[fail(display = "{}", _0)]
    ParseDatePart(#[cause] common::types::ParseDatePartError),
}
impl From<EvaluateError> for ExpressionError {
    // NOTE(review): collapses every evaluation failure into KeyNotFound,
    // discarding the original cause — confirm this is intended.
    fn from(_: EvaluateError) -> ExpressionError {
        ExpressionError::KeyNotFound
    }
}
impl From<common::types::ParseTimeIntervalError> for ExpressionError {
    fn from(e: common::types::ParseTimeIntervalError) -> ExpressionError {
        ExpressionError::ParseTimeInterval(e)
    }
}
impl From<common::types::ParseDatePartError> for ExpressionError {
    fn from(e: common::types::ParseDatePartError) -> ExpressionError {
        ExpressionError::ParseDatePart(e)
    }
}
/// Sort direction for ORDER BY-style operations.
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) enum Ordering {
    Asc,
    Desc,
}
/// A scalar expression evaluated against one tuple of variables.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Expression {
    /// A boolean formula used as a value.
    Logic(Box<Formula>),
    /// A (possibly nested) variable reference, e.g. `a.b[0].c`.
    Variable(PathExpr),
    /// A function call: function name plus named arguments.
    Function(String, Vec<Named>),
    /// CASE-style branch: condition, then-expression, optional else-expression.
    Branch(Box<Formula>, Box<Expression>, Option<Box<Expression>>),
}
/// Resolves `path_expr` against `variables`, starting at segment `i`.
///
/// Walks nested objects (and indexed array elements) segment by segment and
/// returns a clone of the value found. Any segment that does not resolve —
/// missing key, non-object in the middle of a path, non-array under an index,
/// or an out-of-range index — yields `Value::Missing`.
fn get_value_by_path_expr(path_expr: &PathExpr, i: usize, variables: &Variables) -> Value {
    if i >= path_expr.path_segments.len() {
        return Value::Missing;
    }
    let is_last = i + 1 == path_expr.path_segments.len();
    match &path_expr.path_segments[i] {
        PathSegment::AttrName(attr_name) => match variables.get(attr_name) {
            // Final segment: whatever value sits here is the answer.
            Some(val) if is_last => val.clone(),
            // Intermediate segment: only objects can be descended into.
            Some(Value::Object(o)) => get_value_by_path_expr(path_expr, i + 1, o as &Variables),
            _ => Value::Missing,
        },
        PathSegment::ArrayIndex(attr_name, idx) => match variables.get(attr_name) {
            // `a.get(*idx)` instead of `a[*idx]`: an out-of-range index now
            // resolves to Missing like any other dead-end path, rather than
            // panicking.
            Some(Value::Array(a)) => match a.get(*idx) {
                Some(elem) if is_last => elem.clone(),
                Some(Value::Object(o)) => get_value_by_path_expr(path_expr, i + 1, o as &Variables),
                _ => Value::Missing,
            },
            _ => Value::Missing,
        },
    }
}
impl Expression {
    /// Evaluates this expression to a concrete `Value` under `variables`.
    pub(crate) fn expression_value(&self, variables: &Variables) -> ExpressionResult<Value> {
        match self {
            Expression::Logic(formula) => {
                // A logic sub-formula evaluates to a boolean value.
                let out = formula.evaluate(variables)?;
                Ok(Value::Boolean(out))
            }
            Expression::Variable(path_expr) => {
                // Path lookup already returns an owned Value (Missing when
                // unresolvable); the extra `.clone()` here was redundant.
                Ok(get_value_by_path_expr(path_expr, 0, variables))
            }
            Expression::Function(name, arguments) => {
                // Evaluate each argument eagerly; a bare `*` is only valid
                // for aggregates, not scalar function calls.
                let mut values: Vec<Value> = Vec::with_capacity(arguments.len());
                for arg in arguments.iter() {
                    match arg {
                        Named::Expression(expr, _) => {
                            values.push(expr.expression_value(variables)?);
                        }
                        Named::Star => {
                            return Err(ExpressionError::InvalidStar);
                        }
                    }
                }
                evaluate(name, &values)
            }
            Expression::Branch(condition, then_expr, else_expr) => {
                // CASE/WHEN-style branch; a missing ELSE is an error only
                // when the else-branch is actually taken.
                if condition.evaluate(variables)? {
                    then_expr.expression_value(variables)
                } else {
                    match else_expr {
                        Some(e) => e.expression_value(variables),
                        None => Err(ExpressionError::MissingElse),
                    }
                }
            }
        }
    }
}
/// Dispatches `url_*` accessor functions over a `Value::HttpRequest` argument.
///
/// Accessors return `Value::Null` when the URL lacks the requested component;
/// a wrong argument count or type is `InvalidArguments`.
fn evaluate_url_functions(func_name: &str, arguments: &[Value]) -> ExpressionResult<Value> {
    match func_name {
        // url_host(request) -> host name, or Null for e.g. relative URLs.
        "url_host" => {
            if arguments.len() != 1 {
                return Err(ExpressionError::InvalidArguments);
            }
            match &arguments[0] {
                Value::HttpRequest(r) => {
                    if let Some(host) = r.url.host_str() {
                        Ok(Value::String(host.to_string()))
                    } else {
                        Ok(Value::Null)
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // url_port(request) -> explicit port, or Null when none is given.
        "url_port" => {
            if arguments.len() != 1 {
                return Err(ExpressionError::InvalidArguments);
            }
            match &arguments[0] {
                Value::HttpRequest(r) => {
                    if let Some(port) = r.url.port() {
                        Ok(Value::Int(port as i32))
                    } else {
                        Ok(Value::Null)
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // url_path(request) -> the path component (always present).
        "url_path" => {
            if arguments.len() != 1 {
                return Err(ExpressionError::InvalidArguments);
            }
            match &arguments[0] {
                Value::HttpRequest(r) => {
                    let url_path = r.url.path();
                    Ok(Value::String(url_path.to_string()))
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // url_fragment(request) -> the #fragment, or Null.
        "url_fragment" => {
            if arguments.len() != 1 {
                return Err(ExpressionError::InvalidArguments);
            }
            match &arguments[0] {
                Value::HttpRequest(r) => {
                    if let Some(url_fragment) = r.url.fragment() {
                        Ok(Value::String(url_fragment.to_string()))
                    } else {
                        Ok(Value::Null)
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // url_query(request) -> the raw ?query string, or Null.
        "url_query" => {
            if arguments.len() != 1 {
                return Err(ExpressionError::InvalidArguments);
            }
            match &arguments[0] {
                Value::HttpRequest(r) => {
                    if let Some(url_query) = r.url.query() {
                        Ok(Value::String(url_query.to_string()))
                    } else {
                        Ok(Value::Null)
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // url_path_segments(request, idx) -> the idx-th path segment, or Null
        // when the index is out of range or the URL cannot be segmented.
        "url_path_segments" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                (Value::HttpRequest(r), Value::Int(idx)) => {
                    if let Some(url_path_segments) = r.url.path_segments() {
                        let idx = *idx as usize;
                        for (i, segment) in url_path_segments.enumerate() {
                            if i == idx {
                                return Ok(Value::String(segment.to_string()));
                            }
                        }
                        Ok(Value::Null)
                    } else {
                        Ok(Value::Null)
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // url_path_bucket(request, idx, target) -> the path with segment idx
        // replaced by `target` (used to collapse variable path parts).
        "url_path_bucket" => {
            if arguments.len() != 3 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1], &arguments[2]) {
                (Value::HttpRequest(r), Value::Int(idx), Value::String(target)) => {
                    if let Some(url_path_segments) = r.url.path_segments() {
                        let idx = *idx as usize;
                        // Rebuild the path, substituting segment `idx`.
                        let mut res = String::new();
                        for (i, segment) in url_path_segments.enumerate() {
                            if i == idx {
                                res.push('/');
                                res.push_str(target);
                            } else {
                                res.push('/');
                                res.push_str(segment);
                            }
                        }
                        Ok(Value::String(res))
                    } else {
                        Ok(Value::Null)
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        _ => Err(ExpressionError::UnknownFunction),
    }
}
/// Dispatches `host_*` accessor functions over a `Value::Host` argument.
fn evaluate_host_functions(func_name: &str, arguments: &[Value]) -> ExpressionResult<Value> {
    match func_name {
        // host_name(host) -> the host name as a string.
        // The slice pattern rejects both a wrong argument count and a
        // non-Host argument with InvalidArguments, exactly as before.
        "host_name" => match arguments {
            [Value::Host(h)] => Ok(Value::String(h.hostname.clone())),
            _ => Err(ExpressionError::InvalidArguments),
        },
        // host_port(host) -> the port widened to an int.
        "host_port" => match arguments {
            [Value::Host(h)] => Ok(Value::Int(i32::from(h.port))),
            _ => Err(ExpressionError::InvalidArguments),
        },
        _ => Err(ExpressionError::UnknownFunction),
    }
}
/// Evaluates the builtin function `func_name` over already-evaluated
/// `arguments`.
///
/// `url_*` and `host_*` families are delegated to their own dispatchers;
/// unknown names yield `UnknownFunction`, bad arity/types `InvalidArguments`.
fn evaluate(func_name: &str, arguments: &[Value]) -> ExpressionResult<Value> {
    if func_name.starts_with("url_") {
        return evaluate_url_functions(func_name, arguments);
    }
    if func_name.starts_with("host_") {
        return evaluate_host_functions(func_name, arguments);
    }
    match func_name {
        // Integer arithmetic; only Int/Int pairs are supported.
        "Plus" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                (Value::Int(a), Value::Int(b)) => Ok(Value::Int(a + b)),
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        "Minus" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                (Value::Int(a), Value::Int(b)) => Ok(Value::Int(a - b)),
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        "Times" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                (Value::Int(a), Value::Int(b)) => Ok(Value::Int(a * b)),
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        "Divide" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                // checked_div: a zero divisor (or the i32::MIN / -1 overflow)
                // is reported as InvalidArguments instead of panicking.
                (Value::Int(a), Value::Int(b)) => a
                    .checked_div(*b)
                    .map(Value::Int)
                    .ok_or(ExpressionError::InvalidArguments),
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // date_part(unit, datetime) -> the requested component as a float.
        "date_part" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                (Value::String(date_part_unit_str), Value::DateTime(dt)) => {
                    let date_part_unit = common::types::parse_date_part_unit(date_part_unit_str)?;
                    match date_part_unit {
                        common::types::DatePartUnit::Second => Ok(Value::Float(OrderedFloat::from(dt.second() as f32))),
                        common::types::DatePartUnit::Minute => Ok(Value::Float(OrderedFloat::from(dt.minute() as f32))),
                        _ => Err(ExpressionError::DatePartUnitNotSupported),
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        // time_bucket(interval, datetime) -> datetime truncated down to the
        // start of its bucket. The interval length must evenly divide the
        // unit's range (60 s, 60 min, 24 h).
        "time_bucket" => {
            if arguments.len() != 2 {
                return Err(ExpressionError::InvalidArguments);
            }
            match (&arguments[0], &arguments[1]) {
                (Value::String(time_interval_str), Value::DateTime(dt)) => {
                    let time_interval = common::types::parse_time_interval(time_interval_str)?;
                    if time_interval.n == 0 {
                        return Err(ExpressionError::TimeIntervalZero);
                    }
                    match time_interval.unit {
                        common::types::TimeIntervalUnit::Second => {
                            if time_interval.n > 60 || 60 % time_interval.n != 0 {
                                return Err(ExpressionError::TimeIntervalNotSupported);
                            }
                            let mut target_opt: Option<u32> = None;
                            let step_size: usize = time_interval.n as usize;
                            // Scan bucket boundaries from the top down for the
                            // largest one at or below the current second.
                            //FIXME: binary search
                            for point in (0..=60u32).rev().step_by(step_size) {
                                if point <= dt.second() {
                                    target_opt = Some(point);
                                    break;
                                }
                            }
                            if let Some(target) = target_opt {
                                let new_dt = dt.with_second(target).and_then(|d| d.with_nanosecond(0)).unwrap();
                                Ok(Value::DateTime(new_dt))
                            } else {
                                // 0 is always a boundary, so a target always exists.
                                unreachable!();
                            }
                        }
                        common::types::TimeIntervalUnit::Minute => {
                            if time_interval.n > 60 || 60 % time_interval.n != 0 {
                                return Err(ExpressionError::TimeIntervalNotSupported);
                            }
                            let mut target_opt: Option<u32> = None;
                            let step_size: usize = time_interval.n as usize;
                            //FIXME: binary search
                            for point in (0..=60u32).rev().step_by(step_size) {
                                if point <= dt.minute() {
                                    target_opt = Some(point);
                                    break;
                                }
                            }
                            if let Some(target) = target_opt {
                                // Zero out everything below the bucket unit.
                                let new_dt = dt
                                    .with_minute(target)
                                    .and_then(|d| d.with_second(0))
                                    .and_then(|d| d.with_nanosecond(0))
                                    .unwrap();
                                Ok(Value::DateTime(new_dt))
                            } else {
                                unreachable!();
                            }
                        }
                        common::types::TimeIntervalUnit::Hour => {
                            if time_interval.n > 24 || 24 % time_interval.n != 0 {
                                return Err(ExpressionError::TimeIntervalNotSupported);
                            }
                            let mut target_opt: Option<u32> = None;
                            let step_size: usize = time_interval.n as usize;
                            //FIXME: binary search
                            for point in (0..=24u32).rev().step_by(step_size) {
                                if point <= dt.hour() {
                                    target_opt = Some(point);
                                    break;
                                }
                            }
                            if let Some(target) = target_opt {
                                let new_dt = dt
                                    .with_hour(target)
                                    .and_then(|d| d.with_minute(0))
                                    .and_then(|d| d.with_second(0))
                                    .and_then(|d| d.with_nanosecond(0))
                                    .unwrap();
                                Ok(Value::DateTime(new_dt))
                            } else {
                                unreachable!();
                            }
                        }
                        _ => Err(ExpressionError::TimeIntervalNotSupported),
                    }
                }
                _ => Err(ExpressionError::InvalidArguments),
            }
        }
        _ => Err(ExpressionError::UnknownFunction),
    }
}
/// Binary comparison operator used in filter predicates.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Relation {
    Equal,
    NotEqual,
    MoreThan,
    LessThan,
    GreaterEqual,
    LessEqual,
}
impl Relation {
    /// Evaluate both operand expressions and compare the resulting values.
    ///
    /// Equality and inequality are defined for every value type; the four
    /// ordering relations are only defined between two `Int`s or two
    /// `Float`s and yield `TypeMismatch` for any other combination.
    pub(crate) fn apply(&self, variables: &Variables, left: &Expression, right: &Expression) -> ExpressionResult<bool> {
        let lhs = left.expression_value(variables)?;
        let rhs = right.expression_value(variables)?;
        // Handle the type-agnostic relations first.
        match self {
            Relation::Equal => return Ok(lhs == rhs),
            Relation::NotEqual => return Ok(lhs != rhs),
            _ => {}
        }
        match (lhs, rhs) {
            (Value::Int(l), Value::Int(r)) => Ok(match self {
                Relation::MoreThan => l > r,
                Relation::LessThan => l < r,
                Relation::GreaterEqual => l >= r,
                Relation::LessEqual => l <= r,
                // Equal / NotEqual returned above.
                _ => unreachable!(),
            }),
            (Value::Float(l), Value::Float(r)) => Ok(match self {
                Relation::MoreThan => l > r,
                Relation::LessThan => l < r,
                Relation::GreaterEqual => l >= r,
                Relation::LessEqual => l <= r,
                _ => unreachable!(),
            }),
            _ => Err(ExpressionError::TypeMismatch),
        }
    }
}
/// A projection item: either an expression with an optional alias, or `*`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Named {
    /// An expression plus an optional output name (`expr AS name`).
    Expression(Expression, Option<VariableName>),
    /// The `*` wildcard selecting all columns.
    Star,
}
/// A boolean filter expression tree.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Formula {
    /// A literal true/false.
    Constant(bool),
    /// Logical conjunction of two sub-formulas.
    And(Box<Formula>, Box<Formula>),
    /// Logical disjunction of two sub-formulas.
    Or(Box<Formula>, Box<Formula>),
    /// Logical negation of a sub-formula.
    Not(Box<Formula>),
    /// A comparison between two expressions.
    Predicate(Relation, Box<Expression>, Box<Expression>),
}
impl Formula {
    /// Recursively evaluate the formula against `variables`.
    ///
    /// Both operands of `And`/`Or` are always evaluated (no short-circuit),
    /// so an error in the right operand is reported even when the left
    /// operand already decides the result.
    pub(crate) fn evaluate(&self, variables: &Variables) -> EvaluateResult<bool> {
        match self {
            Formula::Constant(value) => Ok(*value),
            Formula::Not(inner) => Ok(!inner.evaluate(variables)?),
            Formula::And(lhs, rhs) => {
                let left = lhs.evaluate(variables)?;
                let right = rhs.evaluate(variables)?;
                Ok(left && right)
            }
            Formula::Or(lhs, rhs) => {
                let left = lhs.evaluate(variables)?;
                let right = rhs.evaluate(variables)?;
                Ok(left || right)
            }
            Formula::Predicate(relation, lhs, rhs) => relation.apply(variables, lhs, rhs),
        }
    }
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Node {
DataSource(DataSource, Vec<common::types::Binding>),
Filter(Box<Node>, Box<Formula>),
Map(Vec<Named>, Box<Node>),
GroupBy(Vec<PathExpr>, Vec<NamedAggregate>, Box<Node>),
Limit(u32, Box<Node>),
OrderBy(Vec<PathExpr>, Vec<Ordering>, Box<Node>),
}
impl Node {
pub(crate) fn get(&self, variables: Variables) -> CreateStreamResult<Box<dyn RecordStream>> {
match self {
Node::Filter(source, formula) => {
let record_stream = source.get(variables.clone())?;
let stream = FilterStream::new(*formula.clone(), variables, record_stream);
Ok(Box::new(stream))
}
Node::Map(named_list, source) => {
let record_stream = source.get(variables.clone())?;
let stream = MapStream::new(named_list.clone(), variables, record_stream);
Ok(Box::new(stream))
}
Node::DataSource(data_source, bindings) => match data_source {
DataSource::File(path, file_format, _table_name) => {
let reader = ReaderBuilder::new(file_format.clone()).with_path(path)?;
let file_stream = LogFileStream::new(Box::new(reader));
if !bindings.is_empty() {
let stream = ProjectionStream::new(Box::new(file_stream), bindings.clone());
return Ok(Box::new(stream));
} else {
Ok(Box::new(file_stream))
}
}
DataSource::Stdin(file_format, _table_name) => {
let reader = ReaderBuilder::new(file_format.clone()).with_reader(io::stdin());
let stream = LogFileStream::new(Box::new(reader));
Ok(Box::new(stream))
}
},
Node::GroupBy(fields, named_aggregates, source) => {
let record_stream = source.get(variables.clone())?;
let stream = GroupByStream::new(fields.clone(), variables, named_aggregates.clone(), record_stream);
Ok(Box::new(stream))
}
Node::Limit(row_count, source) => {
let record_stream = source.get(variables.clone())?;
let stream = LimitStream::new(*row_count, record_stream);
Ok(Box::new(stream))
}
Node::OrderBy(column_names, orderings, source) => {
let mut record_stream = source.get(variables.clone())?;
let mut records = Vec::new();
while let Some(record) = record_stream.next()? {
records.push(record);
}
records.sort_by(|a, b| {
for idx in 0..column_names.len() {
let column_name = &column_names[idx];
let curr_ordering = &orderings[idx];
let a_value = a.get(column_name);
let b_value = b.get(column_name);
match (a_value, b_value) {
(Value::Int(i1), Value::Int(i2)) => match curr_ordering {
Ordering::Asc => {
return i1.cmp(&i2);
}
Ordering::Desc => {
return i2.cmp(&i1);
}
},
(Value::Boolean(b1), Value::Boolean(b2)) => match curr_ordering {
Ordering::Asc => {
return b1.cmp(&b2);
}
Ordering::Desc => {
return b2.cmp(&b1);
}
},
(Value::Float(f1), Value::Float(f2)) => match curr_ordering {
Ordering::Asc => {
return f1.cmp(&f2);
}
Ordering::Desc => {
return f2.cmp(&f1);
}
},
(Value::String(s1), Value::String(s2)) => match curr_ordering {
Ordering::Asc => {
return s1.cmp(&s2);
}
Ordering::Desc => {
return s2.cmp(&s1);
}
},
(Value::DateTime(dt1), Value::DateTime(dt2)) => match curr_ordering {
Ordering::Asc => {
return dt1.cmp(&dt2);
}
Ordering::Desc => {
return dt2.cmp(&dt1);
}
},
(Value::Null, Value::Null) => {
return std::cmp::Ordering::Equal;
}
(Value::Host(h1), Value::Host(h2)) => {
let s1 = h1.to_string();
let s2 = h2.to_string();
match curr_ordering {
Ordering::Asc => {
return s1.cmp(&s2);
}
Ordering::Desc => {
return s2.cmp(&s1);
}
}
}
(Value::HttpRequest(h1), Value::HttpRequest(h2)) => {
let s1 = h1.to_string();
let s2 = h2.to_string();
match curr_ordering {
Ordering::Asc => {
return s1.cmp(&s2);
}
Ordering::Desc => {
return s2.cmp(&s1);
}
}
}
_ => {
unreachable!();
}
}
}
std::cmp::Ordering::Equal
});
let stream = InMemoryStream::new(VecDeque::from(records));
Ok(Box::new(stream))
}
}
}
}
/// Result type for aggregate operations.
pub(crate) type AggregateResult<T> = result::Result<T, AggregateError>;
/// Errors produced while updating or reading aggregates.
#[derive(Fail, PartialEq, Eq, Debug)]
pub enum AggregateError {
    /// No data has been recorded for the requested grouping key.
    #[fail(display = "Key Not Found")]
    KeyNotFound,
    /// The value's type is not supported by this aggregate
    /// (e.g. a non-numeric value fed to SUM/AVG/MIN/MAX).
    #[fail(display = "Invalid Type")]
    InvalidType,
}
/// An aggregate computation paired with an optional output column name.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct NamedAggregate {
    // The aggregate computation itself.
    pub(crate) aggregate: Aggregate,
    // Optional alias for the output column (the `AS name` part); `None`
    // presumably means a default name is derived elsewhere — confirm at call site.
    pub(crate) name_opt: Option<String>,
}
impl NamedAggregate {
    /// Pair `aggregate` with an optional output name.
    pub(crate) fn new(aggregate: Aggregate, name_opt: Option<String>) -> Self {
        NamedAggregate { aggregate, name_opt }
    }
}
/// The set of supported aggregate functions, each carrying its state and
/// the projection item (or ordering column name) it aggregates over.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum Aggregate {
    Avg(AvgAggregate, Named),
    Count(CountAggregate, Named),
    First(FirstAggregate, Named),
    Last(LastAggregate, Named),
    Max(MaxAggregate, Named),
    Min(MinAggregate, Named),
    Sum(SumAggregate, Named),
    ApproxCountDistinct(ApproxCountDistinctAggregate, Named),
    PercentileDisc(PercentileDiscAggregate, String),
    ApproxPercentile(ApproxPercentileAggregate, String),
    GroupAs(GroupAsAggregate, Named),
}
impl Aggregate {
    /// Forward one (key, value) observation to the wrapped implementation.
    #[allow(dead_code)]
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        match self {
            Aggregate::Avg(agg, _) => agg.add_record(key, value),
            Aggregate::Count(agg, _) => agg.add_record(key, value),
            Aggregate::First(agg, _) => agg.add_record(key, value),
            Aggregate::Last(agg, _) => agg.add_record(key, value),
            Aggregate::Max(agg, _) => agg.add_record(key, value),
            Aggregate::Min(agg, _) => agg.add_record(key, value),
            Aggregate::Sum(agg, _) => agg.add_record(key, value),
            Aggregate::ApproxCountDistinct(agg, _) => agg.add_record(key, value),
            Aggregate::PercentileDisc(agg, _) => agg.add_record(key, value),
            Aggregate::ApproxPercentile(agg, _) => agg.add_record(key, value),
            Aggregate::GroupAs(agg, _) => agg.add_record(key, value),
        }
    }
    /// Read the aggregated value for `key` from the wrapped implementation.
    pub(crate) fn get_aggregated(&mut self, key: &Option<Tuple>) -> AggregateResult<Value> {
        match self {
            Aggregate::Avg(agg, _) => agg.get_aggregated(key),
            Aggregate::Count(agg, _) => agg.get_aggregated(key),
            Aggregate::First(agg, _) => agg.get_aggregated(key),
            Aggregate::Last(agg, _) => agg.get_aggregated(key),
            Aggregate::Max(agg, _) => agg.get_aggregated(key),
            Aggregate::Min(agg, _) => agg.get_aggregated(key),
            Aggregate::Sum(agg, _) => agg.get_aggregated(key),
            Aggregate::ApproxCountDistinct(agg, _) => agg.get_aggregated(key),
            Aggregate::PercentileDisc(agg, _) => agg.get_aggregated(key),
            Aggregate::ApproxPercentile(agg, _) => agg.get_aggregated(key),
            Aggregate::GroupAs(agg, _) => agg.get_aggregated(key),
        }
    }
}
/// `PERCENTILE_DISC`: exact discrete percentile over all buffered values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct PercentileDiscAggregate {
    // All values observed per grouping key; sorted lazily in `get_aggregated`.
    pub(crate) partitions: HashMap<Option<Tuple>, Vec<Value>>,
    // Requested percentile in [0, 1].
    pub(crate) percentile: OrderedFloat<f32>,
    // Sort direction used when ranking values.
    pub(crate) ordering: Ordering,
}
impl PercentileDiscAggregate {
    pub(crate) fn new(percentile: OrderedFloat<f32>, ordering: Ordering) -> Self {
        PercentileDiscAggregate {
            partitions: HashMap::new(),
            percentile,
            ordering,
        }
    }
    /// Buffer one value for `key`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        // Entry API: single lookup instead of get + insert.
        self.partitions.entry(key.clone()).or_insert(Vec::new()).push(value.clone());
        Ok(())
    }
    /// Sort the buffered values for `key` per `self.ordering` and return the
    /// element at the requested percentile rank.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        //FIXME: expensive operation — clones and sorts the whole partition on every call.
        // BUG FIX: a missing key previously panicked on unwrap; now reports KeyNotFound.
        let mut v = self
            .partitions
            .get(key)
            .ok_or(AggregateError::KeyNotFound)?
            .clone();
        v.sort_by(|a, b| match (a, b) {
            (Value::Int(i1), Value::Int(i2)) => match self.ordering {
                Ordering::Asc => i1.cmp(i2),
                Ordering::Desc => i2.cmp(i1),
            },
            (Value::Boolean(b1), Value::Boolean(b2)) => match self.ordering {
                Ordering::Asc => b1.cmp(b2),
                Ordering::Desc => b2.cmp(b1),
            },
            (Value::Float(f1), Value::Float(f2)) => match self.ordering {
                Ordering::Asc => f1.cmp(f2),
                Ordering::Desc => f2.cmp(f1),
            },
            (Value::DateTime(dt1), Value::DateTime(dt2)) => match self.ordering {
                Ordering::Asc => dt1.cmp(dt2),
                Ordering::Desc => dt2.cmp(dt1),
            },
            (Value::String(s1), Value::String(s2)) => match self.ordering {
                Ordering::Asc => s1.cmp(s2),
                Ordering::Desc => s2.cmp(s1),
            },
            (Value::Null, Value::Null) => std::cmp::Ordering::Equal,
            (Value::Host(h1), Value::Host(h2)) => {
                let s1 = h1.to_string();
                let s2 = h2.to_string();
                match self.ordering {
                    Ordering::Asc => s1.cmp(&s2),
                    Ordering::Desc => s2.cmp(&s1),
                }
            }
            (Value::HttpRequest(h1), Value::HttpRequest(h2)) => {
                let s1 = h1.to_string();
                let s2 = h2.to_string();
                match self.ordering {
                    Ordering::Asc => s1.cmp(&s2),
                    Ordering::Desc => s2.cmp(&s1),
                }
            }
            _ => {
                // Mixed-type partitions indicate a planner bug upstream.
                unreachable!();
            }
        });
        let f32_percentile: f32 = self.percentile.into();
        // BUG FIX: clamp the rank so percentile == 1.0 selects the last
        // element instead of indexing one past the end (previously a panic).
        // `v` is non-empty whenever the key exists, so `len() - 1` is safe.
        let idx = (((v.len() as f32) * f32_percentile) as usize).min(v.len() - 1);
        Ok(v[idx].clone())
    }
}
/// Approximate percentile via a t-digest, batching raw values in a buffer
/// before merging them into the per-key digest.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ApproxPercentileAggregate {
    // Per-key t-digest holding already-merged values.
    pub(crate) partitions: HashMap<Option<Tuple>, TDigest>,
    // Per-key buffer of raw values not yet merged into the digest.
    pub(crate) buffer: HashMap<Option<Tuple>, Vec<Value>>,
    // Requested percentile in [0, 1].
    pub(crate) percentile: OrderedFloat<f32>,
    pub(crate) ordering: Ordering,
}
impl ApproxPercentileAggregate {
    pub(crate) fn new(percentile: OrderedFloat<f32>, ordering: Ordering) -> Self {
        ApproxPercentileAggregate {
            partitions: HashMap::new(),
            buffer: HashMap::new(),
            percentile,
            ordering,
        }
    }
    /// Convert buffered numeric `Value`s to `f64` for the t-digest.
    fn to_f64_vec(buf: &[Value]) -> AggregateResult<Vec<f64>> {
        let mut fvec = Vec::with_capacity(buf.len());
        for val in buf {
            match val {
                Value::Float(f) => fvec.push(f64::from(f.into_inner())),
                Value::Int(i) => fvec.push(f64::from(*i)),
                _ => return Err(AggregateError::InvalidType),
            }
        }
        Ok(fvec)
    }
    /// Merge the buffered values for `key` into its digest and clear the buffer.
    fn flush(&mut self, key: &Option<Tuple>) -> AggregateResult<()> {
        if let Some(buf) = self.buffer.get_mut(key) {
            if !buf.is_empty() {
                let fvec = Self::to_f64_vec(buf)?;
                let digest = self
                    .partitions
                    .entry(key.clone())
                    .or_insert(TDigest::new_with_size(100));
                let merged = digest.merge_unsorted(fvec);
                self.partitions.insert(key.clone(), merged);
                buf.clear();
            }
        }
        Ok(())
    }
    /// Buffer one value for `key`, merging into the digest in batches of
    /// 10000 to amortize t-digest construction cost.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        let buf = self.buffer.entry(key.clone()).or_insert(Vec::new());
        buf.push(value.clone());
        if buf.len() >= 10000 {
            self.flush(key)?;
        }
        Ok(())
    }
    /// Flush any buffered values and estimate the requested quantile.
    pub(crate) fn get_aggregated(&mut self, key: &Option<Tuple>) -> AggregateResult<Value> {
        // BUG FIX: the previous implementation neither stored the merged
        // digest back nor cleared the buffer here, so a second call
        // double-counted the buffered values; it also panicked via unwrap()
        // when the key had never been seen. Flushing first makes repeated
        // calls idempotent, and a missing key now reports KeyNotFound.
        self.flush(key)?;
        let t = self.partitions.get(key).ok_or(AggregateError::KeyNotFound)?;
        let f32_percentile: f32 = self.percentile.into();
        let f64_percentile: f64 = f64::from(f32_percentile);
        let f64_ans = t.estimate_quantile(f64_percentile);
        Ok(Value::Float(OrderedFloat::from(f64_ans as f32)))
    }
}
/// Running arithmetic mean per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct AvgAggregate {
    // Current mean per key.
    pub(crate) averages: HashMap<Option<Tuple>, OrderedFloat<f32>>,
    // Number of values folded in per key.
    pub(crate) counts: HashMap<Option<Tuple>, i64>,
}
impl AvgAggregate {
    /// Create an empty AVG aggregate.
    pub(crate) fn new() -> Self {
        AvgAggregate {
            averages: HashMap::new(),
            counts: HashMap::new(),
        }
    }
    /// Fold one numeric value into the running mean for `key`.
    /// Non-numeric values yield `InvalidType`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        let incoming: OrderedFloat<f32> = match value {
            Value::Int(i) => OrderedFloat::from(*i as f32),
            Value::Float(f) => *f,
            _ => return Err(AggregateError::InvalidType),
        };
        match (self.averages.get(key), self.counts.get(key)) {
            (Some(&average), Some(&count)) => {
                // Incremental mean: avg' = (avg * n + x) / (n + 1).
                let next_count = count + 1;
                let current: f32 = average.into();
                let x: f32 = incoming.into();
                let next_average: f32 = (current * (count as f32) + x) / (next_count as f32);
                self.averages.insert(key.clone(), OrderedFloat::from(next_average));
                self.counts.insert(key.clone(), next_count);
            }
            _ => {
                // First observation for this key.
                self.averages.insert(key.clone(), incoming);
                self.counts.insert(key.clone(), 1);
            }
        }
        Ok(())
    }
    /// Return the current mean for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        self.averages
            .get(key)
            .map(|&average| Value::Float(average))
            .ok_or(AggregateError::KeyNotFound)
    }
}
/// Running sum per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct SumAggregate {
    // Current sum per key.
    pub(crate) sums: HashMap<Option<Tuple>, OrderedFloat<f32>>,
}
impl SumAggregate {
    /// Create an empty SUM aggregate.
    pub(crate) fn new() -> Self {
        SumAggregate { sums: HashMap::new() }
    }
    /// Add one numeric value to the running sum for `key`.
    /// Non-numeric values yield `InvalidType`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        let addend: OrderedFloat<f32> = match value {
            Value::Int(i) => OrderedFloat::from(*i as f32),
            Value::Float(f) => *f,
            _ => return Err(AggregateError::InvalidType),
        };
        match self.sums.get(key) {
            Some(&current) => {
                let f32_current: f32 = current.into();
                let f32_addend: f32 = addend.into();
                self.sums.insert(key.clone(), OrderedFloat::from(f32_current + f32_addend));
            }
            None => {
                self.sums.insert(key.clone(), addend);
            }
        }
        Ok(())
    }
    /// Return the current sum for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        self.sums
            .get(key)
            .map(|&sum| Value::Float(sum))
            .ok_or(AggregateError::KeyNotFound)
    }
}
/// Row/value counter per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct CountAggregate {
    // Count per key; keyed by `Option<Tuple>` like the other aggregates
    // (presumably `None` when there is no GROUP BY key — confirm at call site).
    pub(crate) counts: HashMap<Option<Tuple>, i64>,
}
impl CountAggregate {
    /// Create an empty COUNT aggregate.
    pub(crate) fn new() -> Self {
        CountAggregate { counts: HashMap::new() }
    }
    /// Count one value for `key`; `Null` values are ignored
    /// (SQL `COUNT(expr)` semantics).
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        if let Value::Null = value {
            //Null value doesn't contribute to the total count
            return Ok(());
        }
        // Entry API: one hash lookup instead of get + insert.
        *self.counts.entry(key.clone()).or_insert(0) += 1;
        Ok(())
    }
    /// Count one row unconditionally (`COUNT(*)` semantics).
    pub(crate) fn add_row(&mut self, key: Option<Tuple>) -> AggregateResult<()> {
        *self.counts.entry(key).or_insert(0) += 1;
        Ok(())
    }
    /// Return the count for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        match self.counts.get(key) {
            // NOTE(review): truncating i64 -> i32 cast preserved from the
            // original; counts above i32::MAX would wrap.
            Some(&count) => Ok(Value::Int(count as i32)),
            None => Err(AggregateError::KeyNotFound),
        }
    }
}
/// Collects all values of a group into an array (GROUP_AS semantics).
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct GroupAsAggregate {
    // All values observed per grouping key, in arrival order.
    pub(crate) tuples: HashMap<Option<Tuple>, Vec<Value>>,
}
impl GroupAsAggregate {
    /// Create an empty GROUP_AS aggregate.
    pub(crate) fn new() -> Self {
        GroupAsAggregate { tuples: HashMap::new() }
    }
    /// Append `value` to the group for `key`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        // Entry API: one hash lookup instead of get_mut + insert.
        self.tuples.entry(key.clone()).or_default().push(value.clone());
        Ok(())
    }
    /// Return the collected values for `key` as an array, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        self.tuples
            .get(key)
            .map(|tuples| Value::Array(tuples.clone()))
            .ok_or(AggregateError::KeyNotFound)
    }
}
/// Running maximum per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct MaxAggregate {
    // Current maximum per key.
    pub(crate) maxs: HashMap<Option<Tuple>, Value>,
}
impl MaxAggregate {
    /// Create an empty MAX aggregate.
    pub(crate) fn new() -> Self {
        MaxAggregate { maxs: HashMap::new() }
    }
    /// Keep the larger of the stored value and `value` for `key`.
    /// Only two `Int`s or two `Float`s are comparable; anything else
    /// yields `InvalidType`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        match self.maxs.get(key) {
            None => {
                // First observation for this key.
                self.maxs.insert(key.clone(), value.clone());
            }
            Some(current) => {
                let should_replace = match (current, value) {
                    (&Value::Int(i1), &Value::Int(i2)) => i1 < i2,
                    (&Value::Float(f1), &Value::Float(f2)) => f1 < f2,
                    _ => return Err(AggregateError::InvalidType),
                };
                if should_replace {
                    self.maxs.insert(key.clone(), value.clone());
                }
            }
        }
        Ok(())
    }
    /// Return the current maximum for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        self.maxs
            .get(key)
            .cloned()
            .ok_or(AggregateError::KeyNotFound)
    }
}
/// Running minimum per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct MinAggregate {
    // Current minimum per key.
    pub(crate) mins: HashMap<Option<Tuple>, Value>,
}
impl MinAggregate {
    /// Create an empty MIN aggregate.
    pub(crate) fn new() -> Self {
        MinAggregate { mins: HashMap::new() }
    }
    /// Keep the smaller of the stored value and `value` for `key`.
    /// Only two `Int`s or two `Float`s are comparable; anything else
    /// yields `InvalidType`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        match self.mins.get(key) {
            None => {
                // First observation for this key.
                self.mins.insert(key.clone(), value.clone());
            }
            Some(current) => {
                let should_replace = match (current, value) {
                    (&Value::Int(i1), &Value::Int(i2)) => i1 > i2,
                    (&Value::Float(f1), &Value::Float(f2)) => f1 > f2,
                    _ => return Err(AggregateError::InvalidType),
                };
                if should_replace {
                    self.mins.insert(key.clone(), value.clone());
                }
            }
        }
        Ok(())
    }
    /// Return the current minimum for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        self.mins
            .get(key)
            .cloned()
            .ok_or(AggregateError::KeyNotFound)
    }
}
/// Remembers the first value observed per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct FirstAggregate {
    // First value seen per key; later values are ignored.
    pub(crate) firsts: HashMap<Option<Tuple>, Value>,
}
impl FirstAggregate {
    /// Create an empty FIRST aggregate.
    pub(crate) fn new() -> Self {
        FirstAggregate { firsts: HashMap::new() }
    }
    /// Store `value` only if no value has been recorded for `key` yet.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        // Idiom fix: `contains_key(key)` replaces `get(&key).is_some()`,
        // which passed a needless double reference; we also only clone
        // on the first observation for the key.
        if !self.firsts.contains_key(key) {
            self.firsts.insert(key.clone(), value.clone());
        }
        Ok(())
    }
    /// Return the first value recorded for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        if let Some(first) = self.firsts.get(key) {
            Ok(first.clone())
        } else {
            Err(AggregateError::KeyNotFound)
        }
    }
}
/// Remembers the most recent value observed per grouping key.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct LastAggregate {
    // Latest value seen per key; every new value overwrites the previous one.
    pub(crate) lasts: HashMap<Option<Tuple>, Value>,
}
impl LastAggregate {
    /// Create an empty LAST aggregate.
    pub(crate) fn new() -> Self {
        LastAggregate { lasts: HashMap::new() }
    }
    /// Record `value` as the latest observation for `key`.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        self.lasts.insert(key.clone(), value.clone());
        Ok(())
    }
    /// Return the latest value recorded for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        self.lasts
            .get(key)
            .cloned()
            .ok_or(AggregateError::KeyNotFound)
    }
}
/// Approximate distinct count per grouping key, backed by HyperLogLog.
#[derive(Debug, Clone)]
pub(crate) struct ApproxCountDistinctAggregate {
    // One HyperLogLog sketch per key.
    pub(crate) counts: HashMap<Option<Tuple>, HyperLogLog<Value>>,
}
impl PartialEq for ApproxCountDistinctAggregate {
    fn eq(&self, _other: &Self) -> bool {
        //Ignoring the detail since we only use Eq for unit test
        true
    }
}
impl Eq for ApproxCountDistinctAggregate {}
impl ApproxCountDistinctAggregate {
    /// Create an empty APPROX_COUNT_DISTINCT aggregate.
    pub(crate) fn new() -> Self {
        ApproxCountDistinctAggregate { counts: HashMap::new() }
    }
    /// Feed one value into the sketch for `key`; `Null` values are ignored.
    pub(crate) fn add_record(&mut self, key: &Option<Tuple>, value: &Value) -> AggregateResult<()> {
        if let Value::Null = value {
            //Null value doesn't contribute to the total count
            return Ok(());
        }
        // BUG FIX: the previous implementation inserted an empty sketch for
        // a new key WITHOUT adding `value`, silently dropping the first
        // element of every group. The entry API creates the sketch and adds
        // the value in one pass.
        self.counts
            .entry(key.clone())
            .or_insert_with(|| HyperLogLog::new(8))
            .add(value);
        Ok(())
    }
    /// Return the estimated distinct count for `key`, or `KeyNotFound`.
    pub(crate) fn get_aggregated(&self, key: &Option<Tuple>) -> AggregateResult<Value> {
        if let Some(hll) = self.counts.get(key) {
            Ok(Value::Int(hll.count() as i32))
        } else {
            Err(AggregateError::KeyNotFound)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // AVG of a single element is that element.
    #[test]
    fn test_avg_aggregate_with_one_element() {
        let mut iter = Aggregate::Avg(AvgAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        let value = Value::Float(OrderedFloat::from(5.0));
        let _ = iter.add_record(&tuple, &value);
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(value), aggregate);
    }
    // AVG of 1.0..=10.0 is 5.5.
    #[test]
    fn test_avg_aggregate_with_many_elements() {
        let mut iter = Aggregate::Avg(AvgAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 1..=10 {
            let value = Value::Float(OrderedFloat::from(i as f32));
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Float(OrderedFloat::from(5.5))), aggregate);
    }
    // COUNT of 13 non-null records is 13.
    #[test]
    fn test_count_aggregate() {
        let mut iter = Aggregate::Count(CountAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 0..13 {
            let value = Value::Int(i);
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Int(13)), aggregate);
    }
    // FIRST keeps the initial value per key.
    #[test]
    fn test_first_aggregate() {
        let mut iter = Aggregate::First(FirstAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 0..13 {
            let value = Value::Int(i);
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Int(0)), aggregate);
    }
    // LAST keeps the most recent value per key.
    #[test]
    fn test_last_aggregate() {
        let mut iter = Aggregate::Last(LastAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 0..13 {
            let value = Value::Int(i);
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Int(12)), aggregate);
    }
    // SUM of 1.0..=10.0 is 55.0.
    #[test]
    fn test_sum_aggregate_with_many_elements() {
        let mut iter = Aggregate::Sum(SumAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 1..=10 {
            let value = Value::Float(OrderedFloat::from(i as f32));
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Float(OrderedFloat::from(55.0))), aggregate);
    }
    // MAX of 0..13 is 12.
    #[test]
    fn test_max_aggregate() {
        let mut iter = Aggregate::Max(MaxAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 0..13 {
            let value = Value::Int(i);
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Int(12)), aggregate);
    }
    // MIN of 0..13 is 0.
    #[test]
    fn test_min_aggregate() {
        let mut iter = Aggregate::Min(MinAggregate::new(), Named::Star);
        let tuple = Some(vec![Value::String("key".to_string())]);
        for i in 0..13 {
            let value = Value::Int(i);
            let _ = iter.add_record(&tuple, &value);
        }
        let aggregate = iter.get_aggregated(&tuple);
        assert_eq!(Ok(Value::Int(0)), aggregate);
    }
    // host_name / host_port split a host:port value.
    #[test]
    fn test_evaluate_host_functions() {
        let v = Value::Host(common::types::parse_host("192.168.131.39:2817").unwrap());
        let name = evaluate_host_functions("host_name", &vec![v.clone()]).unwrap();
        assert_eq!(name, Value::String("192.168.131.39".to_string()));
        let port = evaluate_host_functions("host_port", &vec![v]).unwrap();
        assert_eq!(port, Value::Int(2817));
    }
    // url_* accessors over a parsed HTTP request line.
    #[test]
    fn test_evaluate_url_functions() {
        let v = Value::HttpRequest(
            common::types::parse_http_request(
                "GET http://example.com:8000/users/123?mode=json&after=&iteration=1 HTTP/1.1",
            )
            .unwrap(),
        );
        let name = evaluate_url_functions("url_host", &vec![v.clone()]).unwrap();
        assert_eq!(name, Value::String("example.com".to_string()));
        let port = evaluate_url_functions("url_port", &vec![v.clone()]).unwrap();
        assert_eq!(port, Value::Int(8000));
        let path = evaluate_url_functions("url_path", &vec![v.clone()]).unwrap();
        assert_eq!(path, Value::String("/users/123".to_string()));
        // No fragment in the request URL, so Null is expected.
        let fragment = evaluate_url_functions("url_fragment", &vec![v.clone()]).unwrap();
        assert_eq!(fragment, Value::Null);
        let query = evaluate_url_functions("url_query", &vec![v.clone()]).unwrap();
        assert_eq!(query, Value::String("mode=json&after=&iteration=1".to_string()));
        // Segment index 1 is the second path segment ("123").
        let path_segments = evaluate_url_functions("url_path_segments", &vec![v.clone(), Value::Int(1)]).unwrap();
        assert_eq!(path_segments, Value::String("123".to_string()));
        // Replace segment 1 with "_" to bucket paths.
        let mapped_path = evaluate_url_functions(
            "url_path_bucket",
            &vec![v.clone(), Value::Int(1), Value::String("_".to_string())],
        )
        .unwrap();
        assert_eq!(mapped_path, Value::String("/users/_".to_string()));
    }
    // Arithmetic, time_bucket truncation, and date_part extraction.
    #[test]
    fn test_evaluate() {
        let v = evaluate("Plus", &vec![Value::Int(1), Value::Int(2)]).unwrap();
        assert_eq!(v, Value::Int(3));
        let v = evaluate("Minus", &vec![Value::Int(2), Value::Int(2)]).unwrap();
        assert_eq!(v, Value::Int(0));
        let v = evaluate("Times", &vec![Value::Int(2), Value::Int(2)]).unwrap();
        assert_eq!(v, Value::Int(4));
        let v = evaluate("Divide", &vec![Value::Int(2), Value::Int(2)]).unwrap();
        assert_eq!(v, Value::Int(1));
        let dt = Value::DateTime(chrono::DateTime::parse_from_rfc3339("2015-11-07T18:45:37.691548Z").unwrap());
        // 5-second bucket truncates :37.691548 down to :35.
        let expected_dt = Value::DateTime(chrono::DateTime::parse_from_rfc3339("2015-11-07T18:45:35.000000Z").unwrap());
        let bucket_dt = evaluate("time_bucket", &vec![Value::String("5 seconds".to_string()), dt.clone()]).unwrap();
        assert_eq!(expected_dt, bucket_dt);
        // 5-minute bucket truncates 18:45 down to 18:45:00.
        let expected_dt = Value::DateTime(chrono::DateTime::parse_from_rfc3339("2015-11-07T18:45:00.000000Z").unwrap());
        let bucket_dt = evaluate("time_bucket", &vec![Value::String("5 minutes".to_string()), dt.clone()]).unwrap();
        assert_eq!(expected_dt, bucket_dt);
        // 1-hour bucket truncates to the top of the hour.
        let expected_dt = Value::DateTime(chrono::DateTime::parse_from_rfc3339("2015-11-07T18:00:00.000000Z").unwrap());
        let bucket_dt = evaluate("time_bucket", &vec![Value::String("1 hour".to_string()), dt.clone()]).unwrap();
        assert_eq!(expected_dt, bucket_dt);
        // date_part("second") returns the seconds component as a Float.
        let hour = evaluate("date_part", &vec![Value::String("second".to_string()), dt.clone()]).unwrap();
        assert_eq!(Value::Float(OrderedFloat::from(37.0)), hour);
    }
}
|
//
// Copyright 2020 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use anyhow::Context;
use assert_matches::assert_matches;
use maplit::hashmap;
use oak_abi::{
label::{confidentiality_label, web_assembly_module_signature_tag},
proto::oak::application::ConfigMap,
};
use oak_sign::{read_pem_file, KeyPair, SignatureBundle};
use private_set_intersection_grpc::proto::{
private_set_intersection_client::PrivateSetIntersectionClient, GetIntersectionRequest,
SubmitSetRequest,
};
use std::{
collections::{HashMap, HashSet},
iter::FromIterator,
};
// Base64 encoded Ed25519 private key corresponding to Wasm module signature.
const PRIVATE_KEY_FILE: &str = "../../../keys/ed25519/test.key";
// Cargo manifests of the Wasm modules built for this test.
const MAIN_MODULE_MANIFEST: &str = "../../main_module/rust/Cargo.toml";
const HANDLER_MODULE_MANIFEST: &str = "../../handler_module/rust/Cargo.toml";
// Module names used in the runtime configuration.
const MAIN_MODULE_NAME: &str = "app";
const HANDLER_MODULE_NAME: &str = "handler";
// Wasm entrypoint exported by the main module.
const ENTRYPOINT_NAME: &str = "oak_main";
// Identifier of the set used by the test client.
const TEST_SET_ID: &str = "test";
fn build_wasm() -> anyhow::Result<HashMap<String, Vec<u8>>> {
Ok(hashmap! {
MAIN_MODULE_NAME.to_owned() => oak_tests::compile_rust_wasm(MAIN_MODULE_MANIFEST, oak_tests::Profile::Release).context("Couldn't compile main module")?,
HANDLER_MODULE_NAME.to_owned() => oak_tests::compile_rust_wasm(HANDLER_MODULE_MANIFEST, oak_tests::Profile::Release).context("Couldn't compile handler module")?,
})
}
/// Sign `input` with the Ed25519 test key read from `PRIVATE_KEY_FILE`.
fn sign(input: &[u8]) -> anyhow::Result<SignatureBundle> {
    let pem = read_pem_file(PRIVATE_KEY_FILE)?;
    let key_pair = KeyPair::parse(&pem)?;
    SignatureBundle::create(input, &key_pair)
}
// End-to-end test: builds and signs the Wasm modules, starts an Oak runtime,
// and exercises the private-set-intersection protocol over gRPC. The exact
// call sequence matters: submissions past the threshold and submissions made
// after the intersection has been requested must be rejected.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_set_intersection() {
    let _ = env_logger::builder().is_test(true).try_init();
    let wasm_modules = build_wasm().expect("Couldn't compile Wasm modules");
    // Only the handler module is signed; the client later labels its data
    // with this signature's public key.
    let signature =
        sign(wasm_modules.get(HANDLER_MODULE_NAME).unwrap()).expect("Couldn't sign Wasm module");
    let permissions = oak_runtime::permissions::PermissionsConfiguration {
        allow_grpc_server_nodes: true,
        allow_log_nodes: true,
        ..Default::default()
    };
    let config = oak_tests::runtime_config_wasm(
        wasm_modules,
        MAIN_MODULE_NAME,
        ENTRYPOINT_NAME,
        ConfigMap::default(),
        permissions,
        // Register the signature so the runtime can verify the handler module.
        oak_runtime::SignatureTable {
            values: hashmap! {
                hex::encode(&signature.hash) => vec![signature.clone()]
            },
        },
    );
    let runtime =
        oak_runtime::configure_and_run(config).expect("Unable to configure runtime with test wasm");
    // Label client data with the module-signature confidentiality tag.
    let public_key_label =
        confidentiality_label(web_assembly_module_signature_tag(&signature.public_key_der));
    let (channel, interceptor) = oak_tests::channel_and_interceptor(&public_key_label).await;
    let mut client = PrivateSetIntersectionClient::with_interceptor(channel, interceptor);
    // First two submissions are within the threshold and should succeed.
    let req = SubmitSetRequest {
        set_id: TEST_SET_ID.to_string(),
        values: vec!["a".to_string(), "b".to_string(), "c".to_string()],
    };
    let result = client.submit_set(req).await;
    assert_matches!(result, Ok(_));
    let req = SubmitSetRequest {
        set_id: TEST_SET_ID.to_string(),
        values: vec!["b".to_string(), "c".to_string(), "d".to_string()],
    };
    let result = client.submit_set(req).await;
    assert_matches!(result, Ok(_));
    // Send more sets than threshold.
    let req = SubmitSetRequest {
        set_id: TEST_SET_ID.to_string(),
        values: vec!["c".to_string()],
    };
    let result = client.submit_set(req).await;
    assert_matches!(result, Err(_));
    let result = client
        .get_intersection(GetIntersectionRequest {
            set_id: TEST_SET_ID.to_string(),
        })
        .await;
    assert_matches!(result, Ok(_));
    // The intersection of {a,b,c} and {b,c,d} is {b,c}.
    let got = HashSet::<String>::from_iter(result.unwrap().into_inner().values.to_vec());
    let want: HashSet<String> = ["b".to_string(), "c".to_string()].iter().cloned().collect();
    assert_eq!(got, want);
    // Send a new set after the intersection was requested.
    let req = SubmitSetRequest {
        set_id: TEST_SET_ID.to_string(),
        values: vec!["c".to_string()],
    };
    let result = client.submit_set(req).await;
    assert_matches!(result, Err(_));
    runtime.stop();
}
|
extern crate arrayfire;
extern crate fnv;
use arrayfire::{Array, Dim4, constant, add, col, row, cols, rows, join, lookup, set_col, set_row, maxof, replace, gt, ge, eq, max_all};
use std::hash::BuildHasherDefault;
use std::collections::HashMap;
use fnv::FnvHasher;
pub type BElm = u8;
type BSq = Vec<BElm>;
type AlgnPs = usize;
#[derive(Debug)]
struct AlgnPsPr {
strt_ps: AlgnPs,
end_ps: AlgnPs,
}
type AlgnScr = f32;
#[derive(Debug)]
pub struct PrAlgn {
algn: (BSq, BSq),
algn_ps_pr_pr: (AlgnPsPr, AlgnPsPr),
algn_scr: AlgnScr,
}
type Hshr = BuildHasherDefault<FnvHasher>;
pub type SbstMt = HashMap<(BElm, BElm), AlgnScr, Hshr>;
/// Affine gap scoring scheme (penalties are typically negative).
pub struct AlgnScrSchm {
    gp_opn_pnlty: AlgnScr,    // penalty added when a gap is opened
    gp_extnsn_pnlty: AlgnScr, // penalty added for every gap position
}
impl AlgnScrSchm {
    /// Builds a scoring scheme from a gap-open and a gap-extension penalty.
    pub fn new(gp_opn_pnlty: AlgnScr, gp_extnsn_pnlty: AlgnScr) -> AlgnScrSchm {
        AlgnScrSchm {
            gp_opn_pnlty,
            gp_extnsn_pnlty,
        }
    }
}
type AfDm = u64;
pub type Alphbt<'a> = &'a[BElm];
type DpSrc = u32;
const GP: BElm = '-' as BElm;
const DGNL: DpSrc = 0;
const VRTCL: DpSrc = DGNL + 1;
const HRZNTL: DpSrc = VRTCL + 1;
/// Run the Smith Waterman algorithm on GPU.
/// # Examples
///
/// ```rust
/// use self::gpu_sw::{gpu_sw, gt_alphbt, SbstMt, AlgnScrSchm};
///
/// let is_dna = true;
/// let alphbt = gt_alphbt(is_dna);
/// let b_sq_pr = (&b"GGTTGACTA"[..], &b"TGTTACGG"[..]);
/// let mut sbst_mt = SbstMt::default();
/// for &alphbt_elm_1 in alphbt.iter() {
/// for &alphbt_elm_2 in alphbt.iter() {
/// sbst_mt.insert((alphbt_elm_1, alphbt_elm_2), if alphbt_elm_1 == alphbt_elm_2 {1.} else {-1.});
/// }
/// }
/// let algn_scr_schm = AlgnScrSchm::new(-7., -1.);
/// let pr_algn = gpu_sw(&b_sq_pr, &sbst_mt, &algn_scr_schm, is_dna);
/// println!("{:?}.", &pr_algn);
/// ```
///
pub fn gpu_sw(b_sq_pr: &(&[BElm], &[BElm]), sbst_mt: &SbstMt, algn_scr_schm: &AlgnScrSchm, is_dna: bool) -> PrAlgn {
let b_sq_ln_pr = (b_sq_pr.0.len(), b_sq_pr.1.len());
let alphbt = gt_alphbt(is_dna);
let gpu_b_sq_pr = (
Array::new(&b_sq_pr.0.iter().map(|&b_elm| gt_hsh_b_elm(b_elm, alphbt)).collect::<BSq>(), Dim4::new(&[b_sq_ln_pr.0 as AfDm, 1, 1, 1])),
Array::new(&b_sq_pr.1.iter().map(|&b_elm| gt_hsh_b_elm(b_elm, alphbt)).collect::<BSq>(), Dim4::new(&[b_sq_ln_pr.1 as AfDm, 1, 1, 1])),
);
let alphbt_ln = alphbt.len();
let mut hsh_sbst_mt = vec![vec![0.; alphbt_ln]; alphbt_ln];
for (b_elm_pr, &sbst_scr) in sbst_mt {
hsh_sbst_mt[gt_hsh_b_elm(b_elm_pr.0, alphbt) as usize][gt_hsh_b_elm(b_elm_pr.1, alphbt) as usize] = sbst_scr;
}
let hsh_sbst_mt = hsh_sbst_mt.iter().flat_map(|sbst_scr| sbst_scr.clone()).collect::<Vec<AlgnScr>>();
let gpu_sbst_mt = Array::new(&hsh_sbst_mt, Dim4::new(&[alphbt_ln as AfDm, alphbt_ln as AfDm, 1, 1]));
let scr_mt = lookup(&lookup(&gpu_sbst_mt, &gpu_b_sq_pr.0, 0), &gpu_b_sq_pr.1, 1);
let scr_mt_dms = scr_mt.dims();
let mut dp_mt = constant(0. as AlgnScr, Dim4::new(&[scr_mt_dms[0] + 1, scr_mt_dms[1] + 1, 1, 1]));
let (gp_opn_pnlty, gp_extnsn_pnlty) = (algn_scr_schm.gp_opn_pnlty, algn_scr_schm.gp_extnsn_pnlty);
let dp_mt_dms = dp_mt.dims();
let mut src_mt = constant(DGNL, Dim4::new(&[1, dp_mt_dms[1], 1, 1]));
let rw_tl_dms = Dim4::new(&[1, scr_mt_dms[1], 1, 1]);
let cl_dms = Dim4::new(&[1, 1, 1, 1]);
for i in 1 .. dp_mt_dms[0] {
let prvs_rw = row(&dp_mt, i - 1);
let mut prvs_rw_tl = cols(&prvs_rw, 1, dp_mt_dms[1] - 1).copy();
replace(&mut prvs_rw_tl, &eq(&cols(&row(&src_mt, i - 1), 1, dp_mt_dms[1] - 1), &VRTCL, false), &add(&cols(&prvs_rw, 1, dp_mt_dms[1] - 1), &gp_opn_pnlty, false));
let prvs_rw_tl = add(&prvs_rw_tl, &gp_extnsn_pnlty, false);
let prvs_rw_hd = add(&cols(&prvs_rw, 0, dp_mt_dms[1] - 2), &cols(&row(&scr_mt, i - 1), 0, dp_mt_dms[1] - 2), false);
let mut nw_rw_tl = constant(DGNL, rw_tl_dms);
replace(&mut nw_rw_tl, &ge(&prvs_rw_hd, &prvs_rw_tl, false), &constant(VRTCL, rw_tl_dms));
src_mt = join(0, &src_mt, &join(1, &constant(DGNL, cl_dms), &nw_rw_tl));
dp_mt = set_row(&dp_mt, &join(1, &col(&row(&dp_mt, i), 0), &maxof(&maxof(&prvs_rw_tl, &prvs_rw_hd, false), &constant(0. as AlgnScr, rw_tl_dms), false)), i);
}
let clmn_tl_dms = Dim4::new(&[scr_mt_dms[0], 1, 1, 1]);
for i in 1 .. dp_mt_dms[1] {
let prvs_clmn = col(&dp_mt, i - 1);
let mut prvs_clmn_tl = rows(&prvs_clmn, 1, dp_mt_dms[0] - 1).copy();
replace(&mut prvs_clmn_tl, &eq(&rows(&col(&src_mt, i - 1), 1, dp_mt_dms[0] - 1), &HRZNTL, false), &add(&rows(&prvs_clmn, 1, dp_mt_dms[0] - 1), &gp_opn_pnlty, false));
let prvs_clmn_tl = add(&prvs_clmn_tl, &gp_extnsn_pnlty, false);
let prvs_clmn_hd = add(&rows(&prvs_clmn, 0, dp_mt_dms[0] - 2), &rows(&col(&scr_mt, i - 1), 0, dp_mt_dms[0] - 2), false);
let crnt_clmn_tl = rows(&col(&dp_mt, i), 1, dp_mt_dms[0] - 1);
let mx = maxof(&prvs_clmn_hd, &crnt_clmn_tl, false);
let mut src_clmn_tl = rows(&col(&src_mt, i), 1, dp_mt_dms[0] - 1).copy();
replace(&mut src_clmn_tl, >(&crnt_clmn_tl, &prvs_clmn_hd, false), &constant(DGNL, clmn_tl_dms));
replace(&mut src_clmn_tl, &ge(&mx, &prvs_clmn_tl, false), &constant(HRZNTL, clmn_tl_dms));
src_mt = set_col(&src_mt, &join(0, &row(&col(&src_mt, i), 0), &src_clmn_tl), i);
dp_mt = set_col(&dp_mt, &join(0, &row(&col(&dp_mt, i), 0), &maxof(&prvs_clmn_tl, &mx, false)), i);
}
let dp_mt_elm_nm = dp_mt.elements() as usize;
let mut cpu_dp_mt = vec![0. as AlgnScr; dp_mt_elm_nm];
let mut cpu_src_mt = vec![DGNL; dp_mt_elm_nm];
dp_mt.host(&mut cpu_dp_mt);
src_mt.host(&mut cpu_src_mt);
let mx_scr = max_all(&dp_mt).0 as AlgnScr;
let mut dp_mt = vec![vec![0. as AlgnScr; dp_mt_dms[1] as usize]; dp_mt_dms[0] as usize];
let mut src_mt = vec![vec![DGNL; dp_mt_dms[1] as usize]; dp_mt_dms[0] as usize];
let mut fnd_ps_pr = (0, 0);
let mut is_ps_pr_fnd = false;
for (i, (&dp_mt_elm, &src)) in cpu_dp_mt.iter().zip(cpu_src_mt.iter()).enumerate() {
let ps_pr = (i % (dp_mt_dms[0] as usize), i / (dp_mt_dms[0] as usize));
dp_mt[ps_pr.0][ps_pr.1] = dp_mt_elm;
src_mt[ps_pr.0][ps_pr.1] = src;
if !is_ps_pr_fnd && dp_mt[ps_pr.0][ps_pr.1] == mx_scr {
fnd_ps_pr = ps_pr;
is_ps_pr_fnd = true;
}
}
let mut pr_algn = (Vec::new(), Vec::new());
let (mut i, mut j) = fnd_ps_pr;
let (mut prvs_i, mut prvs_j) = (i, j);
while i > 0 || j > 0 {
if dp_mt[i][j] == 0. {
break;
}
prvs_i = i;
prvs_j = j;
if j == 0 {
pr_algn.0.insert(0, b_sq_pr.0[i - 1]);
pr_algn.1.insert(0, GP);
i -= 1;
continue;
} else if i == 0 {
pr_algn.0.insert(0, GP);
pr_algn.1.insert(0, b_sq_pr.1[j - 1]);
j -= 1;
continue;
}
let src = src_mt[i][j];
if src == DGNL {
pr_algn.0.insert(0, b_sq_pr.0[i - 1]);
pr_algn.1.insert(0, b_sq_pr.1[j - 1]);
i -= 1;
j -= 1;
} else if src == VRTCL {
pr_algn.0.insert(0, b_sq_pr.0[i - 1]);
pr_algn.1.insert(0, GP);
i -= 1;
} else {
pr_algn.0.insert(0, GP);
pr_algn.1.insert(0, b_sq_pr.1[j - 1]);
j -= 1;
}
}
let pr_algn = PrAlgn {
algn: pr_algn,
algn_ps_pr_pr: (AlgnPsPr {strt_ps: if prvs_i == 0 {0} else {prvs_i - 1}, end_ps: fnd_ps_pr.0 - 1}, AlgnPsPr {strt_ps: if prvs_j == 0 {0} else {prvs_j - 1}, end_ps: fnd_ps_pr.1 - 1}),
algn_scr: mx_scr,
};
pr_algn
}
/// Returns the residue alphabet: IUPAC nucleotide codes when `is_dna`,
/// otherwise amino-acid codes — each in upper- then lower-case.
pub fn gt_alphbt<'a>(is_dna: bool) -> Alphbt<'a> {
    match is_dna {
        true => b"ACGTURYSWKMBDHVNacgturyswkmbdhvn",
        false => b"ARNDCEQGHILKMFPSTWYVarndceqghilkmfpstwyv",
    }
}
/// Maps a residue byte to its index within `alphbt`; the index is the
/// "hashed" element used to address the dense substitution matrix.
/// Panics when the byte is not part of the alphabet.
fn gt_hsh_b_elm(b_elm: BElm, alphbt: Alphbt) -> BElm {
    let idx = alphbt
        .iter()
        .enumerate()
        .find(|&(_, &alphbt_elm)| alphbt_elm == b_elm)
        .map(|(idx, _)| idx)
        .expect("Failed to get hashed bio elem.");
    idx as BElm
}
#[cfg(test)]
mod tsts {
    use super::{gpu_sw, gt_alphbt, SbstMt, AlgnScrSchm};
    use super::arrayfire::{set_device, info};
    use super::std::str::from_utf8;
    /// Smoke test: aligns two short DNA sequences on ArrayFire device 0 and
    /// prints the result (no assertions; requires a working AF backend).
    #[test]
    fn tst_gpu_sw() {
        set_device(0);
        info();
        let is_dna = true;
        let alphbt = gt_alphbt(is_dna);
        let b_sq_pr = (&b"GGTTGACTA"[..], &b"TGTTACGG"[..]);
        println!("Seq. pair to align:");
        println!("{}", from_utf8(&b_sq_pr.0).expect("Failed to get Bio seq."));
        println!("{}", from_utf8(&b_sq_pr.1).expect("Failed to get Bio seq."));
        // Simple match/mismatch substitution matrix over the whole alphabet.
        let mut sbst_mt = SbstMt::default();
        for &alphbt_elm_1 in alphbt.iter() {
            for &alphbt_elm_2 in alphbt.iter() {
                sbst_mt.insert((alphbt_elm_1, alphbt_elm_2), if alphbt_elm_1 == alphbt_elm_2 {3.} else {-3.});
            }
        }
        let algn_scr_schm = AlgnScrSchm::new(-0., -2.);
        let pr_algn = gpu_sw(&b_sq_pr, &sbst_mt, &algn_scr_schm, is_dna);
        println!("{:?}", &pr_algn);
        println!("Alignment:");
        println!("{}", from_utf8(&pr_algn.algn.0).expect("Failed to get alignment."));
        println!("{}", from_utf8(&pr_algn.algn.1).expect("Failed to get alignment."));
    }
}
|
// Problem 9 - Special Pythagorean triplet
//
// A Pythagorean triplet is a set of three natural numbers, a < b < c, for
// which, a² + b² = c²
//
// For example, 3² + 4² = 9 + 16 = 25 = 5².
//
// There exists exactly one Pythagorean triplet for which a + b + c = 1000.
// Find the product abc.
fn main() {
    // Compute the answer once, then print it.
    let answer = solution();
    println!("{}", answer);
}
/// Solves Euler #9 for the fixed perimeter 1000.
/// Returns 0 if no triplet exists (cannot happen for 1000).
fn solution() -> i32 {
    triplet_product(1000).unwrap_or(0)
}

/// Finds the product `a*b*c` of a Pythagorean triplet (a < b < c) with
/// `a + b + c == perimeter`, or `None` when no such triplet exists.
/// Generalizes the original hard-coded perimeter of 1000.
fn triplet_product(perimeter: i32) -> Option<i32> {
    // c is the largest side, so c > perimeter/3; it must also leave room
    // for a >= 1 and b >= 2, hence c <= perimeter - 3.
    let min_c = perimeter / 3 + 1;
    let max_c = perimeter - 3;
    for c in min_c..=max_c {
        // b > (perimeter - c)/2 guarantees a < b.
        let min_b = (perimeter - c) / 2 + 1;
        let max_b = perimeter - c - 1;
        for b in min_b..=max_b {
            let a = perimeter - b - c;
            if a * a + b * b == c * c {
                return Some(a * b * c);
            }
        }
    }
    None
}
|
/// Connection parameters for an assistant service; all fields start empty
/// and are populated via the `set_*` methods.
#[derive(Debug, Clone)]
pub struct AssistantClient{
    url: String,     // base service URL
    version: String, // API version string
    api_key: String  // authentication key
}
impl AssistantClient {
    /// Creates a client with all connection parameters empty;
    /// fill them in with the `set_*` methods before use.
    pub fn new() -> AssistantClient {
        AssistantClient {
            url: String::new(),
            version: String::new(),
            api_key: String::new(),
        }
    }

    /// Sets the base service URL.
    pub fn set_service_url(&mut self, url: &str) {
        self.url = url.to_string();
    }

    /// Sets the API key used for authentication.
    pub fn set_api_key(&mut self, api_key: &str) {
        self.api_key = api_key.to_string();
    }

    /// Sets the API version string.
    pub fn set_version(&mut self, version: &str) {
        self.version = version.to_string();
    }
}
#[cfg(test)]
mod tests {
    use crate::AssistantClient;

    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }

    /// A fresh client round-trips the values passed to its setters.
    #[test]
    fn new_assistant() {
        let mut client: AssistantClient = AssistantClient::new();
        let url = "";
        let version = "";
        let api_key = "";
        client.set_service_url(url);
        client.set_version(version);
        client.set_api_key(api_key);
        assert_eq!(client.url, url.to_string());
        assert_eq!(client.api_key, api_key.to_string());
        assert_eq!(client.version, version.to_string());
    }

    // TODO: add session-creation coverage.
    // (Renamed from the misspelled `new_sessesion`; test fns have no callers,
    // so the rename is interface-safe.)
    #[test]
    fn new_session() {}
}
|
use std::io;
use std::net::{Ipv4Addr, UdpSocket, SocketAddr};
use std::time::Duration;
use std::thread;
use esp_idf_hal::interface::Interface;
use dnsparse::{Header, HeaderKind, Answer, QueryKind, QueryClass, Message, OpCode, ResponseCode};
/// Builds and sends a DNS response for `request` back to `src`.
///
/// Only single-question A/IN queries are considered. The name
/// `captive.apple.com` resolves to `ip` (captive-portal style); any other
/// A/IN name gets `NonExistentDomain`; everything else keeps the default
/// `NotImplemented` response code.
///
/// Returns the number of bytes written to the socket.
pub fn handle_request(socket: &UdpSocket, src: SocketAddr, request: Message, ip: &Ipv4Addr) -> io::Result<usize> {
    // Echo the request id and recursion flag; start pessimistic with
    // NotImplemented until we see a query we can answer.
    let response_header = Header::builder()
        .id(request.header().id())
        .kind(HeaderKind::Response)
        .recursion_desired(request.header().recursion_desired())
        .response_code(ResponseCode::NotImplemented);
    let mut buf = Message::BUFFER;
    let mut response = Message::builder(&mut buf)
        .header(response_header.build())
        .build();
    let question_count = request.header().question_count();
    let kind = request.header().kind();
    let opcode = request.header().opcode();
    if question_count == 1 && kind == HeaderKind::Query && opcode == OpCode::Query {
        for question in request.questions() {
            if *question.kind() == QueryKind::A && *question.class() == QueryClass::IN {
                if question.name() == "captive.apple.com" {
                    response.header_mut().set_response_code(ResponseCode::NoError);
                    // A-record answer pointing at our own IPv4 address.
                    let answer = Answer {
                        name: question.name().clone(),
                        kind: *question.kind(),
                        class: *question.class(),
                        ttl: 60,
                        rdata: &ip.octets(),
                    };
                    response.add_question(&question);
                    response.add_answer(&answer);
                } else {
                    // Unknown name: NXDOMAIN, and stop inspecting questions.
                    response.header_mut().set_response_code(ResponseCode::NonExistentDomain);
                    break;
                }
            }
        }
    }
    socket.send_to(&response, src)
}
/// Runs a blocking DNS server loop on UDP port 53, answering with the
/// access-point interface's IP. Never returns; malformed requests and
/// read timeouts are logged/ignored and the loop continues.
pub fn server() {
    println!("Starting DNS server …");
    let socket = UdpSocket::bind("0.0.0.0:53").unwrap();
    // Timeouts keep recv_from from blocking forever so the loop can yield.
    socket.set_read_timeout(Some(Duration::from_secs(30))).unwrap();
    socket.set_write_timeout(Some(Duration::from_secs(30))).unwrap();
    let ip = *Interface::Ap.ip_info().ip();
    println!("IP: {:?}", ip);
    loop {
        // Give other threads a chance to run between requests.
        thread::yield_now();
        let mut buf = Message::BUFFER;
        let (len, src) = match socket.recv_from(&mut buf) {
            Ok(ok) => ok,
            Err(err) => {
                // WouldBlock is just the read timeout expiring — stay quiet.
                if err.kind() != std::io::ErrorKind::WouldBlock {
                    eprintln!("Receiving DNS request failed: {}", err);
                }
                continue
            }
        };
        let request = if let Ok(frame) = Message::parse(&mut buf[..len]) {
            frame
        } else {
            eprintln!("Failed to parse DNS request.");
            continue
        };
        if let Err(err) = handle_request(&socket, src, request, &ip) {
            eprintln!("Error sending response to '{:?}': {}", src, err);
        }
    }
}
|
use super::*;
use nom::{multi::many0, sequence::pair};
/// Parses `inner` wrapped in parentheses: `( inner )`.
pub fn paren<'a, InnerP, Inner>(inner: InnerP) -> impl Parser<Input<'a>, Paren<Inner>, Err>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
{
    delimited(lparen, inner, rparen)
}
/// Parses a parenthesized, comma-separated list of `inner` items
/// (zero or more, trailing comma allowed): `( a, b, c, )`.
pub fn tuple<'a, InnerP, Inner>(inner: InnerP) -> impl Parser<Input<'a>, Tuple<Inner>, Err>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
{
    paren(punctuated0(inner, comma))
}
/// Parses `inner` wrapped in curly braces: `{ inner }`.
pub fn curly<'a, InnerP, Inner>(inner: InnerP) -> impl Parser<Input<'a>, Curly<Inner>, Err>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
{
    delimited(lcurly, inner, rcurly)
}
/// Builds a [`DelimitedP`] parser that runs `open`, then `inner`, then `close`.
pub const fn delimited<Open, Inner, Close>(
    open: Open,
    inner: Inner,
    close: Close,
) -> DelimitedP<Open, Inner, Close> {
    DelimitedP { open, inner, close }
}
/// Parses zero or more `inner` items separated by `sep`, allowing a trailing
/// separator. Never fails on a recoverable error: it yields an empty
/// `Punctuated0` instead.
pub fn punctuated0<'a, Inner, Sep, InnerP, SepP>(
    mut inner: InnerP,
    mut sep: SepP,
) -> impl FnMut(Input<'a>) -> IResult<Punctuated0<Inner, Sep>>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
    SepP: Parser<Input<'a>, Sep, Err>,
{
    // Delegate to the one-or-more parser; `opt()` turns its soft failure into None.
    move |input| match punctuated1(inner.by_ref(), sep.by_ref()).opt().parse(input) {
        Ok((input, Some(Punctuated1 { first, tail, trail }))) => Ok((
            input,
            Punctuated0 {
                first: Some(first),
                tail,
                trail,
            },
        )),
        // No items at all: empty list, input already rebound to the remainder.
        Ok((input, None)) => Ok((input, Punctuated0::default())),
        // Defensive: a recoverable error also yields the empty list; note that
        // `input` here is the untouched closure argument (nothing consumed).
        Err(nom::Err::Error(_)) => Ok((input, Punctuated0::default())),
        // `Failure`/`Incomplete` propagate unchanged.
        Err(e) => Err(e),
    }
}
/// Parses zero or more `inner` items separated by `sep`, with NO trailing
/// separator permitted. Recoverable errors yield an empty list.
pub fn punctuated0_no_trail<'a, Inner, Sep, InnerP, SepP>(
    mut inner: InnerP,
    mut sep: SepP,
) -> impl FnMut(Input<'a>) -> IResult<Punctuated0NoTrail<Inner, Sep>>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
    SepP: Parser<Input<'a>, Sep, Err>,
{
    // Same shape as `punctuated0`, built on the no-trailing-separator variant.
    move |input| match punctuated1_no_trail(inner.by_ref(), sep.by_ref())
        .opt()
        .parse(input)
    {
        Ok((input, Some(Punctuated1NoTrail { first, tail }))) => Ok((
            input,
            Punctuated0NoTrail {
                first: Some(first),
                tail,
            },
        )),
        Ok((input, None)) => Ok((input, Punctuated0NoTrail::default())),
        // Defensive: recoverable errors consume nothing and yield the default.
        Err(nom::Err::Error(_)) => Ok((input, Punctuated0NoTrail::default())),
        Err(e) => Err(e),
    }
}
/// Parses one or more `inner` items separated by `sep`, with an optional
/// trailing separator captured in `trail`.
pub fn punctuated1<'a, Inner, Sep, InnerP, SepP>(
    mut inner: InnerP,
    mut sep: SepP,
) -> impl FnMut(Input<'a>) -> IResult<Punctuated1<Inner, Sep>>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
    SepP: Parser<Input<'a>, Sep, Err>,
{
    move |input| {
        // first item, then any number of (separator, item) pairs,
        // then an optional dangling separator.
        let (input, first) = inner.parse(input)?;
        let (input, tail) = many0(pair(sep.by_ref(), inner.by_ref())).parse(input)?;
        let (input, trail) = sep.by_ref().opt().parse(input)?;
        Ok((input, Punctuated1 { first, tail, trail }))
    }
}
/// Parses one or more `inner` items separated by `sep`; a trailing
/// separator is NOT consumed (it is left in the input).
pub fn punctuated1_no_trail<'a, Inner, Sep, InnerP, SepP>(
    mut inner: InnerP,
    mut sep: SepP,
) -> impl FnMut(Input<'a>) -> IResult<Punctuated1NoTrail<Inner, Sep>>
where
    InnerP: Parser<Input<'a>, Inner, Err>,
    SepP: Parser<Input<'a>, Sep, Err>,
{
    move |input| {
        let (input, first) = inner.parse(input)?;
        let (input, tail) = many0(pair(sep.by_ref(), inner.by_ref())).parse(input)?;
        Ok((input, Punctuated1NoTrail { first, tail }))
    }
}
/// Parser combinator that runs `open`, `inner`, `close` in sequence.
/// Construct via [`delimited`].
pub struct DelimitedP<Open, Inner, Close> {
    open: Open,
    inner: Inner,
    close: Close,
}
impl<I, E, Open, Inner, Close, OpenP, InnerP, CloseP> Parser<I, Delimited<Inner, Open, Close>, E>
    for DelimitedP<OpenP, InnerP, CloseP>
where
    OpenP: Parser<I, Open, E>,
    CloseP: Parser<I, Close, E>,
    InnerP: Parser<I, Inner, E>,
{
    /// Runs the three sub-parsers in order and boxes the inner value.
    fn parse(&mut self, input: I) -> nom::IResult<I, Delimited<Inner, Open, Close>, E> {
        let (input, open) = self.open.parse(input)?;
        let (input, inner) = self.inner.parse(input)?;
        let (input, close) = self.close.parse(input)?;
        Ok((
            input,
            Delimited {
                open,
                // `box inner` used the nightly `box_syntax` feature, which was
                // removed from the compiler; `Box::new` is the stable equivalent.
                inner: Box::new(inner),
                close,
            },
        ))
    }
}
|
#[doc = r"Register block"]
// NOTE(review): generated by svd2rust — field order and sizes define the
// memory-mapped layout of the DMA2D peripheral (offsets in the per-field
// docs). Do not reorder, insert, or resize fields by hand.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - DMA2D control register"]
    pub cr: CR,
    #[doc = "0x04 - DMA2D Interrupt Status Register"]
    pub isr: ISR,
    #[doc = "0x08 - DMA2D interrupt flag clear register"]
    pub ifcr: IFCR,
    #[doc = "0x0c - DMA2D foreground memory address register"]
    pub fgmar: FGMAR,
    #[doc = "0x10 - DMA2D foreground offset register"]
    pub fgor: FGOR,
    #[doc = "0x14 - DMA2D background memory address register"]
    pub bgmar: BGMAR,
    #[doc = "0x18 - DMA2D background offset register"]
    pub bgor: BGOR,
    #[doc = "0x1c - DMA2D foreground PFC control register"]
    pub fgpfccr: FGPFCCR,
    #[doc = "0x20 - DMA2D foreground color register"]
    pub fgcolr: FGCOLR,
    #[doc = "0x24 - DMA2D background PFC control register"]
    pub bgpfccr: BGPFCCR,
    #[doc = "0x28 - DMA2D background color register"]
    pub bgcolr: BGCOLR,
    #[doc = "0x2c - DMA2D foreground CLUT memory address register"]
    pub fgcmar: FGCMAR,
    #[doc = "0x30 - DMA2D background CLUT memory address register"]
    pub bgcmar: BGCMAR,
    #[doc = "0x34 - DMA2D output PFC control register"]
    pub opfccr: OPFCCR,
    #[doc = "0x38 - DMA2D output color register"]
    pub ocolr: OCOLR,
    #[doc = "0x3c - DMA2D output memory address register"]
    pub omar: OMAR,
    #[doc = "0x40 - DMA2D output offset register"]
    pub oor: OOR,
    #[doc = "0x44 - DMA2D number of line register"]
    pub nlr: NLR,
    #[doc = "0x48 - DMA2D line watermark register"]
    pub lwr: LWR,
    #[doc = "0x4c - DMA2D AXI master timer configuration register"]
    pub amtcr: AMTCR,
}
#[doc = "CR (rw) register accessor: DMA2D control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`]
module"]
pub type CR = crate::Reg<cr::CR_SPEC>;
#[doc = "DMA2D control register"]
pub mod cr;
#[doc = "ISR (r) register accessor: DMA2D Interrupt Status Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr`]
module"]
pub type ISR = crate::Reg<isr::ISR_SPEC>;
#[doc = "DMA2D Interrupt Status Register"]
pub mod isr;
#[doc = "IFCR (rw) register accessor: DMA2D interrupt flag clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ifcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ifcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ifcr`]
module"]
pub type IFCR = crate::Reg<ifcr::IFCR_SPEC>;
#[doc = "DMA2D interrupt flag clear register"]
pub mod ifcr;
#[doc = "FGMAR (rw) register accessor: DMA2D foreground memory address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fgmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fgmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fgmar`]
module"]
pub type FGMAR = crate::Reg<fgmar::FGMAR_SPEC>;
#[doc = "DMA2D foreground memory address register"]
pub mod fgmar;
#[doc = "FGOR (rw) register accessor: DMA2D foreground offset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fgor::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fgor::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fgor`]
module"]
pub type FGOR = crate::Reg<fgor::FGOR_SPEC>;
#[doc = "DMA2D foreground offset register"]
pub mod fgor;
#[doc = "BGMAR (rw) register accessor: DMA2D background memory address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bgmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bgmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bgmar`]
module"]
pub type BGMAR = crate::Reg<bgmar::BGMAR_SPEC>;
#[doc = "DMA2D background memory address register"]
pub mod bgmar;
#[doc = "BGOR (rw) register accessor: DMA2D background offset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bgor::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bgor::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bgor`]
module"]
pub type BGOR = crate::Reg<bgor::BGOR_SPEC>;
#[doc = "DMA2D background offset register"]
pub mod bgor;
#[doc = "FGPFCCR (rw) register accessor: DMA2D foreground PFC control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fgpfccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fgpfccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fgpfccr`]
module"]
pub type FGPFCCR = crate::Reg<fgpfccr::FGPFCCR_SPEC>;
#[doc = "DMA2D foreground PFC control register"]
pub mod fgpfccr;
#[doc = "FGCOLR (rw) register accessor: DMA2D foreground color register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fgcolr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fgcolr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fgcolr`]
module"]
pub type FGCOLR = crate::Reg<fgcolr::FGCOLR_SPEC>;
#[doc = "DMA2D foreground color register"]
pub mod fgcolr;
#[doc = "BGPFCCR (rw) register accessor: DMA2D background PFC control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bgpfccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bgpfccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bgpfccr`]
module"]
pub type BGPFCCR = crate::Reg<bgpfccr::BGPFCCR_SPEC>;
#[doc = "DMA2D background PFC control register"]
pub mod bgpfccr;
#[doc = "BGCOLR (rw) register accessor: DMA2D background color register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bgcolr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bgcolr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bgcolr`]
module"]
pub type BGCOLR = crate::Reg<bgcolr::BGCOLR_SPEC>;
#[doc = "DMA2D background color register"]
pub mod bgcolr;
#[doc = "FGCMAR (rw) register accessor: DMA2D foreground CLUT memory address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fgcmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fgcmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fgcmar`]
module"]
pub type FGCMAR = crate::Reg<fgcmar::FGCMAR_SPEC>;
#[doc = "DMA2D foreground CLUT memory address register"]
pub mod fgcmar;
#[doc = "BGCMAR (rw) register accessor: DMA2D background CLUT memory address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bgcmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bgcmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`bgcmar`]
module"]
pub type BGCMAR = crate::Reg<bgcmar::BGCMAR_SPEC>;
#[doc = "DMA2D background CLUT memory address register"]
pub mod bgcmar;
#[doc = "OPFCCR (rw) register accessor: DMA2D output PFC control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`opfccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`opfccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`opfccr`]
module"]
pub type OPFCCR = crate::Reg<opfccr::OPFCCR_SPEC>;
#[doc = "DMA2D output PFC control register"]
pub mod opfccr;
#[doc = "OCOLR (rw) register accessor: DMA2D output color register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ocolr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ocolr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ocolr`]
module"]
pub type OCOLR = crate::Reg<ocolr::OCOLR_SPEC>;
#[doc = "DMA2D output color register"]
pub mod ocolr;
#[doc = "OMAR (rw) register accessor: DMA2D output memory address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`omar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`omar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`omar`]
module"]
pub type OMAR = crate::Reg<omar::OMAR_SPEC>;
#[doc = "DMA2D output memory address register"]
pub mod omar;
#[doc = "OOR (rw) register accessor: DMA2D output offset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`oor::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`oor::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`oor`]
module"]
pub type OOR = crate::Reg<oor::OOR_SPEC>;
#[doc = "DMA2D output offset register"]
pub mod oor;
#[doc = "NLR (rw) register accessor: DMA2D number of line register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`nlr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`nlr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`nlr`]
module"]
pub type NLR = crate::Reg<nlr::NLR_SPEC>;
#[doc = "DMA2D number of line register"]
pub mod nlr;
#[doc = "LWR (rw) register accessor: DMA2D line watermark register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lwr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lwr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`lwr`]
module"]
pub type LWR = crate::Reg<lwr::LWR_SPEC>;
#[doc = "DMA2D line watermark register"]
pub mod lwr;
#[doc = "AMTCR (rw) register accessor: DMA2D AXI master timer configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`amtcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`amtcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`amtcr`]
module"]
pub type AMTCR = crate::Reg<amtcr::AMTCR_SPEC>;
#[doc = "DMA2D AXI master timer configuration register"]
pub mod amtcr;
|
use literate::bin_search;
fn main() {
    // Demonstrate bin_search on a tiny sorted collection.
    let target = 'a';
    let collection = ['a', 'b', 'c'];
    println!("Target: {}", target);
    print!("Collection: ");
    for item in &collection {
        print!("{}, ", item);
    }
    println!();
    match bin_search(&target, &collection) {
        Some(i) => println!("Result: {}", collection[i]),
        None => println!("Not found"),
    }
}
|
//! JSON-LD helper methods
use crate::config::BlogConfig;
use serde_json::{json, Value};
pub static CONTEXT: &str = "http://schema.org";
/// Joins `path` onto the configured site URL with a single `/`.
// NOTE(review): assumes `config.site.url` carries no trailing slash and
// `path` no leading slash — confirm against the config schema.
pub fn full_url(config: &BlogConfig, path: &str) -> String {
    format!("{}/{}", config.site.url, path)
}
/// http://schema.org/Person node for the blog owner.
pub fn owner(config: &BlogConfig) -> Value {
    // The owner photo is optional; when absent it serializes as JSON null.
    let photo = config
        .owner
        .image
        .as_ref()
        .map(|p| image(p.width, p.height, full_url(config, &p.url)));
    json!({
        "@type": "Person",
        "name": config.owner.name,
        "url": full_url(config, "about"),
        "image": photo,
        "sameAs": config.owner.urls
    })
}
/// http://schema.org/Organization
pub fn organization(config: &BlogConfig) -> Value {
    // Shadowing: first borrow the logo config, then replace it with the
    // built ImageObject node.
    let logo = &config.site.logo;
    let logo = image(logo.width, logo.height, full_url(config, &logo.url));
    json!({
        "@type": "Organization",
        "name": config.site.title,
        "logo": logo
    })
}
/// http://schema.org/ImageObject node with explicit pixel dimensions.
pub fn image(width: u16, height: u16, url: String) -> Value {
    json!({
        "@type": "ImageObject",
        "width": width,
        "height": height,
        "url": url
    })
}
/// http://schema.org/WebPage
pub fn web_page(config: &BlogConfig, path: &str) -> Value {
    // Identified only by its absolute URL.
    json!({
        "@type": "WebPage",
        "id": full_url(config, path)
    })
}
/// A `BreadcrumbList` is an `ItemList` consisting of a chain of linked Web
/// pages, typically described using at least their URL and their name, and
/// typically ending with the current page.
///
/// The `position` property is used to reconstruct the order of the items in a
/// BreadcrumbList The convention is that a breadcrumb list has an
/// `itemListOrder` of `ItemListOrderAscending` (lower values listed first),
/// and that the first items in this list correspond to the "top" or beginning
/// of the breadcrumb trail, e.g. with a site or section homepage. The
/// specific values of `position` are not assigned meaning for a
/// `BreadcrumbList`, but they should be integers, e.g. beginning with `1` for
/// the first item in the list.
///
/// http://schema.org/BreadcrumbList
///
pub fn breadcrumb(
    config: &BlogConfig,
    path: &str,
    name: &str,
    position: usize, // 1-based position within the BreadcrumbList
) -> Value {
    // One ListItem of a BreadcrumbList (see doc comment above).
    json!({
        "@type": "ListItem",
        "item": {
            "@type": "Breadcrumb",
            "url": full_url(config, path),
            "name": name
        },
        "position": position
    })
}
|
use super::range_wrapper::RangeInclusiveStartWrapper;
use crate::std_ext::*;
use std::collections::BTreeMap;
use std::fmt::{self, Debug};
use std::marker::PhantomData;
use std::ops::RangeInclusive;
/// A map whose keys are stored as ranges bounded
/// inclusively below and above `(start..=end)`.
///
/// Contiguous and overlapping ranges that map to the same value
/// are coalesced into a single range.
///
/// Successor and predecessor functions must be provided for
/// the key type `K`, so that we can detect adjacent but non-overlapping
/// (closed) ranges. (This is not a problem for half-open ranges,
/// because adjacent ranges can be detected using equality of range ends alone.)
///
/// You can provide these functions either by implementing the
/// [`StepLite`](crate::StepLite) trait for your key type `K`, or,
/// if this is impossible because of Rust's "orphan rules",
/// you can provide equivalent free functions using the `StepFnsT` type parameter.
/// [`StepLite`](crate::StepLite) is implemented for all standard integer types,
/// but not for any third party crate types.
#[derive(Clone)]
pub struct RangeInclusiveMap<K, V, StepFnsT = K> {
    // Wrap ranges so that they are `Ord`.
    // See `range_wrapper.rs` for explanation.
    btm: BTreeMap<RangeInclusiveStartWrapper<K>, V>,
    // Type-level only: `StepFnsT` supplies successor/predecessor
    // arithmetic in the methods but is never stored at runtime.
    _phantom: PhantomData<StepFnsT>,
}
impl<K, V> Default for RangeInclusiveMap<K, V, K>
where
    K: Ord + Clone + StepLite,
    V: Eq + Clone,
{
    /// Returns an empty map; equivalent to [`RangeInclusiveMap::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<K, V> RangeInclusiveMap<K, V, K>
where
K: Ord + Clone + StepLite,
V: Eq + Clone,
{
/// Makes a new empty `RangeInclusiveMap`.
pub fn new() -> Self {
Self::new_with_step_fns()
}
}
impl<K, V, StepFnsT> RangeInclusiveMap<K, V, StepFnsT>
where
K: Ord + Clone,
V: Eq + Clone,
StepFnsT: StepFns<K>,
{
/// Makes a new empty `RangeInclusiveMap`, specifying successor and
/// predecessor functions defined separately from `K` itself.
///
/// This is useful as a workaround for Rust's "orphan rules",
/// which prevent you from implementing `StepLite` for `K` if `K`
/// is a foreign type.
///
/// **NOTE:** This will likely be deprecated and then eventually
/// removed once the standard library's [Step](std::iter::Step)
/// trait is stabilised, as most crates will then likely implement [Step](std::iter::Step)
/// for their types where appropriate.
///
/// See [this issue](https://github.com/rust-lang/rust/issues/42168)
/// for details about that stabilization process.
pub fn new_with_step_fns() -> Self {
    // The map's only state is the tree; the step functions exist
    // purely at the type level (see `_phantom`).
    Self {
        btm: BTreeMap::new(),
        _phantom: PhantomData,
    }
}
/// Returns a reference to the value corresponding to the given key,
/// if the key is covered by any range in the map.
pub fn get(&self, key: &K) -> Option<&V> {
    match self.get_key_value(key) {
        Some((_covering_range, value)) => Some(value),
        None => None,
    }
}
/// Returns the range-value pair (as a pair of references) corresponding
/// to the given key, if the key is covered by any range in the map.
pub fn get_key_value(&self, key: &K) -> Option<(&RangeInclusive<K>, &V)> {
    use std::ops::Bound;
    // Any stored range that could contain `key` must be the last one
    // whose start is at or before `key`; every later range starts
    // strictly after it.
    let probe = RangeInclusiveStartWrapper::new(key.clone()..=key.clone());
    let (wrapper, value) = self
        .btm
        .range((Bound::Unbounded, Bound::Included(probe)))
        .next_back()?;
    // The candidate starts early enough; it is a hit only if it also
    // extends far enough to the right to cover the requested key.
    if wrapper.range.contains(key) {
        Some((&wrapper.range, value))
    } else {
        None
    }
}
/// Returns `true` if any range in the map covers the specified key.
pub fn contains_key(&self, key: &K) -> bool {
    // `get` is itself a thin projection of `get_key_value`,
    // so query the pair lookup directly.
    self.get_key_value(key).is_some()
}
/// Gets an iterator over all pairs of key range and value,
/// ordered by key range.
///
/// The iterator element type is `(&'a RangeInclusive<K>, &'a V)`.
pub fn iter(&self) -> impl Iterator<Item = (&RangeInclusive<K>, &V)> {
    // Unwrap each stored key back to the plain range the caller expects.
    self.btm
        .iter()
        .map(|(wrapper, value)| (&wrapper.range, value))
}
/// Insert a pair of key range and value into the map.
///
/// If the inserted range partially or completely overlaps any
/// existing range in the map, then the existing range (or ranges) will be
/// partially or completely replaced by the inserted range.
///
/// If the inserted range either overlaps or is immediately adjacent
/// any existing range _mapping to the same value_, then the ranges
/// will be coalesced into a single contiguous range.
///
/// # Panics
///
/// Panics if range `start > end`.
pub fn insert(&mut self, range: RangeInclusive<K>, value: V) {
    use std::ops::Bound;
    // Backwards ranges don't make sense.
    // `RangeInclusive` doesn't enforce this,
    // and we don't want weird explosions further down
    // if someone gives us such a range.
    assert!(
        range.start() <= range.end(),
        "Range start can not be after range end"
    );
    // Wrap up the given range so that we can "borrow"
    // it as a wrapper reference to either its start or end.
    // See `range_wrapper.rs` for explanation of these hacks.
    // (The wrapped range may be widened below as we coalesce with
    // touching stored ranges that map to the same value.)
    let mut new_range_start_wrapper: RangeInclusiveStartWrapper<K> =
        RangeInclusiveStartWrapper::new(range);
    let new_value = value;
    // Is there a stored range either overlapping the start of
    // the range to insert or immediately preceding it?
    //
    // If there is any such stored range, it will be the last
    // whose start is less than or equal to _one less than_
    // the start of the range to insert, or the one before that
    // if both of the above cases exist.
    let mut candidates = self
        .btm
        .range((Bound::Unbounded, Bound::Included(&new_range_start_wrapper)))
        .rev()
        .take(2)
        .filter(|(stored_range_start_wrapper, _stored_value)| {
            // Does the candidate range either overlap
            // or immediately precede the range to insert?
            // (Remember that it might actually cover the _whole_
            // range to insert and then some.)
            stored_range_start_wrapper
                .range
                .touches::<StepFnsT>(&new_range_start_wrapper.range)
        });
    if let Some(mut candidate) = candidates.next() {
        // Or the one before it if both cases described above exist.
        if let Some(another_candidate) = candidates.next() {
            candidate = another_candidate;
        }
        // Clone out of the map so we are free to mutate it below.
        let (stored_range_start_wrapper, stored_value) =
            (candidate.0.clone(), candidate.1.clone());
        self.adjust_touching_ranges_for_insert(
            stored_range_start_wrapper,
            stored_value,
            &mut new_range_start_wrapper.range,
            &new_value,
        );
    }
    // Are there any stored ranges whose heads overlap or immediately
    // follow the range to insert?
    //
    // If there are any such stored ranges (that weren't already caught above),
    // their starts will fall somewhere after the start of the range to insert,
    // and on, before, or _immediately after_ its end. To handle that last case
    // without risking arithmetic overflow, we'll consider _one more_ stored item past
    // the end of the end of the range to insert.
    //
    // REVISIT: Possible micro-optimisation: `impl Borrow<T> for RangeInclusiveStartWrapper<T>`
    // and use that to search here, to avoid constructing another `RangeInclusiveStartWrapper`.
    let second_last_possible_start = new_range_start_wrapper.range.end().clone();
    let second_last_possible_start = RangeInclusiveStartWrapper::new(
        // The end value doubles as both bounds of a one-element probe range.
        second_last_possible_start.clone()..=second_last_possible_start,
    );
    while let Some((stored_range_start_wrapper, stored_value)) = self
        .btm
        .range((
            Bound::Included(&new_range_start_wrapper),
            // We would use something like `Bound::Included(&last_possible_start)`,
            // but making `last_possible_start` might cause arithmetic overflow;
            // instead decide inside the loop whether we've gone too far and break.
            Bound::Unbounded,
        ))
        .next()
    {
        // A couple of extra exceptions are needed at the
        // end of the subset of stored ranges we want to consider,
        // in part because we use `Bound::Unbounded` above.
        // (See comments up there, and in the individual cases below.)
        let stored_start = stored_range_start_wrapper.range.start();
        if *stored_start > *second_last_possible_start.range.start() {
            let latest_possible_start =
                StepFnsT::add_one(second_last_possible_start.range.start());
            if *stored_start > latest_possible_start {
                // We're beyond the last stored range that could be relevant.
                // Avoid wasting time on irrelevant ranges, or even worse, looping forever.
                // (`adjust_touching_ranges_for_insert` below assumes that the given range
                // is relevant, and behaves very poorly if it is handed a range that it
                // shouldn't be touching.)
                break;
            }
            if *stored_start == latest_possible_start && *stored_value != new_value {
                // We are looking at the last stored range that could be relevant,
                // but it has a different value, so we don't want to merge with it.
                // We must explicitly break here as well, because `adjust_touching_ranges_for_insert`
                // below assumes that the given range is relevant, and behaves very poorly if it
                // is handed a range that it shouldn't be touching.
                break;
            }
        }
        let stored_range_start_wrapper = stored_range_start_wrapper.clone();
        let stored_value = stored_value.clone();
        self.adjust_touching_ranges_for_insert(
            stored_range_start_wrapper,
            stored_value,
            &mut new_range_start_wrapper.range,
            &new_value,
        );
    }
    // Insert the (possibly expanded) new range, and we're done!
    self.btm.insert(new_range_start_wrapper, new_value);
}
/// Removes a range from the map, if all or any of it was present.
///
/// If the range to be removed _partially_ overlaps any ranges
/// in the map, then those ranges will be contracted to no
/// longer cover the removed range.
///
///
/// # Panics
///
/// Panics if range `start > end`.
pub fn remove(&mut self, range: RangeInclusive<K>) {
    use std::ops::Bound;
    // Backwards ranges don't make sense.
    // `RangeInclusive` doesn't enforce this,
    // and we don't want weird explosions further down
    // if someone gives us such a range.
    assert!(
        range.start() <= range.end(),
        "Range start can not be after range end"
    );
    let range_start_wrapper: RangeInclusiveStartWrapper<K> =
        RangeInclusiveStartWrapper::new(range);
    let range = &range_start_wrapper.range;
    // Is there a stored range overlapping the start of
    // the range to remove?
    //
    // If there is any such stored range, it will be the last
    // whose start is less than or equal to the start of the range to remove.
    if let Some((stored_range_start_wrapper, stored_value)) = self
        .btm
        .range((Bound::Unbounded, Bound::Included(&range_start_wrapper)))
        .next_back()
        .filter(|(stored_range_start_wrapper, _stored_value)| {
            // Does the only candidate range overlap
            // the range to remove?
            stored_range_start_wrapper.range.overlaps(&range)
        })
        .map(|(stored_range_start_wrapper, stored_value)| {
            (stored_range_start_wrapper.clone(), stored_value.clone())
        })
    {
        self.adjust_overlapping_ranges_for_remove(
            stored_range_start_wrapper,
            stored_value,
            &range,
        );
    }
    // Are there any stored ranges whose heads overlap the range to remove?
    //
    // If there are any such stored ranges (that weren't already caught above),
    // their starts will fall somewhere after the start of the range to remove,
    // and on or before its end.
    //
    // REVISIT: Possible micro-optimisation: `impl Borrow<T> for RangeInclusiveStartWrapper<T>`
    // and use that to search here, to avoid constructing another `RangeInclusiveStartWrapper`.
    let new_range_end_as_start =
        RangeInclusiveStartWrapper::new(range.end().clone()..=range.end().clone());
    while let Some((stored_range_start_wrapper, stored_value)) = self
        .btm
        .range((
            Bound::Excluded(&range_start_wrapper),
            Bound::Included(&new_range_end_as_start),
        ))
        .next()
        .map(|(stored_range_start_wrapper, stored_value)| {
            (stored_range_start_wrapper.clone(), stored_value.clone())
        })
    {
        self.adjust_overlapping_ranges_for_remove(
            stored_range_start_wrapper,
            stored_value,
            &range,
        );
    }
}
/// Resolves a single stored range that touches (overlaps or is
/// immediately adjacent to) `new_range`, which the caller is about
/// to insert:
///
/// * same value: the stored range is deleted and `new_range` is
///   widened in place to subsume it;
/// * different value, overlapping: the stored range is deleted and
///   up to two remainder pieces outside `new_range` are re-inserted;
/// * different value, merely adjacent: nothing changes.
///
/// Callers must guarantee the two ranges really do touch (see the
/// warnings in `insert` about handing this an irrelevant range).
fn adjust_touching_ranges_for_insert(
    &mut self,
    stored_range_start_wrapper: RangeInclusiveStartWrapper<K>,
    stored_value: V,
    new_range: &mut RangeInclusive<K>,
    new_value: &V,
) {
    use std::cmp::{max, min};
    if stored_value == *new_value {
        // The ranges have the same value, so we can "adopt"
        // the stored range.
        //
        // This means that no matter how big or where the stored range is,
        // we will expand the new range's bounds to subsume it,
        // and then delete the stored range.
        let new_start =
            min(new_range.start(), stored_range_start_wrapper.range.start()).clone();
        let new_end = max(new_range.end(), stored_range_start_wrapper.range.end()).clone();
        *new_range = new_start..=new_end;
        self.btm.remove(&stored_range_start_wrapper);
    } else {
        // The ranges have different values.
        if new_range.overlaps(&stored_range_start_wrapper.range) {
            // The ranges overlap. This is a little bit more complicated.
            // Delete the stored range, and then add back between
            // 0 and 2 subranges at the ends of the range to insert.
            self.btm.remove(&stored_range_start_wrapper);
            if stored_range_start_wrapper.range.start() < new_range.start() {
                // Insert the piece left of the range to insert.
                self.btm.insert(
                    RangeInclusiveStartWrapper::new(
                        stored_range_start_wrapper.range.start().clone()
                            ..=StepFnsT::sub_one(new_range.start()),
                    ),
                    stored_value.clone(),
                );
            }
            if stored_range_start_wrapper.range.end() > new_range.end() {
                // Insert the piece right of the range to insert.
                self.btm.insert(
                    RangeInclusiveStartWrapper::new(
                        StepFnsT::add_one(new_range.end())
                            ..=stored_range_start_wrapper.range.end().clone(),
                    ),
                    stored_value,
                );
            }
        } else {
            // No-op; they're not overlapping,
            // so we can just keep both ranges as they are.
        }
    }
}
/// Deletes a stored range known to overlap `range_to_remove`, then
/// re-inserts the (up to two) pieces of it that fall outside the
/// removed span, preserving its value.
fn adjust_overlapping_ranges_for_remove(
    &mut self,
    stored_range_start_wrapper: RangeInclusiveStartWrapper<K>,
    stored_value: V,
    range_to_remove: &RangeInclusive<K>,
) {
    // Delete the stored range, and then add back between
    // 0 and 2 subranges at the ends of the range to remove.
    self.btm.remove(&stored_range_start_wrapper);
    let stored_range = stored_range_start_wrapper.range;
    if stored_range.start() < range_to_remove.start() {
        // Insert the piece left of the range to remove.
        self.btm.insert(
            RangeInclusiveStartWrapper::new(
                stored_range.start().clone()..=StepFnsT::sub_one(range_to_remove.start()),
            ),
            stored_value.clone(),
        );
    }
    if stored_range.end() > range_to_remove.end() {
        // Insert the piece right of the range to remove.
        self.btm.insert(
            RangeInclusiveStartWrapper::new(
                StepFnsT::add_one(range_to_remove.end())..=stored_range.end().clone(),
            ),
            stored_value,
        );
    }
}
/// Gets an iterator over all the maximally-sized ranges
/// contained in `outer_range` that are not covered by
/// any range stored in the map.
///
/// The iterator element type is `RangeInclusive<K>`.
///
/// NOTE: Calling `gaps` eagerly finds the first gap,
/// even if the iterator is never consumed.
pub fn gaps<'a>(&'a self, outer_range: &'a RangeInclusive<K>) -> Gaps<'a, K, V, StepFnsT> {
    let mut keys = self.btm.keys().peekable();
    // Find the first potential gap.
    let mut candidate_start = outer_range.start().clone();
    // We might be already done from the start,
    // but not be able to represent it using
    // `candidate_start` alone if we're at the end
    // of the key domain.
    let mut done = false;
    // Consume stored ranges lying entirely before, or overlapping
    // the start of, the outer range; the `Gaps` iterator then only
    // ever sees ranges that can bound further gaps.
    while let Some(item) = keys.peek() {
        if item.range.end() < outer_range.start() {
            // This range sits entirely before the start of
            // the outer range; just skip it.
            let _ = keys.next();
        } else if item.range.start() <= outer_range.start() {
            // This range overlaps the start of the
            // outer range, so the first possible candidate
            // range begins immediately after its end.
            if item.range.end() >= outer_range.end() {
                // There's a risk of overflowing;
                // use our extra "done" flag to represent
                // that the iterator is already done.
                // (Don't worry about `candidate_start`
                // we'll ignore everything else if `done`
                // is `true`.)
                done = true;
            } else {
                candidate_start = StepFnsT::add_one(item.range.end());
            }
            let _ = keys.next();
        } else {
            // The rest of the items might contribute to gaps.
            break;
        }
    }
    Gaps {
        done,
        outer_range,
        keys,
        candidate_start,
        _phantom: PhantomData,
    }
}
}
// We can't just derive this automatically, because that would
// expose irrelevant (and private) implementation details.
// Instead implement it in the same way that the underlying BTreeMap does.
impl<K: Debug, V: Debug> Debug for RangeInclusiveMap<K, V>
where
    K: Ord + Clone + StepLite,
    V: Eq + Clone,
{
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut map_builder = formatter.debug_map();
        map_builder.entries(self.iter());
        map_builder.finish()
    }
}
pub struct Gaps<'a, K, V, StepFnsT> {
    /// Would be redundant, but we need an extra flag to
    /// avoid overflowing when dealing with inclusive ranges.
    ///
    /// All other things here are ignored if `done` is `true`.
    done: bool,
    /// The range within which gaps are being reported.
    outer_range: &'a RangeInclusive<K>,
    /// Remaining stored keys (in ascending start order) that may
    /// bound further gaps; `gaps` pre-consumes irrelevant ones.
    keys: std::iter::Peekable<
        std::collections::btree_map::Keys<'a, RangeInclusiveStartWrapper<K>, V>,
    >,
    /// Start of the next gap that might be yielded.
    candidate_start: K,
    /// `StepFnsT` is only used for successor/predecessor arithmetic.
    _phantom: PhantomData<StepFnsT>,
}
// `Gaps` is always fused: once `next` returns `None` it has mutated
// nothing, so it keeps returning `None`. (See definition of `next` below.)
impl<'a, K, V, StepFnsT> std::iter::FusedIterator for Gaps<'a, K, V, StepFnsT>
where
    K: Ord + Clone,
    StepFnsT: StepFns<K>,
{
}
impl<'a, K, V, StepFnsT> Iterator for Gaps<'a, K, V, StepFnsT>
where
    K: Ord + Clone,
    StepFnsT: StepFns<K>,
{
    type Item = RangeInclusive<K>;
    /// Yields the next maximal uncovered range inside `outer_range`.
    ///
    /// Loops internally so that empty candidate "gaps" are skipped
    /// rather than yielded: when two stored ranges with different
    /// values are immediately adjacent (e.g. `3..=4` and `5..=6`),
    /// the previous iteration leaves `candidate_start` exactly at the
    /// next item's start, which would otherwise produce a backwards
    /// range like `5..=4`.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if self.done || self.candidate_start > *self.outer_range.end() {
                // We've already passed the end of the outer range;
                // there are no more gaps to find.
                return None;
            }
            // Figure out where this candidate gap ends.
            let (end, next_candidate_start) = if let Some(item) = self.keys.next() {
                if item.range.start() <= self.outer_range.end() {
                    // The gap goes up until just before the start of the next item.
                    let end = StepFnsT::sub_one(item.range.start());
                    if item.range.end() >= self.outer_range.end() {
                        // This item covers through the end of the outer range,
                        // so there can be no more gaps — and computing the
                        // successor of its end could overflow if the item
                        // ends at the maximum of the key domain. Use the
                        // `done` flag instead of computing a next candidate.
                        self.done = true;
                        (
                            end,
                            // This value will be ignored.
                            self.candidate_start.clone(),
                        )
                    } else {
                        // The next candidate starts just after this item.
                        (end, StepFnsT::add_one(item.range.end()))
                    }
                } else {
                    // The item sits after the end of the outer range,
                    // so this gap ends at the end of the outer range.
                    // This also means there will be no more gaps.
                    self.done = true;
                    (
                        self.outer_range.end().clone(),
                        // This value will be ignored.
                        self.candidate_start.clone(),
                    )
                }
            } else {
                // There's no next item; the end is at the
                // end of the outer range.
                // This also means there will be no more gaps.
                self.done = true;
                (
                    self.outer_range.end().clone(),
                    // This value will be ignored.
                    self.candidate_start.clone(),
                )
            };
            // Move the next candidate gap start past the end
            // of this gap.
            let start = self.candidate_start.clone();
            self.candidate_start = next_candidate_start;
            if start <= end {
                // A genuine (possibly zero-width) gap: yield it.
                return Some(start..=end);
            }
            // Empty candidate produced by adjacent stored ranges;
            // try again from the updated candidate start.
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
/// Test helper: flattens a map into a `Vec` of `(range, value)` pairs
/// so assertions can compare against literal vectors.
trait RangeInclusiveMapExt<K, V> {
    fn to_vec(&self) -> Vec<(RangeInclusive<K>, V)>;
}
impl<K, V> RangeInclusiveMapExt<K, V> for RangeInclusiveMap<K, V, K>
where
    K: Ord + Clone + StepLite,
    V: Eq + Clone,
{
    /// Clones every (range, value) pair out of the map, in range order.
    fn to_vec(&self) -> Vec<(RangeInclusive<K>, V)> {
        let mut pairs = Vec::new();
        for (key_range, value) in self.iter() {
            pairs.push((key_range.clone(), value.clone()));
        }
        pairs
    }
}
//
// Insertion tests
//
#[test]
fn empty_map_is_empty() {
let range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
assert_eq!(range_map.to_vec(), vec![]);
}
#[test]
fn insert_into_empty_map() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(0..=50, false);
assert_eq!(range_map.to_vec(), vec![(0..=50, false)]);
}
#[test]
fn new_same_value_immediately_following_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---● ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=3, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ●---◌ ◌ ◌ ◌
range_map.insert(4..=6, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---------◌ ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=6, false)]);
}
#[test]
fn new_different_value_immediately_following_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---● ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=3, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌
range_map.insert(4..=6, true);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---● ◌ ◌ ◌ ◌ ◌ ◌
// ◌ ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=3, false), (4..=6, true)]);
}
#[test]
fn new_same_value_overlapping_end_of_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-----● ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=4, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ●---● ◌ ◌ ◌
range_map.insert(4..=6, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---------● ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=6, false)]);
}
#[test]
fn new_different_value_overlapping_end_of_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---● ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=3, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◆---◆ ◌ ◌ ◌ ◌
range_map.insert(3..=5, true);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-● ◌ ◌ ◌ ◌ ◌ ◌ ◌
// ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=2, false), (3..=5, true)]);
}
#[test]
fn new_same_value_immediately_preceding_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ●---● ◌ ◌ ◌ ◌
range_map.insert(3..=5, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-● ◌ ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=2, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-------● ◌ ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=5, false)]);
}
#[test]
fn new_different_value_immediately_preceding_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◆---◆ ◌ ◌ ◌ ◌
range_map.insert(3..=5, true);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-● ◌ ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=2, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-● ◌ ◌ ◌ ◌ ◌ ◌ ◌
// ◌ ◌ ◌ ◆---◇ ◌ ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=2, false), (3..=5, true)]);
}
#[test]
fn new_same_value_wholly_inside_stored() {
    let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------● ◌ ◌ ◌ ◌
    range_map.insert(1..=5, false);
    // (diagram realigned to the 0–9 scale: the 2..=4 row had a stray
    // trailing marker)
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ●---● ◌ ◌ ◌ ◌ ◌
    range_map.insert(2..=4, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ●-------● ◌ ◌ ◌ ◌
    assert_eq!(range_map.to_vec(), vec![(1..=5, false)]);
}
#[test]
fn new_different_value_wholly_inside_stored() {
    let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆-------◆ ◌ ◌ ◌ ◌
    range_map.insert(1..=5, true);
    // (diagram realigned to the 0–9 scale: the 2..=4 row had a stray
    // trailing marker)
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◌ ●---● ◌ ◌ ◌ ◌ ◌
    range_map.insert(2..=4, false);
    // 0 1 2 3 4 5 6 7 8 9
    // ◌ ◆ ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ●---● ◌ ◌ ◌ ◌ ◌
    // ◌ ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌
    assert_eq!(
        range_map.to_vec(),
        vec![(1..=1, true), (2..=4, false), (5..=5, true)]
    );
}
#[test]
fn replace_at_end_of_existing_range_should_coalesce() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---● ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=3, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ●---● ◌ ◌ ◌
range_map.insert(4..=6, true);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ●---● ◌ ◌ ◌
range_map.insert(4..=6, false);
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---------● ◌ ◌ ◌
assert_eq!(range_map.to_vec(), vec![(1..=6, false)]);
}
#[test]
// Test every permutation of a bunch of touching and overlapping ranges.
fn lots_of_interesting_ranges() {
use crate::stupid_range_map::StupidU32RangeMap;
use permutator::Permutation;
let mut ranges_with_values = [
(2..=3, false),
// A duplicate range
(2..=3, false),
// Almost a duplicate, but with a different value
(2..=3, true),
// A few small ranges, some of them overlapping others,
// some of them touching others
(3..=5, true),
(4..=6, true),
(6..=7, true),
// A really big range
(2..=6, true),
];
ranges_with_values.permutation().for_each(|permutation| {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
let mut stupid: StupidU32RangeMap<bool> = StupidU32RangeMap::new();
for (k, v) in permutation {
// Insert it into both maps.
range_map.insert(k.clone(), v);
stupid.insert(k, v);
// At every step, both maps should contain the same stuff.
let stupid2: StupidU32RangeMap<bool> = range_map.clone().into();
assert_eq!(stupid, stupid2);
}
});
}
//
// Get* tests
//
#[test]
fn get() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(0..=50, false);
assert_eq!(range_map.get(&50), Some(&false));
assert_eq!(range_map.get(&51), None);
}
#[test]
fn get_key_value() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(0..=50, false);
assert_eq!(range_map.get_key_value(&50), Some((&(0..=50), &false)));
assert_eq!(range_map.get_key_value(&51), None);
}
//
// Removal tests
//
#[test]
fn remove_from_empty_map() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.remove(0..=50);
assert_eq!(range_map.to_vec(), vec![]);
}
#[test]
fn remove_non_covered_range_before_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(0..=24);
assert_eq!(range_map.to_vec(), vec![(25..=75, false)]);
}
#[test]
fn remove_non_covered_range_after_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(76..=100);
assert_eq!(range_map.to_vec(), vec![(25..=75, false)]);
}
#[test]
fn remove_overlapping_start_of_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(0..=25);
assert_eq!(range_map.to_vec(), vec![(26..=75, false)]);
}
#[test]
fn remove_middle_of_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(30..=70);
assert_eq!(range_map.to_vec(), vec![(25..=29, false), (71..=75, false)]);
}
#[test]
fn remove_overlapping_end_of_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(75..=100);
assert_eq!(range_map.to_vec(), vec![(25..=74, false)]);
}
#[test]
fn remove_exactly_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(25..=75);
assert_eq!(range_map.to_vec(), vec![]);
}
#[test]
fn remove_superset_of_stored() {
let mut range_map: RangeInclusiveMap<u32, bool> = RangeInclusiveMap::new();
range_map.insert(25..=75, false);
range_map.remove(0..=100);
assert_eq!(range_map.to_vec(), vec![]);
}
//
// Test extremes of key ranges; we do addition/subtraction in
// the range domain so I want to make sure I haven't accidentally
// introduced some arithmetic overflow there.
//
#[test]
fn no_overflow_at_key_domain_extremes() {
let mut range_map: RangeInclusiveMap<u8, bool> = RangeInclusiveMap::new();
range_map.insert(0..=255, false);
range_map.insert(0..=10, true);
range_map.insert(245..=255, true);
range_map.remove(0..=5);
range_map.remove(0..=5);
range_map.remove(250..=255);
range_map.remove(250..=255);
range_map.insert(0..=255, true);
range_map.remove(1..=254);
range_map.insert(254..=254, true);
range_map.insert(255..=255, true);
range_map.insert(255..=255, false);
range_map.insert(0..=0, false);
range_map.insert(1..=1, true);
range_map.insert(0..=0, true);
}
// Gaps tests
#[test]
fn whole_range_is_a_gap() {
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌ ◌
let range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◆-------------◆ ◌
let outer_range = 1..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield the entire outer range.
assert_eq!(gaps.next(), Some(1..=8));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn whole_range_is_covered_exactly() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---------● ◌ ◌ ◌
range_map.insert(1..=6, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◆---------◆ ◌ ◌ ◌
let outer_range = 1..=6;
let mut gaps = range_map.gaps(&outer_range);
// Should yield no gaps.
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_before_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●---● ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=3, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆-----◆ ◌
let outer_range = 5..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield the entire outer range.
assert_eq!(gaps.next(), Some(5..=8));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_touching_start_of_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-----● ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=4, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆-----◆ ◌
let outer_range = 5..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield the entire outer range.
assert_eq!(gaps.next(), Some(5..=8));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_overlapping_start_of_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ●-------● ◌ ◌ ◌ ◌
range_map.insert(1..=5, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆-----◆ ◌
let outer_range = 5..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield from just past the end of the stored item
// to the end of the outer range.
assert_eq!(gaps.next(), Some(6..=8));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_starting_at_start_of_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ●-● ◌ ◌ ◌
range_map.insert(5..=6, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆-----◆ ◌
let outer_range = 5..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield from just past the item onwards.
assert_eq!(gaps.next(), Some(7..=8));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn items_floating_inside_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◌ ●-● ◌ ◌
range_map.insert(6..=7, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ●-● ◌ ◌ ◌ ◌ ◌
range_map.insert(3..=4, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◆-------------◆ ◌
let outer_range = 1..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield gaps at start, between items,
// and at end.
assert_eq!(gaps.next(), Some(1..=2));
assert_eq!(gaps.next(), Some(5..=5));
assert_eq!(gaps.next(), Some(8..=8));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_ending_at_end_of_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◌ ◌ ●-● ◌
range_map.insert(7..=8, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆-----◆ ◌
let outer_range = 5..=8;
let mut gaps = range_map.gaps(&outer_range);
// Should yield from the start of the outer range
// up to just before the start of the stored item.
assert_eq!(gaps.next(), Some(5..=6));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_overlapping_end_of_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ●---● ◌ ◌
range_map.insert(5..=6, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◆-----◆ ◌ ◌ ◌ ◌
let outer_range = 2..=5;
let mut gaps = range_map.gaps(&outer_range);
// Should yield from the start of the outer range
// up to the start of the stored item.
assert_eq!(gaps.next(), Some(2..=4));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_touching_end_of_outer_range() {
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ●-----● ◌
range_map.insert(5..=9, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◆-----◆ ◌ ◌ ◌ ◌ ◌
let outer_range = 1..=4;
let mut gaps = range_map.gaps(&outer_range);
// Should yield the entire outer range.
assert_eq!(gaps.next(), Some(1..=4));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn item_after_outer_range() {
// The stored item (6..=7) lies strictly after the outer range (1..=4),
// so it must not affect the result at all.
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◌ ●---● ◌
range_map.insert(6..=7, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◆-----◆ ◌ ◌ ◌ ◌ ◌
let outer_range = 1..=4;
let mut gaps = range_map.gaps(&outer_range);
// Should yield the entire outer range.
assert_eq!(gaps.next(), Some(1..=4));
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn zero_width_outer_range_with_items_away_from_both_sides() {
// For inclusive ranges, a "zero-width" outer range like 4..=4 still
// contains one key; since neither item covers 4, that key is a gap.
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◆---◆ ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(1..=3, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆---◆ ◌ ◌
range_map.insert(5..=7, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌ ◌
let outer_range = 4..=4;
let mut gaps = range_map.gaps(&outer_range);
// Should yield a zero-width gap.
assert_eq!(gaps.next(), Some(4..=4));
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn zero_width_outer_range_with_items_touching_both_sides() {
// Items touch the outer range on both sides (…=3 and 5=…) but neither
// covers the key 4 itself.
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◆-◆ ◌ ◌ ◌ ◌ ◌ ◌ ◌
range_map.insert(2..=3, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◌ ◆---◆ ◌ ◌ ◌
range_map.insert(5..=6, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌ ◌
let outer_range = 4..=4;
let mut gaps = range_map.gaps(&outer_range);
// Should yield the zero-width gap 4..=4: the neighbouring items touch
// the outer range but do not cover the key 4.
// (The old comment said "no gaps", contradicting the assertion below.)
assert_eq!(gaps.next(), Some(4..=4));
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn empty_outer_range_with_item_straddling() {
// NOTE(review): despite the name, 4..=4 is a one-key (zero-width) range,
// not an empty one; here the item 2..=5 covers that key, so no gap.
let mut range_map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◆-----◆ ◌ ◌ ◌ ◌ ◌
range_map.insert(2..=5, ());
// 0 1 2 3 4 5 6 7 8 9
// ◌ ◌ ◌ ◌ ◆ ◌ ◌ ◌ ◌ ◌
let outer_range = 4..=4;
let mut gaps = range_map.gaps(&outer_range);
// Should yield no gaps.
assert_eq!(gaps.next(), None);
// Gaps iterator should be fused.
assert_eq!(gaps.next(), None);
assert_eq!(gaps.next(), None);
}
#[test]
fn no_overflow_finding_gaps_at_key_domain_extremes() {
    // Regression test: gap-finding at the edges of the key domain
    // (u8::MIN / u8::MAX) must not overflow when stepping one past an item
    // endpoint. `gaps` returns a lazy iterator — the arithmetic under test
    // only runs inside `next()` — so each iterator is drained with
    // `for_each(drop)`. The test passes as long as nothing panics.

    // Items and outer range both at extremes.
    let mut range_map: RangeInclusiveMap<u8, bool> = RangeInclusiveMap::new();
    range_map.insert(0..=255, false);
    range_map.gaps(&(0..=255)).for_each(drop);
    // Items at extremes with gaps in middle.
    let mut range_map: RangeInclusiveMap<u8, bool> = RangeInclusiveMap::new();
    range_map.insert(0..=255, false);
    range_map.gaps(&(0..=5)).for_each(drop);
    range_map.gaps(&(250..=255)).for_each(drop);
    // Items just in from extremes.
    let mut range_map: RangeInclusiveMap<u8, bool> = RangeInclusiveMap::new();
    range_map.insert(0..=255, false);
    range_map.gaps(&(1..=5)).for_each(drop);
    range_map.gaps(&(250..=254)).for_each(drop);
    // Outer range just in from extremes,
    // items at extremes.
    let mut range_map: RangeInclusiveMap<u8, bool> = RangeInclusiveMap::new();
    range_map.insert(1..=254, false);
    range_map.gaps(&(0..=5)).for_each(drop);
    range_map.gaps(&(250..=255)).for_each(drop);
}
///
/// impl Debug
///
#[test]
fn map_debug_repr_looks_right() {
// Debug output mirrors a BTreeMap-style `{range: value, …}` listing,
// with entries in ascending key order.
let mut map: RangeInclusiveMap<u32, ()> = RangeInclusiveMap::new();
// Empty
assert_eq!(format!("{:?}", map), "{}");
// One entry
map.insert(2..=5, ());
assert_eq!(format!("{:?}", map), "{2..=5: ()}");
// Many entries
map.insert(7..=8, ());
map.insert(10..=11, ());
assert_eq!(format!("{:?}", map), "{2..=5: (), 7..=8: (), 10..=11: ()}");
}
}
|
use std::path;
mod file;
mod location;
mod operation;
mod update;
// mod recovery;
/// The atomic operation to perform, we store a path relative to the working directory
/// The atomic operation to perform, we store a path relative to the working directory.
///
/// The two-letter names in the comments below (rw, rc, rp, cw, cc, sw, sc)
/// refer to the staging directories enumerated by `AtomicLocation`
/// (replace/create/store × working/complete/previous).
enum FileOperation {
/// Files being replaced start in rw, then move to rc, but first files are moved from current to rp, finally rc is moved to current
Replace(path::PathBuf),
/// Files being created start in cw, then once complete are moved to cc
Create(path::PathBuf),
/// Files being stored are copied to sw, then moved to sc
Store(path::PathBuf),
}
// TODO: AtomicUpdate needs to support versioning since a repository might be updated with an old interrupted operation
/// Responsible for making sure that all files are updated atomically
/// Responsible for making sure that all files are updated atomically.
///
/// Holds the queue of pending `FileOperation`s plus pre-computed paths to
/// each staging directory inside the repository.
pub struct AtomicUpdate<'a> {
// Pending operations, applied as a batch.
atomic_jobs: Vec<FileOperation>,
// Root of the user-visible working directory.
path_to_working: &'a path::Path,
// Root of the repository that owns the staging directories below.
path_to_repository: &'a path::Path,
path_to_create_working: path::PathBuf,
path_to_create_complete: path::PathBuf,
path_to_replace_working: path::PathBuf,
path_to_replace_complete: path::PathBuf,
path_to_replace_previous: path::PathBuf,
path_to_replace_remove: path::PathBuf,
path_to_store_working: path::PathBuf,
path_to_store_complete: path::PathBuf,
}
/// Identifies one of the staging directories used by `AtomicUpdate`.
///
/// Each variant maps to a concrete directory via `get_path()` (defined in
/// the `location` submodule — see its use in the tests below).
pub(crate) enum AtomicLocation {
/// The repository root itself.
Base,
CreateWorking,
CreateComplete,
ReplaceWorking,
ReplaceComplete,
ReplacePrevious,
ReplaceRemove,
StoreWorking,
StoreComplete,
}
#[cfg(test)]
mod tests {
use super::{AtomicLocation, AtomicUpdate};
use testspace::TestSpace;
// Each test builds a fresh pair of temp directories (working + repository)
// and checks that `AtomicUpdate::new` creates the expected staging
// directories for one family of operations (replace / create / store).
#[test]
fn test_atomic_replace_init() {
let ts = TestSpace::new();
let ts2 = ts.create_child();
let working_path = ts.get_path();
let repository_path = ts2.get_path();
AtomicUpdate::new(repository_path, working_path)
.expect("Failed to initialize atomic update");
// All four replace-related staging directories must exist afterwards.
let temp_directory = repository_path.join(AtomicLocation::ReplaceWorking.get_path());
let complete_directory = repository_path.join(AtomicLocation::ReplaceComplete.get_path());
let previous_directory = repository_path.join(AtomicLocation::ReplacePrevious.get_path());
let old_directory = repository_path.join(AtomicLocation::ReplaceRemove.get_path());
assert!(temp_directory.exists());
assert!(complete_directory.exists());
assert!(previous_directory.exists());
assert!(old_directory.exists());
}
#[test]
fn test_atomic_create_init() {
let ts = TestSpace::new();
let ts2 = ts.create_child();
let working_path = ts.get_path();
let repository_path = ts2.get_path();
AtomicUpdate::new(repository_path, working_path)
.expect("Failed to initialize atomic update");
let complete_directory = repository_path.join(AtomicLocation::CreateComplete.get_path());
let working_directory = repository_path.join(AtomicLocation::CreateWorking.get_path());
assert!(working_directory.exists());
assert!(complete_directory.exists());
}
#[test]
fn test_atomic_store_init() {
let ts = TestSpace::new();
let ts2 = ts.create_child();
let repository_path = ts2.get_path();
let working_path = ts.get_path();
AtomicUpdate::new(repository_path, working_path)
.expect("Failed to initialize atomic update");
let complete_directory = repository_path.join(AtomicLocation::StoreComplete.get_path());
let working_directory = repository_path.join(AtomicLocation::StoreWorking.get_path());
assert!(working_directory.exists());
assert!(complete_directory.exists());
}
// TODO: Test load?
}
|
use clap::App;
use clap::ArgMatches;
use crate::error::HnError;
pub(crate) mod login;
pub(crate) mod news;
pub(crate) mod query;
pub(crate) mod thread;
pub(crate) mod tree;
pub mod hacker_news;
/// A trait defining the interface to add a subcommand to the command line
/// application.
pub trait HnCommand {
    /// The name of this subcommand. Will be used at the command line interface
    /// to name this subcommand.
    const NAME: &'static str;

    /// A function which returns a [clap](https://docs.rs/clap/2.33.3/clap/index.html)
    /// App instance. This App will be used as a subcommand in the over all command line
    /// application structure.
    fn parser<'a, 'b>() -> App<'a, 'b>;

    /// The command executed when this subcommand is actually run. This function receives a
    /// [clap](https://docs.rs/clap/2.33.3/clap/index.html) ArgMatches instance, which can
    /// drive optional or argument based logic.
    ///
    /// Errors are reported as the crate-specific `HnError`.
    fn cmd(matches: &ArgMatches) -> Result<(), Box<HnError>>;
}
|
pub mod client;
pub mod common;
pub mod entry;
pub mod server;
pub mod vecbuf;
pub extern crate rustls;
pub extern crate webpki;
extern crate bytes;
extern crate futures;
extern crate iovec;
extern crate tokio_io;
|
use q2::school_member;
fn main() {
// Exercise the nested-module path: q2::school_member::student::get_marks.
// Presumably prints/returns the student's marks — see the q2 crate.
school_member::student::get_marks();
}
|
use std::collections::HashMap;
use std::string::ToString;
use serde::ser::{Error, SerializeMap};
use serde::{Serialize, Serializer};
use serde_json::{json, Value};
use crate::schema::{PrimitiveType, Schema, UniqueItems};
// we output Draft 4 of the Json Schema specification because the downstream consumers
// of the JSON schema we produce fully support Draft 4, and not really Draft 7;
// in general most of the tools and libraries on the internet understand Draft 4 but have some problems with Draft 7
const SCHEMA_URL: &str = "http://json-schema.org/draft-04/schema#";
/// Serializable wrapper that renders a [`Schema`] as a Draft 4 JSON Schema
/// document. When `schema_url` is set, a `$schema` entry is emitted first.
pub struct JsonSchema<'a> {
// `Some(SCHEMA_URL)` for top-level documents, `None` for nested schemas.
schema_url: Option<&'static str>,
schema: &'a Schema,
}
impl<'a> Serialize for JsonSchema<'a> {
    /// Serialize as one JSON object: an optional leading `$schema` marker
    /// followed by every keyword derived from the wrapped [`Schema`].
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut object = serializer.serialize_map(None)?;
        match self.schema_url {
            Some(url) => object.serialize_entry("$schema", url)?,
            None => {}
        }
        serialize_as_json_schema(&self.schema, &mut object)?;
        object.end()
    }
}
impl<'a> JsonSchema<'a> {
    /// Wrap `schema` without emitting a `$schema` marker (used for nested
    /// schemas inside a larger document).
    pub fn new(schema: &'a Schema) -> Self {
        Self {
            schema_url: None,
            schema,
        }
    }

    /// Wrap `schema` and emit the Draft 4 `$schema` URL (used for the
    /// top-level document).
    pub fn with_default_schema_url(schema: &'a Schema) -> Self {
        Self {
            schema_url: Some(SCHEMA_URL),
            schema,
        }
    }
}
/// Emit the JSON-Schema `type` keyword plus any extra keywords implied by the
/// schema's primitive type (e.g. `Hostname` becomes `"string"` with
/// `"format": "hostname"`).
fn serialize_type<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
E: Error,
S: SerializeMap<Ok = O, Error = E>,
{
// Map each primitive type to a JSON type name plus an optional object of
// additional keywords (Value::Null means "no extras").
let primitive_type = schema.r#type().primitive_type();
let (typ, additional_keywords): (&str, Value) = match primitive_type {
PrimitiveType::Hostname => ("string", json!({"format": "hostname"})),
PrimitiveType::Password => ("string", json!({"writeOnly": true})),
PrimitiveType::DateTime => ("string", json!({"format": "date-time"})),
PrimitiveType::Date => ("string", json!({"format": "date"})),
PrimitiveType::Time => ("string", json!({"format": "time"})),
PrimitiveType::IPv4 => ("string", json!({"format": "ipv4"})),
PrimitiveType::IPv6 => ("string", json!({"format": "ipv6"})),
PrimitiveType::Uri => ("string", json!({"format": "uri"})),
PrimitiveType::Text => ("string", Value::Null),
PrimitiveType::StringList => ("array", Value::Null),
// Non-standard formats understood by downstream consumers.
PrimitiveType::DNSMasqAddress => ("string", json!({"format": "dnsmasq-address"})),
PrimitiveType::ChronyAddress => ("string", json!({"format": "chrony-address"})),
PrimitiveType::IPTablesAddress => ("string", json!({"format": "iptables-address"})),
PrimitiveType::Email => ("string", json!({"format": "email"})),
PrimitiveType::Object => ("object", Value::Null),
PrimitiveType::Array => ("array", Value::Null),
PrimitiveType::String => ("string", Value::Null),
PrimitiveType::Boolean => ("boolean", Value::Null),
PrimitiveType::Integer => ("integer", Value::Null),
PrimitiveType::Number => ("number", Value::Null),
PrimitiveType::File => ("string", json!({"format": "data-url"})),
PrimitiveType::Port => ("integer", {
// Supply the default port bounds (0..=65535) only for whichever bound
// the schema does not set itself; explicit min/max are emitted by
// serialize_number_keywords.
match (schema.min(), schema.max()) {
(None, Some(_)) => json!({"minimum": 0}),
(Some(_), None) => json!({"maximum": 65535}),
(None, None) => json!({"minimum": 0, "maximum": 65535}),
_ => Value::Null,
}
}),
};
map.serialize_entry("type", typ)?;
// Flatten the extra keywords into the same map; Value::Null has no
// as_object() and is skipped.
if let Some(obj) = additional_keywords.as_object() {
for (k, v) in obj.iter() {
map.serialize_entry(k, v)?;
}
}
Ok(())
}
/// Emit the `title` / `description` annotation keywords when present.
fn serialize_annotations<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
    E: Error,
    S: SerializeMap<Ok = O, Error = E>,
{
    schema
        .title()
        .map(|title| map.serialize_entry("title", title))
        .transpose()?;
    schema
        .description()
        .map(|description| map.serialize_entry("description", description))
        .transpose()?;
    Ok(())
}
/// Emit array-validation keywords: `minItems`, `maxItems`, uniqueness, and
/// `items` (single schema, or a `oneOf` of schemas when several are given).
fn serialize_array_keywords<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
E: Error,
S: SerializeMap<Ok = O, Error = E>,
{
if let Some(min_items) = schema.min_items() {
map.serialize_entry("minItems", &min_items)?;
}
if let Some(max_items) = schema.max_items() {
map.serialize_entry("maxItems", &max_items)?;
}
// `uniqueItems: false` is the JSON-Schema default, so it is omitted.
// `$$uniqueItemProperties` is a non-standard extension (uniqueness over a
// subset of property paths) understood by downstream tooling.
match schema.unique_items() {
UniqueItems::Boolean(value) if *value => map.serialize_entry("uniqueItems", value)?,
UniqueItems::Paths(paths) => map.serialize_entry("$$uniqueItemProperties", paths)?,
_ => {}
};
let items_count = schema.items().len();
match items_count {
0 => {}
1 => map.serialize_entry("items", &JsonSchema::new(schema.items().first().unwrap()))?,
_ => {
// Multiple item schemas: each element must match exactly one of them.
let json_schemas: Vec<JsonSchema> = schema.items().iter().map(JsonSchema::new).collect();
map.serialize_entry("items", &json!({ "oneOf": json_schemas }))?;
}
};
Ok(())
}
/// Emit object-validation keywords: `additionalProperties`, `required`,
/// `properties`, `patternProperties`, plus the non-standard `$$order` list
/// that preserves the declared property order.
fn serialize_object_keywords<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
E: Error,
S: SerializeMap<Ok = O, Error = E>,
{
if let PrimitiveType::Object = schema.r#type().primitive_type() {
map.serialize_entry("additionalProperties", &schema.additional_properties())?;
}
if !schema.properties().is_empty() {
let mut required = vec![];
let mut order = vec![];
// NOTE(review): HashMap iteration order is nondeterministic, so the key
// order of the emitted "properties" object varies between runs; the
// intended order is conveyed separately via "$$order". Confirm consumers
// never rely on "properties" key order (a BTreeMap would make it stable).
let mut properties = HashMap::<&str, JsonSchema>::new();
for property in schema.properties() {
if property.schema().r#type().is_required() {
required.push(property.name());
}
order.push(property.name());
properties.insert(property.name(), JsonSchema::new(property.schema()));
}
if !required.is_empty() {
map.serialize_entry("required", &required)?;
}
if !order.is_empty() {
map.serialize_entry("$$order", &order)?;
}
if !properties.is_empty() {
map.serialize_entry("properties", &properties)?;
}
}
// Map-like schemas: a key pattern plus a value schema become a single
// patternProperties entry keyed by the pattern's string form.
match (schema.keys(), schema.values()) {
(Some(keys), Some(values)) if keys.pattern().is_some() => map.serialize_entry(
"patternProperties",
&json!({ keys.pattern().unwrap().to_string(): JsonSchema::new(values) }),
)?,
_ => {}
};
Ok(())
}
/// Emit numeric-validation keywords: `multipleOf` and the min/max bounds.
///
/// Draft 4 expresses exclusivity as a boolean flag next to `maximum` /
/// `minimum` (unlike Draft 6+, where `exclusiveMaximum` is itself a number),
/// hence the paired entries below. An exclusive bound takes precedence over
/// an inclusive one when both are set.
fn serialize_number_keywords<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
E: Error,
S: SerializeMap<Ok = O, Error = E>,
{
if let Some(multiple_of) = schema.multiple_of() {
map.serialize_entry("multipleOf", &multiple_of)?;
}
match (schema.max(), schema.exclusive_max()) {
(_, Some(max)) => {
map.serialize_entry("exclusiveMaximum", &true)?;
map.serialize_entry("maximum", &max)?;
}
(Some(max), None) => {
map.serialize_entry("maximum", &max)?;
}
_ => {}
};
match (schema.min(), schema.exclusive_min()) {
(_, Some(min)) => {
map.serialize_entry("exclusiveMinimum", &true)?;
map.serialize_entry("minimum", &min)?;
}
(Some(min), None) => {
map.serialize_entry("minimum", &min)?;
}
_ => {}
};
Ok(())
}
/// Emit string-validation keywords: `maxLength`, `minLength`, and `pattern`
/// (the regex rendered via its `Display` form).
fn serialize_string_keywords<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
    E: Error,
    S: SerializeMap<Ok = O, Error = E>,
{
    schema
        .max_length()
        .map(|max_length| map.serialize_entry("maxLength", &max_length))
        .transpose()?;
    schema
        .min_length()
        .map(|min_length| map.serialize_entry("minLength", &min_length))
        .transpose()?;
    schema
        .pattern()
        .map(|pattern| map.serialize_entry("pattern", &pattern.to_string()))
        .transpose()?;
    Ok(())
}
/// Serialize `schema` into the already-open JSON map as a Draft 4 schema
/// object: annotations first, then type-family keywords, then the `type`
/// keyword itself, then assorted custom (`$$…`) extensions.
fn serialize_as_json_schema<O, E, S>(schema: &Schema, map: &mut S) -> Result<(), E>
where
    E: Error,
    S: SerializeMap<Ok = O, Error = E>,
{
    serialize_annotations(schema, map)?;
    serialize_array_keywords(schema, map)?;
    serialize_object_keywords(schema, map)?;
    serialize_number_keywords(schema, map)?;
    serialize_string_keywords(schema, map)?;
    serialize_type(schema, map)?;
    if let Some(version) = schema.version() {
        map.serialize_entry("$$version", &version)?;
    }
    // Draft 4 has no `const` keyword; encode it as a single-element `enum`.
    // (A fixed-size array replaces the needless `vec![]` heap allocation.)
    if let Some(cons) = schema.r#const() {
        map.serialize_entry("enum", &[cons])?;
    }
    if let Some(def) = schema.default() {
        map.serialize_entry("default", def)?;
    }
    if let Some(formula) = schema.formula() {
        map.serialize_entry("$$formula", formula)?;
    }
    if schema.read_only() {
        map.serialize_entry("readOnly", &true)?;
    }
    if schema.write_only() {
        map.serialize_entry("writeOnly", &true)?;
    }
    // Labelled enumerations: each value keeps its title by becoming a
    // one-element `enum` branch inside `oneOf`.
    let values: Vec<Value> = schema
        .r#enum()
        .iter()
        .map(|x| json!({ "title": x.title(), "enum": [ x.value() ]}))
        .collect();
    if !values.is_empty() {
        map.serialize_entry("oneOf", &values)?;
    }
    Ok(())
}
|
#[macro_use]
extern crate lazy_static;
use regex::Regex;
use std::collections::HashSet;
/// Replacement rules, one `X => Y` per line (puzzle input file).
const RULES: &str = include_str!("../input.txt");
/// The puzzle's starting molecule: an ASCII sequence of element symbols.
/// (The redundant `&` in `&"…"` — a `&&str` relying on deref coercion — is dropped.)
const MOLECULE: &str = "CRnCaSiRnBSiRnFArTiBPTiTiBFArPBCaSiThSiRnTiBPBPMgArCaSiRnTiMgArCaSiThCaSiRnFArRnSiRnFArTiTiBFArCaCaSiRnSiThCaCaSiRnMgArFYSiRnFYCaFArSiThCaSiThPBPTiMgArCaPRnSiAlArPBCaCaSiRnFYSiThCaRnFArArCaCaSiRnPBSiRnFArMgYCaCaCaCaSiThCaCaSiAlArCaCaSiRnPBSiAlArBCaCaCaCaSiThCaPBSiThPBPBCaSiRnFYFArSiThCaSiRnFArBCaCaSiRnFYFArSiThCaPBSiThCaSiRnPMgArRnFArPTiBCaPRnFArCaCaCaCaSiRnCaCaSiRnFYFArFArBCaSiThFArThSiThSiRnTiRnPMgArFArCaSiThCaPBCaSiRnBFArCaCaPRnCaCaPMgArSiRnFYFArCaSiThRnPBPMgAr";
/// Parse replacement rules of the form `X => Y`, one per line.
///
/// Returns the set of `(educt, product)` pairs. Panics on any line the rule
/// pattern does not match.
fn parse_rules(input: &str) -> HashSet<(String, String)> {
    lazy_static! {
        // Fixed: the separator class was `[^a-zA-z]` — the `A-z` range is a
        // classic typo that also spans `[ \ ] ^ _` and backtick; `A-Z` is
        // what was intended.
        static ref RE_RULES: Regex =
            Regex::new(r"(?P<educt>[a-zA-Z]+)[^a-zA-Z]+(?P<product>[a-zA-Z]+)").unwrap();
    }
    let mut rules: HashSet<(String, String)> = HashSet::new();
    for line in input.lines() {
        match RE_RULES.captures(line) {
            Some(cap) => {
                let educt: String = String::from(cap.name("educt").map_or("", |m| m.as_str()));
                let product: String = String::from(cap.name("product").map_or("", |m| m.as_str()));
                rules.insert((educt, product));
            }
            None => panic!("Couldn't parse rule: {}", line),
        }
    }
    rules
}
fn solve_part_1(input_rules: &str, input_molecule: &str) -> usize {
let rules: HashSet<(String, String)> = parse_rules(input_rules);
let mut results: HashSet<String> = HashSet::new();
let chars: Vec<char> = input_molecule.chars().collect();
for (educt, product) in rules.iter() {
match educt.chars().count() {
1 => {
for i in 0..chars.len() {
if chars[i].to_string() == *educt {
let mut temp: String = String::from(&input_molecule[0..i]);
temp.push_str(&product);
temp.push_str(&input_molecule[i + 1..]);
results.insert(temp);
}
}
}
2 => {
for i in 1..chars.len() {
let mut pattern: String = String::new();
pattern.push(chars[i - 1]);
pattern.push(chars[i]);
if pattern == *educt {
let mut temp: String = String::from(&input_molecule[0..i - 1]);
temp.push_str(&product);
temp.push_str(&input_molecule[i + 1..]);
results.insert(temp);
}
}
}
_ => {
panic!("Unrecognized rule: {} => {}", educt, product);
}
}
}
results.len()
}
fn main() {
// Part 1: number of distinct single-replacement molecules.
println!("Answer part 1: {}", solve_part_1(RULES, MOLECULE));
}
|
// Player Struct
use super::velocity::Velocity;
use super::item::Item;
use sdl2::rect::Rect;
/// The player entity: screen placement, sprite source region, texture, and
/// current velocity. All fields are optional so a Player can be constructed
/// incrementally; most methods unwrap them and panic if unset.
pub struct Player<'p> {
// Where the sprite is drawn on screen.
pub dst_rect: Option<Rect>,
// Which region of the texture to draw.
pub src_rect: Option<Rect>,
// Texture borrowed from the SDL2 texture creator (lifetime 'p).
pub texture: Option<sdl2::render::Texture<'p>>,
pub velocity: Option<Velocity>,
}
impl Player<'_> {
// Create player struct with x, y, w, h Rect
pub fn set_rect(x: i32, y: i32, width: u32, height: u32) -> Rect {
Rect::new(x, y, width, height)
}
pub fn is_collide(&self, item: &Item<'_>) -> bool {
if ((item.dst_rect.unwrap().x < self.dst_rect.unwrap().x) && (self.dst_rect.unwrap().x < item.dst_rect.unwrap().x + item.dst_rect.unwrap().width() as i32) && (item.dst_rect.unwrap().y < self.dst_rect.unwrap().y) && (self.dst_rect.unwrap().y < item.dst_rect.unwrap().y + item.dst_rect.unwrap().height() as i32)) ||
((self.dst_rect.unwrap().x < item.dst_rect.unwrap().x) && (item.dst_rect.unwrap().x < self.dst_rect.unwrap().x + self.dst_rect.unwrap().width() as i32) && (self.dst_rect.unwrap().y < item.dst_rect.unwrap().y) && (item.dst_rect.unwrap().y < self.dst_rect.unwrap().y + self.dst_rect.unwrap().height() as i32)) {
true
} else {
false
}
}
} |
use super::*;
use std::io;
// Raw bytes of the wasm "type" section plus its declared entry count.
pub struct TypeSection<'a>(pub &'a [u8], pub usize);
// Streaming decoder over a type section: remaining bytes + entries left.
pub struct TypeEntryIterator<'a>(&'a [u8], usize);
// A decoded type-section entry; only function types are represented here.
pub enum TypeEntry<'a> {
Function(FunctionType<'a>),
}
// A decoded function signature: its form (language type), the undecoded
// parameter bytes (one byte per value type) with their count, and an
// optional single return type.
pub struct FunctionType<'a> {
pub form: LanguageType,
params_count: usize,
params_raw: &'a [u8],
pub return_type: Option<ValueType>,
}
// Streaming decoder over a function's parameter bytes: slice + count left.
pub struct ParamsIterator<'a>(&'a [u8], usize);
impl<'a> TypeSection<'a> {
/// Iterate over the section's type entries; decoding is lazy, so malformed
/// data only surfaces as `Err` items during iteration.
pub fn entries(&self) -> TypeEntryIterator<'a> {
TypeEntryIterator(self.0, self.1)
}
}
impl<'a> Iterator for TypeEntryIterator<'a> {
    type Item = Result<TypeEntry<'a>, Error>;

    /// Decode the next type entry from the section payload.
    ///
    /// Yields `None` once the declared entry count is exhausted; otherwise
    /// a decoded entry or the first decoding error.
    fn next(&mut self) -> Option<Result<TypeEntry<'a>, Error>> {
        if self.1 == 0 {
            return None
        }
        self.1 -= 1;
        // Form: varuint language type (e.g. the `func` form).
        let form = try_opt!(read_varuint(&mut self.0));
        let form = try_opt!(LanguageType::from_int(form as u8).ok_or(
            Error::UnknownVariant("type entry form")
        ));
        // Parameters: a varuint count followed by one byte per value type.
        // Validate the count against the remaining buffer before slicing so
        // a corrupt count produces an error instead of a panic.
        let param_count = try_opt!(read_varuint(&mut self.0));
        let params = if param_count > self.0.len() as u64 {
            return Some(Err(Error::Io(io::Error::new(
                io::ErrorKind::UnexpectedEof,
                "param_count is larger than remaining space"
            ))))
        } else {
            let res = &self.0[..param_count as usize];
            self.0 = &self.0[param_count as usize..];
            res
        };
        // Return arity: any non-zero count is treated as "one value type
        // byte follows" (a single optional return value).
        let return_count = try_opt!(read_varuint(&mut self.0));
        let return_ty = if return_count > 0 {
            if self.0.is_empty() {
                return Some(Err(Error::Io(io::Error::new(
                    io::ErrorKind::UnexpectedEof,
                    "return_count is larger than remaining space"
                ))))
            }
            let res = self.0[0];
            self.0 = &self.0[1..];
            Some(try_opt!(ValueType::from_int(res).ok_or(Error::UnknownVariant("value type"))))
        } else {
            None
        };
        Some(Ok(TypeEntry::Function(FunctionType {
            form,
            params_count: param_count as usize,
            params_raw: params,
            return_type: return_ty
        })))
    }
}
impl<'a> FunctionType<'a> {
/// Iterate over the function's parameter value types, decoding one byte at
/// a time from the raw parameter slice.
pub fn params(&self) -> ParamsIterator<'a> {
ParamsIterator(self.params_raw, self.params_count)
}
}
impl<'a> Iterator for ParamsIterator<'a> {
    type Item = Result<ValueType, Error>;

    /// Decode the next parameter value type (one byte each).
    fn next(&mut self) -> Option<Result<ValueType, Error>> {
        if self.1 == 0 {
            return None
        }
        self.1 -= 1;
        // The stored count may promise more params than the slice holds if
        // the section was truncated; surface that as an EOF error.
        if self.0.is_empty() {
            return Some(Err(Error::Io(io::Error::new(
                io::ErrorKind::UnexpectedEof, "number of params is larger than available space"
            ))))
        }
        let res = self.0[0];
        self.0 = &self.0[1..];
        Some(Ok(try_opt!(ValueType::from_int(res).ok_or(Error::UnknownVariant("value type")))))
    }
}
|
use crate::cli::{SudoAction, SudoOptions};
use crate::system::{hostname, Group, Process, User};
use std::path::PathBuf;
use super::{
command::CommandAndArguments,
resolve::{resolve_current_user, resolve_launch_and_shell, resolve_target_user_and_group},
Error,
};
/// Everything needed to run a sudo invocation: resolved CLI options, the
/// ambient system state, and policy decisions.
#[derive(Debug)]
pub struct Context {
// cli options
pub launch: LaunchType,
// Working directory to switch to before running the command (--chdir).
pub chdir: Option<PathBuf>,
pub command: CommandAndArguments,
pub target_user: User,
pub target_group: Group,
pub stdin: bool,
pub non_interactive: bool,
// False when the user passed --reset-timestamp (-k).
pub use_session_records: bool,
// system
pub hostname: String,
pub current_user: User,
pub process: Process,
// policy
pub use_pty: bool,
}
/// How the target command is launched.
#[derive(Debug, PartialEq, Eq)]
pub enum LaunchType {
/// Execute the command directly.
Direct,
/// Run through the user's shell (`sudo -s`).
Shell,
/// Run as a login shell (`sudo -i`).
Login,
}
impl Context {
    /// Build a `Context` from the parsed CLI options plus the ambient system
    /// state (hostname, invoking user, current process).
    ///
    /// * `sudo_options` — the parsed command line.
    /// * `path` — the PATH string used to resolve the command binary.
    ///
    /// Returns an error if the invoking user or the target user/group cannot
    /// be resolved.
    ///
    /// Fix: the `&current_user` arguments below had been corrupted to the
    /// mojibake `¤t_user` (`&curren` eaten by an HTML-entity pass).
    pub fn build_from_options(sudo_options: SudoOptions, path: String) -> Result<Context, Error> {
        let hostname = hostname();
        let current_user = resolve_current_user()?;
        let (target_user, target_group) =
            resolve_target_user_and_group(&sudo_options.user, &sudo_options.group, &current_user)?;
        let (launch, shell) = resolve_launch_and_shell(&sudo_options, &current_user, &target_user);
        // Only Run (and List-with-arguments) carry a command to resolve; the
        // remaining actions fall back to an empty CommandAndArguments.
        let command = match sudo_options.action {
            SudoAction::Run(args) => CommandAndArguments::build_from_args(shell, args, &path),
            SudoAction::List(args) => {
                if args.is_empty() {
                    // FIXME here and in the `_` arm, `Default` is being used as `Option::None`
                    Default::default()
                } else {
                    CommandAndArguments::build_from_args(shell, args, &path)
                }
            }
            _ => Default::default(),
        };
        Ok(Context {
            hostname,
            command,
            current_user,
            target_user,
            target_group,
            // --reset-timestamp (-k) discards cached credentials.
            use_session_records: !sudo_options.reset_timestamp,
            launch,
            chdir: sudo_options.directory,
            stdin: sudo_options.stdin,
            non_interactive: sudo_options.non_interactive,
            process: Process::new(),
            use_pty: true,
        })
    }
}
#[cfg(test)]
mod tests {
use crate::{cli::SudoOptions, system::hostname};
use std::collections::HashMap;
use super::Context;
// Smoke test: building a context from `sudo echo hello` resolves the
// binary via PATH and defaults the target user to root (uid 0).
// NOTE(review): requires /usr/bin/echo and a root user to exist, so this
// only passes on a conventional Unix host.
#[test]
fn test_build_context() {
let options = SudoOptions::try_parse_from(["sudo", "echo", "hello"]).unwrap();
let path = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin";
let context = Context::build_from_options(options, path.to_string()).unwrap();
// Built but unused beyond construction — presumably a leftover from an
// environment-related assertion; TODO confirm or remove.
let mut target_environment = HashMap::new();
target_environment.insert("SUDO_USER".to_string(), context.current_user.name.clone());
assert_eq!(context.command.command.to_str().unwrap(), "/usr/bin/echo");
assert_eq!(context.command.arguments, ["hello"]);
assert_eq!(context.hostname, hostname());
assert_eq!(context.target_user.uid, 0);
}
}
|
pub mod anchor;
pub mod category;
pub mod iter;
pub mod page;
pub mod writer;
pub use self::{
anchor::Anchor,
iter::{PageIterator, RawPageIterator, TantivyPageIterator},
page::Page,
writer::PageWriter,
};
|
use crate::intro::Intro;
use crate::resources::Lifes;
use crate::resources::MaterialVector;
use crate::util::*;
use amethyst::{assets::Loader, input::is_close_requested, prelude::*, ui::TtfFormat};
/// Initial game state: loads shared resources (colors, lives counter, font)
/// and then switches to `Intro` on the first update.
pub struct Loading;
impl<'a, 'b> State<GameData<'a, 'b>, StateEvent> for Loading {
// Set up globally shared resources before any other state runs.
fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
let world = data.world;
initialize_colors(world);
world.add_resource(Lifes::default());
// Load the UI font through the asset Loader and store the handle as a
// world resource so later states can reuse it.
let font = world.read_resource::<Loader>().load(
"font/square.ttf",
TtfFormat,
(),
(),
&world.read_resource(),
);
world.add_resource(font);
}
// Immediately hand control over to the intro state; loading itself needs
// only a single frame.
fn update(&mut self, data: StateData<GameData>) -> Trans<GameData<'a, 'b>, StateEvent> {
data.data.update(&data.world);
Trans::Switch(Box::new(Intro { ui: None }))
}
// Quit on a window close request; ignore every other event.
fn handle_event(
&mut self,
_data: StateData<GameData>,
event: StateEvent,
) -> Trans<GameData<'a, 'b>, StateEvent> {
if let StateEvent::Window(event) = &event {
if is_close_requested(&event) {
Trans::Quit
} else {
Trans::None
}
} else {
Trans::None
}
}
}
/// Create the flat-colour materials used by the game entities and register
/// them as a `MaterialVector` world resource. Colour arrays are RGBA.
fn initialize_colors(world: &mut World) {
let m = MaterialVector {
pad: create_colour_material(world, [0., 0., 1., 1.]),
ball: create_colour_material(world, [0.5, 0.5, 0.5, 0.5]),
// One material per remaining-lives value — presumably indexed by the
// current life count; TODO confirm against the rendering system.
lifes: vec![
create_colour_material(world, [1., 0., 1., 1.]),
create_colour_material(world, [1., 1., 1., 1.]),
create_colour_material(world, [0., 1., 1., 1.]),
create_colour_material(world, [1., 1., 0., 1.]),
create_colour_material(world, [1., 0., 0., 1.]),
],
};
world.add_resource(m);
}
|
use amethyst::ecs::Entity;
use amethyst::renderer::Material;
/// Shared flat-colour materials for the game's entities (see
/// `initialize_colors` for how they are populated).
pub struct MaterialVector {
pub pad: Material,
pub ball: Material,
// One material per lives value.
pub lifes: Vec<Material>,
}
/// Remaining-lives counter plus the UI entity displaying it.
/// (NOTE(review): "Lifes" — sic; renaming to `Lives` would break callers.)
#[derive(Default)]
pub struct Lifes {
pub lifes: u32,
// The entity rendering the counter, once spawned.
pub e: Option<Entity>,
}
/// Cached window dimensions in pixels (as f32 for direct use in transforms).
#[derive(Default)]
pub struct WindowSize {
pub width: f32,
pub height: f32,
}
|
use crate::{error, helpers, open};
use std::io::Write;
use std::process::{Command, Stdio};
use std::sync::{atomic, Arc};
use std::time::SystemTime;
use std::{fmt, fs, io};
use structopt::StructOpt;
mod examples;
use examples::Examples;
/// CLI options for creating a new playground (`new` subcommand).
#[derive(StructOpt, Debug)]
pub struct NewOpts {
/// The name of the playground to create. If not supplied, the current timestamp will be used
#[structopt(short, long)]
name: Option<String>,
#[structopt(flatten)]
editor_opts: super::EditorOpts,
/// Do not pass -w flag when opening GUI editor
#[structopt(long, requires("gui"))]
no_w: bool,
/// Indicates the editor is a gui editor
#[structopt(short, long)]
gui: bool,
/// The library to base main.rs on. If not provided, base Cargo main.rs will be used.
///
/// Follows same format as dependencies. You do not need to repeat the library in dependencies,
/// as it is automatically added
#[structopt(short, long)]
template: Option<String>,
/// The dependencies to add. It must be in the following format:
/// 1. <dep-name>
/// 2. <dep-name>=<dep-version>
#[structopt(name = "dependencies")]
deps: Vec<String>,
}
/// Create a new playground project and open it in the configured editor.
///
/// Steps: pick a name (explicit or timestamp-based), `cargo new` the project,
/// append dependencies to Cargo.toml, optionally seed src/main.rs from one of
/// the template crate's examples, then delegate to `open::open`.
pub fn new(opts: NewOpts) -> error::Result<()> {
// Default name: "playground-<unix-seconds>".
let name = match opts.name {
Some(name) => name,
None => {
let time = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.map_err(|_| {
error::Error::new(io::ErrorKind::Other, "current time is unix epoch!")
})?;
format!("playground-{}", time.as_secs())
}
};
helpers::print_status("Creating", &name);
println!();
// `path` is reused mutably throughout: playground dir -> Cargo.toml ->
// (template lookup) -> src/main.rs. Watch the push/pop pairs below.
let mut path = helpers::get_dir();
path.push(&name); // Now represents the playground directory
// Scaffold the project; cargo's own output is suppressed.
if !Command::new("cargo")
.arg("new")
.arg(&path)
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()?
.success()
{
return Err(error::Error::new(
io::ErrorKind::Other,
"could not create cargo project",
));
}
path.push("Cargo.toml"); // Now represents path to Cargo.toml
let mut cargo_toml = fs::OpenOptions::new().append(true).open(&path)?;
if let Some(ref template) = opts.template {
// Spinner on a background thread; the atomic flag tells it to stop.
let stop = Arc::new(false.into());
let loader = helpers::loader("fetching examples", Arc::clone(&stop));
// The template crate is itself added as a dependency.
let dep = Dep::try_parse(&template)?;
writeln!(cargo_toml, "{}", dep)?;
cargo_toml.flush()?;
// `examples` stays a Result here on purpose: the spinner must be
// stopped before we either use the value or report the error.
let examples = Examples::find(&mut path, dep.dep_name)
.map_err(|e| error::Error::new(e.kind(), format!("couldn't get templates: {}", e)))?
.ok_or_else(|| {
error::Error::new(
io::ErrorKind::NotFound,
format!("couldn't find any templates for {}", dep.dep_name),
)
.with_help(
"templates are taken from the `examples` directory in a crate
check if the crate has an examples directory",
)
});
// only one writer, and reader doesn't care about the race, so its fine
stop.store(true, atomic::Ordering::Relaxed);
let _ = loader.join();
path.pop();
match examples {
Ok(examples) => {
// Interactive choice among the found examples; None = user aborted.
let example = examples.pick_one().map_err(|err| {
error::Error::new(
io::ErrorKind::Other,
format!("couldn't pick template: {}", err),
)
})?;
if example.is_none() {
return Ok(());
}
path.push("src");
path.push("main.rs");
fs::copy(&example.unwrap(), &path)?;
}
Err(err) => {
// Template lookup failed: offer to continue with a plain main.rs
// or to roll back the freshly created project.
eprintln!("{}", err);
match helpers::pick_from("Do you want to continue anyway?", &["Yes", "No"]) {
// Selected 'Yes'
Ok(Some(0)) => {}
_ => {
fs::remove_dir_all(path)?;
return Ok(());
}
}
}
}
}
// Append the user-requested dependencies after any template dependency.
for dep in opts.deps {
writeln!(cargo_toml, "{}", Dep::try_parse(&dep)?)?;
}
// Hand off to the `open` subcommand; skip_check because we just created it.
open::open(open::OpenOpts {
name,
gui: opts.gui,
no_w: opts.no_w,
skip_check: true,
editor_opts: opts.editor_opts,
})
}
/// A parsed dependency specification, borrowing from the CLI argument:
/// a crate name plus a version requirement (defaults to "*").
struct Dep<'a> {
dep_name: &'a str,
dep_ver: &'a str,
}
impl<'a> Dep<'a> {
fn try_parse(dep: &'a str) -> error::Result<Self> {
let mut parts = dep.split('=');
let dep_name = parts.next().unwrap().trim();
let dep_ver = parts.next().unwrap_or("*").trim();
if parts.next().is_some() {
return Err(error::Error::new(
io::ErrorKind::InvalidInput,
format!("dependency '{}' is in an incorrect format", dep),
)
.with_help("dependencies must either be '<dep-name>' or '<dep-name>=<dep-version>'"));
}
Ok(Self { dep_name, dep_ver })
}
}
impl fmt::Display for Dep<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{} = \"{}\"", self.dep_name, self.dep_ver)
}
}
|
// Copyright 2019. The Tari Project
// SPDX-License-Identifier: BSD-3-Clause
use crate::{
ristretto::{RistrettoPublicKey, RistrettoSecretKey},
signatures::{SchnorrSigChallenge, SchnorrSignature},
};
/// # A Schnorr signature implementation on Ristretto
///
/// Find out more about [Schnorr signatures](https://tlu.tarilabs.com/cryptography/digital_signatures/introduction.html).
///
/// `RistrettoSchnorr` utilises the [curve25519-dalek](https://github.com/dalek-cryptography/curve25519-dalek)
/// implementation of `ristretto255` to provide Schnorr signature functionality.
///
/// In short, a Schnorr sig is made up of the pair _(R, s)_, where _R_ is a public key (of a secret nonce) and _s_ is
/// the signature.
///
/// ## Creating signatures
///
/// You can create a `RistrettoSchnorr` from its component parts:
///
/// ```edition2018
/// # use tari_crypto::ristretto::*;
/// # use tari_crypto::keys::*;
/// # use tari_crypto::signatures::SchnorrSignature;
/// # use tari_utilities::ByteArray;
/// # use tari_utilities::hex::Hex;
///
/// let public_r = RistrettoPublicKey::from_hex(
///     "6a493210f7499cd17fecb510ae0cea23a110e8d5b901f8acadd3095c73a3b919",
/// )
/// .unwrap();
/// let s = RistrettoSecretKey::from_bytes(b"10000000000000000000000000000000").unwrap();
/// let sig = RistrettoSchnorr::new(public_r, s);
/// ```
///
/// or you can create a signature by signing a message:
///
/// ```rust
/// # use tari_crypto::ristretto::*;
/// # use tari_crypto::keys::*;
/// # use tari_crypto::signatures::SchnorrSignature;
/// # use digest::Digest;
/// # use rand::{Rng, thread_rng};
///
/// fn get_keypair() -> (RistrettoSecretKey, RistrettoPublicKey) {
///     let mut rng = rand::thread_rng();
///     let k = RistrettoSecretKey::random(&mut rng);
///     let pk = RistrettoPublicKey::from_secret_key(&k);
///     (k, pk)
/// }
///
/// #[allow(non_snake_case)]
/// let (k, P) = get_keypair();
/// let msg = "Small Gods";
/// let mut rng = thread_rng();
/// let sig = RistrettoSchnorr::sign_message(&k, &msg, &mut rng);
/// ```
///
/// ## Verifying signatures
///
/// Given a signature, (R,s) and a Challenge, e, you can verify that the signature is valid by calling the `verify`
/// method:
///
/// ```edition2018
/// # use tari_crypto::ristretto::*;
/// # use tari_crypto::keys::*;
/// # use tari_crypto::signatures::SchnorrSignature;
/// # use tari_utilities::hex::*;
/// # use tari_utilities::ByteArray;
/// # use digest::Digest;
/// # use rand::{Rng, thread_rng};
///
/// let msg = "Maskerade";
/// let k = RistrettoSecretKey::from_hex(
///     "bd0b253a619310340a4fa2de54cdd212eac7d088ee1dc47e305c3f6cbd020908",
/// )
/// .unwrap();
/// # #[allow(non_snake_case)]
/// let P = RistrettoPublicKey::from_secret_key(&k);
/// let mut rng = thread_rng();
/// let sig: SchnorrSignature<RistrettoPublicKey, RistrettoSecretKey> =
///     SchnorrSignature::sign_message(&k, msg, &mut rng).unwrap();
/// assert!(sig.verify_message(&P, msg));
/// ```
pub type RistrettoSchnorr = SchnorrSignature<RistrettoPublicKey, RistrettoSecretKey, SchnorrSigChallenge>;
/// # A Schnorr signature implementation on Ristretto with a custom domain separation tag
///
/// Usage is identical to [`RistrettoSchnorr`], except that you are able to specify the domain separation tag to use
/// when computing challenges for the signature.
///
/// The generic parameter `H` is a hash-domain marker type, typically created with the `hash_domain!` macro as
/// shown in the example below.
///
/// ## Example
/// ```edition2018
/// # use tari_crypto::ristretto::*;
/// # use tari_crypto::keys::*;
/// # use tari_crypto::hash_domain;
/// # use tari_crypto::signatures::SchnorrSignature;
/// # use tari_utilities::hex::*;
/// # use rand::{Rng, thread_rng};
/// # use tari_utilities::ByteArray;
/// # use digest::Digest;
///
/// hash_domain!(MyCustomDomain, "com.example.custom");
///
/// let msg = "Maskerade";
/// let k = RistrettoSecretKey::from_hex(
///     "bd0b253a619310340a4fa2de54cdd212eac7d088ee1dc47e305c3f6cbd020908",
/// )
/// .unwrap();
/// # #[allow(non_snake_case)]
/// let P = RistrettoPublicKey::from_secret_key(&k);
/// let mut rng = thread_rng();
/// let sig: SchnorrSignature<RistrettoPublicKey, RistrettoSecretKey, MyCustomDomain> =
///     SchnorrSignature::sign_message(&k, msg, &mut rng).unwrap();
/// assert!(sig.verify_message(&P, msg));
/// ```
pub type RistrettoSchnorrWithDomain<H> = SchnorrSignature<RistrettoPublicKey, RistrettoSecretKey, H>;
#[cfg(test)]
mod test {
    use blake2::Blake2b;
    use digest::{consts::U32, Digest};
    use tari_utilities::{
        hex::{from_hex, to_hex, Hex},
        ByteArray,
    };

    use crate::{
        hash_domain,
        keys::{PublicKey, SecretKey},
        ristretto::{
            ristretto_sig::RistrettoSchnorrWithDomain,
            RistrettoPublicKey,
            RistrettoSchnorr,
            RistrettoSecretKey,
        },
        signatures::{SchnorrSigChallenge, SchnorrSignature},
    };

    /// A default signature is the pair of default key values (nonce and scalar).
    #[test]
    fn default() {
        let sig = RistrettoSchnorr::default();
        assert_eq!(sig.get_signature(), &RistrettoSecretKey::default());
        assert_eq!(sig.get_public_nonce(), &RistrettoPublicKey::default());
    }

    /// Create a signature, and then verify it. Also checks that some invalid signatures fail to verify
    #[test]
    #[allow(non_snake_case)]
    fn raw_sign_and_verify_challenge() {
        let mut rng = rand::thread_rng();
        let (k, P) = RistrettoPublicKey::random_keypair(&mut rng);
        let (r, R) = RistrettoPublicKey::random_keypair(&mut rng);
        // Use sign raw, and bind the nonce and public key manually
        let e = Blake2b::<U32>::new()
            .chain_update(P.as_bytes())
            .chain_update(R.as_bytes())
            .chain_update(b"Small Gods")
            .finalize();
        let e_key = RistrettoSecretKey::from_bytes(&e).unwrap();
        // Compute the Schnorr response s = r + e·k by hand for comparison.
        let s = &r + &e_key * &k;
        let sig = RistrettoSchnorr::sign_raw(&k, r, &e).unwrap();
        let R_calc = sig.get_public_nonce();
        assert_eq!(R, *R_calc);
        assert_eq!(sig.get_signature(), &s);
        assert!(sig.verify_challenge(&P, &e));
        // Doesn't work for invalid credentials
        assert!(!sig.verify_challenge(&R, &e));
        // Doesn't work for different challenge
        let wrong_challenge = Blake2b::<U32>::digest(b"Guards! Guards!");
        assert!(!sig.verify_challenge(&P, &wrong_challenge));
    }

    /// This test checks that the linearity of Schnorr signatures hold, i.e. that s = s1 + s2 is validated by R1 + R2
    /// and P1 + P2. We do this by hand here rather than using the APIs to guard against regressions
    #[test]
    #[allow(non_snake_case)]
    fn test_signature_addition() {
        let mut rng = rand::thread_rng();
        // Alice and Bob generate some keys and nonces
        let (k1, P1) = RistrettoPublicKey::random_keypair(&mut rng);
        let (r1, R1) = RistrettoPublicKey::random_keypair(&mut rng);
        let (k2, P2) = RistrettoPublicKey::random_keypair(&mut rng);
        let (r2, R2) = RistrettoPublicKey::random_keypair(&mut rng);
        // Each of them creates the Challenge = H(R1 || R2 || P1 || P2 || m)
        let e = Blake2b::<U32>::new()
            .chain_update(R1.as_bytes())
            .chain_update(R2.as_bytes())
            .chain_update(P1.as_bytes())
            .chain_update(P2.as_bytes())
            .chain_update(b"Moving Pictures")
            .finalize();
        // Calculate Alice's signature
        let s1 = RistrettoSchnorr::sign_raw(&k1, r1, &e).unwrap();
        // Calculate Bob's signature
        let s2 = RistrettoSchnorr::sign_raw(&k2, r2, &e).unwrap();
        // Now add the two signatures together
        let s_agg = &s1 + &s2;
        // Check that the multi-sig verifies
        assert!(s_agg.verify_challenge(&(P1 + P2), &e));
    }

    /// Challenge bytes may encode an integer larger than the scalar group order. This test checks that such
    /// hashed messages can still be signed, presumably via modular reduction of the challenge value when it
    /// is converted to a scalar — `sign_raw` must not reject them.
    #[test]
    #[allow(non_snake_case)]
    fn challenge_from_invalid_scalar() {
        let mut rng = rand::thread_rng();
        // 32 bytes of 0xFF: larger than any canonical scalar encoding.
        let m = from_hex("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff").unwrap();
        let k = RistrettoSecretKey::random(&mut rng);
        let r = RistrettoSecretKey::random(&mut rng);
        assert!(RistrettoSchnorr::sign_raw(&k, r, &m).is_ok());
    }

    #[test]
    #[allow(non_snake_case)]
    fn domain_separated_challenge() {
        let P =
            RistrettoPublicKey::from_hex("74896a30c89186b8194e25f8c1382f8d3081c5a182fb8f8a6d34f27fbefbfc70").unwrap();
        let R =
            RistrettoPublicKey::from_hex("fa14cb581ce5717248444721242e6b195a482d503a853dea4acb513074d8d803").unwrap();
        let msg = "Moving Pictures";
        let hash = SchnorrSignature::<_, _, SchnorrSigChallenge>::construct_domain_separated_challenge::<_, Blake2b<U32>>(
            &R, &P, msg,
        );
        // A naive (non-domain-separated) hash over the same data must differ from
        // the domain-separated challenge.
        let naiive = Blake2b::<U32>::new()
            .chain_update(R.as_bytes())
            .chain_update(P.as_bytes())
            .chain_update(msg)
            .finalize()
            .to_vec();
        assert_ne!(hash.as_ref(), naiive.as_bytes());
        // Pinned digest guards against accidental changes to the domain tag.
        assert_eq!(
            to_hex(hash.as_ref()),
            "d8f6b29b641113c91175b8d44f265ff1167d58d5aa5ee03e6f1f521505b09d80"
        );
    }

    #[test]
    #[allow(non_snake_case)]
    fn custom_hash_domain() {
        hash_domain!(TestDomain, "test.signature.com");
        let mut rng = rand::thread_rng();
        let (k, P) = RistrettoPublicKey::random_keypair(&mut rng);
        let (r, _) = RistrettoPublicKey::random_keypair(&mut rng);
        let msg = "Moving Pictures";
        // Using default domain
        // NEVER re-use nonces in practice. This is done here explicitly to indicate that the domain separation
        // prevents accidental signature duplication.
        let sig1 = RistrettoSchnorr::sign_with_nonce_and_message(&k, r.clone(), msg).unwrap();
        // Using custom domain
        let sig2 = RistrettoSchnorrWithDomain::<TestDomain>::sign_with_nonce_and_message(&k, r, msg).unwrap();
        // The type system won't even let this compile :)
        // assert_ne!(sig1, sig2);
        // Prove that the nonces were reused. Again, NEVER do this
        assert_eq!(sig1.get_public_nonce(), sig2.get_public_nonce());
        assert!(sig1.verify_message(&P, msg));
        assert!(sig2.verify_message(&P, msg));
        // But the signatures are different, for the same message, secret and nonce.
        assert_ne!(sig1.get_signature(), sig2.get_signature());
    }

    #[test]
    #[allow(non_snake_case)]
    fn sign_and_verify_message() {
        let mut rng = rand::thread_rng();
        let (k, P) = RistrettoPublicKey::random_keypair(&mut rng);
        let sig =
            RistrettoSchnorr::sign_message(&k, "Queues are things that happen to other people", &mut rng).unwrap();
        assert!(sig.verify_message(&P, "Queues are things that happen to other people"));
        // A different message must not verify...
        assert!(!sig.verify_message(&P, "Qs are things that happen to other people"));
        // ...and neither must a different public key.
        assert!(!sig.verify_message(&(&P + &P), "Queues are things that happen to other people"));
    }
}
|
#![allow(dead_code)]
extern crate reqwest;
/// Days of the week, used to demonstrate methods on enums.
enum Day {
    Monday,
    Tuesday,
    Wednesday,
    Thursday,
    Friday,
    Saturday,
    Sunday,
}

impl Day {
    /// Returns `true` for Monday through Friday, `false` for the weekend.
    fn is_weekday(&self) -> bool {
        !matches!(self, Day::Saturday | Day::Sunday)
    }
}
// CLI
use std::process::Command;
// Demonstrates `Option` matching, blocking HTTP requests via `reqwest`,
// enum methods, and spawning a CLI process.
//
// NOTE(review): the `reqwest` calls use an old blocking API
// (`reqwest::get(..)` returning `Result` directly, `StatusCode::Ok` variant);
// confirm the pinned crate version before upgrading.
fn main() {
    // Option enum: `chars().nth(8)` yields None when the name is too short.
    let name = String::from("Domenic");
    println!("character at index 8: {}", match name.chars().nth(8) {
        Some(c) => c.to_string(),
        None => "No character at index 8!".to_string()
    });
    println!("Occupation is {}", match get_occupation("Domenic") {
        Some(o) => o,
        None => "No occupation"
    });

    // HTTP: fetch a sample TODO and print its body only on a 200 response.
    match reqwest::get("https://jsonplaceholder.typicode.com/todos/1") {
        Ok(mut response) => {
            // check if 200 ok
            if response.status() == reqwest::StatusCode::Ok {
                match response.text() {
                    Ok(text) => println!("Response text: {}", text),
                    Err(_) => println!("Could not read response text")
                }
            } else {
                println!("Response was not 200 OK.")
            }
        },
        Err(_) => println!("Could not make the request!")
    };

    // Same request, but panicking on failure instead of matching.
    let response_text = reqwest
        ::get("https://jsonplaceholder.typicode.com/todos/1")
        .expect("Could not make request")
        .text().expect("Could not read response text");
    println!("response_text is {}", response_text);

    // Enum method demo.
    let dday = Day::Tuesday;
    let d2 = Day::Saturday;
    println!("Is d a weekday? {}", dday.is_weekday());
    println!("Is d2 a weekday? {}", d2.is_weekday());

    // CLI: run `ls -la` and print its captured stdout.
    let mut cmd = Command::new("ls");
    cmd.arg("-la");
    // Execute the command
    match cmd.output() {
        Ok(o) => {
            // `from_utf8_lossy` replaces invalid UTF-8 with U+FFFD instead of
            // invoking undefined behavior as the previous unsafe
            // `String::from_utf8_unchecked` could on non-UTF-8 output.
            println!("Output CLI: {}", String::from_utf8_lossy(&o.stdout));
        },
        Err(e) => {
            println!("error cli: {}", e);
        }
    }
}
/// Looks up the occupation of a known person; returns `None` for anyone else.
fn get_occupation(name: &str) -> Option<&str> {
    if name == "Domenic" {
        Some("Software Developer")
    } else if name == "Michael" {
        Some("Dentist")
    } else {
        None
    }
}
/// Entry point: normalises a sample set of fractions to a common
/// denominator and prints the result (no trailing newline, as before).
fn main() {
    let converted = convert_fracts(vec![(690, 1300), (87, 1310), (30, 40)]);
    print!("{:?}", converted);
}
/// Returns `true` when `denom` is divisible by every value in `l`.
/// An empty slice is trivially satisfied.
fn is_common_denominator(l: &[i64], denom: i64) -> bool {
    l.iter().all(|&t| denom % t == 0)
}
/// Returns the greatest common divisor of `l` and `k` (Euclid's algorithm),
/// used to reduce a fraction `l / k` to lowest terms.
///
/// The previous trial-division loop only searched up to `k / 2` (via a
/// pointless `2 as f64 as i64` cast), so it missed the case where `k`
/// itself divides `l` — e.g. it returned 1 for (4, 2) instead of 2.
/// Inputs are expected to be positive.
fn get_max_denominator(l: i64, k: i64) -> i64 {
    let (mut a, mut b) = (l, k);
    while b != 0 {
        let r = a % b;
        a = b;
        b = r;
    }
    a
}
/// Re-expresses every fraction in `l` over one common denominator.
///
/// Each fraction is first reduced via `get_max_denominator`, then the
/// smallest multiple of the seed denominator that all reduced denominators
/// divide is used as the common denominator.
///
/// NOTE: the search is deliberately seeded from the *unreduced* first
/// denominator `l[0].1` (not the reduced one), matching the original
/// behavior exactly. Panics on an empty input, as before.
fn convert_fracts(l: Vec<(i64, i64)>) -> Vec<(i64, i64)> {
    let mut common_base = l[0].1;
    // Reduce every fraction to lowest terms.
    let reduced: Vec<(i64, i64)> = l
        .iter()
        .map(|&(num, den)| {
            let g = get_max_denominator(num, den);
            (num / g, den / g)
        })
        .collect();
    // Collect the reduced denominators and raise the seed to the largest one.
    let mut denoms = Vec::with_capacity(reduced.len());
    for &(_, den) in &reduced {
        if den > common_base {
            common_base = den;
        }
        denoms.push(den);
    }
    // Smallest multiple of the seed divisible by every denominator.
    let mut mult = 1;
    while !is_common_denominator(&denoms, common_base * mult) {
        mult += 1;
    }
    let common = common_base * mult;
    // Scale each numerator up to the shared denominator.
    reduced
        .iter()
        .map(|&(num, den)| (num * common / den, common))
        .collect()
}
// Fixed cases from the kata: both the raw and the pre-scaled inputs must
// normalise to the same common denominator, 34060 = lcm(130, 1310, 4).
#[test]
fn basics_convert_fracts() {
    assert_eq!(convert_fracts(vec![(69, 130), (87, 1310), (3, 4)]), vec![(18078, 34060), (2262, 34060), (25545, 34060)]);
    assert_eq!(convert_fracts(vec![(690, 1300), (87, 1310), (30, 40)]), vec![(18078, 34060), (2262, 34060), (25545, 34060)]);
}
|
#[doc = "Reader of register CCR"]
pub type R = crate::R<u32, super::CCR>;
#[doc = "Reader of field `STKALIGN`"]
pub type STKALIGN_R = crate::R<bool, bool>;
#[doc = "Reader of field `UNALIGN_TRP`"]
pub type UNALIGN_TRP_R = crate::R<bool, bool>;
impl R {
#[doc = "Bit 9 - Always reads as one, indicates 8-byte stack alignment on exception entry. On exception entry, the processor uses bit\\[9\\]
of the stacked PSR to indicate the stack alignment. On return from the exception it uses this stacked bit to restore the correct stack alignment."]
#[inline(always)]
pub fn stkalign(&self) -> STKALIGN_R {
STKALIGN_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 3 - Always reads as one, indicates that all unaligned accesses generate a HardFault."]
#[inline(always)]
pub fn unalign_trp(&self) -> UNALIGN_TRP_R {
UNALIGN_TRP_R::new(((self.bits >> 3) & 0x01) != 0)
}
}
|
#[doc = "Register `CSR` reader"]
pub type R = crate::R<CSR_SPEC>;
#[doc = "Register `CSR` writer"]
pub type W = crate::W<CSR_SPEC>;
#[doc = "Field `LSION` reader - Internal low-speed oscillator enable"]
pub type LSION_R = crate::BitReader;
#[doc = "Field `LSION` writer - Internal low-speed oscillator enable"]
pub type LSION_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LSIRDY` reader - Internal low-speed oscillator ready"]
pub type LSIRDY_R = crate::BitReader;
#[doc = "Field `LSEON` reader - External low-speed oscillator enable"]
pub type LSEON_R = crate::BitReader;
#[doc = "Field `LSEON` writer - External low-speed oscillator enable"]
pub type LSEON_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LSERDY` reader - External low-speed oscillator ready"]
pub type LSERDY_R = crate::BitReader;
#[doc = "Field `LSEBYP` reader - External low-speed oscillator bypass"]
pub type LSEBYP_R = crate::BitReader;
#[doc = "Field `LSEBYP` writer - External low-speed oscillator bypass"]
pub type LSEBYP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LSECSSON` reader - CSS on LSE enable"]
pub type LSECSSON_R = crate::BitReader;
#[doc = "Field `LSECSSON` writer - CSS on LSE enable"]
pub type LSECSSON_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LSECSSD` reader - CSS on LSE failure Detection"]
pub type LSECSSD_R = crate::BitReader;
#[doc = "Field `LSECSSD` writer - CSS on LSE failure Detection"]
pub type LSECSSD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RTCSEL` reader - RTC and LCD clock source selection"]
pub type RTCSEL_R = crate::FieldReader;
#[doc = "Field `RTCSEL` writer - RTC and LCD clock source selection"]
pub type RTCSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `RTCEN` reader - RTC clock enable"]
pub type RTCEN_R = crate::BitReader;
#[doc = "Field `RTCEN` writer - RTC clock enable"]
pub type RTCEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RTCRST` reader - RTC software reset"]
pub type RTCRST_R = crate::BitReader<RTCRSTW_A>;
#[doc = "RTC software reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RTCRSTW_A {
#[doc = "1: Resets the RTC peripheral"]
Reset = 1,
}
impl From<RTCRSTW_A> for bool {
#[inline(always)]
fn from(variant: RTCRSTW_A) -> Self {
variant as u8 != 0
}
}
impl RTCRST_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<RTCRSTW_A> {
match self.bits {
true => Some(RTCRSTW_A::Reset),
_ => None,
}
}
#[doc = "Resets the RTC peripheral"]
#[inline(always)]
pub fn is_reset(&self) -> bool {
*self == RTCRSTW_A::Reset
}
}
#[doc = "Field `RTCRST` writer - RTC software reset"]
pub type RTCRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RTCRSTW_A>;
impl<'a, REG, const O: u8> RTCRST_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Resets the RTC peripheral"]
#[inline(always)]
pub fn reset(self) -> &'a mut crate::W<REG> {
self.variant(RTCRSTW_A::Reset)
}
}
#[doc = "Field `RMVF` reader - Remove reset flag"]
pub type RMVF_R = crate::BitReader<RMVFW_A>;
#[doc = "Remove reset flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RMVFW_A {
#[doc = "1: Clears the reset flag"]
Clear = 1,
}
impl From<RMVFW_A> for bool {
#[inline(always)]
fn from(variant: RMVFW_A) -> Self {
variant as u8 != 0
}
}
impl RMVF_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<RMVFW_A> {
match self.bits {
true => Some(RMVFW_A::Clear),
_ => None,
}
}
#[doc = "Clears the reset flag"]
#[inline(always)]
pub fn is_clear(&self) -> bool {
*self == RMVFW_A::Clear
}
}
#[doc = "Field `RMVF` writer - Remove reset flag"]
pub type RMVF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RMVFW_A>;
impl<'a, REG, const O: u8> RMVF_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Clears the reset flag"]
#[inline(always)]
pub fn clear(self) -> &'a mut crate::W<REG> {
self.variant(RMVFW_A::Clear)
}
}
#[doc = "Field `OBLRSTF` reader - Options bytes loading reset flag"]
pub type OBLRSTF_R = crate::BitReader<OBLRSTFR_A>;
#[doc = "Options bytes loading reset flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OBLRSTFR_A {
#[doc = "0: No reset has occured"]
NoReset = 0,
#[doc = "1: A reset has occured"]
Reset = 1,
}
impl From<OBLRSTFR_A> for bool {
#[inline(always)]
fn from(variant: OBLRSTFR_A) -> Self {
variant as u8 != 0
}
}
impl OBLRSTF_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OBLRSTFR_A {
match self.bits {
false => OBLRSTFR_A::NoReset,
true => OBLRSTFR_A::Reset,
}
}
#[doc = "No reset has occured"]
#[inline(always)]
pub fn is_no_reset(&self) -> bool {
*self == OBLRSTFR_A::NoReset
}
#[doc = "A reset has occured"]
#[inline(always)]
pub fn is_reset(&self) -> bool {
*self == OBLRSTFR_A::Reset
}
}
#[doc = "Field `OBLRSTF` writer - Options bytes loading reset flag"]
pub type OBLRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OBLRSTFR_A>;
impl<'a, REG, const O: u8> OBLRSTF_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "No reset has occured"]
#[inline(always)]
pub fn no_reset(self) -> &'a mut crate::W<REG> {
self.variant(OBLRSTFR_A::NoReset)
}
#[doc = "A reset has occured"]
#[inline(always)]
pub fn reset(self) -> &'a mut crate::W<REG> {
self.variant(OBLRSTFR_A::Reset)
}
}
#[doc = "Field `PINRSTF` reader - PIN reset flag"]
pub use OBLRSTF_R as PINRSTF_R;
#[doc = "Field `PORRSTF` reader - POR/PDR reset flag"]
pub use OBLRSTF_R as PORRSTF_R;
#[doc = "Field `SFTRSTF` reader - Software reset flag"]
pub use OBLRSTF_R as SFTRSTF_R;
#[doc = "Field `IWDGRSTF` reader - Independent watchdog reset flag"]
pub use OBLRSTF_R as IWDGRSTF_R;
#[doc = "Field `WWDGRSTF` reader - Window watchdog reset flag"]
pub use OBLRSTF_R as WWDGRSTF_R;
#[doc = "Field `LPWRSTF` reader - Low-power reset flag"]
pub use OBLRSTF_R as LPWRSTF_R;
#[doc = "Field `PINRSTF` writer - PIN reset flag"]
pub use OBLRSTF_W as PINRSTF_W;
#[doc = "Field `PORRSTF` writer - POR/PDR reset flag"]
pub use OBLRSTF_W as PORRSTF_W;
#[doc = "Field `SFTRSTF` writer - Software reset flag"]
pub use OBLRSTF_W as SFTRSTF_W;
#[doc = "Field `IWDGRSTF` writer - Independent watchdog reset flag"]
pub use OBLRSTF_W as IWDGRSTF_W;
#[doc = "Field `WWDGRSTF` writer - Window watchdog reset flag"]
pub use OBLRSTF_W as WWDGRSTF_W;
#[doc = "Field `LPWRSTF` writer - Low-power reset flag"]
pub use OBLRSTF_W as LPWRSTF_W;
impl R {
#[doc = "Bit 0 - Internal low-speed oscillator enable"]
#[inline(always)]
pub fn lsion(&self) -> LSION_R {
LSION_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - Internal low-speed oscillator ready"]
#[inline(always)]
pub fn lsirdy(&self) -> LSIRDY_R {
LSIRDY_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 8 - External low-speed oscillator enable"]
#[inline(always)]
pub fn lseon(&self) -> LSEON_R {
LSEON_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - External low-speed oscillator ready"]
#[inline(always)]
pub fn lserdy(&self) -> LSERDY_R {
LSERDY_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - External low-speed oscillator bypass"]
#[inline(always)]
pub fn lsebyp(&self) -> LSEBYP_R {
LSEBYP_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bit 11 - CSS on LSE enable"]
#[inline(always)]
pub fn lsecsson(&self) -> LSECSSON_R {
LSECSSON_R::new(((self.bits >> 11) & 1) != 0)
}
#[doc = "Bit 12 - CSS on LSE failure Detection"]
#[inline(always)]
pub fn lsecssd(&self) -> LSECSSD_R {
LSECSSD_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bits 16:17 - RTC and LCD clock source selection"]
#[inline(always)]
pub fn rtcsel(&self) -> RTCSEL_R {
RTCSEL_R::new(((self.bits >> 16) & 3) as u8)
}
#[doc = "Bit 22 - RTC clock enable"]
#[inline(always)]
pub fn rtcen(&self) -> RTCEN_R {
RTCEN_R::new(((self.bits >> 22) & 1) != 0)
}
#[doc = "Bit 23 - RTC software reset"]
#[inline(always)]
pub fn rtcrst(&self) -> RTCRST_R {
RTCRST_R::new(((self.bits >> 23) & 1) != 0)
}
#[doc = "Bit 24 - Remove reset flag"]
#[inline(always)]
pub fn rmvf(&self) -> RMVF_R {
RMVF_R::new(((self.bits >> 24) & 1) != 0)
}
#[doc = "Bit 25 - Options bytes loading reset flag"]
#[inline(always)]
pub fn oblrstf(&self) -> OBLRSTF_R {
OBLRSTF_R::new(((self.bits >> 25) & 1) != 0)
}
#[doc = "Bit 26 - PIN reset flag"]
#[inline(always)]
pub fn pinrstf(&self) -> PINRSTF_R {
PINRSTF_R::new(((self.bits >> 26) & 1) != 0)
}
#[doc = "Bit 27 - POR/PDR reset flag"]
#[inline(always)]
pub fn porrstf(&self) -> PORRSTF_R {
PORRSTF_R::new(((self.bits >> 27) & 1) != 0)
}
#[doc = "Bit 28 - Software reset flag"]
#[inline(always)]
pub fn sftrstf(&self) -> SFTRSTF_R {
SFTRSTF_R::new(((self.bits >> 28) & 1) != 0)
}
#[doc = "Bit 29 - Independent watchdog reset flag"]
#[inline(always)]
pub fn iwdgrstf(&self) -> IWDGRSTF_R {
IWDGRSTF_R::new(((self.bits >> 29) & 1) != 0)
}
#[doc = "Bit 30 - Window watchdog reset flag"]
#[inline(always)]
pub fn wwdgrstf(&self) -> WWDGRSTF_R {
WWDGRSTF_R::new(((self.bits >> 30) & 1) != 0)
}
#[doc = "Bit 31 - Low-power reset flag"]
#[inline(always)]
pub fn lpwrstf(&self) -> LPWRSTF_R {
LPWRSTF_R::new(((self.bits >> 31) & 1) != 0)
}
}
impl W {
#[doc = "Bit 0 - Internal low-speed oscillator enable"]
#[inline(always)]
#[must_use]
pub fn lsion(&mut self) -> LSION_W<CSR_SPEC, 0> {
LSION_W::new(self)
}
#[doc = "Bit 8 - External low-speed oscillator enable"]
#[inline(always)]
#[must_use]
pub fn lseon(&mut self) -> LSEON_W<CSR_SPEC, 8> {
LSEON_W::new(self)
}
#[doc = "Bit 10 - External low-speed oscillator bypass"]
#[inline(always)]
#[must_use]
pub fn lsebyp(&mut self) -> LSEBYP_W<CSR_SPEC, 10> {
LSEBYP_W::new(self)
}
#[doc = "Bit 11 - CSS on LSE enable"]
#[inline(always)]
#[must_use]
pub fn lsecsson(&mut self) -> LSECSSON_W<CSR_SPEC, 11> {
LSECSSON_W::new(self)
}
#[doc = "Bit 12 - CSS on LSE failure Detection"]
#[inline(always)]
#[must_use]
pub fn lsecssd(&mut self) -> LSECSSD_W<CSR_SPEC, 12> {
LSECSSD_W::new(self)
}
#[doc = "Bits 16:17 - RTC and LCD clock source selection"]
#[inline(always)]
#[must_use]
pub fn rtcsel(&mut self) -> RTCSEL_W<CSR_SPEC, 16> {
RTCSEL_W::new(self)
}
#[doc = "Bit 22 - RTC clock enable"]
#[inline(always)]
#[must_use]
pub fn rtcen(&mut self) -> RTCEN_W<CSR_SPEC, 22> {
RTCEN_W::new(self)
}
#[doc = "Bit 23 - RTC software reset"]
#[inline(always)]
#[must_use]
pub fn rtcrst(&mut self) -> RTCRST_W<CSR_SPEC, 23> {
RTCRST_W::new(self)
}
#[doc = "Bit 24 - Remove reset flag"]
#[inline(always)]
#[must_use]
pub fn rmvf(&mut self) -> RMVF_W<CSR_SPEC, 24> {
RMVF_W::new(self)
}
#[doc = "Bit 25 - Options bytes loading reset flag"]
#[inline(always)]
#[must_use]
pub fn oblrstf(&mut self) -> OBLRSTF_W<CSR_SPEC, 25> {
OBLRSTF_W::new(self)
}
#[doc = "Bit 26 - PIN reset flag"]
#[inline(always)]
#[must_use]
pub fn pinrstf(&mut self) -> PINRSTF_W<CSR_SPEC, 26> {
PINRSTF_W::new(self)
}
#[doc = "Bit 27 - POR/PDR reset flag"]
#[inline(always)]
#[must_use]
pub fn porrstf(&mut self) -> PORRSTF_W<CSR_SPEC, 27> {
PORRSTF_W::new(self)
}
#[doc = "Bit 28 - Software reset flag"]
#[inline(always)]
#[must_use]
pub fn sftrstf(&mut self) -> SFTRSTF_W<CSR_SPEC, 28> {
SFTRSTF_W::new(self)
}
#[doc = "Bit 29 - Independent watchdog reset flag"]
#[inline(always)]
#[must_use]
pub fn iwdgrstf(&mut self) -> IWDGRSTF_W<CSR_SPEC, 29> {
IWDGRSTF_W::new(self)
}
#[doc = "Bit 30 - Window watchdog reset flag"]
#[inline(always)]
#[must_use]
pub fn wwdgrstf(&mut self) -> WWDGRSTF_W<CSR_SPEC, 30> {
WWDGRSTF_W::new(self)
}
#[doc = "Bit 31 - Low-power reset flag"]
#[inline(always)]
#[must_use]
pub fn lpwrstf(&mut self) -> LPWRSTF_W<CSR_SPEC, 31> {
LPWRSTF_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "Control/status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CSR_SPEC;
impl crate::RegisterSpec for CSR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`csr::R`](R) reader structure"]
impl crate::Readable for CSR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`csr::W`](W) writer structure"]
impl crate::Writable for CSR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CSR to value 0"]
impl crate::Resettable for CSR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use criterion::{criterion_group, criterion_main};
use criterion::{BenchmarkId, Criterion};
#[cfg(unix)]
use pprof::criterion::{Output, PProfProfiler};
use ppp::v2;
/// Builds the byte layout of a v2 binary proxy-protocol header for an IPv6
/// connection, as parsed by `ppp::v2::Header::try_from` in the benchmarks:
/// 12-byte signature, version/command 0x21, family/protocol 0x21, declared
/// length 45 (16 + 16 address bytes, 2 + 2 port bytes, 4 + 5 TLV bytes).
fn ipv6_input() -> Vec<u8> {
    let prefix = b"\r\n\r\n\0\r\nQUIT\n";
    // 12-byte signature + 4 header bytes + 45-byte payload = 61 bytes total.
    // The old code reserved only `prefix.len()`, forcing reallocations while
    // building a benchmark fixture.
    let mut input: Vec<u8> = Vec::with_capacity(61);
    input.extend_from_slice(prefix);
    input.push(0x21);
    input.push(0x21);
    input.extend(&[0, 45]);
    input.extend(&[
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        0xFF,
    ]);
    input.extend(&[
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        0xF1,
    ]);
    input.extend(&[0, 80]);
    input.extend(&[1, 187]);
    input.extend(&[1, 0, 1, 5]);
    input.extend(&[2, 0, 2, 5, 5]);
    input
}
/// Builds the byte layout of a v2 binary proxy-protocol header for an IPv4
/// connection, as parsed by `ppp::v2::Header::try_from` in the benchmarks:
/// 12-byte signature, version/command 0x21, family/protocol 0x11, declared
/// length 26 (4 + 4 address bytes, 2 + 2 port bytes, two TLVs of 4 + 5 and
/// 5 bytes — the repeated TLV is intentional and accounted for in the 26).
fn ipv4_input() -> Vec<u8> {
    let prefix = b"\r\n\r\n\0\r\nQUIT\n";
    // 12-byte signature + 4 header bytes + 26-byte payload = 42 bytes total.
    // The old code reserved only `prefix.len()`, forcing reallocations while
    // building a benchmark fixture.
    let mut input: Vec<u8> = Vec::with_capacity(42);
    input.extend_from_slice(prefix);
    input.push(0x21);
    input.push(0x11);
    input.extend(&[0, 26]);
    input.extend(&[127, 0, 0, 1]);
    input.extend(&[198, 168, 1, 1]);
    input.extend(&[0, 80]);
    input.extend(&[1, 187]);
    input.extend(&[1, 0, 1, 5]);
    input.extend(&[2, 0, 2, 5, 5]);
    input.extend(&[2, 0, 2, 5, 5]);
    input
}
/// Criterion benchmark suite for the `ppp` v2 (binary) codec: header
/// parsing, serialization, and builder-based construction.
fn benchmarks(c: &mut Criterion) {
    let mut group = c.benchmark_group("PPP Binary");
    // NOTE(review): `ipv6_input()` also appends TLV bytes, so the
    // "IPv6 without TLVs" label below looks stale — confirm.
    let inputs = [
        ("IPv4 with TLVs", ipv4_input()),
        ("IPv6 without TLVs", ipv6_input()),
    ];

    for (id, input) in inputs {
        // Parse benchmark: raw bytes -> v2::Header.
        group.bench_with_input(
            BenchmarkId::new("v2::Header::try_from", id),
            input.as_slice(),
            |b, i| {
                b.iter(|| v2::Header::try_from(i).unwrap());
            },
        );
        // Serialization benchmark: parsed header -> bytes.
        group.bench_with_input(
            BenchmarkId::new("v2::Header::as_bytes", id),
            &v2::Header::try_from(input.as_slice()).unwrap(),
            |b, h| {
                b.iter(|| h.as_bytes());
            },
        );
    }

    // Shared IPv6 address pair (ports 80 -> 443) for the builder benchmarks.
    let source_address = [
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        0xF2,
    ];
    let destination_address = [
        0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
        0xF1,
    ];
    let addresses =
        v2::Addresses::IPv6(v2::IPv6::new(source_address, destination_address, 80, 443));

    // Builder benchmark: addresses written as a payload, TLVs appended one by one.
    group.bench_with_input(
        BenchmarkId::new("v2::Builder::build", "IPv6 with TLVs"),
        &addresses,
        |b, a| {
            b.iter(|| {
                v2::Builder::new(
                    v2::Version::Two | v2::Command::Local,
                    v2::AddressFamily::IPv6 | v2::Protocol::Unspecified,
                )
                .write_payload(a)
                .unwrap()
                .write_payloads(vec![(v2::Type::NoOp, [0].as_slice())])
                .unwrap()
                .write_tlv(v2::Type::NoOp, [42].as_slice())
                .unwrap()
                .build()
                .unwrap()
            })
        },
    );
    // Builder benchmark: addresses supplied up front plus a capacity reservation.
    group.bench_with_input(
        BenchmarkId::new("v2::Builder::build", "IPv6 with TLVs with length"),
        &addresses,
        |b, &a| {
            b.iter(|| {
                v2::Builder::with_addresses(
                    v2::Version::Two | v2::Command::Local,
                    v2::Protocol::Unspecified,
                    a,
                )
                .reserve_capacity(8)
                .write_payloads([
                    (v2::Type::NoOp, [0].as_slice()),
                    (v2::Type::NoOp, [42].as_slice()),
                ])
                .unwrap()
                .build()
                .unwrap()
            })
        },
    );
    group.finish();
}
// On Unix, attach the pprof profiler (protobuf output, 100 Hz sampling)
// to Criterion so profiles are captured alongside the benchmarks.
#[cfg(unix)]
criterion_group! {
    name = benches;
    config = {
        Criterion::default().with_profiler(PProfProfiler::new(100, Output::Protobuf))
    };
    targets = benchmarks
}
// On non-Unix targets pprof is unavailable; use the default configuration.
#[cfg(not(unix))]
criterion_group!(benches, benchmarks);
criterion_main!(benches);
|
// Crate module declarations: each line exposes a sibling source file
// as a public module.
pub mod closure;
pub mod enums;
pub mod guess_game;
pub mod hello;
pub mod structs;
pub mod vectors;
pub mod warmy;
|
use crate::types::*;
/// Walks a `VkBaseOutStructure` `p_next` chain and returns a pointer to its
/// last node (the one whose `p_next` is null). If `s` itself has a null
/// `p_next`, `s` is returned unchanged.
///
/// # Safety
///
/// `s` must be a valid, non-null, readable pointer to a `VkBaseOutStructure`,
/// and every `p_next` link in the chain must likewise point to a valid
/// `VkBaseOutStructure` until a null terminator is reached.
pub(crate) unsafe fn get_last_base_out_struct_chain(
    mut s: *mut VkBaseOutStructure,
) -> *mut VkBaseOutStructure {
    while !(*s).p_next.is_null() {
        s = (*s).p_next as *mut VkBaseOutStructure;
    }
    s
}
|
#[doc = "Register `DR` reader"]
pub type R = crate::R<DR_SPEC>;
#[doc = "Register `DR` writer"]
pub type W = crate::W<DR_SPEC>;
#[doc = "Field `DU` reader - Date units in BCD format"]
pub type DU_R = crate::FieldReader;
#[doc = "Field `DU` writer - Date units in BCD format"]
pub type DU_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O>;
#[doc = "Field `DT` reader - Date tens in BCD format"]
pub type DT_R = crate::FieldReader;
#[doc = "Field `DT` writer - Date tens in BCD format"]
pub type DT_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O>;
#[doc = "Field `MU` reader - Month units in BCD format"]
pub type MU_R = crate::FieldReader;
#[doc = "Field `MU` writer - Month units in BCD format"]
pub type MU_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O>;
#[doc = "Field `MT` reader - Month tens in BCD format"]
pub type MT_R = crate::BitReader<MT_A>;
#[doc = "Month tens in BCD format\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MT_A {
#[doc = "0: Month tens is 0"]
Zero = 0,
#[doc = "1: Month tens is 1"]
One = 1,
}
impl From<MT_A> for bool {
#[inline(always)]
fn from(variant: MT_A) -> Self {
variant as u8 != 0
}
}
impl MT_R {
#[doc = "Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MT_A {
match self.bits {
false => MT_A::Zero,
true => MT_A::One,
}
}
#[doc = "Month tens is 0"]
#[inline(always)]
pub fn is_zero(&self) -> bool {
*self == MT_A::Zero
}
#[doc = "Month tens is 1"]
#[inline(always)]
pub fn is_one(&self) -> bool {
*self == MT_A::One
}
}
#[doc = "Field `MT` writer - Month tens in BCD format"]
pub type MT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MT_A>;
impl<'a, REG, const O: u8> MT_W<'a, REG, O>
where
REG: crate::Writable + crate::RegisterSpec,
{
#[doc = "Month tens is 0"]
#[inline(always)]
pub fn zero(self) -> &'a mut crate::W<REG> {
self.variant(MT_A::Zero)
}
#[doc = "Month tens is 1"]
#[inline(always)]
pub fn one(self) -> &'a mut crate::W<REG> {
self.variant(MT_A::One)
}
}
#[doc = "Field `WDU` reader - Week day units"]
pub type WDU_R = crate::FieldReader;
#[doc = "Field `WDU` writer - Week day units"]
pub type WDU_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `YU` reader - Year units in BCD format"]
pub type YU_R = crate::FieldReader;
#[doc = "Field `YU` writer - Year units in BCD format"]
pub type YU_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O>;
#[doc = "Field `YT` reader - Year tens in BCD format"]
pub type YT_R = crate::FieldReader;
#[doc = "Field `YT` writer - Year tens in BCD format"]
pub type YT_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 4, O>;
impl R {
#[doc = "Bits 0:3 - Date units in BCD format"]
#[inline(always)]
pub fn du(&self) -> DU_R {
DU_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bits 4:5 - Date tens in BCD format"]
#[inline(always)]
pub fn dt(&self) -> DT_R {
DT_R::new(((self.bits >> 4) & 3) as u8)
}
#[doc = "Bits 8:11 - Month units in BCD format"]
#[inline(always)]
pub fn mu(&self) -> MU_R {
MU_R::new(((self.bits >> 8) & 0x0f) as u8)
}
#[doc = "Bit 12 - Month tens in BCD format"]
#[inline(always)]
pub fn mt(&self) -> MT_R {
MT_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bits 13:15 - Week day units"]
#[inline(always)]
pub fn wdu(&self) -> WDU_R {
WDU_R::new(((self.bits >> 13) & 7) as u8)
}
#[doc = "Bits 16:19 - Year units in BCD format"]
#[inline(always)]
pub fn yu(&self) -> YU_R {
YU_R::new(((self.bits >> 16) & 0x0f) as u8)
}
#[doc = "Bits 20:23 - Year tens in BCD format"]
#[inline(always)]
pub fn yt(&self) -> YT_R {
YT_R::new(((self.bits >> 20) & 0x0f) as u8)
}
}
impl W {
#[doc = "Bits 0:3 - Date units in BCD format"]
#[inline(always)]
#[must_use]
pub fn du(&mut self) -> DU_W<DR_SPEC, 0> {
DU_W::new(self)
}
#[doc = "Bits 4:5 - Date tens in BCD format"]
#[inline(always)]
#[must_use]
pub fn dt(&mut self) -> DT_W<DR_SPEC, 4> {
DT_W::new(self)
}
#[doc = "Bits 8:11 - Month units in BCD format"]
#[inline(always)]
#[must_use]
pub fn mu(&mut self) -> MU_W<DR_SPEC, 8> {
MU_W::new(self)
}
#[doc = "Bit 12 - Month tens in BCD format"]
#[inline(always)]
#[must_use]
pub fn mt(&mut self) -> MT_W<DR_SPEC, 12> {
MT_W::new(self)
}
#[doc = "Bits 13:15 - Week day units"]
#[inline(always)]
#[must_use]
pub fn wdu(&mut self) -> WDU_W<DR_SPEC, 13> {
WDU_W::new(self)
}
#[doc = "Bits 16:19 - Year units in BCD format"]
#[inline(always)]
#[must_use]
pub fn yu(&mut self) -> YU_W<DR_SPEC, 16> {
YU_W::new(self)
}
#[doc = "Bits 20:23 - Year tens in BCD format"]
#[inline(always)]
#[must_use]
pub fn yt(&mut self) -> YT_W<DR_SPEC, 20> {
YT_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "The RTC_DR is the calendar date shadow register. This register must be written in initialization mode only. Refer to Calendar initialization and configuration on page9 and Reading the calendar on page10.This register is write protected. The write access procedure is described in RTC register write protection on page9.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type for RTC_DR: a 32-bit, readable, writable register with no
// write-1-to-clear/set fields and a reset value of 0x2101.
pub struct DR_SPEC;
impl crate::RegisterSpec for DR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`dr::R`](R) reader structure"]
impl crate::Readable for DR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dr::W`](W) writer structure"]
impl crate::Writable for DR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DR to value 0x2101"]
impl crate::Resettable for DR_SPEC {
const RESET_VALUE: Self::Ux = 0x2101;
}
|
use lc_render::{Chart, Color, LinearScale, PointType, ScatterView};
/// Renders a demo chart containing two scatter series and saves it as SVG.
fn main() {
    let width = 800;
    let height = 600;
    let margin_top = 90;
    let margin_bottom = 50;
    let margin_left = 60;
    let margin_right = 40;

    // Scales that map the data domain onto the drawable region inside the
    // margins; note the reversed pixel range for y.
    let x_scale = LinearScale::new(0.0, 200.0, 0, width - margin_left - margin_right);
    let y_scale = LinearScale::new(0.0, 100.0, height - margin_top - margin_bottom, 0);

    let first_points = vec![
        (20_f32, 90_f32),
        (12_f32, 54_f32),
        (25_f32, 70_f32),
        (33_f32, 40_f32),
    ];
    let second_points = vec![
        (120_f32, 10_f32),
        (143_f32, 34_f32),
        (170_f32, 14_f32),
        (190_f32, 13_f32),
    ];

    // First series: grey fill with black outline (default point type).
    let circles_view = ScatterView::new(x_scale.clone(), y_scale.clone())
        .set_point_fill_color(Color::new_from_hex("#808080"))
        .set_point_stroke_color(Color::new_from_hex("#000000"))
        .set_data(&first_points)
        .expect("setting data_1");
    // Second series: black squares with grey outline.
    let squares_view = ScatterView::new(x_scale.clone(), y_scale.clone())
        .set_point_fill_color(Color::new_from_hex("#000000"))
        .set_point_stroke_color(Color::new_from_hex("#808080"))
        .set_point_type(PointType::Square)
        .set_data(&second_points)
        .expect("setting data_2");

    let chart = Chart::new()
        .set_width(width)
        .set_height(height)
        .set_margin_top(margin_top)
        .set_margin_bottom(margin_bottom)
        .set_margin_left(margin_left)
        .set_margin_right(margin_right)
        .set_axis_bottom_linear(x_scale)
        .set_axis_left_linear(y_scale)
        .set_axis_bottom_label("X Values")
        .set_axis_left_label("Y Values")
        .set_title("Two Scatters Chart")
        .add_view(&circles_view)
        .add_view(&squares_view);

    chart
        .save("./examples/svg/two_scatters_chart.svg")
        .expect("saving ./examples/svg/two_scatters_chart.svg");
}
|
use rand::Rng;
use crate::data::{EOChar, EOInt, EOShort};
#[derive(Debug, Default)]
// Tracks the packet-sequence numbering the server shares with a client.
pub struct ServerSequencer {
// Base value added to the rolling counter in `gen_sequence`.
sequence_start: EOInt,
// Pending base chosen on ping, promoted to `sequence_start` on pong.
upcoming_sequence_start: EOInt,
// Rolling counter; wraps over the range 0..=9 (see `gen_sequence`).
sequence: EOInt,
}
impl ServerSequencer {
    /// Seeds the sequence start with a fresh random value in `[0, 240)`.
    pub fn init_new_sequence(&mut self) {
        self.sequence_start = rand::thread_rng().gen_range(0, 240);
    }

    /// Returns the current sequence-start value.
    pub fn get_sequence_start(&self) -> EOInt {
        self.sequence_start
    }

    /// Picks the next sequence start; it takes effect once the client
    /// answers and `pong_new_sequence` runs.
    pub fn ping_new_sequence(&mut self) {
        self.upcoming_sequence_start = rand::thread_rng().gen_range(0, 240);
    }

    /// Promotes the pending start chosen by `ping_new_sequence`.
    pub fn pong_new_sequence(&mut self) {
        self.sequence_start = self.upcoming_sequence_start;
    }

    /// Encodes the current start as the `(s1, s2)` pair sent at init,
    /// satisfying `sequence_start == s1 * 7 - 13 + s2`.
    pub fn get_init_sequence_bytes(&self) -> (EOShort, EOChar) {
        let start = self.sequence_start;
        let s1_upper = (start + 13) / 7;
        // Lower bound keeps the derived s2 within byte range.
        let s1_lower = std::cmp::max(0, (start as i32 - 252 + 13 + 6) / 7) as u32;
        let s1 = rand::thread_rng().gen_range(s1_lower, s1_upper);
        let s2 = (start as i32 - s1 as i32 * 7 + 13) as u32;
        (s1 as EOShort, s2 as EOChar)
    }

    /// Encodes the upcoming start as the `(s1, s2)` pair of an update
    /// packet, satisfying `upcoming_sequence_start == s1 - s2`.
    pub fn get_update_sequence_bytes(&self) -> (EOShort, EOChar) {
        let base = self.upcoming_sequence_start;
        let s1 = rand::thread_rng().gen_range(base, base + 252);
        (s1 as EOShort, (s1 - base) as EOChar)
    }

    /// Advances the rolling 0..=9 counter and returns it offset by the
    /// sequence start.
    pub fn gen_sequence(&mut self) -> EOInt {
        self.sequence = (self.sequence + 1) % 10;
        self.sequence_start + self.sequence
    }
}
|
// svd2rust-generated reader/writer type aliases for the WPCR0 fields.
// UIX4 is a 6-bit field; every other field is a single bit.
#[doc = "Register `WPCR0` reader"]
pub type R = crate::R<WPCR0_SPEC>;
#[doc = "Register `WPCR0` writer"]
pub type W = crate::W<WPCR0_SPEC>;
#[doc = "Field `UIX4` reader - UIX4"]
pub type UIX4_R = crate::FieldReader;
#[doc = "Field `UIX4` writer - UIX4"]
pub type UIX4_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 6, O>;
#[doc = "Field `SWCL` reader - SWCL"]
pub type SWCL_R = crate::BitReader;
#[doc = "Field `SWCL` writer - SWCL"]
pub type SWCL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SWDL0` reader - SWDL0"]
pub type SWDL0_R = crate::BitReader;
#[doc = "Field `SWDL0` writer - SWDL0"]
pub type SWDL0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SWDL1` reader - SWDL1"]
pub type SWDL1_R = crate::BitReader;
#[doc = "Field `SWDL1` writer - SWDL1"]
pub type SWDL1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HSICL` reader - HSICL"]
pub type HSICL_R = crate::BitReader;
#[doc = "Field `HSICL` writer - HSICL"]
pub type HSICL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HSIDL0` reader - HSIDL0"]
pub type HSIDL0_R = crate::BitReader;
#[doc = "Field `HSIDL0` writer - HSIDL0"]
pub type HSIDL0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HSIDL1` reader - HSIDL1"]
pub type HSIDL1_R = crate::BitReader;
#[doc = "Field `HSIDL1` writer - HSIDL1"]
pub type HSIDL1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FTXSMCL` reader - FTXSMCL"]
pub type FTXSMCL_R = crate::BitReader;
#[doc = "Field `FTXSMCL` writer - FTXSMCL"]
pub type FTXSMCL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FTXSMDL` reader - FTXSMDL"]
pub type FTXSMDL_R = crate::BitReader;
#[doc = "Field `FTXSMDL` writer - FTXSMDL"]
pub type FTXSMDL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CDOFFDL` reader - CDOFFDL"]
pub type CDOFFDL_R = crate::BitReader;
#[doc = "Field `CDOFFDL` writer - CDOFFDL"]
pub type CDOFFDL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TDDL` reader - TDDL"]
pub type TDDL_R = crate::BitReader;
#[doc = "Field `TDDL` writer - TDDL"]
pub type TDDL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// svd2rust-generated read accessors: each returns a typed view of one
// WPCR0 field extracted from the cached register value in `self.bits`.
impl R {
#[doc = "Bits 0:5 - UIX4"]
#[inline(always)]
pub fn uix4(&self) -> UIX4_R {
UIX4_R::new((self.bits & 0x3f) as u8)
}
#[doc = "Bit 6 - SWCL"]
#[inline(always)]
pub fn swcl(&self) -> SWCL_R {
SWCL_R::new(((self.bits >> 6) & 1) != 0)
}
#[doc = "Bit 7 - SWDL0"]
#[inline(always)]
pub fn swdl0(&self) -> SWDL0_R {
SWDL0_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bit 8 - SWDL1"]
#[inline(always)]
pub fn swdl1(&self) -> SWDL1_R {
SWDL1_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - HSICL"]
#[inline(always)]
pub fn hsicl(&self) -> HSICL_R {
HSICL_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - HSIDL0"]
#[inline(always)]
pub fn hsidl0(&self) -> HSIDL0_R {
HSIDL0_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bit 11 - HSIDL1"]
#[inline(always)]
pub fn hsidl1(&self) -> HSIDL1_R {
HSIDL1_R::new(((self.bits >> 11) & 1) != 0)
}
#[doc = "Bit 12 - FTXSMCL"]
#[inline(always)]
pub fn ftxsmcl(&self) -> FTXSMCL_R {
FTXSMCL_R::new(((self.bits >> 12) & 1) != 0)
}
#[doc = "Bit 13 - FTXSMDL"]
#[inline(always)]
pub fn ftxsmdl(&self) -> FTXSMDL_R {
FTXSMDL_R::new(((self.bits >> 13) & 1) != 0)
}
#[doc = "Bit 14 - CDOFFDL"]
#[inline(always)]
pub fn cdoffdl(&self) -> CDOFFDL_R {
CDOFFDL_R::new(((self.bits >> 14) & 1) != 0)
}
// NOTE: bit 15 carries no field here — TDDL sits at bit 16.
#[doc = "Bit 16 - TDDL"]
#[inline(always)]
pub fn tddl(&self) -> TDDL_R {
TDDL_R::new(((self.bits >> 16) & 1) != 0)
}
}
// svd2rust-generated writer: each method returns a typed field-writer
// positioned at that field's bit offset within WPCR0.
impl W {
#[doc = "Bits 0:5 - UIX4"]
#[inline(always)]
#[must_use]
pub fn uix4(&mut self) -> UIX4_W<WPCR0_SPEC, 0> {
UIX4_W::new(self)
}
#[doc = "Bit 6 - SWCL"]
#[inline(always)]
#[must_use]
pub fn swcl(&mut self) -> SWCL_W<WPCR0_SPEC, 6> {
SWCL_W::new(self)
}
#[doc = "Bit 7 - SWDL0"]
#[inline(always)]
#[must_use]
pub fn swdl0(&mut self) -> SWDL0_W<WPCR0_SPEC, 7> {
SWDL0_W::new(self)
}
#[doc = "Bit 8 - SWDL1"]
#[inline(always)]
#[must_use]
pub fn swdl1(&mut self) -> SWDL1_W<WPCR0_SPEC, 8> {
SWDL1_W::new(self)
}
#[doc = "Bit 9 - HSICL"]
#[inline(always)]
#[must_use]
pub fn hsicl(&mut self) -> HSICL_W<WPCR0_SPEC, 9> {
HSICL_W::new(self)
}
#[doc = "Bit 10 - HSIDL0"]
#[inline(always)]
#[must_use]
pub fn hsidl0(&mut self) -> HSIDL0_W<WPCR0_SPEC, 10> {
HSIDL0_W::new(self)
}
#[doc = "Bit 11 - HSIDL1"]
#[inline(always)]
#[must_use]
pub fn hsidl1(&mut self) -> HSIDL1_W<WPCR0_SPEC, 11> {
HSIDL1_W::new(self)
}
#[doc = "Bit 12 - FTXSMCL"]
#[inline(always)]
#[must_use]
pub fn ftxsmcl(&mut self) -> FTXSMCL_W<WPCR0_SPEC, 12> {
FTXSMCL_W::new(self)
}
#[doc = "Bit 13 - FTXSMDL"]
#[inline(always)]
#[must_use]
pub fn ftxsmdl(&mut self) -> FTXSMDL_W<WPCR0_SPEC, 13> {
FTXSMDL_W::new(self)
}
#[doc = "Bit 14 - CDOFFDL"]
#[inline(always)]
#[must_use]
pub fn cdoffdl(&mut self) -> CDOFFDL_W<WPCR0_SPEC, 14> {
CDOFFDL_W::new(self)
}
#[doc = "Bit 16 - TDDL"]
#[inline(always)]
#[must_use]
pub fn tddl(&mut self) -> TDDL_W<WPCR0_SPEC, 16> {
TDDL_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// Unsafe because arbitrary bit patterns may violate field invariants.
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "DSI wrapper PHY configuration register 0\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`wpcr0::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`wpcr0::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type for WPCR0: a 32-bit, readable, writable register with no
// write-1-to-clear/set fields and a reset value of 0.
pub struct WPCR0_SPEC;
impl crate::RegisterSpec for WPCR0_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`wpcr0::R`](R) reader structure"]
impl crate::Readable for WPCR0_SPEC {}
#[doc = "`write(|w| ..)` method takes [`wpcr0::W`](W) writer structure"]
impl crate::Writable for WPCR0_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets WPCR0 to value 0"]
impl crate::Resettable for WPCR0_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
//! Tests auto-converted from "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted/01_inline.hrx"
#[test]
fn t01_inline() {
// Inline escaped single-quoted literals interpolated in various contexts;
// string literals are auto-generated and must stay byte-identical.
assert_eq!(
rsass(
".result {\
\n output: \'l\\\\ite\\ral\';\
\n output: #{\'l\\\\ite\\ral\'};\
\n output: \"[#{\'l\\\\ite\\ral\'}]\";\
\n output: \"#{\'l\\\\ite\\ral\'}\";\
\n output: \'#{\'l\\\\ite\\ral\'}\';\
\n output: \"[\'#{\'l\\\\ite\\ral\'}\']\";\
\n}\
\n"
)
.unwrap(),
".result {\
\n output: \"l\\\\iteral\";\
\n output: l\\iteral;\
\n output: \"[l\\\\iteral]\";\
\n output: \"l\\\\iteral\";\
\n output: \"l\\\\iteral\";\
\n output: \"[\'l\\\\iteral\']\";\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted/02_variable.hrx"
#[test]
fn t02_variable() {
// Same cases as t01, but with the literal stored in a variable first.
assert_eq!(
rsass(
"$input: \'l\\\\ite\\ral\';\
\n.result {\
\n output: $input;\
\n output: #{$input};\
\n output: \"[#{$input}]\";\
\n output: \"#{$input}\";\
\n output: \'#{$input}\';\
\n output: \"[\'#{$input}\']\";\
\n}\
\n"
)
.unwrap(),
".result {\
\n output: \"l\\\\iteral\";\
\n output: l\\iteral;\
\n output: \"[l\\\\iteral]\";\
\n output: \"l\\\\iteral\";\
\n output: \"l\\\\iteral\";\
\n output: \"[\'l\\\\iteral\']\";\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted/03_inline_double.hrx"
#[test]
fn t03_inline_double() {
// Doubly-nested interpolation of inline escaped single-quoted literals.
assert_eq!(
rsass(
".result {\
\n output: #{#{\'l\\\\ite\\ral\'}};\
\n output: #{\"[#{\'l\\\\ite\\ral\'}]\"};\
\n output: #{\"#{\'l\\\\ite\\ral\'}\"};\
\n output: #{\'#{\'l\\\\ite\\ral\'}\'};\
\n output: #{\"[\'#{\'l\\\\ite\\ral\'}\']\"};\
\n}\
\n"
)
.unwrap(),
".result {\
\n output: l\\iteral;\
\n output: [l\\iteral];\
\n output: l\\iteral;\
\n output: l\\iteral;\
\n output: [\'l\\iteral\'];\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted/04_variable_double.hrx"
#[test]
fn t04_variable_double() {
// Same cases as t03, but with the literal stored in a variable first.
assert_eq!(
rsass(
"$input: \'l\\\\ite\\ral\';\
\n.result {\
\n output: #{#{$input}};\
\n output: #{\"[#{$input}]\"};\
\n output: #{\"#{$input}\"};\
\n output: #{\'#{$input}\'};\
\n output: #{\"[\'#{$input}\']\"};\
\n}\
\n"
)
.unwrap(),
".result {\
\n output: l\\iteral;\
\n output: [l\\iteral];\
\n output: l\\iteral;\
\n output: l\\iteral;\
\n output: [\'l\\iteral\'];\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted/05_variable_quoted_double.hrx"
#[test]
fn t05_variable_quoted_double() {
// Nested interpolation inside both double- and single-quoted contexts;
// both quote styles normalize to double quotes in the output.
assert_eq!(
rsass(
"$input: \'l\\\\ite\\ral\';\
\n.result {\
\n dquoted: \"#{#{$input}}\";\
\n dquoted: \"#{\"[#{$input}]\"}\";\
\n dquoted: \"#{\"#{$input}\"}\";\
\n dquoted: \"#{\'#{$input}\'}\";\
\n dquoted: \"#{\"[\'#{$input}\']\"}\";\
\n squoted: \'#{#{$input}}\';\
\n squoted: \'#{\"[#{$input}]\"}\';\
\n squoted: \'#{\"#{$input}\"}\';\
\n squoted: \'#{\'#{$input}\'}\';\
\n squoted: \'#{\"[\'#{$input}\']\"}\';\
\n}\
\n"
)
.unwrap(),
".result {\
\n dquoted: \"l\\\\iteral\";\
\n dquoted: \"[l\\\\iteral]\";\
\n dquoted: \"l\\\\iteral\";\
\n dquoted: \"l\\\\iteral\";\
\n dquoted: \"[\'l\\\\iteral\']\";\
\n squoted: \"l\\\\iteral\";\
\n squoted: \"[l\\\\iteral]\";\
\n squoted: \"l\\\\iteral\";\
\n squoted: \"l\\\\iteral\";\
\n squoted: \"[\'l\\\\iteral\']\";\
\n}\
\n"
);
}
// From "sass-spec/spec/non_conformant/parser/interpolate/13_escaped_single_quoted/06_escape_interpolation.hrx"
#[test]
fn t06_escape_interpolation() {
// `\#{...}` escapes the interpolation marker, so it passes through as text.
assert_eq!(
rsass(
"$input: \'l\\\\ite\\ral\';\
\n.result {\
\n output: \"[\\#{\'l\\\\ite\\ral\'}]\";\
\n output: \"\\#{\'l\\\\ite\\ral\'}\";\
\n output: \'\\#{\'l\\\\ite\\ral\'}\';\
\n output: \"[\'\\#{\'l\\\\ite\\ral\'}\']\";\
\n}\
\n"
)
.unwrap(),
".result {\
\n output: \"[#{\'l\\\\iteral\'}]\";\
\n output: \"#{\'l\\\\iteral\'}\";\
\n output: \"#{\" l\\\\iteral \"}\";\
\n output: \"[\'#{\'l\\\\iteral\'}\']\";\
\n}\
\n"
);
}
|
use druid::widget::{Align, Button, Flex, Label, Padding, TextBox};
use druid::{AppLauncher, Data, Lens, LocalizedString, Widget, WidgetExt, WindowDesc};
// Spacing (in display points) between stacked / side-by-side widgets.
const VERTICAL_WIDGET_SPACING: f64 = 20.0;
// NOTE(review): "HORIZTONAL" is a typo for "HORIZONTAL"; renaming would
// touch every use site below, so it is only flagged here.
const HORIZTONAL_WIDGET_SPACING: f64 = 8.0;
// Window title, localizable via druid's LocalizedString machinery.
const WINDOW_TITLE: LocalizedString<HelloState> = LocalizedString::new("Rakaly");
#[derive(Clone, Data, Lens)]
// Application state backing the two text inputs (bound via lenses).
struct HelloState {
// Steam account name entered by the user.
steam_name: String,
// Rakaly API key entered by the user.
api_key: String,
}
/// Describes the main window and launches the druid application.
///
/// Panics if the event loop fails to start.
pub fn run() {
    let window = WindowDesc::new(build_root_widget)
        .title(WINDOW_TITLE)
        .window_size((400.0, 400.0));

    // Both text fields start out blank.
    let state = HelloState {
        steam_name: String::new(),
        api_key: String::new(),
    };

    AppLauncher::with_window(window)
        .launch(state)
        .expect("Failed to launch application");
}
fn build_root_widget() -> impl Widget<HelloState> {
let watch_dir = if let Some(user_dirs) = directories::UserDirs::new() {
user_dirs
.document_dir()
.map(|x| {
x.join("Paradox Interactive")
.join("Europa Universalis IV")
.join("save games")
})
.map(|x| x.display().to_string())
.unwrap_or_else(|| String::from("unknown"))
} else {
String::from("unknown")
};
let intro_text_1 = Label::new("When the \"Start\" button is pressed, Rakaly will");
let intro_text_2 = Label::new("automatically start watching the following directory");
let intro_text_3 = Label::new("for any changes:");
let intro_text_4 = Label::new(watch_dir);
let intro_text_5 = Label::new("Rakaly will upload the new files to the server.");
let intro_text_6 = Label::new("To start the uploader on boot, click \"Enable on Startup\"");
let steam_name_box = TextBox::new().expand_width().lens(HelloState::steam_name);
let steam_name_row = Flex::row()
.must_fill_main_axis(true)
.with_child(Label::new("Steam Name:"))
.with_spacer(HORIZTONAL_WIDGET_SPACING)
.with_flex_child(steam_name_box, 1.0);
let api_key_box = TextBox::new().expand_width().lens(HelloState::api_key);
let api_key_row = Flex::row()
.must_fill_main_axis(true)
.with_child(Label::new("API Key:"))
.with_spacer(HORIZTONAL_WIDGET_SPACING)
.with_flex_child(api_key_box, 1.0);
let immediate_btn_layout = Flex::row()
.with_child(Button::new("Start").on_click(|_ctx, _data: &mut HelloState, _env| {}))
.with_spacer(HORIZTONAL_WIDGET_SPACING)
.with_child(Button::new("Stop").on_click(|_ctx, _data: &mut HelloState, _env| {}));
let service_btn_layout = Flex::row()
.with_child(
Button::new("Enable on Startup").on_click(|_ctx, _data: &mut HelloState, _env| {}),
)
.with_spacer(HORIZTONAL_WIDGET_SPACING)
.with_child(
Button::new("Disable on Startup").on_click(|_ctx, _data: &mut HelloState, _env| {}),
);
let layout = Flex::column()
.with_child(intro_text_1)
.with_child(intro_text_2)
.with_child(intro_text_3)
.with_child(intro_text_4)
.with_child(intro_text_5)
.with_child(intro_text_6)
.with_spacer(VERTICAL_WIDGET_SPACING)
.with_child(steam_name_row)
.with_spacer(VERTICAL_WIDGET_SPACING)
.with_child(api_key_row)
.with_spacer(VERTICAL_WIDGET_SPACING)
.with_child(immediate_btn_layout)
.with_spacer(VERTICAL_WIDGET_SPACING)
.with_child(service_btn_layout)
.expand_width();
Padding::new(10.0, Align::left(layout))
}
|
extern crate clap;
extern crate dirs;
mod config;
use clap::{App, SubCommand, Arg};
use std::path::Path;
use std::io::prelude::*;
use std::io::{BufReader, BufRead};
use std::fs::{OpenOptions, create_dir_all, read_dir};
use std::error::Error;
use std::time::{SystemTime, Duration};
/// Appends the current Unix timestamp to the named task's log file,
/// creating the data directory and the file on first use.
///
/// # Errors
/// Fails if the directory or file cannot be created/opened, if the system
/// clock is before the Unix epoch, or if the write fails.
fn track_task(task_name: &str) -> Result<(), Box<dyn Error>> {
    create_dir_all(config::base_path())?;
    let task_path = config::base_path().join(Path::new(task_name));
    // `append(true)` already implies write access, so the previous
    // redundant `.write(true)` is dropped.
    let mut file = OpenOptions::new()
        .append(true)
        .create(true)
        .open(task_path)?;
    let timestamp = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)?
        .as_secs();
    writeln!(file, "{}", timestamp)?;
    Ok(())
}
/// Prints the total tracked hours for a task.
///
/// The task file holds alternating start/stop timestamps, one per line;
/// each (start, stop) pair contributes `stop - start` seconds. A trailing
/// unmatched start (task still running) is ignored, as before.
///
/// # Errors
/// Fails if the file cannot be opened/read or a line is not a valid `u64`.
fn task_time(task_name: &str) -> Result<(), Box<dyn Error>> {
    let task_path = config::base_path().join(Path::new(task_name));
    let file = BufReader::new(OpenOptions::new().read(true).open(task_path)?);
    let mut total = Duration::new(0, 0);
    let mut open_timestamp: Option<u64> = None;
    for line in file.lines() {
        let timestamp = line?.parse::<u64>()?;
        match open_timestamp.take() {
            // Closing entry: add the elapsed span. `saturating_sub` guards
            // against malformed files with out-of-order timestamps, which
            // previously could underflow and panic.
            Some(start) => total += Duration::new(timestamp.saturating_sub(start), 0),
            // Opening entry: remember it until the matching stop arrives.
            None => open_timestamp = Some(timestamp),
        }
    }
    println!("{:?}h", total.as_secs() / 3600);
    Ok(())
}
/// Pauses every currently-running task.
///
/// A task file with an odd number of timestamp lines has an unmatched
/// start entry, so appending one more timestamp records its stop time.
fn break_tasks() -> Result<(), Box<dyn Error>> {
    for task_entry in read_dir(config::base_path())? {
        let task_path = task_entry?.path();
        if !task_path.is_file() {
            continue;
        }
        let reader = BufReader::new(OpenOptions::new().read(true).open(&task_path)?);
        if reader.lines().count() % 2 != 0 {
            let name = task_path.file_name().and_then(|f| f.to_str()).expect("");
            track_task(name)?;
        }
    }
    Ok(())
}
/// Restarts the most recently stopped task.
///
/// Scans every task file, picks the stopped task (even entry count) whose
/// last timestamp is the newest, and appends a new start entry to it.
///
/// # Errors
/// Fails on filesystem errors, on an unparsable final timestamp, or — as a
/// bug fix — if re-starting the chosen task fails (the previous
/// `and_then(...).err()` construction silently discarded that error).
fn continue_tasks() -> Result<(), Box<dyn Error>> {
    let tasks_path = config::base_path();
    let mut most_recent: Option<(String, u64)> = None;
    for task_entry in read_dir(tasks_path)? {
        let task_path = task_entry?.path();
        if !task_path.is_file() {
            continue;
        }
        let reader = BufReader::new(OpenOptions::new().read(true).open(&task_path)?);
        let lines: Vec<String> = reader.lines().map(|l| l.expect("")).collect();
        // Only stopped tasks (an even, non-zero number of entries) qualify.
        if lines.len() % 2 == 0 && !lines.is_empty() {
            let last_timestamp = lines.last().expect("").parse::<u64>()?;
            let best_so_far = most_recent.as_ref().map_or(0u64, |&(_, ts)| ts);
            if last_timestamp > best_so_far {
                let name = task_path.file_name().and_then(|f| f.to_str()).expect("");
                most_recent = Some((name.to_string(), last_timestamp));
            }
        }
    }
    if let Some((task_name, _)) = most_recent {
        // Propagate failures instead of swallowing them.
        track_task(task_name.as_str())?;
    }
    Ok(())
}
/// Entry point for the `track` CLI.
///
/// Subcommands: `time <task>` starts/stops tracking a task, `sum <task>`
/// prints its total hours, `break` pauses all running tasks, and
/// `continue` resumes the most recently stopped one.
fn main() -> Result<(), Box<dyn Error>> {
    // The positional argument previously had an empty name (""), which
    // works but is opaque in `--help` output; give it a real name.
    let task_arg = "task";
    let time_command_name = "time";
    let time_sub_command = SubCommand::with_name(time_command_name)
        .arg(Arg::with_name(task_arg).takes_value(true).required(true));
    let task_time_command_name = "sum";
    let task_time_command = SubCommand::with_name(task_time_command_name)
        .arg(Arg::with_name(task_arg).takes_value(true).required(true));
    let break_command_name = "break";
    let continue_command_name = "continue";
    let matches = App::new("track")
        .subcommand(time_sub_command)
        .subcommand(task_time_command)
        .subcommand(SubCommand::with_name(break_command_name))
        .subcommand(SubCommand::with_name(continue_command_name))
        .get_matches();
    if let Some(command) = matches.subcommand_matches(time_command_name) {
        // Safe: clap enforces `required(true)` before we get here.
        let task = command.value_of(task_arg).expect("task argument is required");
        track_task(task)?;
    }
    if let Some(command) = matches.subcommand_matches(task_time_command_name) {
        let task = command.value_of(task_arg).expect("task argument is required");
        task_time(task)?;
    }
    if matches.subcommand_matches(break_command_name).is_some() {
        break_tasks()?;
    }
    if matches.subcommand_matches(continue_command_name).is_some() {
        continue_tasks()?;
    }
    Ok(())
}
|
use embedded_hal::blocking::i2c::{Operation as I2cOperation, Transactional};
// 7-bit I2C address of the device. 0x48 is presumably the base address
// with the A0..A2 pins tied low — confirm against the board's wiring.
const ADDR: u8 = 0x48;
// Driver for the PCF8591 ADC, generic over any transactional I2C bus.
pub struct PCF8591<I2C> {
// Owned handle to the underlying I2C bus.
i2c: I2C,
}
// Selects which of the four analog input channels to sample.
enum ADCNum {
AIN0,
AIN1,
AIN2,
AIN3,
}
impl<I2C> PCF8591<I2C>
where
    I2C: Transactional,
{
    /// Wraps an I2C bus handle into a PCF8591 driver.
    pub fn new(i2c: I2C) -> Self {
        Self { i2c }
    }

    /// Reads one 8-bit sample from analog input 0.
    pub fn query_ain0(&mut self) -> Result<u8, I2C::Error> {
        self.query(ADCNum::AIN0)
    }

    /// Reads one 8-bit sample from analog input 1.
    pub fn query_ain1(&mut self) -> Result<u8, I2C::Error> {
        self.query(ADCNum::AIN1)
    }

    /// Reads one 8-bit sample from analog input 2.
    pub fn query_ain2(&mut self) -> Result<u8, I2C::Error> {
        self.query(ADCNum::AIN2)
    }

    /// Reads one 8-bit sample from analog input 3.
    pub fn query_ain3(&mut self) -> Result<u8, I2C::Error> {
        self.query(ADCNum::AIN3)
    }

    /// Writes the channel-select control byte, then reads one sample in a
    /// single combined I2C transaction.
    fn query(&mut self, num: ADCNum) -> Result<u8, I2C::Error> {
        // 0x40..=0x43: control byte whose low bits select the channel —
        // presumably matching the PCF8591 datasheet; confirm before reuse.
        let command = match num {
            ADCNum::AIN0 => [0x40],
            ADCNum::AIN1 => [0x41],
            ADCNum::AIN2 => [0x42],
            ADCNum::AIN3 => [0x43],
        };
        let mut response = [0u8];
        let mut operations = [
            I2cOperation::Write(&command),
            I2cOperation::Read(&mut response),
        ];
        self.i2c.exec(ADDR, &mut operations)?;
        Ok(response[0])
    }
}
|
mod expand_repeats;
mod expand_tokens;
mod extract_default_aliases;
mod extract_tokens;
mod flatten_grammar;
mod intern_symbols;
mod process_inlines;
pub(crate) use self::expand_tokens::expand_tokens;
use self::expand_repeats::expand_repeats;
use self::extract_default_aliases::extract_default_aliases;
use self::extract_tokens::extract_tokens;
use self::flatten_grammar::flatten_grammar;
use self::intern_symbols::intern_symbols;
use self::process_inlines::process_inlines;
use super::grammars::{
ExternalToken, InlinedProductionMap, InputGrammar, LexicalGrammar, PrecedenceEntry,
SyntaxGrammar, Variable,
};
use super::rules::{AliasMap, Precedence, Rule, Symbol};
use anyhow::{anyhow, Result};
use std::{
cmp::Ordering,
collections::{hash_map, HashMap, HashSet},
mem,
};
// Intermediate grammar form shared by the preparation passes. The type
// parameters stand for the representation of extra symbols (`T`) and
// external tokens (`U`); they narrow as passes run (see aliases below).
pub(crate) struct IntermediateGrammar<T, U> {
variables: Vec<Variable>,
extra_symbols: Vec<T>,
expected_conflicts: Vec<Vec<Symbol>>,
precedence_orderings: Vec<Vec<PrecedenceEntry>>,
external_tokens: Vec<U>,
variables_to_inline: Vec<Symbol>,
supertype_symbols: Vec<Symbol>,
word_token: Option<Symbol>,
}
// Stage aliases: after symbol interning, and after token extraction.
pub(crate) type InternedGrammar = IntermediateGrammar<Rule, Variable>;
pub(crate) type ExtractedSyntaxGrammar = IntermediateGrammar<Symbol, ExternalToken>;
// Lexical portion of the grammar split out by `extract_tokens`.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ExtractedLexicalGrammar {
pub variables: Vec<Variable>,
pub separators: Vec<Rule>,
}
// Manual impl instead of `#[derive(Default)]` because the derive would add
// unnecessary `T: Default` and `U: Default` bounds (the fields only hold
// `Vec`s/`Option`s of them, which are defaultable regardless).
impl<T, U> Default for IntermediateGrammar<T, U> {
fn default() -> Self {
Self {
variables: Default::default(),
extra_symbols: Default::default(),
expected_conflicts: Default::default(),
precedence_orderings: Default::default(),
external_tokens: Default::default(),
variables_to_inline: Default::default(),
supertype_symbols: Default::default(),
word_token: Default::default(),
}
}
}
/// Transform an input grammar into separate components that are ready
/// for parse table construction.
pub(crate) fn prepare_grammar(
input_grammar: &InputGrammar,
) -> Result<(
SyntaxGrammar,
LexicalGrammar,
InlinedProductionMap,
AliasMap,
)> {
// Pass order matters: each step consumes the previous step's output.
validate_precedences(input_grammar)?;
let interned_grammar = intern_symbols(input_grammar)?;
let (syntax_grammar, lexical_grammar) = extract_tokens(interned_grammar)?;
let syntax_grammar = expand_repeats(syntax_grammar);
let mut syntax_grammar = flatten_grammar(syntax_grammar)?;
let lexical_grammar = expand_tokens(lexical_grammar)?;
// Note: `extract_default_aliases` mutates the syntax grammar in place.
let default_aliases = extract_default_aliases(&mut syntax_grammar, &lexical_grammar);
let inlines = process_inlines(&syntax_grammar, &lexical_grammar)?;
Ok((syntax_grammar, lexical_grammar, inlines, default_aliases))
}
/// Check that all of the named precedences used in the grammar are declared
/// within the `precedences` lists, and also that there are no conflicting
/// precedence orderings declared in those lists.
fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
// For any two precedence names `a` and `b`, if `a` comes before `b`
// in some list, then it cannot come *after* `b` in any list.
let mut pairs = HashMap::new();
for list in &grammar.precedence_orderings {
for (i, mut entry1) in list.iter().enumerate() {
for mut entry2 in list.iter().skip(i + 1) {
if entry2 == entry1 {
continue;
}
// Canonicalize the pair so the lexically smaller entry is the
// map key's first element, and record which direction this
// list ordered them; conflicting lists then collide on the key.
let mut ordering = Ordering::Greater;
if entry1 > entry2 {
ordering = Ordering::Less;
mem::swap(&mut entry1, &mut entry2);
}
match pairs.entry((entry1, entry2)) {
hash_map::Entry::Vacant(e) => {
e.insert(ordering);
}
hash_map::Entry::Occupied(e) => {
if e.get() != &ordering {
return Err(anyhow!(
"Conflicting orderings for precedences {} and {}",
entry1,
entry2
));
}
}
}
}
}
}
// Check that no rule contains a named precedence that is not present in
// any of the `precedences` lists.
// Local helper: recursively walks a rule tree checking every named
// precedence against the declared set.
fn validate(rule_name: &str, rule: &Rule, names: &HashSet<&String>) -> Result<()> {
match rule {
Rule::Repeat(rule) => validate(rule_name, rule, names),
Rule::Seq(elements) | Rule::Choice(elements) => elements
.iter()
.map(|e| validate(rule_name, e, names))
.collect(),
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = &params.precedence {
if !names.contains(n) {
return Err(anyhow!(
"Undeclared precedence '{}' in rule '{}'",
n,
rule_name
));
}
}
validate(rule_name, rule, names)?;
Ok(())
}
_ => Ok(()),
}
}
let precedence_names = grammar
.precedence_orderings
.iter()
.flat_map(|l| l.iter())
.filter_map(|p| {
if let PrecedenceEntry::Name(n) = p {
Some(n)
} else {
None
}
})
.collect::<HashSet<&String>>();
for variable in &grammar.variables {
validate(&variable.name, &variable.rule, &precedence_names)?;
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::generate::grammars::{InputGrammar, Variable, VariableType};
#[test]
// 'omg' is used in v2 but absent from both precedence lists.
fn test_validate_precedences_with_undeclared_precedence() {
let grammar = InputGrammar {
precedence_orderings: vec![
vec![
PrecedenceEntry::Name("a".to_string()),
PrecedenceEntry::Name("b".to_string()),
],
vec![
PrecedenceEntry::Name("b".to_string()),
PrecedenceEntry::Name("c".to_string()),
PrecedenceEntry::Name("d".to_string()),
],
],
variables: vec![
Variable {
name: "v1".to_string(),
kind: VariableType::Named,
rule: Rule::Seq(vec![
Rule::prec_left(Precedence::Name("b".to_string()), Rule::string("w")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("x")),
]),
},
Variable {
name: "v2".to_string(),
kind: VariableType::Named,
rule: Rule::repeat(Rule::Choice(vec![
Rule::prec_left(Precedence::Name("omg".to_string()), Rule::string("y")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("z")),
])),
},
],
..Default::default()
};
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().to_string(),
"Undeclared precedence 'omg' in rule 'v2'",
);
}
#[test]
// The first list orders a before b; the second implies b before a.
fn test_validate_precedences_with_conflicting_order() {
let grammar = InputGrammar {
precedence_orderings: vec![
vec![
PrecedenceEntry::Name("a".to_string()),
PrecedenceEntry::Name("b".to_string()),
],
vec![
PrecedenceEntry::Name("b".to_string()),
PrecedenceEntry::Name("c".to_string()),
PrecedenceEntry::Name("a".to_string()),
],
],
variables: vec![
Variable {
name: "v1".to_string(),
kind: VariableType::Named,
rule: Rule::Seq(vec![
Rule::prec_left(Precedence::Name("b".to_string()), Rule::string("w")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("x")),
]),
},
Variable {
name: "v2".to_string(),
kind: VariableType::Named,
rule: Rule::repeat(Rule::Choice(vec![
Rule::prec_left(Precedence::Name("a".to_string()), Rule::string("y")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("z")),
])),
},
],
..Default::default()
};
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().to_string(),
"Conflicting orderings for precedences 'a' and 'b'",
);
}
}
|
use std::str;
#[derive(Clone, Debug, Default, Hash, PartialEq, Serialize, Deserialize)]
// A named table reference with an optional alias.
pub struct Table {
// The table's name as written.
pub name: String,
// Optional alias; `None` when the table is referenced by name alone.
pub alias: Option<String>,
}
impl<'a> From<&'a str> for Table {
    /// Builds an unaliased `Table` referring to `t` by name.
    fn from(t: &str) -> Table {
        Table {
            name: t.to_owned(),
            alias: None,
        }
    }
}
|
use crate::{
_utils::error::BootError,
account::controller::create_account_router,
auth::controller::create_auth_router,
import::controller::create_import_router,
post::controller::create_post_router,
search::{controller::create_search_router, cron_job::SearchCronJob},
tag::controller::create_tag_router,
web::controller::create_web_router,
};
use axum::{routing::get, Json, Router};
use local_ip_address::local_ip;
use serde_json::json;
use std::sync::Arc;
use tokio_cron_scheduler::JobScheduler;
use super::{
layers::{
cors::create_cors_layer,
trace::{create_trace_layer, enable_tracing},
},
servers::{local::run_local_server, loopback::run_loopback_server},
state::{create_app_state, AppState},
};
/// Application entry point: wires up tracing, shared state, cron jobs,
/// the router, and the HTTP servers (loopback always; LAN when a local
/// IP address can be resolved).
pub async fn actual_main() {
let _guard = enable_tracing();
// create a shared-by-reference state
// NOTE(review): setup failures below are treated as fatal via `unwrap`.
let app_state = create_app_state().await.unwrap();
// setup cron jobs
let cron_jobs = create_cron_jobs(app_state.clone()).await.unwrap();
cron_jobs.start().await.unwrap();
// create the app router
let app = create_app(app_state.clone()).await.unwrap();
// get the local IP address of the system
match local_ip() {
Ok(ip) => {
// run both loopback and local servers
let loopback_server = run_loopback_server(app.clone());
let local_server = run_local_server(app, ip);
// await both servers concurrently
let (_, _) = tokio::join!(loopback_server, local_server);
}
Err(e) => {
// log the error as info and run only the loopback server
tracing::info!("Running only on the loopback address: {}", e);
let loopback_server = run_loopback_server(app);
// await the loopback server
loopback_server.await;
}
}
}
// create and configure the app router
// Mounts each feature router under its URL prefix, adds a version/info
// root endpoint, and layers CORS + tracing around everything.
async fn create_app(app_state: AppState) -> Result<Router, BootError> {
let app = Router::new();
let app = app
// @TODO-ZM: align on model naming convention
.nest("/posts", create_post_router())
.nest("/search", create_search_router())
.nest("/accounts", create_account_router())
.nest("/tags", create_tag_router())
.nest("/auth", create_auth_router())
// NOTE(review): "/web/" has a trailing slash unlike the other
// mounts — confirm this is intentional.
.nest("/web/", create_web_router())
.nest("/import", create_import_router())
.route(
"/",
get(|| async {
Json(json!({
"app": { "version": env!("CARGO_PKG_VERSION") },
"repository": { "url": env!("CARGO_PKG_REPOSITORY") }
}))
}),
)
.with_state(app_state);
let app = app.layer(create_cors_layer()).layer(create_trace_layer());
Ok(app)
}
async fn create_cron_jobs(app_state: AppState) -> Result<JobScheduler, BootError> {
let schedule = JobScheduler::new().await;
if schedule.is_err() {
return Err(BootError::CronJobSetupError);
}
let schedule = schedule.unwrap();
let search_cron_job = Arc::new(SearchCronJob { app_state });
// @TODO-ZM: add un-indexing cron job
let registration_result = schedule
.add(search_cron_job.create_indexing_cron_job().unwrap())
.await;
if registration_result.is_err() {
tracing::error!(
"Error while registering search cron job: {:?}",
registration_result.err()
);
return Err(BootError::CronJobSetupError);
}
let registration_result = schedule
.add(search_cron_job.create_bk_tree_refresher_cron_job().unwrap())
.await;
if registration_result.is_err() {
tracing::error!(
"Error while registering bk-tree refresher cron job: {:?}",
registration_result.err()
);
return Err(BootError::CronJobSetupError);
}
Ok(schedule)
}
|
#![warn(rust_2018_idioms)]
// TODO: remove this!
#![allow(dead_code)]
use std::collections::HashMap;
use crate::model::{FlatDeck, Hand, Player};
pub mod client;
pub mod model;
pub mod protocol;
pub mod server;
// Server-side game state.
struct State {
// The shuffled deck cards are dealt from.
deck: FlatDeck,
// Each player's hand; presumably `None` before a hand is dealt —
// TODO confirm once dealing logic exists.
players: HashMap<Player, Option<Hand>>,
}
// Per-player slice of the state.
struct PlayerState {
hand: Option<Hand>,
}
// Game phases; no variants defined yet.
enum Phase {}
// `'static` is implied for references in `const` items; spelling it out
// trips clippy's `redundant_static_lifetimes` lint (notable given the
// crate's `#![warn(rust_2018_idioms)]`).
const GAME_VERSION: &str = env!("CARGO_PKG_VERSION");
|
// Copyright 2015-2016 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
//! Key Agreement: ECDH, including X25519.
//!
//! # Example
//!
//! Note that this example uses X25519, but ECDH using NIST P-256/P-384 is done
//! exactly the same way, just substituting
//! `agreement::ECDH_P256`/`agreement::ECDH_P384` for `agreement::X25519`.
//!
//! ```
//! # fn x25519_agreement_example() -> Result<(), ()> {
//! use ring::{agreement, rand};
//! use ring::input::Input;
//!
//! let rng = rand::SystemRandom::new();
//!
//! let my_private_key =
//! try!(agreement::EphemeralPrivateKey::generate(&agreement::X25519, &rng));
//!
//! // Make `my_public_key` a byte slice containing my public key. In a real
//! // application, this would be sent to the peer in an encoded protocol
//! // message.
//! let mut my_public_key = [0u8; agreement::PUBLIC_KEY_MAX_LEN];
//! let my_public_key =
//! &mut my_public_key[..my_private_key.public_key_len()];
//! try!(my_private_key.compute_public_key(my_public_key));
//!
//! // In a real application, the peer public key would be parsed out of a
//! // protocol message. Here we just generate one.
//! let mut peer_public_key_buf = [0u8; agreement::PUBLIC_KEY_MAX_LEN];
//! let peer_public_key;
//! {
//! let peer_private_key =
//! try!(agreement::EphemeralPrivateKey::generate(&agreement::X25519,
//! &rng));
//! peer_public_key =
//! &mut peer_public_key_buf[..peer_private_key.public_key_len()];
//! try!(peer_private_key.compute_public_key(peer_public_key));
//! }
//! let peer_public_key = try!(Input::new(peer_public_key));
//!
//! // In a real application, the protocol specifies how to determine what
//! // algorithm was used to generate the peer's private key. Here, we know it
//! // is X25519 since we just generated it.
//! let peer_public_key_alg = &agreement::X25519;
//!
//! let error_value = ();
//!
//! agreement::agree_ephemeral(my_private_key, peer_public_key_alg,
//! peer_public_key, error_value, |_key_material| {
//! // In a real application, we'd apply a KDF to the key material and the
//! // public keys (as recommended in RFC 7748) and then derive session
//! // keys from the result. We omit all that here.
//! Ok(())
//! })
//! # }
//! ```
pub use ec::PUBLIC_KEY_MAX_LEN;
pub use ec::ecdh::{
Algorithm,
EphemeralPrivateKey,
X25519,
agree_ephemeral,
};
#[cfg(not(feature = "no_heap"))]
pub use ec::ecdh::{
ECDH_P256,
ECDH_P384,
};
|
#![feature(box_syntax, box_patterns)]
#![feature(plugin)]
#![plugin(stainless)]
extern crate tinytl;
pub use tinytl::syntax::*;
pub use tinytl::types::*;
pub use tinytl::infer::*;
pub use tinytl::env::*;
pub use std::collections::HashMap;
/// Infers the type of `expr` under the default assumptions, generalizes the
/// result against an empty environment, and asserts its pretty-printed form
/// equals `expect`. Calls `reset_var()` afterwards — presumably resetting the
/// fresh type-variable counter so consecutive specs stay independent.
pub fn run_infer_spec(expr: &Expr, expect: &'static str) {
    assert_eq!(format!("{}", generalize(&HashMap::new(), &infer(&get_assumptions(), expr))), expect);
    reset_var();
}
// Specs written in the `stainless` plugin's `describe!`/`it` DSL.
describe! infer_spec {
    it "should unify" {
        // Pairs of monotypes fed to `unify`; the resulting substitution maps
        // are then checked variable by variable.
        let mono1 = &TArrow(Box::new(TVar('a')), Box::new(TInt));
        let mono2 = &TArrow(Box::new(TVar('b')), Box::new(TVar('b')));
        let mono3 = &TArrow(Box::new(TVar('a')), Box::new(TVar('b')));
        let mono4 = &TArrow(Box::new(TArrow(Box::new(TVar('b')), Box::new(TVar('c')))), Box::new(TVar('c')));
        let mono5 = &TArrow(Box::new(TVar('a')), Box::new(TInt));
        let mono6 = &TArrow(Box::new(TVar('a')), Box::new(TVar('b')));
        // 𝐒[a → Z] ∘ 𝐒[a → b]
        let mono7 = &TArrow(Box::new(TVar('a')), Box::new(TInt));
        let mono8 = &TArrow(Box::new(TVar('b')), Box::new(TVar('a')));
        let subrule1 = unify(mono1, mono2);
        let subrule2 = unify(mono3, mono4);
        let subrule3 = unify(mono5, mono6);
        let subrule4 = unify(mono7, mono8);
        assert_eq!(subrule1.get(&'a'), Some(&TInt));
        assert_eq!(subrule1.get(&'b'), Some(&TInt));
        assert_eq!(subrule2.get(&'a'), Some(&TArrow(Box::new(TVar('c')), Box::new(TVar('c')))));
        assert_eq!(subrule2.get(&'b'), Some(&TVar('c')));
        assert_eq!(subrule3.get(&'a'), None); //checkout make_single_subrule for reason
        assert_eq!(subrule3.get(&'b'), Some(&TInt));
        assert_eq!(subrule4.get(&'a'), Some(&TInt));
        assert_eq!(subrule4.get(&'b'), Some(&TInt));
    }
    it "should infer" {
        // End-to-end inference checks: expression in, pretty-printed type
        // scheme out (via `run_infer_spec` above).
        run_infer_spec(&EVar("id"), "∀a. a → a");
        run_infer_spec(&EApp(Box::new(EVar("id")), Box::new(EApp(Box::new(EVar("id")), Box::new(EVar("one"))))), "int");
        run_infer_spec(&EApp(Box::new(EApp(Box::new(EVar("eq")), Box::new(EVar("false")))), Box::new(EVar("true"))), "bool");
        run_infer_spec(&EVar("compose"), "∀a. ∀b. ∀c. (b → c) → (a → b) → a → c");
        run_infer_spec(&EApp(Box::new(EVar("compose")), Box::new(EVar("not"))), "∀a. (a → bool) → a → bool");
        run_infer_spec(&EApp(Box::new(EApp(Box::new(EVar("compose")), Box::new(EVar("not")))), Box::new(EApp(Box::new(EVar("eq")), Box::new(EVar("one"))))), "int → bool");
        run_infer_spec(&EApp(Box::new(EVar("compose")), Box::new(EApp(Box::new(EVar("add")), Box::new(EVar("one"))))), "∀a. (a → int) → a → int");
        run_infer_spec(&EApp(Box::new(EApp(Box::new(EApp(Box::new(EVar("compose")), Box::new(EVar("eq")))), Box::new(EVar("add")))), Box::new(EVar("one"))), "(int → int) → bool");
        run_infer_spec(&EApp(Box::new(EVar("compose")), Box::new(EVar("compose"))), "∀a. ∀d. ∀e. ∀f. (a → e → f) → a → (d → e) → d → f");
        run_infer_spec(&EAbs("a", Box::new(ELet("x", Box::new(EAbs("b", Box::new(ELet("y", Box::new(EAbs("c", Box::new(EApp(Box::new(EVar("a")), Box::new(EVar("zero")))))), Box::new(EApp(Box::new(EVar("y")), Box::new(EVar("one")))))))), Box::new(EApp(Box::new(EVar("x")), Box::new(EVar("one"))))))), "∀h. (int → h) → h");
        run_infer_spec(&EApp(Box::new(EApp(Box::new(EVar("choose")), Box::new(EAbs("a", Box::new(EAbs("b", Box::new(EVar("a")))))))), Box::new(EAbs("a", Box::new(EAbs("b", Box::new(EVar("b"))))))), "∀f. f → f → f");
        run_infer_spec(&EAbs("x", Box::new(EAbs("y", Box::new(ELet("x", Box::new(EApp(Box::new(EVar("x")), Box::new(EVar("y")))), Box::new(EAbs("x", Box::new(EApp(Box::new(EVar("y")), Box::new(EVar("x"))))))))))), "∀c. ∀d. ∀e. ((d → e) → c) → (d → e) → d → e");
    }
}
|
pub mod ast;
pub mod env;
pub mod eval;
pub mod parse;
|
use std::convert::TryFrom;
use std::fmt;
use num_enum::TryFromPrimitive;
use crate::device::Device;
use crate::encoder::Encoder;
use crate::error::Result;
use crate::mode::Mode;
use crate::object::Object;
use crate::object::ObjectType;
use crate::rawdevice::drm_mode_get_connector;
/// Connection state of a connector; discriminants match the DRM
/// `connection` field (which starts counting at 1).
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, TryFromPrimitive)]
#[repr(u32)]
pub enum ConnectorStatus {
    Connected = 1,
    Disconnected,
    Unknown,
}
/// Physical connector type; discriminants mirror the DRM connector type
/// values, starting at 0 with `Unknown`.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, TryFromPrimitive)]
#[repr(u32)]
pub enum ConnectorType {
    Unknown,
    VGA,
    DVII,
    DVID,
    DVIA,
    Composite,
    SVIDEO,
    LVDS,
    Component,
    MiniDin9,
    DisplayPort,
    HDMIA,
    HDMIB,
    TV,
    EDP,
    Virtual,
    DSI,
    DPI,
    Writeback,
    SPI,
}
impl fmt::Display for ConnectorType {
    /// Writes the conventional user-facing name of the connector type
    /// (e.g. `DVI-I`, `eDP`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            ConnectorType::Component => "Component",
            ConnectorType::Composite => "Composite",
            ConnectorType::DPI => "DPI",
            ConnectorType::DSI => "DSI",
            ConnectorType::DVIA => "DVI-A",
            ConnectorType::DVID => "DVI-D",
            ConnectorType::DVII => "DVI-I",
            ConnectorType::DisplayPort => "DisplayPort",
            ConnectorType::EDP => "eDP",
            ConnectorType::HDMIA => "HDMI-A",
            ConnectorType::HDMIB => "HDMI-B",
            ConnectorType::LVDS => "LVDS",
            ConnectorType::MiniDin9 => "MiniDin9",
            ConnectorType::SPI => "SPI",
            ConnectorType::SVIDEO => "S-VIDEO",
            ConnectorType::TV => "TV",
            ConnectorType::Unknown => "Unknown",
            ConnectorType::VGA => "VGA",
            ConnectorType::Virtual => "Virtual",
            ConnectorType::Writeback => "Writeback",
        };
        write!(f, "{}", name)
    }
}
/// A display connector belonging to a `Device`, with its probed type,
/// status, and physical dimensions.
#[derive(Debug)]
pub struct Connector<'a> {
    dev: &'a Device,            // device this connector was queried from
    id: u32,                    // DRM object id of the connector
    type_: ConnectorType,       // physical connector type
    type_id: u32,               // zero-based index among connectors of the same type
    status: ConnectorStatus,    // connection status at creation time
    mm_height: usize,           // physical height in millimetres
    mm_width: usize,            // physical width in millimetres
}
/// `Object` implementation so connectors work with the generic DRM object
/// helpers. (The previously declared lifetime parameter `'a` was never
/// used; the elided `'_` is sufficient.)
impl Object for Connector<'_> {
    /// Returns the device this connector belongs to.
    fn get_dev(&self) -> &Device {
        self.dev
    }

    /// Returns the DRM object id of this connector.
    fn get_id(&self) -> u32 {
        self.id
    }

    /// Always `ObjectType::Connector`.
    fn get_type(&self) -> ObjectType {
        ObjectType::Connector
    }
}
impl<'a> Connector<'a> {
    /// Wraps a raw `drm_mode_get_connector` into a typed [`Connector`].
    ///
    /// NOTE(review): the `try_from(...).unwrap()` calls panic if the kernel
    /// reports a connection/type value outside the known enum ranges, and
    /// `connector_type_id - 1` would underflow if the id were ever 0 — both
    /// assumed impossible here; confirm against the kernel ABI.
    pub(crate) fn new(
        dev: &'a Device,
        connector: drm_mode_get_connector,
    ) -> Result<Connector<'_>> {
        Ok(Connector {
            dev,
            id: connector.connector_id,
            status: ConnectorStatus::try_from(connector.connection).unwrap(),
            type_: ConnectorType::try_from(connector.connector_type).unwrap(),
            // For some reason the type ID starts at 1, make it consistent
            type_id: connector.connector_type_id - 1,
            mm_height: connector.mm_height as usize,
            mm_width: connector.mm_width as usize,
        })
    }

    /// Returns the encoders that can drive this connector.
    pub fn get_encoders(&'_ self) -> Result<Vec<Encoder<'a>>> {
        self.dev.get_connector_encoders(self)
    }

    /// Returns the zero-based index of this connector among connectors of
    /// the same type.
    pub fn get_index(&self) -> u32 {
        self.type_id
    }

    /// Returns the display modes advertised by this connector.
    pub fn get_modes(&'_ self) -> Result<Vec<Mode>> {
        self.dev.get_connector_modes(self)
    }

    /// Returns the connection status probed when this value was created.
    pub fn get_status(&self) -> ConnectorStatus {
        self.status
    }

    /// Returns the physical connector type.
    pub fn get_type(&self) -> ConnectorType {
        self.type_
    }
}
|
extern crate info;
/// Prints the `INFO` constant exported by the `info` crate.
fn main() {
    println!("info is: {}", info::INFO);
}
|
use super::{Item, ItemMediaType, ListType, StrNum};
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::error::Error;
/// One entry of a MyAnimeList manga list as returned by the `load.json`
/// endpoint; field names mirror the JSON payload.
#[derive(Debug, Serialize, Deserialize)]
pub struct Manga {
    id: u32,                            // list-entry id
    status: u8,                         // user's list-status code
    manga_id: u32,                      // MAL id of the manga itself
    manga_title: StrNum,                // title — StrNum presumably accepts string-or-number; confirm
    manga_num_chapters: i16,            // chapter count as reported by MAL
    manga_publishing_status: u8,        // MAL publishing-status code
    manga_url: String,                  // URL of the manga page
    manga_media_type_string: MangaType, // media type as reported by MAL
}
/// Media type as serialized by MyAnimeList; `"One-shot"` is renamed so the
/// variant follows Rust naming.
#[derive(Debug, Serialize, Deserialize)]
enum MangaType {
    Manga,
    Novel,
    #[serde(rename = "One-shot")]
    OneShot,
    Doujinshi,
    Manhwa,
    Manhua,
}
impl From<&Manga> for Item {
    /// Converts a raw MAL list entry into the list-agnostic `Item` shape.
    fn from(manga: &Manga) -> Item {
        // Map the MAL-specific media type onto the shared enum first.
        let media_type = match manga.manga_media_type_string {
            MangaType::Manga => ItemMediaType::Manga,
            MangaType::Novel => ItemMediaType::Novel,
            MangaType::OneShot => ItemMediaType::OneShot,
            MangaType::Doujinshi => ItemMediaType::Doujinshi,
            MangaType::Manhwa => ItemMediaType::Manhwa,
            MangaType::Manhua => ItemMediaType::Manhua,
        };
        Item {
            item_type: ListType::Manga,
            id: manga.manga_id,
            title: manga.manga_title.to_string(),
            amount: manga.manga_num_chapters,
            publishing_status: manga.manga_publishing_status,
            url: manga.manga_url.clone(),
            media_type,
        }
    }
}
pub fn fetch_all<F>(user: String, fun: F) -> Result<Vec<Manga>, Box<dyn Error>>
where
F: Fn(usize) -> (),
{
let mut offset: usize = 0;
let mut list: Vec<Manga> = Vec::with_capacity(300);
loop {
fun(offset);
let mut manga = fetch_data(&user, offset as u16)?;
if manga.is_empty() {
break;
}
list.append(&mut manga);
offset = list.len();
}
Ok(list)
}
/// Fetches a single page of the user's manga list from MyAnimeList,
/// starting at `offset`, and deserializes the JSON body.
/// NOTE(review): `status=6` presumably means "all entries" — confirm
/// against the MAL list API.
pub fn fetch_data(user: &str, offset: u16) -> Result<Vec<Manga>, Box<dyn Error>> {
    let url = format!(
        "https://myanimelist.net/mangalist/{user}/load.json?status=6&offset={offset}",
        user = user,
        offset = offset
    );
    let res = attohttpc::get(url).send()?.text()?;
    let manga: Vec<Manga> = serde_json::from_str(&res)?;
    Ok(manga)
}
|
use installer::archive::*;
fn main(){
let args: Vec<String> = std::env::args().skip(1).collect();
let archive = archive(&args.get(0).unwrap()).unwrap();
println!("archived with len: {}", archive.len());
//let compressed = compress(&archive).unwrap();
//println!("compressed");
//let decompressed = decompress(&compressed).unwrap();
//println!("decompressed");
dearchive(&archive, "./random").unwrap();
println!("dearchived");
} |
/*
chapter 4
syntax and semantics
*/
fn main() {
    // Borrowing demo: `foo` only borrows its arguments, so the caller keeps
    // ownership of both vectors after the call.
    fn foo(a: &Vec<i32>, b: &Vec<i32>) -> i32 {
        // inspect the borrowed vectors
        println!("{:?}\n{:?}", a, b);
        // borrows end here; return the function's result
        42
    }

    let a = vec![1, 2, 3];
    let b = vec![1, 2, 3];
    let answer = foo(&a, &b);
    println!("{}", answer);
    // a and b are still usable at this point
}
// output should be:
/*
42
*/
|
use std::io::Result;
/// 写入数据,并且同时写入到的follower/slaves, 但忽略follower的返回值。
/// 如果master写入失败,则请求直接返回。
/// 忽略所有follower的写入失败情况。
use std::pin::Pin;
use std::task::{Context, Poll};
use futures::ready;
use protocol::Protocol;
use crate::{AsyncReadAll, AsyncWriteAll, Request, Response};
/// Write-through set pipeline: writes a request to the master and mirrors
/// it to all followers/slaves, ignoring the followers' results. If the
/// master write fails, the request fails immediately.
pub struct AsyncSetSync<M, F, P> {
    master: M,
    // True once the current request has been written to the master.
    master_done: bool,
    // Data written to the master only needs to be written to the followers,
    // never read back: either the request itself is noreply, or some other
    // reader is responsible for consuming (and discarding) the responses.
    followers: Vec<F>,
    // Index of the follower currently being written to.
    f_idx: usize,
    parser: P,
    // Noreply copy of the in-flight request, built once per request.
    noreply: Option<Request>,
}
impl<M, F, P> AsyncSetSync<M, F, P> {
    /// Builds a write-through set pipeline from a master connection and a
    /// (possibly empty) list of follower connections.
    pub fn from_master(master: M, followers: Vec<F>, parser: P) -> Self {
        Self {
            master,
            followers,
            parser,
            master_done: false,
            f_idx: 0,
            noreply: None,
        }
    }
}
impl<M, F, P> AsyncWriteAll for AsyncSetSync<M, F, P>
where
    M: AsyncWriteAll + Unpin,
    F: AsyncWriteAll + Unpin,
    P: Protocol,
{
    /// Writes `buf` to the master first; once that completes, fans a
    /// noreply copy of the request out to every follower in order.
    /// Follower write errors are logged and ignored; only a master error
    /// propagates to the caller.
    fn poll_write(mut self: Pin<&mut Self>, cx: &mut Context, buf: &Request) -> Poll<Result<()>> {
        //log::debug!(" set req: {:?}", buf.data());
        let me = &mut *self;
        if !me.master_done {
            ready!(Pin::new(&mut me.master).poll_write(cx, buf))?;
            // Remember master success so re-polls skip straight to followers.
            me.master_done = true;
            if me.followers.len() > 0 {
                // Build the noreply copy once: follower responses are never awaited.
                let noreply = me.parser.copy_noreply(buf);
                me.noreply = Some(noreply);
            }
        }
        if me.followers.len() > 0 {
            let noreply = me.noreply.as_ref().unwrap();
            while me.f_idx < me.followers.len() {
                // SAFETY: the loop condition guarantees f_idx < followers.len().
                let w = Pin::new(unsafe { me.followers.get_unchecked_mut(me.f_idx) });
                // Best-effort: a follower failure is logged and skipped.
                if let Err(_e) = ready!(w.poll_write(cx, noreply)) {
                    log::debug!("write follower failed idx:{} err:{:?}", me.f_idx, _e);
                }
                me.f_idx += 1;
            }
        }
        // Reset the follower cursor for the next request.
        me.f_idx = 0;
        Poll::Ready(Ok(()))
    }
}
impl<M, F, P> AsyncReadAll for AsyncSetSync<M, F, P>
where
    M: AsyncReadAll + Unpin,
    F: AsyncReadAll + Unpin,
    P: Protocol,
{
    /// Reads the response from the master only (follower responses are
    /// ignored by design), then resets the write state for the next request.
    #[inline(always)]
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<Response>> {
        let me = &mut *self;
        let response = ready!(Pin::new(&mut me.master).poll_next(cx))?;
        // Clear per-request state so the next poll_write starts fresh.
        me.f_idx = 0;
        me.master_done = false;
        me.noreply.take();
        Poll::Ready(Ok(response))
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.