text stringlengths 8 4.13M |
|---|
//! # RedisRs
//! A simple redis client library
//! This library revolves around the Connection struct.
//! Every request is sent via Connection methods.
//! Requests can also be sent using the `send_raw_request` function.
//! # Examples
//! Create a connection and send requests
//!```
//! extern crate redis_rs;
//! use std::net::TcpStream;
//! use redis_rs::connection::Connection;
//! use redis_rs::response::RedisResponse;
//!
//! let host = "127.0.0.1";
//! let port = 6379;
//! let addr = format!("{}:{}", host, port);
//! let stream = TcpStream::connect(addr).unwrap();
//!
//! // stream can be anything that implements read and write
//! let mut client = Connection::new(host, port, stream);
//!
//! // send a request
//! let _ = client.send_raw_request("SET FOO BAR");
//! // or use a supported command
//! let response = client.get("FOO").unwrap();
//!
//! // match against the response to extract the value
//! if let RedisResponse::BulkString(value) = response {
//! println!("{}", value);
//! }
//!```
pub mod connection;
pub mod enums;
mod parse;
pub mod response;
|
use std::convert::Infallible;
use juniper::graphql_object;
// GraphQL root query object, exposed via juniper's `#[graphql_object]` macro.
struct Query;
#[graphql_object]
impl Query {
    /// Health-check field.
    // NOTE(review): always answers `Ok(false)` — looks like a placeholder;
    // confirm whether a live server should report `true` here.
    fn ping() -> Result<bool, Infallible> {
        Ok(false)
    }
}
|
use aoc2018::*;
/// Chocolate-recipe scoreboard state (AoC 2018 day 14): the score list plus
/// the two elves' current positions.
struct Recipe {
    pub data: Vec<usize>,
    // Indices of the two elves' current recipes.
    first: usize,
    second: usize,
}

impl Recipe {
    /// Starts the scoreboard with the two initial recipes, 3 and 7.
    fn new() -> Self {
        Recipe { data: vec![3, 7], first: 0, second: 1 }
    }

    /// Appends the decimal digits of the two current recipes' sum, advances
    /// both elves, and returns the new scoreboard length.
    fn make(&mut self) -> usize {
        let sum = self.data[self.first] + self.data[self.second];
        // Collect the digits of `sum`, least significant first, then flip
        // them so they are appended most-significant first.
        let mut digits = vec![sum % 10];
        let mut rest = sum / 10;
        while rest > 0 {
            digits.push(rest % 10);
            rest /= 10;
        }
        digits.reverse();
        self.data.extend(digits);
        // Each elf steps forward 1 + their current recipe's score, wrapping.
        self.first = (self.first + self.data[self.first] + 1) % self.data.len();
        self.second = (self.second + self.data[self.second] + 1) % self.data.len();
        self.data.len()
    }
}
/// Returns the scores of the ten recipes made immediately after the first
/// `input` recipes, as a string of digits.
fn part1(input: usize) -> String {
    let mut recipe = Recipe::new();
    // Keep cooking until at least `input + 10` recipes exist.
    while recipe.make() < input + 10 {}
    let mut answer = String::with_capacity(10);
    for &score in &recipe.data[input..input + 10] {
        answer.push_str(&score.to_string());
    }
    answer
}
/// Returns how many recipes appear to the left of the first occurrence of
/// `input`'s digit sequence on the scoreboard.
fn part2(mut input: usize) -> usize {
    // Decompose `input` into its decimal digits, most significant first.
    let mut needle = Vec::new();
    while input > 9 {
        needle.push(input % 10);
        input /= 10;
    }
    needle.push(input);
    needle.reverse();

    let mut recipe = Recipe::new();
    let mut ptr = 0;
    loop {
        let len = recipe.make();
        // Scan every window that became fully available this round.
        while ptr + needle.len() < len {
            if recipe.data[ptr..ptr + needle.len()] == needle[..] {
                return ptr;
            }
            ptr += 1;
        }
    }
}
/// Entry point: verifies both puzzle answers for the fixed puzzle input.
/// `Error` comes from the `aoc2018` helper crate (glob-imported above).
fn main() -> Result<(), Error> {
    let input = 209231;
    // Known-good answers for this input; a refactor that breaks them panics.
    assert_eq!(part1(input), "6126491027");
    assert_eq!(part2(input), 20191616);
    Ok(())
}
|
use std::path::PathBuf;
use structopt::StructOpt;
// CLI subcommands for the vault tool. `structopt` derives the parser; the
// `///` doc comments double as the `--help` text, so they are user-facing
// strings and must not be reworded casually.
#[derive(StructOpt, Debug)]
pub enum Command {
    /// Query the Vault database
    Query {
        /// Extract fastqs
        #[structopt(short,long, parse(from_os_str))]
        extract: Option<PathBuf>,
        /// Create samplesheet from results. Format depends on filename (.xlsx, .tsv)
        #[structopt(short,long)]
        samplesheet: Option<PathBuf>,
        /// Filter
        #[structopt(long)]
        filter: Vec<String>,
        /// Limit result count
        #[structopt(long)]
        limit: Option<usize>,
        /// A full-text search string
        query: String,
    },
    /// Import a samplesheet and match samples against the database
    Import {
        /// Extract fastqs
        #[structopt(short,long, parse(from_os_str))]
        extract: Option<PathBuf>,
        /// Create samplesheet from results. Format depends on filename (.xlsx, .tsv)
        #[structopt(short,long)]
        samplesheet: Option<PathBuf>,
        /// Override DB entries with these samplesheet columns (comma-separated)
        #[structopt(long)]
        overrides: Option<String>,
        // Positional argument: the samplesheet workbook to import.
        xlsx: PathBuf,
    },
    /// Update the database
    Update {
        /// Root folder for sequencing runs
        #[structopt(default_value = "/mnt/ngs/01-Rohdaten", long, parse(from_os_str))]
        rundir: PathBuf,
        /// Root folder for Cellsheet/spikeINBC lookup
        #[structopt(default_value = "/mnt/L/05-Molekulargenetik/09-NGS/01-Markerscreening", long, parse(from_os_str))]
        celldir: PathBuf,
    },
    /// Start the Rocket handler
    Web,
}
// Top-level CLI options; the subcommand is flattened in via `subcommand`.
#[derive(StructOpt, Debug)]
pub struct Opt {
    /// DB connection URI
    // NOTE(review): the default contains a placeholder password (`_`) —
    // presumably overridden in deployment; confirm it is never used as-is.
    #[structopt(default_value = "postgresql://vaultuser:_@vault.med2.uni-kiel.local/vault", long)]
    pub connstr: String,
    /// Number of threads to use (default: all cores)
    // 0 is the sentinel for "all cores" per the help text above.
    #[structopt(default_value = "0", long, short)]
    pub threads: usize,
    #[structopt(subcommand)]
    pub cmd: Command,
}
|
pub mod algorithms;
mod among;
mod snowball_env;
pub use snowball::among::Among;
pub use snowball::snowball_env::SnowballEnv;
|
extern crate json;
use algo_tools::load_json_tests;
struct Solution;

impl Solution {
    /// Binary search for the index of any peak element — an element strictly
    /// greater than its neighbours (LeetCode 162 semantics; virtual -inf
    /// sentinels outside the array).
    ///
    /// Fix: the original computed `len - 1` *before* the `len < 2` guard,
    /// which underflows (panics in debug builds) on an empty vector. The
    /// subtraction is now performed only after `len >= 2` is established;
    /// an empty input returns 0, matching the original `len < 2` intent.
    pub fn find_peak_element(nums: Vec<i32>) -> i32 {
        let len = nums.len();
        // A single element (or empty input), or a descending start, makes
        // index 0 a peak.
        if len < 2 || nums[0] > nums[1] {
            return 0;
        }
        // An ascending tail makes the last index a peak.
        if nums[len - 1] > nums[len - 2] {
            return (len - 1) as i32;
        }
        let mut left: usize = 0;
        let mut right: usize = len - 1;
        while left < right {
            let middle: usize = left + (right - left) / 2;
            if (middle > 0) && (nums[middle] <= nums[middle - 1]) {
                // Descending on the left: a peak lies strictly before middle.
                right = middle - 1;
            } else if (middle < (len - 1)) && (nums[middle] <= nums[middle + 1]) {
                // Ascending on the right: a peak lies strictly after middle.
                left = middle + 1;
            } else {
                // Greater than both neighbours (or at a checked boundary).
                return middle as i32;
            }
        }
        left as i32
    }
}
/// Runs `find_peak_element` against one JSON test case.
///
/// `test_case["in"]` holds the input array, `test_case["expected"]` the
/// expected peak index. Returns 0 on success, 1 on failure (after printing
/// a diagnostic line).
fn run_test_case(test_case: &json::JsonValue) -> i32 {
    let _i = &test_case["in"];
    let expected = test_case["expected"].as_i32().unwrap();
    // Convert the JSON array into a Vec<i32> for the solver.
    let mut i = Vec::new();
    for jop in _i.members() {
        i.push(jop.as_i32().unwrap());
    }
    // Clone so the original is still available for the failure message.
    let result = Solution::find_peak_element(i.clone());
    if result == expected {
        return 0;
    }
    println!("find_peak_element({:?}) returned {:?} but expected {:?}\n",
        i, result, expected);
    1
}
/// Runs one test case (index from the command line) or the whole suite when
/// the index is -1, then prints a pass/fail summary.
fn main() {
    let (tests, test_idx) = load_json_tests();
    // Reject an out-of-range index up front (-1 means "run everything").
    if test_idx >= tests.len() as i32 {
        println!("Wrong index {}, only {} tests available!!", test_idx, tests.len());
        return;
    }
    let (mut successes, mut failures) = (0, 0);
    if test_idx == -1 {
        println!("{} tests specified", tests.len());
        for idx in 0..tests.len() {
            match run_test_case(&tests[idx]) {
                0 => successes += 1,
                _ => failures += 1,
            }
        }
    } else {
        match run_test_case(&tests[test_idx as usize]) {
            0 => successes += 1,
            _ => failures += 1,
        }
    }
    if failures > 0 {
        println!("{} tests succeeded and {} tests failed!!", successes, failures);
    } else {
        println!("All {} tests succeeded!!", successes);
    }
}
|
use crate::gl_wrapper::fbo::{FBO, DepthStencilTarget};
use crate::gl_wrapper::texture_2d::Texture2D;
use crate::gl_wrapper::rbo::RBO;
use crate::containers::CONTAINER;
use crate::shaders::post_processing::{KernelShader, GaussianBlurShader};
use crate::shapes::PredefinedShapes;
use crate::gl_wrapper::TextureFormat;
/// A post-processing effect that consumes a rendered frame (an FBO) and
/// renders the processed result either into an internal FBO or directly to
/// the default framebuffer.
pub trait PPEffect: Send + Sync {
    /// Applies the effect to `input`; returns the FBO holding the result
    /// so effects can be chained.
    fn apply(&self, input: &FBO) -> &FBO;
    /// Applies the effect to `input`, rendering to the default framebuffer.
    fn apply_to_screen(&self, input: &FBO);
}
/// Convolution post-processing effect: runs an NxN kernel (N odd) over the
/// input frame on a fullscreen quad.
pub struct Kernel {
    // Flattened NxN convolution kernel; length must be the square of an odd
    // number (validated in `new`).
    kernel: Vec<f32>,
    // Offscreen render target that `apply` draws into.
    fb: FBO
}
impl Kernel {
    /// Builds the effect and allocates its render target.
    ///
    /// # Panics
    /// Panics if `kernel.len()` is not the square of an odd number.
    pub fn new(kernel: Vec<f32>) -> Self {
        // Validate kernel size
        let sq = f32::sqrt(kernel.len() as f32);
        if !(sq == sq.floor() && sq.floor() as u32 % 2 == 1) {
            panic!("Kernel len must be square of odd number")
        }
        // NOTE(review): target size is hard-coded to 1920x1080 — presumably
        // matches the window; confirm against the actual framebuffer size.
        let mut color_texture = Texture2D::new();
        color_texture.allocate(TextureFormat::RGBA, 1920, 1080, 1);
        let depth_stencil_rb = RBO::new();
        depth_stencil_rb.create_depth_stencil(1920, 1080);
        Kernel {
            kernel,
            fb: FBO::new(color_texture, DepthStencilTarget::RBO(depth_stencil_rb))
        }
    }
    /// Draws `input`'s color texture through the kernel shader onto the
    /// "unit_quad" shape; the caller decides which render target is bound.
    fn _apply(&self, input: &FBO) {
        gl_call!(gl::ClearColor(1.0, 0.5, 1.0, 1.0));
        gl_call!(gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT | gl::STENCIL_BUFFER_BIT));
        let pp_shader = CONTAINER.get_local::<KernelShader>();
        let quad_vao = CONTAINER.get_local::<PredefinedShapes>().shapes.get("unit_quad").unwrap();
        pp_shader.bind_screen_texture(&input.color_texture);
        pp_shader.bind_kernel(&self.kernel);
        quad_vao.bind();
        // Depth test off: the fullscreen quad must always be drawn.
        gl_call!(gl::Disable(gl::DEPTH_TEST));
        gl_call!(gl::DrawArrays(gl::TRIANGLES, 0, 6));
    }
}
impl PPEffect for Kernel {
    fn apply(&self, input: &FBO) -> &FBO {
        // Render into the internal FBO and hand it back for chaining.
        self.fb.bind();
        self._apply(input);
        &self.fb
    }
    fn apply_to_screen(&self, input: &FBO) {
        FBO::bind_default();
        self._apply(input);
    }
}
/// Two-pass separable Gaussian blur: a first pass into `v_pass`, then a
/// second pass into `h_pass` (or the screen).
pub struct GaussianBlur {
    // 1-D separable kernel of odd length, applied once per axis.
    kernel: Vec<f32>,
    v_pass: FBO,
    h_pass: FBO
}
impl GaussianBlur {
    // TODO specify kernel size / sigma(intensity) and precalculate the kernel
    /// Builds the blur and allocates both intermediate render targets.
    ///
    /// # Panics
    /// Panics if `kernel.len()` is even.
    pub fn new(kernel: Vec<f32>) -> Self {
        // Validate kernel size
        if kernel.len() % 2 != 1 {
            panic!("GaussianBlur kernel len must be an odd number")
        }
        // Both pass targets are identical 1920x1080 RGBA FBOs.
        let create_fb = || {
            let mut color_texture = Texture2D::new();
            color_texture.allocate(TextureFormat::RGBA, 1920, 1080, 1);
            let depth_stencil_rb = RBO::new();
            depth_stencil_rb.create_depth_stencil(1920, 1080);
            // TODO Prefer composition over setters
            FBO::new(color_texture, DepthStencilTarget::RBO(depth_stencil_rb))
        };
        GaussianBlur {
            kernel,
            v_pass: create_fb(),
            h_pass: create_fb(),
        }
    }
    /// Runs both blur passes over `input`; the second pass goes to the
    /// screen when `to_screen` is set, otherwise into `h_pass`.
    // NOTE(review): `bind_kernel`'s bool presumably selects the blur axis
    // (true = vertical) — confirm against GaussianBlurShader.
    fn _apply(&self, input: &FBO, to_screen: bool) {
        let pp_shader = CONTAINER.get_local::<GaussianBlurShader>();
        let quad_vao = CONTAINER.get_local::<PredefinedShapes>().shapes.get("unit_quad").unwrap();
        // v pass
        pp_shader.bind_screen_texture(&input.color_texture);
        pp_shader.bind_kernel(&self.kernel, true);
        quad_vao.bind();
        self.v_pass.bind();
        gl_call!(gl::ClearColor(1.0, 0.5, 1.0, 1.0));
        gl_call!(gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT | gl::STENCIL_BUFFER_BIT));
        gl_call!(gl::Disable(gl::DEPTH_TEST));
        gl_call!(gl::DrawArrays(gl::TRIANGLES, 0, 6));
        // h pass
        pp_shader.bind_screen_texture(&self.v_pass.color_texture);
        pp_shader.bind_kernel(&self.kernel, false);
        quad_vao.bind();
        if !to_screen { self.h_pass.bind(); } else { FBO::bind_default(); }
        gl_call!(gl::ClearColor(1.0, 0.5, 1.0, 1.0));
        gl_call!(gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT | gl::STENCIL_BUFFER_BIT));
        gl_call!(gl::Disable(gl::DEPTH_TEST));
        gl_call!(gl::DrawArrays(gl::TRIANGLES, 0, 6));
    }
}
impl PPEffect for GaussianBlur {
    fn apply(&self, input: &FBO) -> &FBO {
        self._apply(input, false);
        // The second (horizontal) pass holds the finished result.
        &self.h_pass
    }
    fn apply_to_screen(&self, input: &FBO) {
        self._apply(input, true);
    }
}
use std::fmt;
use std::collections::HashSet;
// strum::IntoEnumIterator is required for EnumIter, but produces unused import warning
#[allow(unused_imports)]
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
/// Color is one of the intrinsic parts of the game of Magic. This enum is used to designate
/// a single color.
#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash, EnumIter, Ord, PartialOrd)]
pub enum Color {
    // Declaration order defines both the EnumIter iteration order and the
    // derived Ord: White < Blue < Black < Red < Green (the WUBRG wheel).
    White,
    Blue,
    Black,
    Red,
    Green,
}
// One-letter color symbols; Blue uses 'U' because 'B' is taken by Black.
const WHITE_SYMBOL: char = 'W';
const BLUE_SYMBOL: char = 'U';
const BLACK_SYMBOL: char = 'B';
const RED_SYMBOL: char = 'R';
const GREEN_SYMBOL: char = 'G';
impl Color {
/// Returns the letter used to represent the color
pub fn symbol(self) -> char {
use Color::*;
match self {
White => WHITE_SYMBOL,
Blue => BLUE_SYMBOL,
Black => BLACK_SYMBOL,
Red => RED_SYMBOL,
Green => GREEN_SYMBOL
}
}
/// Returns the color represented by a letter
pub fn from_symbol(symbol: char) -> Option<Color> {
use Color::*;
match symbol {
WHITE_SYMBOL => Some(White),
BLUE_SYMBOL => Some(Blue),
BLACK_SYMBOL => Some(Black),
RED_SYMBOL => Some(Red),
GREEN_SYMBOL => Some(Green),
_ => None
}
}
}
/// A collection of colors. The Colors type is an alias of std::collection::HashSet<Color>
/// that implements the HasColor trait
pub type Colors = HashSet<Color>;
/// Implemented by anything that can be considered colored. Provides the
/// common color predicates from the Magic rules, all derived from `colors()`.
pub trait HasColor {
    /// Returns the set of colors the implementor has (a
    /// `std::collections::HashSet<Color>` aliased to `Colors`).
    fn colors(&self) -> Colors;
    /// True if the object has at least one color.
    fn is_colored(&self) -> bool {
        !self.colors().is_empty()
    }
    /// True if the object has no colors.
    fn is_colorless(&self) -> bool {
        !self.is_colored()
    }
    /// True if the object has two or more colors.
    fn is_multicolored(&self) -> bool {
        self.colors().len() >= 2
    }
    /// True if the object has exactly one color.
    fn is_monocolored(&self) -> bool {
        self.colors().len() == 1
    }
    /// True if the object has the given color.
    fn is_color(&self, color: Color) -> bool {
        self.colors().contains(&color)
    }
}
impl HasColor for Colors {
    /// A set of colors is its own color set.
    fn colors(&self) -> Colors {
        self.clone()
    }
}
impl fmt::Display for Color {
    /// Displays the color as its one-letter symbol (e.g. `W`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.symbol())
    }
}
impl HasColor for Color {
    /// A single color's set contains exactly itself.
    fn colors(&self) -> Colors {
        let mut colors = Colors::new();
        // Color is Copy, so a dereference suffices — no clone needed.
        colors.insert(*self);
        colors
    }
    // A lone Color can answer every predicate without building a set.
    fn is_colored(&self) -> bool {
        true
    }
    fn is_colorless(&self) -> bool {
        false
    }
    fn is_multicolored(&self) -> bool {
        false
    }
    fn is_monocolored(&self) -> bool {
        true
    }
    fn is_color(&self, color: Color) -> bool {
        color == *self
    }
}
#[cfg(test)]
mod color_test {
    use super::*;
    // Symbol round-trips, EnumIter order, and derived Ord for the wheel.
    #[test]
    fn from_symbol() {
        assert_eq!(Some(Color::White), Color::from_symbol('W'));
        assert_eq!(Some(Color::Blue), Color::from_symbol('U'));
        assert_eq!(Some(Color::Black), Color::from_symbol('B'));
        assert_eq!(Some(Color::Red), Color::from_symbol('R'));
        assert_eq!(Some(Color::Green), Color::from_symbol('G'));
        assert_eq!(None, Color::from_symbol('?'));
    }
    #[test]
    fn to_symbol() {
        assert_eq!(Color::White.symbol(), 'W');
        assert_eq!(Color::Blue.symbol(), 'U');
        assert_eq!(Color::Black.symbol(), 'B');
        assert_eq!(Color::Red.symbol(), 'R');
        assert_eq!(Color::Green.symbol(), 'G');
    }
    // Iteration order must match the declaration order of the enum.
    #[test]
    fn color_iter() {
        let mut colors = Color::iter();
        assert_eq!(colors.next().unwrap(), Color::White);
        assert_eq!(colors.next().unwrap(), Color::Blue);
        assert_eq!(colors.next().unwrap(), Color::Black);
        assert_eq!(colors.next().unwrap(), Color::Red);
        assert_eq!(colors.next().unwrap(), Color::Green);
        assert_eq!(colors.next(), None);
    }
    // The derived Ord follows declaration order: W < U < B < R < G.
    #[test]
    fn color_ord() {
        assert_eq!(Color::White < Color::Blue, true);
        assert_eq!(Color::Blue < Color::Black, true);
        assert_eq!(Color::Black < Color::Red, true);
        assert_eq!(Color::Red < Color::Green, true);
        assert_eq!(Color::Green < Color::White, false);
        assert_eq!(Color::White <= Color::Blue, true);
        assert_eq!(Color::Blue <= Color::Black, true);
        assert_eq!(Color::Black <= Color::Red, true);
        assert_eq!(Color::Red <= Color::Green, true);
        assert_eq!(Color::Green <= Color::White, false);
        assert_eq!(Color::White == Color::White, true);
        assert_eq!(Color::Blue == Color::Blue, true);
        assert_eq!(Color::Black == Color::Black, true);
        assert_eq!(Color::Red == Color::Red, true);
        assert_eq!(Color::Green == Color::Green, true);
    }
}
|
use std::{collections::BTreeMap, str::FromStr};
use mysql::chrono::{NaiveDate};
use mysql_common::bigdecimal::BigDecimal;
use crate::timeseries::TimeSeries;
pub struct AlphaVantage {
key: String,
}
impl AlphaVantage {
pub fn with_key(key: &str) -> Self {
AlphaVantage {
key: String::from_str(key).unwrap(),
}
}
pub fn fetch_daily(&self, symbol: &str, compact: bool) -> TimeSeries {
let url;
if compact {
url = format!("https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&outputsize=compact&symbol={}&apikey={}", symbol, self.key);
} else {
url = format!("https://www.alphavantage.co/query?function=TIME_SERIES_DAILY_ADJUSTED&outputsize=full&symbol={}&apikey={}", symbol, self.key);
}
let json = reqwest::blocking::get(&url).expect(&format!("Couldn't get json from Alphavantage! Request url: {}", &url)).text().unwrap();
let parsed = json::parse(&json).unwrap();
let mut data: BTreeMap<NaiveDate, (BigDecimal, Option<f32>)> = BTreeMap::new();
parsed["Time Series (Daily)"].entries().for_each(|x| {
let entry_json = &parsed["Time Series (Daily)"][x.0];
data.insert(NaiveDate::from_str(x.0).unwrap(),
(BigDecimal::from_str(&entry_json["4. close"].to_string()).unwrap(),
Some(entry_json["8. split coefficient"].to_string().parse().unwrap())));
});
if data.is_empty() {
panic!("KEY INVALID OR EQUITY INVALID!");
}
TimeSeries {
equity_name: String::from_str(symbol).unwrap(),
entries: data,
}
}
} |
use {
crate::controls::CameraRotation,
derive_new::new,
rough::{
amethyst::{
controls::{HideCursor, WindowFocus},
core::{ecs::prelude::*, timing::Time, transform::Transform, ParentHierarchy},
derive::SystemDesc,
renderer::Camera,
},
math::wave::sin_wave,
na::Vector3,
physics::{get_input_velocity, InputVelocity, PhysicsState, Speed, Velocity},
vek::Vec3,
},
};
/// Parameters of a single sine wave, as consumed by `sin_wave`.
pub struct SinWaveArgs {
    amplitude: f32,
    frequency: f32,
    phase: f32,
}
/// Independent sine waves for the X and Y axes of a bobbing motion.
pub struct XYBob {
    x: SinWaveArgs,
    y: SinWaveArgs,
}
impl XYBob {
    /// Evaluates both waves at `time`, producing an XY offset (Z stays 0).
    pub fn bob(&self, time: f32) -> Vector3<f32> {
        Vector3::new(Self::f(time, &self.x), Self::f(time, &self.y), 0.0)
    }
    // Thin shim over `sin_wave` so both axes share one call site.
    #[inline(always)]
    fn f(time: f32, args: &SinWaveArgs) -> f32 {
        sin_wave(time, args.amplitude, args.frequency, args.phase)
    }
}
/// Per-entity view-bobbing state: an accumulated wave phase plus one bob
/// profile for walking and one for swimming.
pub struct ViewBobs {
    // Camera offset the bob oscillates around.
    base_translation: Vector3<f32>,
    // Accumulated phase; advanced by `dt * speed` so bobbing tracks movement.
    time: f32,
    swim_bob: XYBob,
    walk_bob: XYBob,
}
impl ViewBobs {
    /// Creates the bobs with hard-coded walk/swim wave parameters
    /// (the walk bob is slightly stronger than the swim bob... actually the
    /// swim amplitudes here are larger — tuned by hand, see the literals).
    pub fn new(base_translation: Vector3<f32>) -> Self {
        Self {
            base_translation,
            time: 0.0,
            swim_bob: XYBob {
                x: SinWaveArgs {
                    amplitude: 0.2,
                    frequency: 10.0,
                    phase: 0.0,
                },
                y: SinWaveArgs {
                    amplitude: 0.1,
                    frequency: 20.0,
                    phase: 0.0,
                },
            },
            walk_bob: XYBob {
                x: SinWaveArgs {
                    amplitude: 0.15,
                    frequency: 10.0,
                    phase: 0.0,
                },
                y: SinWaveArgs {
                    amplitude: 0.075,
                    frequency: 20.0,
                    phase: 0.0,
                },
            },
        }
    }
    /// Advances the bob phase and returns the new camera translation, or
    /// `None` while airborne (neither on ground nor in liquid).
    pub fn update(
        &mut self,
        dt: f32,
        speed: f32,
        physics_state: &PhysicsState,
    ) -> Option<Vector3<f32>> {
        if !(physics_state.on_ground || physics_state.in_liquid()) {
            return None;
        }
        // Faster movement advances the wave faster.
        self.time += dt * speed;
        // Walk bob on ground; otherwise (in liquid) use the swim bob.
        if physics_state.on_ground {
            Some(self.base_translation + self.walk_bob.bob(self.time))
        } else {
            Some(self.base_translation + self.swim_bob.bob(self.time))
        }
    }
}
impl Component for ViewBobs {
    // HashMapStorage: appropriate since only a few entities (cameras) bob.
    type Storage = HashMapStorage<Self>;
}
// All world resources and component storages the view-bobbing system reads
// or writes, bundled through the `SystemData` derive.
#[derive(SystemData)]
pub struct ViewBobbingSystemData<'a> {
    time: Read<'a, Time>,
    window_focus: Read<'a, WindowFocus>,
    hide_cursor: Read<'a, HideCursor>,
    hierarchy: ReadExpect<'a, ParentHierarchy>,
    entities: Entities<'a>,
    transforms: WriteStorage<'a, Transform>,
    physics_states: ReadStorage<'a, PhysicsState>,
    velocities: ReadStorage<'a, Velocity>,
    input_velocities: ReadStorage<'a, InputVelocity>,
    speeds: ReadStorage<'a, Speed>,
    cameras: ReadStorage<'a, Camera>,
    camera_rotations: WriteStorage<'a, CameraRotation>,
    view_bobs: WriteStorage<'a, ViewBobs>,
}
impl<'a> ViewBobbingSystemData<'a> {
    /// Bobbing only runs while the window is focused and the cursor is
    /// captured, i.e. the player is actually controlling the game.
    pub fn is_running(&self) -> bool {
        self.window_focus.is_focused && self.hide_cursor.hide
    }
}
/// System that offsets each camera's transform by a movement-driven bob.
#[derive(Debug, SystemDesc, new)]
#[system_desc(name(ViewBobbingSystemDesc))]
pub struct ViewBobbingSystem;
impl ViewBobbingSystem {
    // Name used when registering the system in the dispatcher.
    pub const NAME: &'static str = "view_bobbing";
}
impl<'a> System<'a> for ViewBobbingSystem {
    type SystemData = ViewBobbingSystemData<'a>;
    /// For every camera carrying `ViewBobs`, derive a speed from the parent
    /// entity's physics and apply the bob offset to the camera transform.
    fn run(&mut self, data: Self::SystemData) {
        if !data.is_running() {
            return;
        }
        let ViewBobbingSystemData {
            time,
            hierarchy,
            entities,
            mut transforms,
            physics_states,
            velocities,
            input_velocities,
            speeds,
            cameras,
            mut camera_rotations,
            mut view_bobs,
            ..
        } = data;
        for (entity, transform, _camera, camera_rotation, view_bobs) in (
            &entities,
            &mut transforms,
            &cameras,
            &mut camera_rotations,
            &mut view_bobs,
        )
            .join()
        {
            // Physics state lives on the camera's parent (the body entity).
            let parent_entity = if let Some(parent_entity) = hierarchy.parent(entity) {
                parent_entity
            } else {
                continue;
            };
            if let Some(physics_state) = physics_states.get(parent_entity) {
                let dt = time.delta_seconds();
                // Speed = |actual velocity + intended input velocity| * dt.
                let speed = {
                    let input_velocity =
                        get_input_velocity(parent_entity, &input_velocities, &speeds);
                    let velocity = velocities
                        .get(parent_entity)
                        .map(|v| v.0)
                        .unwrap_or(Vec3::zero());
                    (velocity + input_velocity).magnitude() * dt
                };
                // Ignore jitter below the threshold so an idle camera stays still.
                if speed < 0.01 {
                    continue;
                }
                if let Some(translation) = view_bobs.update(dt, speed, physics_state) {
                    // Rotate the bob offset into camera space before applying.
                    let translation = camera_rotation.rotate_into(translation);
                    transform.set_translation(translation);
                }
            }
        }
    }
}
|
/// Prints two greetings built by `say_hello`, separated by a space.
fn main() {
    let line = format!("{} {}", say_hello("me"), say_hello("you"));
    println!("{}", line);
}

/// Builds a greeting string for `name`.
pub fn say_hello(name: &str) -> String {
    format!("hello, {}!", name)
}
|
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//! Structs for reading and writing manifests of flash boot stage images.
//!
//! Note: The structs below must match the definitions in
//! sw/device/silicon_creator/lib/manifest.h.
#![deny(warnings)]
#![deny(unused)]
#![deny(unsafe_code)]
use std::mem::size_of;
use memoffset::offset_of;
use zerocopy::AsBytes;
use zerocopy::FromBytes;
// Currently, these definitions must be updated manually but they can be
// generated using the following commands (requires bindgen):
// cargo install bindgen
// cd "${REPO_TOP}"
// bindgen --allowlist-type manifest_t --allowlist-var "MANIFEST_.*" \
// --no-doc-comments --no-layout-tests \
// sw/device/silicon_creator/lib/manifest.h \
// -- -I./ -Isw/device/lib/base/freestanding
/// Total size of the manifest in bytes; kept in sync with the C definition
/// in sw/device/silicon_creator/lib/manifest.h (see `check_manifest_layout`).
pub const MANIFEST_SIZE: u32 = 832;
/// Manifest for boot stage images stored in flash.
#[repr(C)]
#[derive(FromBytes, AsBytes, Debug, Default)]
pub struct Manifest {
    // Field order matters: the repr(C) byte offsets are asserted against the
    // C struct in `check_manifest_layout()` — do not reorder.
    pub identifier: u32,
    pub signature: SigverifyRsaBuffer,
    pub image_length: u32,
    pub image_major_version: u32,
    pub image_minor_version: u32,
    pub image_timestamp: u64,
    pub exponent: u32,
    pub binding_value: KeymgrBindingValue,
    pub max_key_version: u32,
    pub modulus: SigverifyRsaBuffer,
}
/// A type that holds 96 32-bit words for RSA-3072.
#[repr(C)]
#[derive(FromBytes, AsBytes, Debug)]
pub struct SigverifyRsaBuffer {
    pub data: [u32; 96usize],
}
impl Default for SigverifyRsaBuffer {
    // Manual impl: `Default` is not derivable here — presumably because the
    // derive does not cover 96-element arrays on this toolchain (confirm).
    fn default() -> Self {
        Self { data: [0; 96usize] }
    }
}
/// Key manager binding value: 8 32-bit words (see manifest.h).
#[repr(C)]
#[derive(FromBytes, AsBytes, Debug, Default)]
pub struct KeymgrBindingValue {
    pub data: [u32; 8usize],
}
/// Checks the layout of the manifest struct.
///
/// Implemented as a function because using `offset_of!` at compile-time
/// requires a nightly compiler.
/// TODO(#6915): Convert this to a unit test after we start running rust tests during our builds.
pub fn check_manifest_layout() {
    // Expected byte offsets mirror the C definition in
    // sw/device/silicon_creator/lib/manifest.h; any drift panics here.
    assert_eq!(offset_of!(Manifest, identifier), 0);
    assert_eq!(offset_of!(Manifest, signature), 4);
    assert_eq!(offset_of!(Manifest, image_length), 388);
    assert_eq!(offset_of!(Manifest, image_major_version), 392);
    assert_eq!(offset_of!(Manifest, image_minor_version), 396);
    assert_eq!(offset_of!(Manifest, image_timestamp), 400);
    assert_eq!(offset_of!(Manifest, exponent), 408);
    assert_eq!(offset_of!(Manifest, binding_value), 412);
    assert_eq!(offset_of!(Manifest, max_key_version), 444);
    assert_eq!(offset_of!(Manifest, modulus), 448);
    // The struct must fill the manifest region exactly.
    assert_eq!(size_of::<Manifest>(), MANIFEST_SIZE as usize);
}
|
use anyhow::{bail, Context};
use itertools::{Itertools, MinMaxResult};
const INPUT: &str = include_str!("input.txt");
fn part1(values: &[usize]) -> anyhow::Result<usize> {
let result = values
.iter()
.enumerate()
.skip(25)
.map(|(i, &x)| (&values[i - 25..i], x))
.find(|&(prev, x)| {
!prev
.iter()
.cartesian_product(prev)
.any(|(&a, &b)| a + b == x)
})
.context("no gaps found")?;
Ok(result.1)
}
/// Finds a contiguous run of at least two values summing to `target` and
/// returns the sum of the run's smallest and largest elements.
///
/// Fix: `j` previously ranged over `i + 1..values.len()`. Because the slice
/// `values[i..j]` excludes `j`, (a) a run ending at the final element could
/// never be found, and (b) one-element "runs" were allowed, so reaching
/// `target`'s own position in the list matched it against itself and bailed
/// with an error. Ranging `j` over `i + 2..=values.len()` fixes both: runs
/// have at least two elements and may extend to the end of the slice.
fn part2(values: &[usize], target: usize) -> anyhow::Result<usize> {
    let (i, j) = (0..values.len())
        .flat_map(|i| (i + 2..=values.len()).map(move |j| (i, j)))
        .find(|&(i, j)| values[i..j].iter().sum::<usize>() == target)
        .context("no range found")?;
    match values[i..j].iter().minmax() {
        // With >= 2 elements this arm always matches; the bail is defensive.
        MinMaxResult::MinMax(low, high) => Ok(low + high),
        _ => bail!("not enough elements in range"),
    }
}
/// Parses one integer per input line; part 1's answer doubles as part 2's
/// search target.
fn main() -> anyhow::Result<()> {
    let values: Vec<_> = INPUT.lines().map(str::parse).collect::<Result<_, _>>()?;
    let target = part1(&values)?;
    println!("part 1: {}", target);
    println!("part 2: {}", part2(&values, target)?);
    Ok(())
}
|
use std::ops::Not;
pub type CartId = String;
pub type ItemId = String;
pub type Quantity = u32;
pub type OrderId = String;
// Commands express caller intent; `Cart::handle` decides whether they take
// effect and turns accepted ones into events.
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub enum CartCommand {
    Create {
        cart_id: CartId,
    },
    Cancel {
        cart_id: CartId,
    },
    // Sets the absolute quantity of an item (0 removes it).
    ChangeQty {
        cart_id: CartId,
        item_id: ItemId,
        qty: Quantity,
    },
    Order {
        cart_id: CartId,
        order_id: OrderId,
    },
}
// Events are accepted facts; replaying them through `Cart::apply` rebuilds
// the cart state (event sourcing).
#[allow(dead_code)]
#[derive(Debug, Clone)]
pub enum CartEvent {
    Created {
        cart_id: CartId,
    },
    Cancelled {
        cart_id: CartId,
    },
    ChangedQty {
        cart_id: CartId,
        item_id: ItemId,
        qty: Quantity,
    },
    Ordered {
        cart_id: CartId,
        order_id: OrderId,
    },
}
impl CartEvent {
    // Constructor shorthands used by `Cart::handle` to build candidate events.
    fn created(cart_id: CartId) -> Self {
        Self::Created { cart_id }
    }

    fn cancelled(cart_id: CartId) -> Self {
        Self::Cancelled { cart_id }
    }

    fn changed(cart_id: CartId, item_id: ItemId, qty: Quantity) -> Self {
        Self::ChangedQty { cart_id, item_id, qty }
    }

    fn ordered(cart_id: CartId, order_id: OrderId) -> Self {
        Self::Ordered { cart_id, order_id }
    }
}
// Cart lifecycle as a state machine: Nothing -> Empty -> Active -> Ordered,
// with Cancelled reachable from Empty and Active. Each variant carries only
// the data that is valid in that state.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub enum Cart {
    Nothing,
    Empty {
        id: CartId,
    },
    Active {
        id: CartId,
        items: Vec<CartItem>,
    },
    Ordered {
        id: CartId,
        items: Vec<CartItem>,
        order_id: OrderId,
    },
    Cancelled {
        id: CartId,
    },
}
/// One line item: an item id with its (always positive) quantity.
#[derive(Debug, Clone, PartialEq)]
pub struct CartItem {
    item_id: ItemId,
    qty: Quantity,
}
#[allow(dead_code)]
impl Cart {
    /// Decides a command: returns the next state and the event to persist,
    /// or the unchanged state with `None` when the command had no effect.
    pub fn handle(self, cmd: &CartCommand) -> (Self, Option<CartEvent>) {
        match cmd {
            CartCommand::Create { cart_id } =>
                Self::transit(self, CartEvent::created(cart_id.clone())),
            CartCommand::Cancel { cart_id } =>
                Self::transit(self, CartEvent::cancelled(cart_id.clone())),
            CartCommand::ChangeQty { cart_id, item_id, qty } =>
                Self::transit(self,
                    CartEvent::changed(cart_id.clone(), item_id.clone(), *qty)),
            CartCommand::Order { cart_id, order_id } =>
                Self::transit(self,
                    CartEvent::ordered(cart_id.clone(), order_id.clone())),
        }
    }
    /// Rebuilds state by folding a stream of persisted events over `self`.
    pub fn restore<'a, T>(self, events: T) -> Self
    where
        T: Iterator<Item = &'a CartEvent>
    {
        events.fold(self, Self::apply)
    }
    /// Applies `event` tentatively; the event is only emitted if the state
    /// actually changed, so invalid commands are silently dropped instead of
    /// being recorded.
    fn transit(self, event: CartEvent) -> (Self, Option<CartEvent>) {
        let state = Self::apply(self.clone(), &event);
        if self.eq(&state) {
            (self, None)
        } else {
            (state, Some(event))
        }
    }
    /// The transition function. Match guards enforce that an event only
    /// applies to the matching cart id in a state that permits it; anything
    /// else falls through and leaves the state untouched.
    fn apply(self, event: &CartEvent) -> Self {
        match &self {
            Self::Nothing => match event {
                // A cart can only be created once, and needs a non-empty id.
                CartEvent::Created { cart_id }
                    if cart_id.is_empty().not() =>
                    Self::Empty { id: cart_id.clone() },
                _ => self
            },
            Self::Empty { id: cid } => match event {
                // First positive quantity turns the cart Active.
                CartEvent::ChangedQty { cart_id, item_id, qty}
                    if cid.eq(cart_id) && *qty > 0 => {
                    let item = CartItem {
                        item_id: item_id.clone(),
                        qty: *qty
                    };
                    Self::Active { id: cart_id.clone(), items: vec![item] }
                },
                CartEvent::Cancelled { cart_id }
                    if cid.eq(cart_id) =>
                    Self::Cancelled { id: cart_id.clone() },
                _ => self
            },
            Self::Active { id: cid, items } => match event {
                CartEvent::ChangedQty { cart_id, item_id, qty}
                    if cid.eq(cart_id) =>
                    Self::change_qty(items, cart_id, item_id, *qty),
                CartEvent::Cancelled { cart_id }
                    if cid.eq(cart_id) =>
                    Self::Cancelled { id: cart_id.clone() },
                CartEvent::Ordered { cart_id, order_id }
                    if cid.eq(cart_id) =>
                    Self::Ordered {
                        id: cart_id.clone(),
                        items: items.clone(),
                        order_id: order_id.clone()
                    },
                _ => self
            },
            // Ordered and Cancelled are terminal: no event changes them.
            _ => self
        }
    }
    /// Replaces `item_id`'s quantity (qty == 0 removes the item) and drops
    /// back to `Empty` when no items remain.
    fn change_qty(items: &Vec<CartItem>, cart_id: &CartId,
        item_id: &ItemId, qty: Quantity) -> Self {
        let item =
            if qty > 0 {
                Some(CartItem { item_id: item_id.clone(), qty: qty })
            } else {
                None
            };
        // Keep everything but the changed item, then append its replacement.
        let new_items: Vec<_> =
            items.iter()
                .cloned()
                .filter(|t| t.item_id.eq(item_id).not())
                .chain(item.iter().cloned())
                .collect();
        if new_items.is_empty() {
            Self::Empty { id: cart_id.clone() }
        } else {
            Self::Active { id: cart_id.clone(), items: new_items }
        }
    }
}
|
mod lib;
use lib::readcsv;
/// Reads "1.csv" with the local `readcsv` helper and debug-prints the result.
fn main() {
    let r = readcsv("1.csv");
    println!("{:?}", r);
}
|
// Benchmarks comparing a `match`-based saturating counter against an
// `Option::and_then` fold (measured results in the comment below).
//
// Fix: the module imports the nightly-only `test` crate (for `Bencher`), so
// it must be gated behind `cfg(test)` or ordinary builds fail to resolve
// `test::Bencher`. Requires `#![feature(test)]` + `extern crate test` at the
// crate root on a nightly toolchain.
#[cfg(test)]
mod tests {
    use test::Bencher;

    /// Counts up to 1000 using -1 as a sentinel "saturated" state.
    #[bench]
    fn bench_add1(b: &mut Bencher) {
        b.iter(|| {
            let mut res = 0i32;
            // Parentheses around the range were redundant and removed.
            for _ in 0..2000 {
                match res {
                    -1 => {}
                    x if x < 1000 => res += 1,
                    _ => res = -1,
                }
            }
            res
        })
    }

    /// Same counting loop expressed as an `Option` fold; `None` plays the
    /// role of the saturated state.
    #[bench]
    fn bench_add2(b: &mut Bencher) {
        b.iter(|| {
            (0..2000).fold(Some(0), |r, _| {
                r.and_then(|x| {
                    if x < 1000 { Some(x + 1) }
                    else { None }
                })
            })
        })
    }
}
// test option::tests::bench_add1 ... bench: 2959 ns/iter (+/- 1509)
// test option::tests::bench_add2 ... bench: 3313 ns/iter (+/- 961)
|
// Read/write value wrappers in the svd2rust style — this file appears to be
// auto-generated from an SVD description; prefer regenerating over editing.
#[doc = r"Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::DSLPPWRCFG {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the closure sees the current bits both as the
        // read view and as the starting value of the write view.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the closure starts from the reset value, so any
        // field not set explicitly is written back to its reset state.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
// Read enum (R suffix), write enum (W suffix) and write proxy for the 2-bit
// SRAMPM field occupying bits [1:0] (see the `3 << 0` mask in the proxy).
#[doc = "Possible values of the field `SYSCTL_DSLPPWRCFG_SRAMPM`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCTL_DSLPPWRCFG_SRAMPMR {
    #[doc = "Active Mode"]
    SYSCTL_DSLPPWRCFG_SRAMPM_NRM,
    #[doc = "Standby Mode"]
    SYSCTL_DSLPPWRCFG_SRAMPM_SBY,
    #[doc = "Low Power Mode"]
    SYSCTL_DSLPPWRCFG_SRAMPM_LP,
    // Catch-all for undocumented bit patterns read back from hardware.
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl SYSCTL_DSLPPWRCFG_SRAMPMR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_NRM => 0,
            SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_SBY => 1,
            SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_LP => 3,
            SYSCTL_DSLPPWRCFG_SRAMPMR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> SYSCTL_DSLPPWRCFG_SRAMPMR {
        // Note: value 2 is not a named mode and maps to _Reserved(2).
        match value {
            0 => SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_NRM,
            1 => SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_SBY,
            3 => SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_LP,
            i => SYSCTL_DSLPPWRCFG_SRAMPMR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SYSCTL_DSLPPWRCFG_SRAMPM_NRM`"]
    #[inline(always)]
    pub fn is_sysctl_dslppwrcfg_srampm_nrm(&self) -> bool {
        *self == SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_NRM
    }
    #[doc = "Checks if the value of the field is `SYSCTL_DSLPPWRCFG_SRAMPM_SBY`"]
    #[inline(always)]
    pub fn is_sysctl_dslppwrcfg_srampm_sby(&self) -> bool {
        *self == SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_SBY
    }
    #[doc = "Checks if the value of the field is `SYSCTL_DSLPPWRCFG_SRAMPM_LP`"]
    #[inline(always)]
    pub fn is_sysctl_dslppwrcfg_srampm_lp(&self) -> bool {
        *self == SYSCTL_DSLPPWRCFG_SRAMPMR::SYSCTL_DSLPPWRCFG_SRAMPM_LP
    }
}
// Write-side enum: no _Reserved variant, so only valid values can be written
// through `variant`.
#[doc = "Values that can be written to the field `SYSCTL_DSLPPWRCFG_SRAMPM`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCTL_DSLPPWRCFG_SRAMPMW {
    #[doc = "Active Mode"]
    SYSCTL_DSLPPWRCFG_SRAMPM_NRM,
    #[doc = "Standby Mode"]
    SYSCTL_DSLPPWRCFG_SRAMPM_SBY,
    #[doc = "Low Power Mode"]
    SYSCTL_DSLPPWRCFG_SRAMPM_LP,
}
impl SYSCTL_DSLPPWRCFG_SRAMPMW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            SYSCTL_DSLPPWRCFG_SRAMPMW::SYSCTL_DSLPPWRCFG_SRAMPM_NRM => 0,
            SYSCTL_DSLPPWRCFG_SRAMPMW::SYSCTL_DSLPPWRCFG_SRAMPM_SBY => 1,
            SYSCTL_DSLPPWRCFG_SRAMPMW::SYSCTL_DSLPPWRCFG_SRAMPM_LP => 3,
        }
    }
}
#[doc = r"Proxy"]
pub struct _SYSCTL_DSLPPWRCFG_SRAMPMW<'a> {
    w: &'a mut W,
}
impl<'a> _SYSCTL_DSLPPWRCFG_SRAMPMW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SYSCTL_DSLPPWRCFG_SRAMPMW) -> &'a mut W {
        // SAFETY (of the inner unsafe call): `_bits` only yields the
        // enumerated values 0, 1, 3, all of which fit the 2-bit field.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Active Mode"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_srampm_nrm(self) -> &'a mut W {
        self.variant(SYSCTL_DSLPPWRCFG_SRAMPMW::SYSCTL_DSLPPWRCFG_SRAMPM_NRM)
    }
    #[doc = "Standby Mode"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_srampm_sby(self) -> &'a mut W {
        self.variant(SYSCTL_DSLPPWRCFG_SRAMPMW::SYSCTL_DSLPPWRCFG_SRAMPM_SBY)
    }
    #[doc = "Low Power Mode"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_srampm_lp(self) -> &'a mut W {
        self.variant(SYSCTL_DSLPPWRCFG_SRAMPMW::SYSCTL_DSLPPWRCFG_SRAMPM_LP)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits [1:0], then OR in the masked value.
        self.w.bits &= !(3 << 0);
        self.w.bits |= ((value as u32) & 3) << 0;
        self.w
    }
}
// Read enum, write enum and write proxy for the FLASHPM field (same
// generated pattern as SRAMPM above).
#[doc = "Possible values of the field `SYSCTL_DSLPPWRCFG_FLASHPM`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCTL_DSLPPWRCFG_FLASHPMR {
    #[doc = "Active Mode"]
    SYSCTL_DSLPPWRCFG_FLASHPM_NRM,
    #[doc = "Low Power Mode"]
    SYSCTL_DSLPPWRCFG_FLASHPM_SLP,
    // Catch-all for undocumented bit patterns read back from hardware.
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl SYSCTL_DSLPPWRCFG_FLASHPMR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            SYSCTL_DSLPPWRCFG_FLASHPMR::SYSCTL_DSLPPWRCFG_FLASHPM_NRM => 0,
            SYSCTL_DSLPPWRCFG_FLASHPMR::SYSCTL_DSLPPWRCFG_FLASHPM_SLP => 2,
            SYSCTL_DSLPPWRCFG_FLASHPMR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> SYSCTL_DSLPPWRCFG_FLASHPMR {
        // Only 0 and 2 are named; 1 and 3 map to _Reserved.
        match value {
            0 => SYSCTL_DSLPPWRCFG_FLASHPMR::SYSCTL_DSLPPWRCFG_FLASHPM_NRM,
            2 => SYSCTL_DSLPPWRCFG_FLASHPMR::SYSCTL_DSLPPWRCFG_FLASHPM_SLP,
            i => SYSCTL_DSLPPWRCFG_FLASHPMR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SYSCTL_DSLPPWRCFG_FLASHPM_NRM`"]
    #[inline(always)]
    pub fn is_sysctl_dslppwrcfg_flashpm_nrm(&self) -> bool {
        *self == SYSCTL_DSLPPWRCFG_FLASHPMR::SYSCTL_DSLPPWRCFG_FLASHPM_NRM
    }
    #[doc = "Checks if the value of the field is `SYSCTL_DSLPPWRCFG_FLASHPM_SLP`"]
    #[inline(always)]
    pub fn is_sysctl_dslppwrcfg_flashpm_slp(&self) -> bool {
        *self == SYSCTL_DSLPPWRCFG_FLASHPMR::SYSCTL_DSLPPWRCFG_FLASHPM_SLP
    }
}
// Write-side enum: no _Reserved variant, so only valid values are writable.
#[doc = "Values that can be written to the field `SYSCTL_DSLPPWRCFG_FLASHPM`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCTL_DSLPPWRCFG_FLASHPMW {
    #[doc = "Active Mode"]
    SYSCTL_DSLPPWRCFG_FLASHPM_NRM,
    #[doc = "Low Power Mode"]
    SYSCTL_DSLPPWRCFG_FLASHPM_SLP,
}
impl SYSCTL_DSLPPWRCFG_FLASHPMW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            SYSCTL_DSLPPWRCFG_FLASHPMW::SYSCTL_DSLPPWRCFG_FLASHPM_NRM => 0,
            SYSCTL_DSLPPWRCFG_FLASHPMW::SYSCTL_DSLPPWRCFG_FLASHPM_SLP => 2,
        }
    }
}
#[doc = r"Proxy"]
pub struct _SYSCTL_DSLPPWRCFG_FLASHPMW<'a> {
    w: &'a mut W,
}
impl<'a> _SYSCTL_DSLPPWRCFG_FLASHPMW<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SYSCTL_DSLPPWRCFG_FLASHPMW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "Active Mode"]
#[inline(always)]
pub fn sysctl_dslppwrcfg_flashpm_nrm(self) -> &'a mut W {
self.variant(SYSCTL_DSLPPWRCFG_FLASHPMW::SYSCTL_DSLPPWRCFG_FLASHPM_NRM)
}
#[doc = "Low Power Mode"]
#[inline(always)]
pub fn sysctl_dslppwrcfg_flashpm_slp(self) -> &'a mut W {
self.variant(SYSCTL_DSLPPWRCFG_FLASHPMW::SYSCTL_DSLPPWRCFG_FLASHPM_SLP)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits &= !(3 << 4);
self.w.bits |= ((value as u32) & 3) << 4;
self.w
}
}
#[doc = r"Value of the field"]
// Read-side view of the single-bit TSPD field (bit 8 of the register).
pub struct SYSCTL_DSLPPWRCFG_TSPDR {
    bits: bool,
}
impl SYSCTL_DSLPPWRCFG_TSPDR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
// Write proxy for the TSPD bit; borrows the register writer `W`.
pub struct _SYSCTL_DSLPPWRCFG_TSPDW<'a> {
    w: &'a mut W,
}
impl<'a> _SYSCTL_DSLPPWRCFG_TSPDW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 8, then OR in the new value shifted into place.
        self.w.bits &= !(1 << 8);
        self.w.bits |= ((value as u32) & 1) << 8;
        self.w
    }
}
#[doc = r"Value of the field"]
// Read-side view of the single-bit LDOSM field (bit 9 of the register).
pub struct SYSCTL_DSLPPWRCFG_LDOSMR {
    bits: bool,
}
impl SYSCTL_DSLPPWRCFG_LDOSMR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
// Write proxy for the LDOSM bit; borrows the register writer `W`.
pub struct _SYSCTL_DSLPPWRCFG_LDOSMW<'a> {
    w: &'a mut W,
}
impl<'a> _SYSCTL_DSLPPWRCFG_LDOSMW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 9, then OR in the new value shifted into place.
        self.w.bits &= !(1 << 9);
        self.w.bits |= ((value as u32) & 1) << 9;
        self.w
    }
}
// Read accessors for the SYSCTL_DSLPPWRCFG register. `R` (the reader type)
// is declared elsewhere in this file; each getter extracts one field from
// the cached raw value.
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:1 - SRAM Power Modes"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_srampm(&self) -> SYSCTL_DSLPPWRCFG_SRAMPMR {
        SYSCTL_DSLPPWRCFG_SRAMPMR::_from(((self.bits >> 0) & 3) as u8)
    }
    #[doc = "Bits 4:5 - Flash Power Modes"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_flashpm(&self) -> SYSCTL_DSLPPWRCFG_FLASHPMR {
        SYSCTL_DSLPPWRCFG_FLASHPMR::_from(((self.bits >> 4) & 3) as u8)
    }
    #[doc = "Bit 8 - Temperature Sense Power Down"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_tspd(&self) -> SYSCTL_DSLPPWRCFG_TSPDR {
        let bits = ((self.bits >> 8) & 1) != 0;
        SYSCTL_DSLPPWRCFG_TSPDR { bits }
    }
    #[doc = "Bit 9 - LDO Sleep Mode"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_ldosm(&self) -> SYSCTL_DSLPPWRCFG_LDOSMR {
        let bits = ((self.bits >> 9) & 1) != 0;
        SYSCTL_DSLPPWRCFG_LDOSMR { bits }
    }
}
// Write accessors for the SYSCTL_DSLPPWRCFG register. `W` (the writer type)
// is declared elsewhere in this file; each method returns a field proxy that
// mutates the staged raw value.
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:1 - SRAM Power Modes"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_srampm(&mut self) -> _SYSCTL_DSLPPWRCFG_SRAMPMW {
        _SYSCTL_DSLPPWRCFG_SRAMPMW { w: self }
    }
    #[doc = "Bits 4:5 - Flash Power Modes"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_flashpm(&mut self) -> _SYSCTL_DSLPPWRCFG_FLASHPMW {
        _SYSCTL_DSLPPWRCFG_FLASHPMW { w: self }
    }
    #[doc = "Bit 8 - Temperature Sense Power Down"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_tspd(&mut self) -> _SYSCTL_DSLPPWRCFG_TSPDW {
        _SYSCTL_DSLPPWRCFG_TSPDW { w: self }
    }
    #[doc = "Bit 9 - LDO Sleep Mode"]
    #[inline(always)]
    pub fn sysctl_dslppwrcfg_ldosm(&mut self) -> _SYSCTL_DSLPPWRCFG_LDOSMW {
        _SYSCTL_DSLPPWRCFG_LDOSMW { w: self }
    }
}
|
/// Returns the `i`-th decimal digit of `decnum`, counted from the least
/// significant digit (`i = 1` is the ones digit, `i = 2` the tens digit, ...).
///
/// # Panics
/// `i` must be at least 1: `(i - 1) as u32` underflows for `i = 0`, and
/// `10_i32.pow` then panics on overflow. Callers must also keep `i` small
/// enough that `10^i` fits in an `i32` (i.e. `i <= 10`).
pub fn dec_ith(decnum: i32, i: i32) -> u8 {
    // 10^(i-1) is the place value of the requested digit; 10^i = place * 10.
    let place = 10_i32.pow((i - 1) as u32);
    ((decnum % (place * 10)) / place) as u8
}
/// Parses a binary string (most significant bit first) into an `i32`,
/// e.g. `"101"` -> 5. Any character other than `'1'` counts as a 0 bit.
///
/// Note: the previous version printed the input via `println!` — leftover
/// debug output that has been removed.
pub fn bitstring_to_i32(bitstr: &str) -> i32 {
    // Fold MSB-first: shift the accumulator left and OR in each new bit.
    bitstr
        .chars()
        .fold(0, |acc, c| (acc << 1) | (c == '1') as i32)
}
#[cfg(test)]
mod tests {
    // `use bits::*;` relied on 2015-edition crate-root paths (and on the file
    // being mounted as module `bits`); `use super::*;` targets the enclosing
    // module directly and works in every edition.
    use super::*;
    #[test]
    fn test_dec_ith() {
        assert_eq!(dec_ith(4002, 1), 2);
        assert_eq!(dec_ith(4002, 2), 0);
        assert_eq!(dec_ith(4002, 3), 0);
        assert_eq!(dec_ith(4002, 4), 4);
    }
}
use std::env;
use std::fmt::Display;
use std::fs;
use std::io;
use std::io::{BufRead, BufReader};
use std::mem;
use std::os::unix::io::AsRawFd;
use std::ptr;
use std::str;
use crate::kb::Key;
use crate::term::Term;
pub use crate::common_term::*;
pub const DEFAULT_WIDTH: u16 = 80;
/// Returns `true` when `out`'s file descriptor refers to a terminal.
#[inline]
pub fn is_a_terminal(out: &Term) -> bool {
    // SAFETY: isatty only inspects the descriptor; any fd value is safe to pass.
    unsafe { libc::isatty(out.as_raw_fd()) != 0 }
}
/// Returns `true` when `out` is a terminal that is expected to support color.
///
/// Color is ruled out when `out` is not a tty at all, when the `NO_COLOR`
/// environment variable is set (to anything), or when `TERM` is unset or
/// equals `"dumb"`.
pub fn is_a_color_terminal(out: &Term) -> bool {
    if !is_a_terminal(out) || env::var("NO_COLOR").is_ok() {
        return false;
    }
    env::var("TERM").map(|term| term != "dumb").unwrap_or(false)
}
/// Runs a libc-style call and converts its return code into an `io::Result`:
/// `0` maps to `Ok(())`, anything else to the current `errno` as an error.
pub fn c_result<F: FnOnce() -> libc::c_int>(f: F) -> io::Result<()> {
    match f() {
        0 => Ok(()),
        _ => Err(io::Error::last_os_error()),
    }
}
/// Queries the size of the terminal behind `out`, returning
/// `Some((rows, cols))`, or `None` when `out` is not a tty or the kernel
/// reports a zero dimension.
pub fn terminal_size(out: &Term) -> Option<(u16, u16)> {
    unsafe {
        // Fix: the original checked isatty(STDOUT_FILENO) but then issued the
        // ioctl against `out`'s descriptor — the wrong fd when `out` is not
        // stdout. Both operations now use `out.as_raw_fd()`.
        if libc::isatty(out.as_raw_fd()) != 1 {
            return None;
        }
        let mut winsize: libc::winsize = std::mem::zeroed();
        // FIXME: ".into()" used as a temporary fix for a libc bug
        // https://github.com/rust-lang/libc/pull/704
        #[allow(clippy::useless_conversion)]
        libc::ioctl(out.as_raw_fd(), libc::TIOCGWINSZ.into(), &mut winsize);
        if winsize.ws_row > 0 && winsize.ws_col > 0 {
            Some((winsize.ws_row as u16, winsize.ws_col as u16))
        } else {
            None
        }
    }
}
/// Reads one line from the terminal with echo disabled (e.g. for passwords).
///
/// If stdin is not a tty, `/dev/tty` is opened so the prompt still talks to
/// the controlling terminal. The terminal's original attributes are restored
/// before returning, and the trailing CR/LF is stripped from the result.
pub fn read_secure() -> io::Result<String> {
    let f_tty;
    let fd = unsafe {
        if libc::isatty(libc::STDIN_FILENO) == 1 {
            // stdin is a terminal; read from it directly.
            f_tty = None;
            libc::STDIN_FILENO
        } else {
            // stdin is redirected: use the controlling terminal instead.
            let f = fs::OpenOptions::new()
                .read(true)
                .write(true)
                .open("/dev/tty")?;
            let fd = f.as_raw_fd();
            f_tty = Some(BufReader::new(f));
            fd
        }
    };
    // Snapshot the current terminal attributes so they can be restored.
    let mut termios = core::mem::MaybeUninit::uninit();
    c_result(|| unsafe { libc::tcgetattr(fd, termios.as_mut_ptr()) })?;
    let mut termios = unsafe { termios.assume_init() };
    let original = termios;
    // Disable echo so the typed secret is not shown on screen.
    termios.c_lflag &= !libc::ECHO;
    c_result(|| unsafe { libc::tcsetattr(fd, libc::TCSAFLUSH, &termios) })?;
    let mut rv = String::new();
    let read_rv = if let Some(mut f) = f_tty {
        f.read_line(&mut rv)
    } else {
        io::stdin().read_line(&mut rv)
    };
    // Restore the original attributes before inspecting the read result.
    c_result(|| unsafe { libc::tcsetattr(fd, libc::TCSAFLUSH, &original) })?;
    read_rv.map(|_| {
        // Strip the trailing newline / carriage return in place.
        let len = rv.trim_end_matches(&['\r', '\n'][..]).len();
        rv.truncate(len);
        rv
    })
}
/// Waits up to `timeout` milliseconds (negative blocks indefinitely) for `fd`
/// to become readable. Returns `Ok(true)` when there is input pending.
fn poll_fd(fd: i32, timeout: i32) -> io::Result<bool> {
    let mut pollfd = libc::pollfd {
        fd,
        events: libc::POLLIN,
        revents: 0,
    };
    let rc = unsafe { libc::poll(&mut pollfd as *mut _, 1, timeout) };
    if rc >= 0 {
        Ok(pollfd.revents & libc::POLLIN != 0)
    } else {
        Err(io::Error::last_os_error())
    }
}
/// `select(2)`-based readiness check used on macOS, where polling a tty with
/// `poll(2)` is unreliable. `timeout` is in milliseconds; negative blocks
/// indefinitely.
#[cfg(target_os = "macos")]
fn select_fd(fd: i32, timeout: i32) -> io::Result<bool> {
    unsafe {
        let mut read_fd_set: libc::fd_set = mem::zeroed();
        let mut timeout_val;
        let timeout = if timeout < 0 {
            ptr::null_mut()
        } else {
            // Fix: tv_usec previously used `timeout * 1000`, i.e. the WHOLE
            // timeout converted to microseconds, on top of tv_sec already
            // holding the whole seconds — overshooting the wait and feeding
            // select() an out-of-range tv_usec for timeouts >= 1s. Only the
            // sub-second remainder belongs in tv_usec.
            timeout_val = libc::timeval {
                tv_sec: (timeout / 1000) as _,
                tv_usec: ((timeout % 1000) * 1000) as _,
            };
            &mut timeout_val
        };
        libc::FD_ZERO(&mut read_fd_set);
        libc::FD_SET(fd, &mut read_fd_set);
        let ret = libc::select(
            fd + 1,
            &mut read_fd_set,
            ptr::null_mut(),
            ptr::null_mut(),
            timeout,
        );
        if ret < 0 {
            Err(io::Error::last_os_error())
        } else {
            Ok(libc::FD_ISSET(fd, &read_fd_set))
        }
    }
}
/// Readiness check that papers over platform quirks: on macOS a tty must be
/// waited on with select(); everything else goes through poll().
fn select_or_poll_term_fd(fd: i32, timeout: i32) -> io::Result<bool> {
    // There is a bug on macos that ttys cannot be polled, only select()
    // works. However given how problematic select is in general, we
    // normally want to use poll there too.
    #[cfg(target_os = "macos")]
    {
        if unsafe { libc::isatty(fd) == 1 } {
            return select_fd(fd, timeout);
        }
    }
    poll_fd(fd, timeout)
}
/// Non-blocking read of one byte from `fd`; `Ok(None)` when nothing is ready.
///
/// NOTE(review): the byte is widened with `as char`, so values >= 0x80 come
/// back as Latin-1 code points — multi-byte UTF-8 assembly is handled by the
/// caller (`read_single_key_impl`), which re-reads the remaining bytes.
fn read_single_char(fd: i32) -> io::Result<Option<char>> {
    // timeout of zero means that it will not block
    let is_ready = select_or_poll_term_fd(fd, 0)?;
    if is_ready {
        // if there is something to be read, take 1 byte from it
        let mut buf: [u8; 1] = [0];
        read_bytes(fd, &mut buf, 1)?;
        Ok(Some(buf[0] as char))
    } else {
        //there is nothing to be read
        Ok(None)
    }
}
// Similar to libc::read. Read count bytes into slice buf from descriptor fd.
// If successful, return the number of bytes read.
// Will return an error if nothing was read, i.e when called at end of file.
// Additionally, a leading 0x03 byte (Ctrl-C / ETX) is surfaced as an
// `Interrupted` error so the caller can translate it into SIGINT.
fn read_bytes(fd: i32, buf: &mut [u8], count: u8) -> io::Result<u8> {
    let read = unsafe { libc::read(fd, buf.as_mut_ptr() as *mut _, count as usize) };
    if read < 0 {
        Err(io::Error::last_os_error())
    } else if read == 0 {
        // Zero bytes from a blocking read means end-of-file.
        Err(io::Error::new(
            io::ErrorKind::UnexpectedEof,
            "Reached end of file",
        ))
    } else if buf[0] == b'\x03' {
        // Ctrl-C arrived as raw input (terminal is in raw mode).
        Err(io::Error::new(
            io::ErrorKind::Interrupted,
            "read interrupted",
        ))
    } else {
        Ok(read as u8)
    }
}
/// Core key-decoding loop: reads bytes from `fd` (already in raw mode) and
/// assembles them into a single `Key`, handling ANSI/VT escape sequences and
/// multi-byte UTF-8 characters. Blocks until a full key is available.
fn read_single_key_impl(fd: i32) -> Result<Key, io::Error> {
    loop {
        match read_single_char(fd)? {
            Some('\x1b') => {
                // Escape was read, keep reading in case we find a familiar key
                break if let Some(c1) = read_single_char(fd)? {
                    if c1 == '[' {
                        // CSI sequence: \x1b[ followed by a final byte, or by a
                        // digit plus '~' for the extended keys below.
                        if let Some(c2) = read_single_char(fd)? {
                            match c2 {
                                'A' => Ok(Key::ArrowUp),
                                'B' => Ok(Key::ArrowDown),
                                'C' => Ok(Key::ArrowRight),
                                'D' => Ok(Key::ArrowLeft),
                                'H' => Ok(Key::Home),
                                'F' => Ok(Key::End),
                                'Z' => Ok(Key::BackTab),
                                _ => {
                                    let c3 = read_single_char(fd)?;
                                    if let Some(c3) = c3 {
                                        if c3 == '~' {
                                            // "\x1b[<digit>~" family of keys.
                                            match c2 {
                                                '1' => Ok(Key::Home), // tmux
                                                '2' => Ok(Key::Insert),
                                                '3' => Ok(Key::Del),
                                                '4' => Ok(Key::End), // tmux
                                                '5' => Ok(Key::PageUp),
                                                '6' => Ok(Key::PageDown),
                                                '7' => Ok(Key::Home), // xrvt
                                                '8' => Ok(Key::End), // xrvt
                                                _ => Ok(Key::UnknownEscSeq(vec![c1, c2, c3])),
                                            }
                                        } else {
                                            Ok(Key::UnknownEscSeq(vec![c1, c2, c3]))
                                        }
                                    } else {
                                        // \x1b[ and 1 more char
                                        Ok(Key::UnknownEscSeq(vec![c1, c2]))
                                    }
                                }
                            }
                        } else {
                            // \x1b[ and no more input
                            Ok(Key::UnknownEscSeq(vec![c1]))
                        }
                    } else {
                        // char after escape is not [
                        Ok(Key::UnknownEscSeq(vec![c1]))
                    }
                } else {
                    //nothing after escape
                    Ok(Key::Escape)
                };
            }
            Some(c) => {
                let byte = c as u8;
                let mut buf: [u8; 4] = [byte, 0, 0, 0];
                // The lead byte's high bits tell us the UTF-8 sequence length;
                // read the continuation bytes before decoding.
                break if byte & 224u8 == 192u8 {
                    // a two byte unicode character
                    read_bytes(fd, &mut buf[1..], 1)?;
                    Ok(key_from_utf8(&buf[..2]))
                } else if byte & 240u8 == 224u8 {
                    // a three byte unicode character
                    read_bytes(fd, &mut buf[1..], 2)?;
                    Ok(key_from_utf8(&buf[..3]))
                } else if byte & 248u8 == 240u8 {
                    // a four byte unicode character
                    read_bytes(fd, &mut buf[1..], 3)?;
                    Ok(key_from_utf8(&buf[..4]))
                } else {
                    // Plain ASCII; map control characters onto named keys.
                    Ok(match c {
                        '\n' | '\r' => Key::Enter,
                        '\x7f' => Key::Backspace,
                        '\t' => Key::Tab,
                        '\x01' => Key::Home, // Control-A (home)
                        '\x05' => Key::End, // Control-E (end)
                        '\x08' => Key::Backspace, // Control-H (8) (Identical to '\b')
                        _ => Key::Char(c),
                    })
                };
            }
            None => {
                // there is no subsequent byte ready to be read, block and wait for input
                // negative timeout means that it will block indefinitely
                match select_or_poll_term_fd(fd, -1) {
                    Ok(_) => continue,
                    Err(_) => break Err(io::Error::last_os_error()),
                }
            }
        }
    }
}
/// Reads one key press from the terminal (stdin, or `/dev/tty` when stdin is
/// redirected), temporarily switching the terminal into raw mode.
///
/// The original terminal attributes are restored before returning; if the
/// user pressed Ctrl-C, SIGINT is raised after the restore so default signal
/// semantics still apply.
pub fn read_single_key() -> io::Result<Key> {
    let tty_f;
    let fd = unsafe {
        if libc::isatty(libc::STDIN_FILENO) == 1 {
            libc::STDIN_FILENO
        } else {
            // stdin is not a terminal: read keys from the controlling tty.
            tty_f = fs::OpenOptions::new()
                .read(true)
                .write(true)
                .open("/dev/tty")?;
            tty_f.as_raw_fd()
        }
    };
    let mut termios = core::mem::MaybeUninit::uninit();
    c_result(|| unsafe { libc::tcgetattr(fd, termios.as_mut_ptr()) })?;
    let mut termios = unsafe { termios.assume_init() };
    let original = termios;
    // Raw mode: byte-at-a-time input, no echo — but keep output processing
    // (c_oflag) as it was so printed text still renders normally.
    unsafe { libc::cfmakeraw(&mut termios) };
    termios.c_oflag = original.c_oflag;
    c_result(|| unsafe { libc::tcsetattr(fd, libc::TCSADRAIN, &termios) })?;
    let rv: io::Result<Key> = read_single_key_impl(fd);
    // Restore the terminal before acting on the result.
    c_result(|| unsafe { libc::tcsetattr(fd, libc::TCSADRAIN, &original) })?;
    // if the user hit ^C we want to signal SIGINT to outselves.
    if let Err(ref err) = rv {
        if err.kind() == io::ErrorKind::Interrupted {
            unsafe {
                libc::raise(libc::SIGINT);
            }
        }
    }
    rv
}
/// Decodes `buf` as UTF-8 and wraps its first character in `Key::Char`;
/// returns `Key::Unknown` when the bytes are not valid UTF-8 or empty.
pub fn key_from_utf8(buf: &[u8]) -> Key {
    str::from_utf8(buf)
        .ok()
        .and_then(|s| s.chars().next())
        .map(Key::Char)
        .unwrap_or(Key::Unknown)
}
// Cached check of the LANG environment variable: true when it declares a
// UTF-8 locale (case-insensitively ends with "UTF-8"). Evaluated once; later
// changes to the environment are not observed.
#[cfg(not(target_os = "macos"))]
lazy_static::lazy_static! {
    static ref IS_LANG_UTF8: bool = match std::env::var("LANG") {
        Ok(lang) => lang.to_uppercase().ends_with("UTF-8"),
        _ => false,
    };
}
/// Whether emoji output is expected to render: always `true` on macOS.
#[cfg(target_os = "macos")]
pub fn wants_emoji() -> bool {
    true
}
/// Whether emoji output is expected to render: on other platforms, only when
/// `LANG` declares a UTF-8 locale (see `IS_LANG_UTF8`).
#[cfg(not(target_os = "macos"))]
pub fn wants_emoji() -> bool {
    *IS_LANG_UTF8
}
/// Sets the terminal window title via the xterm "OSC 0" escape sequence,
/// written to stdout.
pub fn set_title<T: Display>(title: T) {
    let sequence = format!("\x1b]0;{}\x07", title);
    print!("{}", sequence);
}
|
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
use whiteread::parse_line;
const ten97: usize = 1000000007;
/// Maps an ASCII letter to its 0-based alphabet index:
/// 'a'/'A' -> 0, ..., 'z'/'Z' -> 25.
///
/// # Panics
/// Panics if `c` is not an ASCII letter.
fn alphabet2idx(c: char) -> usize {
    if c.is_ascii_lowercase() {
        (c as u8 - b'a') as usize
    } else if c.is_ascii_uppercase() {
        (c as u8 - b'A') as usize
    } else {
        // Replaces the unhelpful `panic!("wtf")` with a diagnostic message.
        panic!("alphabet2idx: expected an ASCII letter, got {:?}", c)
    }
}
/// Reads `n` and `k`, then two length-`n` integer sequences, and decides
/// whether `aa` can be transformed into `bb` using exactly `k` unit moves.
///
/// Each move changes the total absolute difference by exactly 1, so the
/// answer is "Yes" iff `k >= diff` and the surplus `k - diff` is even
/// (surplus moves must cancel in pairs).
fn main() {
    let (n, k): (usize, isize) = parse_line().unwrap();
    let aa: Vec<isize> = parse_line().unwrap();
    let bb: Vec<isize> = parse_line().unwrap();
    // Sum of |aa[i] - bb[i]| over the first n elements.
    let diff: isize = aa
        .iter()
        .zip(bb.iter())
        .take(n)
        .map(|(a, b)| (a - b).abs())
        .sum();
    if k >= diff && (k - diff) % 2 == 0 {
        println!("Yes");
    } else {
        println!("No");
    }
}
|
extern crate json;
use algo_tools::load_json_tests;
struct Solution;

impl Solution {
    /// Evaluates an expression in Reverse Polish Notation over `i32`.
    ///
    /// Supported operators: `+`, `-`, `*`, `/` (integer division truncating
    /// toward zero, as with Rust's `/`). Any other token is parsed as an
    /// integer operand.
    ///
    /// # Panics
    /// Panics on a malformed expression: an operator without two operands,
    /// a token that is neither an operator nor an integer, an empty token
    /// list, or division by zero.
    ///
    /// (Two abandoned, commented-out drafts of this function were removed.)
    pub fn eval_rpn(tokens: Vec<String>) -> i32 {
        let mut stack: Vec<i32> = Vec::new();
        for token in tokens.iter() {
            // Map operator tokens to binary functions; anything else is an operand.
            let op: Option<fn(i32, i32) -> i32> = match token.as_str() {
                "+" => Some(|a, b| a + b),
                "-" => Some(|a, b| a - b),
                "*" => Some(|a, b| a * b),
                "/" => Some(|a, b| a / b),
                _ => None,
            };
            match op {
                Some(f) => {
                    // The top of the stack is the right-hand operand.
                    let rhs = stack.pop().expect("RPN stack underflow");
                    let lhs = stack.pop().expect("RPN stack underflow");
                    stack.push(f(lhs, rhs));
                }
                None => stack.push(token.parse::<i32>().expect("invalid RPN token")),
            }
        }
        stack.pop().expect("empty RPN expression")
    }
}
/// Runs `eval_rpn` against one JSON test case (`"in"`: token list,
/// `"expected"`: integer result). Returns 0 on success, 1 on mismatch
/// (printing a diagnostic).
fn run_test_case(test_case: &json::JsonValue) -> i32 {
    let expected = test_case["expected"].as_i32().unwrap();
    let input: Vec<String> = test_case["in"].members().map(|jop| jop.to_string()).collect();
    let result = Solution::eval_rpn(input.clone());
    if result == expected {
        0
    } else {
        println!("eval_rpn({:?}) returned {:?} but expected {:?}\n",
                 input, result, expected);
        1
    }
}
fn main() {
let (tests, test_idx) = load_json_tests();
let (mut successes, mut failures) = (0, 0);
if test_idx >= tests.len() as i32 {
println!("Wrong index {}, only {} tests available!!", test_idx, tests.len());
return
}
if test_idx != -1 {
let rc = run_test_case(&tests[test_idx as usize]);
if rc == 0 { successes += 1; }
else { failures += 1; }
} else {
println!("{} tests specified", tests.len());
for i in 0..tests.len() {
let rc = run_test_case(&tests[i]);
if rc == 0 { successes += 1; }
else { failures += 1; }
}
}
if failures > 0 {
println!("{} tests succeeded and {} tests failed!!", successes, failures);
} else {
println!("All {} tests succeeded!!", successes);
}
} |
use std::backtrace::Backtrace;
use std::fmt::{Debug, Display};
use std::hash::Hash;
use std::sync::Arc;
use liblumen_term::Tag;
use crate::erts::term::prelude::*;
/// Abstraction over a raw term representation: a word-sized value whose tag
/// bits select the concrete Erlang term type. Provides default decoding of
/// header (boxed) terms into `TypedTerm`.
pub trait Repr:
    Sized + Copy + Debug + Display + PartialEq<Self> + Eq + PartialOrd<Self> + Ord + Hash + Send
{
    // The term-encoding scheme (word size / tagging) for this representation.
    type Encoding: liblumen_term::Encoding;
    // This term's value as a plain usize.
    fn as_usize(&self) -> usize;
    // The raw encoded word.
    fn value(&self) -> <Self::Encoding as liblumen_term::Encoding>::Type;
    /// Decodes `self` (assumed to point at a header word) into the
    /// `TypedTerm` selected by `tag`. `literal` disambiguates ProcBin vs
    /// BinaryLiteral when the caller already knows; `None` makes this
    /// function inspect the binary's flags instead.
    fn decode_header(
        &self,
        tag: Tag<<Self::Encoding as liblumen_term::Encoding>::Type>,
        literal: Option<bool>,
    ) -> Result<TypedTerm, TermDecodingError>
    where
        Self: Encoded,
    {
        // Reinterpret &self as a pointer to the boxed term payload.
        let ptr = Boxed::new(self as *const _ as *mut u64).ok_or_else(|| {
            TermDecodingError::NoneValue {
                backtrace: Arc::new(Backtrace::capture()),
            }
        })?;
        match tag {
            // Tuple cannot be constructed directly, as it is a dynamically-sized type,
            // instead we construct a fat pointer which requires the length of the tuple;
            // to get that we have to access the arity stored in the tuple header
            //
            // NOTE: This happens with a few other types as well, so if you see this pattern,
            // the reasoning is the same for each case
            Tag::Tuple => {
                let tuple = unsafe { Tuple::from_raw_term(ptr.cast::<Self>().as_ptr()) };
                Ok(TypedTerm::Tuple(tuple))
            }
            Tag::Closure => {
                let closure = unsafe { Closure::from_raw_term(ptr.cast::<Self>().as_ptr()) };
                Ok(TypedTerm::Closure(closure))
            }
            Tag::HeapBinary => {
                let bin = unsafe { HeapBin::from_raw_term(ptr.cast::<Self>().as_ptr()) };
                Ok(TypedTerm::HeapBinary(bin))
            }
            // On x86_64 floats are immediates, so this arm only exists elsewhere.
            #[cfg(not(target_arch = "x86_64"))]
            Tag::Float => Ok(TypedTerm::Float(ptr.cast::<Float>())),
            Tag::BigInteger => Ok(TypedTerm::BigInteger(ptr.cast::<BigInteger>())),
            Tag::Reference => Ok(TypedTerm::Reference(ptr.cast::<Reference>())),
            Tag::ResourceReference => Ok(TypedTerm::ResourceReference(ptr.cast::<Resource>())),
            Tag::ProcBin => match literal {
                Some(false) => Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())),
                Some(true) => Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())),
                None => {
                    // Caller doesn't know: read the shared flags word, which
                    // sits at the same offset in both layouts.
                    let offset = BinaryLiteral::flags_offset();
                    debug_assert_eq!(offset, ProcBin::inner_offset());
                    let flags_ptr = unsafe {
                        (self as *const _ as *const u8).offset(offset as isize)
                            as *const BinaryFlags
                    };
                    let flags = unsafe { *flags_ptr };
                    if flags.is_literal() {
                        Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>()))
                    } else {
                        Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>()))
                    }
                }
            },
            Tag::SubBinary => Ok(TypedTerm::SubBinary(ptr.cast::<SubBinary>())),
            Tag::MatchContext => Ok(TypedTerm::MatchContext(ptr.cast::<MatchContext>())),
            Tag::ExternalPid => Ok(TypedTerm::ExternalPid(ptr.cast::<ExternalPid>())),
            Tag::ExternalPort => Ok(TypedTerm::ExternalPort(ptr.cast::<ExternalPort>())),
            Tag::ExternalReference => Ok(TypedTerm::ExternalReference(
                ptr.cast::<ExternalReference>(),
            )),
            Tag::Map => Ok(TypedTerm::Map(ptr.cast::<Map>())),
            Tag::None => Err(TermDecodingError::NoneValue {
                backtrace: Arc::new(Backtrace::capture()),
            }),
            _ => Err(TermDecodingError::InvalidTag {
                backtrace: Arc::new(Backtrace::capture()),
            }),
        }
    }
    /// Decodes this raw term as a header, any non-header values will result in a panic
    ///
    /// NOTE: This is assumed to be used during decoding when this term has already been
    /// typechecked as a header type.
    #[inline]
    unsafe fn decode_header_unchecked(
        &self,
        tag: Tag<<Self::Encoding as liblumen_term::Encoding>::Type>,
        literal: Option<bool>,
    ) -> TypedTerm
    where
        Self: Encoded,
    {
        match self.decode_header(tag.clone(), literal) {
            Ok(term) => term,
            Err(_) => panic!("invalid type tag: {:?}", tag),
        }
    }
}
|
use std::env;
use std::fs::File;
use std::io;
use std::io::Read;
use std::io::Write;
use crc::crc32;
const FORMAT: &[u8] = b"BPS1";
/// CLI entry point: `beatr <patch> <original> <output>` applies the BPS
/// `patch` to `original` and writes the result to `output`.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 4 {
        println!("usage: beatr <patch> <original> <output>");
        // Fix: previously execution fell through after printing usage and
        // panicked indexing args[1]; bail out instead.
        return;
    }
    let patch: Vec<u8> = slurp(&args[1]).unwrap();
    let src: Vec<u8> = slurp(&args[2]).unwrap();
    let target = apply_patch(&patch, &src).unwrap();
    let mut out = File::create(&args[3]).unwrap();
    out.write_all(&target).unwrap();
}
/// Reads the entire file at `path` into a byte vector.
fn slurp(path: &str) -> io::Result<Vec<u8>> {
    // std::fs::read opens the file and reads it to the end, exactly as the
    // manual File::open + read_to_end sequence did.
    std::fs::read(path)
}
/// Applies a BPS patch to `src`, returning the reconstructed target bytes.
///
/// Layout of a BPS file: "BPS1" magic, three varints (source size, target
/// size, metadata size), metadata, a stream of actions, and a 12-byte footer
/// holding CRC32s of source, target, and the patch itself (minus its own CRC).
fn apply_patch(patch: &[u8], src: &[u8]) -> Result<Vec<u8>, String> {
    if FORMAT != &patch[0..4] {
        return Err(format!("Not a valid bps header: {:?}", &patch[0..4]));
    }
    let remaining = &patch[4..];
    let (source_sz, remaining) = decodenum(remaining)?;
    let (target_sz, remaining) = decodenum(remaining)?;
    let (metadata_sz, remaining) = decodenum(remaining)?;
    let mut target = Vec::with_capacity(target_sz as usize);
    if metadata_sz > (i64::max_value() as u64) {
        return Err(format!("illegal metadata size {}", metadata_sz));
    }
    // Skip the (unused) metadata blob.
    let mut remaining = &remaining[metadata_sz as usize..];
    if (source_sz as usize) != src.len() {
        return Err(format!("patch does not apply to input file, expected length: {}, actual: {}",
                           source_sz, src.len()));
    }
    // Relative offsets carried across SourceCopy / TargetCopy actions.
    let mut source_relative_offset: usize = 0;
    let mut target_relative_offset: usize = 0;
    // Everything up to the final 12 bytes (the CRC footer) is action data.
    while remaining.len() > 12 {
        let (data, r) = decodenum(remaining)?;
        remaining = r;
        match action(data) {
            Action::SourceRead(length) => {
                // Copy from source at the current output offset.
                let pos = target.len();
                target.extend(&src[pos..pos + length]);
            }
            Action::TargetRead(length) => {
                // Copy literal bytes embedded in the patch stream.
                target.extend(&remaining[0..length]);
                remaining = &remaining[length..];
            }
            Action::SourceCopy(length) => {
                // Signed delta moves the source cursor, then copy `length` bytes.
                let (data, r) = decodenum(remaining)?;
                remaining = r;
                source_relative_offset = ((source_relative_offset as isize) + decode_signed(data)) as usize;
                let end = source_relative_offset + length;
                target.extend(&src[source_relative_offset..end]);
                source_relative_offset += length;
            }
            Action::TargetCopy(length) => {
                // Copy from already-written target bytes, one at a time —
                // the region may overlap the bytes being appended.
                let (data, r) = decodenum(remaining)?;
                remaining = r;
                target_relative_offset = ((target_relative_offset as isize) + decode_signed(data)) as usize;
                for _i in 0..length {
                    target.push(target[target_relative_offset]);
                    target_relative_offset += 1;
                }
            }
        }
    }
    if remaining.len() != 12 {
        Err("invalid bps file".to_string())
    } else {
        // Footer: little-endian CRC32 of source, target, and patch-sans-CRC.
        verify_crc32(&remaining[0..4], crc32::checksum_ieee(&src))?;
        verify_crc32(&remaining[4..8], crc32::checksum_ieee(&target))?;
        verify_crc32(&remaining[8..12], crc32::checksum_ieee(&patch[0..patch.len() - 4]))?;
        Ok(target)
    }
}
/// Compares a little-endian CRC32 stored in `expected[0..4]` against `actual`.
///
/// # Panics
/// Panics if `expected` is shorter than 4 bytes.
fn verify_crc32(expected: &[u8], actual: u32) -> Result<(), String> {
    let mut le = [0u8; 4];
    le.copy_from_slice(&expected[0..4]);
    let expected = u32::from_le_bytes(le);
    if expected == actual {
        Ok(())
    } else {
        Err(format!("CRC doesn't match (expect: {}, actual: {})", expected, actual))
    }
}
/// One decoded BPS patch action; the payload is the number of bytes to emit.
#[derive(Debug)]
enum Action {
    /// Copy bytes from the source file at the current output offset.
    SourceRead(usize),
    /// Copy literal bytes embedded in the patch.
    TargetRead(usize),
    /// Copy bytes from an offset-addressed position in the source.
    SourceCopy(usize),
    /// Copy bytes from earlier in the output being built.
    TargetCopy(usize),
}
/// Splits an action word into its command (low 2 bits) and length
/// (remaining bits, stored minus one).
fn action(data: u64) -> Action {
    let command = data & 0b11;
    let length = ((data >> 2) + 1) as usize;
    match command {
        0 => Action::SourceRead(length),
        1 => Action::TargetRead(length),
        2 => Action::SourceCopy(length),
        3 => Action::TargetCopy(length),
        // `data & 0b11` can only be 0..=3; was `panic!`, which misreported an
        // impossible branch as a runtime condition.
        _ => unreachable!(),
    }
}
/// Decodes one BPS variable-length integer from the front of `src`, returning
/// the value and the remaining bytes.
///
/// Encoding: little-endian base-128 with the HIGH bit set on the FINAL byte
/// (the opposite of LEB128), plus a bias of `shift` added after each
/// non-final byte so that every length has a unique encoding (mirrors the
/// `src -= 1` step in the test helper `encodenum`).
fn decodenum(src: &[u8]) -> Result<(u64, &[u8]), &'static str> {
    let mut data: u64 = 0;
    let mut shift: u64 = 1;
    for (i, b) in src.iter().enumerate() {
        let x = u64::from(*b);
        data += (x & 0x7F) * shift;
        if x & 0x80 != 0 {
            // High bit marks the terminating byte.
            return Ok((data, &src[i + 1..]));
        }
        shift <<= 7;
        // Bias: skip the values representable in fewer bytes.
        data += shift;
    }
    // Ran out of bytes without seeing a terminator.
    Err("Invalid bps file")
}
/// Decodes a BPS zigzag-style signed number: the low bit is the sign
/// (0 = positive, 1 = negative) and the remaining bits are the magnitude.
fn decode_signed(num: u64) -> isize {
    let magnitude = (num >> 1) as isize;
    if num & 1 == 0 {
        magnitude
    } else {
        -magnitude
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test1byte() {
        // All values that fit in one encoded byte: 0..127.
        // Fix: `1 << 7 - 1` parses as `1 << 6` (== 64) because `-` binds
        // tighter than `<<`; the intent, per the test name, is (1 << 7) - 1.
        for i in 0..((1 << 7) - 1) {
            enc_dec_num(i);
        }
    }
    #[test]
    fn testmaxbyte() {
        // First value that needs a second byte.
        enc_dec_num(1 << 7);
    }
    #[test]
    fn test2byte() {
        enc_dec_num(1 << 9);
    }
    #[test]
    fn test3byte() {
        // Fix: `1 << 17 + 37` parses as `1 << 54`; the intent, per the test
        // name, is a value in the 3-byte range: (1 << 17) + 37.
        enc_dec_num((1 << 17) + 37);
    }
    // Reference encoder matching `decodenum`: 7 bits per byte, high bit set
    // on the final byte, with a -1 bias applied after each non-final byte.
    fn encodenum(mut src: u64, dst: &mut Vec<u8>) {
        loop {
            let val: u8 = (src as u8) & 0x7F;
            src >>= 7;
            if src == 0 {
                dst.push(val | 0x80);
                return;
            }
            dst.push(val);
            src -= 1;
        }
    }
    // Round-trips `num` through encodenum/decodenum and checks full consumption.
    fn enc_dec_num(num: u64) {
        let mut dst = Vec::new();
        encodenum(num, &mut dst);
        let (dec, rem) = decodenum(&dst).unwrap();
        assert_eq!(dec, num);
        assert_eq!(rem.len(), 0);
    }
}
|
extern crate proc_macro;
use proc_macro::TokenStream;
/// Attribute macro that returns the annotated item unchanged.
///
/// NOTE(review): the two `println!`s dump the attribute arguments and the
/// item's token stream to the compiler's stdout at build time — presumably
/// intentional for inspecting macro input; remove if this ships.
#[proc_macro_attribute]
pub fn command(attr: TokenStream, item: TokenStream) -> TokenStream {
    println!("{}", attr);
    println!("{}", item);
    item
}
use crate::metrics::{handle_time, Scoped, Stats};
use futures::{future, try_ready, Future, Poll};
use http::{Request, Response};
use linkerd2_proxy_transport::tls;
use std::marker::PhantomData;
use tower::retry as tower_retry;
pub use tower::retry::budget::Budget;
use tracing::trace;
/// Implemented by stack targets that may opt in to retries; `can_retry`
/// returns the retry policy to use, or `None` to disable retrying.
pub trait CanRetry {
    type Retry: Retry + Clone;
    fn can_retry(&self) -> Option<Self::Retry>;
}
/// A retry policy: decides per request/response whether to retry, and how to
/// clone a request for re-dispatch.
pub trait Retry: Sized {
    // Ok(()) means "retry"; Err explains why not (see NoRetry).
    fn retry<B1, B2>(&self, req: &Request<B1>, res: &Response<B2>) -> Result<(), NoRetry>;
    fn clone_request<B: TryClone>(&self, req: &Request<B>) -> Option<Request<B>>;
}
/// Reason a response will not be retried.
pub enum NoRetry {
    /// The response succeeded; nothing to retry.
    Success,
    /// The retry budget is exhausted.
    Budget,
}
/// Fallible clone, for request bodies that may not be clonable.
pub trait TryClone: Sized {
    fn try_clone(&self) -> Option<Self>;
}
/// Layer that wraps services with retry behavior; `registry` supplies
/// per-target metric scopes.
pub struct Layer<S, K, A, B> {
    registry: S,
    _p: PhantomData<(K, fn(A) -> B)>,
}
/// The `MakeService` produced by `Layer`.
pub struct Stack<M, S, K, A, B> {
    inner: M,
    registry: S,
    _p: PhantomData<(K, fn(A) -> B)>,
}
/// Future returned by `Stack::call`; carries the retry policy (if any) to
/// attach once the inner service resolves.
pub struct MakeFuture<F, R, S> {
    inner: F,
    policy: Option<Policy<R, S>>,
}
/// A retrying service: tower's Retry wrapper parameterized by our Policy.
pub type Service<R, Svc, St> = tower_retry::Retry<Policy<R, St>, Svc>;
/// Pairs a `Retry` implementation with the stats scope used to record
/// skipped retries.
#[derive(Clone)]
pub struct Policy<R, S>(R, S);
// === impl Layer ===
/// Builds a retry `Layer` backed by the given metrics registry.
pub fn layer<S, K, A, B>(registry: S) -> Layer<S, K, A, B> {
    Layer {
        registry,
        _p: PhantomData,
    }
}
// Manual Clone: the derive would require K/A/B to be Clone, but they only
// appear inside PhantomData.
impl<S: Clone, K, A, B> Clone for Layer<S, K, A, B> {
    fn clone(&self) -> Self {
        Layer {
            registry: self.registry.clone(),
            _p: PhantomData,
        }
    }
}
impl<M, S, K, A, B> tower::layer::Layer<M> for Layer<S, K, A, B>
where
    S: Scoped<K> + Clone,
    S::Scope: Clone,
    A: TryClone,
{
    type Service = Stack<M, S, K, A, B>;
    fn layer(&self, inner: M) -> Self::Service {
        Stack {
            inner,
            registry: self.registry.clone(),
            _p: PhantomData,
        }
    }
}
// === impl Stack ===
// Manual Clone for the same PhantomData reason as Layer above.
impl<M: Clone, S: Clone, K, A, B> Clone for Stack<M, S, K, A, B> {
    fn clone(&self) -> Self {
        Stack {
            inner: self.inner.clone(),
            registry: self.registry.clone(),
            _p: PhantomData,
        }
    }
}
// impl MakeService: resolves the inner service for `target` and decides,
// from the target itself, whether to wrap it with retry behavior.
impl<T, M, S, K, A, B> tower::Service<T> for Stack<M, S, K, A, B>
where
    T: CanRetry + Clone,
    M: tower::MakeService<T, Request<A>, Response = Response<B>>,
    M::Service: Clone,
    S: Scoped<K>,
    S::Scope: Clone,
    K: From<T>,
    A: TryClone,
{
    // Either a retry-wrapped service (A) or the bare inner service (B).
    type Response = tower::util::Either<Service<T::Retry, M::Service, S::Scope>, M::Service>;
    type Error = M::MakeError;
    type Future = MakeFuture<M::Future, T::Retry, S::Scope>;
    fn poll_ready(&mut self) -> Poll<(), Self::Error> {
        self.inner.poll_ready()
    }
    fn call(&mut self, target: T) -> Self::Future {
        // Ask the target whether it wants retries; if so, pair the policy
        // with a metrics scope keyed by the target.
        let policy = if let Some(retries) = target.can_retry() {
            trace!("stack is retryable");
            let stats = self.registry.scoped(target.clone().into());
            Some(Policy(retries, stats))
        } else {
            None
        };
        let inner = self.inner.make_service(target);
        MakeFuture { inner, policy }
    }
}
// === impl MakeFuture ===
// Once the inner service is ready, wrap it in tower's Retry middleware if a
// policy was captured, otherwise pass it through untouched.
impl<F, R, S> Future for MakeFuture<F, R, S>
where
    F: Future,
{
    type Item = tower::util::Either<Service<R, F::Item, S>, F::Item>;
    type Error = F::Error;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        let inner = try_ready!(self.inner.poll());
        if let Some(policy) = self.policy.take() {
            Ok(tower::util::Either::A(tower_retry::Retry::new(policy, inner)).into())
        } else {
            Ok(tower::util::Either::B(inner).into())
        }
    }
}
// === impl Policy ===
// Bridges our Retry trait into tower-retry's Policy contract: `retry`
// returns Some(future-of-next-policy) to retry, None to stop.
impl<R, S, A, B, E> tower_retry::Policy<Request<A>, Response<B>, E> for Policy<R, S>
where
    R: Retry + Clone,
    S: Stats + Clone,
    A: TryClone,
{
    type Future = future::FutureResult<Self, ()>;
    fn retry(&self, req: &Request<A>, result: Result<&Response<B>, &E>) -> Option<Self::Future> {
        match result {
            Ok(res) => match self.0.retry(req, res) {
                Ok(()) => {
                    trace!("retrying request");
                    Some(future::ok(self.clone()))
                }
                Err(NoRetry::Budget) => {
                    // Budget exhausted: record the skipped retry.
                    self.1.incr_retry_skipped_budget();
                    None
                }
                Err(NoRetry::Success) => None,
            },
            Err(_err) => {
                // Transport-level failures are never retried here.
                trace!("cannot retry transport error");
                None
            }
        }
    }
    fn clone_request(&self, req: &Request<A>) -> Option<Request<A>> {
        // Delegate to the policy; returning None disables the retry for this
        // request (e.g. a non-clonable streaming body).
        if let Some(clone) = self.0.clone_request(req) {
            trace!("cloning request");
            Some(clone)
        } else {
            trace!("request could not be cloned");
            None
        }
    }
}
// TODO this needs to be moved up into the application!
// Clones an http::Request when its body can be cloned, copying the method,
// URI, headers, version, and the two extensions this proxy cares about.
// NOTE(review): other extensions are silently dropped from the clone.
impl<B: TryClone> TryClone for Request<B> {
    fn try_clone(&self) -> Option<Self> {
        if let Some(body) = self.body().try_clone() {
            let mut clone = Request::new(body);
            *clone.method_mut() = self.method().clone();
            *clone.uri_mut() = self.uri().clone();
            *clone.headers_mut() = self.headers().clone();
            *clone.version_mut() = self.version();
            // Preserve TLS metadata on the retried request.
            if let Some(ext) = self.extensions().get::<tls::accept::Meta>() {
                clone.extensions_mut().insert(ext.clone());
            }
            // Count retries toward the request's total handle time.
            if let Some(ext) = self.extensions().get::<handle_time::Tracker>() {
                clone.extensions_mut().insert(ext.clone());
            }
            Some(clone)
        } else {
            None
        }
    }
}
|
// Copyright 2019, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use chrono::{NaiveDateTime, Utc};
use rand::{rngs::OsRng, seq::SliceRandom};
use std::time::Duration;
use tari_comms::peer_manager::NodeId;
/// A set of peer node ids with a freshness ("staleness") timestamp.
pub struct PeerPool {
    // When `set_node_ids` was last called; `None` means never (always stale).
    last_updated: Option<NaiveDateTime>,
    // The pooled peer node ids.
    node_ids: Vec<NodeId>,
    // How long after `last_updated` the pool is considered stale.
    stale_interval: Duration,
}
impl PeerPool {
    /// Creates an empty pool whose contents become stale `stale_interval`
    /// after each call to `set_node_ids`.
    pub fn new(stale_interval: Duration) -> Self {
        Self {
            last_updated: None,
            node_ids: Vec::default(),
            stale_interval,
        }
    }

    /// Number of node ids currently in the pool.
    pub fn len(&self) -> usize {
        self.node_ids.len()
    }

    /// Returns true when the pool holds no node ids.
    /// Added as the companion to `len` (clippy: `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.node_ids.is_empty()
    }

    /// Returns true if the pool was never populated, or if the last update
    /// happened more than `stale_interval` ago.
    pub fn is_stale(&self) -> bool {
        self.last_updated
            .map(|dt| {
                let chrono_dt = chrono::Duration::from_std(self.stale_interval)
                    .expect("PeerPool::stale_interval is too large (overflows chrono::Duration::from_std)");
                dt.checked_add_signed(chrono_dt)
                    .map(|dt| dt < Utc::now().naive_utc())
                    .expect("PeerPool::stale_interval is too large (overflows i32 when added to NaiveDateTime)")
            })
            .unwrap_or(true)
    }

    /// Replaces the pool contents and refreshes the staleness timestamp.
    pub fn set_node_ids(&mut self, node_ids: Vec<NodeId>) {
        self.node_ids = node_ids;
        self.last_updated = Some(Utc::now().naive_utc());
    }

    /// Removes and returns `node_id` if present (linear scan).
    pub fn remove(&mut self, node_id: &NodeId) -> Option<NodeId> {
        let pos = self.node_ids.iter().position(|n| n == node_id)?;
        Some(self.node_ids.remove(pos))
    }

    /// Appends a node id. Note: does NOT refresh `last_updated`.
    pub fn push(&mut self, node_id: NodeId) {
        self.node_ids.push(node_id)
    }

    /// All node ids currently in the pool.
    pub fn node_ids(&self) -> &[NodeId] {
        &self.node_ids
    }

    /// Selects up to `n` distinct node ids uniformly at random.
    pub fn sample(&self, n: usize) -> Vec<&NodeId> {
        self.node_ids.choose_multiple(&mut OsRng, n).collect()
    }

    /// Membership test (linear scan).
    pub fn contains(&self, node_id: &NodeId) -> bool {
        self.node_ids.iter().any(|n| n == node_id)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::test_utils::make_node_id;
    use std::iter::repeat_with;

    #[test]
    fn is_stale() {
        let mut pool = PeerPool::new(Duration::from_secs(100));
        // Never-updated pool is always stale.
        assert_eq!(pool.is_stale(), true);
        pool.set_node_ids(vec![]);
        assert_eq!(pool.is_stale(), false);
        // Back-date the last update beyond the stale interval.
        pool.last_updated = Some(
            Utc::now()
                .naive_utc()
                .checked_sub_signed(chrono::Duration::from_std(Duration::from_secs(101)).unwrap())
                .unwrap(),
        );
        assert_eq!(pool.is_stale(), true);
    }

    #[test]
    fn sample() {
        let mut pool = PeerPool::new(Duration::from_secs(100));
        let node_ids = repeat_with(make_node_id).take(10).collect::<Vec<_>>();
        pool.set_node_ids(node_ids.clone());
        let mut sample = pool.sample(4);
        assert_eq!(sample.len(), 4);
        // Remove every sampled id that exists in the source set; an empty
        // remainder proves every sampled id came from the pool.
        node_ids.into_iter().for_each(|node_id| {
            if let Some(pos) = sample.iter().position(|n| *n == &node_id) {
                sample.remove(pos);
            }
        });
        assert_eq!(sample.len(), 0);
    }
}
|
mod with_atom_class;
use proptest::prop_assert_eq;
use liblumen_alloc::atom;
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::term::prelude::*;
use crate::erlang::raise_3::result;
use crate::test::strategy;
#[test]
fn without_atom_class_errors_badarg() {
    // Property test: for any non-atom `class` term (with arbitrary reason and
    // proper-list stacktrace), `raise/3` must fail with badarg and the
    // matching error message.
    run!(
        |arc_process| {
            (
                strategy::term::is_not_atom(arc_process.clone()),
                strategy::term(arc_process.clone()),
                strategy::term::list::proper(arc_process.clone()),
            )
        },
        |(class, reason, stacktrace)| {
            prop_assert_badarg!(
                result(class, reason, stacktrace),
                format!("class ({}) is not an atom", class)
            );
            Ok(())
        },
    );
}
|
use std::fmt;
use serde::de::{SeqAccess, Visitor};
use serde::ser::SerializeTuple;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
/// A `u32` serialized with a variable-length (LEB128-style) encoding:
/// 7 payload bits per byte, high bit set on all but the final byte.
#[derive(Debug, PartialEq)]
pub struct Varuint32(u32);
impl Serialize for Varuint32 {
    /// Encodes the value as a variable-length integer: 7 bits per byte,
    /// least-significant group first, continuation flag in the top bit.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut encoded: Vec<u8> = Vec::new();
        let mut remaining = self.0 as u64;
        loop {
            // Take the low 7 bits, then flag whether more groups follow.
            let mut byte = (remaining as u8) & 0x7f;
            remaining >>= 7;
            if remaining > 0 {
                byte |= 0x80;
            }
            encoded.push(byte);
            if remaining == 0 {
                break;
            }
        }
        // Emit the bytes as a tuple so the length is not written to the wire.
        let mut tup = serializer.serialize_tuple(encoded.len())?;
        for byte in &encoded {
            tup.serialize_element(byte)?;
        }
        tup.end()
    }
}
impl<'de> Deserialize<'de> for Varuint32 {
    /// Decodes a variable-length integer: bytes with the high bit set carry
    /// 7 payload bits each; the first byte below 128 terminates the value.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct Varuint32Visitor;
        impl<'de> Visitor<'de> for Varuint32Visitor {
            type Value = Varuint32;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a variant integer")
            }
            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: SeqAccess<'de>,
            {
                let mut n = 0;
                let mut shift = 0;
                let mut final_byte = 0;
                while let Some(elem) = seq.next_element()? {
                    let byte: u8 = elem;
                    if byte < 128 {
                        // High bit clear: this is the terminating byte.
                        final_byte = byte;
                        break;
                    }
                    // Accumulate 7 payload bits, least-significant first.
                    n |= ((byte & 127) as usize) << shift;
                    shift += 7;
                }
                // NOTE(review): if the sequence ends without a terminating
                // byte, `final_byte` stays 0 and the partial value is
                // returned silently — confirm this is the intended behavior.
                let value = n | ((final_byte as usize) << shift);
                Ok(Varuint32(value as u32))
            }
        }
        // 8 is only a length hint for the tuple protocol; the visitor stops
        // as soon as it sees the terminating byte.
        deserializer.deserialize_tuple(8, Varuint32Visitor)
    }
}
#[test]
fn test_varuint32_serialization() {
    use super::eos_serialize;
    // Table-driven: each value must encode to exactly these bytes.
    let cases: &[(u32, &[u8])] = &[
        (0, &[0]),
        (1, &[1]),
        (230, &[230, 1]),
        (2048, &[128, 16]),
        (4294967295, &[255, 255, 255, 255, 15]),
    ];
    for &(value, expected) in cases {
        assert_eq!(eos_serialize(&Varuint32(value)).unwrap(), expected);
    }
}
#[test]
fn test_varuint32_deserialization() {
    use super::eos_deserialize;
    // Table-driven: each byte sequence must decode back to its value.
    let cases: &[(&[u8], u32)] = &[
        (&[0], 0),
        (&[1], 1),
        (&[230, 1], 230),
        (&[128, 16], 2048),
        (&[255, 255, 255, 255, 15], 4294967295),
    ];
    for &(bytes, expected) in cases {
        assert_eq!(
            eos_deserialize::<Varuint32>(bytes).unwrap(),
            Varuint32(expected)
        );
    }
}
#[test]
fn test_varuint32_structs_serializations() {
    use super::{eos_deserialize, eos_serialize};
    // Round-trip a struct mixing fixed-width and variable-width fields.
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Test {
        init_block: u32,
        net_usage: Varuint32,
        message: String,
    }
    let original = Test {
        init_block: 123456,
        net_usage: Varuint32(2048),
        message: String::from("Hello world!"),
    };
    let encoded = eos_serialize(&original).unwrap();
    let decoded: Test = eos_deserialize(&encoded).unwrap();
    assert_eq!(original, decoded);
}
|
use crate::{
component::UserInterfaceView,
resource::{ApplicationData, UserInterfaceRes},
};
use core::{
app::AppLifeCycle,
ecs::{Join, Read, ReadExpect, System, Write, WriteStorage},
};
use input::resource::{InputController, TriggerState};
use raui_core::{
application::Application,
interactive::default_interactions_engine::{Interaction, PointerButton},
layout::default_layout_engine::DefaultLayoutEngine,
widget::{
component::interactive::navigation::{NavSignal, NavTextChange},
setup as core_setup,
utils::Vec2,
},
};
use raui_material::setup as material_setup;
use std::collections::HashMap;
/// ECS system that drives RAUI user interfaces: creates per-view
/// applications, forwards input as interactions, and performs layout.
#[derive(Default)]
pub struct UserInterfaceSystem {
    // Pointer position from the previous frame, used to detect movement.
    last_pointer_pos: Vec2,
}
impl<'s> System<'s> for UserInterfaceSystem {
    type SystemData = (
        ReadExpect<'s, AppLifeCycle>,
        Write<'s, UserInterfaceRes>,
        Read<'s, InputController>,
        WriteStorage<'s, UserInterfaceView>,
    );

    fn run(&mut self, (life_cycle, mut res, input, mut views): Self::SystemData) {
        let ui: &mut UserInterfaceRes = &mut *res;
        // Drop cached application data for app ids whose view component no
        // longer exists.
        ui.data = std::mem::take(&mut ui.data)
            .into_iter()
            .filter(|(k, _)| views.join().any(|v| k == v.app_id()))
            .collect::<HashMap<_, _>>();
        for view in (&mut views).join() {
            // Lazily create and set up an Application for views seen for the
            // first time.
            if !ui.data.contains_key(view.app_id()) {
                let mut application = Application::new();
                application.setup(core_setup);
                application.setup(material_setup);
                if let Some(setup) = ui.setup {
                    setup(&mut application);
                }
                ui.data.insert(
                    view.app_id().to_owned(),
                    ApplicationData {
                        application,
                        interactions: Default::default(),
                        coords_mapping: Default::default(),
                    },
                );
            }
            // Re-deserialize and apply the widget tree when the view changed.
            if view.dirty {
                view.dirty = false;
                let app = ui.application_mut(view.app_id()).unwrap();
                let root = app
                    .deserialize_node(view.root().clone())
                    .expect("Could not deserialize UI node");
                app.apply(root);
            }
        }
        let pointer_pos = Vec2 {
            x: input.axis_or_default(&ui.pointer_axis_x),
            y: input.axis_or_default(&ui.pointer_axis_y),
        };
        // Epsilon comparison so tiny float jitter does not count as movement.
        let pointer_moved = (pointer_pos.x - self.last_pointer_pos.x).abs() > 1.0e-6
            || (pointer_pos.y - self.last_pointer_pos.y).abs() > 1.0e-6;
        self.last_pointer_pos = pointer_pos;
        let pointer_trigger = input.trigger_or_default(&ui.pointer_action_trigger);
        let pointer_context = input.trigger_or_default(&ui.pointer_context_trigger);
        // Translate typed characters into text-editing signals. Note the
        // order: newline/carriage-return are control characters, so they fall
        // through to the second branch.
        let mut text = input
            .text()
            .chars()
            .filter_map(|c| {
                if !c.is_control() {
                    Some(NavTextChange::InsertCharacter(c))
                } else if c == '\n' || c == '\r' {
                    Some(NavTextChange::NewLine)
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        let accept = input.trigger_or_default(&ui.navigate_accept);
        let cancel = input.trigger_or_default(&ui.navigate_cancel);
        let up = input.trigger_or_default(&ui.navigate_up) == TriggerState::Pressed;
        let down = input.trigger_or_default(&ui.navigate_down) == TriggerState::Pressed;
        let left = input.trigger_or_default(&ui.navigate_left) == TriggerState::Pressed;
        let right = input.trigger_or_default(&ui.navigate_right) == TriggerState::Pressed;
        let prev = input.trigger_or_default(&ui.navigate_prev) == TriggerState::Pressed;
        let next = input.trigger_or_default(&ui.navigate_next) == TriggerState::Pressed;
        // Cursor-movement and deletion keys are appended after typed text.
        if input.trigger_or_default(&ui.text_move_cursor_left) == TriggerState::Pressed {
            text.push(NavTextChange::MoveCursorLeft);
        }
        if input.trigger_or_default(&ui.text_move_cursor_right) == TriggerState::Pressed {
            text.push(NavTextChange::MoveCursorRight);
        }
        if input.trigger_or_default(&ui.text_move_cursor_start) == TriggerState::Pressed {
            text.push(NavTextChange::MoveCursorStart);
        }
        if input.trigger_or_default(&ui.text_move_cursor_end) == TriggerState::Pressed {
            text.push(NavTextChange::MoveCursorEnd);
        }
        if input.trigger_or_default(&ui.text_delete_left) == TriggerState::Pressed {
            text.push(NavTextChange::DeleteLeft);
        }
        if input.trigger_or_default(&ui.text_delete_right) == TriggerState::Pressed {
            text.push(NavTextChange::DeleteRight);
        }
        // Broadcast this frame's input to every application, converting the
        // pointer position into each app's virtual coordinate space.
        for data in ui.data.values_mut() {
            let pointer_pos = data.coords_mapping.real_to_virtual_vec2(pointer_pos);
            if pointer_moved {
                data.interactions
                    .interact(Interaction::PointerMove(pointer_pos));
            }
            match pointer_trigger {
                TriggerState::Pressed => {
                    data.interactions.interact(Interaction::PointerDown(
                        PointerButton::Trigger,
                        pointer_pos,
                    ));
                }
                TriggerState::Released => {
                    data.interactions
                        .interact(Interaction::PointerUp(PointerButton::Trigger, pointer_pos));
                }
                _ => {}
            }
            match pointer_context {
                TriggerState::Pressed => {
                    data.interactions.interact(Interaction::PointerDown(
                        PointerButton::Context,
                        pointer_pos,
                    ));
                }
                TriggerState::Released => {
                    data.interactions
                        .interact(Interaction::PointerUp(PointerButton::Context, pointer_pos));
                }
                _ => {}
            }
            for change in &text {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::TextChange(*change)));
            }
            match accept {
                TriggerState::Pressed => {
                    data.interactions
                        .interact(Interaction::Navigate(NavSignal::Accept(true)));
                }
                TriggerState::Released => {
                    data.interactions
                        .interact(Interaction::Navigate(NavSignal::Accept(false)));
                }
                _ => {}
            }
            match cancel {
                TriggerState::Pressed => {
                    data.interactions
                        .interact(Interaction::Navigate(NavSignal::Cancel(true)));
                }
                TriggerState::Released => {
                    data.interactions
                        .interact(Interaction::Navigate(NavSignal::Cancel(false)));
                }
                _ => {}
            }
            if up {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::Up));
            }
            if down {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::Down));
            }
            if left {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::Left));
            }
            if right {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::Right));
            }
            if prev {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::Prev));
            }
            if next {
                data.interactions
                    .interact(Interaction::Navigate(NavSignal::Next));
            }
        }
        // Process views in input-order priority; once a view captures input,
        // later views get their interaction queues cleared.
        let mut meta = views.join().collect::<Vec<_>>();
        meta.sort_by(|a, b| a.input_order.cmp(&b.input_order));
        let mut captured = false;
        for view in meta {
            if let Some(data) = ui.data.get_mut(view.app_id()) {
                data.application.animations_delta_time = life_cycle.delta_time_seconds();
                data.application.process();
                data.application
                    .layout(&data.coords_mapping, &mut DefaultLayoutEngine)
                    .unwrap_or_default();
                if captured {
                    data.interactions.clear_queue(true);
                }
                data.interactions.deselect_when_no_button_found =
                    view.deselect_when_no_button_found;
                if let Ok(result) = data.application.interact(&mut data.interactions) {
                    if view.capture_input && result.is_any() {
                        captured = true;
                    }
                }
            }
        }
        res.last_frame_captured = captured;
    }
}
|
use std::env;
use std::fs::{self, File};
use std::io;
use std::io::prelude::*;
use std::path::Path;
use toml::Value;
use crate::cmd::call;
use crate::error::FatalError;
use crate::Features;
/// Resolves the cargo binary to invoke: honors the `CARGO` environment
/// variable (set when run under cargo itself), falling back to `"cargo"`.
fn cargo() -> String {
    match env::var("CARGO") {
        Ok(path) => path,
        Err(_) => "cargo".to_owned(),
    }
}
/// Runs `cargo publish` for the given manifest.
///
/// `registry`/`token` are forwarded when provided; `features` selects which
/// feature flags are passed. Returns the result of `call` (whether the
/// subprocess succeeded), or a `FatalError` if it could not be spawned.
pub fn publish(
    dry_run: bool,
    manifest_path: &Path,
    features: &Features,
    registry: Option<&str>,
    token: Option<&str>,
) -> Result<bool, FatalError> {
    let cargo = cargo();
    let mut command: Vec<&str> = vec![
        &cargo,
        "publish",
        "--manifest-path",
        manifest_path.to_str().unwrap(),
    ];
    if let Some(registry) = registry {
        command.push("--registry");
        command.push(registry);
    }
    if let Some(token) = token {
        command.push("--token");
        command.push(token);
    }
    let feature_list = match features {
        Features::None => None,
        Features::Selective(vec) => Some(vec.join(" ")),
        Features::All => {
            command.push("--all-features");
            None
        }
    };
    if let Some(ref feature_list) = feature_list {
        // BUG FIX: the feature list must be passed via the `--features` flag;
        // previously it was appended as a bare positional argument, which
        // `cargo publish` rejects.
        command.push("--features");
        command.push(feature_list);
    }
    call(command, dry_run)
}
/// Polls the crates index until `name@version` appears, or `timeout` elapses.
///
/// No-op when `dry_run` is set. Index update failures are logged and
/// retried; returns `FatalError::PublishTimeoutError` on timeout.
pub fn wait_for_publish(
    name: &str,
    version: &str,
    timeout: std::time::Duration,
    dry_run: bool,
) -> Result<(), FatalError> {
    if !dry_run {
        let now = std::time::Instant::now();
        let sleep_time = std::time::Duration::from_secs(1);
        let index = crates_index::Index::new_cargo_default();
        let mut logged = false;
        loop {
            // An update failure is transient (e.g. network); log and keep polling.
            if let Err(e) = index.update() {
                log::debug!("Crate index update failed with {}", e);
            }
            let crate_data = index.crate_(name);
            let published = crate_data
                .iter()
                .flat_map(|c| c.versions().iter())
                .any(|v| v.version() == version);
            if published {
                break;
            } else if timeout < now.elapsed() {
                return Err(FatalError::PublishTimeoutError);
            }
            if !logged {
                log::info!("Waiting for publish to complete...");
                logged = true;
            }
            std::thread::sleep(sleep_time);
        }
    }
    Ok(())
}
/// Rewrites `package.version` in the manifest to `version`.
///
/// Writes to a sibling `Cargo.toml.work` file first, then atomically renames
/// it over the original so a failure cannot leave a truncated manifest.
pub fn set_package_version(manifest_path: &Path, version: &str) -> Result<(), FatalError> {
    let temp_manifest_path = manifest_path
        .parent()
        .unwrap_or_else(|| Path::new("."))
        .join("Cargo.toml.work");
    {
        let manifest = load_from_file(manifest_path)?;
        let mut manifest: toml_edit::Document = manifest.parse().map_err(FatalError::from)?;
        manifest["package"]["version"] = toml_edit::value(version);
        let mut file_out = File::create(&temp_manifest_path).map_err(FatalError::from)?;
        // BUG FIX: `write` may perform a partial write; `write_all` guarantees
        // the entire manifest reaches the temp file.
        file_out
            .write_all(manifest.to_string_in_original_order().as_bytes())
            .map_err(FatalError::from)?;
    }
    fs::rename(temp_manifest_path, manifest_path)?;
    Ok(())
}
/// Rewrites the `version` field of dependency `name` to `version` in every
/// dependency table it appears in (`dependencies`, `dev-dependencies`,
/// `build-dependencies`), preserving the manifest's original ordering.
///
/// Uses the same temp-file-then-rename strategy as `set_package_version`.
pub fn set_dependency_version(
    manifest_path: &Path,
    name: &str,
    version: &str,
) -> Result<(), FatalError> {
    let temp_manifest_path = manifest_path
        .parent()
        .unwrap_or_else(|| Path::new("."))
        .join("Cargo.toml.work");
    {
        let manifest = load_from_file(manifest_path)?;
        let mut manifest: toml_edit::Document = manifest.parse().map_err(FatalError::from)?;
        for key in &["dependencies", "dev-dependencies", "build-dependencies"] {
            if manifest.as_table().contains_key(key)
                && manifest[key]
                    .as_table()
                    .expect("manifest is already verified")
                    .contains_key(name)
            {
                manifest[key][name]["version"] = toml_edit::value(version);
            }
        }
        let mut file_out = File::create(&temp_manifest_path).map_err(FatalError::from)?;
        // BUG FIX: `write` may perform a partial write; `write_all` guarantees
        // the entire manifest reaches the temp file.
        file_out
            .write_all(manifest.to_string_in_original_order().as_bytes())
            .map_err(FatalError::from)?;
    }
    fs::rename(temp_manifest_path, manifest_path)?;
    Ok(())
}
/// Refreshes `Cargo.lock` for the given manifest.
///
/// Running `cargo metadata` causes Cargo to update the lock file as a side
/// effect; the metadata itself is discarded.
pub fn update_lock(manifest_path: &Path) -> Result<(), FatalError> {
    cargo_metadata::MetadataCommand::new()
        .manifest_path(manifest_path)
        .exec()
        .map(|_| ())
        .map_err(FatalError::from)
}
/// Loads and parses the manifest at `manifest_path` into a TOML `Value`.
pub fn parse_cargo_config(manifest_path: &Path) -> Result<Value, FatalError> {
    let contents = load_from_file(manifest_path).map_err(FatalError::from)?;
    contents.parse().map_err(FatalError::from)
}
/// Reads the entire file at `path` into a `String`.
fn load_from_file(path: &Path) -> io::Result<String> {
    // `fs::read_to_string` sizes the buffer from file metadata and replaces
    // the manual open + read_to_string dance.
    fs::read_to_string(path)
}
#[cfg(test)]
mod test {
    use super::*;
    use assert_fs;
    #[allow(unused_imports)] // Not being detected
    use assert_fs::prelude::*;
    use predicates::prelude::*;

    mod parse_cargo_config {
        use super::*;

        #[test]
        fn doesnt_panic() {
            // Smoke test: this crate's own manifest must parse cleanly.
            parse_cargo_config(Path::new("Cargo.toml")).unwrap();
        }
    }

    mod set_package_version {
        use super::*;

        #[test]
        fn succeeds() {
            // Bump the fixture package from 0.1.0 to 2.0.0 and verify the
            // change is visible through `cargo metadata`.
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            let meta = cargo_metadata::MetadataCommand::new()
                .manifest_path(manifest_path.path())
                .exec()
                .unwrap();
            assert_eq!(meta.packages[0].version.to_string(), "0.1.0");
            set_package_version(manifest_path.path(), "2.0.0").unwrap();
            let meta = cargo_metadata::MetadataCommand::new()
                .manifest_path(manifest_path.path())
                .exec()
                .unwrap();
            assert_eq!(meta.packages[0].version.to_string(), "2.0.0");
            temp.close().unwrap();
        }
    }

    mod set_dependency_version {
        use super::*;

        #[test]
        fn preserve_table_order() {
            // The rewrite must not reorder unrelated tables
            // (e.g. [package.metadata.release] stays after [dependencies]).
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            manifest_path
                .write_str(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
foo = { version = "1.0", path = "../" }
[package.metadata.release]
"#,
                )
                .unwrap();
            set_dependency_version(manifest_path.path(), "foo", "2.0").unwrap();
            manifest_path.assert(
                predicate::str::similar(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
foo = { version = "2.0", path = "../" }
[package.metadata.release]
"#,
                )
                .from_utf8()
                .from_file_path(),
            );
            temp.close().unwrap();
        }

        #[test]
        fn dependencies() {
            // Only the [dependencies] entry changes; empty sibling tables
            // are left untouched.
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            manifest_path
                .write_str(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[build-dependencies]
[dependencies]
foo = { version = "1.0", path = "../" }
"#,
                )
                .unwrap();
            set_dependency_version(manifest_path.path(), "foo", "2.0").unwrap();
            manifest_path.assert(
                predicate::str::similar(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[build-dependencies]
[dependencies]
foo = { version = "2.0", path = "../" }
"#,
                )
                .from_utf8()
                .from_file_path(),
            );
            temp.close().unwrap();
        }

        #[test]
        fn dev_dependencies() {
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            manifest_path
                .write_str(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
[dev-dependencies]
foo = { version = "1.0", path = "../" }
"#,
                )
                .unwrap();
            set_dependency_version(manifest_path.path(), "foo", "2.0").unwrap();
            manifest_path.assert(
                predicate::str::similar(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
[dev-dependencies]
foo = { version = "2.0", path = "../" }
"#,
                )
                .from_utf8()
                .from_file_path(),
            );
            temp.close().unwrap();
        }

        #[test]
        fn build_dependencies() {
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            manifest_path
                .write_str(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dev-dependencies]
[build-dependencies]
foo = { version = "1.0", path = "../" }
"#,
                )
                .unwrap();
            set_dependency_version(manifest_path.path(), "foo", "2.0").unwrap();
            manifest_path.assert(
                predicate::str::similar(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dev-dependencies]
[build-dependencies]
foo = { version = "2.0", path = "../" }
"#,
                )
                .from_utf8()
                .from_file_path(),
            );
            temp.close().unwrap();
        }

        #[test]
        fn all_dependencies() {
            // When the dependency appears in every table, every occurrence
            // is updated.
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            manifest_path
                .write_str(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
foo = { version = "1.0", path = "../" }
[build-dependencies]
foo = { version = "1.0", path = "../" }
[dev-dependencies]
foo = { version = "1.0", path = "../" }
"#,
                )
                .unwrap();
            set_dependency_version(manifest_path.path(), "foo", "2.0").unwrap();
            manifest_path.assert(
                predicate::str::similar(
                    r#"
[package]
name = "t"
version = "0.1.0"
authors = []
edition = "2018"
[dependencies]
foo = { version = "2.0", path = "../" }
[build-dependencies]
foo = { version = "2.0", path = "../" }
[dev-dependencies]
foo = { version = "2.0", path = "../" }
"#,
                )
                .from_utf8()
                .from_file_path(),
            );
            temp.close().unwrap();
        }
    }

    mod update_lock {
        use super::*;

        #[test]
        fn in_pkg() {
            // After a version bump, `update_lock` must change Cargo.lock.
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/simple", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            let lock_path = temp.child("Cargo.lock");
            set_package_version(manifest_path.path(), "2.0.0").unwrap();
            lock_path.assert(predicate::path::eq_file(Path::new(
                "tests/fixtures/simple/Cargo.lock",
            )));
            update_lock(manifest_path.path()).unwrap();
            lock_path.assert(
                predicate::path::eq_file(Path::new("tests/fixtures/simple/Cargo.lock")).not(),
            );
            temp.close().unwrap();
        }

        #[test]
        fn in_pure_workspace() {
            // Same check for a member crate in a virtual workspace.
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/pure_ws", &["**"]).unwrap();
            let manifest_path = temp.child("b/Cargo.toml");
            let lock_path = temp.child("Cargo.lock");
            set_package_version(manifest_path.path(), "2.0.0").unwrap();
            lock_path.assert(predicate::path::eq_file(Path::new(
                "tests/fixtures/pure_ws/Cargo.lock",
            )));
            update_lock(manifest_path.path()).unwrap();
            lock_path.assert(
                predicate::path::eq_file(Path::new("tests/fixtures/pure_ws/Cargo.lock")).not(),
            );
            temp.close().unwrap();
        }

        #[test]
        fn in_mixed_workspace() {
            // Same check for a workspace whose root is also a package.
            let temp = assert_fs::TempDir::new().unwrap();
            temp.copy_from("tests/fixtures/mixed_ws", &["**"]).unwrap();
            let manifest_path = temp.child("Cargo.toml");
            let lock_path = temp.child("Cargo.lock");
            set_package_version(manifest_path.path(), "2.0.0").unwrap();
            lock_path.assert(predicate::path::eq_file(Path::new(
                "tests/fixtures/mixed_ws/Cargo.lock",
            )));
            update_lock(manifest_path.path()).unwrap();
            lock_path.assert(
                predicate::path::eq_file(Path::new("tests/fixtures/mixed_ws/Cargo.lock")).not(),
            );
            temp.close().unwrap();
        }
    }
}
|
/// Lifecycle phase of a touch contact, mirroring `winit::event::TouchPhase`
/// with this crate's own naming.
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]
pub enum TouchPhase {
    Start,
    Move,
    End,
    Cancel,
}
impl From<winit::event::TouchPhase> for TouchPhase {
    /// One-to-one mapping from winit's touch phases to ours.
    fn from(phase: winit::event::TouchPhase) -> Self {
        use winit::event::TouchPhase as WinitPhase;
        match phase {
            WinitPhase::Started => Self::Start,
            WinitPhase::Moved => Self::Move,
            WinitPhase::Ended => Self::End,
            WinitPhase::Cancelled => Self::Cancel,
        }
    }
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Opaque WinRT object handles, projected as raw COM-style interface pointers.
pub type IJsonValue = *mut ::core::ffi::c_void;
pub type JsonArray = *mut ::core::ffi::c_void;
/// Error status codes for the projected JSON API, modeled as a transparent
/// newtype over `i32` with associated constants (an open enum).
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct JsonErrorStatus(pub i32);
impl JsonErrorStatus {
    pub const Unknown: Self = Self(0);
    pub const InvalidJsonString: Self = Self(1);
    pub const InvalidJsonNumber: Self = Self(2);
    pub const JsonValueNotFound: Self = Self(3);
    pub const ImplementationLimit: Self = Self(4);
}
// Opaque WinRT object handles, projected as raw COM-style interface pointers.
pub type JsonObject = *mut ::core::ffi::c_void;
pub type JsonValue = *mut ::core::ffi::c_void;
/// The dynamic type of a JSON value, modeled as a transparent newtype over
/// `i32` with associated constants (an open enum).
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct JsonValueType(pub i32);
impl JsonValueType {
    pub const Null: Self = Self(0);
    pub const Boolean: Self = Self(1);
    pub const Number: Self = Self(2);
    pub const String: Self = Self(3);
    pub const Array: Self = Self(4);
    pub const Object: Self = Self(5);
}
|
use crate::Edge;
/// A directed graph over vertices carrying data of type `V`, with
/// `f64`-weighted edges stored as per-vertex adjacency lists.
pub struct DirectedGraph<V> {
    // Vertex storage; a vertex's id is its index in this vec.
    vertices: Vec<Vertex<V>>,
    // Total number of distinct directed edges (counted via outgoing lists).
    edges: usize,
}
#[cfg_attr(feature = "serde", derive(::serde::Serialize, ::serde::Deserialize))]
struct Vertex<V> {
    // Edges arriving at this vertex; each entry's `destination` field holds
    // the *other* endpoint (the source vertex id), sorted by that id.
    incoming: Vec<Edge<usize>>,
    // Edges leaving this vertex, sorted by destination id.
    outgoing: Vec<Edge<usize>>,
    data: V,
}
impl<V> DirectedGraph<V> {
    /// Creates an empty graph with no vertices or edges.
    pub fn new() -> Self {
        DirectedGraph {
            edges: 0,
            vertices: vec![],
        }
    }

    /// Adds a vertex carrying `data`; returns the new vertex's id
    /// (its index in the vertex list).
    pub fn add_vertex(&mut self, data: V) -> usize {
        let id = self.vertices.len();
        self.vertices.push(Vertex {
            incoming: vec![],
            outgoing: vec![],
            data,
        });
        id
    }

    /// Adds (or re-costs) the directed edge `from -> to`.
    ///
    /// note: if edge is not present, runtime is linear in the number of edges on the relevant
    /// vertices. if edge is present, runtime is logarithmic in the number of edges on the
    /// relevant vertices.
    pub fn add_edge(&mut self, from: usize, to: usize, cost: f64) {
        assert!(
            from < self.vertices.len() && to < self.vertices.len(),
            "from and to vertices must exist"
        );
        let outgoing = &mut self.vertices[from].outgoing;
        match outgoing.binary_search_by_key(&to, |e| e.destination) {
            Ok(i) => outgoing[i].cost = cost,
            Err(i) => {
                outgoing.insert(
                    i,
                    Edge {
                        destination: to,
                        cost,
                    },
                );
                // Only outgoing entries are counted, so each edge counts once.
                self.edges += 1;
            }
        }
        // BUG FIX: the mirror entry belongs on the *destination* vertex's
        // incoming list, recording `from` as the other endpoint. The previous
        // code updated `vertices[from].incoming` with a self-referential
        // edge, so `incoming_edges(to)` never reflected the new edge.
        let incoming = &mut self.vertices[to].incoming;
        match incoming.binary_search_by_key(&from, |e| e.destination) {
            Ok(i) => incoming[i].cost = cost,
            Err(i) => incoming.insert(
                i,
                Edge {
                    destination: from,
                    cost,
                },
            ),
        }
    }

    /// bulk loading method: appends all edges, then sorts and dedups each
    /// adjacency list once. Stops at (but reports) the first invalid edge.
    pub fn try_add_edges(&mut self, edges: &[(usize, usize, f64)]) -> Result<(), &'static str> {
        let mut result = Ok(());
        for &(from, to, cost) in edges {
            if from >= self.vertices.len() || to >= self.vertices.len() {
                result = Err("Edge vertices don't exist");
                break;
            }
            self.vertices[from].outgoing.push(Edge {
                destination: to,
                cost,
            });
            // BUG FIX: the mirror entry is an *incoming* edge on `to`.
            // Pushing it onto `vertices[to].outgoing` effectively made every
            // directed edge bidirectional and double-counted it in
            // `self.edges`, while leaving all incoming lists empty.
            self.vertices[to].incoming.push(Edge {
                destination: from,
                cost,
            });
        }
        self.edges = 0;
        for vertex in &mut self.vertices {
            vertex.incoming.sort_by_key(|e| e.destination);
            vertex.incoming.dedup_by(|a, b| {
                // in the case of duplicate edges, we want to keep the *last* edge in the list.
                // since Vec::dedup keeps the first value instead of the last, we need to swap the
                // values when they're in the same bucket to get that behavior
                let same_bucket = a.destination == b.destination;
                if same_bucket {
                    std::mem::swap(a, b);
                }
                same_bucket
            });
            vertex.outgoing.sort_by_key(|e| e.destination);
            vertex.outgoing.dedup_by(|a, b| {
                let same_bucket = a.destination == b.destination;
                if same_bucket {
                    std::mem::swap(a, b);
                }
                same_bucket
            });
            self.edges += vertex.outgoing.len();
        }
        result
    }

    /// Borrows the data stored at `vertex`. Panics when out of bounds.
    pub fn vertex_data(&self, vertex: usize) -> &V {
        &self.vertices[vertex].data
    }

    /// Edges leaving `vertex`, sorted by destination id.
    pub fn outgoing_edges(&self, vertex: usize) -> &[Edge<usize>] {
        &self.vertices[vertex].outgoing
    }

    /// Edges arriving at `vertex`; each entry's `destination` is the source id.
    pub fn incoming_edges(&self, vertex: usize) -> &[Edge<usize>] {
        &self.vertices[vertex].incoming
    }

    /// # Safety
    /// `vertex` must be a valid vertex id (`vertex < self.len()`).
    pub unsafe fn vertex_data_unchecked(&self, vertex: usize) -> &V {
        &self.vertices.get_unchecked(vertex).data
    }

    /// # Safety
    /// `vertex` must be a valid vertex id (`vertex < self.len()`).
    pub unsafe fn outgoing_edges_unchecked(&self, vertex: usize) -> &[Edge<usize>] {
        &self.vertices.get_unchecked(vertex).outgoing
    }

    /// # Safety
    /// `vertex` must be a valid vertex id (`vertex < self.len()`).
    pub unsafe fn incoming_edges_unchecked(&self, vertex: usize) -> &[Edge<usize>] {
        &self.vertices.get_unchecked(vertex).incoming
    }

    /// Number of vertices.
    pub fn len(&self) -> usize {
        self.vertices.len()
    }

    /// Number of distinct directed edges.
    pub fn total_edges(&self) -> usize {
        self.edges
    }

    /// Looks up the edge `from -> to`, if present.
    ///
    /// note: runtime is logarithmic in the number of edges on the from vertex.
    pub fn find_edge(&self, from: usize, to: usize) -> Option<&Edge<usize>> {
        self.vertices[from]
            .outgoing
            .binary_search_by_key(&to, |e| e.destination)
            .ok()
            .map(|i| &self.vertices[from].outgoing[i])
    }
}
#[cfg(feature = "serde")]
mod serde {
use serde::ser::{SerializeSeq, SerializeStruct};
use serde::{Deserialize, Serialize};
impl<V: Serialize> Serialize for super::DirectedGraph<V> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut s = serializer.serialize_struct("DirectedGraph", 2)?;
s.serialize_field(
"vertices",
&StreamingSequence {
count: self.vertices.len(),
iter: || self.vertices.iter().map(|v| &v.data),
},
)?;
s.serialize_field(
"edges",
&StreamingSequence {
count: self.edges,
iter: || {
self.vertices.iter().enumerate().flat_map(|(from, v)| {
v.outgoing
.iter()
.map(move |e| (from, e.destination, e.cost))
})
},
},
)?;
s.end()
}
}
struct StreamingSequence<F> {
count: usize,
iter: F,
}
impl<F, I, T> Serialize for StreamingSequence<F>
where
T: Serialize,
F: Fn() -> I,
I: Iterator<Item = T>,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut s = serializer.serialize_seq(Some(self.count))?;
for v in (self.iter)() {
s.serialize_element(&v)?;
}
s.end()
}
}
#[derive(Serialize, Deserialize)]
struct DirectedGraphTransport<V> {
vertices: Vec<V>,
edges: Vec<(usize, usize, f64)>,
}
impl<'de, V: Deserialize<'de>> Deserialize<'de> for super::DirectedGraph<V> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let v = DirectedGraphTransport::deserialize(deserializer)?;
let mut graph = super::DirectedGraph::new();
for data in v.vertices {
graph.add_vertex(data);
}
if let Err(e) = graph.try_add_edges(&v.edges) {
return Err(serde::de::Error::custom(e));
}
Ok(graph)
}
}
}
|
// actor/player.rs
use std::cmp::Ordering;
/// A game participant. Equality is intentionally based on `name` alone;
/// all other state is ignored when comparing players.
#[derive(Eq, Clone)]
pub struct Player {
    pub name: String,
    pub money: i32,
    pub knowledge: i32,
    pub tiles: Vec<usize>,
    pub skip_one_turn: bool,
    pub still_playing: bool,
    pub is_computer: bool,
}

impl PartialEq for Player {
    /// Two values refer to the same player iff their names match.
    fn eq(&self, other: &Player) -> bool {
        other.name == self.name
    }
}
#[test]
fn test() {
let p1 = Player{
name: String::from("fisk"),
money: 25,
knowledge: 16,
tiles: Vec::new(),
skip_one_turn: false,
still_playing: false,
is_computer: false
};
assert!(p1 == p1);
assert!(p1 == p1.clone());
}
|
use std::io;
use std::iter;
/// Builds a row of `count` asterisks.
fn star_row(count: usize) -> String {
    iter::repeat('*').take(count).collect()
}

/// Reads `n` from stdin and prints an ascending star triangle with rows of
/// 1 through n-1 stars (preserving the original's exclusive `1..n` range).
///
/// FIX: the original did not compile — a stray, bodyless `fn rpt` item,
/// an untyped `parse()`, an ignored `read_line` Result, and
/// `println!(rpt("*"))` passing a value where a format literal is required.
fn main() {
    let reader = io::stdin();
    let mut ip = String::new();
    reader
        .read_line(&mut ip)
        .expect("failed to read from stdin");
    let n: usize = ip
        .trim()
        .parse()
        .expect("input must be a non-negative integer");
    for i in 1..n {
        println!("{}", star_row(i));
    }
}
// Copyright 2015 Jerome Rasky <jerome@rasky.co>
//
// Licensed under the Apache License, version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// <http://www.apache.org/licenses/LICENSE-2.0>
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied. See the
// License for the specific language concerning governing permissions and
// limitations under the License.
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::collections::{HashMap, BinaryHeap};
use std::borrow::{Cow, IntoCow};
use std::cmp;
use std::path;
use error::StringError;
use constants::*;
// Coarse classification of characters used by the matcher.
// NOTE(review): the code that assigns these classes is outside this excerpt;
// `First` presumably marks a word-start position — confirm against LineInfo::new.
#[derive(PartialEq)]
enum CharClass {
    Whitespace,
    Numeric,
    Alphabetic,
    First,
    Other
}
#[derive(Debug)]
struct LineInfo {
    // For each character, the positions at which it occurs in the line.
    char_map: HashMap<char, Vec<usize>>,
    // Per-position score weights. NOTE(review): populated by LineInfo::new,
    // which is outside this excerpt.
    heatmap: Vec<isize>,
    // Tie-break weight; derived from the line number in read_history.
    pub factor: isize
}
// A scored query match. In query_inplace both `score` and `factor` are
// stored negated so that the max-heap (BinaryHeap) behaves as a min-heap.
#[derive(Debug)]
pub struct LineMatch {
    score: isize,
    factor: isize,
    line: Cow<'static, str>
}
/// The searchable corpus: each known line mapped to its precomputed
/// per-line matching metadata.
#[derive(Debug)]
pub struct SearchBase {
    lines: HashMap<Cow<'static, str>, LineInfo>
}
impl Default for SearchBase {
fn default() -> SearchBase {
SearchBase {
lines: HashMap::default()
}
}
}
impl Ord for LineMatch {
    // Orders by score first, factor second. Because `query_inplace`
    // stores both fields negated, "greatest" in the BinaryHeap means
    // "worst match", which is exactly what push_pop should evict.
    fn cmp(&self, other: &LineMatch) -> cmp::Ordering {
        match self.score.cmp(&other.score) {
            cmp::Ordering::Equal => self.factor.cmp(&other.factor),
            order => order
        }
    }
}
impl PartialOrd for LineMatch {
    // Consistent with `Ord` by construction.
    fn partial_cmp(&self, other: &LineMatch) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for LineMatch {
    // Equality follows the same (score, factor) ordering; the line text
    // itself is deliberately not compared.
    fn eq(&self, other: &LineMatch) -> bool {
        self.cmp(other) == cmp::Ordering::Equal
    }
}
// Sound: `cmp` is a total order over (score, factor).
impl Eq for LineMatch {}
impl SearchBase {
    /// Populates the search base from a history file, one entry per line.
    /// Each line's `factor` is its line number, so later (more recent)
    /// lines win scoring ties. Returns the index of the last line read
    /// (-1 for an empty file). Duplicate lines overwrite earlier entries.
    pub fn read_history<T: AsRef<path::Path>>(&mut self, path: T) -> Result<isize, StringError> {
        let input_file = match File::open(path) {
            Ok(f) => BufReader::new(f),
            Err(e) => return Err(StringError::new("Could not open history file", Some(Box::new(e))))
        };
        let mut line_number = -1;
        for m_line in input_file.lines() {
            let line = match m_line {
                Ok(line) => line,
                Err(e) => {
                    return Err(StringError::new("Failed to read line", Some(Box::new(e))));
                }
            };
            line_number += 1;
            // generate the line info
            let info = LineInfo::new(&line, line_number);
            // insert the line into the map
            self.lines.insert(line.into_cow(), info);
        }
        Ok(line_number)
    }
    /// Scores every line against `query` and retains the best matches in
    /// `matches`. Scores/factors are pushed *negated* so that the heap's
    /// root is the worst retained match and `push_pop` evicts it in
    /// O(log n) once the heap is "full".
    ///
    /// NOTE(review): "full" is judged against `matches.capacity()`, which
    /// is allowed to exceed the capacity originally requested — the heap
    /// may therefore retain more than MATCH_NUMBER entries. Confirm that
    /// is acceptable to callers of `query`.
    pub fn query_inplace<T: AsRef<str>>(&self, query: T, matches: &mut BinaryHeap<LineMatch>) {
        // search for a match
        for (line, info) in self.lines.iter() {
            let line_score = match info.query_score(&query) {
                None => {
                    // non-matching line
                    continue;
                },
                Some(score) => {
                    score
                }
            };
            // negate everything so we can use push_pop
            let match_item = LineMatch {
                score: -line_score,
                factor: -info.factor,
                line: line.clone()
            };
            let matches_len = matches.len();
            let matches_capacity = matches.capacity();
            let insert;
            match matches.peek() {
                None => {
                    // empty heap: always insert
                    insert = true;
                },
                Some(item) => {
                    // insert if better than the current worst match, or if
                    // there is still room in the heap
                    if &match_item < item || matches_len < matches_capacity {
                        insert = true
                    } else {
                        insert = false;
                    }
                }
            }
            if insert {
                if matches_len < matches_capacity {
                    matches.push(match_item);
                } else {
                    // heap is full: add the new item and drop the worst one
                    matches.push_pop(match_item);
                }
            }
        }
    }
    /// Convenience wrapper over `query_inplace`: returns up to
    /// MATCH_NUMBER matching lines, best first (scores are stored
    /// negated, so ascending sorted order is descending match quality).
    pub fn query<T: AsRef<str>>(&self, query: T) -> Vec<Cow<'static, str>> {
        // allocate the match object
        let mut matches: BinaryHeap<LineMatch> = BinaryHeap::with_capacity(MATCH_NUMBER);
        self.query_inplace(query, &mut matches);
        // result contains a vector of the top MATCH_NUMBER lines, in descending score order
        matches.into_sorted_vec().into_iter().map(|x| {x.line}).collect()
    }
}
impl LineInfo {
    /// Builds the match metadata for one line.
    ///
    /// `factor` is a tie-break weight (the line number, per
    /// `read_history`). Produces a char -> occurrence-positions map and a
    /// per-position "heatmap" of score bonuses that favors characters
    /// right after whitespace, at character-class changes, and at the
    /// start of the line; both bonuses decay as the line progresses.
    fn new<T: AsRef<str>>(item: T, factor: isize) -> LineInfo {
        let mut map: HashMap<char, Vec<usize>> = HashMap::new();
        let mut heat = vec![];
        let line = item.as_ref();
        // Decaying bonuses: ws_score after whitespace, cs_score after a
        // class change / line start.
        let mut ws_score = 0;
        let mut cs_score = 0;
        let mut cur_class = CharClass::First;
        // character class changes don't stack
        let mut cs_change = false;
        for (idx, c) in line.chars().enumerate() {
            // don't process characters beyond MAX_LEN
            if idx > MAX_LEN {
                break;
            }
            // don't map whitespace
            if !c.is_whitespace() {
                // update the character class change score if needed
                if cur_class == CharClass::First {
                    // add the first character factor on top of class change
                    cs_score += FIRST_FACTOR;
                }
                if c.is_numeric() {
                    if cur_class != CharClass::Numeric {
                        cur_class = CharClass::Numeric;
                        if !cs_change {
                            cs_score += CLASS_FACTOR;
                            cs_change = true;
                        }
                    } else {
                        cs_change = false;
                    }
                } else if c.is_alphabetic() {
                    if cur_class != CharClass::Alphabetic {
                        cur_class = CharClass::Alphabetic;
                        if !cs_change {
                            cs_score += CLASS_FACTOR;
                            cs_change = true;
                        }
                    } else {
                        cs_change = false;
                    }
                } else {
                    if cur_class != CharClass::Other {
                        cur_class = CharClass::Other;
                        if !cs_change {
                            cs_score += CLASS_FACTOR;
                            cs_change = true;
                        }
                    } else {
                        cs_change = false;
                    }
                }
                // add an entry in the character map
                map.entry(c).or_insert(Vec::new()).push(idx);
                if c.is_uppercase() {
                    for lc in c.to_lowercase() {
                        // also insert all lowercase equivalents of this character
                        // but not the other way around, so that typing something
                        // uppercase specifies to match uppercase
                        map.entry(lc).or_insert(Vec::new()).push(idx);
                    }
                }
            } else {
                // whitespace is treated differently
                cur_class = CharClass::Whitespace;
                ws_score = WHITESPACE_FACTOR;
            }
            // push to the heatmap
            heat.push(ws_score + cs_score);
            // reduce things
            ws_score /= WHITESPACE_REDUCE;
            if !cs_change {
                cs_score /= CLASS_REDUCE;
            }
        }
        LineInfo {
            char_map: map,
            heatmap: heat,
            factor: factor
        }
    }
    /// For each non-whitespace query character, in order, collects the
    /// candidate positions where it occurs in the line, trimming each
    /// list to positions strictly after the *first* candidate of the
    /// previous character. Returns None when any character cannot match.
    ///
    /// NOTE(review): `query.len()` is a BYTE count but the loop subtracts
    /// one per whitespace CHAR, so a query containing multi-byte (non-
    /// ASCII) characters can never satisfy `positions.len() == match_len`
    /// — confirm queries are expected to be ASCII.
    fn query_sequence<T: AsRef<str>>(&self, query_item: T) -> Option<Vec<Vec<usize>>> {
        let query = query_item.as_ref();
        let mut positions: Vec<Vec<usize>> = vec![];
        let mut match_len = query.len();
        for c in query.chars() {
            // ignore whitespace characters
            if c.is_whitespace() {
                trace!("Skipping whitespace character");
                match_len -= 1;
                continue;
            }
            trace!("Matching character: {:?}", c);
            match self.char_map.get(&c) {
                None => break,
                Some(list) => {
                    let to_push;
                    match positions.last() {
                        None => {
                            // first character: every occurrence is a candidate
                            to_push = list.clone();
                        },
                        Some(item) => {
                            // keep only occurrences after the previous
                            // character's earliest candidate position
                            match list.binary_search(&item[0]) {
                                Ok(idx) => {
                                    if idx >= list.len() - 1 {
                                        // line is non-matching
                                        break;
                                    } else {
                                        to_push = list.split_at(idx + 1).1.into();
                                    }
                                },
                                Err(idx) => {
                                    if idx >= list.len() {
                                        // line is non-matching
                                        break;
                                    } else {
                                        to_push = list.split_at(idx).1.into();
                                    }
                                }
                            }
                        }
                    }
                    positions.push(to_push);
                }
            }
        }
        if positions.len() == match_len {
            Some(positions)
        } else {
            None
        }
    }
    /// Expands the per-character candidate lists into every strictly
    /// increasing assignment of one position per query character,
    /// enumerated odometer-style over the index vector `idx`.
    ///
    /// NOTE(review): worst case is the full cross product of the lists
    /// (exponential in query length); also, an empty query yields empty
    /// `positions`, making `idx.len() - 1` underflow and panic — confirm
    /// callers never pass an all-whitespace/empty query.
    fn query_positions<T: AsRef<str>>(&self, query: T) -> Option<Vec<Vec<usize>>> {
        match self.query_sequence(query) {
            None => None,
            Some(positions) => {
                // matching line
                // create our idx vector
                let mut idx = vec![0; positions.len()];
                let mut result = vec![];
                loop {
                    // check that current configuration is strictly increasing
                    let mut ignore = false;
                    {
                        let mut last_pos = None;
                        for (i, pos) in idx.iter().enumerate() {
                            match last_pos {
                                None => last_pos = Some(positions[i][*pos]),
                                Some(other) => {
                                    if other >= positions[i][*pos] {
                                        ignore = true;
                                        break;
                                    } else {
                                        last_pos = Some(positions[i][*pos]);
                                    }
                                }
                            }
                        }
                    }
                    if !ignore {
                        // add the configuration to the list
                        result.push(idx.iter().enumerate().map(|(i, pos)| {positions[i][*pos]}).collect());
                    }
                    // update our position vector
                    let mut update_idx = idx.len() - 1;
                    let mut finished = false;
                    loop {
                        idx[update_idx] += 1;
                        if idx[update_idx] >= positions[update_idx].len() {
                            if update_idx == 0 {
                                // we're finished with all permutations
                                finished = true;
                                break;
                            } else {
                                idx[update_idx] = 0;
                                update_idx -= 1;
                            }
                        } else {
                            // finished updating for this permutation
                            break;
                        }
                    }
                    if finished {
                        // finished with everything
                        break;
                    }
                }
                // return result
                Some(result)
            }
        }
    }
    /// Scores this line against `query`: for every candidate position
    /// assignment, combines the average gap between matched positions
    /// with the summed heatmap bonuses (weighted by DIST_WEIGHT and
    /// HEAT_WEIGHT), keeps the best assignment, then adds the line's
    /// factor. Returns None for a non-matching line.
    fn query_score<T: AsRef<str>>(&self, query: T) -> Option<isize> {
        match self.query_positions(query) {
            None => None,
            Some(positions) => {
                let mut top_score = None;
                for pgroup in positions.iter() {
                    // find the average distance between the indexes
                    let mut dist_total = 0;
                    let mut dist_count = 0;
                    for i in 0..pgroup.len() - 1 {
                        dist_total += (pgroup[i + 1] - pgroup[i]) as isize;
                        dist_count += 1;
                    }
                    // avoid division by zero
                    if dist_count == 0 {
                        dist_count = 1;
                    }
                    // sum the heatmap
                    let heat_sum: isize = pgroup.iter().map(|pos| {self.heatmap[*pos]}).sum();
                    let score = (dist_total / dist_count) * DIST_WEIGHT +
                        heat_sum * HEAT_WEIGHT;
                    match top_score {
                        None => top_score = Some(score),
                        Some(last) => {
                            if score > last {
                                top_score = Some(score);
                            }
                        }
                    }
                }
                // return the result
                match top_score {
                    None => None,
                    Some(score) => {
                        Some(score + self.factor / FACTOR_REDUCE)
                    }
                }
            }
        }
    }
}
|
/// Lehmer (multiplicative congruential) pseudo-random generator with
/// 128-bit state; each step multiplies the state by a fixed constant
/// modulo 2^128 and returns the high 64 bits. NOT cryptographically
/// secure.
pub struct Lehmer64 {
    value: u128,
}
impl Lehmer64 {
    /// Creates a generator seeded from the CPU timestamp counter
    /// (x86_64 only).
    ///
    /// NOTE(review): a zero state would make every output 0; `_rdtsc`
    /// is effectively never 0 in practice, but a caller-supplied seed
    /// constructor would make this explicit — confirm whether needed.
    pub fn new() -> Lehmer64 {
        Lehmer64 {
            // SAFETY: `_rdtsc` has no memory-safety preconditions; it
            // only reads the CPU timestamp counter.
            value: unsafe { core::arch::x86_64::_rdtsc() } as u128,
        }
    }
    /// Advances the state and returns the next pseudo-random value.
    pub fn rand(&mut self) -> usize {
        // Fix: the previous `std::intrinsics::unchecked_mul` is an
        // unstable intrinsic AND undefined behavior on overflow — yet
        // this generator *relies* on multiplication wrapping mod 2^128.
        // `wrapping_mul` is stable, has exactly the intended semantics,
        // and compiles to the same multiply instruction.
        self.value = self.value.wrapping_mul(0xda942042e4dd58b5);
        (self.value >> 64) as usize
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
failure::Error,
fidl_fuchsia_io::DirectoryProxy,
fidl_fuchsia_space::{
ErrorCode as SpaceErrorCode, ManagerRequest as SpaceManagerRequest,
ManagerRequestStream as SpaceManagerRequestStream,
},
fuchsia_syslog::{fx_log_err, fx_log_info},
fuchsia_zircon::Status,
futures::prelude::*,
};
/// Serves `fuchsia.space.Manager` requests from `stream` until the
/// stream closes or errors, triggering a pkgfs garbage collection for
/// every incoming `Gc` request and replying with its outcome.
pub async fn serve(
    pkgfs_ctl: DirectoryProxy,
    mut stream: SpaceManagerRequestStream,
) -> Result<(), Error> {
    while let Some(event) = stream.try_next().await? {
        // `Gc` is the only method on this protocol, so the let-pattern
        // is irrefutable.
        let SpaceManagerRequest::Gc { responder } = event;
        responder.send(&mut gc(&pkgfs_ctl).await)?;
    }
    Ok(())
}
/// Triggers pkgfs garbage collection by unlinking the magic
/// `garbage` entry under /pkgfs/ctl. Any failure (FIDL error or
/// non-OK status) is logged and collapsed into `Internal`.
async fn gc(pkgfs_ctl: &DirectoryProxy) -> Result<(), SpaceErrorCode> {
    fx_log_info!("triggering pkgfs gc");
    match pkgfs_ctl.unlink("garbage").await {
        // unlink resolves to a raw zx status code; OK means GC ran.
        Ok(c) if c == Status::OK.into_raw() => Ok(()),
        r => {
            fx_log_err!("error unlinking /pkgfs/ctl/garbage: {:?}", r);
            Err(SpaceErrorCode::Internal)
        }
    }
}
|
//! Widget and data binding utilities
use crate::prelude::WidgetData;
pub mod decorator;
pub mod wrapper;
/// Helper type to contain bound data and data changed callback.
pub struct WidgetDataHolder<W, D = ()>
where
    D: WidgetData,
{
    // The bound data source.
    pub data: D,
    // Version observed at the last `update`; used for change detection.
    pub last_version: D::Version,
    // Invoked (via `D::read`) when the data's version advances.
    pub on_data_changed: fn(&mut W, &D::Data),
}
impl<W> Default for WidgetDataHolder<W, ()> {
    /// A holder bound to no data (`()`): the version never changes and
    /// the callback is a no-op.
    fn default() -> Self {
        Self {
            data: (),
            last_version: (),
            on_data_changed: |_, _| (),
        }
    }
}
impl<W> WidgetDataHolder<W, ()> {
    /// Converts an unbound holder into one bound to `data`, starting at
    /// the default version with a no-op change callback.
    pub fn new<D>(data: D) -> WidgetDataHolder<W, D>
    where
        D: WidgetData,
    {
        WidgetDataHolder {
            data,
            last_version: D::Version::default(),
            on_data_changed: |_, _| (),
        }
    }
}
impl<W, D> WidgetDataHolder<W, D>
where
    D: WidgetData,
{
    /// Checks the data's current version and, if it advanced since the
    /// last call, records it and notifies the widget.
    pub fn update(&mut self, widget: &mut W) {
        let current_version = self.data.version();
        if current_version != self.last_version {
            self.last_version = current_version;
            // NOTE(review): assumes `D::read` invokes the callback with
            // the fresh data — confirm against the WidgetData trait.
            self.data.read(widget, self.on_data_changed);
        }
    }
}
|
use procon_reader::ProconReader;
use std::cmp::Ordering;
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    // First token: the number of test cases; each case is one integer k.
    let t: usize = rd.get();
    for _ in 0..t {
        let k: u64 = rd.get();
        solve(k);
    }
}
/// Prints the (row, column) coordinates of the cell holding value `k` in
/// the diagonal-by-diagonal numbering (see the sequence sketch below the
/// function: 1, 3, 7, 13, 21, 31 are the main-diagonal values).
fn solve(k: u64) {
    // `diag` is the value at cell (x, x); anti-diagonal x covers the
    // range diag-(x-1) ..= diag+(x-1).
    let mut diag: u64 = 1;
    for x in 1u64.. {
        let lo = diag - (x - 1); // safe: diag = 1 + x*(x-1) >= x
        let hi = diag + (x - 1);
        if lo <= k && k <= hi {
            let (row, col) = match diag.cmp(&k) {
                Ordering::Less => (x, x - (k - diag)),
                Ordering::Equal => (x, x),
                Ordering::Greater => (x - (diag - k), x),
            };
            println!("{} {}", row, col);
            return;
        }
        diag += 2 * x;
    }
}
// 1, 3, 7, 13, 21, 31
// 2 4 6 8 10
|
//! Repositories under http://github.com/lumen that have Erlang code that needs to be tested to be
//! compiled with `lumen` and `liblumen_otp` BIFs.
#[path = "lumen/otp.rs"]
mod otp;
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::sync::Arc;
use alloc::string::String;
use spin::Mutex;
use super::super::super::super::qlib::common::*;
use super::super::super::super::qlib::linux_def::*;
use super::super::super::super::task::*;
use super::super::super::ramfs::symlink::*;
use super::super::super::dirent::*;
use super::super::super::mount::*;
use super::super::super::inode::*;
use super::super::super::super::threadmgr::thread::*;
use super::super::symlink_proc::*;
// /proc/<pid>/exe node: resolves to the thread's executable file.
pub struct ExeNode {
    pub thread: Thread,
}
impl ExeNode {
    // Returns the dirent of the executable recorded in the thread's
    // memory manager metadata, or ENOENT if none is recorded.
    pub fn Executable(&self) -> Result<Dirent> {
        let mm = self.thread.lock().memoryMgr.clone();
        let ret = mm.metadata.lock().executable.clone();
        match ret {
            None => return Err(Error::SysError(SysErr::ENOENT)),
            Some(d) => Ok(d)
        }
    }
}
impl ReadLinkNode for ExeNode {
    // Resolves the symlink target: the executable's full path relative
    // to the kernel's root directory.
    fn ReadLink(&self, _link: &Symlink, task: &Task, _dir: &Inode) -> Result<String> {
        let exe = self.Executable()?;
        let kernel = task.Thread().lock().k.clone();
        let root = kernel.RootDir();
        let (name, _) = exe.FullName(&root);
        return Ok(name)
    }
    // Delegates to the generic symlink resolution.
    fn GetLink(&self, link: &Symlink, task: &Task, dir: &Inode) -> Result<Dirent> {
        return link.GetLink(task, dir);
    }
}
// Creates the /proc/<pid>/exe inode for `thread` on mount source `msrc`.
pub fn NewExe(task: &Task, thread: &Thread, msrc: &Arc<Mutex<MountSource>>) -> Inode {
    let node = ExeNode {
        thread: thread.clone()
    };
    return SymlinkNode::New(task, msrc, node, Some(thread.clone()))
}
|
extern crate serde_json;
use datamodel_parser::RootTypes;
// Options controlling JSON output.
pub struct JSONFormatterOptions {
    // When true, emit indented ("pretty") JSON instead of compact JSON.
    pub pretty: bool,
}
/// Serializes the parsed root types to JSON, compact or pretty-printed
/// depending on `options.pretty`.
///
/// Returns the JSON text, or the serializer's error rendered as a
/// `String`.
pub fn format(
    options: JSONFormatterOptions,
    types: Vec<RootTypes>,
) -> Result<String, String> {
    // Pick the serializer once, then normalize the error type.
    let serialized = if options.pretty {
        serde_json::to_string_pretty(&types)
    } else {
        serde_json::to_string(&types)
    };
    serialized.map_err(|e| e.to_string())
}
|
#[cfg_attr(rustfmt, rustfmt_skip)]
#[allow(unknown_lints)]
#[allow(clippy)]
mod grammar;
/// Contains all structures related to the AST for the WebIDL grammar.
pub mod ast;
/// Contains the visitor trait needed to traverse the AST and helper walk functions.
pub mod visitor;
pub use lalrpop_util::ParseError;
use lexer::{LexicalError, Token};
/// The result that is returned when an input string is parsed. If the parse succeeds, the `Ok`
/// result will be a vector of definitions representing the AST. If the parse fails, the `Err` will
/// be either an error from the lexer or the parser.
pub type ParseResult = Result<ast::AST, ParseError<usize, Token, LexicalError>>;
/// The parser that is used to parse WebIDL. It really serves as a wrapper around the parse
/// function exposed by lalrpop.
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct Parser;
impl Parser {
    /// Creates a new parser.
    pub fn new() -> Self {
        Parser
    }
    /// Parses a given input string and returns an AST.
    ///
    /// # Example
    ///
    /// ```
    /// use webidl::*;
    /// use webidl::ast::*;
    ///
    /// let parser = Parser::new();
    /// let result = parser.parse_string("[Attribute] interface Node { };");
    ///
    /// assert_eq!(result,
    ///            Ok(vec![Definition::Interface(Interface::NonPartial(NonPartialInterface {
    ///                 extended_attributes: vec![
    ///                     Box::new(ExtendedAttribute::NoArguments(
    ///                         Other::Identifier("Attribute".to_string())))],
    ///                 inherits: None,
    ///                 members: vec![],
    ///                 name: "Node".to_string()
    ///            }))]));
    /// ```
    pub fn parse_string(&self, input: &str) -> ParseResult {
        // Feed the crate's custom lexer into the lalrpop-generated
        // grammar entry point.
        grammar::parse_Definitions(::Lexer::new(input))
    }
}
|
use crate::PubComId;
use std::collections::HashSet;
use toml::Value;
// Panic/expect messages for malformed TOML stop definitions (missing or
// mis-typed keys) and for illegal station placements.
static VALUE_MISSING: &str = "value is missing";
static VALUE_TYPEERROR: &str = "value is not of type Integer";
static SPOTS_MISSING: &str = "spots is missing";
static SPOTS_TYPEERROR: &str = "spots is not of type Integer";
static VALUES_MISSING: &str = "values is missing";
static VALUES_TYPEERROR: &str = "values is not of type Array";
static VALUES_LENERROR: &str = "values is not of length four";
static NAME_MISSING: &str = "name is missing";
static NAME_TYPEERROR: &str = "name is not of type String";
static ACTION_FORBIDDEN: &str = "action is forbidden";
/// Represents a train stop: either an on-board city or an off-board
/// location.
#[derive(Clone, Debug)]
pub enum Stop {
    /// Stop with a city
    City(City),
    /// Stop with an off-board location
    Location(Location),
}
/// Represents a city
#[derive(Clone, Debug)]
pub struct City {
    // The city's revenue value (TOML key `value`).
    value: u32,
    // Companies that currently have a station here.
    stations: HashSet<PubComId>,
    // Maximum number of stations the city can hold (TOML key `spots`).
    spots: usize,
    // Optional display name (TOML key `name`).
    name: Option<String>,
}
impl City {
    /// Reads a city from its TOML table.
    ///
    /// # Panics
    /// Panics if `value` or `spots` is missing or not an integer, or if
    /// a present `name` is not a string. (Integers are narrowed with
    /// `as`, so out-of-range TOML values silently truncate.)
    pub(crate) fn from_toml(toml: &Value) -> Self {
        let value = toml
            .get("value")
            .expect(VALUE_MISSING)
            .as_integer()
            .expect(VALUE_TYPEERROR) as u32;
        let spots = toml
            .get("spots")
            .expect(SPOTS_MISSING)
            .as_integer()
            .expect(SPOTS_TYPEERROR) as usize;
        let name = toml
            .get("name")
            .map(|n| n.as_str().expect(NAME_TYPEERROR).to_string());
        Self {
            value,
            stations: HashSet::new(),
            spots,
            name,
        }
    }
    /// Places a station for `pub_com` in this city.
    ///
    /// # Panics
    /// Panics if the city has no free spot or if `pub_com` already has
    /// a station here.
    pub(crate) fn place_station(&mut self, pub_com: PubComId) {
        if self.stations.len() < self.spots && !self.stations.contains(&pub_com) {
            self.stations.insert(pub_com);
        } else {
            panic!(ACTION_FORBIDDEN);
        }
    }
}
/// Represents an off-board location
#[derive(Clone, Debug)]
pub struct Location {
    // The location's four revenue values (TOML key `values`).
    values: (u32, u32, u32, u32),
    // At most one company can place a station here.
    station: Option<PubComId>,
    // Display name (TOML key `name`, mandatory here unlike for City).
    name: String,
}
impl Location {
    /// Reads an off-board location from its TOML table.
    ///
    /// # Panics
    /// Panics if `values` is missing, not an array, not of length four,
    /// or contains non-integers; or if `name` is missing or not a
    /// string. (Integers are narrowed with `as` and may truncate.)
    pub(crate) fn from_toml(toml: &Value) -> Self {
        let values = toml
            .get("values")
            .expect(VALUES_MISSING)
            .as_array()
            .expect(VALUES_TYPEERROR);
        if values.len() != 4 {
            panic!(VALUES_LENERROR);
        }
        let values = (
            values[0].as_integer().expect(VALUE_TYPEERROR) as u32,
            values[1].as_integer().expect(VALUE_TYPEERROR) as u32,
            values[2].as_integer().expect(VALUE_TYPEERROR) as u32,
            values[3].as_integer().expect(VALUE_TYPEERROR) as u32,
        );
        let name = toml
            .get("name")
            .expect(NAME_MISSING)
            .as_str()
            .expect(NAME_TYPEERROR);
        Self {
            values,
            station: None,
            name: name.to_string(),
        }
    }
    /// Places a station for `pub_com` at this location.
    ///
    /// # Panics
    /// Panics if the single station slot is already taken.
    pub(crate) fn place_station(&mut self, pub_com: PubComId) {
        if self.station.is_none() {
            self.station = Some(pub_com);
        } else {
            panic!(ACTION_FORBIDDEN);
        }
    }
}
|
use tui::{buffer::Buffer, layout::Rect, widgets::Widget};
/// Widget adaptor that blanks its whole area before drawing the wrapped
/// widget on the clean slate.
pub struct Clear<T: Widget>(T);
impl<T: Widget> Clear<T> {
    /// Wraps `w` so its area is cleared prior to drawing.
    pub fn new(w: T) -> Self {
        Self(w)
    }
}
impl<T: Widget> Widget for Clear<T> {
    fn draw(&mut self, area: Rect, buf: &mut Buffer) {
        // Degenerate areas (narrower or shorter than 2 cells) are
        // skipped entirely: nothing is cleared, nothing is drawn.
        if area.width < 2 || area.height < 2 {
            return;
        }
        // Reset every cell in the area to its default state.
        for row in area.top()..area.bottom() {
            for col in area.left()..area.right() {
                buf.get_mut(col, row).reset();
            }
        }
        // Then delegate rendering to the inner widget.
        self.0.draw(area, buf);
    }
}
|
/**
* Copyright © 2019
* Sami Shalayel <sami.shalayel@tutamail.com>,
* Carl Schwan <carl@carlschwan.eu>,
* Daniel Freiermuth <d_freiermu14@cs.uni-kl.de>
*
* This work is free. You can redistribute it and/or modify it under the
* terms of the Do What The Fuck You Want To Public License, Version 2,
* as published by Sam Hocevar. See the LICENSE file for more details.
*
* This program is free software. It comes without any warranty, to
* the extent permitted by applicable law. You can redistribute it
* and/or modify it under the terms of the Do What The Fuck You Want
* To Public License, Version 2, as published by Sam Hocevar. See the LICENSE
* file for more details. **/
use std::fmt;
use std::fmt::Display;
use std::fmt::Formatter;
use std::io;
// Unified error type for this crate: wraps OBJ parse failures, I/O
// errors, system-clock errors, and free-form string errors.
#[derive(Debug)]
pub enum Error {
    ParseError(wavefront_obj::ParseError),
    Io(io::Error),
    Time(std::time::SystemTimeError),
    Error(String),
}
impl Display for Error {
fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
match *self {
Error::Io(ref error) => error.fmt(formatter),
Error::ParseError(ref error) => error.message.fmt(formatter),
Error::Error(ref error) => error.fmt(formatter),
Error::Time(ref error) => error.fmt(formatter),
}
}
}
// The `From` impls below let `?` convert each underlying error type
// into `Error` automatically.
impl From<wavefront_obj::ParseError> for Error {
    fn from(error: wavefront_obj::ParseError) -> Self {
        Error::ParseError(error)
    }
}
impl From<std::time::SystemTimeError> for Error {
    fn from(error: std::time::SystemTimeError) -> Self {
        Error::Time(error)
    }
}
impl From<io::Error> for Error {
    fn from(error: io::Error) -> Self {
        Error::Io(error)
    }
}
impl From<String> for Error {
    fn from(error: String) -> Self {
        Error::Error(error)
    }
}
impl From<&str> for Error {
    fn from(error: &str) -> Self {
        Error::Error(String::from(error))
    }
}
|
// This is the file that gets imported when using 'mod world;'
// re-export modules so they are visible to modules using this one.
pub mod geometry;
pub mod items;
pub mod world; |
#![feature(nll)]
#![feature(integer_atomics)]
extern crate gtk;
#[macro_use]
extern crate relm;
#[macro_use]
extern crate relm_derive;
extern crate crossbeam_channel;
#[macro_use]
extern crate failure;
extern crate packt_core;
extern crate tokio;
extern crate tokio_core;
extern crate tokio_io;
extern crate tokio_process;
mod view;
fn main() {
    // Start the relm event loop on the main window component; `()` is
    // the argument passed to the component's model constructor. Panics
    // if GUI startup fails.
    relm::run::<view::Win>(()).unwrap();
}
|
extern crate rand;
use go::GoGame;
use mcts::Controller;
use std::collections;
use go::Stone;
use go::Vertex;
extern crate time;
// GTP (Go Text Protocol) engine: maps command names to handler
// functions and owns the game, search controller, and RNG state.
pub struct Engine {
    game: GoGame,
    controller: Controller,
    rng: rand::StdRng,
    // Standard GTP commands, keyed by command name.
    commands: collections::HashMap<String, fn(&mut Engine, Vec<&str>) -> Result<String, String> >,
    // Extra commands exposed to GoGui via gogui-analyze_commands.
    analyze_commands: collections::HashMap<String, fn(&mut Engine, Vec<&str>) -> Result<String, String> >,
    // Cleared by the `quit` command; the caller's loop should stop then.
    pub running: bool,
}
impl Engine {
    // Creates an engine on a 9x9 board and registers all handlers.
    pub fn new(rng: rand::StdRng) -> Engine {
        let mut analyze_commands: collections::HashMap<String, fn(&mut Engine, Vec<&str>) -> Result<String, String> > = collections::HashMap::new();
        let mut commands: collections::HashMap<String, fn(&mut Engine, Vec<&str>) -> Result<String, String> > = collections::HashMap::new();
        commands.insert("quit".to_string(), Engine::quit);
        commands.insert("name".to_string(), Engine::name);
        commands.insert("boardsize".to_string(), Engine::boardsize);
        commands.insert("version".to_string(), Engine::version);
        commands.insert("clear_board".to_string(), Engine::clear_board);
        commands.insert("list_commands".to_string(), Engine::list_commands);
        commands.insert("genmove".to_string(), Engine::genmove);
        commands.insert("play".to_string(), Engine::play);
        commands.insert("gogui-analyze_commands".to_string(), Engine::gogui_analyze_commands);
        analyze_commands.insert("move_values".to_string(), Engine::move_values);
        Engine {
            game: GoGame::new(9),
            controller: Controller::new(),
            rng: rng,
            commands: commands,
            analyze_commands: analyze_commands,
            running: true,
        }
    }
    // Dispatches one protocol line: "= <msg>" on success, "? <msg>" on
    // failure, per GTP conventions.
    pub fn execute(&mut self, command: String) -> String {
        let args = command.split(" ").collect::<Vec<_>>();
        // NOTE(review): split(" ") never yields an empty Vec (an empty
        // command produces [""]), so this guard cannot fire; empty input
        // falls through to "unknown command" instead.
        if args.is_empty() {
            return "? must specify a command".to_string();
        }
        let res = if self.commands.contains_key(args[0]) {
            self.commands[args[0]](self, args)
        } else if self.analyze_commands.contains_key(args[0]) {
            self.analyze_commands[args[0]](self, args)
        } else {
            Err("unknown command".to_string())
        };
        match res {
            Ok(msg) => format!("= {}", msg),
            Err(msg) => format!("? {}", msg),
        }
    }
    // "play C V": plays stone color C at vertex V.
    fn play(&mut self, args: Vec<&str>) -> Result<String, String> {
        if args.len() != 3 {
            return Err("expected: play C V".to_string());
        }
        self.game.play(try!(args[1].parse::<Stone>()), try!(args[2].parse::<Vertex>()));
        info!("new board:\n{:?}", self.game);
        Ok("".to_string())
    }
    // "genmove C": runs the Monte Carlo search for color C, plays the
    // chosen move, and returns its vertex.
    fn genmove(&mut self, args: Vec<&str>) -> Result<String, String> {
        if args.len() != 2 {
            return Err("expected: genmove C".to_string());
        }
        let start = time::PreciseTime::now();
        let color = try!(args[1].parse::<Stone>());
        self.game.to_play = color;
        let num_simulations = 10000;
        let v = self.controller.gen_move(&self.game, num_simulations, &mut self.rng);
        self.game.play(color, v);
        let duration = start.to(time::PreciseTime::now());
        info!("generate move in {} ({:.2} kpps)\n{:?}", duration,
              num_simulations as f64 / duration.num_milliseconds() as f64, self.game);
        Ok(format!("{}", v))
    }
    // GoGui analyze command; the visualization body is currently
    // disabled and returns an empty result.
    fn move_values(&mut self, _: Vec<&str>) -> Result<String, String> {
        // let num_simulations = 100000;
        // self.controller.gen_move(&self.game, num_simulations, &mut self.rng);
        let mut res = "".to_string();
        // for c in self.controller.root.children.iter() {
        //   res.push_str(&format!("COLOR #0000{:02x} {}\n",
        //     (c.uct(num_simulations) * 255f64) as u8, c.vertex));
        //   res.push_str(&format!("LABEL {} {}\n", c.vertex,
        //     (c.uct(num_simulations) * 100f64) as u8));
        // }
        Ok(res)
    }
    // "quit": signals the main loop to stop.
    fn quit(&mut self, _: Vec<&str>) -> Result<String, String> {
        self.running = false;
        Ok("".to_string())
    }
    fn version(&mut self, _: Vec<&str>) -> Result<String, String> {
        Ok("1".to_string())
    }
    fn name(&mut self, _: Vec<&str>) -> Result<String, String> {
        Ok("ErGo".to_string())
    }
    // "clear_board": resets the current game, keeping the board size.
    fn clear_board(&mut self, _: Vec<&str>) -> Result<String, String> {
        self.game.reset();
        Ok("".to_string())
    }
    // "boardsize N": starts a fresh game on an NxN board.
    fn boardsize(&mut self, args: Vec<&str>) -> Result<String, String> {
        if args.len() != 2 {
            return Err("expected: boardsize N".to_string());
        }
        let n = args[1].parse::<usize>();
        if n.is_err() {
            return Err(format!("expected integer, got '{}'", args[1]));
        }
        self.game = GoGame::new(n.unwrap());
        Ok("".to_string())
    }
    // Space-separated list of supported commands.
    // NOTE(review): `connect` was renamed `join` in later Rust; this
    // file targets an old toolchain (see also `try!` and rand::StdRng).
    fn list_commands(&mut self, _: Vec<&str>) -> Result<String, String> {
        Ok(self.commands.keys().map(|s| s.clone())
           .collect::<Vec<String>>().connect(" "))
    }
    // Analyze-command listing in GoGui's "gfx/<name>/<command>" format.
    fn gogui_analyze_commands(&mut self, _: Vec<&str>) -> Result<String, String> {
        Ok(self.analyze_commands.keys().map(|s| format!("gfx/{}/{}", s, s))
           .collect::<Vec<String>>().connect(" "))
    }
}
|
// Copyright 2019. The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//! This module is responsible for handling logic responsible for storing the blockchain state.
//!
//! It is structured in such a way that clients (e.g. base nodes) can configure the various components of the state
//! (kernels, utxos, etc) in whichever way they like. It's possible to have the UTXO set in memory, and the kernels
//! backed by LMDB, while the merkle trees are stored in flat files for example.
mod blockchain_database;
mod consts;
mod db_transaction;
mod error;
mod historical_block;
mod lmdb_db;
mod memory_db;
mod metadata;
// public modules
pub mod async_db;
// Public API exports
pub use blockchain_database::{
calculate_mmr_roots,
fetch_header,
fetch_headers,
fetch_target_difficulties,
is_stxo,
is_utxo,
BlockAddResult,
BlockchainBackend,
BlockchainDatabase,
BlockchainDatabaseConfig,
MutableMmrState,
Validators,
};
pub use db_transaction::{DbKey, DbKeyValuePair, DbTransaction, DbValue, MetadataKey, MetadataValue, MmrTree};
pub use error::ChainStorageError;
pub use historical_block::HistoricalBlock;
pub use lmdb_db::{
create_lmdb_database,
LMDBDatabase,
LMDB_DB_BLOCK_HASHES,
LMDB_DB_HEADERS,
LMDB_DB_KERNELS,
LMDB_DB_KERNEL_MMR_CP_BACKEND,
LMDB_DB_METADATA,
LMDB_DB_ORPHANS,
LMDB_DB_RANGE_PROOF_MMR_CP_BACKEND,
LMDB_DB_STXOS,
LMDB_DB_UTXOS,
LMDB_DB_UTXO_MMR_CP_BACKEND,
};
pub use memory_db::MemoryDatabase;
pub use metadata::ChainMetadata;
|
/// Rust-basics tutorial: mutability, constants, shadowing, and scalar
/// data types. Comments translated from Korean to English; typos fixed
/// in the output strings ("MAX_PONTS" -> "MAX_POINTS") and in the
/// `shdowing` -> `shadowing` local binding.
fn main() {
    // mut: makes a binding mutable (reassignable).
    let mut mutability = 32;
    println!("The value of mutability : {}", mutability);
    mutability = 6;
    println!("The value of mutability : {}", mutability);
    println!("");
    // const
    /*
    Differences between constants and immutable variables:

    1. Constants cannot be shadowed:
        let a = 1;
        let a = a*a;
        const b: u32 = 3;
        const b: u32 = b*b; // error[E0428]

    2. Constants can be declared at global scope:
        let version = "1.0.0"; // error
        const VERSION: &'static str = "1.0.0";
        fn main() {
            ...
        }

    3. Constants require an explicit type annotation:
        let inch = 23.3
        const E = 2.71; // error
        const PI: f64 = 3.14;

    4. Constants can only be initialized with constant expressions
       (function return values and other values determined at runtime
       are not allowed):
        fn plus_one(x: i32) -> i32 {
            x + 1
        }
        const fn plus_one_const(x: i32) -> i32 {
            x + 1
        }
        fn main() {
            const N1: i32 = plus_one(3); // error
            const N2: i32 = plus_one_const(3);
        }
    */
    // const
    const MAX_POINTS: u32 = 100_000;
    println!("The MAX_POINTS : {}", MAX_POINTS);
    println!("");
    // Shadowing: re-declaring a name with `let` creates a new binding.
    let shadowing = 4;
    let shadowing = shadowing + 1;
    let shadowing = shadowing * 2;
    println!("The value of shadowing : {}", shadowing);
    let spaces = " "; // a string-typed binding
    let spaces = spaces.len(); // a new numeric binding reusing the same name
    println!("The spaces : {}", spaces);
    println!("");
    // Data Types
    let scalar_string = "TutorialsPoint"; // string type
    let scalar_float = 4.5; // float type
    let scalar_boolean = true; // boolean type
    let scalar_char = '♥'; //unicode character type
    println!("scalar_string : {}",scalar_string);
    println!("scalar_float : {}",scalar_float);
    println!("scalar_boolean : {}",scalar_boolean);
    println!("scalar_char : {}",scalar_char);
    println!("");
    /*
    Scalar types
        Integers
            unsigned 32-bit: u32
            signed 32-bit: i32
            e.g. u8 stores 0 to 2^8 - 1, i.e. 0 to 255
        Integer literals
            Decimal
            Hex ex) 0xff
            Octal ex) 0o77
            Binary ex) 0b1111_0000
            Byte (u8 only) ex) b'A'
        Floating point
            f32 : 32bit
            f64 : 64bit
        Boolean
            let t = true;
            let t:bool = false;
        Characters
            let c = 'z';
            let z = 'Z';
            let black_heart = '🖤';
    */
    // Tuple / destructuring examples, kept commented out as in the
    // original:
    // let tuples: (i32, f64, u8) = (500, 5.3, 1);
    // let (tuples_i32, tuples_f64, tuples_u8) = tuples;
    // println!("tuples_f64 : {}", tuples_f64);
    // let destructuring_tuples: (i32, f64, u8) = (500, 5.3, 1);
    // let destructuring_tuples_i32 = destructuring_tuples.0;
    // let destructuring_tuples_f64 = destructuring_tuples.1;
    // let destructuring_tuples_u8 = destructuring_tuples.2;
    // println!("destructuring_tuples_i32 : {}", destructuring_tuples_i32);
    // println!("destructuring_tuples_f64 : {}", destructuring_tuples_f64);
    // println!("destructuring_tuples_u8 : {}", destructuring_tuples_u8);
}
#[doc = r" Value read from the register"]
pub struct R {
    // Raw bits captured from the INPUT register when `read` was called.
    bits: u32,
}
impl super::INPUT {
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        // Take a point-in-time snapshot of the register's raw value and
        // wrap it in the typed reader.
        R {
            bits: self.register.get(),
        }
    }
}
#[doc = r" Value of the field"]
pub struct AIN0R {
    bits: bool,
}
impl AIN0R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
#[doc = r" Value of the field"]
pub struct AIN1R {
    bits: bool,
}
impl AIN1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
#[doc = r" Value of the field"]
pub struct AIN2R {
    bits: bool,
}
impl AIN2R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
#[doc = r" Value of the field"]
pub struct AIN3R {
    bits: bool,
}
impl AIN3R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
#[doc = r" Value of the field"]
pub struct SIN0R {
    bits: bool,
}
impl SIN0R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
#[doc = r" Value of the field"]
pub struct SIN1R {
    bits: bool,
}
impl SIN1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
#[doc = r" Value of the field"]
pub struct SIN2R {
    bits: bool,
}
impl SIN2R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
}
/// Value of the field
pub struct SIN3R {
    bits: bool,
}
impl SIN3R {
    /// Raw bit value of the field.
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// `true` when the bit reads as 0.
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// `true` when the bit reads as 1.
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    /// Extracts the single register bit at `offset` as a bool.
    #[inline]
    fn bit_at(&self, offset: u8) -> bool {
        ((self.bits >> offset) & 1) != 0
    }
    #[doc = "Bit 0 - Real-time status of input 0."]
    #[inline]
    pub fn ain0(&self) -> AIN0R {
        AIN0R { bits: self.bit_at(0) }
    }
    #[doc = "Bit 1 - Real-time status of input 1."]
    #[inline]
    pub fn ain1(&self) -> AIN1R {
        AIN1R { bits: self.bit_at(1) }
    }
    #[doc = "Bit 2 - Real-time status of input 2."]
    #[inline]
    pub fn ain2(&self) -> AIN2R {
        AIN2R { bits: self.bit_at(2) }
    }
    #[doc = "Bit 3 - Real-time status of input 3."]
    #[inline]
    pub fn ain3(&self) -> AIN3R {
        AIN3R { bits: self.bit_at(3) }
    }
    #[doc = "Bit 16 - Input 0 state synchronized to the SCT clock."]
    #[inline]
    pub fn sin0(&self) -> SIN0R {
        SIN0R { bits: self.bit_at(16) }
    }
    #[doc = "Bit 17 - Input 1 state synchronized to the SCT clock."]
    #[inline]
    pub fn sin1(&self) -> SIN1R {
        SIN1R { bits: self.bit_at(17) }
    }
    #[doc = "Bit 18 - Input 2 state synchronized to the SCT clock."]
    #[inline]
    pub fn sin2(&self) -> SIN2R {
        SIN2R { bits: self.bit_at(18) }
    }
    #[doc = "Bit 19 - Input 3 state synchronized to the SCT clock."]
    #[inline]
    pub fn sin3(&self) -> SIN3R {
        SIN3R { bits: self.bit_at(19) }
    }
}
|
use std::collections::BTreeMap;
/// Parses each input line as an i32 and runs both puzzle parts.
pub fn solve(input: Vec<&str>, result: i32) {
    let numbers: Vec<i32> = input
        .iter()
        .map(|line| line.parse().unwrap())
        .collect();
    part1(&numbers, result);
    part2(&numbers, result);
}
/// Prints the first pair of entries summing to `result`.
///
/// Takes `&[i32]` instead of `&Vec<i32>` (clippy `ptr_arg`); call
/// sites passing `&Vec<i32>` still compile via deref coercion.
/// Note: the same element may pair with itself, matching the
/// original behavior.
fn part1(list: &[i32], result: i32) {
    for i in list.iter() {
        for j in list.iter() {
            if (i + j) == result {
                println!("{} + {} = {}", i, j, result);
                return;
            }
        }
    }
}
/// Prints the first triple of entries summing to `result`.
///
/// Bug fix: the missing third value was computed as `2020 - sum`
/// with a hardcoded 2020 instead of the `result` parameter, so any
/// call with `result != 2020` printed a wrong triple. Also takes
/// `&[i32]` instead of `&Vec<i32>` (clippy `ptr_arg`).
fn part2(list: &[i32], result: i32) {
    // Values seen so far (plus the current `i`) are lookup candidates.
    let mut map: BTreeMap<i32, i32> = BTreeMap::new();
    for i in list.iter() {
        map.insert(*i, 1);
        for j in list.iter() {
            let sum = i + j;
            // The third value that would complete the target sum.
            let key = result - sum;
            if map.contains_key(&key) {
                println!("{} + {} + {} = {}", i, j, key, result);
                return;
            }
        }
    }
}
|
//! Port controller (PIO)
//!
//! Size: 1K
//!
//! PxY_Select variants mapped to alt functions:
//! * 000 (U0): input
//! * 001 (U1): output
//! * 010 (U2): AF0
//! * 011 (U3): AF1
//! * 100 (U4): AF2
//! * 101 (U5): AF3
//! * 110 (U6): AF4
//! * 111 (U7): disabled
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
use static_assertions::const_assert_eq;
/// Physical base address of the PIO register block.
pub const PADDR: usize = 0x01C2_0800;
// Pin-function select for pins 0-7: one 3-bit field per pin, 4-bit stride
// (see module docs for the U0-U7 variant mapping).
register! {
    Config0,
    u32,
    RW,
    Fields [
        Pin0 WIDTH(U3) OFFSET(U0) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin1 WIDTH(U3) OFFSET(U4) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin2 WIDTH(U3) OFFSET(U8) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin3 WIDTH(U3) OFFSET(U12) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin4 WIDTH(U3) OFFSET(U16) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin5 WIDTH(U3) OFFSET(U20) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin6 WIDTH(U3) OFFSET(U24) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin7 WIDTH(U3) OFFSET(U28) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
    ]
}
// Pin-function select for pins 8-15: one 3-bit field per pin, 4-bit stride.
register! {
    Config1,
    u32,
    RW,
    Fields [
        Pin8 WIDTH(U3) OFFSET(U0) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin9 WIDTH(U3) OFFSET(U4) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin10 WIDTH(U3) OFFSET(U8) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin11 WIDTH(U3) OFFSET(U12) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin12 WIDTH(U3) OFFSET(U16) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin13 WIDTH(U3) OFFSET(U20) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin14 WIDTH(U3) OFFSET(U24) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
        Pin15 WIDTH(U3) OFFSET(U28) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
    ]
}
// Third config word: only pin 16 is exposed here.
register! {
    Config2,
    u32,
    RW,
    Fields [
        Pin16 WIDTH(U3) OFFSET(U0) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
    ]
}
// Fourth config word: only pin 24 is exposed here.
register! {
    Config3,
    u32,
    RW,
    Fields [
        Pin24 WIDTH(U3) OFFSET(U0) [
            Input = U0,
            Output = U1,
            Af0 = U2,
            Af1 = U3,
            Af2 = U4,
            Af3 = U5,
            Af4 = U6,
            Disabled = U7
        ]
    ]
}
// One data bit per pin, pins 0-24.
register! {
    Data,
    u32,
    RW,
    Fields [
        Pin0 WIDTH(U1) OFFSET(U0),
        Pin1 WIDTH(U1) OFFSET(U1),
        Pin2 WIDTH(U1) OFFSET(U2),
        Pin3 WIDTH(U1) OFFSET(U3),
        Pin4 WIDTH(U1) OFFSET(U4),
        Pin5 WIDTH(U1) OFFSET(U5),
        Pin6 WIDTH(U1) OFFSET(U6),
        Pin7 WIDTH(U1) OFFSET(U7),
        Pin8 WIDTH(U1) OFFSET(U8),
        Pin9 WIDTH(U1) OFFSET(U9),
        Pin10 WIDTH(U1) OFFSET(U10),
        Pin11 WIDTH(U1) OFFSET(U11),
        Pin12 WIDTH(U1) OFFSET(U12),
        Pin13 WIDTH(U1) OFFSET(U13),
        Pin14 WIDTH(U1) OFFSET(U14),
        Pin15 WIDTH(U1) OFFSET(U15),
        Pin16 WIDTH(U1) OFFSET(U16),
        Pin17 WIDTH(U1) OFFSET(U17),
        Pin18 WIDTH(U1) OFFSET(U18),
        Pin19 WIDTH(U1) OFFSET(U19),
        Pin20 WIDTH(U1) OFFSET(U20),
        Pin21 WIDTH(U1) OFFSET(U21),
        Pin22 WIDTH(U1) OFFSET(U22),
        Pin23 WIDTH(U1) OFFSET(U23),
        Pin24 WIDTH(U1) OFFSET(U24),
    ]
}
// Two-bit drive level (Level0..Level3) per pin, pins 0-15.
register! {
    Driv0,
    u32,
    RW,
    Fields [
        Pin0 WIDTH(U2) OFFSET(U0) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin1 WIDTH(U2) OFFSET(U2) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin2 WIDTH(U2) OFFSET(U4) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin3 WIDTH(U2) OFFSET(U6) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin4 WIDTH(U2) OFFSET(U8) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin5 WIDTH(U2) OFFSET(U10) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin6 WIDTH(U2) OFFSET(U12) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin7 WIDTH(U2) OFFSET(U14) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin8 WIDTH(U2) OFFSET(U16) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin9 WIDTH(U2) OFFSET(U18) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin10 WIDTH(U2) OFFSET(U20) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin11 WIDTH(U2) OFFSET(U22) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin12 WIDTH(U2) OFFSET(U24) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin13 WIDTH(U2) OFFSET(U26) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin14 WIDTH(U2) OFFSET(U28) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
        Pin15 WIDTH(U2) OFFSET(U30) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
    ]
}
// Drive level, second word: only pin 16 is exposed here.
register! {
    Driv1,
    u32,
    RW,
    Fields [
        Pin16 WIDTH(U2) OFFSET(U0) [
            Level0 = U0,
            Level1 = U1,
            Level2 = U2,
            Level3 = U3
        ]
    ]
}
// Pull-up / pull-down select (2 bits per pin), pins 0-15.
register! {
    Pull0,
    u32,
    RW,
    Fields [
        Pin0 WIDTH(U2) OFFSET(U0) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin1 WIDTH(U2) OFFSET(U2) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin2 WIDTH(U2) OFFSET(U4) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin3 WIDTH(U2) OFFSET(U6) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin4 WIDTH(U2) OFFSET(U8) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin5 WIDTH(U2) OFFSET(U10) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin6 WIDTH(U2) OFFSET(U12) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin7 WIDTH(U2) OFFSET(U14) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin8 WIDTH(U2) OFFSET(U16) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin9 WIDTH(U2) OFFSET(U18) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin10 WIDTH(U2) OFFSET(U20) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin11 WIDTH(U2) OFFSET(U22) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin12 WIDTH(U2) OFFSET(U24) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin13 WIDTH(U2) OFFSET(U26) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin14 WIDTH(U2) OFFSET(U28) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
        Pin15 WIDTH(U2) OFFSET(U30) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
    ]
}
// Pull select, second word: only pin 16 is exposed here.
register! {
    Pull1,
    u32,
    RW,
    Fields [
        Pin16 WIDTH(U2) OFFSET(U0) [
            Disabled = U0,
            PullUp = U1,
            PullDown = U2
        ]
    ]
}
// Compile-time check: the struct must span exactly 0x90 bytes
// (last register, pd_pull1, sits at 0x8C).
const_assert_eq!(core::mem::size_of::<RegisterBlock>(), 0x90);
/// Memory-mapped PIO register layout for ports B, C and D.
/// `__reserved_*` fields pad the gaps between register groups so every
/// register lands at the offset noted in its trailing comment.
#[repr(C)]
pub struct RegisterBlock {
    __reserved_0: [u32; 9],         // 0x00
    pub pb_cfg0: Config0::Register, // 0x24
    pub pb_cfg1: Config1::Register, // 0x28
    __reserved_1: u32,              // 0x2C
    __reserved_2: u32,              // 0x30
    pub pb_data: Data::Register,    // 0x34
    pub pb_driv0: Driv0::Register,  // 0x38
    __reserved_3: u32,              // 0x3C
    pub pb_pull0: Pull0::Register,  // 0x40
    __reserved_4: u32,              // 0x44
    pub pc_cfg0: Config0::Register, // 0x48
    pub pc_cfg1: Config1::Register, // 0x4C
    pub pc_cfg2: Config2::Register, // 0x50
    __reserved_5: u32,              // 0x54
    pub pc_data: Data::Register,    // 0x58
    pub pc_driv0: Driv0::Register,  // 0x5C
    pub pc_driv1: Driv1::Register,  // 0x60
    pub pc_pull0: Pull0::Register,  // 0x64
    pub pc_pull1: Pull1::Register,  // 0x68
    pub pd_cfg0: Config0::Register, // 0x6C
    pub pd_cfg1: Config1::Register, // 0x70
    pub pd_cfg2: Config2::Register, // 0x74
    pub pd_cfg3: Config3::Register, // 0x78
    pub pd_data: Data::Register,    // 0x7C
    pub pd_driv0: Driv0::Register,  // 0x80
    pub pd_driv1: Driv1::Register,  // 0x84
    pub pd_pull0: Pull0::Register,  // 0x88
    pub pd_pull1: Pull1::Register,  // 0x8C
}
/// Zero-sized handle to the memory-mapped PIO peripheral at `PADDR`.
pub struct PIO {
    // Raw-pointer marker keeps the type !Send/!Sync by default;
    // Send is re-added explicitly below.
    _marker: PhantomData<*const ()>,
}
// SAFETY: the handle owns no data; all access is through the fixed
// MMIO address. NOTE(review): nothing here prevents two handles from
// aliasing the peripheral — callers must uphold exclusivity.
unsafe impl Send for PIO {}
impl PIO {
    /// Creates a handle to the peripheral.
    ///
    /// # Safety
    /// Caller must ensure `PADDR` is a valid, mapped MMIO region and
    /// that aliasing handles are not used concurrently.
    pub unsafe fn from_paddr() -> Self {
        Self {
            _marker: PhantomData,
        }
    }
    /// Shared pointer to the register block.
    pub fn as_ptr(&self) -> *const RegisterBlock {
        PADDR as *const _
    }
    /// Shared pointer without a handle; see `from_paddr` for the contract.
    pub const unsafe fn ptr() -> *const RegisterBlock {
        PADDR as *const _
    }
    /// Mutable pointer to the register block.
    pub fn as_mut_ptr(&mut self) -> *mut RegisterBlock {
        PADDR as *mut _
    }
    /// Mutable pointer without a handle; see `from_paddr` for the contract.
    pub const unsafe fn mut_ptr() -> *mut RegisterBlock {
        PADDR as *mut _
    }
}
impl Deref for PIO {
    type Target = RegisterBlock;
    fn deref(&self) -> &RegisterBlock {
        // SAFETY: PADDR is assumed mapped for the program's lifetime.
        unsafe { &*self.as_ptr() }
    }
}
impl DerefMut for PIO {
    fn deref_mut(&mut self) -> &mut RegisterBlock {
        // SAFETY: exclusive &mut self guards the mutable reborrow.
        unsafe { &mut *self.as_mut_ptr() }
    }
}
|
extern crate rayon;
extern crate clap;
extern crate atty;
extern crate regex;
extern crate grusp_core;
pub mod args;
use rayon::prelude::*;
use std::path::PathBuf;
use std::io::BufReader;
use std::fs::File;
use std::io::stdin;
use grusp_core::grusp;
/// CLI entry point: matches a regex against files or stdin, mirroring
/// grep's exit-status convention (exit 1 when nothing matched).
fn main() {
    // Parse CLI options; report and exit nonzero on bad arguments.
    let opts = match args::get_opts() {
        Ok(o) => o,
        Err(e) => {
            println!("{:?}", e);
            std::process::exit(1);
        }
    };
    // Keep matched line text only when it will actually be printed
    // (not needed for count-only or file-name-only output).
    let matcher = grusp::Matcher::new(&opts.regex)
        .keep_lines(!(opts.just_files.is_some() || opts.is_count_only))
        .invert_match(opts.is_inverted);
    if let Some(ref queries) = opts.queries {
        // File mode: walk the queried paths and match each file.
        let stats = grusp::StatCollector::new();
        let files = grusp::FileCollector::new(&queries).max_depth(opts.max_depth).collect();
        let has_files = !files.is_empty();
        if opts.is_concurrent {
            files
                .into_par_iter()
                .for_each(|p| {
                    match_file(p, &opts, &matcher, &stats)
                });
        } else {
            files
                .into_iter()
                .for_each(|p| {
                    match_file(p, &opts, &matcher, &stats)
                });
        };
        // Exit 1 on zero matches, except when listing files *without*
        // matches (there, empty stats with files present is success).
        if stats.total() == 0 && !(has_files && opts.just_files.without_matches()) {
            std::process::exit(1);
        }
    } else {
        // Stdin mode: no file names, so line numbers are disabled.
        let stdin = stdin();
        let mut reader = stdin.lock();
        let matches = matcher
            .with_line_numbers(false)
            .collect(&mut reader)
            .expect("Could not parse stdin");
        if matches.has_matches() {
            println!(
                "{}",
                grusp::Display::new(matches)
                    .count_only(opts.is_count_only)
                    .color(opts.is_colored)
                    .just_file_names(opts.just_files.is_some())
            );
        } else {
            std::process::exit(1);
        }
    }
}
/// Matches one file against `matcher`, records the outcome in `stats`,
/// and prints it according to the output options.
///
/// Fix: an unopenable file (permissions, deleted mid-walk, …) used to
/// panic via `unwrap`, killing the whole search; it is now reported to
/// stderr and skipped.
fn match_file(path: PathBuf,
              opts: &args::Opts,
              matcher: &grusp::Matcher,
              stats: &grusp::StatCollector) {
    let handle = match File::open(&path) {
        Ok(h) => h,
        Err(e) => {
            eprintln!("Could not open {}: {}", path.display(), e);
            return;
        }
    };
    let mut reader = BufReader::new(handle);
    let matches = matcher
        .collect(&mut reader)
        .expect("Could not parse file")
        .add_path(&path);
    stats.add(&matches);
    // Print when the file matched (normal mode) or, with the
    // without-matches listing, when it did not.
    if (matches.has_matches() && opts.just_files.show_matches()) ||
        (!matches.has_matches() && opts.just_files.without_matches()) {
        println!(
            "{}",
            grusp::Display::new(matches)
                .count_only(opts.is_count_only)
                .color(opts.is_colored)
                .just_file_names(opts.just_files.is_some())
        );
    }
}
|
use std::collections::{HashMap, HashSet};
use std::iter::FromIterator;
use std::str;
/* Mirror the product(repeat=n) iter from Python itertools
https://github.com/python/cpython/blob/234531b4462b20d668762bd78406fd2ebab129c9/Modules/itertoolsmodule.c#L2095
https://dev.to/naufraghi/procedural-macro-in-rust-101-k3f
this macro also looks useful:
https://stackoverflow.com/questions/45786955/how-to-compose-functions-in-rust/45792463#45792463
use composeTwo -> productTwo?
pub fn product<I, T>(iter: I, repeat: usize) -> Vec<T>
where
I: Iterator<Item = T>,
{
Vec::new()
}
*/
/// Reverse complement of a DNA string; panics on any character
/// outside {A, C, G, T}.
pub fn revc(text: &str) -> String {
    let complement = |base| match base {
        'A' => 'T',
        'C' => 'G',
        'G' => 'C',
        'T' => 'A',
        _ => panic!(),
    };
    text.chars().rev().map(complement).collect()
}
/// Number of positions at which `seq1` and `seq2` differ (positions
/// beyond the shorter sequence are ignored).
pub fn hamming(seq1: &str, seq2: &str) -> usize {
    let mut mismatches = 0;
    for (a, b) in seq1.chars().zip(seq2.chars()) {
        if a != b {
            mismatches += 1;
        }
    }
    mismatches
}
/// All strings over {A, C, G, T} within Hamming distance `d` of `pattern`.
pub fn neighbors(pattern: &str, d: usize) -> HashSet<String> {
    if d == 0 {
        let mut exact = HashSet::default();
        exact.insert(pattern.to_string());
        return exact;
    }
    if pattern.len() == 1 {
        return ["A", "C", "G", "T"].iter().map(|s| s.to_string()).collect();
    }
    let (head, tail) = pattern.split_at(1);
    let mut neighborhood = HashSet::default();
    for suffix in neighbors(tail, d) {
        if hamming(tail, &suffix) < d {
            // Mismatch budget remains: any leading base stays within d.
            for base in &["A", "C", "G", "T"] {
                neighborhood.insert(format!("{}{}", base, suffix));
            }
        } else {
            // Suffix used the whole budget: keep the original first base.
            neighborhood.insert(format!("{}{}", head, suffix));
        }
    }
    neighborhood
}
/// Iterator over every k-mer of `text`, left to right.
pub fn composition<'a>(k: usize, text: &'a str) -> impl Iterator<Item = &'a str> {
    let bytes = text.as_bytes();
    bytes.windows(k).map(|window| {
        // Windows of an ASCII DNA string never split a UTF-8 code point.
        str::from_utf8(window).unwrap()
    })
}
// TODO: build the adjacency lazily instead of materializing adj_list
// before collecting/returning the iterator.
/// Overlap graph of `text`: yields (a, b) whenever the (k-1)-suffix of
/// k-mer `a` equals the (k-1)-prefix of a *different* k-mer `b`.
pub fn overlap<'a>(text: &'a [&str]) -> impl Iterator<Item = (String, String)> + 'a {
    let mut adj_list = HashMap::with_capacity(text.len());
    for kmer in text {
        let successors: Vec<_> = text
            .iter()
            .filter(|other| *other != kmer && kmer[1..] == other[..kmer.len() - 1])
            .collect();
        adj_list.insert(kmer, successors);
    }
    adj_list.into_iter().flat_map(move |(node, successors)| {
        successors
            .into_iter()
            .map(move |succ| (node.to_string(), succ.to_string()))
    })
}
pub type Matrix = Vec<Vec<f64>>;
/// Row index of nucleotide `nt` in a profile `Matrix` (A=0, C=1, G=2, T=3).
#[inline]
pub fn nt_to_pos(nt: u8) -> usize {
    b"ACGT"
        .iter()
        .position(|&base| base == nt)
        .unwrap_or_else(|| unimplemented!())
}
/// Probability of `kmer` under profile `matrix`: the product of the
/// per-position nucleotide probabilities.
pub fn kmer_prob(kmer: &[u8], matrix: &Matrix) -> f64 {
    kmer.iter()
        .enumerate()
        .map(|(i, nt)| matrix[nt_to_pos(*nt)][i])
        .product()
}
/// The k-mer of `text` most probable under `matrix`; ties keep the
/// leftmost k-mer.
pub fn profile_most_probable(text: &str, k: usize, matrix: &Matrix) -> String {
    let mut best = text[0..k].as_bytes();
    let mut best_prob = kmer_prob(best, matrix);
    for candidate in text[1..].as_bytes().windows(k) {
        let prob = kmer_prob(candidate, matrix);
        if prob > best_prob {
            best_prob = prob;
            best = candidate;
        }
    }
    String::from_utf8_lossy(best).into()
}
/// Inverse of `nt_to_pos`: the nucleotide byte for row index `pos`.
#[inline]
pub fn pos_to_nt(pos: usize) -> u8 {
    *b"ACGT".get(pos).unwrap_or_else(|| unimplemented!())
}
/// Position frequency matrix: `matrix[nt][i]` is the fraction of
/// motifs whose i-th character is nucleotide `nt`.
pub fn profile_matrix(motifs: &[String], k: usize) -> Matrix {
    let total = motifs.len() as f64;
    let mut matrix: Matrix = vec![vec![0.0; k]; 4];
    for (nt, row) in matrix.iter_mut().enumerate() {
        for (i, cell) in row.iter_mut().enumerate() {
            let count = motifs
                .iter()
                .filter(|motif| motif.as_bytes()[i] == pos_to_nt(nt))
                .count();
            *cell = count as f64 / total;
        }
    }
    matrix
}
/// Scores `motifs` as the total Hamming distance from each motif to a
/// column-wise consensus string (a maximally frequent nucleotide per
/// column).
///
/// NOTE(review): which nucleotide is chosen on a frequency tie depends
/// on `HashMap` iteration order, so the consensus string itself is not
/// deterministic; the summed score is the same for any tied pick, so
/// the return value is.
pub fn score(motifs: &[String]) -> u64 {
    let size = motifs[0].len();
    let mut common_kmer: String = "".into();
    for i in 0..size {
        // Count nucleotide frequencies in column i, tracking the maximum.
        let mut max_freq = 0;
        let mut pos_freq: HashMap<u8, u64> = HashMap::default();
        for motif in motifs {
            let nt = motif.as_bytes()[i];
            let entry = pos_freq.entry(nt).or_insert(0);
            *entry += 1;
            max_freq = u64::max(max_freq, *entry);
        }
        // Append the first nucleotide that reaches max_freq.
        for (nt, freq) in pos_freq {
            if freq == max_freq {
                common_kmer.push(nt as char);
                break;
            }
        }
    }
    motifs
        .iter()
        .map(|kmer| hamming(&common_kmer, kmer) as u64)
        .sum()
}
/// Like `profile_matrix`, but with Laplace pseudocounts: every count
/// starts at 1 (and the denominator gains 4) so no cell is ever zero.
pub fn profile_matrix_with_pseudocounts(motifs: &[String], k: usize) -> Matrix {
    let denom = motifs.len() as f64 + 4.;
    let mut matrix: Matrix = vec![vec![0.0; k]; 4];
    for (nt, row) in matrix.iter_mut().enumerate() {
        for (i, cell) in row.iter_mut().enumerate() {
            let count = motifs
                .iter()
                .filter(|motif| motif.as_bytes()[i] == pos_to_nt(nt))
                .count() as f64;
            *cell = (count + 1.) / denom;
        }
    }
    matrix
}
|
#![allow(dead_code)]
#![allow(unused_imports)]
use crate::traits::{ReadoutError, ShellFormat};
use crate::extra;
use std::io::Error;
use std::path::Path;
use std::process::{Command, Stdio};
use std::{env, fs};
use std::{ffi::CStr, path::PathBuf};
#[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))]
use sysctl::SysctlError;
#[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))]
impl From<SysctlError> for ReadoutError {
    // Wraps sysctl failures in the crate-wide error type so `?` works.
    fn from(e: SysctlError) -> Self {
        ReadoutError::Other(format!("Could not access sysctl: {:?}", e))
    }
}
impl From<std::io::Error> for ReadoutError {
    // Allows `?` on std I/O results inside readout functions.
    fn from(e: Error) -> Self {
        ReadoutError::Other(e.to_string())
    }
}
#[cfg(any(target_os = "linux", target_os = "netbsd", target_os = "android"))]
/// System uptime in whole seconds, read from `/proc/uptime`
/// (format: "<uptime_secs> <idle_secs>"; only the first field is used).
///
/// Fix: an empty or whitespace-only `/proc/uptime` used to panic via
/// `unwrap`; it now returns a `ReadoutError` instead.
pub(crate) fn uptime() -> Result<usize, ReadoutError> {
    let uptime_file_text = fs::read_to_string("/proc/uptime")?;
    let uptime_text = uptime_file_text.split_whitespace().next().ok_or_else(|| {
        ReadoutError::Other("/proc/uptime did not contain an uptime field".to_string())
    })?;
    match uptime_text.parse::<f64>() {
        // Fractional seconds are deliberately truncated.
        Ok(s) => Ok(s as usize),
        Err(e) => Err(ReadoutError::Other(format!(
            "Could not convert '{}' to a digit: {:?}",
            uptime_text, e
        ))),
    }
}
#[cfg(any(target_os = "linux", target_os = "netbsd", target_os = "android"))]
/// Desktop environment name from `DESKTOP_SESSION` (preferred) or
/// `XDG_CURRENT_DESKTOP`, capitalized.
pub(crate) fn desktop_environment() -> Result<String, ReadoutError> {
    let wm_only = || {
        ReadoutError::Other("You appear to be only running a window manager.".to_string())
    };
    let de = env::var("DESKTOP_SESSION")
        .or_else(|_| env::var("XDG_CURRENT_DESKTOP"))
        .map_err(|_| wm_only())?;
    // An "xinitrc" session means a bare window manager, not a DE.
    if de.to_lowercase() == "xinitrc" {
        return Err(wm_only());
    }
    Ok(extra::ucfirst(de))
}
#[cfg(any(target_os = "linux", target_os = "netbsd", target_os = "android"))]
/// Name of the running window manager, via `wmctrl -m`.
///
/// Fixes: take the first output line in Rust instead of spawning an
/// external `head` process; replace a no-argument `format!` with
/// `.to_string()`; repair the garbled user-facing message ("it could
/// that" -> "it could be that").
pub(crate) fn window_manager() -> Result<String, ReadoutError> {
    if extra::which("wmctrl") {
        // Equivalent of `wmctrl -m | head -n1`, without the extra process.
        let wmctrl = Command::new("wmctrl")
            .arg("-m")
            .output()
            .expect("ERROR: failed to run \"wmctrl\" process");
        let stdout = String::from_utf8(wmctrl.stdout)
            .expect("ERROR: \"wmctrl -m\" process stdout was not valid UTF-8");
        let first_line = stdout.lines().next().unwrap_or_default();
        let window_man_name =
            extra::pop_newline(String::from(first_line.replace("Name:", "").trim()));
        if window_man_name == "N/A" || window_man_name.is_empty() {
            return Err(ReadoutError::Other(
                "Window manager not available — it could be that it is not EWMH-compliant."
                    .to_string(),
            ));
        }
        return Ok(window_man_name);
    }
    Err(ReadoutError::Other(
        "\"wmctrl\" must be installed to display your window manager.".to_string(),
    ))
}
#[cfg(target_family = "unix")]
/// Name of the terminal emulator hosting this process (grandparent
/// process's command name, capitalized).
pub(crate) fn terminal() -> Result<String, ReadoutError> {
    // The following code is the equivalent of running:
    // ps -p $(ps -p $$ -o ppid=) o comm=
    // First hop: PID of this process's parent (typically the shell).
    let ppid = Command::new("ps")
        .arg("-p")
        .arg(unsafe { libc::getppid() }.to_string())
        .arg("-o")
        .arg("ppid=")
        .output()
        .expect("ERROR: failed to start \"ps\" process")
    ;
    let terminal_ppid = String::from_utf8(ppid.stdout)
        .expect("ERROR: \"ps\" process stdout was not valid UTF-8")
        .trim()
        .to_string();
    // Second hop: the shell's parent, i.e. the terminal emulator.
    let name = Command::new("ps")
        .arg("-p")
        .arg(terminal_ppid)
        .arg("-o")
        .arg("comm=")
        .output()
        .expect("ERROR: failed to start \"ps\" output");
    let terminal_name = extra::ucfirst(
        String::from_utf8(name.stdout)
            .expect("ERROR: \"ps\" process stdout was not valid UTF-8")
            .trim(),
    );
    if terminal_name.is_empty() {
        return Err(ReadoutError::Other(String::from(
            "Terminal name was empty.",
        )));
    }
    Ok(terminal_name)
}
#[cfg(target_family = "unix")]
/// Returns the passwd record for the effective UID.
///
/// The pointer references libc's static storage: do not free it (per
/// the getpwuid(3) man page) and treat it as invalidated by any later
/// getpw* call.
///
/// Fix: use the idiomatic `!ptr.is_null()` instead of comparing
/// against `std::ptr::null_mut()`.
fn get_passwd_struct() -> Result<*mut libc::passwd, ReadoutError> {
    // SAFETY: geteuid has no preconditions.
    let uid: libc::uid_t = unsafe { libc::geteuid() };
    // Do not call free on passwd pointer according to man page.
    // SAFETY: getpwuid accepts any uid; a null return means "no entry".
    let passwd = unsafe { libc::getpwuid(uid) };
    if !passwd.is_null() {
        return Ok(passwd);
    }
    Err(ReadoutError::Other(String::from(
        "Unable to read account information.",
    )))
}
#[cfg(target_family = "unix")]
/// Login name of the current effective user, from the passwd database.
pub(crate) fn username() -> Result<String, ReadoutError> {
    let passwd = get_passwd_struct()?;
    // SAFETY: pw_name points at a valid NUL-terminated C string for the
    // lifetime of the passwd record.
    let name = unsafe { CStr::from_ptr((*passwd).pw_name) };
    match name.to_str() {
        Ok(s) => Ok(s.to_string()),
        Err(_) => Err(ReadoutError::Other(String::from(
            "Unable to read username for the current UID.",
        ))),
    }
}
#[cfg(target_family = "unix")]
/// Login shell of the current user, either as the bare program name
/// (`ShellFormat::Relative`) or the full path.
pub(crate) fn shell(shorthand: ShellFormat) -> Result<String, ReadoutError> {
    let passwd = get_passwd_struct()?;
    // SAFETY: pw_shell points at a valid NUL-terminated C string for the
    // lifetime of the passwd record.
    let shell_path = unsafe { CStr::from_ptr((*passwd).pw_shell) };
    if let Ok(s) = shell_path.to_str() {
        return Ok(match shorthand {
            ShellFormat::Relative => {
                // e.g. "/bin/bash" -> "bash"
                Path::new(s).file_stem().unwrap().to_str().unwrap().into()
            }
            _ => s.to_string(),
        });
    }
    Err(ReadoutError::Other(String::from(
        "Unable to read default shell for the current UID.",
    )))
}
#[cfg(any(target_os = "linux", target_os = "netbsd", target_os = "android"))]
/// CPU model from the first "model name" line of `/proc/cpuinfo`;
/// empty string when the file or line is unavailable.
pub(crate) fn cpu_model_name() -> String {
    use std::io::{BufRead, BufReader};
    let file = match fs::File::open("/proc/cpuinfo") {
        Ok(f) => f,
        Err(_) => return String::new(),
    };
    for line in BufReader::new(file).lines().flatten() {
        if line.starts_with("model name") {
            // "model name : Foo CPU" -> "Foo CPU"
            return line
                .replace("model name", "")
                .replace(":", "")
                .trim()
                .to_string();
        }
    }
    String::new()
}
#[cfg(any(target_os = "macos", target_os = "netbsd"))]
/// CPU usage percentage estimated from the 1-minute load average
/// divided by the logical core count.
pub(crate) fn cpu_usage() -> Result<usize, ReadoutError> {
    let mut loadavg: f64 = 0.0;
    // SAFETY: the pointer is valid for exactly the one f64 requested.
    let ret = unsafe { libc::getloadavg(&mut loadavg, 1) };
    if ret != -1 {
        if let Ok(logical_cores) = cpu_cores() {
            return Ok((loadavg / logical_cores as f64 * 100.0).round() as usize);
        }
    }
    Err(ReadoutError::Other(format!(
        "getloadavg failed with return code: {}",
        ret
    )))
}
#[cfg(target_family = "unix")]
/// Number of logical CPUs (hardware threads).
pub(crate) fn cpu_cores() -> Result<usize, ReadoutError> {
    Ok(num_cpus::get())
}
#[cfg(target_family = "unix")]
/// Number of physical CPU cores.
pub(crate) fn cpu_physical_cores() -> Result<usize, ReadoutError> {
    Ok(num_cpus::get_physical())
}
/// Obtain the value of a specified field from `/proc/meminfo` needed to
/// calculate memory usage; returns 0 when the file or field is missing.
#[cfg(any(target_os = "linux", target_os = "netbsd", target_os = "android"))]
pub(crate) fn get_meminfo_value(value: &str) -> u64 {
    use std::io::{BufRead, BufReader};
    let file = match fs::File::open("/proc/meminfo") {
        Ok(f) => f,
        Err(_) => return 0,
    };
    for line in BufReader::new(file).lines().flatten() {
        if line.starts_with(value) {
            // Keep only digits, e.g. "MemTotal: 16334604 kB" -> 16334604.
            let digits: String = line.chars().filter(|c| c.is_digit(10)).collect();
            return digits.parse::<u64>().unwrap_or(0);
        }
    }
    0
}
/// Best-effort local IP address of this machine.
pub(crate) fn local_ip() -> Result<String, ReadoutError> {
    local_ipaddress::get().ok_or_else(|| {
        ReadoutError::Other(String::from("Unable to get local IP address."))
    })
}
/// Number of entries in `$CARGO_HOME/bin`, or `None` when `CARGO_HOME`
/// is unset, the directory is missing, or it cannot be read.
pub(crate) fn count_cargo() -> Option<usize> {
    use std::fs::read_dir;
    let cargo_home = std::env::var("CARGO_HOME").ok()?;
    let cargo_bin = PathBuf::from(cargo_home).join("bin");
    if !cargo_bin.exists() {
        return None;
    }
    read_dir(cargo_bin).ok().map(|entries| entries.count())
}
|
use std::io::Write;
use std::time::Instant;
use compresstimator::Compresstimator;
/// For each path given on the command line, prints the estimated
/// compression ratio (with timing) followed by the measured one.
fn main() -> std::io::Result<()> {
    let estimator = Compresstimator::default();
    for arg in std::env::args_os().skip(1) {
        let path = std::path::PathBuf::from(arg);
        print!("{}\t", path.display());
        let est_timer = Instant::now();
        let ratio = match estimator.compresstimate_file(&path) {
            Ok(r) => r,
            Err(e) => {
                println!("Error: {}", e);
                continue;
            }
        };
        print!("Est ({:.2?}): {:.2}x\t", est_timer.elapsed(), ratio);
        // Make the estimate visible before the (slow) exact pass runs.
        std::io::stdout().flush()?;
        let truth_timer = Instant::now();
        match estimator.base_truth(&path) {
            Ok(r) => println!("Actual ({:.2?}): {:.2}x", truth_timer.elapsed(), r),
            Err(e) => println!("Error: {}", e),
        }
    }
    Ok(())
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::types::number::UInt32Type;
use common_expression::types::DataType;
use common_expression::types::NumberDataType;
use common_expression::types::ValueType;
use common_expression::BlockEntry;
use common_expression::DataBlock;
use common_expression::DataField;
use common_expression::DataSchema;
use common_expression::DataSchemaRefExt;
use common_expression::Value;
use common_meta_app::principal::UserOptionFlag;
use common_meta_app::tenant::TenantQuota;
use common_meta_types::MatchSeq;
use common_users::UserApiProvider;
use crate::procedures::OneBlockProcedure;
use crate::procedures::Procedure;
use crate::procedures::ProcedureFeatures;
use crate::sessions::QueryContext;
use crate::sessions::TableContext;
/// Procedure that reads and optionally updates a tenant's quota.
pub struct TenantQuotaProcedure;
impl TenantQuotaProcedure {
    /// Boxes this procedure for registration with the procedure factory.
    pub fn try_create() -> Result<Box<dyn Procedure>> {
        Ok(TenantQuotaProcedure {}.into_procedure())
    }
}
#[async_trait::async_trait]
impl OneBlockProcedure for TenantQuotaProcedure {
fn name(&self) -> &str {
"TENANT_QUOTA"
}
fn features(&self) -> ProcedureFeatures {
ProcedureFeatures::default()
.variadic_arguments(0, 5)
.management_mode_required(true)
}
/// args:
/// tenant_id: string
/// max_databases: u32
/// max_tables_per_database: u32
/// max_stages: u32
/// max_files_per_stage: u32
async fn all_data(&self, ctx: Arc<QueryContext>, args: Vec<String>) -> Result<DataBlock> {
let mut tenant = ctx.get_tenant();
if !args.is_empty() {
let user_info = ctx.get_current_user()?;
if !user_info.has_option_flag(UserOptionFlag::TenantSetting) {
return Err(ErrorCode::PermissionDenied(format!(
"Access denied: '{}' requires user {} option flag",
self.name(),
UserOptionFlag::TenantSetting
)));
}
tenant = args[0].clone();
}
let quota_api = UserApiProvider::instance().get_tenant_quota_api_client(&tenant)?;
let res = quota_api.get_quota(MatchSeq::GE(0)).await?;
let mut quota = res.data;
if args.len() <= 1 {
return self.to_block("a);
};
quota.max_databases = args[1].parse::<u32>()?;
if let Some(max_tables) = args.get(2) {
quota.max_tables_per_database = max_tables.parse::<u32>()?;
};
if let Some(max_stages) = args.get(3) {
quota.max_stages = max_stages.parse::<u32>()?;
};
if let Some(max_files_per_stage) = args.get(4) {
quota.max_files_per_stage = max_files_per_stage.parse::<u32>()?
};
quota_api
.set_quota("a, MatchSeq::Exact(res.seq))
.await?;
self.to_block("a)
}
fn schema(&self) -> Arc<DataSchema> {
DataSchemaRefExt::create(vec![
DataField::new("max_databases", DataType::Number(NumberDataType::UInt32)),
DataField::new(
"max_tables_per_database",
DataType::Number(NumberDataType::UInt32),
),
DataField::new("max_stages", DataType::Number(NumberDataType::UInt32)),
DataField::new(
"max_files_per_stage",
DataType::Number(NumberDataType::UInt32),
),
])
}
}
impl TenantQuotaProcedure {
    /// Renders `quota` as a single-row block matching `schema()`:
    /// four UInt32 columns in declaration order.
    fn to_block(&self, quota: &TenantQuota) -> Result<DataBlock> {
        let u32_entry = |v: u32| BlockEntry {
            data_type: DataType::Number(NumberDataType::UInt32),
            value: Value::Scalar(UInt32Type::upcast_scalar(v)),
        };
        Ok(DataBlock::new(
            vec![
                u32_entry(quota.max_databases),
                u32_entry(quota.max_tables_per_database),
                u32_entry(quota.max_stages),
                u32_entry(quota.max_files_per_stage),
            ],
            1,
        ))
    }
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::headblock_pacemaker::HeadBlockPacemaker;
use crate::ondemand_pacemaker::OndemandPacemaker;
use crate::schedule_pacemaker::SchedulePacemaker;
use crate::stratum::mint;
use actix::prelude::*;
use anyhow::Result;
use bus::BusActor;
use chain::to_block_chain_collection;
use chain::BlockChain;
use config::{NodeConfig, PacemakerStrategy};
use crypto::hash::HashValue;
use futures::channel::mpsc;
use futures::prelude::*;
use logger::prelude::*;
pub use miner_client::miner::{Miner as MinerClient, MinerClientActor};
use sc_stratum::Stratum;
use starcoin_txpool_api::TxPoolAsyncService;
use starcoin_wallet_api::WalletAccount;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::Duration;
use storage::Store;
use traits::ChainAsyncService;
use traits::{Consensus, ConsensusHeader};
use types::transaction::TxStatus;
mod headblock_pacemaker;
mod miner;
mod miner_client;
mod ondemand_pacemaker;
mod schedule_pacemaker;
mod stratum;
// Batched (tx hash, status) notifications shared across actors.
pub(crate) type TransactionStatusEvent = Arc<Vec<(HashValue, TxStatus)>>;
/// Message asking the miner actor to (attempt to) produce a new block.
#[derive(Default, Debug, Message)]
#[rtype(result = "Result<()>")]
pub struct GenerateBlockEvent {}
/// Actix actor that assembles and mines new blocks whenever a
/// pacemaker fires a `GenerateBlockEvent`.
pub struct MinerActor<C, P, CS, S, H>
where
    C: Consensus + Sync + Send + 'static,
    P: TxPoolAsyncService + Sync + Send + 'static,
    CS: ChainAsyncService + Sync + Send + 'static,
    S: Store + Sync + Send + 'static,
    H: ConsensusHeader + Sync + Send + 'static,
{
    config: Arc<NodeConfig>,
    txpool: P,
    storage: Arc<S>,
    // C is used only at the type level; no consensus value is stored.
    phantom_c: PhantomData<C>,
    chain: CS,
    miner: miner::Miner<H>,
    stratum: Arc<Stratum>,
    // Account credited as the block author.
    miner_account: WalletAccount,
    arbiter: Arbiter,
}
impl<C, P, CS, S, H> MinerActor<C, P, CS, S, H>
where
    C: Consensus + Sync + Send + 'static,
    P: TxPoolAsyncService + Sync + Send + 'static,
    CS: ChainAsyncService + Sync + Send + 'static,
    S: Store + Sync + Send + 'static,
    H: ConsensusHeader + Sync + Send + 'static,
{
    /// Creates and starts the miner actor, wiring up the pacemaker
    /// chosen by `config.miner.pacemaker_strategy` and a stratum
    /// server for external miner clients.
    pub fn launch(
        config: Arc<NodeConfig>,
        bus: Addr<BusActor>,
        storage: Arc<S>,
        txpool: P,
        chain: CS,
        mut transaction_receiver: Option<mpsc::UnboundedReceiver<TransactionStatusEvent>>,
        miner_account: WalletAccount,
    ) -> Result<Addr<Self>> {
        let actor = MinerActor::create(move |ctx| {
            // The pacemaker pushes GenerateBlockEvents into this channel,
            // which is attached to the actor's mailbox as a stream.
            let (sender, receiver) = mpsc::channel(100);
            ctx.add_message_stream(receiver);
            match &config.miner.pacemaker_strategy {
                PacemakerStrategy::HeadBlock => {
                    // Fire whenever a new head block appears on the bus.
                    let pacemaker = HeadBlockPacemaker::new(bus.clone(), sender);
                    pacemaker.start();
                }
                PacemakerStrategy::Ondemand => {
                    // Fire when transaction-status events arrive.
                    // NOTE(review): unwrap assumes a receiver is always
                    // supplied for the Ondemand strategy — confirm callers.
                    OndemandPacemaker::new(
                        bus.clone(),
                        sender.clone(),
                        transaction_receiver.take().unwrap(),
                    )
                    .start();
                }
                PacemakerStrategy::Schedule => {
                    // Fire on a fixed period (dev configuration).
                    SchedulePacemaker::new(Duration::from_secs(config.miner.dev_period), sender)
                        .start();
                }
            };
            let miner = miner::Miner::new(bus.clone(), config.clone());
            // Stratum server lets external miner clients fetch work and
            // submit solutions.
            let stratum = sc_stratum::Stratum::start(
                &config.miner.stratum_server,
                Arc::new(stratum::StratumManager::new(miner.clone())),
                None,
            )
            .unwrap();
            let arbiter = Arbiter::new();
            MinerActor {
                config,
                txpool,
                storage,
                phantom_c: PhantomData,
                chain,
                miner,
                stratum,
                miner_account,
                arbiter,
            }
        });
        Ok(actor)
    }
}
impl<C, P, CS, S, H> Actor for MinerActor<C, P, CS, S, H>
where
    C: Consensus + Sync + Send + 'static,
    P: TxPoolAsyncService + Sync + Send + 'static,
    CS: ChainAsyncService + Sync + Send + 'static,
    S: Store + Sync + Send + 'static,
    H: ConsensusHeader + Sync + Send + 'static,
{
    type Context = Context<Self>;
    /// Log startup only; all real work is driven by `GenerateBlockEvent`s.
    fn started(&mut self, _ctx: &mut Self::Context) {
        info!("Miner actor started");
    }
}
impl<C, P, CS, S, H> Handler<GenerateBlockEvent> for MinerActor<C, P, CS, S, H>
where
    C: Consensus + Sync + Send + 'static,
    P: TxPoolAsyncService + Sync + Send + 'static,
    CS: ChainAsyncService + Sync + Send + 'static,
    S: Store + Sync + Send + 'static,
    H: ConsensusHeader + Sync + Send + 'static,
{
    type Result = Result<()>;
    /// Assemble a block template from the pending transactions on the
    /// current master chain and hand it to the mining pipeline (`mint`).
    /// The work is spawned on the actor's context, so the handler itself
    /// returns immediately; failures are only logged.
    fn handle(&mut self, _event: GenerateBlockEvent, ctx: &mut Self::Context) -> Self::Result {
        // Clone everything the async task needs: the future outlives this call.
        let txpool = self.txpool.clone();
        let storage = self.storage.clone();
        let chain = self.chain.clone();
        let config = self.config.clone();
        let miner = self.miner.clone();
        let stratum = self.stratum.clone();
        let miner_account = self.miner_account.clone();
        let arbiter = self.arbiter.clone();
        let f = async {
            // A txpool failure degrades to mining an empty block instead of
            // aborting the round. `unwrap_or_default` avoids allocating the
            // fallback Vec eagerly (clippy: or_fun_call).
            let txns = txpool
                .clone()
                .get_pending_txns(None)
                .await
                .unwrap_or_default();
            let startup_info = chain.master_startup_info().await?;
            debug!("head block : {:?}, txn len: {}", startup_info, txns.len());
            let master = startup_info.master.clone();
            let collection = to_block_chain_collection(
                config.clone(),
                startup_info,
                storage.clone(),
                txpool.clone(),
            )?;
            // The block chain only holds a weak reference into `collection`.
            let block_chain = BlockChain::<C, S, P>::new(
                config.clone(),
                master,
                storage,
                txpool,
                Arc::downgrade(&collection),
            )?;
            mint::<H, C>(
                stratum,
                miner,
                config,
                miner_account,
                txns,
                &block_chain,
                arbiter,
            )?;
            // Explicit drops keep the original release order: the weak-ref
            // holder goes before the collection it points into.
            drop(block_chain);
            drop(collection);
            Ok(())
        }
        .map(|result: Result<()>| {
            if let Err(err) = result {
                error!("Failed to process generate block event:{:?}", err)
            }
        })
        .into_actor(self);
        ctx.spawn(f);
        Ok(())
    }
}
|
use std::io::BufRead;
use rand::Rng;
type yao6 = Vec<u8>;
/// Entry point: run one Dayan yarrow-stalk divination and print the six yao.
fn main() {
    let (lines, _details, _names) = dayanshi_divinate("");
    println!("{:?}", lines)
}
/// Three-coin divination: produce six random yin(0)/yang(1) lines.
///
/// `_event` (the question being divined) is accepted for API symmetry with
/// `dayanshi_divinate` but does not influence the result. The underscore
/// prefix silences the unused-parameter warning without changing the
/// signature callers see.
pub fn coin_divinate(_event: &str) -> yao6 {
    let mut rng = rand::thread_rng();
    // Six independent coin results, one per yao.
    let mut res_vec: yao6 = (0..6).map(|_| rng.gen_range(0..2)).collect();
    // Present lines top-down, matching `dayanshi_divinate`'s output order.
    res_vec.reverse();
    res_vec
}
// reference: https://zhuanlan.zhihu.com/p/409348265
/// Dayan ("Great Expansion") yarrow-stalk divination.
///
/// Returns three vectors, each reversed so that index 0 is the topmost yao:
/// - the six yao as 0 (yin) / 1 (yang),
/// - per-yao remainder pairs from the three counting passes,
/// - per-yao Chinese names (old yin / young yang / young yin / old yang).
///
/// `event` (the divined question) is currently unused; kept for API shape.
pub fn dayanshi_divinate(event:&str) -> (yao6,Vec<Vec<(u8,u8)>>,Vec<String>) {
    let mut rng = rand::thread_rng();
    let mut res_vec: Vec<u8> = Vec::new();
    let mut str_ls:Vec<String>=Vec::new();
    let mut detailed_ls:Vec<Vec<(u8,u8)>>=Vec::new();
    for i_yao in 0..6 {
        // The "Great Expansion" total is 50 stalks...
        let dayan: u8 = 50;
        // ...of which 49 are actually used in the counting.
        let mut shecao: u8 = 49;
        let mut this_detailed_ls:Vec<(u8,u8)>=Vec::new();
        // A: to obtain one yao, repeat the counting procedure three times.
        for repet_times in 0..3 {
            // A.1. Split the bundle into two heaps (representing Heaven and Earth).
            let mut split1: u8 = rng.gen_range(0..shecao+1);
            let mut split2: u8 = shecao - split1;
            // A.2. Set one stalk aside (representing Man) — first pass only.
            if repet_times==0 {
                let mut selected_index: u8 = rng.gen_range(0..2);
                // Never take the stalk from a heap that would go negative.
                if split1==1 || split1==0 {
                    selected_index=1;
                }
                else if split2==1 || split2==0 {
                    selected_index=0;
                }
                if selected_index == 0{
                    split1 = split1 - 1; // take the stalk from heap 1
                } else {
                    split2 = split2 - 1;
                }
            }
            // A.3. Count each heap off in fours (representing the four seasons).
            // A.3.1 degenerate case: one heap is empty, remainder comes wholly
            // from the other heap.
            if split1==0{
                this_detailed_ls.push((0,4));
                split2-=4;
            }
            else if split2==0 {
                this_detailed_ls.push((4,0));
                split1-=4;
            }
            // A.3.2 both heaps divisible by four (the total stays divisible by
            // four after the first pass, so checking split1 suffices).
            else if split1%4==0{
                this_detailed_ls.push((4,4));
                // A.4. Set the remainders aside (representing the leap month).
                split1-=4;
                split2-=4;
            }
            // A.3.3 general case: keep the two nonzero remainders.
            else {
                let yu1=split1%4;
                let yu2=split2%4;
                this_detailed_ls.push((yu1,yu2));
                // A.4. Set the remainders aside (representing the leap month).
                split1-=yu1;
                split2-=yu2;
            }
            // The stalks that remain feed the next counting pass.
            shecao = split1 + split2;
        }
        // Score each pass: unequal remainders count as 1 (yang-like),
        // equal remainders as 0 (yin-like).
        let mut xang_ls:Vec<u8>=vec![];
        for element in &this_detailed_ls{
            if element.0!=element.1{
                xang_ls.push(1); // yang
            }
            else {
                xang_ls.push(0); // yin
            }
        }
        this_detailed_ls.reverse();
        detailed_ls.push(this_detailed_ls);
        // Sum of the three pass scores selects the line type.
        let res:u8=xang_ls.iter().sum();
        if res==0{
            str_ls.push(String::from("老阴"));
            res_vec.push(0);
        }
        else if res==1{
            str_ls.push(String::from("少阳"));
            res_vec.push(1);
        }
        else if res==2 {
            str_ls.push(String::from("少阴"));
            res_vec.push(0);
        }
        else if res==3 {
            str_ls.push(String::from("老阳"));
            res_vec.push(1);
        }
    }
    // Reverse so index 0 is the topmost yao in every returned vector.
    res_vec.reverse();
    detailed_ls.reverse();
    str_ls.reverse();
    (res_vec,detailed_ls,str_ls)
}
|
use std::{borrow::BorrowMut, sync::mpsc::TryRecvError, time::Duration};
use actyx_sdk::service::EventService;
use tello::{command_mode::CommandModeState, odometry::Odometry, CommandMode, Drone};
use tokio::time::sleep;
use crate::twins::{
drone_twin::DroneTwin,
mission_twin::types::{DelayWaypoint, GoToWaypoint, TurnWaypoint, Waypoint},
};
/// Thin wrapper around an optional Tello drone connection in command mode.
pub struct DroneControl {
    // `None` until `connect` has created the command-mode handle.
    drone: Option<CommandMode>,
}
impl DroneControl {
pub fn new() -> Self {
Self { drone: None }
}
#[allow(dead_code)]
pub fn try_recv_state(&mut self) -> Result<CommandModeState, TryRecvError> {
if let Some(d) = self.drone.borrow_mut() {
let mut last: Option<CommandModeState> = None;
while let Ok(s) = d.state_receiver.try_recv() {
last = Some(s);
}
println!("new State {:?}", last);
last.map(|s| Ok(s)).unwrap_or(Err(TryRecvError::Empty))
} else {
Err(TryRecvError::Disconnected)
}
}
pub fn is_drone_connected(&self) -> bool {
self.drone.as_ref().is_some()
}
pub async fn connect(&mut self, ip: String) -> Result<(), String> {
if let None = self.drone.as_ref() {
let drone = Drone::new(&*ip).command_mode();
self.drone = Some(drone);
}
self.drone.as_mut().unwrap().enable().await
}
pub async fn take_off(&mut self) -> Result<(), String> {
if let Some(drone) = self.drone.as_mut() {
drone.take_off().await
} else {
Err("no drone connected".to_string())
}
}
    /// Execute a single mission waypoint, emitting "started" and "arrived"
    /// twin events around the physical maneuver.
    ///
    /// NOTE(review): distances are handled in whole centimetres and each
    /// `go_to` segment is capped at 500 — presumably the Tello firmware's
    /// per-command limit; TODO confirm.
    pub async fn exec_waypoint(
        &mut self,
        service: impl EventService,
        drone_id: String,
        mission_id: String,
        wp: &Waypoint,
        waypoint_idx: i32,
    ) -> Result<(), anyhow::Error> {
        println!("execute waypoint: {:?}", wp);
        match wp {
            Waypoint::Goto(GoToWaypoint {
                distance, height, ..
            }) => {
                if let Some(d) = self.drone.borrow_mut() {
                    DroneTwin::emit_drone_started_to_next_waypoint(
                        service.clone(),
                        drone_id.clone(),
                        mission_id.clone(),
                        waypoint_idx,
                    )
                    .await?;
                    // Vertical delta from current odometry to target height.
                    let target_height = *height as i32;
                    let z = target_height - d.odometry.z.round() as i32;
                    // distance presumably in metres → centimetres (×100);
                    // TODO confirm against the mission schema.
                    let distance = (distance * 100.0).round();
                    // Split into full 5 m segments plus a remainder.
                    let mut fife_meters = ((distance / 500.0).floor() as i32).max(0);
                    let mut rest = (distance % 500.0) as i32;
                    let mut do_480_step = false;
                    // if the last movement would be smaller than 20 cm,
                    // borrow 20 cm from one full segment (flown as 480 cm)
                    // so every command stays above the minimum.
                    if rest < 20 && fife_meters > 0 {
                        fife_meters -= 1;
                        do_480_step = true;
                        rest += 20;
                    }
                    // only the last segment will move in the Z direction to avoid < 20 cm movements
                    let rest_z = (!do_480_step && fife_meters == 0)
                        .then(|| z)
                        .unwrap_or_default();
                    d.go_to(rest, 0, rest_z, 100)
                        .await
                        .map_err(anyhow::Error::msg)?;
                    if do_480_step {
                        let d_z = (fife_meters == 0).then(|| z).unwrap_or_default();
                        d.go_to(480, 0, d_z, 100)
                            .await
                            .map_err(anyhow::Error::msg)?;
                    }
                    for i in 0..fife_meters {
                        // Apply the Z delta only on the final segment.
                        let d_z = (i == fife_meters - 1).then(|| z).unwrap_or_default();
                        d.go_to(500, 0, d_z, 100)
                            .await
                            .map_err(anyhow::Error::msg)?;
                    }
                } else {
                    return Err(anyhow::Error::msg("no drone connected".to_string()));
                }
                DroneTwin::emit_drone_arrived_at_waypoint(
                    service.clone(),
                    drone_id,
                    mission_id,
                    waypoint_idx,
                )
                .await?;
                Ok(())
            }
            Waypoint::Turn(TurnWaypoint { deg, .. }) => {
                if let Some(d) = self.drone.borrow_mut() {
                    DroneTwin::emit_drone_started_to_next_waypoint(
                        service.clone(),
                        drone_id.clone(),
                        mission_id.clone(),
                        waypoint_idx,
                    )
                    .await?;
                    // Positive angles turn clockwise, negative counter-clockwise.
                    let deg = *deg;
                    if deg > 0 {
                        d.cw(deg as u32).await.map_err(anyhow::Error::msg)?
                    } else {
                        d.ccw((-deg) as u32).await.map_err(anyhow::Error::msg)?
                    }
                } else {
                    return Err(anyhow::Error::msg("no drone connected".to_string()));
                }
                DroneTwin::emit_drone_arrived_at_waypoint(
                    service.clone(),
                    drone_id,
                    mission_id,
                    waypoint_idx,
                )
                .await?;
                Ok(())
            }
            Waypoint::Delay(DelayWaypoint { duration, .. }) => {
                // A delay waypoint just waits in place for `duration` ms;
                // no drone connection is required.
                DroneTwin::emit_drone_started_to_next_waypoint(
                    service.clone(),
                    drone_id.clone(),
                    mission_id.clone(),
                    waypoint_idx,
                )
                .await?;
                sleep(Duration::from_millis(*duration as u64)).await;
                DroneTwin::emit_drone_arrived_at_waypoint(
                    service.clone(),
                    drone_id,
                    mission_id,
                    waypoint_idx,
                )
                .await?;
                Ok(())
            }
        }
    }
pub async fn land(&mut self) -> Result<(), String> {
if let Some(drone) = self.drone.as_ref() {
drone.land().await?;
Ok(())
} else {
Err("can't land !?".to_string())
}
}
pub fn pos(&self) -> Odometry {
self.drone
.as_ref()
.map(|d| d.odometry.clone())
.unwrap_or_default()
}
}
|
extern crate log;
extern crate simplelog;
use simplelog::*;
use std::fs::File;
pub fn logger(logfile: &str) {
CombinedLogger::init(vec![
TermLogger::new(LevelFilter::Debug, Config::default(), TerminalMode::Mixed).unwrap(),
WriteLogger::new(
LevelFilter::Info,
Config::default(),
File::create(logfile).unwrap(),
),
]).unwrap();
}
|
extern crate hex;
extern crate rand;
extern crate rayon;
use algebra::fields::mnt6753::Fr as MNT6753Fr;
use algebra::fields::mnt4753::Fr as MNT4753Fr;
use algebra::{PrimeField, MulShort};
use std::marker::PhantomData;
use crate::crh::{
FieldBasedHashParameters, poseidon::{
parameters::{MNT4753PoseidonParameters, MNT6753PoseidonParameters}
}
};
use crate::crh::{FieldBasedHash, BatchFieldBasedHash};
use crate::Error;
pub mod parameters;
/// Poseidon hash over field `F` with parameter set `P`.
/// Zero-sized: both fields are type-level markers only.
pub struct PoseidonHash<F: PrimeField, P: PoseidonParameters<Fr = F>>{
    _field: PhantomData<F>,
    _parameters: PhantomData<P>,
}
/// Batched Poseidon hash over field `F` with parameter set `P`; amortises
/// field inversions across many states. Zero-sized marker type.
pub struct PoseidonBatchHash<F: PrimeField, P: PoseidonParameters<Fr = F>>{
    _field: PhantomData<F>,
    _parameters: PhantomData<P>,
}
/// Compile-time parameter set describing one Poseidon instance
/// (state width, round counts, rate and the constant tables).
pub trait PoseidonParameters: 'static + FieldBasedHashParameters{
    const T: usize; // Number of S-Boxes (state width)
    const R_F:i32; // Number of full rounds applied before AND after the partial rounds
    const R_P:i32; // Number of partial rounds
    const R:usize; // The rate of the hash function
    const ZERO:Self::Fr; // The zero element in the field
    const C2:Self::Fr; // The constant to add in the position corresponding to the capacity
    const AFTER_ZERO_PERM: &'static[Self::Fr]; // State vector after a zero permutation
    const ROUND_CST: &'static[Self::Fr]; // Array of round constants
    const MDS_CST: &'static[Self::Fr]; // The MDS matrix
    const MDS_CST_SHORT: &'static[Self::Fr]; // The MDS matrix for fast (short Montgomery) multiplication
}
impl<F: PrimeField + MulShort, P: PoseidonParameters<Fr=F>> PoseidonBatchHash<F, P> {
    // Function that does the mix matrix: state <- MDS * state, with the
    // 3x3 MDS matrix stored row-major in P::MDS_CST.
    // It uses Montgomery multiplication.
    // Constants are defined such that the result is x * t * 2^768,
    // that is the Montgomery representation of the operand x * t, and t is the 64-bit constant.
    // NOTE(review): the unrolled code assumes P::T == 3 even though
    // `new_state` is sized with P::T — confirm before adding parameter sets.
    #[allow(dead_code)]
    fn matrix_mix (state: &mut Vec<F>) {
        // the new state where the result will be stored initialized to zero elements
        let mut new_state = vec![F::zero(); P::T];
        let m_11 = P::MDS_CST[0];
        let m_12 = P::MDS_CST[1];
        let m_13 = P::MDS_CST[2];
        // scalar multiplication for position 0 of the state vector
        let elem_0 = state[0].mul(&m_11);
        let elem_1 = state[1].mul(&m_12);
        let elem_2 = state[2].mul(&m_13);
        new_state[0] = elem_0;
        new_state[0] += &elem_1;
        new_state[0] += &elem_2;
        // scalar multiplication for position 1 of the state vector
        let m_21 = P::MDS_CST[3];
        let m_22 = P::MDS_CST[4];
        let m_23 = P::MDS_CST[5];
        let elem_3 = state[0].mul(&m_21);
        let elem_4 = state[1].mul(&m_22);
        let elem_5 = state[2].mul(&m_23);
        new_state[1] = elem_3;
        new_state[1] += &elem_4;
        new_state[1] += &elem_5;
        // scalar multiplication for the position 2 of the state vector
        let m_31 = P::MDS_CST[6];
        let m_32 = P::MDS_CST[7];
        let m_33 = P::MDS_CST[8];
        let elem_6 = state[0].mul(&m_31);
        let elem_7 = state[1].mul(&m_32);
        let elem_8 = state[2].mul(&m_33);
        new_state[2] = elem_6;
        new_state[2] += &elem_7;
        new_state[2] += &elem_8;
        // copy the result to the state vector
        state[0] = new_state[0];
        state[1] = new_state[1];
        state[2] = new_state[2];
    }
    // Function that does the mix matrix with fast algorithm: state <- MDS * state.
    // It uses a partial Montgomery multiplication defined as PM(x, t) = x * t * 2^-64 mod M.
    // t is a 64-bit matrix constant. In the algorithm, the constants are represented in
    // partial Montgomery representation, i.e. t * 2^64 mod M.
    // NOTE(review): like `matrix_mix`, the unrolled code assumes P::T == 3.
    fn matrix_mix_short (state: &mut Vec<F>) {
        // the new state where the result will be stored initialized to zero elements
        let mut new_state = vec![F::zero(); P::T];
        let m_11 = P::MDS_CST_SHORT[0];
        let m_12 = P::MDS_CST_SHORT[1];
        let m_13 = P::MDS_CST_SHORT[2];
        // scalar multiplication for position 0 of the state vector
        let elem_0 = m_11.mul_short(&state[0]);
        let elem_1 = m_12.mul_short(&state[1]);
        let elem_2 = m_13.mul_short(&state[2]);
        new_state[0] = elem_0;
        new_state[0] += &elem_1;
        new_state[0] += &elem_2;
        // scalar multiplication for position 1 of the state vector
        let m_21 = P::MDS_CST_SHORT[3];
        let m_22 = P::MDS_CST_SHORT[4];
        let m_23 = P::MDS_CST_SHORT[5];
        let elem_3 = m_21.mul_short(&state[0]);
        let elem_4 = m_22.mul_short(&state[1]);
        let elem_5 = m_23.mul_short(&state[2]);
        new_state[1] = elem_3;
        new_state[1] += &elem_4;
        new_state[1] += &elem_5;
        // scalar multiplication for the position 2 of the state vector
        let m_31 = P::MDS_CST_SHORT[6];
        let m_32 = P::MDS_CST_SHORT[7];
        let m_33 = P::MDS_CST_SHORT[8];
        let elem_6 = m_31.mul_short(&state[0]);
        let elem_7 = m_32.mul_short(&state[1]);
        let elem_8 = m_33.mul_short(&state[2]);
        new_state[2] = elem_6;
        new_state[2] += &elem_7;
        new_state[2] += &elem_8;
        // copy the result to the state vector
        state[0] = new_state[0];
        state[1] = new_state[1];
        state[2] = new_state[2];
    }
    // One full Poseidon round over a batch of state vectors: add round
    // constants, then apply the inversion S-Box to EVERY state element using
    // Montgomery simultaneous inversion across the whole batch.
    fn poseidon_full_round(vec_state: &mut Vec<Vec<P::Fr>>, round_cst_idx: &mut usize) {
        // For each of the element position of the state vector
        for j in 0..P::T {
            // get the constant associated to element position of the state vector
            let rc = P::ROUND_CST[*round_cst_idx];
            // go over each of the state vectors and add the constant
            for k in 0..vec_state.len() {
                vec_state[k][j] += &rc;
            }
            *round_cst_idx += 1;
        }
        // Apply the S-BOX to each of the elements of the state vector.
        // Use Montgomery simultaneous inversion: one field inversion for the
        // whole batch instead of one per element.
        let mut w: Vec<P::Fr> = Vec::new();
        let mut accum_prod = P::Fr::one();
        w.push(accum_prod);
        // Calculate the intermediate partial products
        for i in 0..vec_state.len() {
            for j in 0..P::T {
                accum_prod = accum_prod * &vec_state[i][j];
                w.push(accum_prod);
            }
        }
        // if the accum_prod is zero, it means that one of the S-Boxes is zero;
        // in that case compute the inverses individually (zero maps to zero)
        if accum_prod == P::Fr::zero() {
            for i in 0..vec_state.len() {
                for j in 0..P::T {
                    if vec_state[i][j] != P::Fr::zero() {
                        vec_state[i][j] = vec_state[i][j].inverse().unwrap();
                    }
                }
            }
        } else {
            // Calculate the inversion of the products
            // The inverse always exists in this case
            let mut w_bar = accum_prod.inverse().unwrap();
            // Extract the individual inversions by walking the partial
            // products backwards
            let mut idx: i64 = w.len() as i64 - 2;
            for i in (0..vec_state.len()).rev() {
                for j in (0..P::T).rev() {
                    let vec_1 = vec_state[i][j].clone();
                    vec_state[i][j] = w_bar * &w[idx as usize];
                    w_bar = w_bar * &vec_1;
                    idx -= 1;
                }
            }
        }
    }
fn poseidon_partial_round(vec_state: &mut Vec<Vec<P::Fr>>, round_cst_idx: &mut usize) {
// For each of the state vector element position
for j in 0..P::T {
// get the constant associated to state vector element position
let rc = P::ROUND_CST[*round_cst_idx];
// go over each state vector
for k in 0..vec_state.len() {
vec_state[k][j] += &rc;
}
*round_cst_idx += 1;
}
// Apply the S-BOX to the first elements of each of the state vector
let mut w: Vec<P::Fr> = Vec::new();
let mut accum_prod = P::Fr::one();
w.push(accum_prod);
// Calculate the intermediate partial products
for i in 0..vec_state.len() {
accum_prod = accum_prod * &vec_state[i][0];
w.push(accum_prod);
}
// if the accum_prod is zero, it means that one of the S-Boxes is zero
// in that case compute the inverses individually
if accum_prod == P::Fr::zero() {
for i in 0..(vec_state.len() - 1) {
if vec_state[i][0] != P::Fr::zero() {
vec_state[i][0] = vec_state[i][0].inverse().unwrap();
}
}
} else {
// Calculate the inversion of the products
// Use Montgomery simultaneous inversion
let mut w_bar = accum_prod.inverse().unwrap();
// Extract the individual inversions
let mut idx: i64 = w.len() as i64 - 2;
for i in (0..vec_state.len()).rev() {
let vec_1 = vec_state[i][0].clone();
vec_state[i][0] = w_bar * &w[idx as usize];
w_bar = w_bar * &vec_1;
idx -= 1;
}
}
}
fn poseidon_perm_gen(vec_state: &mut Vec<Vec<P::Fr>>) {
// index that goes over the round constants
let mut round_cst_idx: usize = 0;
// Full rounds
for _i in 0..P::R_F {
Self::poseidon_full_round(vec_state, &mut round_cst_idx);
// Perform the matrix mix
for i in 0..vec_state.len() {
//Self::matrix_mix_short(&mut vec_state[i]);
Self::matrix_mix_short(&mut vec_state[i]);
}
}
// Partial rounds
for _i in 0..P::R_P {
Self::poseidon_partial_round(vec_state, &mut round_cst_idx);
// Perform the matrix mix
for i in 0..vec_state.len() {
//Self::matrix_mix_short(&mut vec_state[i]);
Self::matrix_mix_short(&mut vec_state[i]);
}
}
// Full rounds
for _i in 0..(P::R_F - 1) {
Self::poseidon_full_round(vec_state, &mut round_cst_idx);
// Perform the matrix mix
for i in 0..vec_state.len() {
//Self::matrix_mix_short(&mut vec_state[i]);
Self::matrix_mix_short(&mut vec_state[i]);
}
}
Self::poseidon_full_round(vec_state, &mut round_cst_idx);
}
}
impl<F: PrimeField + MulShort, P: PoseidonParameters<Fr=F>> PoseidonHash<F, P> {
    // Function that does the mix matrix: state <- MDS * state, with the
    // 3x3 MDS matrix stored row-major in P::MDS_CST.
    // It uses Montgomery multiplication.
    // Constants are defined such that the result is x * t * 2^768,
    // that is the Montgomery representation of the operand x * t, and t is the 64-bit constant.
    // NOTE(review): duplicates PoseidonBatchHash::matrix_mix and likewise
    // assumes P::T == 3 despite sizing `new_state` with P::T.
    fn matrix_mix (state: &mut Vec<F>) {
        // the new state where the result will be stored initialized to zero elements
        let mut new_state = vec![F::zero(); P::T];
        let m_11 = P::MDS_CST[0];
        let m_12 = P::MDS_CST[1];
        let m_13 = P::MDS_CST[2];
        // scalar multiplication for position 0 of the state vector
        let elem_0 = state[0].mul(&m_11);
        let elem_1 = state[1].mul(&m_12);
        let elem_2 = state[2].mul(&m_13);
        new_state[0] = elem_0;
        new_state[0] += &elem_1;
        new_state[0] += &elem_2;
        // scalar multiplication for position 1 of the state vector
        let m_21 = P::MDS_CST[3];
        let m_22 = P::MDS_CST[4];
        let m_23 = P::MDS_CST[5];
        let elem_3 = state[0].mul(&m_21);
        let elem_4 = state[1].mul(&m_22);
        let elem_5 = state[2].mul(&m_23);
        new_state[1] = elem_3;
        new_state[1] += &elem_4;
        new_state[1] += &elem_5;
        // scalar multiplication for the position 2 of the state vector
        let m_31 = P::MDS_CST[6];
        let m_32 = P::MDS_CST[7];
        let m_33 = P::MDS_CST[8];
        let elem_6 = state[0].mul(&m_31);
        let elem_7 = state[1].mul(&m_32);
        let elem_8 = state[2].mul(&m_33);
        new_state[2] = elem_6;
        new_state[2] += &elem_7;
        new_state[2] += &elem_8;
        // copy the result to the state vector
        state[0] = new_state[0];
        state[1] = new_state[1];
        state[2] = new_state[2];
    }
    // Function that does the mix matrix with fast algorithm: state <- MDS * state.
    // It uses a partial Montgomery multiplication defined as PM(x, t) = x * t * 2^-64 mod M.
    // t is a 64-bit matrix constant. In the algorithm, the constants are represented in
    // partial Montgomery representation, i.e. t * 2^64 mod M.
    // NOTE(review): duplicates PoseidonBatchHash::matrix_mix_short; the
    // unrolled code assumes P::T == 3.
    fn matrix_mix_short (state: &mut Vec<F>) {
        // the new state where the result will be stored initialized to zero elements
        let mut new_state = vec![F::zero(); P::T];
        let m_11 = P::MDS_CST_SHORT[0];
        let m_12 = P::MDS_CST_SHORT[1];
        let m_13 = P::MDS_CST_SHORT[2];
        // scalar multiplication for position 0 of the state vector
        let elem_0 = m_11.mul_short(&state[0]);
        let elem_1 = m_12.mul_short(&state[1]);
        let elem_2 = m_13.mul_short(&state[2]);
        new_state[0] = elem_0;
        new_state[0] += &elem_1;
        new_state[0] += &elem_2;
        // scalar multiplication for position 1 of the state vector
        let m_21 = P::MDS_CST_SHORT[3];
        let m_22 = P::MDS_CST_SHORT[4];
        let m_23 = P::MDS_CST_SHORT[5];
        let elem_3 = m_21.mul_short(&state[0]);
        let elem_4 = m_22.mul_short(&state[1]);
        let elem_5 = m_23.mul_short(&state[2]);
        new_state[1] = elem_3;
        new_state[1] += &elem_4;
        new_state[1] += &elem_5;
        // scalar multiplication for the position 2 of the state vector
        let m_31 = P::MDS_CST_SHORT[6];
        let m_32 = P::MDS_CST_SHORT[7];
        let m_33 = P::MDS_CST_SHORT[8];
        let elem_6 = m_31.mul_short(&state[0]);
        let elem_7 = m_32.mul_short(&state[1]);
        let elem_8 = m_33.mul_short(&state[2]);
        new_state[2] = elem_6;
        new_state[2] += &elem_7;
        new_state[2] += &elem_8;
        // copy the result to the state vector
        state[0] = new_state[0];
        state[1] = new_state[1];
        state[2] = new_state[2];
    }
    /// Poseidon permutation over a single state vector (in place):
    /// R_F full rounds, R_P partial rounds, R_F full rounds; the very last
    /// full round omits the matrix mix. The S-Box is field inversion, with
    /// zero mapped to zero.
    ///
    /// NOTE(review): the unrolled inversion trick below indexes state[0..3],
    /// so this assumes a state width of 3.
    fn poseidon_perm (state: &mut Vec<F>) {
        // Select the fast (short-Montgomery) matrix-mix implementation.
        let use_fast = true;
        // index that goes over the round constants
        let mut round_cst_idx = 0;
        // First full rounds
        for _i in 0..P::R_F {
            // Add the round constants to the state vector
            for d in state.iter_mut() {
                let rc = P::ROUND_CST[round_cst_idx];
                *d += &rc;
                round_cst_idx += 1;
            }
            // Apply the S-BOX to each of the elements of the state vector.
            // Optimization for the inversion S-Box: invert the product
            // w = s0*s1*s2 once and recover every individual inverse from it.
            let w2 = state[0] * &state[1];
            let w = state[2] * &w2;
            if w == P::Fr::zero() {
                // At least one of the S-Boxes is zero
                for d in state.iter_mut() {
                    // The S-BOX is an inversion function; zero stays zero
                    if *d != P::Fr::zero() {
                        *d = (*d).inverse().unwrap();
                    }
                }
            } else {
                let mut w_bar = w.inverse().unwrap();
                let z_2 = w_bar * &w2;
                w_bar = w_bar * &state[2];
                state[2] = z_2;
                let z_1 = w_bar * &state[0];
                state[0] = w_bar * &state[1];
                state[1] = z_1;
            }
            // Perform the matrix mix
            if use_fast {
                Self::matrix_mix_short(state);
            } else {
                Self::matrix_mix(state);
            }
        }
        // Partial rounds
        for _i in 0..P::R_P {
            // Add the round constants to the state vector
            for d in state.iter_mut() {
                let rc = P::ROUND_CST[round_cst_idx];
                *d += &rc;
                round_cst_idx += 1;
            }
            // Apply S-BOX only to the first element of the state vector
            if state[0]!=P::Fr::zero() {
                state[0] = state[0].inverse().unwrap();
            }
            // Apply the matrix mix
            if use_fast {
                Self::matrix_mix_short(state);
            } else {
                Self::matrix_mix(state);
            }
        }
        // Second full rounds
        // Process only to R_F -1 iterations. The last iteration does not contain a matrix mix
        for _i in 0..(P::R_F-1) {
            // Add the round constants
            for d in state.iter_mut() {
                let rc = P::ROUND_CST[round_cst_idx];
                *d += &rc;
                round_cst_idx += 1;
            }
            // Apply the S-BOX to each of the elements of the state vector
            // Optimization for the inversion S-Box (same trick as above)
            let w2 = state[0] * &state[1];
            let w = state[2] * &w2;
            if w == P::Fr::zero() {
                // At least one of the S-Boxes is zero
                for d in state.iter_mut() {
                    // The S-BOX is an inversion function; zero stays zero
                    if *d != P::Fr::zero() {
                        *d = (*d).inverse().unwrap();
                    }
                }
            } else {
                let mut w_bar = w.inverse().unwrap();
                let z_2 = w_bar * &w2;
                w_bar = w_bar * &state[2];
                state[2] = z_2;
                let z_1 = w_bar * &state[0];
                state[0] = w_bar * &state[1];
                state[1] = z_1;
            }
            // Apply matrix mix
            if use_fast {
                Self::matrix_mix_short(state);
            } else {
                Self::matrix_mix(state);
            }
        }
        // Last full round does not perform the matrix_mix
        // Add the round constants
        for d in state.iter_mut() {
            let rc = P::ROUND_CST[round_cst_idx];
            *d += &rc;
            round_cst_idx += 1;
        }
        // Apply the S-BOX to each of the elements of the state vector
        // Optimization for the inversion S-Box (same trick as above)
        let w2 = state[0] * &state[1];
        let w = state[2] * &w2;
        if w == P::Fr::zero() {
            for d in state.iter_mut() {
                // The S-BOX is an inversion function; zero stays zero
                if *d != P::Fr::zero() {
                    *d = (*d).inverse().unwrap();
                }
            }
        } else {
            let mut w_bar = w.inverse().unwrap();
            let z_2 = w_bar * &w2;
            w_bar = w_bar * &state[2];
            state[2] = z_2;
            let z_1 = w_bar * &state[0];
            state[0] = w_bar * &state[1];
            state[1] = z_1;
        }
    }
}
impl<F: PrimeField + MulShort, P: PoseidonParameters<Fr = F>> FieldBasedHash for PoseidonHash<F, P> {
    type Data = F;
    type Parameters = P;
    /// Hash `input` with a sponge: absorb chunks of `P::R` (the rate) into a
    /// state initialised from `AFTER_ZERO_PERM`, permute after each absorbed
    /// chunk, and return the first state element as the digest.
    ///
    /// NOTE(review): the state is built with exactly 3 elements and the
    /// capacity element is addressed as `state[P::R]`, so this assumes
    /// T == 3 and R == 2 — confirm for any new parameter set.
    fn evaluate(input: &[F]) -> Result<F, Error> {
        // state is a vector of 3 elements. They are initialized to constants that are obtained after applying a permutation to a zero elements vector
        let mut state = vec![P::AFTER_ZERO_PERM[0], P::AFTER_ZERO_PERM[1], P::AFTER_ZERO_PERM[2]];
        // calculate the number of cycles to process the input dividing in portions of rate elements
        let num_cycles = input.len() / P::R;
        // check if the input is a multiple of the rate by calculating the remainder of the division
        let rem = input.len() % P::R;
        // index to process the input
        let mut input_idx = 0;
        // iterate of the portions of rate elements
        for _i in 0..num_cycles {
            // add the elements to the state vector. Add rate elements
            for j in 0..P::R {
                state[j] += &input[input_idx];
                input_idx += 1;
            }
            // for application to a 2-1 Merkle tree, add the constant C2 to the capacity position
            state[P::R] += &P::C2;
            // apply permutation after adding the input vector
            Self::poseidon_perm(&mut state);
        }
        // in case the input is not a multiple of the rate, process the
        // remainder (a single trailing element when R == 2)
        if rem != 0 {
            state[0] += &input[input_idx];
            state[P::R] += &P::C2;
            // apply permutation after adding the input vector
            Self::poseidon_perm(&mut state);
        }
        // return the first element of the state vector as the hash digest
        Ok(state[0])
    }
}
impl<F: PrimeField + MulShort, P: PoseidonParameters<Fr = F>> BatchFieldBasedHash for PoseidonBatchHash<F, P> {
    type Data = F;
    type Parameters = P;
    /// Hash a flat array of input pairs in place.
    ///
    /// `input_array` holds pairs `(d_00, d_01, d_10, d_11, ...)`; after the
    /// call the first `len / 2` positions hold the corresponding digests
    /// (the rate of the hash is 2 field elements, so each pair yields one
    /// digest).
    ///
    /// # Panics
    /// Panics when the input is empty or its length is odd.
    fn batch_evaluate_2_1(input_array: &mut[F]) {
        // Checks that input contains data
        assert_ne!(input_array.len(), 0, "Input to the hash has length 0.");
        assert_eq!(input_array.len() % 2, 0, "The length of the input to the hash is not even.");
        let input_length = input_array.len() / 2;
        // Start every state from the precomputed permutation of the all-zero
        // state vector; one 3-element state per input pair.
        let state_z = vec![P::AFTER_ZERO_PERM[0], P::AFTER_ZERO_PERM[1], P::AFTER_ZERO_PERM[2]];
        let mut state = vec![state_z; input_length];
        // Absorb each input pair into the rate portion of its state and add
        // the 2-1 Merkle tree constant to the capacity position.
        let mut input_idx = 0;
        for k in 0..input_length {
            state[k][0] += &input_array[input_idx];
            input_idx += 1;
            state[k][1] += &input_array[input_idx];
            input_idx += 1;
            state[k][2] += &P::C2;
        }
        // Run the permutation over the whole batch at once.
        Self::poseidon_perm_gen(&mut state);
        // The first state element is the digest; write the results back in place.
        for k in 0..input_length {
            input_array[k] = state[k][0];
        }
    }
}
/// Poseidon hash instantiated over the MNT4-753 scalar field.
pub type MNT4PoseidonHash = PoseidonHash<MNT4753Fr, MNT4753PoseidonParameters>;
/// Poseidon hash instantiated over the MNT6-753 scalar field.
pub type MNT6PoseidonHash = PoseidonHash<MNT6753Fr, MNT6753PoseidonParameters>;
#[cfg(test)]
mod test {
use super::*;
use rayon::prelude::*;
use rand_xorshift::XorShiftRng;
use std::str::FromStr;
use crate::{FieldBasedHash, BatchFieldBasedHash, PoseidonBatchHash};
use super::rand::SeedableRng;
use algebra::UniformRand;
use std::time::Instant;
#[test]
fn test_poseidon_hash_mnt4() {
let mut input = Vec::new();
input.push(MNT4753Fr::from_str("1").unwrap());
input.push(MNT4753Fr::from_str("2").unwrap());
let output = MNT4PoseidonHash::evaluate(&input);
println!("{:?}", output);
}
#[test]
fn test_poseidon_hash_mnt6() {
let mut input = Vec::new();
input.push(MNT6753Fr::from_str("1").unwrap());
input.push(MNT6753Fr::from_str("2").unwrap());
let output = MNT6PoseidonHash::evaluate(&mut input);
println!("{:?}", output);
}
#[test]
fn test_hash_speed() {
// =============================================================================
// Computation for MNT4
type Mnt4PoseidonHash = PoseidonHash<MNT4753Fr, MNT4753PoseidonParameters>;
type Mnt4BatchPoseidonHash = PoseidonBatchHash<MNT4753Fr, MNT4753PoseidonParameters>;
// the number of rounds to test
let num_rounds = 1000;
// the vectors that store random input data
let mut vec_vec_elem_4753 = Vec::new();
let mut array_elem_4753 = Vec::new();
// the random number generator to generate random input data
// let mut rng = &mut thread_rng();
let mut rng = XorShiftRng::seed_from_u64(1231275789u64);
// we need the double of number of rounds because we have two inputs
for _ in 0..num_rounds {
let mut vec_elem_4753 = Vec::new();
let elem1 = MNT4753Fr::rand(&mut rng);
let elem2 = MNT4753Fr::rand(&mut rng);
vec_elem_4753.push(elem1.clone());
vec_elem_4753.push(elem2.clone());
vec_vec_elem_4753.push(vec_elem_4753);
array_elem_4753.push(elem1.clone());
array_elem_4753.push(elem2.clone());
}
// =============================================================================
// Calculate Poseidon Hash for mnt4753
let now_4753 = Instant::now();
let mut output_4753 = Vec::new();
for i in 0..num_rounds {
// Call the poseidon hash
let output = Mnt4PoseidonHash::evaluate(&vec_vec_elem_4753[i]);
output_4753.push(output.unwrap());
}
let new_now_4753 = Instant::now();
// =============================================================================
// Calculate Poseidon Hash for mnt4753 batch evaluation
let mut array1 = Vec::new();
let mut array2 = Vec::new();
let mut array3 = Vec::new();
let mut array4 = Vec::new();
for i in 0..(num_rounds / 4) {
array1.push(vec_vec_elem_4753[i][0].clone());
array1.push(vec_vec_elem_4753[i][1].clone());
}
for i in (num_rounds / 4)..(num_rounds / 2) {
array2.push(vec_vec_elem_4753[i][0].clone());
array2.push(vec_vec_elem_4753[i][1].clone());
}
for i in (num_rounds / 2)..(num_rounds * 3 / 4) {
array3.push(vec_vec_elem_4753[i][0].clone());
array3.push(vec_vec_elem_4753[i][1].clone());
}
for i in (num_rounds * 3 / 4)..(num_rounds) {
array4.push(vec_vec_elem_4753[i][0].clone());
array4.push(vec_vec_elem_4753[i][1].clone());
}
let mut array_array_input = Vec::new();
array_array_input.push(array1);
array_array_input.push(array2);
array_array_input.push(array3);
array_array_input.push(array4);
let now_4753_batch = Instant::now();
array_array_input.par_iter_mut().for_each(|mut p| Mnt4BatchPoseidonHash::batch_evaluate_2_1(&mut p));
let new_now_4753_batch = Instant::now();
// Call the poseidon batch hash
let mut output_4753_batch = Vec::new();
for i in 0..num_rounds / 4 {
output_4753_batch.push(array_array_input[0][i]);
}
for i in 0..num_rounds / 4 {
output_4753_batch.push(array_array_input[1][i]);
}
for i in 0..num_rounds / 4 {
output_4753_batch.push(array_array_input[2][i]);
}
for i in 0..num_rounds / 4 {
output_4753_batch.push(array_array_input[3][i]);
}
// =============================================================================
// Compare results
let output_batch = output_4753_batch;
for i in 0..num_rounds {
if output_4753[i] != output_batch[i] {
println!("Hash outputs, position {}, for MNT4 are not equal.", i);
}
}
println!("End comparison for MNT4.");
// =============================================================================
// Report the timing results
let duration_4753_single = new_now_4753.duration_since(now_4753);
println!("Time for {} rounds MNT4753 single = {:?}", num_rounds, duration_4753_single.as_millis());
let duration_4753_batch = new_now_4753_batch.duration_since(now_4753_batch);
println!("Time for {} rounds MNT4753 batch = {:?}", num_rounds, duration_4753_batch.as_millis());
// =============================================================================
// Computation for MNT6
type Mnt6PoseidonHash = PoseidonHash<MNT6753Fr, MNT6753PoseidonParameters>;
type Mnt6BatchPoseidonHash = PoseidonBatchHash<MNT6753Fr, MNT6753PoseidonParameters>;
// the vectors that store random input data
let mut vec_vec_elem_6753 = Vec::new();
let mut array_elem_6753 = Vec::new();
// the random number generator to generate random input data
let mut rng = XorShiftRng::seed_from_u64(1231275789u64);
// we need the double of number of rounds because we have two inputs
for _ in 0..num_rounds {
let mut vec_elem_6753 = Vec::new();
let elem1 = MNT6753Fr::rand(&mut rng);
let elem2 = MNT6753Fr::rand(&mut rng);
vec_elem_6753.push(elem1.clone());
vec_elem_6753.push(elem2.clone());
vec_vec_elem_6753.push(vec_elem_6753);
array_elem_6753.push(elem1.clone());
array_elem_6753.push(elem2.clone());
}
// =============================================================================
// Calculate Poseidon Hash for mnt6753
let now_6753 = Instant::now();
let mut output_6753 = Vec::new();
for i in 0..num_rounds {
// Call the poseidon hash
let output = Mnt6PoseidonHash::evaluate(&vec_vec_elem_6753[i]);
output_6753.push(output.unwrap());
}
let new_now_6753 = Instant::now();
// =============================================================================
// Calculate Poseidon Hash for mnt6753 batch evaluation
let mut array1 = Vec::new();
let mut array2 = Vec::new();
let mut array3 = Vec::new();
let mut array4 = Vec::new();
for i in 0..(num_rounds / 4) {
array1.push(vec_vec_elem_6753[i][0].clone());
array1.push(vec_vec_elem_6753[i][1].clone());
}
for i in (num_rounds / 4)..(num_rounds / 2) {
array2.push(vec_vec_elem_6753[i][0].clone());
array2.push(vec_vec_elem_6753[i][1].clone());
}
for i in (num_rounds / 2)..(num_rounds * 3 / 4) {
array3.push(vec_vec_elem_6753[i][0].clone());
array3.push(vec_vec_elem_6753[i][1].clone());
}
for i in (num_rounds * 3 / 4)..(num_rounds) {
array4.push(vec_vec_elem_6753[i][0].clone());
array4.push(vec_vec_elem_6753[i][1].clone());
}
let mut array_array_input = Vec::new();
array_array_input.push(array1);
array_array_input.push(array2);
array_array_input.push(array3);
array_array_input.push(array4);
let now_6753_batch = Instant::now();
array_array_input.par_iter_mut().for_each(|mut p| Mnt6BatchPoseidonHash::batch_evaluate_2_1(&mut p));
let new_now_6753_batch = Instant::now();
// Call the poseidon batch hash
let mut output_6753_batch = Vec::new();
for i in 0..num_rounds / 4 {
output_6753_batch.push(array_array_input[0][i]);
}
for i in 0..num_rounds / 4 {
output_6753_batch.push(array_array_input[1][i]);
}
for i in 0..num_rounds / 4 {
output_6753_batch.push(array_array_input[2][i]);
}
for i in 0..num_rounds / 4 {
output_6753_batch.push(array_array_input[3][i]);
}
// =============================================================================
// Compare results
let output_batch = output_6753_batch;
for i in 0..num_rounds {
if output_6753[i] != output_batch[i] {
println!("Hash outputs, position {}, for MNT6 are not equal.", i);
}
}
println!("End comparison for MNT6.");
// =============================================================================
// Report the timing results
let duration_6753_single = new_now_6753.duration_since(now_6753);
println!("Time for {} rounds MNT6753 single = {:?}", num_rounds, duration_6753_single.as_millis());
let duration_6753_batch = new_now_6753_batch.duration_since(now_6753_batch);
println!("Time for {} rounds MNT6753 batch = {:?}", num_rounds, duration_6753_batch.as_millis());
}
} |
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
// SeaORM entity for the `cache` table. The etag/content_type/body columns
// suggest it caches fetched HTTP responses keyed by `path` —
// NOTE(review): confirm against the code that populates this table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "cache")]
pub struct Model {
    // Surrogate primary key; never accepted from deserialized input.
    #[sea_orm(primary_key)]
    #[serde(skip_deserializing)]
    pub id: i32,
    // Unique lookup key for a cache entry.
    #[sea_orm(unique)]
    pub path: String,
    // Optional validator token (presumably an HTTP ETag — confirm).
    pub etag: Option<String>,
    pub content_type: String,
    // Raw cached payload bytes.
    pub body: Vec<u8>,
}
// This table declares no relations to other entities.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
// Default active-model behavior: no custom save/delete hooks.
impl ActiveModelBehavior for ActiveModel {}
|
use crate::tokenizer::TokenType;
/// A name appearing in the parsed source, together with its span.
#[derive(Debug, Clone)]
pub struct Identifier {
    /// The identifier text.
    pub name: String,
    /// (start, end) offsets into the input.
    /// NOTE(review): the unit (byte vs token index) is not visible from
    /// this file — confirm in the tokenizer.
    pub range: (usize, usize)
}
/// A literal value: the evaluated text, the raw source text, and its span.
#[derive(Debug, Clone)]
pub struct Literal {
    /// The processed value of the literal.
    pub value: String,
    /// The literal exactly as written in the source.
    pub raw_value: String,
    /// (start, end) span in the input.
    pub range: (usize, usize)
}
impl Literal {
    /// An empty literal with a zeroed span.
    pub fn new() -> Self {
        Self {
            value: String::new(),
            raw_value: String::new(),
            range: (0, 0),
        }
    }
    /// Builds a literal whose value and raw text are both `s`; the span is
    /// left zeroed.
    pub fn from_str(s: &str) -> Self {
        Self {
            value: s.to_string(),
            raw_value: s.to_string(),
            range: (0, 0),
        }
    }
}
/// Placeholder for a binary literal; currently stores text like `Literal`.
#[derive(Debug, Clone)]
pub struct BinaryLiteral { // TODO: Change this to a binary literal
    pub value: String,
    pub raw_value: String,
    pub range: (usize, usize)
}
/// Either kind of literal an initializer may hold.
#[derive(Debug, Clone)]
pub enum VariableLiteral {
    Literal(Literal),
    BinaryLiteral(BinaryLiteral)
}
/// One `name = value` binding inside a declaration.
#[derive(Debug, Clone)]
pub struct VariableDeclarator {
    /// The declared name.
    pub id: Identifier,
    /// The initializer; always present in this AST.
    pub init: VariableLiteral,
    pub range: (usize, usize)
}
/// A declaration statement holding one or more declarators.
#[derive(Debug, Clone)]
pub struct VariableDeclaration {
    pub declarations: Vec<VariableDeclarator>,
    pub range: (usize, usize)
}
/// A `{ ... }` block: an ordered list of statements.
#[derive(Debug, Clone)]
pub struct BlockStatement {
    pub body: Vec<AstNode>,
    pub range: (usize, usize)
}
/// A named function with parameters and a body block.
#[derive(Debug, Clone)]
pub struct FunctionDeclaration {
    pub id: Identifier,
    pub params: Vec<Identifier>,
    pub body: BlockStatement,
    pub range: (usize, usize)
}
/// A call such as `foo(...)` or `a.b(...)`.
#[derive(Debug, Clone)]
pub struct CallExpression {
    pub callee: CallExpressionCallee,
    /// NOTE(review): arguments are restricted to literals here — identifier
    /// arguments are not representable; confirm this is intended.
    pub arguments: Vec<Literal>,
    pub range: (usize, usize),
}
/// What a call targets: a bare identifier or a member chain.
#[derive(Debug, Clone)]
pub enum CallExpressionCallee {
    Identifier(Box<Identifier>),
    MemberExpression(Box<MemberExpression>),
}
/// One link of a property-access chain; `object` is `None` at the root.
#[derive(Debug, Clone)]
pub struct MemberExpression {
    pub object: Option<Box<MemberExpression>>,
    pub property: Identifier,
    pub range: (usize, usize),
}
impl MemberExpression {
    /// Creates a root member expression (no `object` yet) whose span is
    /// taken from `property`.
    pub fn new(property: Identifier) -> Self {
        // `(usize, usize)` is `Copy`, so a plain read suffices — the old
        // `.clone()` was a no-op (clippy::clone_on_copy).
        let range = property.range;
        Self {
            object: None,
            property,
            range
        }
    }
}
/// An expression used in statement position; only calls occur here.
#[derive(Debug, Clone)]
pub struct ExpressionStatement {
    pub expression: CallExpression,
    pub range: (usize, usize),
}
/// Any node that can appear in a program or block body.
#[derive(Debug, Clone)]
pub enum AstNode {
    VariableDeclaration(VariableDeclaration),
    BlockStatement(BlockStatement),
    FunctionDeclaration(FunctionDeclaration),
    ExpressionStatement(ExpressionStatement),
}
use std::fmt::{self, Display};
use crate::ast::{BinOp, Expression, LValue, Program, Statement, UnaryOp};
/// A value position in three-address code: either an immediate literal or
/// the name of a variable/temporary.
#[derive(Debug, Clone)]
pub enum Operand {
    Literal(String),
    Variable(String),
}
impl Display for Operand {
    /// Both variants print as their inner text, with no decoration.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let text = match self {
            Operand::Literal(inner) | Operand::Variable(inner) => inner,
        };
        write!(f, "{}", text)
    }
}
/// Counters used while emitting IR: field `.0` numbers temporaries and
/// field `.1` numbers labels.
#[derive(Debug, Clone, Copy)]
pub struct Context(pub i32, pub i32);
impl Context {
    /// A fresh context with both counters at zero.
    pub fn new() -> Self {
        Self(0, 0)
    }
    /// Returns the next temporary name: t1, t2, t3, ...
    fn get_new_name(&mut self) -> String {
        self.0 += 1;
        let id = self.0;
        format!("t{}", id)
    }
    /// Returns the next label name, spaced by 10: L10, L20, L30, ...
    fn get_new_label(&mut self) -> String {
        self.1 += 10;
        let id = self.1;
        format!("L{}", id)
    }
}
// A three-address intermediate representation: each instruction has a
// destination and at most two source operands.
// (Translated from the original Portuguese comment: "Um código de
// representação intermediário de 3 endereços".)
#[derive(Debug, Clone)]
pub enum IR {
    /// Jump target; rendered as `name:`.
    Label(String),
    /// A bare value carrier; renders as empty text (see `Display`).
    Operand(Operand),
    /// `dest = a <op> b`.
    BinOp(String, Operand, BinOp, Operand),
    /// `dest = <op> a`.
    UnaryOp(String, UnaryOp, Operand),
    /// `dest = a`.
    Assignment(String, Operand),
    /// `dest = a[index]`.
    AccessArray(String, Operand, Operand),
    /// `dest = alloc size`.
    Alloc(String, Operand),
    /// `ty name` — a variable declaration.
    Declare(String, Operand),
    /// `command operand`, e.g. `print x`, `read x`, `return`.
    Command(String, Operand),
    /// `if cond goto label`.
    IfTrue(String, String),
    /// `if False cond goto label`.
    IfFalse(String, String),
    /// Unconditional `goto label`.
    Goto(String),
}
/// Returns the destination operand of the last emitted instruction —
/// the convention every `generate_code` impl uses to expose its result.
///
/// Takes a slice instead of `&Vec<IR>` (callers coerce transparently).
///
/// # Panics
/// Panics if `instructions` is empty, or if the last instruction is a
/// variant without a destination (see `IR::get_var_name`).
fn get_last_var_name(instructions: &[IR]) -> Operand {
    instructions.last().unwrap().get_var_name()
}
impl Display for IR {
    /// Renders one instruction of the textual TAC listing. Instructions are
    /// indented with a tab, labels sit flush left, and `Operand` nodes
    /// (pure value carriers) render as an empty string.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            IR::BinOp(var, rhs, op, lhs) => write!(f, "\t{} = {}{}{}", var, rhs, op, lhs),
            IR::UnaryOp(var, op, operand) => write!(f, "\t{} = {}{}", var, op, operand),
            IR::Assignment(var, operand) => write!(f, "\t{} = {}", var, operand),
            IR::Command(command, operand) => write!(f, "\t{} {}", command, operand),
            IR::Alloc(var, operand) => write!(f, "\t{} = alloc {}", var, operand),
            IR::Label(label) => write!(f, "{}:", label),
            // Operands only exist so a following instruction can reference
            // their value; they contribute no listing text.
            IR::Operand(_operand) => write!(f, ""),
            IR::IfTrue(cond, label) => write!(f, "\tif {} goto {}", cond, label),
            IR::IfFalse(cond, label) => write!(f, "\tif False {} goto {}", cond, label),
            IR::Goto(label) => write!(f, "\tgoto {}", label),
            IR::Declare(ty, name) => write!(f, "\t{} {}", ty, name),
            IR::AccessArray(var, operand, index) => write!(f, "\t{} = {}[{}]", var, operand, index),
        }
    }
}
impl IR {
    /// The operand a later instruction should use to reference this
    /// instruction's result.
    ///
    /// # Panics
    /// Panics for variants that produce no value (labels, jumps, commands,
    /// declarations).
    fn get_var_name(&self) -> Operand {
        match self {
            // Every destination-carrying variant stores its name first.
            IR::BinOp(dest, _, _, _)
            | IR::UnaryOp(dest, _, _)
            | IR::Assignment(dest, _)
            | IR::Alloc(dest, _)
            | IR::AccessArray(dest, _, _) => Operand::Variable(dest.to_string()),
            // A bare operand is referenced by its rendered text.
            IR::Operand(op) => Operand::Variable(op.to_string()),
            _ => panic!("there is no variable name for this variant"),
        }
    }
}
/// Anything that can lower itself to a list of TAC instructions.
pub(crate) trait CodeGeneratable {
    /// Emits the instructions for `self`, drawing fresh temporary and label
    /// names from `ctx`. By convention the last instruction carries the
    /// result (see `get_last_var_name`).
    fn generate_code(&self, ctx: &mut Context) -> Vec<IR>;
}
impl CodeGeneratable for Expression<'_> {
    /// Lowers an expression to TAC. The emitted list's last instruction
    /// holds the expression's result.
    fn generate_code(&self, ctx: &mut Context) -> Vec<IR> {
        match self {
            Expression::Binary(rhs, op, lhs) => {
                // Evaluate both sides first, then combine their result
                // temporaries into a fresh one.
                let r_code = rhs.generate_code(ctx);
                let l_code = lhs.generate_code(ctx);
                let var = ctx.get_new_name();
                let code = IR::BinOp(
                    var.clone(),
                    get_last_var_name(&r_code),
                    *op,
                    get_last_var_name(&l_code),
                );
                [r_code, l_code, vec![code]].concat()
            }
            Expression::Unary(op, expr) => {
                let expr_code = expr.generate_code(ctx);
                let var = ctx.get_new_name();
                let code = IR::UnaryOp(var.clone(), *op, get_last_var_name(&expr_code));
                [expr_code, vec![code]].concat()
            }
            // Literals emit a bare Operand node so the value can be
            // referenced without allocating a temporary.
            Expression::IntLiteral(val) => vec![IR::Operand(Operand::Literal(val.to_string()))],
            Expression::FloatLiteral(val) => vec![IR::Operand(Operand::Literal(val.to_string()))],
            Expression::StringLiteral(val) => vec![IR::Operand(Operand::Literal(val.to_string()))],
            Expression::Null => vec![IR::Operand(Operand::Literal("null".to_string()))],
            Expression::LValue(lval) => lval.generate_code(ctx),
            // Function calls are not lowered yet (see Program::FuncList TODO).
            Expression::FunctionCall(_, _) => todo!(),
            Expression::Alloc(ty) => vec![IR::Alloc(
                ctx.get_new_name(),
                Operand::Variable(ty.to_string()),
            )],
        }
    }
}
impl CodeGeneratable for Statement<'_> {
    /// Lowers a statement to TAC. Control flow uses labels plus paired
    /// IfTrue/IfFalse jumps; labels are allocated before the condition code
    /// so jump targets are known up front.
    fn generate_code(&self, ctx: &mut Context) -> Vec<IR> {
        match self {
            Statement::VariableDeclaration(ty, name) => {
                vec![IR::Declare(
                    ty.to_string(),
                    Operand::Variable(name.to_string()),
                )]
            }
            Statement::If {
                condition,
                true_path,
                false_path,
            } => {
                // Layout: l_begin / cond / jumps / l_true: then-code,
                // goto l_end / l_false: else-code / l_end.
                // NOTE(review): nothing visible jumps to l_begin — confirm
                // whether a later pass relies on it.
                let l_begin = ctx.get_new_label();
                let l_true = ctx.get_new_label();
                let l_false = ctx.get_new_label();
                let l_end = ctx.get_new_label();
                let cond_code = condition.generate_code(ctx);
                let cond_var = get_last_var_name(&cond_code);
                [
                    vec![IR::Label(l_begin)],
                    cond_code,
                    vec![
                        IR::IfTrue(cond_var.to_string(), l_true.clone()),
                        IR::IfFalse(cond_var.to_string(), l_false.clone()),
                    ],
                    vec![IR::Label(l_true)],
                    true_path.generate_code(ctx),
                    vec![IR::Goto(l_end.clone()), IR::Label(l_false)],
                    // A missing else branch contributes no instructions;
                    // l_false then falls straight through to l_end.
                    false_path
                        .as_ref()
                        .map(|stmt| stmt.generate_code(ctx))
                        .unwrap_or(vec![]),
                    vec![IR::Label(l_end)],
                ]
                .concat()
            }
            // Statements are concatenated in source order.
            Statement::StatementList(stmts) => stmts.into_iter().fold(vec![], |code, stmt| {
                let n_code = stmt.generate_code(ctx);
                [code, n_code].concat()
            }),
            Statement::Read(lval) => {
                // The clone keeps l_code borrowable for get_last_var_name
                // while also moving it into the concat.
                let l_code = lval.generate_code(ctx);
                [
                    l_code.clone(),
                    vec![IR::Command("read".to_string(), get_last_var_name(&l_code))],
                ]
                .concat()
            }
            Statement::Print(expr) => {
                let code = expr.generate_code(ctx);
                let var = get_last_var_name(&code);
                [code, vec![IR::Command("print".to_string(), var)]].concat()
            }
            // return/break carry an empty literal operand, which renders
            // as a trailing space after the command name.
            Statement::Return => vec![IR::Command(
                "return".to_string(),
                Operand::Literal(String::new()),
            )],
            Statement::Break => vec![IR::Command(
                "break".to_string(),
                Operand::Literal(String::new()),
            )],
            Statement::Assignment(lval, expr) => {
                let l_code = lval.generate_code(ctx);
                let expr_code = expr.generate_code(ctx);
                [
                    l_code.clone(),
                    expr_code.clone(),
                    vec![IR::Assignment(
                        get_last_var_name(&l_code).to_string(),
                        get_last_var_name(&expr_code),
                    )],
                ]
                .concat()
            }
            Statement::For {
                initial_assignment,
                condition,
                post_assignment,
                body,
            } => {
                // Layout: init / l_check: cond / jumps / l_true: body,
                // post, goto l_check / l_false (loop exit).
                let l_check = ctx.get_new_label();
                let l_true = ctx.get_new_label();
                let l_false = ctx.get_new_label();
                let cond_code = condition.generate_code(ctx);
                let cond_var = get_last_var_name(&cond_code);
                [
                    initial_assignment.generate_code(ctx),
                    vec![IR::Label(l_check.clone())],
                    cond_code,
                    vec![
                        IR::IfTrue(cond_var.to_string(), l_true.clone()),
                        IR::IfFalse(cond_var.to_string(), l_false.clone()),
                    ],
                    vec![IR::Label(l_true)],
                    body.generate_code(ctx),
                    post_assignment.generate_code(ctx),
                    vec![IR::Goto(l_check)],
                    vec![IR::Label(l_false)],
                ]
                .concat()
            }
        }
    }
}
impl CodeGeneratable for Program<'_> {
    /// Lowers a whole program. A bare statement lowers directly; a function
    /// list emits a label per function followed by its lowered body.
    fn generate_code(&self, ctx: &mut Context) -> Vec<IR> {
        match self {
            Program::Statement(stmt) => stmt.generate_code(ctx),
            // TODO: not entirely implemented
            // we won't have function calls and parameters working correctly
            Program::FuncList(f) => f
                .into_iter()
                .flat_map(|f| {
                    [
                        vec![IR::Label(ctx.get_new_label())],
                        // Borrow the statements instead of cloning the whole
                        // function: generate_code only needs `&Statement`
                        // (the old `f.clone().body.into_iter()` deep-copied
                        // every function just to iterate it).
                        f.body
                            .iter()
                            .flat_map(|stmt| stmt.generate_code(ctx))
                            .collect::<Vec<IR>>(),
                    ]
                    .concat()
                })
                .collect(),
        }
    }
}
impl CodeGeneratable for LValue<'_> {
    /// Lowers an l-value. A bare name becomes an `Operand`; an array access
    /// evaluates the base and the index, then reads into a fresh temporary.
    fn generate_code(&self, ctx: &mut Context) -> Vec<IR> {
        match self {
            LValue::NameReference(name) => vec![IR::Operand(Operand::Variable(name.to_string()))],
            LValue::ArrayAccess(lval, expr) => {
                let l_code = lval.generate_code(ctx);
                let e_code = expr.generate_code(ctx);
                // Build the access instruction first so the borrows taken by
                // get_last_var_name end before the vectors are moved into
                // concat — this removes the two redundant Vec clones the
                // previous version needed.
                let access = IR::AccessArray(
                    ctx.get_new_name(),
                    get_last_var_name(&l_code),
                    get_last_var_name(&e_code),
                );
                [l_code, e_code, vec![access]].concat()
            }
        }
    }
}
|
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use std::fmt::{self, Formatter, Display};
use kvproto::metapb;
use kvproto::raftpb;
use util::worker::Runnable;
use util::escape;
use pd::PdClient;
// Use an asynchronous thread to tell pd something.
/// Requests handled asynchronously by the PD worker.
pub enum Task {
    /// Ask PD to approve a configuration (peer) change for a region.
    AskChangePeer {
        // Not yet forwarded to PD — see the TODO in `Runner::run`.
        change_type: raftpb::ConfChangeType,
        region: metapb::Region,
        peer: metapb::Peer,
    },
    /// Ask PD whether the region may split at `split_key`.
    AskSplit {
        region: metapb::Region,
        split_key: Vec<u8>,
        peer: metapb::Peer,
    },
    /// Store heartbeat; delivered via PD's put-store call for now.
    Heartbeat {
        store: metapb::Store,
    },
}
impl Display for Task {
    /// Human-readable summary used by the worker's log lines; peers are
    /// deliberately omitted, and split keys are escaped for printability.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match *self {
            Task::AskChangePeer { ref change_type, ref region, .. } => {
                write!(f, "ask {:?} for region {}", change_type, region.get_id())
            }
            Task::AskSplit { ref region, ref split_key, .. } => {
                write!(f,
                       "ask split region {} with key {}",
                       region.get_id(),
                       escape(&split_key))
            }
            Task::Heartbeat { ref store } => write!(f, "heartbeat for store {}", store.get_id()),
        }
    }
}
/// Worker that forwards `Task`s to a shared PD client.
pub struct Runner<T: PdClient> {
    pd_client: Arc<T>,
}
impl<T: PdClient> Runner<T> {
    /// Wraps an existing shared PD client.
    pub fn new(pd_client: Arc<T>) -> Runner<T> {
        Runner { pd_client: pd_client }
    }
}
impl<T: PdClient> Runnable<Task> for Runner<T> {
    /// Executes one queued task against PD. Errors are logged and dropped —
    /// callers get no feedback; PD interaction is best-effort here.
    fn run(&mut self, task: Task) {
        info!("executing task {}", task);
        let res = match task {
            Task::AskChangePeer { region, peer, .. } => {
                // TODO: We may add change_type in pd protocol later.
                self.pd_client.ask_change_peer(region, peer)
            }
            Task::AskSplit { region, split_key, peer } => {
                self.pd_client.ask_split(region, &split_key, peer)
            }
            Task::Heartbeat { store } => {
                // Now we use put store protocol for heartbeat.
                self.pd_client.put_store(store)
            }
        };
        if let Err(e) = res {
            error!("executing pd command failed {:?}", e);
        }
    }
}
|
//! Implementation of `errno` functionality for WASI.
//!
//! Adapted from `unix.rs`.
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::str;
use libc::{self, c_char, c_int, size_t, strlen};
use crate::Errno;
/// Borrowing "lossy" conversion: returns the longest valid UTF-8 prefix of
/// `input` as a `&str`, truncating at the first invalid byte instead of
/// substituting replacement characters (and without allocating).
fn from_utf8_lossy(input: &[u8]) -> &str {
    let valid_len = match str::from_utf8(input) {
        Ok(whole) => return whole,
        Err(err) => err.valid_up_to(),
    };
    // SAFETY: `valid_up_to()` is the length of the longest prefix of
    // `input` that is valid UTF-8, so the slice below is valid UTF-8.
    unsafe { str::from_utf8_unchecked(&input[..valid_len]) }
}
/// Looks up the description of `err` via `strerror_r` and passes it to
/// `callback` as `Ok(&str)`, or as `Err(Errno)` if the lookup itself fails.
///
/// An `ERANGE` failure (message longer than the 1024-byte buffer) is not
/// treated as an error: the truncated text is passed along instead.
pub fn with_description<F, T>(err: Errno, callback: F) -> T
where
    F: FnOnce(Result<&str, Errno>) -> T,
{
    let mut buf = [0u8; 1024];
    let c_str = unsafe {
        // SAFETY/assumption: strerror_r NUL-terminates `buf` on success and
        // on ERANGE, so strlen below stays within the buffer — this is the
        // documented libc contract; confirm for the target's libc.
        if strerror_r(err.0, buf.as_mut_ptr() as *mut _, buf.len() as size_t) < 0 {
            let fm_err = errno();
            if fm_err != Errno(libc::ERANGE) {
                return callback(Err(fm_err));
            }
        }
        let c_str_len = strlen(buf.as_ptr() as *const _);
        &buf[..c_str_len]
    };
    callback(Ok(from_utf8_lossy(c_str)))
}
/// Name of the underlying libc routine, for callers' error reporting.
pub const STRERROR_NAME: &str = "strerror_r";
/// Reads the calling thread's current `errno` value.
pub fn errno() -> Errno {
    unsafe { Errno(*__errno_location()) }
}
/// Overwrites the calling thread's `errno` value.
pub fn set_errno(Errno(new_errno): Errno) {
    unsafe {
        *__errno_location() = new_errno;
    }
}
extern "C" {
    // Address of the per-thread errno slot, provided by the C runtime.
    fn __errno_location() -> *mut c_int;
    // Thread-safe strerror variant writing into a caller-supplied buffer.
    fn strerror_r(errnum: c_int, buf: *mut c_char, buflen: size_t) -> c_int;
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod data; // Inspect data maintainer/updater/scanner-from-vmo; compare engine
mod metrics; // Evaluates memory performance of Inspect library
mod puppet; // Interface to target Inspect library wrapper programs (puppets)
mod results; // Stores and formats reports-to-user
mod runner; // Coordinates testing operations
mod trials; // Defines the trials to run
use {
argh::FromArgs,
failure::{bail, Error},
fidl_test_inspect_validate as validate, fuchsia_async as fasync, fuchsia_syslog as syslog,
log::*,
serde_derive::Serialize,
std::str::FromStr,
};
/// Initializes fuchsia syslog with no extra tags; panics (via `expect`)
/// if logger setup fails.
fn init_syslog() {
    syslog::init_with_tags(&[]).expect("should not fail");
    debug!("Driver did init logger");
}
/// Validate Inspect VMO formats written by 'puppet' programs controlled by
/// this Validator program and exercising Inspect library implementations.
//#[derive(StructOpt, Debug)]
#[derive(Debug, FromArgs)]
// NOTE: the `///` field comments below double as `argh` --help text and are
// therefore user-visible at runtime — edit them with care.
struct Opt {
    /// report results in a pretty human-readable format. Without this flag,
    /// results will be printed as JSON.
    #[argh(option, long = "output", default = "OutputType::Json")]
    output: OutputType,
    /// when trees differ, render 'full' text, 'diff' type difference, or 'both'.
    #[argh(option, long = "difftype", default = "DiffType::Full")]
    diff_type: DiffType,
    /// required arg(s): The URL(s) of the puppet(s).
    #[argh(option, long = "url")]
    puppet_urls: Vec<String>,
    // The next two flags are accepted but ignored (as their help says).
    /// quiet has no effect.
    #[argh(switch, long = "quiet")]
    quiet: bool,
    #[argh(switch, long = "verbose")]
    /// verbose has no effect.
    verbose: bool,
    #[argh(switch, long = "version", short = 'v')]
    /// version prints the version information and exits.
    version: bool,
}
/// How the final report is rendered.
#[derive(Debug)]
enum OutputType {
    Text,
    Json,
}
impl FromStr for OutputType {
    type Err = Error;
    /// Parses the `--output` flag; only the exact strings "text" and
    /// "json" are accepted (no case folding).
    fn from_str(s: &str) -> Result<OutputType, Error> {
        Ok(match s {
            "text" => OutputType::Text,
            "json" => OutputType::Json,
            _ => bail!("Output type must be 'text' or 'json'"),
        })
    }
}
/// When reporting a discrepancy between local and remote Data trees, should the output include:
/// - The full rendering of both trees?
/// - The condensed diff between the trees? (This may still be quite large.)
/// - Both full and condensed renderings?
#[derive(Clone, Copy, Debug, Serialize)]
pub enum DiffType {
    Full,
    Diff,
    Both,
}
impl FromStr for DiffType {
    type Err = Error;
    /// Parses the `--difftype` flag; accepts exactly "full", "diff" or
    /// "both".
    fn from_str(s: &str) -> Result<DiffType, Error> {
        Ok(match s {
            "full" => DiffType::Full,
            "diff" => DiffType::Diff,
            "both" => DiffType::Both,
            _ => bail!("Diff type must be 'full' or 'diff' or 'both'"),
        })
    }
}
/// Entry point: parses flags, runs every puppet's trial suite, prints the
/// report in the chosen format, and fails the process if any trial failed.
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
    init_syslog();
    let mut results = results::Results::new();
    let Opt { output, puppet_urls, version, diff_type, .. } = argh::from_env();
    // --version short-circuits before any puppet is contacted.
    if version {
        println!("Inspect Validator version 0.8. See README.md for more information.");
        return Ok(());
    }
    results.diff_type = diff_type;
    run_all_puppets(puppet_urls, &mut results).await;
    match output {
        OutputType::Text => results.print_pretty_text(),
        OutputType::Json => println!("{}", results.to_json()),
    }
    // Exiting via bail! gives CI a nonzero status on validation failure.
    if results.failed() {
        bail!("A test failed")
    } else {
        Ok(())
    }
}
/// Runs the full trial suite against every puppet URL in order.
///
/// Supplying no URLs records an error in `results` (the loop below is then
/// a no-op); per-puppet failures are recorded by `runner::run_all_trials`.
async fn run_all_puppets(urls: Vec<String>, results: &mut results::Results) {
    // `is_empty()` is the idiomatic (clippy::len_zero) emptiness check.
    if urls.is_empty() {
        results.error("At least one component URL is required.".to_string());
    }
    for url in urls {
        runner::run_all_trials(&url, results).await;
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// No URLs must be recorded as a failure, not a vacuous success.
    #[fasync::run_singlethreaded(test)]
    async fn url_is_required() {
        let mut results = results::Results::new();
        run_all_puppets(vec![], &mut results).await;
        assert!(results.failed());
        assert!(results.to_json().contains("At least one"));
    }
    /// An unresolvable component URL must be reported as a failure.
    #[fasync::run_singlethreaded(test)]
    async fn bad_url_fails() {
        let mut results = results::Results::new();
        run_all_puppets(vec!["a".to_owned()], &mut results).await;
        assert!(results.failed());
        // `assert!(cond, non_literal_expr)` as the panic message is
        // deprecated and a hard error from the 2021 edition; route the
        // diagnostic through a format string instead.
        assert!(results.to_json().contains("URL may be invalid"), "{}", results.to_json());
    }
    /// A failure on one URL must not prevent later URLs from being tried.
    #[fasync::run_singlethreaded(test)]
    async fn all_urls_are_tried() {
        let mut results = results::Results::new();
        run_all_puppets(vec!["a".to_owned(), "b".to_owned()], &mut results).await;
        assert!(results.to_json().contains("invalid: a"));
        assert!(results.to_json().contains("invalid: b"));
    }
    // The only way to test success is to actually start a component, and that's
    // not suitable for unit tests. Failure on a valid URL will be caught in integration
    // tests.
}
|
use std::io::prelude::*;
/// Prints the JSON Schema for `trycmd`'s `OneShot` command file format to
/// stdout, pretty-printed.
fn main() {
    let schema = schemars::schema_for!(trycmd::schema::OneShot);
    let schema = serde_json::to_string_pretty(&schema).unwrap();
    std::io::stdout().write_all(schema.as_bytes()).unwrap();
}
|
use std::time::Instant;
extern crate rustypawn;
use rustypawn::Game;
use rustypawn::millis_since;
/// Counts the leaf nodes of the move tree at exactly `depth` plies (perft).
///
/// Each successfully made move is recursed into and then unmade, so `game`
/// is restored to its original state on return.
/// NOTE(review): `make_move` returning false is treated as "move rejected"
/// and skipped — presumably pseudo-legal moves that leave the king in
/// check; confirm in `Game`.
fn perft_sub(game: &mut Game, depth: usize) -> usize {
    if depth == 0 {
        return 1;
    }
    let move_list = game.generate_moves();
    let mut result = 0;
    for mv in move_list {
        if game.make_move(mv) {
            let count_sub = perft_sub(game, depth - 1);
            game.unmake_move(mv);
            result += count_sub;
        }
    }
    result
}
/// Runs a perft count of `depth` plies from the position given in `fen`.
///
/// An unparseable FEN yields 0 rather than an error.
pub fn perft(fen: &str, depth: usize) -> usize {
    Game::from_fen(fen)
        .map(|mut game| perft_sub(&mut game, depth))
        .unwrap_or(0)
}
#[cfg(test)]
mod tests {
    use super::perft;
    // Each test pins perft node counts for a fixed FEN at several depths.
    // NOTE(review): the expected values match widely published perft
    // tables for these positions — confirm against the usual references.
    #[test]
    fn perft_initial_position() {
        let fen = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0";
        assert_eq!(perft(fen, 1), 20);
        assert_eq!(perft(fen, 2), 400);
        assert_eq!(perft(fen, 3), 8902);
        assert_eq!(perft(fen, 4), 197281);
    }
    // "Kiwipete": stresses castling, pins and en-passant interactions.
    #[test]
    fn perft_position_2() {
        let fen = "r3k2r/p1ppqpb1/bn2pnp1/3PN3/1p2P3/2N2Q1p/PPPBBPPP/R3K2R w KQkq - 0";
        assert_eq!(perft(fen, 1), 48);
        assert_eq!(perft(fen, 2), 2039);
        assert_eq!(perft(fen, 3), 97862);
    }
    #[test]
    fn perft_position_3() {
        let fen = "8/2p5/3p4/KP5r/1R3p1k/8/4P1P1/8 w - - 0";
        assert_eq!(perft(fen, 1), 14);
        assert_eq!(perft(fen, 2), 191);
        assert_eq!(perft(fen, 3), 2812);
        assert_eq!(perft(fen, 4), 43238);
        assert_eq!(perft(fen, 5), 674624);
    }
    // Positions 4w/4b are mirror images and must give identical counts.
    #[test]
    fn perft_position_4w() {
        let fen = "r3k2r/Pppp1ppp/1b3nbN/nP6/BBP1P3/q4N2/Pp1P2PP/R2Q1RK1 w kq - 0";
        assert_eq!(perft(fen, 1), 6);
        assert_eq!(perft(fen, 2), 264);
        assert_eq!(perft(fen, 3), 9467);
        assert_eq!(perft(fen, 4), 422333);
    }
    #[test]
    fn perft_position_4b() {
        let fen = "r2q1rk1/pP1p2pp/Q4n2/bbp1p3/Np6/1B3NBn/pPPP1PPP/R3K2R b KQ - 0";
        assert_eq!(perft(fen, 1), 6);
        assert_eq!(perft(fen, 2), 264);
        assert_eq!(perft(fen, 3), 9467);
        assert_eq!(perft(fen, 4), 422333);
    }
    #[test]
    fn perft_position_5() {
        let fen = "rnbq1k1r/pp1Pbppp/2p5/8/2B5/8/PPP1NnPP/RNBQK2R w KQ - 1 8";
        assert_eq!(perft(fen, 1), 44);
        assert_eq!(perft(fen, 2), 1486);
        assert_eq!(perft(fen, 3), 62379);
    }
    #[test]
    fn perft_position_6() {
        let fen = "r4rk1/1pp1qppp/p1np1n2/2b1p1B1/2B1P1b1/P1NP1N2/1PP1QPPP/R4RK1 w - - 0 10";
        assert_eq!(perft(fen, 1), 46);
        assert_eq!(perft(fen, 2), 2079);
        assert_eq!(perft(fen, 3), 89890);
    }
}
/// Runs perft for `fen` at `depth`, asserts the node count equals
/// `verification`, then prints `name` and the count.
fn run_perft(name: &str, fen: &str, depth: usize, verification: usize) {
    let count = perft(fen, depth);
    assert_eq!(count, verification);
    println!("{} {}", name, count);
}
/// Benchmark driver: runs deeper perft counts than the unit tests and
/// reports the total wall-clock time.
fn main() {
    let start = Instant::now();
    run_perft("Initial position", "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0", 5, 4865609);
    run_perft("Kiwipete", "r3k2r/p1ppqpb1/bn2pnp1/3PN3/1p2P3/2N2Q1p/PPPBBPPP/R3K2R w KQkq -", 4, 4085603);
    run_perft("Position 3", "8/2p5/3p4/KP5r/1R3p1k/8/4P1P1/8 w - -", 6, 11030083);
    run_perft("Position 6", "r4rk1/1pp1qppp/p1np1n2/2b1p1B1/2B1P1b1/P1NP1N2/1PP1QPPP/R4RK1 w - - 0 10", 4, 3894594);
    println!("Time: {} ms", millis_since(&start));
}
|
#![feature(async_closure)]
mod entrypoint;
pub use entrypoint::entrypoint;
|
use std::{fs, slice};
use ve_shader_reflect::*;
/// Loads the example SPIR-V fragment shader, reflects its descriptor
/// bindings, and prints them in GLSL-like form.
fn main() {
    let path = format!(
        "{}/examples/shaders/gpass_simple_frag.spv",
        env!("CARGO_MANIFEST_DIR")
    );
    let spv = fs::read(path).unwrap();
    // Decode the bytes into properly aligned u32 words instead of
    // reinterpreting the Vec<u8> pointer: `spv.as_ptr() as *const u32`
    // with `slice::from_raw_parts` is undefined behavior whenever the byte
    // buffer is not 4-byte aligned. Trailing bytes (length not a multiple
    // of 4) are dropped, matching the old `spv.len() / 4`.
    // NOTE(review): assumes little-endian SPIR-V words (the common case);
    // the old cast assumed host endianness — confirm whether big-endian
    // modules must be supported.
    let words: Vec<u32> = spv
        .chunks_exact(4)
        .map(|b| u32::from_le_bytes([b[0], b[1], b[2], b[3]]))
        .collect();
    let info = ve_shader_reflect::reflect_shader(&words)
        .expect("Failed to reflect shader");
    print_bindings(&info.set_bindings);
}
/// Prints each reflected binding as a GLSL-like `layout (...) uniform ...;`
/// line on stdout.
fn print_bindings(bindings: &[SetBinding]) {
    for b in bindings {
        print!("layout (set={}, binding={}) uniform ", b.set, b.binding);
        match &b.data {
            SetBindingData::Sampler => {
                print!("sampler ");
            }
            // Storage images, keyed by dimensionality.
            SetBindingData::Image { dim } => match dim {
                ImageDimension::One => print!("image1D "),
                ImageDimension::Two => print!("image2D "),
                ImageDimension::Three => print!("image3D "),
                ImageDimension::Cube => print!("imageCube "),
                ImageDimension::SubpassInput => print!("subpassInput "),
            },
            // Combined image-samplers.
            SetBindingData::SampledImage { dim } => match dim {
                ImageDimension::One => print!("sampler1D "),
                ImageDimension::Two => print!("sampler2D "),
                ImageDimension::Three => print!("sampler3D "),
                ImageDimension::Cube => print!("samplerCube "),
                ImageDimension::SubpassInput => print!("samplerSubpassInput "),
            },
            // Uniform blocks additionally dump their member layout.
            SetBindingData::UniformBuffer { layout } => {
                println!("{} {{", layout.block_name);
                print_block_layout(layout);
                print!("}} ");
            }
        }
        println!("{};", b.var_name);
    }
}
/// Prints one line per uniform-block member with its byte offset, type,
/// name and size.
fn print_block_layout(layout: &BlockLayout) {
    for member in &layout.members {
        print!("\tlayout (Offset={}) ", member.offset);
        match member.kind {
            BlockMemberType::Unsupported => print!("<unsupported> "),
            BlockMemberType::Float => print!("float "),
            // `dim` is the vector/matrix dimension, e.g. vec4 / mat4.
            BlockMemberType::FloatVector(dim) => print!("vec{dim} "),
            BlockMemberType::FloatMatrix(dim) => print!("mat{dim} "),
        }
        println!("{}; // Size={}", member.name, member.size);
    }
}
|
pub mod ping;
pub mod about;
// pub mod imageboard;
pub mod vend;
pub mod eightball;
pub mod thesevoices; |
//match if let loop while break continue return if for
/// Demonstrates `match` on an integer and on a mixed tuple.
fn hoge(m: isize) {
    match m {
        0 => println!("0"),
        // `..=` replaces the `...` inclusive-range pattern, which is
        // deprecated and a hard error from the 2021 edition on.
        1..=10 => println!("small number!"),
        n => println!("too big : {}", n),
    }
    match (0.0, 100) {
        // Matching directly on a float literal pattern is deprecated;
        // an equality guard keeps the same semantics (0.0 == -0.0 too).
        (f, 0) if f == 0.0 => println!("all zeros"),
        (f, 0..=10) => println!("float {} with small number!", f),
        _ => println!("other tuple"),
    }
}
/// Entry point: exercises the match demo with a value above the small range.
fn main(){
    hoge(100)
}
|
/// A bit set backed by bytes; bits are numbered 0.. with the most
/// significant bit of each byte first (bit 0 is the MSB of `bf[0]`).
#[derive(Debug)]
pub struct Bitfield {
    pub bf: Vec<u8>,
}
impl Bitfield {
    /// Returns true if the bitfield has item at location `x`.
    ///
    /// Out-of-range `x` returns false instead of panicking.
    pub fn has(&self, x: usize) -> bool {
        // Valid indices are 0..len*8. The previous check (`len * 8 < x`)
        // let x == len * 8 slip through, so `self.bf[i]` below panicked
        // with an out-of-bounds byte index; `>=` closes that off-by-one.
        if x >= self.bf.len() * 8 {
            return false;
        }
        let i = x / 8;
        let j = 7 - x % 8;
        (self.bf[i] >> j) & 1 == 1
    }
    /// Switches the bit at location `x` to 1; out-of-range `x` is ignored.
    pub fn add(&mut self, x: usize) {
        // Same off-by-one as in `has`: x == len * 8 used to panic here.
        if x >= self.bf.len() * 8 {
            return;
        }
        let i = x / 8;
        let j = 7 - x % 8;
        self.bf[i] |= 1 << j;
    }
    /// Number of bytes (not bits) in the bitfield.
    pub fn len(&self) -> usize {
        self.bf.len()
    }
    /// True when the bitfield holds no bytes (companion to `len`).
    pub fn is_empty(&self) -> bool {
        self.bf.is_empty()
    }
    /// Wraps an existing byte vector.
    pub fn from(bf: Vec<u8>) -> Bitfield {
        Bitfield { bf }
    }
    /// Creates a zeroed bitfield of `i` bytes.
    pub fn new(i: usize) -> Bitfield {
        Bitfield { bf: vec![0; i] }
    }
}
#[cfg(test)]
mod tests {
    // vec![2] is 0b0000_0010: with MSB-first numbering only bit 6 is set.
    #[test]
    fn test_bf_has() {
        let bf = super::Bitfield { bf: vec![2] };
        assert!(bf.has(6));
        assert!(!bf.has(4));
    }
    // Setting a bit makes `has` observe it, in either byte of a
    // two-byte field.
    #[test]
    fn test_add() {
        let mut bf = super::Bitfield { bf: vec![0, 0] };
        assert!(!bf.has(2));
        bf.add(2);
        assert!(bf.has(2));
        assert!(!bf.has(10));
        bf.add(10);
        assert!(bf.has(10));
    }
}
|
use std::io::{self, BufRead};
/// Reads lines of the form "A-B C: PASSWORD" from stdin and counts lines
/// where exactly one of positions A and B (1-indexed) holds character C.
/// NOTE(review): this matches Advent of Code 2020 day 2 part 2 — confirm.
fn main() {
    let mut valid_count = 0;
    for line in io::stdin().lock().lines() {
        let line = line.expect("Line to be readable");
        let mut space_split = line.split(" ");
        match (space_split.next(), space_split.next(), space_split.next()) {
            (Some(count), Some(ch), Some(password)) => {
                // First token is the "A-B" position pair.
                let mut split_count = count.split("-");
                let (first_pos, second_pos): (usize, usize) =
                    match (split_count.next(), split_count.next()) {
                        (Some(min), Some(max)) => (
                            min.parse().expect("a number"),
                            max.parse().expect("a number"),
                        ),
                        _ => panic!("unexpected range"),
                    };
                // Second token may carry a trailing ':'; only its first
                // char matters.
                let ch = ch.chars().next().unwrap();
                let password = password.chars();
                // The iterator is consumed progressively: after taking the
                // char at first_pos, skipping (second - first - 1) more
                // lands the next `next()` on position second_pos.
                // NOTE(review): second_pos <= first_pos would underflow and
                // panic — input presumably guarantees A < B.
                let mut password = password.skip(first_pos - 1);
                let first = password.next();
                let mut password = password.skip(second_pos - first_pos - 1);
                let second = password.next();
                match (first, second) {
                    // Exactly one of the two positions matches (XOR).
                    (Some(first), Some(second))
                        if (first == ch && second != ch) || (first != ch && second == ch) =>
                    {
                        valid_count += 1;
                    }
                    // Short passwords: a single in-range match still counts.
                    (Some(first), None) if first == ch => valid_count += 1,
                    (None, Some(second)) if second == ch => valid_count += 1,
                    _ => {}
                };
            }
            _ => panic!("Unexpected input"),
        }
    }
    println!("Valid Password Count: {}", valid_count);
}
|
use std::collections::HashSet;
/// Sieve of Eratosthenes over `2..=upper_bound`.
///
/// Composites are flagged in place with a `0` sentinel; the surviving
/// candidates are returned in increasing order. Returns an empty vector
/// when `upper_bound < 2`.
pub fn primes_up_to(upper_bound: u64) -> Vec<u64> {
    const NOT_PRIME: u64 = 0;
    let mut nums: Vec<u64> = (2..=upper_bound).collect();
    let mut primes = Vec::new();
    for i in 0..nums.len() {
        let candidate = nums[i];
        if candidate == NOT_PRIME {
            continue;
        }
        // Flag every multiple beyond the candidate itself. Value v lives
        // at index v - 2.
        let mut multiple = candidate * 2;
        while multiple <= upper_bound {
            nums[(multiple - 2) as usize] = NOT_PRIME;
            multiple += candidate;
        }
        primes.push(candidate);
    }
    primes
}
/// Sieve variant that tracks composites in a `HashSet` instead of mutating
/// the candidate list. Returns primes in increasing order; empty when
/// `upper_bound < 2`.
pub fn primes_up_to_set(upper_bound: u64) -> Vec<u64> {
    // Number of candidates in 2..=upper_bound, computed directly — the old
    // code allocated a whole Vec just to read its length
    // (clippy::useless_vec). saturating_sub keeps upper_bound < 2 at zero.
    let candidate_count = upper_bound.saturating_sub(1) as usize;
    let mut not_prime: HashSet<u64> = HashSet::new();
    (0..candidate_count)
        .filter_map(|i| {
            let prime: u64 = (i + 2) as u64;
            // Flag all larger multiples (skip(1) leaves the value itself).
            (prime..=upper_bound)
                .step_by(prime as usize)
                .skip(1)
                .for_each(|j| {
                    not_prime.insert(j);
                });
            // `prime` is truly prime iff no smaller candidate flagged it.
            match not_prime.get(&prime) {
                None => Some(prime),
                _ => None,
            }
        })
        .collect()
    // original version:
    // while i < nums.len() {
    //     let x = *nums.get(i).unwrap();
    //     if not_prime.contains(&x) == false {
    //         primes.push(x);
    //     }
    //     let mut j = i;
    //     while j < nums.len() {
    //         not_prime.insert(*nums.get(j).unwrap());
    //         j += x as usize;
    //     }
    //     i += 1;
    // }
    // return primes;
}
// https://exercism.io/tracks/rust/exercises/sieve/solutions/63538fd8918d45759a54b660fc9c7636
/// Prime sieve by repeated `retain`: pop the smallest remaining candidate
/// (it is prime) and drop all of its multiples from the rest.
///
/// Uses an inclusive range so `limit == u32::MAX` no longer overflows in
/// `limit + 1` (a panic in debug builds, silent wrap in release).
pub fn primes_up_to_retain(limit: u32) -> Vec<u32> {
    let mut primes = Vec::new();
    // Reversed so pop() yields candidates in increasing order.
    let mut candidates: Vec<_> = (2..=limit).rev().collect();
    while let Some(prime) = candidates.pop() {
        primes.push(prime);
        candidates.retain(|n| n % prime != 0);
    }
    primes
}
// https://exercism.io/tracks/rust/exercises/sieve/solutions/5f64e30e37884b9aa001f7f45ca40579
/// Sieve using `Option` cells: composites are `take`n out (set to `None`),
/// so every surviving `Some` is a prime. Same output as `primes_up_to`.
pub fn primes_up_to_starred(upper_bound: u64) -> Vec<u64> {
    let mut cells: Vec<Option<u64>> = (2..=upper_bound).map(Some).collect();
    let mut primes = Vec::new();
    for i in 0..cells.len() {
        if let Some(prime) = cells[i].take() {
            // Clear every larger multiple; value v lives at index v - 2.
            let mut multiple = prime * 2;
            while multiple <= upper_bound {
                cells[(multiple - 2) as usize] = None;
                multiple += prime;
            }
            primes.push(prime);
        }
    }
    primes
}
|
use std::mem;
use crate::bindings::tflite as bindings;
use crate::interpreter::op_resolver::OpResolver;
use std::ffi::c_void;
cpp! {{
#include "tensorflow/lite/kernels/register.h"
using namespace tflite::ops::builtin;
}}
/// Safe wrapper around a C++ `BuiltinOpResolver` created on the C++ heap.
/// The Box holds the pointer obtained from `new` (see `Default`); it must
/// be released with C++ `delete`, which `Drop` does.
pub struct Resolver {
    handle: Box<bindings::OpResolver>,
}
impl Resolver {
    /// Registers a custom op `registration` under `name` by calling
    /// `BuiltinOpResolver::AddCustom` through the cpp! bridge.
    /// NOTE(review): `registration` must point to a valid, live
    /// TfLiteRegistration for as long as the resolver is used — this
    /// contract is not enforced here.
    pub fn add_custom(&mut self, name: &str, registration: * const c_void) {
        let name = ::std::ffi::CString::new(name).unwrap();
        let name_ptr = name.as_ptr();
        let resolver_ptr = self.handle.as_mut() as * mut _;
        unsafe {
            cpp!([resolver_ptr as "BuiltinOpResolver*", name_ptr as "const char*", registration as "TfLiteRegistration*"] -> () as "void" {
                resolver_ptr->AddCustom(name_ptr, registration);
                return;
            })
        }
    }
}
impl Drop for Resolver {
    #[allow(clippy::useless_transmute, clippy::forget_copy, deprecated)]
    fn drop(&mut self) {
        // Take the Box back apart and free the object with C++ `delete`,
        // matching the `new` in `Default`. mem::take leaves a default Box
        // behind so Rust does not also free the C++ allocation.
        let handle = Box::into_raw(mem::take(&mut self.handle));
        unsafe {
            cpp!([handle as "BuiltinOpResolver*"] {
                delete handle;
            });
        }
    }
}
impl OpResolver for Resolver {
    /// Borrows the underlying resolver for interpreter construction.
    fn get_resolver_handle(&self) -> &bindings::OpResolver {
        self.handle.as_ref()
    }
}
impl Default for Resolver {
    #[allow(clippy::forget_copy, deprecated)]
    fn default() -> Self {
        // Allocate the resolver on the C++ heap; ownership transfers to
        // the Box and is released in Drop via `delete`.
        let handle = unsafe {
            cpp!([] -> *mut bindings::OpResolver as "OpResolver*" {
                return new BuiltinOpResolver();
            })
        };
        let handle = unsafe { Box::from_raw(handle) };
        Self { handle }
    }
}
|
//! ```elixir
//! # label 4
//! # pushed to stack: (document, parent, old_child)
//! # returned from call: :ok
//! # full stack: (:ok, document, parent, old_child)
//! # returns: {:ok, new_child}
//! {:ok, new_child} = Lumen.Web.Document.create_element(document, "ul");
//! {:ok, replaced_child} = Lumen.Web.replace_child(parent, new_child, old_child)
//! ```
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
use super::label_5;
/// Label 4: the previous step finished with plain `:ok`; queue the
/// `create_element(document, "ul")` call and then label 5, which performs
/// the `replace_child`. See the module doc above for the stack layout.
#[native_implemented::label]
fn result(
    process: &Process,
    ok: Term,
    document: Term,
    parent: Term,
    old_child: Term,
) -> exception::Result<Term> {
    assert_eq!(ok, Atom::str_to_term("ok"));
    // All three handles must be resource references captured earlier.
    assert!(document.is_boxed_resource_reference());
    assert!(parent.is_boxed_resource_reference());
    assert!(old_child.is_boxed_resource_reference());
    let new_child_tag = process.binary_from_str("ul");
    // create_element runs first with explicit arguments...
    process.queue_frame_with_arguments(
        liblumen_web::document::create_element_2::frame()
            .with_arguments(false, &[document, new_child_tag]),
    );
    // ...then label 5; the `true` flag presumably splices the previous
    // frame's return value in front of these arguments — TODO confirm.
    process.queue_frame_with_arguments(label_5::frame().with_arguments(true, &[parent, old_child]));
    Ok(Term::NONE)
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use super::data_type::DataType;
use super::type_id::TypeID;
use crate::prelude::*;
/// Interval data type parameterized by the unit it measures (see `IntervalKind`).
#[derive(Clone, Hash, serde::Deserialize, serde::Serialize)]
pub struct IntervalType {
    kind: IntervalKind,
}
/// The calendar/clock unit an interval is expressed in.
/// `Doy`/`Dow` are day-of-year and day-of-week.
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum IntervalKind {
    Year,
    Quarter,
    Month,
    Day,
    Hour,
    Minute,
    Second,
    Doy,
    Dow,
}
impl fmt::Display for IntervalKind {
    /// Writes the kind as its upper-case SQL keyword — exactly the strings
    /// that `From<String>` accepts back.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let keyword = match *self {
            IntervalKind::Year => "YEAR",
            IntervalKind::Quarter => "QUARTER",
            IntervalKind::Month => "MONTH",
            IntervalKind::Day => "DAY",
            IntervalKind::Hour => "HOUR",
            IntervalKind::Minute => "MINUTE",
            IntervalKind::Second => "SECOND",
            IntervalKind::Doy => "DOY",
            IntervalKind::Dow => "DOW",
        };
        f.write_str(keyword)
    }
}
impl From<String> for IntervalKind {
    /// Parses an upper-case interval keyword (the strings `Display` emits).
    ///
    /// # Panics
    /// Panics on any other string; callers are expected to pass only values
    /// round-tripped through `Display`.
    fn from(s: String) -> Self {
        match s.as_str() {
            "YEAR" => IntervalKind::Year,
            "QUARTER" => IntervalKind::Quarter,
            "MONTH" => IntervalKind::Month,
            "DAY" => IntervalKind::Day,
            "HOUR" => IntervalKind::Hour,
            "MINUTE" => IntervalKind::Minute,
            "SECOND" => IntervalKind::Second,
            "DOY" => IntervalKind::Doy,
            "DOW" => IntervalKind::Dow,
            // Include the offending input so a bad value is diagnosable
            // from the panic message alone.
            other => unreachable!("unknown interval kind: {}", other),
        }
    }
}
impl IntervalType {
    /// Creates a bare interval type of the given unit.
    pub fn new(kind: IntervalKind) -> Self {
        Self { kind }
    }
    /// Creates the type already wrapped in the `DataTypeImpl` enum.
    pub fn new_impl(kind: IntervalKind) -> DataTypeImpl {
        DataTypeImpl::Interval(Self { kind })
    }
    /// The unit this interval measures.
    pub fn kind(&self) -> &IntervalKind {
        &self.kind
    }
}
impl DataType for IntervalType {
    fn data_type_id(&self) -> TypeID {
        TypeID::Interval
    }
    /// Human-readable name, e.g. `Interval(YEAR)`.
    fn name(&self) -> String {
        format!("Interval({})", self.kind)
    }
}
impl std::fmt::Debug for IntervalType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // NOTE(review): `name()` already embeds the kind, so this prints it
        // twice, e.g. `Interval(YEAR)(Year)` — confirm that is intended.
        write!(f, "{}({:?})", self.name(), self.kind)
    }
}
|
use std::mem::size_of;
// Demonstrates niche optimization: Option<&T>/Option<Box<T>> cost nothing
// (the null pointer encodes None), while Option<T> for a niche-free T pays
// an alignment-padded discriminant.
#[allow(dead_code)]
struct S {
    a: u64,
    b: u64,
    c: u64,
    d: u64,
}
/// Prints `size_of::<T>()` on its own line — identical output shape to the
/// original's inline `println!` calls.
fn show<T>() {
    println!("{}", size_of::<T>());
}
fn main() {
    show::<u32>(); // 4
    show::<u64>(); // 8
    show::<Option<u32>>(); // 8
    show::<Option<u64>>(); // 16
    show::<S>(); // 32
    show::<Option<S>>(); // 40: S has no niche, so Option adds padded space
    assert_eq!(size_of::<S>() + 8, size_of::<Option<S>>());
    show::<&S>(); // 8
    show::<Option<&S>>(); // 8: references are never null, None uses the niche
    show::<Box<S>>(); // 8
    show::<Option<Box<S>>>(); // 8
    assert_eq!(size_of::<&S>(), size_of::<Option<&S>>());
    assert_eq!(size_of::<Box<S>>(), size_of::<Option<Box<S>>>());
}
|
// Zstd-compressed protobuf graph serializers, re-exported here for
// convenient access without the submodule path.
pub mod zstd_proto_graph;
pub use zstd_proto_graph::{
    GraphDescriptionSerializer,
    GraphDescriptionSerializerError,
    IdentifiedGraphSerializer,
    IdentifiedGraphSerializerError,
    MergedGraphSerializer,
    MergedGraphSerializerError,
};
|
use crate::columns::{
CellDelegate, CellRenderExt, HeaderCell, ProvidedColumns, TableColumn, TextCell,
};
use crate::axis_measure::{AxisMeasure, AxisPair, LogIdx, TableAxis};
use crate::config::TableConfig;
use crate::data::{IndexedData, IndexedItems};
use crate::headings::{HeadersFromIndices, SuppliedHeaders};
use crate::table::TableArgs;
use crate::{CellRender, HeaderBuild};
use druid::{theme, Data, KeyOrValue};
use std::marker::PhantomData;
/// How cell sizes along one table axis are determined.
#[derive(Copy, Clone, Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]
pub enum AxisMeasurementType {
    /// Every item on the axis shares a single size.
    Uniform,
    Individual, /* O(n) in memory with number of items on the axis */
}
impl Default for AxisMeasurementType {
    /// Per-item measurement is the default (most flexible; O(n) memory).
    fn default() -> Self {
        AxisMeasurementType::Individual
    }
}
/// Builder collecting the columns, header delegates, config and axis
/// measurements needed to assemble `TableArgs` for a table widget.
pub struct TableBuilder<RowData: Data, TableData: Data> {
    // One entry per visible column, in display order.
    table_columns: Vec<TableColumn<RowData, Box<dyn CellDelegate<RowData>>>>,
    column_header_delegate: Box<dyn CellRender<String>>,
    row_header_delegate: Box<dyn CellRender<LogIdx>>,
    table_config: TableConfig,
    // TableData is only used by the impl blocks, never stored.
    phantom_td: PhantomData<TableData>,
    show_headings: ShowHeadings,
    measurements: AxisPair<AxisMeasurementType>,
}
impl<RowData: Data, TableData: IndexedData<Item = RowData, Idx = LogIdx>> Default
    for TableBuilder<RowData, TableData>
{
    /// Same as [`TableBuilder::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Which header strips a table should display.
#[derive(Debug, Clone, Data, Ord, PartialOrd, Eq, PartialEq)]
pub enum ShowHeadings {
    Both,
    /// Only the headers along the given axis.
    One(TableAxis),
    JustCells,
}
impl ShowHeadings {
    /// True when headings for axis `a` should be rendered under this setting.
    fn should_show(&self, a: &TableAxis) -> bool {
        if let Self::One(only) = self {
            only == a
        } else {
            matches!(self, Self::Both)
        }
    }
}
/// The `TableArgs` instantiation produced by `TableBuilder::build_args`:
/// index-derived row headers, supplied column headers, provided columns.
pub type DefaultTableArgs<TableData> = TableArgs<
    TableData,
    HeaderBuild<HeadersFromIndices<TableData>, Box<dyn CellRender<LogIdx>>>,
    HeaderBuild<SuppliedHeaders<Vec<String>, TableData>, Box<dyn CellRender<String>>>,
    ProvidedColumns<TableData, Box<dyn CellDelegate<<TableData as IndexedItems>::Item>>>,
>;
impl<RowData: Data, TableData: IndexedData<Item = RowData, Idx = LogIdx>>
    TableBuilder<RowData, TableData>
{
    /// Empty builder: no columns, default config, both heading strips shown,
    /// individually measured rows and columns.
    pub fn new() -> TableBuilder<RowData, TableData> {
        TableBuilder {
            table_columns: Vec::<TableColumn<RowData, Box<dyn CellDelegate<RowData>>>>::new(),
            // Row headers render the row's log index as text.
            row_header_delegate: Box::new(
                HeaderCell::new(TextCell::new().text_color(theme::LABEL_COLOR))
                    .on_result_of(|br: &LogIdx| br.0.to_string()),
            ),
            column_header_delegate: Box::new(HeaderCell::new(
                TextCell::new().text_color(theme::LABEL_COLOR),
            )),
            table_config: TableConfig::new(),
            phantom_td: PhantomData::default(),
            show_headings: ShowHeadings::Both,
            measurements: AxisPair::new(
                AxisMeasurementType::Individual,
                AxisMeasurementType::Individual,
            ),
        }
    }
    /// Sets the cell border thickness (builder style).
    pub fn border(mut self, thickness: impl Into<KeyOrValue<f64>>) -> Self {
        self.table_config.cell_border_thickness = thickness.into();
        self
    }
    /// Chooses which heading strips to display.
    pub fn headings(mut self, show_headings: ShowHeadings) -> Self {
        self.show_headings = show_headings;
        self
    }
    /// Appends a pre-built column (builder style).
    pub fn with(mut self, col: TableColumn<RowData, Box<dyn CellDelegate<RowData>>>) -> Self {
        self.table_columns.push(col);
        self
    }
    /// Builder-style variant of [`Self::add_column`].
    pub fn with_column<CD: CellDelegate<RowData> + 'static>(
        mut self,
        header: impl Into<String>,
        cell_delegate: CD,
    ) -> Self {
        self.add_column(header, cell_delegate);
        self
    }
    /// Appends a column rendered by `cell_render` under the given header.
    pub fn add_column<CD: CellDelegate<RowData> + 'static>(
        &mut self,
        header: impl Into<String>,
        cell_render: CD,
    ) {
        self.table_columns
            .push(TableColumn::new(header, Box::new(cell_render)));
    }
    /// Overrides the measurement strategy for one axis.
    pub fn measuring_axis(mut self, axis: TableAxis, measure: AxisMeasurementType) -> Self {
        self.measurements[axis] = measure;
        self
    }
    /// Builds a measure for `axis` with the given default item size.
    pub fn build_measure(&self, axis: TableAxis, size: f64) -> AxisMeasure {
        AxisMeasure::new(self.measurements[axis], size)
    }
    /// Builds both measures (rows default to 30pt, columns to 100pt).
    pub fn build_measures(&self) -> AxisPair<AxisMeasure> {
        AxisPair::new(
            self.build_measure(TableAxis::Rows, 30.),
            self.build_measure(TableAxis::Columns, 100.),
        )
    }
    /// Consumes the builder, producing the arguments the table widget needs.
    /// `if_opt!` yields `None` for heading strips that are switched off.
    pub fn build_args(self) -> DefaultTableArgs<TableData> {
        let column_headers: Vec<String> = self
            .table_columns
            .iter()
            .map(|tc| tc.header.clone())
            .collect();
        let row_build = if_opt!(
            self.show_headings.should_show(&TableAxis::Rows),
            HeaderBuild::new(
                HeadersFromIndices::<TableData>::new(),
                self.row_header_delegate,
            )
        );
        let col_build = if_opt!(
            self.show_headings.should_show(&TableAxis::Columns),
            HeaderBuild::new(
                SuppliedHeaders::new(column_headers),
                self.column_header_delegate,
            )
        );
        TableArgs::new(
            ProvidedColumns::new(self.table_columns),
            row_build,
            col_build,
            self.table_config,
        )
    }
}
|
use super::math::vector::Vec2;
// Row-major 2-D grid of bytes. `stride` is the row pitch in elements —
// presumably >= width, with `values` holding height * stride entries;
// TODO confirm against the code that fills it.
#[allow(dead_code)]
pub struct Map {
    pub height: i32,
    pub width: i32,
    pub stride:i32,
    pub values: Vec<u8>,
}
impl Map {
    /// Width/height as a vector (unused in-tree, hence the `_` prefix).
    pub fn _dimensions(&self) -> Vec2<i32> {
        Vec2 { x: self.width, y: self.height }
    }
    /// Flattens `(x, y)` into an index using the row stride.
    ///
    /// Now takes `&self` instead of the old free-standing `map: &Map`
    /// parameter; existing `Map::index(&map, v)` call sites still compile,
    /// and method syntax `map.index(v)` becomes available.
    pub fn index(&self, v: (i32, i32)) -> usize {
        (v.1 * self.stride + v.0) as usize
    }
    /// Borrows the cell at `(x, y)`; panics if the flat index is out of
    /// bounds (same failure mode as the previous `get(..).unwrap()`).
    pub fn value(&self, v: (i32, i32)) -> &u8 {
        &self.values[self.index(v)]
    }
    /// Mutable variant of [`Map::value`].
    pub fn _value_mut(&mut self, v: (i32, i32)) -> &mut u8 {
        let index = self.index(v);
        &mut self.values[index]
    }
}
|
#![feature(alloc)]
use std::boxed;
use std::thread;
use std::sync::mpsc::channel;
/// Fixed-size payload large enough (4 KiB) to make allocation cost visible.
struct Data {
    values: [usize; 512]
}
/// Stress test: producer threads box large `Data` payloads, smuggle the raw
/// pointers through an mpsc channel as `usize`, and the receiver reclaims
/// and drops every box.
fn main() {
    let count = 10000000;
    let (tx, rx) = channel(); // was once a bounded channel (2^20) experiment
    let threads = 1;
    for _ in 0..threads {
        let tx = tx.clone();
        thread::spawn(move || {
            for _ in 0..(count / threads) {
                let d = Box::new(Data{values: [0; 512]});
                // `Box::into_raw` replaces the long-removed unstable
                // `std::boxed::into_raw`; ownership transfers to the raw
                // pointer until the receiver rebuilds the Box.
                let ptr = Box::into_raw(d);
                let raw = ptr as usize;
                tx.send(raw).unwrap();
            }
        });
    }
    for _ in 0..count {
        let raw = rx.recv().unwrap();
        // SAFETY: `raw` came from Box::into_raw above and is reconstructed
        // exactly once, so ownership round-trips soundly.
        unsafe {
            let ptr = Box::from_raw(raw as *mut Data);
            drop(ptr);
        }
    }
}
|
// Page/route modules exposed by this crate, one per screen.
pub mod forget_password;
pub mod home;
pub mod login;
pub mod register;
|
extern crate futures;
extern crate tempdir;
extern crate tokio_fs;
use futures::{Future, Stream};
use std::fs;
use std::sync::{Arc, Mutex};
use tempdir::TempDir;
use tokio_fs::*;
mod pool;
#[test]
fn create() {
    // `create_dir` must create a single missing path component.
    let base_dir = TempDir::new("base").unwrap();
    let new_dir = base_dir.path().join("foo");
    pool::run({ create_dir(new_dir.clone()) });
    assert!(new_dir.is_dir());
}
#[test]
fn create_all() {
    // `create_dir_all` must create every missing component ("foo" and "bar").
    let base_dir = TempDir::new("base").unwrap();
    let new_dir = base_dir.path().join("foo").join("bar");
    pool::run({ create_dir_all(new_dir.clone()) });
    assert!(new_dir.is_dir());
}
#[test]
fn remove() {
    // Set up a directory synchronously, then delete it through the async API.
    let base_dir = TempDir::new("base").unwrap();
    let new_dir = base_dir.path().join("foo");
    fs::create_dir(new_dir.clone()).unwrap();
    pool::run({ remove_dir(new_dir.clone()) });
    assert!(!new_dir.exists());
}
#[test]
fn read() {
    let base_dir = TempDir::new("base").unwrap();
    let p = base_dir.path();
    fs::create_dir(p.join("aa")).unwrap();
    fs::create_dir(p.join("bb")).unwrap();
    fs::create_dir(p.join("cc")).unwrap();
    // Collect entry names from the async read_dir stream; the Arc<Mutex<..>>
    // lets the `for_each` closure feed names back to the test thread.
    let files = Arc::new(Mutex::new(Vec::new()));
    let f = files.clone();
    let p = p.to_path_buf();
    pool::run({
        read_dir(p).flatten_stream().for_each(move |e| {
            let s = e.file_name().to_str().unwrap().to_string();
            f.lock().unwrap().push(s);
            Ok(())
        })
    });
    let mut files = files.lock().unwrap();
    files.sort(); // because the order is not guaranteed
    assert_eq!(
        *files,
        vec!["aa".to_string(), "bb".to_string(), "cc".to_string()]
    );
}
|
// svd2rust-style generated reader aliases for the SCSR register; each
// field reader wraps a single status bit.
#[doc = "Reader of register SCSR"]
pub type R = crate::R<u32, super::SCSR>;
#[doc = "Reader of field `DA`"]
pub type DA_R = crate::R<bool, bool>;
#[doc = "Reader of field `RREQ`"]
pub type RREQ_R = crate::R<bool, bool>;
#[doc = "Reader of field `TREQ`"]
pub type TREQ_R = crate::R<bool, bool>;
#[doc = "Reader of field `FBR`"]
pub type FBR_R = crate::R<bool, bool>;
#[doc = "Reader of field `OAR2SEL`"]
pub type OAR2SEL_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - Device Active"]
    #[inline(always)]
    pub fn da(&self) -> DA_R {
        // NOTE(review): `da` and `rreq` both decode bit 0 — the SVD appears
        // to overlay two meanings on the same bit (likely mode-dependent);
        // confirm against the device datasheet.
        DA_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 0 - Receive Request"]
    #[inline(always)]
    pub fn rreq(&self) -> RREQ_R {
        RREQ_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Transmit Request"]
    #[inline(always)]
    pub fn treq(&self) -> TREQ_R {
        TREQ_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - First Byte Received"]
    #[inline(always)]
    pub fn fbr(&self) -> FBR_R {
        FBR_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - OAR2 Address Matched"]
    #[inline(always)]
    pub fn oar2sel(&self) -> OAR2SEL_R {
        OAR2SEL_R::new(((self.bits >> 3) & 0x01) != 0)
    }
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
use std::fmt::Formatter;
use crate::protobuf::RaftReply;
use crate::Change;
use crate::Node;
use crate::TxnReply;
/// The state of an applied raft log.
/// Normally it includes two fields: the state before applying and the state after applying the log.
#[derive(
    serde::Serialize,
    serde::Deserialize,
    Debug,
    Clone,
    PartialEq,
    Eq,
    derive_more::From,
    derive_more::TryInto,
)]
pub enum AppliedState {
    /// Cluster-node change: node state before and after the log.
    Node {
        prev: Option<Node>,
        result: Option<Node>,
    },
    /// Key-value change over the raw stored bytes.
    KV(Change<Vec<u8>>),
    /// Result of a transactional request.
    TxnReply(TxnReply),
    // Excluded from the TryInto derive: the variant carries no payload.
    #[try_into(ignore)]
    None,
}
impl fmt::Display for AppliedState {
    /// Renders as `AppliedState: <variant summary>` for logs and errors.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "AppliedState: ")?;
        match self {
            AppliedState::Node { prev, result } => {
                write!(f, "Node: prev: {:?}, result: {:?}", prev, result)
            }
            AppliedState::KV(change) => {
                write!(f, "KV: {}", change)
            }
            AppliedState::TxnReply(txnreply) => {
                write!(f, "Txn: {}", txnreply)
            }
            AppliedState::None => {
                write!(f, "None")
            }
        }
    }
}
impl AppliedState {
    /// Whether the state changed
    pub fn changed(&self) -> bool {
        match self {
            AppliedState::Node {
                ref prev,
                ref result,
            } => prev != result,
            AppliedState::KV(ref ch) => ch.is_changed(),
            AppliedState::None => false,
            // A transaction counts as a change exactly when it succeeded.
            AppliedState::TxnReply(txn) => txn.success,
        }
    }
    pub fn prev_is_some(&self) -> bool {
        !self.prev_is_none()
    }
    pub fn result_is_some(&self) -> bool {
        !self.result_is_none()
    }
    /// `(prev_is_some, result_is_some)` as a pair.
    pub fn is_some(&self) -> (bool, bool) {
        (self.prev_is_some(), self.result_is_some())
    }
    /// `(prev_is_none, result_is_none)` as a pair.
    pub fn is_none(&self) -> (bool, bool) {
        (self.prev_is_none(), self.result_is_none())
    }
    pub fn prev_is_none(&self) -> bool {
        match self {
            AppliedState::Node { ref prev, .. } => prev.is_none(),
            AppliedState::KV(Change { ref prev, .. }) => prev.is_none(),
            AppliedState::None => true,
            // Txn replies carry no "previous" state, so this is always true.
            AppliedState::TxnReply(_txn) => true,
        }
    }
    pub fn result_is_none(&self) -> bool {
        match self {
            AppliedState::Node { ref result, .. } => result.is_none(),
            AppliedState::KV(Change { ref result, .. }) => result.is_none(),
            AppliedState::None => true,
            // A failed transaction is treated as producing no result.
            AppliedState::TxnReply(txn) => !txn.success,
        }
    }
}
impl From<AppliedState> for RaftReply {
    /// Serializes the state into the reply's JSON `data` field; an empty
    /// `error` string marks success.
    fn from(msg: AppliedState) -> Self {
        // Serializing our own enum can only fail on a bug, hence `expect`.
        let data = serde_json::to_string(&msg).expect("fail to serialize");
        RaftReply {
            data,
            error: "".to_string(),
        }
    }
}
|
use super::*;
use std::iter::FusedIterator;
///////////////////////////////////////////////////////////////////////////////
/// An iterator which receives the values sent through the channel,
/// blocking until a value is received.
///
/// If the channel is disconnected this will return None without blocking.
pub struct RIter<'a, T> {
    // Borrowed receiver: iterating never consumes the channel itself.
    pub(super) channel: &'a RReceiver<T>,
}
impl<'a, T> Iterator for RIter<'a, T> {
type Item = T;
fn next(&mut self) -> Option<T> {
self.channel.recv().ok()
}
}
impl<'a, T> Debug for RIter<'a, T> {
    /// Opaque representation; works even when `T: !Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad("RIter{..}")
    }
}
///////////////////////////////////////////////////////////////////////////////
/// An iterator which receives the values sent through the channel,
/// blocking until a value is received.
///
/// If the channel is disconnected this will return None without blocking.
pub struct RIntoIter<T> {
    // Owning variant: consumes the receiver, dropping it when done.
    pub(super) channel: RReceiver<T>,
}
// After disconnection `recv` keeps failing, so `next` stays `None` — fused.
impl<T> FusedIterator for RIntoIter<T> {}
impl<T> Iterator for RIntoIter<T> {
    type Item = T;
    /// Blocks for the next value; `None` once the channel is disconnected.
    fn next(&mut self) -> Option<T> {
        self.channel.recv().ok()
    }
}
impl<T> Debug for RIntoIter<T> {
    /// Opaque representation; works even when `T: !Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad("RIntoIter{..}")
    }
}
|
use std::collections::BTreeMap;
use std::fs;
/// One parsed assembly instruction (`nop`/`acc`/`jmp`) plus a `done` flag
/// used to detect when execution revisits an instruction (infinite loop).
///
/// The hand-written field-by-field `Clone` impl was equivalent to the
/// derive, so it is replaced by `#[derive(Clone)]`.
#[derive(Clone)]
struct Instruction {
    name: String,
    value: i32,
    done: bool,
}
impl Instruction {
    /// Parses a line like `"acc +7"` into a not-yet-executed instruction.
    fn new(full_instruction: &str) -> Instruction {
        let mut parts = full_instruction.split(' ');
        let name = parts.next().unwrap().to_string();
        let value = parts.next().unwrap().parse::<i32>().unwrap();
        Instruction { name, value, done: false }
    }
    /// Swaps nop <-> jmp (the day-8 "repair" operation); acc stays as-is.
    fn change_name(mut self) -> Instruction {
        self.name = match self.name.as_str() {
            "jmp" => "nop",
            "nop" => "jmp",
            "acc" => "acc",
            _ => panic!("Impossible"),
        }
        .to_string();
        self
    }
}
/// Executes the single instruction at `index`.
///
/// Returns the updated `(acc, index, program)` on success; `Err(true)` when
/// `index` is past the program end (normal termination); `Err(false)` when
/// the instruction was already executed, i.e. an infinite loop was hit.
fn run_instruction(
    mut acc: u32,
    mut index: u32,
    mut program: BTreeMap<u32, Instruction>,
) -> Result<(u32, u32, BTreeMap<u32, Instruction>), bool> {
    let instruction = match program.get_mut(&index) {
        Some(x) => x,
        None => return Err(true),
    };
    if instruction.done {
        return Err(false);
    }
    match instruction.name.as_str() {
        "nop" => {
            index += 1;
        }
        "acc" => {
            index += 1;
            // `value` may be negative; `as u32` plus wrapping_add emulates
            // signed addition on the unsigned accumulator.
            acc = acc.wrapping_add(instruction.value as u32)
        }
        // Same two's-complement trick lets `jmp` step backwards.
        "jmp" => index = index.wrapping_add(instruction.value as u32),
        _ => panic!("Impossible!"),
    }
    // Mark as executed so a future revisit is detected as a loop.
    instruction.done = true;
    Ok((acc, index, program))
}
/// AoC 2020 day 8 part 2: flips each nop/jmp in turn (index `counter`)
/// until the modified program terminates normally.
fn main() {
    let data = fs::read_to_string("input").expect("Error");
    let mut counter = 0;
    loop {
        match run_program(&data, counter) {
            true => break,
            false => {
                counter += 1;
            },
        };
    }
    println!("over.")
}
/// Runs the program from `data` with the instruction at index `modify`
/// nop/jmp-flipped; returns whether execution ran off the end (completed)
/// rather than revisiting an instruction.
///
/// Takes `&str` instead of the old `&String` — deref coercion keeps the
/// existing `run_program(&data, n)` call sites compiling.
fn run_program(data: &str, modify: u32) -> bool {
    let mut program: BTreeMap<u32, Instruction> = BTreeMap::new();
    data.lines().enumerate().for_each(|(idx, line)| {
        program.insert(idx as u32, Instruction::new(line));
    });
    // Flip the instruction under test (nop <-> jmp; acc is unaffected).
    let original_instruction = program.get_mut(&modify).unwrap();
    *original_instruction = original_instruction.clone().change_name();
    let mut acc = 0;
    let mut next_position = 0;
    let program_complete;
    // Step until run_instruction reports termination (Err(true)) or a
    // loop (Err(false)); the program map is threaded through by value.
    loop {
        match run_instruction(acc, next_position, program) {
            Ok(result) => {
                acc = result.0;
                next_position = result.1;
                program = result.2;
            }
            Err(x) => {
                program_complete = x;
                break;
            }
        }
    }
    println!("Program completed: {}", program_complete);
    println!("Oy, oy, oy, the result is {}", acc);
    program_complete
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use serde::Deserialize;
use serde::Serialize;
/// Storage params which contains the detailed storage info.
///
/// Serialized with an internal `"type"` tag, so each variant round-trips
/// with its backend name attached.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum StorageParams {
    Azblob(StorageAzblobConfig),
    Fs(StorageFsConfig),
    Ftp(StorageFtpConfig),
    Gcs(StorageGcsConfig),
    // Only compiled in when the hdfs backend feature is enabled.
    #[cfg(feature = "storage-hdfs")]
    Hdfs(StorageHdfsConfig),
    Http(StorageHttpConfig),
    Ipfs(StorageIpfsConfig),
    Memory,
    Moka(StorageMokaConfig),
    Obs(StorageObsConfig),
    Oss(StorageOssConfig),
    S3(StorageS3Config),
    Redis(StorageRedisConfig),
    Webhdfs(StorageWebhdfsConfig),
    /// None means this storage type is none.
    ///
    /// This type is mostly for cache which mean bypass the cache logic.
    None,
}
impl Default for StorageParams {
    /// The local filesystem backend is the default.
    fn default() -> Self {
        StorageParams::Fs(StorageFsConfig::default())
    }
}
impl StorageParams {
    /// Whether this storage params is secure.
    ///
    /// Query will forbid this storage config unless `allow_insecure` has been enabled.
    pub fn is_secure(&self) -> bool {
        match self {
            StorageParams::Azblob(v) => v.endpoint_url.starts_with("https://"),
            // Local / in-memory backends have no transport to secure.
            StorageParams::Fs(_) => false,
            StorageParams::Ftp(v) => v.endpoint.starts_with("ftps://"),
            #[cfg(feature = "storage-hdfs")]
            StorageParams::Hdfs(_) => false,
            StorageParams::Http(v) => v.endpoint_url.starts_with("https://"),
            StorageParams::Ipfs(c) => c.endpoint_url.starts_with("https://"),
            StorageParams::Memory => false,
            StorageParams::Moka(_) => false,
            StorageParams::Obs(v) => v.endpoint_url.starts_with("https://"),
            StorageParams::Oss(v) => v.endpoint_url.starts_with("https://"),
            StorageParams::S3(v) => v.endpoint_url.starts_with("https://"),
            StorageParams::Gcs(v) => v.endpoint_url.starts_with("https://"),
            StorageParams::Redis(_) => false,
            StorageParams::Webhdfs(v) => v.endpoint_url.starts_with("https://"),
            StorageParams::None => false,
        }
    }
    /// map the given root with.
    ///
    /// Applies `f` to the backend's root path; backends without a root
    /// (Http, Memory, Moka, None) are left untouched.
    pub fn map_root(mut self, f: impl Fn(&str) -> String) -> Self {
        match &mut self {
            StorageParams::Azblob(v) => v.root = f(&v.root),
            StorageParams::Fs(v) => v.root = f(&v.root),
            StorageParams::Ftp(v) => v.root = f(&v.root),
            #[cfg(feature = "storage-hdfs")]
            StorageParams::Hdfs(v) => v.root = f(&v.root),
            StorageParams::Http(_) => {}
            StorageParams::Ipfs(v) => v.root = f(&v.root),
            StorageParams::Memory => {}
            StorageParams::Moka(_) => {}
            StorageParams::Obs(v) => v.root = f(&v.root),
            StorageParams::Oss(v) => v.root = f(&v.root),
            StorageParams::S3(v) => v.root = f(&v.root),
            StorageParams::Gcs(v) => v.root = f(&v.root),
            StorageParams::Redis(v) => v.root = f(&v.root),
            StorageParams::Webhdfs(v) => v.root = f(&v.root),
            StorageParams::None => {}
        };
        self
    }
    /// True for the local-filesystem backend.
    pub fn is_fs(&self) -> bool {
        matches!(self, StorageParams::Fs(_))
    }
}
/// StorageParams will be displayed by `{protocol}://{key1=value1},{key2=value2}`
// Note: credentials are deliberately omitted here; only addressing info is shown.
impl Display for StorageParams {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            StorageParams::Azblob(v) => write!(
                f,
                "azblob | container={},root={},endpoint={}",
                v.container, v.root, v.endpoint_url
            ),
            StorageParams::Fs(v) => write!(f, "fs | root={}", v.root),
            StorageParams::Ftp(v) => {
                write!(f, "ftp | root={},endpoint={}", v.root, v.endpoint)
            }
            StorageParams::Gcs(v) => write!(
                f,
                "gcs | bucket={},root={},endpoint={}",
                v.bucket, v.root, v.endpoint_url
            ),
            #[cfg(feature = "storage-hdfs")]
            StorageParams::Hdfs(v) => {
                write!(f, "hdfs | root={},name_node={}", v.root, v.name_node)
            }
            StorageParams::Http(v) => {
                write!(f, "http | endpoint={},paths={:?}", v.endpoint_url, v.paths)
            }
            StorageParams::Ipfs(c) => {
                write!(f, "ipfs | endpoint={},root={}", c.endpoint_url, c.root)
            }
            StorageParams::Memory => write!(f, "memory"),
            StorageParams::Moka(v) => write!(f, "moka | max_capacity={}", v.max_capacity),
            StorageParams::Obs(v) => write!(
                f,
                "obs | bucket={},root={},endpoint={}",
                v.bucket, v.root, v.endpoint_url
            ),
            StorageParams::Oss(v) => write!(
                f,
                "oss | bucket={},root={},endpoint={}",
                v.bucket, v.root, v.endpoint_url
            ),
            StorageParams::S3(v) => {
                write!(
                    f,
                    "s3 | bucket={},root={},endpoint={}",
                    v.bucket, v.root, v.endpoint_url
                )
            }
            StorageParams::Redis(v) => {
                write!(
                    f,
                    "redis | db={},root={},endpoint={}",
                    v.db, v.root, v.endpoint_url
                )
            }
            StorageParams::Webhdfs(v) => {
                write!(f, "webhdfs | root={},endpoint={}", v.root, v.endpoint_url)
            }
            StorageParams::None => {
                write!(f, "none",)
            }
        }
    }
}
/// Config for storage backend azblob.
#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageAzblobConfig {
    pub endpoint_url: String,
    pub container: String,
    pub account_name: String,
    // Secret: masked in the Debug impl below.
    pub account_key: String,
    pub root: String,
}
impl Debug for StorageAzblobConfig {
    /// Hand-written so the account key is masked instead of printed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageAzblobConfig")
            .field("endpoint_url", &self.endpoint_url)
            .field("container", &self.container)
            .field("root", &self.root)
            .field("account_name", &self.account_name)
            .field("account_key", &mask_string(&self.account_key, 3))
            .finish()
    }
}
/// Config for storage backend fs.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageFsConfig {
    pub root: String,
}
impl Default for StorageFsConfig {
    /// Data lands under the relative `_data` directory by default.
    fn default() -> Self {
        Self {
            root: "_data".to_string(),
        }
    }
}
pub const STORAGE_FTP_DEFAULT_ENDPOINT: &str = "ftps://127.0.0.1";
/// Config for FTP and FTPS data source
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageFtpConfig {
    pub endpoint: String,
    pub root: String,
    pub username: String,
    // Secret: masked in the Debug impl below.
    pub password: String,
}
impl Default for StorageFtpConfig {
    /// Secure localhost endpoint, anonymous credentials, `/` root.
    fn default() -> Self {
        Self {
            endpoint: STORAGE_FTP_DEFAULT_ENDPOINT.to_string(),
            username: "".to_string(),
            password: "".to_string(),
            root: "/".to_string(),
        }
    }
}
impl Debug for StorageFtpConfig {
    /// Hand-written so the password is masked instead of printed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageFtpConfig")
            .field("endpoint", &self.endpoint)
            .field("root", &self.root)
            .field("username", &self.username)
            .field("password", &mask_string(self.password.as_str(), 3))
            .finish()
    }
}
pub static STORAGE_GCS_DEFAULT_ENDPOINT: &str = "https://storage.googleapis.com";
/// Config for storage backend GCS.
#[derive(Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct StorageGcsConfig {
    pub endpoint_url: String,
    pub bucket: String,
    pub root: String,
    // Secret: masked in the Debug impl below.
    pub credential: String,
}
impl Default for StorageGcsConfig {
    /// Public GCS endpoint with everything else empty.
    fn default() -> Self {
        Self {
            endpoint_url: STORAGE_GCS_DEFAULT_ENDPOINT.to_string(),
            bucket: String::new(),
            root: String::new(),
            credential: String::new(),
        }
    }
}
impl Debug for StorageGcsConfig {
    /// Hand-written so the credential is masked instead of printed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageGcsConfig")
            // Label fixed from "endpoint" to "endpoint_url" to match the
            // field name and every sibling config's Debug output.
            .field("endpoint_url", &self.endpoint_url)
            .field("bucket", &self.bucket)
            .field("root", &self.root)
            .field("credential", &mask_string(&self.credential, 3))
            .finish()
    }
}
/// Config for storage backend hdfs.
///
/// # Notes
///
/// Ideally, we should export this config only when hdfs feature enabled.
/// But export this struct without hdfs feature is safe and no harm. So we
/// export it to make crates' lives that depend on us easier.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageHdfsConfig {
    // Address of the HDFS NameNode this cluster talks to.
    pub name_node: String,
    pub root: String,
}
pub static STORAGE_S3_DEFAULT_ENDPOINT: &str = "https://s3.amazonaws.com";
/// Config for storage backend s3.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageS3Config {
    pub endpoint_url: String,
    pub region: String,
    pub bucket: String,
    // Credentials below are masked in the Debug impl.
    pub access_key_id: String,
    pub secret_access_key: String,
    /// Temporary security token used for authentications
    ///
    /// This recommended to use since users don't need to store their permanent credentials in their
    /// scripts or worksheets.
    ///
    /// refer to [documentations](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html) for details.
    pub security_token: String,
    pub master_key: String,
    pub root: String,
    /// This flag is used internally to control whether databend load
    /// credentials from environment like env, profile and web token.
    pub disable_credential_loader: bool,
    /// Enable this flag to send API in virtual host style.
    ///
    /// - Virtual Host Style: `https://bucket.s3.amazonaws.com`
    /// - Path Style: `https://s3.amazonaws.com/bucket`
    pub enable_virtual_host_style: bool,
    /// The RoleArn that used for AssumeRole.
    pub role_arn: String,
    /// The ExternalId that used for AssumeRole.
    pub external_id: String,
}
impl Default for StorageS3Config {
    /// Public AWS endpoint with every other field empty/false.
    fn default() -> Self {
        StorageS3Config {
            endpoint_url: STORAGE_S3_DEFAULT_ENDPOINT.to_string(),
            region: "".to_string(),
            bucket: "".to_string(),
            access_key_id: "".to_string(),
            secret_access_key: "".to_string(),
            security_token: "".to_string(),
            master_key: "".to_string(),
            root: "".to_string(),
            disable_credential_loader: false,
            enable_virtual_host_style: false,
            role_arn: "".to_string(),
            external_id: "".to_string(),
        }
    }
}
impl Debug for StorageS3Config {
    /// Hand-written so every credential field goes through `mask_string`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageS3Config")
            .field("endpoint_url", &self.endpoint_url)
            .field("region", &self.region)
            .field("bucket", &self.bucket)
            .field("root", &self.root)
            .field("disable_credential_loader", &self.disable_credential_loader)
            .field("enable_virtual_host_style", &self.enable_virtual_host_style)
            .field("role_arn", &self.role_arn)
            .field("external_id", &self.external_id)
            .field("access_key_id", &mask_string(&self.access_key_id, 3))
            .field(
                "secret_access_key",
                &mask_string(&self.secret_access_key, 3),
            )
            .field("security_token", &mask_string(&self.security_token, 3))
            .field("master_key", &mask_string(&self.master_key, 3))
            .finish()
    }
}
/// Config for storage backend http.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageHttpConfig {
    pub endpoint_url: String,
    // Explicit list of object paths served from the endpoint.
    pub paths: Vec<String>,
}
pub const STORAGE_IPFS_DEFAULT_ENDPOINT: &str = "https://ipfs.io";
/// Config for IPFS storage backend
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageIpfsConfig {
    pub endpoint_url: String,
    pub root: String,
}
/// Config for storage backend obs.
#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageObsConfig {
    pub endpoint_url: String,
    pub bucket: String,
    // Credentials below are masked in the Debug impl.
    pub access_key_id: String,
    pub secret_access_key: String,
    pub root: String,
}
impl Debug for StorageObsConfig {
    /// Hand-written so the credentials are masked instead of printed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageObsConfig")
            .field("endpoint_url", &self.endpoint_url)
            .field("bucket", &self.bucket)
            .field("root", &self.root)
            .field("access_key_id", &mask_string(&self.access_key_id, 3))
            .field(
                "secret_access_key",
                &mask_string(&self.secret_access_key, 3),
            )
            .finish()
    }
}
/// config for Aliyun Object Storage Service
#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageOssConfig {
    pub endpoint_url: String,
    // Separate endpoint used when generating presigned URLs.
    pub presign_endpoint_url: String,
    pub bucket: String,
    // Credentials below are masked in the Debug impl.
    pub access_key_id: String,
    pub access_key_secret: String,
    pub root: String,
}
impl Debug for StorageOssConfig {
    /// Hand-written so the credentials are masked instead of printed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageOssConfig")
            .field("endpoint_url", &self.endpoint_url)
            .field("presign_endpoint_url", &self.presign_endpoint_url)
            .field("bucket", &self.bucket)
            .field("root", &self.root)
            .field("access_key_id", &mask_string(&self.access_key_id, 3))
            .field(
                "access_key_secret",
                &mask_string(&self.access_key_secret, 3),
            )
            .finish()
    }
}
/// config for Moka Object Storage Service
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageMokaConfig {
    pub max_capacity: u64,
    // TTL values are in seconds.
    pub time_to_live: i64,
    pub time_to_idle: i64,
}
impl Default for StorageMokaConfig {
    // NOTE(review): `no_sanitize` is an unstable attribute; its purpose on a
    // constant constructor is unclear from here — confirm why it's needed.
    #[no_sanitize(address)]
    fn default() -> Self {
        Self {
            // Use 1G as default.
            max_capacity: 1024 * 1024 * 1024,
            // Use 1 hour as default time to live
            time_to_live: 3600,
            // Use 10 minutes as default time to idle.
            time_to_idle: 600,
        }
    }
}
/// config for Redis Storage Service
#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageRedisConfig {
    pub endpoint_url: String,
    // Optional credentials; masked in the Debug impl when present.
    pub username: Option<String>,
    pub password: Option<String>,
    pub root: String,
    pub db: i64,
    /// TTL in seconds
    pub default_ttl: Option<i64>,
}
impl Debug for StorageRedisConfig {
    /// Hand-written so both credentials are masked; optional fields only
    /// appear when set.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut d = f.debug_struct("StorageRedisConfig");
        d.field("endpoint_url", &self.endpoint_url)
            .field("db", &self.db)
            .field("root", &self.root)
            .field("default_ttl", &self.default_ttl);
        if let Some(username) = &self.username {
            d.field("username", &mask_string(username, 3));
        }
        if let Some(password) = &self.password {
            // Label fixed: was the garbled "usernpasswordame".
            d.field("password", &mask_string(password, 3));
        }
        d.finish()
    }
}
/// config for WebHDFS Storage Service
#[derive(Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct StorageWebhdfsConfig {
    /// WebHDFS endpoint to connect to.
    pub endpoint_url: String,
    /// Working root path.
    pub root: String,
    /// Delegation token (masked in the `Debug` impl below).
    pub delegation: String,
}
impl Debug for StorageWebhdfsConfig {
    /// Debug output with the delegation token masked via `mask_string`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StorageWebhdfsConfig")
            .field("endpoint_url", &self.endpoint_url)
            .field("root", &self.root)
            .field("delegation", &mask_string(&self.delegation, 3))
            .finish()
    }
}
/// Mask a string by "******", but keep `unmask_len` of suffix.
///
/// Copied from `common-base` so that we don't need to depend on it.
///
/// Counts characters rather than bytes, so multi-byte UTF-8 input can no
/// longer panic on a non-character slice boundary (the original indexed by
/// byte length). Behavior for ASCII input is unchanged.
#[inline]
pub fn mask_string(s: &str, unmask_len: usize) -> String {
    let total_chars = s.chars().count();
    if total_chars <= unmask_len {
        s.to_string()
    } else {
        // Byte offset where the last `unmask_len` characters begin; when
        // `unmask_len` is 0 there is no such char, so keep nothing.
        let cut = s
            .char_indices()
            .nth(total_chars - unmask_len)
            .map_or(s.len(), |(i, _)| i);
        let mut ret = String::with_capacity(6 + (s.len() - cut));
        ret.push_str("******");
        ret.push_str(&s[cut..]);
        ret
    }
}
|
use std::env;
/// Sums every command-line argument that parses as an `i32`, reporting the
/// ones that do not, and prints the running total after each argument.
fn main() {
    let mut sum = 0;
    for input in env::args() {
        // Non-numeric arguments (including argv[0], the program path) are
        // reported and skipped rather than aborting the run. The original
        // bound this unit-valued match to a pointless `let _i`; matching
        // directly is the idiomatic form and behaves identically.
        match input.parse::<i32>() {
            Ok(n) => sum += n,
            Err(_) => println!("{}: Not a valid integer!", input),
        }
        // Printed inside the loop on purpose: the original emitted the
        // running sum after every argument.
        println!("Sum: {}", sum);
    }
}
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::lifecycle::ActivatorLifecycleFsm;
use crate::repository::{BindingArtefact, OfframpArtefact, OnrampArtefact, PipelineArtefact};
use crate::url::TremorUrl;
/// A servant ID — servants are addressed by their Tremor URL.
pub type Id = TremorUrl;
/// A pipeline servant: lifecycle state machine over a pipeline artefact.
pub type Pipeline = ActivatorLifecycleFsm<PipelineArtefact>;
/// An onramp servant: lifecycle state machine over an onramp artefact.
pub type Onramp = ActivatorLifecycleFsm<OnrampArtefact>;
/// An offramp servant: lifecycle state machine over an offramp artefact.
pub type Offramp = ActivatorLifecycleFsm<OfframpArtefact>;
/// A binding servant: lifecycle state machine over a binding artefact.
pub type Binding = ActivatorLifecycleFsm<BindingArtefact>;
|
/// Demo entry point: runs the heap-boxed-value example, then the cons-list
/// example, in that order.
pub fn hello_box() {
    box_demo();
    cons_in_rust();
}
/// Allocates an `i32` on the heap via `Box` and prints it; `Box<i32>`
/// formats through `Display` just like the inner value.
fn box_demo() {
    let boxed = Box::new(5);
    println!("b = {}", boxed);
}
/// A classic cons list: each `Cons` cell holds a value and a boxed tail,
/// terminated by `Nil`. The `Box` gives this recursive type a known size.
enum List {
    Cons(i32, Box<List>),
    Nil,
}
/// Builds the list 1 -> 2 -> 3 -> Nil to demonstrate `Box`-backed recursion.
fn cons_in_rust() {
    // Underscore prefix: the list is constructed purely for demonstration
    // and never read, so silence the unused-variable warning.
    let _list = List::Cons(
        1,
        Box::new(List::Cons(2, Box::new(List::Cons(3, Box::new(List::Nil))))),
    );
}
|
/// Demonstrates closures: a two-argument adder and a closure that captures
/// a local variable (`num_ten`) from its environment.
pub fn example12() {
    let sum_nums = |x: i32, y: i32| x + y;
    // Fixed: `xprintln!` is not a macro and did not compile; the standard
    // `println!` was clearly intended.
    println!("7 + 8 = {}", sum_nums(7, 8));
    let num_ten = 10;
    let add_ten = |x: i32| x + num_ten;
    println!("5 + 10 = {}", add_ten(5));
}
use gcd::Gcd;
use num::traits::PrimInt;
#[derive(Clone, Copy, Debug)]
pub struct Fraction<Int> {
num: Int,
den: Int,
}
impl<Int> Fraction<Int>
where
    Int: PrimInt,
{
    /// Creates `num / den`. The fraction is not reduced automatically.
    pub fn new(num: Int, den: Int) -> Self {
        // Field-init shorthand replaces the redundant `num: num, den: den`
        // (clippy: redundant_field_names); behavior is identical.
        Self { num, den }
    }
    /// Returns the numerator.
    pub fn num(&self) -> Int {
        self.num
    }
    /// Returns the denominator.
    pub fn den(&self) -> Int {
        self.den
    }
    /// Integer division of `num` by `den`.
    ///
    /// NOTE(review): integer division truncates toward zero, so for negative
    /// fractions this is not a mathematical floor — confirm intended.
    pub fn floor(&self) -> Int {
        self.num / self.den
    }
    /// Rounds up via `(num + den - 1) / den`.
    ///
    /// NOTE(review): the intermediate sum can overflow near `Int::max_value()`
    /// and the formula assumes a positive `den` — confirm callers' ranges.
    pub fn ceil(&self) -> Int {
        (self.num + self.den - Int::one()) / self.den
    }
    /// True when `den` evenly divides `num`.
    pub fn is_integer(&self) -> bool {
        (self.num % self.den) == Int::zero()
    }
}
impl<Int> Fraction<Int>
where
Int: Gcd + PrimInt,
{
pub fn gcd(&self) -> Int {
gcd::Gcd::gcd(self.num, self.den)
}
pub fn is_reduced(&self) -> bool {
self.gcd() == Int::one()
}
pub fn reduce(&mut self) {
let gcd = self.gcd();
self.num = self.num / gcd;
self.den = self.den / gcd;
}
}
impl<Int> core::cmp::Ord for Fraction<Int>
where
    Int: PrimInt,
{
    /// Compares by cross-multiplication: `a/b` vs `c/d` as `a*d` vs `b*c`.
    ///
    /// NOTE(review): this assumes denominators share a sign and the products
    /// do not overflow — confirm callers' value ranges.
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        let lhs = self.num * other.den;
        let rhs = self.den * other.num;
        lhs.cmp(&rhs)
    }
}
impl<Int> core::cmp::PartialOrd for Fraction<Int>
where
    Int: PrimInt,
{
    /// Total order delegated to `Ord::cmp`; never returns `None`.
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(core::cmp::Ord::cmp(self, other))
    }
}
// Marker impl promoting the cross-multiplied `PartialEq` to a full
// equivalence relation, which holds for integer products.
impl<Int> core::cmp::Eq for Fraction<Int> where Int: PrimInt {}
impl<Int> core::cmp::PartialEq for Fraction<Int>
where
    Int: PrimInt,
{
    /// Cross-multiplied equality: `a/b == c/d` iff `a*d == b*c`, so
    /// fractions need not be reduced to compare equal.
    fn eq(&self, other: &Self) -> bool {
        let lhs = self.num * other.den;
        let rhs = self.den * other.num;
        lhs == rhs
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.