text stringlengths 8 4.13M |
|---|
pub mod process_epoch;
|
extern crate glutin_window;
extern crate graphics;
extern crate opengl_graphics;
extern crate piston;
use glutin_window::GlutinWindow as Window;
use opengl_graphics::{GlGraphics, OpenGL};
use piston::event_loop::{EventSettings, Events};
use piston::input::{RenderArgs, RenderEvent, UpdateArgs, UpdateEvent};
use piston::window::WindowSettings;
use rand::Rng;
use std::collections::HashMap;
use std::io;
// Ball object with physics properties
/// A single simulated ball with the state needed for Euler integration
/// and elastic collisions.
#[derive(Copy, Clone, Debug)]
struct Ball {
    acceleration: Vector2D, // Current acceleration (screen units / s^2)
    velocity: Vector2D,     // Current velocity (screen units / s)
    location: Vector2D,     // Centre position in window coordinates
    prev_location: Vector2D, // Location info for t-1 for collision calculations
    radius: f64,            // Draw and collision radius, in pixels
    color: [f32; 4],        // RGBA draw colour
    mass: f64, // Mass in KG
}
// 2D vector representation
/// Minimal 2D vector of `f64` components with the algebra the simulation
/// needs: dot product, Euclidean norm, normalization, add/subtract/scale.
#[derive(Copy, Clone, Debug)]
struct Vector2D {
    x: f64,
    y: f64,
}
impl Vector2D {
    /// Dot product of `self` and `other`.
    fn dot(&self, other: &Vector2D) -> f64 {
        self.x * other.x + self.y * other.y
    }
    /// Euclidean (L2) length of the vector.
    fn norm(&self) -> f64 {
        self.dot(self).sqrt()
    }
    /// Returns a unit-length vector with the same direction.
    /// NOTE(review): the zero vector yields NaN components (0/0) — callers
    /// are expected not to normalize a zero vector.
    fn normalize(&self) -> Vector2D {
        // Compute the norm once instead of once per component.
        let n = self.norm();
        Vector2D {
            x: self.x / n,
            y: self.y / n,
        }
    }
    /// Component-wise difference `self - other`.
    fn subtract(&self, other: &Vector2D) -> Vector2D {
        Vector2D {
            x: self.x - other.x,
            y: self.y - other.y,
        }
    }
    /// Component-wise sum `self + other`.
    fn add(&self, other: &Vector2D) -> Vector2D {
        Vector2D {
            x: self.x + other.x,
            y: self.y + other.y,
        }
    }
    /// Scalar multiplication `scale * self`.
    fn scale(&self, scale: f64) -> Vector2D {
        Vector2D {
            x: scale * self.x,
            y: scale * self.y,
        }
    }
}
/// Top-level simulation state: rendering backend, scene contents, window
/// bounds, a time-scale multiplier, and the gravity toggle.
pub struct Simulation {
    gl: GlGraphics, // OpenGL drawing backend.
    balls: Vec<Ball>, // Collection of objects in scene
    resolution: (f64, f64), // Window (width, height) in pixels
    simulation_factor: f64, // Time-scale multiplier applied to dt on integration
    gravity_on: bool, // When true, balls attract each other gravitationally
}
impl Simulation {
fn render(&mut self, args: &RenderArgs) {
use graphics::*;
const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 0.0];
let balls = &self.balls;
self.gl.draw(args.viewport(), |c, gl| {
// Clear the screen.
clear(BLACK, gl);
for ball in balls.iter() {
let transform1 = c.transform.trans(ball.location.x, ball.location.y);
let circle = rectangle::centered_square(0.0, 0.0, ball.radius);
ellipse(ball.color, circle, transform1, gl);
}
});
}
fn update(&mut self, args: &UpdateArgs) {
const G: f64 = 6.67408e-11; // Gravitation Constant
let mut acc_updates: HashMap<u32, Vector2D> = HashMap::new();
// Calculate Forces on each ball
for i in 0..self.balls.len() {
let mut acc = Vector2D { x: 0.0, y: 0.0 };
if self.gravity_on {
let current_ball = &self.balls[i];
for j in 0..self.balls.len() {
if i == j {
continue;
}
// Gravitation force
let mut gravitation_dir =
self.balls[j].location.subtract(¤t_ball.location);
let mut magnitude =
G * self.balls[j].mass / (gravitation_dir.norm() * gravitation_dir.norm());
if magnitude > 100.0 {
magnitude = 100.0;
}
gravitation_dir = gravitation_dir.normalize();
let gravitation_force = gravitation_dir.scale(magnitude * 1.0);
acc = acc.add(&gravitation_force);
}
}
// Adding constant acceleration
acc = acc.add(&Vector2D { x: 0.0, y: 0.0 });
acc_updates.insert(i as u32, acc);
}
let mut balls_copy = self.balls.to_vec();
for (i, ball) in balls_copy.iter_mut().enumerate() {
// Update Ball Acceleration
ball.acceleration = *acc_updates.get(&(i as u32)).expect("Did not want this");
// Update Ball velocity
ball.velocity = Vector2D {
x: ball.velocity.x + ball.acceleration.x * args.dt * self.simulation_factor as f64,
y: ball.velocity.y + ball.acceleration.y * args.dt * self.simulation_factor as f64,
};
if ball.velocity.norm() > 100.0 {
ball.velocity = ball.velocity.normalize().scale(100.0);
}
ball.prev_location = ball.location;
// Update ball location
ball.location = Vector2D {
x: ball.location.x + ball.velocity.x * self.simulation_factor as f64 * args.dt,
y: ball.location.y + ball.velocity.y * self.simulation_factor as f64 * args.dt,
};
self.clamp_ball_location(ball);
}
self.balls = balls_copy;
// Check for collisions with other particles and update each ball
self.balls = self.check_for_collisions_and_update_velocity();
}
fn clamp_ball_location(&self, ball: &mut Ball) {
// Check for collisions with window boundaries
if ball.location.y + ball.radius > self.resolution.1 {
ball.velocity = Vector2D {
x: ball.velocity.x,
y: -1.0 * ball.velocity.y,
};
ball.location.y = self.resolution.1 - ball.radius;
}
if ball.location.y - ball.radius < 0.0 {
ball.velocity = Vector2D {
x: ball.velocity.x,
y: -1.0 * ball.velocity.y,
};
ball.location.y = ball.radius;
}
if ball.location.x + ball.radius > self.resolution.0 {
ball.velocity = Vector2D {
x: -1.0 * ball.velocity.x,
y: ball.velocity.y,
};
ball.location.x = self.resolution.0 - ball.radius;
}
if ball.location.x - ball.radius < 0.0 {
ball.velocity = Vector2D {
x: -1.0 * ball.velocity.x,
y: ball.velocity.y,
};
ball.location.x = ball.radius;
}
}
fn check_for_collisions_and_update_velocity(&mut self) -> Vec<Ball> {
let mut sorted_balls = self.balls.to_vec();
sorted_balls.sort_by(|a, b| a.location.x.partial_cmp(&b.location.x).unwrap());
let mut velocity_updates: Vec<Vector2D> = Vec::new();
let mut location_updates: Vec<Vector2D> = Vec::new();
for i in 0..sorted_balls.len() {
velocity_updates.push(sorted_balls[i].velocity);
location_updates.push(sorted_balls[i].location);
}
for i in 0..sorted_balls.len() {
for j in (i + 1)..sorted_balls.len() {
let ball1 = &sorted_balls[i];
let ball2 = &sorted_balls[j];
const SMALL_T: f64 = 0.00001;
let is_collision = location_updates[i].subtract(&location_updates[j]).norm()
< ball1.radius + ball2.radius;
if !is_collision {
continue;
}
// Naive way to resolve overlapping collision
loop {
if self.gravity_on {
break;
}
let is_collision = location_updates[i].subtract(&location_updates[j]).norm()
<= ball1.radius + ball2.radius;
if !is_collision {
break;
}
location_updates[i] =
location_updates[i].add(&velocity_updates[i].scale(-SMALL_T));
location_updates[j] =
location_updates[j].add(&velocity_updates[j].scale(-SMALL_T));
}
// Update the particle velocities
let v1_minus_v2 = velocity_updates[i].subtract(&velocity_updates[j]);
let x1_minus_x2 = location_updates[i].subtract(&location_updates[j]);
let distance = x1_minus_x2.norm();
let mass_term_1 = (2.0 * ball2.mass) / (ball1.mass + ball2.mass);
let dot_product_term_1 = v1_minus_v2.dot(&x1_minus_x2) / (distance * distance);
let mut velocity_ball1 = velocity_updates[i]
.subtract(&x1_minus_x2.scale(dot_product_term_1 * mass_term_1));
let mass_term_2 = (2.0 * ball1.mass) / (ball1.mass + ball2.mass);
let v2_minus_v1 = v1_minus_v2.scale(-1.0);
let x2_minus_x1 = x1_minus_x2.scale(-1.0);
let dot_product_term_2 = v2_minus_v1.dot(&x2_minus_x1) / (distance * distance);
let mut velocity_ball2 = velocity_updates[j]
.subtract(&x2_minus_x1.scale(dot_product_term_2 * mass_term_2));
if velocity_ball2.norm() > 100.0 {
velocity_ball2 = velocity_ball2.normalize().scale(100.0);
}
if velocity_ball1.norm() > 100.0 {
velocity_ball1 = velocity_ball1.normalize().scale(100.0);
}
velocity_updates[i] = velocity_ball1;
velocity_updates[j] = velocity_ball2;
}
}
for (i, ball) in sorted_balls.iter_mut().enumerate() {
ball.velocity = velocity_updates[i];
ball.location = location_updates[i];
self.clamp_ball_location(ball);
}
return sorted_balls;
}
}
/// Entry point: prompts on stdin for the ball count and gravity mode, builds
/// a window and randomised balls, then runs the piston event loop at
/// 1000 updates/s until the window closes (Esc also exits).
fn main() {
    // Change this to OpenGL::V2_1 if not working.
    let opengl = OpenGL::V3_2;
    let mut num_balls = String::new();
    println!("Enter number of balls in simulation: ");
    io::stdin()
        .read_line(&mut num_balls)
        .ok()
        .expect("Couldn't read line");
    let num_balls: u32 = num_balls.trim().parse().expect("Wanted a number");
    let mut gravity = String::new();
    println!("Do you want gravitational attraction (0 is No, any other number yes): ");
    io::stdin()
        .read_line(&mut gravity)
        .ok()
        .expect("Couldn't read line");
    let gravity: u32 = gravity.trim().parse().expect("Wanted a number");
    let mut gravity_on = false;
    if gravity != 0 {
        gravity_on = true;
    }
    // Window resolution
    let width = 1200.0;
    let height = 600.0;
    // Create an Glutin window.
    let mut window: Window = WindowSettings::new("simulation", [width, height])
        .graphics_api(opengl)
        .exit_on_esc(true)
        .build()
        .unwrap();
    // Create objects in simulation: random positions, velocities and colours.
    // NOTE(review): positions are sampled in [0, width) x [0, height), so a
    // ball can start partially outside the window until the first clamp.
    let mut balls = Vec::new();
    let radius = 10.0;
    let mut rng = rand::thread_rng();
    for _ in 0..num_balls {
        balls.push(Ball {
            acceleration: Vector2D { x: 0.0, y: 0.0 },
            velocity: Vector2D {
                x: rng.gen_range(-100.0..100.0),
                y: rng.gen_range(-100.0..100.0),
            },
            location: Vector2D {
                x: rng.gen_range(0.0..(width)),
                y: rng.gen_range(0.0..(height)),
            },
            prev_location: Vector2D { x: 0.0, y: 0.0 },
            radius,
            color: [
                rng.gen_range(0.2..1.0),
                rng.gen_range(0.2..1.0),
                rng.gen_range(0.2..1.0),
                rng.gen_range(0.5..1.0),
            ],
            mass: 1e16,
        });
    }
    // Create a new simulation and run it.
    let mut simulation = Simulation {
        gl: GlGraphics::new(opengl),
        balls: balls,
        resolution: (width, height),
        simulation_factor: 1.0,
        gravity_on: gravity_on,
    };
    let mut settings = EventSettings::new();
    settings.ups = 1000; // High update rate keeps the integration stable.
    let mut events = Events::new(settings);
    while let Some(e) = events.next(&mut window) {
        if let Some(args) = e.render_args() {
            simulation.render(&args);
        }
        if let Some(args) = e.update_args() {
            simulation.update(&args);
        }
    }
}
|
use crate::{bikes::BicycleDomain, error::Error};
/// CRUD repository abstraction for bicycles.
pub trait BicycleRepoInterface {
    /// Persists a new bicycle and returns the stored representation.
    fn create(&self, bike: BicycleDomain) -> Result<BicycleDomain, Error>;
    /// Updates an existing bicycle and returns the stored representation.
    fn update(&self, bike: BicycleDomain) -> Result<BicycleDomain, Error>;
    /// Deletes the bicycle with `id`; returns whether a row was removed.
    fn delete(&self, id: i32) -> Result<bool, Error>;
    /// Returns all stored bicycles.
    fn find_all(&self) -> Result<Vec<BicycleDomain>, Error>;
    /// Returns the bicycle with `id`, or an error if absent.
    fn find_by_id(&self, id: i32) -> Result<BicycleDomain, Error>;
}
|
use aubio_rs::{OnsetMode, Tempo};
use nannou::prelude::*;
use nannou::ui::prelude::*;
use nannou_audio as audio;
use ringbuf::{Consumer, Producer, RingBuffer};
// Wire up the nannou app: `model` builds initial state, `update` runs each
// tick, `view` renders into a single window.
fn main() {
    nannou::app(model).update(update).simple_window(view).run();
}
// Generate unique conrod widget ids for the three UI controls.
widget_ids! {
    struct Ids {
        startstop,
        threshold,
        silence,
    }
}
/// Application state shared between the UI, the audio stream, and the
/// beat detector.
struct Model {
    ui: Ui, // conrod UI handle
    ids: Ids, // Widget ids generated by `widget_ids!`
    in_stream: audio::Stream<InputModel>, // Microphone capture stream
    consumer: Consumer<f32>, // Read side of the audio ring buffer
    tempo: Tempo, // aubio tempo/beat detector
    tempo_result: f32, // Last detector output (> 0 indicates a beat)
    strength: f32, // Decaying beat-flash intensity drawn by `view`
    threshold: f32, // Current detector threshold (slider-controlled)
    silence: f32, // Current silence gate (slider-controlled)
}
/// State owned by the audio thread: the write side of the ring buffer.
struct InputModel {
    producer: Producer<f32>,
}
/// Builds the app state: the UI and widget ids, a 2048-sample ring buffer
/// between the audio callback and the update loop, a 44.1 kHz input stream,
/// and an aubio tempo detector (1024-sample window, 512-sample hop).
fn model(app: &App) -> Model {
    let mut ui = app.new_ui().build().unwrap();
    let ids = Ids::new(ui.widget_id_generator());
    let audio_host = audio::Host::new();
    let ringbuf = RingBuffer::<f32>::new(2048);
    let (producer, consumer) = ringbuf.split();
    let in_model = InputModel { producer };
    let in_stream = audio_host
        .new_input_stream::<InputModel, f32>(in_model)
        .capture(input)
        .sample_rate(44100)
        .build()
        .unwrap();
    let mut tempo = Tempo::new(OnsetMode::Complex, 1024, 512, 44100).unwrap();
    // Initial detector settings mirror the slider defaults below.
    tempo.set_silence(0.1);
    tempo.set_threshold(0.3);
    Model {
        ui,
        ids,
        in_stream,
        consumer,
        tempo,
        tempo_result: 0.0,
        strength: 0.0,
        threshold: 0.3,
        silence: 0.1,
    }
}
// Audio-input callback: forward the first channel of each captured frame
// into the ring buffer; samples are silently dropped when the buffer is full.
fn input(model: &mut InputModel, buffer: &audio::Buffer) {
    buffer.frames().for_each(|frame| {
        let _ = model.producer.push(frame[0]);
    });
}
/// Per-tick UI and analysis: draws the start/stop button and the two sliders,
/// then feeds buffered audio to the tempo detector in 1024-sample windows
/// advanced by 512 samples (50% overlap), flashing `strength` on each beat.
fn update(_app: &App, model: &mut Model, _update: Update) {
    let ui = &mut model.ui.set_widgets();
    for _click in widget::Button::new()
        .top_left_with_margin(20.0)
        .w_h(200.0, 60.0)
        .label(if model.in_stream.is_playing() {
            "Stop"
        } else {
            "Start"
        })
        .set(model.ids.startstop, ui)
    {
        // Toggle audio capture on click.
        if model.in_stream.is_playing() {
            model.in_stream.pause().unwrap();
        } else {
            model.in_stream.play().unwrap();
        }
    }
    for value in widget::Slider::new(model.threshold, 0.0, 1.0)
        .down(10.0)
        .w_h(200.0, 30.0)
        .label("Threshold")
        .set(model.ids.threshold, ui)
    {
        model.threshold = value as f32;
        model.tempo.set_threshold(model.threshold);
    }
    for value in widget::Slider::new(model.silence, 0.0, 1.0)
        .down(10.0)
        .w_h(200.0, 30.0)
        .label("Silence")
        .set(model.ids.silence, ui)
    {
        model.silence = value as f32;
        model.tempo.set_silence(model.silence);
    }
    while model.consumer.len() >= 1024 {
        let mut samples = [0.0f32; 1024];
        // Copy the two (possibly wrapped) ring-buffer segments into one
        // contiguous window without consuming them; the >= 1024 check above
        // guarantees both copies fill the array exactly.
        model.consumer.access(|s1, s2| {
            let len_s1 = std::cmp::min(1024, s1.len());
            samples[0..len_s1].copy_from_slice(&s1[0..len_s1]);
            let len_s2 = std::cmp::min(1024 - len_s1, s2.len());
            samples[len_s1..1024].copy_from_slice(&s2[0..len_s2]);
        });
        // Advance by half a window (hop size 512) for overlapped analysis.
        model.consumer.discard(512);
        model.tempo_result = model.tempo.do_result(samples).unwrap();
        if model.tempo_result > 0.0 {
            model.strength = 1.0
        }
    }
    // Exponential decay so the beat flash fades between frames.
    model.strength *= 0.8;
}
/// Renders a red circle whose diameter tracks the decaying beat `strength`,
/// then draws the UI widgets on top.
fn view(app: &App, model: &Model, frame: Frame) {
    let draw = app.draw();
    draw.background().rgb(0.2, 0.2, 0.2);
    draw.ellipse()
        .color(RED)
        .x_y(0.0, 0.0)
        .w_h(model.strength * 400.0, model.strength * 400.0);
    draw.to_frame(app, &frame).unwrap();
    model.ui.draw_to_frame(app, &frame).unwrap();
}
|
use mygrep::*;
// Case-sensitive search: "duct" matches inside "productive." but must not
// match "Duct tape.".
#[test]
fn query_casesensitive() {
    let query = "duct";
    let contents = "\
Rust:
safe, fast, productive.
rusty.
Duct tape.";
    assert_eq!(vec!["safe, fast, productive."], search(query, contents));
}
// Case-insensitive search: "RuSt" matches both "Rust:" and "rusty.".
#[test]
fn query_caseinsensitive() {
    let query = "RuSt";
    let contents = "\
Rust:
safe, fast, productive.
rusty.
Duct tape.";
    assert_eq!(vec!["Rust:", "rusty."], search_ignore_case(query, contents));
}
use num_format::{Locale, ToFormattedString};
use std::io::{stdout, Write};
use std::iter::repeat;
use std::time::Instant;
use unicode_segmentation::UnicodeSegmentation;
use crate::hash_file_process::{FileProcessEntry, FileProgress};
use crate::speed::get_speed;
use crate::tty::terminal_size;
const OUTPUT_REFRESH_IN_MILLIS: u32 = 233;
/// Renders single-line, in-place progress output to the terminal, throttled
/// so stdout is not flooded.
pub struct Output {
    output_width: usize, // Usable terminal width (columns minus one)
    refresh_rate_in_millis: u32, // Minimum interval between rewrites
    last_output_instant: Option<Instant>, // When we last printed, if ever
    last_output_file_progress: FileProgress, // Progress state at the last print
}
impl Output {
    /// Creates an `Output` sized to the current terminal.
    ///
    /// Panics if the terminal size cannot be queried.
    pub fn new() -> Self {
        let (output_width, _) = terminal_size().unwrap();
        Output {
            // Keep one column spare so writes never wrap onto the next line.
            output_width: (output_width.0 - 1) as usize,
            refresh_rate_in_millis: OUTPUT_REFRESH_IN_MILLIS,
            last_output_instant: None,
            last_output_file_progress: FileProgress {
                ..Default::default()
            },
        }
    }
    /// Right-pads `line` with spaces to the terminal width so that a `\r`
    /// rewrite fully covers whatever was printed before. Width is counted in
    /// grapheme clusters.
    fn pad_line(&self, line: String) -> String {
        let line_len = line.graphemes(true).count();
        if line_len < self.output_width {
            // Build the padding directly instead of cloning the input first.
            line + &" ".repeat(self.output_width - line_len)
        } else {
            line
        }
    }
    /// Core line writer: prints `file_path` plus a progress/info suffix.
    ///
    /// Output is throttled to `refresh_rate_in_millis` unless `error` or
    /// `new_line` forces an immediate write. Errors go to stderr; `new_line`
    /// commits the line with a newline; otherwise the line is rewritten in
    /// place via `\r`. Over-long paths are truncated from the left with "..".
    fn write(
        &mut self,
        file_path: &str,
        file_size: u64,
        bytes_processed: u64,
        info: &str,
        new_line: bool,
        error: bool,
    ) {
        let now = Instant::now();
        let elapsed_millis = match self.last_output_instant {
            Some(instant) => now.duration_since(instant).as_millis(),
            _ => 0,
        };
        if error || new_line || elapsed_millis > self.refresh_rate_in_millis.into() {
            let mut info_output = String::new();
            if error {
                info_output = format!(" => {}", info);
            } else if self.last_output_file_progress.file_path == file_path {
                if bytes_processed != self.last_output_file_progress.bytes_processed {
                    // Guard against division by zero for empty files.
                    let percent = match file_size {
                        0 => 100,
                        _ => bytes_processed * 100 / file_size,
                    };
                    let speed = get_speed(
                        bytes_processed,
                        self.last_output_file_progress.bytes_processed,
                        elapsed_millis,
                    );
                    info_output = format!(
                        " ({}; {} %; {} {})",
                        file_size.to_formatted_string(&Locale::en),
                        percent.to_formatted_string(&Locale::en),
                        speed.bytes_per_interval.to_formatted_string(&Locale::en),
                        speed.unit
                    );
                }
            }
            let printed_file_path: String;
            // `saturating_sub` prevents an underflow panic when the suffix is
            // wider than a very narrow terminal (the original subtraction
            // would panic in debug builds).
            // NOTE(review): `info_output.len()` counts bytes while the layout
            // otherwise counts graphemes; fine for the ASCII suffix built
            // above, but confirm if `info` can contain non-ASCII text.
            let file_path_max_size = self.output_width.saturating_sub(info_output.len());
            let mut file_path_graphemes = file_path.graphemes(true);
            let file_path_len = file_path_graphemes.clone().count();
            if file_path_max_size < file_path_len {
                // Drop graphemes from the front, leaving room for "..".
                let offset = file_path_len - file_path_max_size + "..".len();
                for _ in 0..offset {
                    file_path_graphemes.next();
                }
                printed_file_path = format!("{}{}", "..", file_path_graphemes.as_str());
            } else {
                printed_file_path = file_path.to_owned();
            }
            let line_output = self.pad_line(format!("{}{}", printed_file_path, info_output));
            if error {
                eprintln!(" {}\r", line_output);
            } else if new_line {
                println!(" {}\r", line_output);
            } else {
                print!(" {}\r", line_output);
            }
            stdout().flush().unwrap();
            self.last_output_instant = Some(Instant::now());
            self.last_output_file_progress = FileProgress {
                file_path: file_path.into(),
                file_size,
                bytes_processed,
            };
        }
    }
    /// Prints the initial "Opening files..." placeholder line.
    pub fn write_init(&mut self) {
        print!(" Opening files...\r");
        stdout().flush().unwrap();
        self.last_output_instant = Some(Instant::now());
    }
    /// Writes a committed (newline-terminated) error line for a failed entry.
    pub fn write_error(&mut self, file_process_entry: &FileProcessEntry) {
        self.write(
            file_process_entry.file_path.to_str().unwrap(),
            0,
            0,
            &format!("{:?}", file_process_entry.state),
            true,
            true,
        );
    }
    /// Writes a throttled, in-place progress update for a file.
    pub fn write_progress(&mut self, file_progress: &FileProgress) {
        self.write(
            &file_progress.file_path,
            file_progress.file_size,
            file_progress.bytes_processed,
            &file_progress.file_size.to_formatted_string(&Locale::en),
            false,
            false,
        );
    }
    /// Rewrites the line with just the path once a file is finished.
    pub fn write_processed(&mut self, file_path: &str) {
        self.write(file_path, 0, 0, "", false, false);
    }
    /// Prints a final, padded result line.
    pub fn write_result(&self, result: String) {
        println!("{}\r", self.pad_line(result));
    }
    /// Blanks the current line so the next output starts clean.
    pub fn clear_line(&self) {
        print!("{}\r", self.pad_line("".into()));
        stdout().flush().unwrap();
    }
}
|
/// Prints a number. The original read `number1` before assigning it, which
/// is a compile error (E0381: use of possibly-uninitialized variable) —
/// the binding must be initialized before use.
fn main() {
    let number1 = 12;
    print!("{}", number1);
}
|
pub mod add;
pub mod cp;
pub mod ld;
pub mod misc;
pub mod adc;
pub mod xor;
pub mod bit;
pub mod jump;
pub mod call;
pub mod rotate;
pub mod ret;
pub mod sbc;
pub mod sub;
pub mod and;
pub mod res;
|
pub mod manifest;
pub use manifest::*; |
/// Returns the index of the first element of the sorted slice `list` that is
/// not less than `value` (classic `lower_bound`); returns `list.len()` when
/// every element is smaller, and `0` for an empty slice.
///
/// Takes `&[T]` instead of `&Vec<T>`; existing `&Vec<T>` callers keep
/// working via deref coercion.
fn lower_bound<T: PartialOrd>(list: &[T], value: &T) -> usize {
    let mut lower = 0usize;
    let mut upper = list.len();
    // The loop handles the empty case naturally (lower == upper == 0).
    while lower != upper {
        // Overflow-safe midpoint; the original `lower + upper >> 1` parsed
        // as `(lower + upper) >> 1` but could overflow for huge slices.
        let middle = lower + (upper - lower) / 2;
        if list[middle] < *value {
            lower = middle + 1;
        } else {
            upper = middle;
        }
    }
    lower
}
/// Patience-sorting pass over `list`: the returned vector's length equals
/// the length of the longest strictly increasing subsequence.
/// NOTE(review): the returned elements are the "tails" array, which is not
/// guaranteed to be an actual subsequence of `list` — only its length is
/// meaningful. Confirm callers rely only on the length.
fn lis<T: PartialOrd + Copy>(list: &Vec<T>) -> Vec<T> {
    if list.is_empty() {
        return Vec::new();
    }
    let mut subseq: Vec<T> = Vec::new();
    subseq.push(*list.first().unwrap());
    for i in list[1..].iter() {
        if *i <= *subseq.last().unwrap() {
            // Replace the first tail >= *i, keeping tails minimal.
            let index = lower_bound(&subseq, i);
            subseq[index] = *i;
        } else {
            // Strictly larger than every tail: extend the subsequence.
            subseq.push(*i);
        }
    }
    return subseq;
}
// Demo: prints the patience "tails" arrays for two sample lists; each
// printed array's length equals that list's LIS length.
fn main() {
    let list = vec![3, 2, 6, 4, 5, 1];
    println!("{:?}", lis(&list));
    let list = vec![0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 11, 7, 15];
    println!("{:?}", lis(&list));
}
|
#![recursion_limit = "1024"]
mod subscribers;
mod types;
mod web_socket_session;
use subscribers::Subscribers;
pub use types::RawMeasurement;
use jsonrpc_core::MetaIoHandler;
use jsonrpc_core::{futures as futuresOne, Params, Value};
use jsonrpc_pubsub::typed::{Sink, Subscriber};
use jsonrpc_pubsub::{PubSubHandler, Session, SubscriptionId};
use jsonrpc_server_utils::tokio;
use log::{debug, trace};
use serde::Deserialize;
use std::collections::HashMap;
use std::sync::{Arc, Mutex, RwLock};
use warp::{filters::BoxedFilter, Filter};
use futuresOne::future::Future as FutureOne;
type PeripheralQuantityType = (i32, i32);
/// Shared state behind the WebSocket JSON-RPC pub/sub endpoint. Cloning is
/// cheap: the maps are behind `Arc<RwLock<..>>`.
#[derive(Clone)]
struct WebSocketHandler {
    executor: tokio::runtime::TaskExecutor, // Spawns notification futures
    // Per-kit-serial subscriber sets for raw-measurement updates.
    raw_measurement_subscriptions: Arc<RwLock<HashMap<String, Subscribers<Sink<Value>>>>>,
    // Latest measurement per (peripheral, quantity type), replayed to new subscribers.
    raw_measurement_buffer:
        Arc<RwLock<HashMap<String, HashMap<PeripheralQuantityType, RawMeasurement>>>>,
}
impl WebSocketHandler {
    /// Creates an empty handler that spawns notification futures on `executor`.
    fn new(executor: tokio::runtime::TaskExecutor) -> Self {
        Self {
            executor,
            raw_measurement_subscriptions: Arc::new(RwLock::new(HashMap::default())),
            raw_measurement_buffer: Arc::new(RwLock::new(HashMap::default())),
        }
    }
    /// Remembers the latest measurement per (peripheral, quantity type) for
    /// `kit_serial`, so it can be replayed to subscribers that connect later.
    fn buffer_raw_measurement(&self, kit_serial: String, raw_measurement: RawMeasurement) {
        let mut buffer = self.raw_measurement_buffer.write().unwrap();
        let index = (raw_measurement.peripheral, raw_measurement.quantity_type);
        buffer
            .entry(kit_serial)
            .or_default()
            .insert(index, raw_measurement);
    }
    /// Sends `raw_measurement` to every current subscriber of `kit_serial`
    /// (send failures are only logged — the transport is assumed gone),
    /// then buffers it for future subscribers.
    fn publish_raw_measurement(&self, kit_serial: String, raw_measurement: RawMeasurement) {
        let subscriptions = self.raw_measurement_subscriptions.read().unwrap();
        let subscribers: Option<&Subscribers<Sink<Value>>> = subscriptions.get(&kit_serial);
        if let Some(subscribers) = subscribers {
            let value = serde_json::to_value(raw_measurement.clone()).unwrap();
            for (id, subscriber) in subscribers.iter() {
                let id = id.clone();
                self.executor
                    .spawn(
                        subscriber
                            .notify(Ok(value.clone()))
                            .map(|_| ())
                            .map_err(move |_| {
                                debug!(
                                    "subscriber {:?}: failed sending raw measurement. Transport has gone away.",
                                    id
                                )
                            }),
                    );
            }
        }
        self.buffer_raw_measurement(kit_serial, raw_measurement);
    }
    /// Registers a new subscriber for `kit_serial` and immediately replays
    /// the buffered measurements to it.
    fn add_raw_measurement_subscriber(&self, kit_serial: String, subscriber: Subscriber<Value>) {
        let buffer = self.raw_measurement_buffer.read().unwrap();
        let resend: Vec<RawMeasurement> = match buffer.get(&kit_serial) {
            Some(pqt_raw_measurements) => pqt_raw_measurements.values().cloned().collect(),
            None => vec![],
        };
        let mut subscriptions = self.raw_measurement_subscriptions.write().unwrap();
        let subscribers = subscriptions.entry(kit_serial).or_default();
        let id = subscribers.add(subscriber);
        // `add` may fail (e.g. the subscription was rejected); only resend
        // when we actually obtained a sink for the new id.
        let sink = id.and_then(|id| subscribers.get(&id));
        // Resend buffered raw measurements to new connection.
        if let Some(sink) = sink {
            for raw_measurement in resend {
                self.executor.spawn(
                    sink.notify(Ok(serde_json::to_value(raw_measurement).unwrap()))
                        .map(|_| ())
                        .map_err(|_| ()),
                )
            }
        }
    }
    /// Drops subscription `id` wherever it appears and discards kit entries
    /// that no longer have any subscribers.
    fn remove_raw_measurement_subscriber(&self, id: SubscriptionId) {
        let mut subscriptions = self.raw_measurement_subscriptions.write().unwrap();
        // O(n) with n the number of distinct kits subscribed to.
        subscriptions.retain(|_, s| {
            s.remove(&id);
            !s.is_empty()
        });
        trace!("Raw measurement subscriber removed: {:?}", id);
    }
}
/// Public handle for publishing measurements to all WebSocket subscribers.
pub struct WebSocketPublisher {
    // TODO: perhaps communicate through a channel if the RwLocks become a bottleneck
    web_socket_handler: WebSocketHandler,
}
impl WebSocketPublisher {
    /// Publishes a raw measurement for `kit_serial` to current subscribers
    /// and buffers it for future ones.
    pub fn publish_raw_measurement(&mut self, kit_serial: String, raw_measurement: RawMeasurement) {
        self.web_socket_handler
            .publish_raw_measurement(kit_serial, raw_measurement);
    }
}
/// Runs a JSON-RPC server on top of a Warp WebSocket filter.
/// An executor for handling messages in run in another thread.
///
/// Returns a Warp filter and a handle to publish to subscriptions.
pub fn run() -> (BoxedFilter<(impl warp::Reply,)>, WebSocketPublisher) {
    // Dedicated (futures-0.1) tokio runtime for notification futures; it is
    // parked on an empty future in a background thread so spawned tasks run.
    let mut runtime = tokio::runtime::Builder::new().build().unwrap();
    let web_socket_handler = WebSocketHandler::new(runtime.executor());
    std::thread::spawn(move || runtime.block_on(futuresOne::future::empty::<(), ()>()));
    let mut io = PubSubHandler::new(MetaIoHandler::default());
    io.add_subscription(
        "rawMeasurements",
        ("subscribe_rawMeasurements", {
            let web_socket_handler = web_socket_handler.clone();
            move |params: Params, _: Arc<Session>, subscriber: jsonrpc_pubsub::Subscriber| {
                #[derive(Deserialize)]
                #[serde(rename_all = "camelCase")]
                struct SubParams {
                    kit_serial: String,
                }
                // On invalid params the subscriber is simply dropped, which
                // implicitly rejects the subscription.
                match params.parse::<SubParams>() {
                    Ok(sub_params) => {
                        let subscriber = Subscriber::new(subscriber);
                        web_socket_handler
                            .add_raw_measurement_subscriber(sub_params.kit_serial, subscriber);
                    }
                    Err(_) => {}
                }
            }
        }),
        ("unsubscribe_rawMeasurements", {
            let web_socket_handler = web_socket_handler.clone();
            move |id: SubscriptionId, _| {
                web_socket_handler.remove_raw_measurement_subscriber(id);
                futuresOne::future::ok(Value::Bool(true))
            }
        }),
    );
    let io_handler: MetaIoHandler<Arc<Session>> = io.into();
    // Monotonically increasing per-connection id, used only for logging.
    let num_sockets = Arc::new(Mutex::new(0usize));
    let filter = warp::ws()
        .map(move |ws: warp::ws::Ws| {
            let mut num_sockets = num_sockets.lock().unwrap();
            let socket_id: usize = *num_sockets;
            *num_sockets += 1;
            let io_handler = io_handler.clone();
            trace!("Websocket {} connecting", socket_id);
            ws.on_upgrade(move |web_socket| async move {
                debug!("Websocket {} upgraded", socket_id);
                web_socket_session::handle_session(socket_id, web_socket, io_handler).await;
                debug!("WebSocket {} stopped", socket_id);
            })
        })
        .boxed();
    let publisher = WebSocketPublisher {
        web_socket_handler: web_socket_handler.clone(),
    };
    (filter, publisher)
}
|
pub mod api;
pub mod configuration;
pub mod database;
pub mod database_structures;
pub mod server;
pub mod database_errors;
pub mod emailer;
pub mod oauth;
pub mod authorizer; |
extern crate arkecosystem_crypto;
extern crate serde;
extern crate serde_json;
use serde_json::{from_str, Value};
use std::fs::File;
use std::io::prelude::*;
pub mod transactions;
/// Loads the fixture `transactions/<transaction_type>/<name>` and parses it
/// as JSON. Panics if the fixture is missing or not valid JSON.
pub fn json_transaction(transaction_type: &str, name: &str) -> Value {
    let path = read_fixture(&format!("transactions/{}/{}", transaction_type, name));
    from_str(&path).unwrap()
}
/// Reads the fixture file `tests/fixtures/<path>.json` and returns its
/// contents. Panics with the offending path if the file cannot be read.
fn read_fixture(path: &str) -> String {
    let fixture_path = format!("tests/fixtures/{}.json", path);
    // `fs::read_to_string` replaces the manual open + read_to_string dance
    // and sizes the buffer from file metadata.
    std::fs::read_to_string(&fixture_path)
        .unwrap_or_else(|err| panic!("failed to read fixture {}: {}", fixture_path, err))
}
|
use super::service::Service;
use crate::{
await_test_server,
db::builders::UserBuilder,
tests::{self, setup_env, RequestJwtAuthExt as _},
};
use actix_web::{
http::{Method, StatusCode},
test::{call_service, read_body, TestRequest},
};
use bigdecimal::BigDecimal;
use serde_json::{json, Value};
// Listing records for a user with none yields an empty, unpaginated page.
#[actix_rt::test]
async fn index_when_no_records() {
    setup_env();
    let session = tests::DbSession::new();
    let mut service = await_test_server!(Service);
    let user = session.create_user(UserBuilder::default().tags(vec!["foo"]));
    let request = TestRequest::with_uri("/record-detail/")
        .jwt_auth(user.id)
        .to_request();
    let response = call_service(&mut service, request).await;
    assert!(response.status().is_success(), "response is not success");
    let response_body = read_body(response).await;
    let response_body = serde_json::from_slice::<Value>(&response_body)
        .expect(&format!("Failed to deserialize: {:?}", response_body));
    assert_eq!(
        json!({"total": 0, "results": [], "next": false, "previous": false}),
        response_body
    );
}
// The list endpoint rejects requests without a JWT.
#[actix_rt::test]
async fn index_requires_auth() {
    setup_env();
    let mut service = await_test_server!(Service);
    let request = TestRequest::with_uri("/record-detail/").to_request();
    let response = call_service(&mut service, request).await;
    assert_eq!(
        StatusCode::UNAUTHORIZED,
        response.status(),
        "wrong status code"
    );
}
// The update endpoint rejects requests without a JWT.
#[actix_rt::test]
async fn update_requires_auth() {
    setup_env();
    let mut service = await_test_server!(Service);
    let request = TestRequest::with_uri("/record-detail/123/")
        .method(Method::PUT)
        .to_request();
    let response = call_service(&mut service, request).await;
    assert_eq!(
        StatusCode::UNAUTHORIZED,
        response.status(),
        "wrong status code"
    );
}
// The create endpoint rejects requests without a JWT.
#[actix_rt::test]
async fn create_requires_auth() {
    setup_env();
    let mut service = await_test_server!(Service);
    let request = TestRequest::with_uri("/record-detail/")
        .method(Method::POST)
        .to_request();
    let response = call_service(&mut service, request).await;
    assert_eq!(
        StatusCode::UNAUTHORIZED,
        response.status(),
        "wrong status code"
    );
}
// Creating a record returns its id and persists amount/type/tags.
#[actix_rt::test]
async fn create_happy_path() {
    setup_env();
    let session = tests::DbSession::new();
    let mut service = await_test_server!(Service);
    let user = session.create_user(UserBuilder::default());
    let payload = json!({
        "amount": {"amount": 999.12, "currency": { "code": "CAD", "name": "Canadian Dollar" }},
        "transaction_type": "EXP",
        "tags": ["foo", "bar"],
    });
    let request = TestRequest::with_uri("/record-detail/")
        .method(Method::POST)
        .jwt_auth(user.id)
        .set_json(&payload)
        .to_request();
    let response = call_service(&mut service, request).await;
    assert_eq!(StatusCode::OK, response.status(), "wrong status code");
    let response_body = read_body(response).await;
    let response_body = serde_json::from_slice::<Value>(&response_body)
        .expect(&format!("Failed to deserialize: {:?}", response_body));
    // make sure that record was created properly
    let new_record_id = response_body.get("id").unwrap().as_i64().unwrap() as i32;
    let updated_record = session.find_record(new_record_id);
    assert_eq!(BigDecimal::from(999.12), updated_record.amount);
    assert_eq!("EXP", updated_record.transaction_type);
    assert_eq!(vec!["foo", "bar"], updated_record.tags);
}
// Updating an existing record replaces amount/type/tags and returns "".
#[actix_rt::test]
async fn update_happy_path() {
    setup_env();
    let session = tests::DbSession::new();
    let mut service = await_test_server!(Service);
    let user = session.create_user(UserBuilder::default());
    let record = session.create_record2(user.id);
    let payload = json!({
        "amount": {"amount": 999, "currency": { "code": "CAD", "name": "Canadian Dollar" }},
        "transaction_type": "INC",
        "tags": ["foo"],
    });
    let request = TestRequest::with_uri(&format!("/record-detail/{}/", record.id))
        .method(Method::PUT)
        .set_json(&payload)
        .jwt_auth(user.id)
        .to_request();
    let response = call_service(&mut service, request).await;
    assert_eq!(StatusCode::OK, response.status(), "wrong status code");
    let response_body = read_body(response).await;
    let response_body = serde_json::from_slice::<Value>(&response_body)
        .expect(&format!("Failed to deserialize: {:?}", response_body));
    assert_eq!(json!(""), response_body);
    // make sure that record was updated
    let updated_record = session.find_record(record.id);
    assert_eq!(BigDecimal::from(999), updated_record.amount);
    assert_eq!("INC", updated_record.transaction_type);
    assert_eq!(vec!["foo"], updated_record.tags);
}
|
#[macro_use]
extern crate service_core_derive;
// Mock service definition: the `mock_service` derive macro generates the
// scaffolding for a fake "database" service (lowercase name is intentional,
// hence the non_camel_case_types allow).
#[allow(non_camel_case_types)]
#[mock_service(name = "database", version = "0.1.0")]
pub struct database {}
|
// -*- mode:rust;mode:rust-playground -*-
// snippet of code @ 2017-04-18 11:36:23
// === Rust Playground ===
// Execute the snippet with Ctl-Return
// Remove the snippet completely with its dir and all files M-x `rust-playground-rm`
/// A generic 2D point.
struct Point<T> {
    x: T,
    y: T,
}
impl<T> Point<T> {
    /// Borrows the `x` coordinate.
    fn x(&self) -> &T {
        // Return the borrow directly — no temporary binding needed.
        &self.x
    }
}
// Demonstrates the generic accessor; prints "p.x = 5".
fn main() {
    let p = Point{x:5, y:10};
    println!("p.x = {}", p.x())
}
|
// https://www.codewars.com/kata/grasshopper-terminal-game-combat-function-1
/// Applies `damage` to `health`, flooring the result at zero so health
/// never goes negative.
fn combat(health: f32, damage: f32) -> f32 {
    let remaining = health - damage;
    if remaining < 0.0 {
        return 0.0;
    }
    remaining
}
// Sample expectations from the kata description, including the zero floor.
#[test]
fn example_tests() {
    assert_eq!(combat(100.0, 5.0), 95.0);
    assert_eq!(combat(92.0, 8.0), 84.0);
    assert_eq!(combat(20.0, 30.0), 0.0, "Health cannot go below 0");
}
use crate::{BalanceOf, Config, Pallet, Relayers};
use frame_support::traits::ExistenceRequirement::AllowDeath;
use frame_support::traits::{Currency, ExistenceRequirement, WithdrawReasons};
use frame_support::PalletId;
use sp_messenger::messages::FeeModel;
use sp_runtime::traits::{AccountIdConversion, CheckedDiv, CheckedSub};
use sp_runtime::{ArithmeticError, DispatchResult};
/// Messenger Id used to store deposits and fees.
const MESSENGER_PALLET_ID: PalletId = PalletId(*b"messengr");
impl<T: Config> Pallet<T> {
    /// Returns the account id that holds fees and acts as treasury for the messenger.
    pub(crate) fn messenger_account_id() -> T::AccountId {
        MESSENGER_PALLET_ID.into_account_truncating()
    }
    /// Ensures the fees from the sender per FeeModel provided for a single request for a response.
    ///
    /// The outbox fee is moved into the messenger account (to pay relayers
    /// once the response arrives); the inbox fee is withdrawn (burned) here,
    /// to be minted on the destination chain.
    #[inline]
    pub(crate) fn ensure_fees_for_outbox_message(
        sender: &T::AccountId,
        fee_model: &FeeModel<BalanceOf<T>>,
    ) -> DispatchResult {
        let msgr_acc_id = Self::messenger_account_id();
        // reserve outbox fee by transferring it to the messenger account.
        // we will use the funds to pay the relayers once the response is received.
        let outbox_fee = fee_model.outbox_fee().ok_or(ArithmeticError::Overflow)?;
        T::Currency::withdraw(
            sender,
            outbox_fee,
            WithdrawReasons::TRANSACTION_PAYMENT,
            AllowDeath,
        )?;
        T::Currency::deposit_creating(&msgr_acc_id, outbox_fee);
        // burn the fees that need to be paid on the dst_chain
        let inbox_fee = fee_model.inbox_fee().ok_or(ArithmeticError::Overflow)?;
        T::Currency::withdraw(
            sender,
            inbox_fee,
            WithdrawReasons::TRANSACTION_PAYMENT,
            AllowDeath,
        )?;
        Ok(())
    }
    /// Ensures the fee paid by the sender on the src_chain are minted here and paid to
    /// relayer set when the acknowledgments are received.
    #[inline]
    pub(crate) fn ensure_fees_for_inbox_message(
        fee_model: &FeeModel<BalanceOf<T>>,
    ) -> DispatchResult {
        let inbox_fee = fee_model.inbox_fee().ok_or(ArithmeticError::Overflow)?;
        let msngr_acc_id = Self::messenger_account_id();
        T::Currency::deposit_creating(&msngr_acc_id, inbox_fee);
        Ok(())
    }
    /// Distribute the rewards to the relayers.
    /// Operation is no-op if there is not enough balance to pay.
    /// Operation is no-op if there are no relayers.
    pub(crate) fn distribute_reward_to_relayers(reward: BalanceOf<T>) -> DispatchResult {
        let relayers = Relayers::<T>::get();
        let relayer_count: BalanceOf<T> = (relayers.len() as u32).into();
        // checked_div returns None when relayer_count is zero (no relayers).
        let reward_per_relayer = match reward.checked_div(&relayer_count) {
            // no relayers yet.
            None => return Ok(()),
            Some(reward) => reward,
        };
        // ensure we have enough to pay but maintain minimum existential deposit
        let msngr_acc_id = Self::messenger_account_id();
        if !T::Currency::free_balance(&msngr_acc_id)
            .checked_sub(&T::Currency::minimum_balance())
            .map(|usable| usable >= reward)
            .unwrap_or(false)
        {
            return Ok(());
        }
        // distribute reward to relayers
        for relayer in relayers.into_iter() {
            // ensure msngr account is still kept alive after transfer.
            T::Currency::transfer(
                &msngr_acc_id,
                &relayer,
                reward_per_relayer,
                ExistenceRequirement::KeepAlive,
            )?;
        }
        Ok(())
    }
}
|
#![deny(clippy::all, clippy::pedantic)]
/// Binary-searches a sorted slice-like `array` for `key`.
///
/// Returns `Some(index)` of a matching element or `None` when `key` is absent.
/// The input must be sorted ascending; if several elements equal `key`, any
/// one of their indices may be returned (same contract as the original
/// hand-rolled search).
#[allow(clippy::needless_pass_by_value)]
pub fn find<V, T>(array: V, key: T) -> Option<usize>
where
    T: std::cmp::Ord,
    V: AsRef<[T]>,
{
    // Delegate to the standard library's binary search instead of a
    // hand-rolled halving loop; `ok()` discards the Err(insertion point)
    // returned for misses, mapping it to `None`.
    array.as_ref().binary_search(&key).ok()
}
|
use serde::{Deserialize, Serialize};
use common::event::EventPublisher;
use common::result::Result;
use crate::domain::author::{AuthorId, AuthorRepository};
use crate::domain::category::{CategoryId, CategoryRepository};
use crate::domain::publication::{
Header, Image, Name, Publication, PublicationRepository, Synopsis, Tag,
};
/// Raw, unvalidated input for the "create publication" use case.
/// Each string field is validated into its domain value object in `Create::exec`.
#[derive(Deserialize)]
pub struct CreateCommand {
/// Publication title (validated into `Name`).
pub name: String,
/// Short description (validated into `Synopsis`).
pub synopsis: String,
/// Identifier of an existing category (validated into `CategoryId`).
pub category_id: String,
/// Free-form tag strings (each validated into `Tag`).
pub tags: Vec<String>,
/// Cover image locator (validated into `Image`).
pub cover: String,
}
/// Result of a successful publication creation.
#[derive(Serialize)]
pub struct CreateResponse {
/// Identifier of the newly created publication.
pub id: String,
}
/// Use case that creates a publication for an authenticated author.
/// Holds borrowed collaborators so one instance can serve a single request.
pub struct Create<'a> {
event_pub: &'a dyn EventPublisher,
author_repo: &'a dyn AuthorRepository,
category_repo: &'a dyn CategoryRepository,
publication_repo: &'a dyn PublicationRepository,
}
impl<'a> Create<'a> {
    /// Wires the use case to its collaborators.
    pub fn new(
        event_pub: &'a dyn EventPublisher,
        author_repo: &'a dyn AuthorRepository,
        category_repo: &'a dyn CategoryRepository,
        publication_repo: &'a dyn PublicationRepository,
    ) -> Self {
        Create {
            event_pub,
            author_repo,
            category_repo,
            publication_repo,
        }
    }

    /// Validates `cmd`, checks that the referenced category and the
    /// authenticated author exist, persists the new publication and
    /// publishes its domain events. Returns the new publication's id.
    pub async fn exec(&self, auth_id: String, cmd: CreateCommand) -> Result<CreateResponse> {
        // Validate raw input into domain value objects, in the same order as
        // before so the first invalid field reported is unchanged.
        let name = Name::new(cmd.name)?;
        let synopsis = Synopsis::new(cmd.synopsis)?;
        let mut tags = Vec::with_capacity(cmd.tags.len());
        for raw_tag in cmd.tags {
            tags.push(Tag::new(raw_tag)?);
        }
        let cover = Image::new(cmd.cover)?;
        let category_id = CategoryId::new(cmd.category_id)?;
        // Fails when the category does not exist.
        self.category_repo.find_by_id(&category_id).await?;
        let header = Header::new(name, synopsis, category_id, tags, cover)?;
        let author_id = AuthorId::new(auth_id)?;
        // Fails when the author does not exist.
        self.author_repo.find_by_id(&author_id).await?;
        let new_id = self.publication_repo.next_id().await?;
        let mut publication = Publication::new(new_id, author_id, header)?;
        self.publication_repo.save(&mut publication).await?;
        self.event_pub
            .publish_all(publication.base().events()?)
            .await?;
        Ok(CreateResponse {
            id: publication.base().id().to_string(),
        })
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::domain::publication::PublicationId;
use crate::mocks;
// Happy path: a valid command creates the publication, persists it and
// publishes exactly one domain event.
#[tokio::test]
async fn valid() {
let c = mocks::container();
let uc = Create::new(
c.event_pub(),
c.author_repo(),
c.category_repo(),
c.publication_repo(),
);
// Seed the repositories with an existing author and category.
let mut author = mocks::author1();
c.author_repo().save(&mut author).await.unwrap();
let mut category = mocks::category1();
c.category_repo().save(&mut category).await.unwrap();
let res = uc
.exec(
author.base().id().to_string(),
CreateCommand {
name: "Publication 1".to_owned(),
synopsis: "Synopsis...".to_owned(),
category_id: category.base().id().to_string(),
tags: vec!["Tag 1".to_owned()],
cover: "cover.com/cover.jpg".to_owned(),
},
)
.await
.unwrap();
// The publication must be retrievable by the id returned in the response.
let publication = c
.publication_repo()
.find_by_id(&PublicationId::new(&res.id).unwrap())
.await
.unwrap();
assert_eq!(publication.base().id().value(), res.id);
assert_eq!(publication.header().name().value(), "Publication 1");
assert_eq!(publication.header().synopsis().value(), "Synopsis...");
assert_eq!(publication.pages().len(), 0);
assert_eq!(c.event_pub().events().await.len(), 1);
}
// Value-object validation failures (empty name / empty synopsis) must be
// surfaced as errors from `exec`.
#[tokio::test]
async fn invalid_data() {
let c = mocks::container();
let uc = Create::new(
c.event_pub(),
c.author_repo(),
c.category_repo(),
c.publication_repo(),
);
let mut author = mocks::author1();
c.author_repo().save(&mut author).await.unwrap();
let mut category = mocks::category1();
c.category_repo().save(&mut category).await.unwrap();
// Empty name.
assert!(uc
.exec(
author.base().id().to_string(),
CreateCommand {
name: "".to_owned(),
synopsis: "Synopsis...".to_owned(),
category_id: category.base().id().to_string(),
tags: vec!["Tag 1".to_owned()],
cover: "cover.com/cover.jpg".to_owned(),
}
)
.await
.is_err());
// Empty synopsis.
assert!(uc
.exec(
author.base().id().to_string(),
CreateCommand {
name: "Publication 1".to_owned(),
synopsis: "".to_owned(),
category_id: category.base().id().to_string(),
tags: vec!["Tag 1".to_owned()],
cover: "cover.com/cover.jpg".to_owned(),
}
)
.await
.is_err());
}
// Referencing a category that was never saved must fail the use case.
#[tokio::test]
async fn not_existing_category() {
let c = mocks::container();
let uc = Create::new(
c.event_pub(),
c.author_repo(),
c.category_repo(),
c.publication_repo(),
);
let mut author = mocks::author1();
c.author_repo().save(&mut author).await.unwrap();
// Note: the category is built but intentionally NOT saved.
let category = mocks::category1();
assert!(uc
.exec(
author.base().id().to_string(),
CreateCommand {
name: "Publication 1".to_owned(),
synopsis: "Synopsis...".to_owned(),
category_id: category.base().id().to_string(),
tags: vec!["Tag 1".to_owned()],
cover: "cover.com/cover.jpg".to_owned(),
},
)
.await
.is_err());
}
}
|
use crate::day2;
/// Toboggan-trajectory driver (Advent of Code 2020 day 3): counts trees on
/// the (3,1) slope, then multiplies the tree counts of all five slopes.
///
/// The printed labels ("[day1]"/"[day2]") are kept verbatim; they are part
/// of the program's observable output.
pub(crate) fn way_down_we_go() {
    let input: Vec<String> = day2::fetch_input();
    let trees = count_trees(3, 1, input.clone());
    let trees1 = count_trees(1, 1, input.clone());
    let trees2 = count_trees(5, 1, input.clone());
    let trees3 = count_trees(7, 1, input.clone());
    // Last use of the input: hand the buffer over instead of cloning again.
    let trees4 = count_trees(1, 2, input);
    println!("[day1] trees: {}", trees);
    println!(
        "[day2] trees: {}",
        trees * trees1 * trees2 * trees3 * trees4
    );
}
/// Counts trees (`'#'`) hit while sledding through `forest` on a fixed slope.
///
/// Starting at the top-left cell, the position advances `right` columns and
/// `down` rows per step; columns wrap around because the pattern repeats
/// horizontally. Rows are assumed equally wide. `down` must be non-zero or
/// the walk never terminates (unchanged from the original).
///
/// Returns the number of `'#'` cells visited; 0 for an empty forest.
fn count_trees(right: usize, down: usize, forest: Vec<String>) -> i64 {
    // Guard the empty forest explicitly instead of indexing `forest[0]`.
    let width = match forest.first() {
        Some(row) => row.len(),
        None => return 0,
    };
    let mut trees = 0;
    let mut row = 0;
    let mut col = 0;
    while row < forest.len() {
        // `== Some('#')` instead of `.unwrap() == '#'`: a short row no
        // longer panics, it simply counts as open ground.
        if forest[row].chars().nth(col) == Some('#') {
            trees += 1;
        }
        row += down;
        // Horizontal wrap in O(1) (modulo) instead of repeated subtraction.
        col = (col + right) % width;
    }
    trees
}
|
use parallel::*;
// Entry point of the parallelism demo: prints CPU info, then runs the
// Monte-Carlo pi estimation, with start/done markers around the work.
fn main() {
println!("Parallel: start");
show_cpus();
monte_carlo::pi::main();
println!("Parallel: done");
}
// Prints the physical core count and the logical CPU count (the latter
// includes hyper-threads), as reported by the `num_cpus` crate.
fn show_cpus() {
println!("cores = {}", num_cpus::get_physical());
println!("cpus = {}", num_cpus::get());
}
use crate::emitter::emitter::Emitter;
use crate::emitter::environment::{Value, Variable};
use crate::parser::node::expression::unary::primary::PrimaryNode;
/// AST node for a prefix (unary) expression such as `*x` or `&x`.
#[derive(Debug, PartialEq, Clone)]
pub struct PrefixNode {
/// The prefix operator as written in the source (e.g. "*" or "&").
pub op: String,
/// The operand the operator applies to.
pub val: PrimaryNode,
}
impl PrefixNode {
pub fn emit(self, emitter: &mut Emitter) -> Value {
match self.op.as_ref() {
"*" => {
let identifier = self.val.get_identifier();
let alloca = match emitter.environment.get(&identifier) {
Some(variable) => match variable {
Variable::Int(int_variable) => int_variable.pointer,
_ => panic!(),
},
None => panic!(format!(
"error: use of undeclared identifier \'{}\'",
identifier
)),
};
Value::Int(
emitter
.builder
.build_load(alloca, &identifier)
.into_int_value(),
)
} // dereference
"&" => {
let identifier = self.val.get_identifier();
let alloca = match emitter.environment.get(&identifier) {
Some(variable) => match variable {
Variable::Int(int_variable) => int_variable.pointer,
_ => panic!(),
},
None => panic!(format!(
"error: use of undeclared identifier \'{}\'",
identifier
)),
};
Value::Int(
emitter
.builder
.build_load(alloca, &identifier)
.into_int_value(),
)
} // reference
_ => panic!(),
}
}
}
|
use std::fmt;
use std::fmt::{Display, Formatter};
use std::fs;
use std::io;
use std::path::PathBuf;
use chrono::Utc;
use semver::Version;
use crate::release::Release;
/// Newtype wrapper around `io::Error` for changelog-generation failures.
#[derive(Debug)]
pub struct Error(io::Error);
impl Display for Error {
    /// Delegates formatting to the wrapped `io::Error`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
// Marker impl: default `source`/`description` behavior is sufficient here.
impl std::error::Error for Error {}
// Lets `?` convert raw `io::Error`s from fs calls into this module's Error.
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self {
Self(err)
}
}
/// Renders the release section for `CHANGELOG.md`, prints it to stdout, and —
/// unless `dry_run` is set — prepends it to the existing changelog file
/// (creating the file if it does not exist yet).
pub fn generate(release: &Release<Version>, dry_run: bool) -> Result<(), Error> {
    let today = Utc::now().format("%Y-%m-%d");
    let path: PathBuf = PathBuf::from("CHANGELOG.md");
    // New section: "## <version> - <date>" followed by the markdown changes.
    let mut section = format!(
        "## {} - {}\n\n{}",
        release.version,
        today,
        release.changelog.markdown(),
    );
    println!("{}", section.trim());
    if dry_run {
        return Ok(());
    }
    // Prepend: append the previous changelog after the new section.
    if path.exists() {
        section.push('\n');
        section.push_str(&fs::read_to_string(&path)?);
    }
    fs::write(path, section)?;
    Ok(())
}
|
use crate::{
ast::ty::{Type, TypeData},
wasm::{
frame::frame::{Frame, FrameType, Frame_},
il::{
module::{ModuleList, Module_},
stm::StmList,
},
},
};
use super::entry_map::EntryMap;
/// Mutable context threaded through semantic analysis / wasm lowering.
pub struct SemanticParam {
/// Value environment (variables).
pub venv: EntryMap,
/// Type environment.
pub tenv: EntryMap,
/// Nesting depth of the loop currently being analyzed.
pub loop_index: i32,
/// Counter used to number generated functions.
pub func_num: i32,
/// Stack of frames; the last element is the innermost frame.
pub frame: Vec<Frame>,
/// Statements accumulated for the current unit.
pub temp_stmlist: StmList,
/// Modules emitted so far (seeded with the std memory import).
pub result_modlist: ModuleList,
}
impl SemanticParam {
    /// Creates a fresh context whose module list is seeded with a JS import
    /// of `std.memory`, backed by a 100-page memory module.
    pub fn new() -> SemanticParam {
        let result_modlist = vec![Module_::jsimport_mod(
            "memory".to_string(),
            "std".to_string(),
            Module_::mem_mod(100),
        )];
        SemanticParam {
            venv: EntryMap::new(),
            tenv: EntryMap::new(),
            loop_index: 0,
            func_num: 0,
            frame: vec![],
            temp_stmlist: vec![],
            result_modlist,
        }
    }

    /// Memory consumed so far: the top frame's offset plus its size, or 0
    /// when no frame has been pushed yet. (Single `last()` lookup instead of
    /// the previous `len() > 0` check plus two unwraps.)
    pub fn get_mem_size(&self) -> i32 {
        match self.frame.last() {
            Some(top) => top.memory_offset + top.frame_size,
            None => 0,
        }
    }

    /// Pushes a new frame for `func_name` and returns a mutable reference to
    /// it. When `class` names a type (`Name`/`Template`), the frame is a
    /// method frame carrying that type; otherwise a plain function frame.
    ///
    /// Takes `&str` instead of `&String` (callers coerce transparently).
    pub fn new_frame(&mut self, func_name: &str, class: Option<&Type>) -> &mut Frame {
        let frame_type = match class {
            Some(ty) => match &ty.data {
                TypeData::Name(_) | TypeData::Template(_, _) => {
                    FrameType::Method(func_name.to_owned(), ty.clone())
                }
                _ => FrameType::Func(func_name.to_owned()),
            },
            None => FrameType::Func(func_name.to_owned()),
        };
        // New frame starts right after the memory used by existing frames.
        let new_frame = Frame_::new(self.get_mem_size(), frame_type);
        self.frame.push(new_frame);
        self.frame.last_mut().unwrap()
    }

    /// The innermost (most recently pushed) frame, if any.
    pub fn current_frame(&self) -> Option<&Frame> {
        self.frame.last()
    }
}
|
// FIPS-180-1 compliant SHA-1 implementation
//
// The SHA-1 standard was published by NIST in 1993.
// https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf
//
// ❗️ SHA1算法在2005年后被证实存在弱点,可以被加以破解。
// ‼️ SHA1算法在2017年被证实无法防止碰撞攻击,因此不适用于安全性认证。
use core::convert::TryFrom;
// NOTE: 虽然在 X86 和 AArch64 架构上,有很多款芯片都支持对 SHA1 加速,
// 但是考虑到 SHA1 已经被证实存在弱点,所以这里不再对 SHA1 的代码
// 做任何性能方面的改进,以减轻代码维护工作。
//
// 如果你需要更好的性能,建议参考 `noloader/SHA-Intrinsics` 的代码自行实现:
// https://github.com/noloader/SHA-Intrinsics/blob/master/sha1-arm.c
// https://github.com/noloader/SHA-Intrinsics/blob/master/sha1-x86.c
//
// FIPS-180-1 round constants: K1 for rounds 0-19, K2 for 20-39,
// K3 for 40-59, K4 for 60-79.
const K1: u32 = 0x5a827999;
const K2: u32 = 0x6ed9eba1;
const K3: u32 = 0x8f1bbcdc;
const K4: u32 = 0xca62c1d6;
// Initial chaining values H0..H4 from the SHA-1 specification.
const INITIAL_STATE: [u32; 5] = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];
/// Computes the SHA-1 digest of `data` in one call.
///
/// Convenience wrapper around `Sha1::oneshot`. Note: SHA-1 is
/// cryptographically broken (collisions demonstrated in 2017, see the module
/// header) — do not use it where collision resistance matters.
pub fn sha1<T: AsRef<[u8]>>(data: T) -> [u8; Sha1::DIGEST_LEN] {
Sha1::oneshot(data)
}
/// Incremental SHA-1 hasher (FIPS-180-1). Feed data with `update`, obtain
/// the digest with `finalize`.
#[derive(Clone)]
pub struct Sha1 {
// Staging area for a partial input block (BLOCK_LEN bytes).
buffer: [u8; Self::BLOCK_LEN],
// Chaining values H0..H4.
state: [u32; 5],
len: u64, // in bytes.
// Number of bytes currently staged in `buffer` (always < BLOCK_LEN between calls).
offset: usize,
}
impl Sha1 {
/// Input block size in bytes.
pub const BLOCK_LEN: usize = 64;
/// Digest size in bytes (160 bits).
pub const DIGEST_LEN: usize = 20;
const BLOCK_LEN_BITS: u64 = Self::BLOCK_LEN as u64 * 8;
// Size of the big-endian message-length field appended during padding.
const MLEN_SIZE: usize = core::mem::size_of::<u64>();
const MLEN_SIZE_BITS: u64 = Self::MLEN_SIZE as u64 * 8;
// Worst-case scratch needed to finish padding: one block plus the length field.
const MAX_PAD_LEN: usize = Self::BLOCK_LEN + Self::MLEN_SIZE as usize;
/// Creates a hasher with the FIPS-180-1 initial state and an empty buffer.
pub fn new() -> Self {
Self {
buffer: [0u8; 64],
state: INITIAL_STATE,
len: 0,
offset: 0,
}
}
/// Absorbs `data`, compressing each 64-byte block as the buffer fills.
/// `len` counts only fully compressed bytes; the tail stays staged in
/// `buffer` at `offset` until more data arrives or `finalize` runs.
pub fn update(&mut self, data: &[u8]) {
let mut i = 0usize;
while i < data.len() {
// Stage one byte into the block buffer.
if self.offset < Self::BLOCK_LEN {
self.buffer[self.offset] = data[i];
self.offset += 1;
i += 1;
}
// Buffer full: compress it and start a fresh block.
if self.offset == Self::BLOCK_LEN {
transform(&mut self.state, &self.buffer);
self.offset = 0;
self.len += Self::BLOCK_LEN as u64;
}
}
}
/// Applies FIPS padding (0x80, zero fill, 64-bit big-endian bit length) and
/// returns the 20-byte digest, consuming the hasher.
pub fn finalize(mut self) -> [u8; Self::DIGEST_LEN] {
let mlen = self.len + self.offset as u64; // in bytes
let mlen_bits = mlen * 8; // in bits
// pad len, in bits
let plen_bits = Self::BLOCK_LEN_BITS
- (mlen_bits + Self::MLEN_SIZE_BITS + 1) % Self::BLOCK_LEN_BITS
+ 1;
// pad len, in bytes
let plen = plen_bits / 8;
debug_assert_eq!(plen_bits % 8, 0);
debug_assert!(plen > 1);
debug_assert_eq!(
(mlen + plen + Self::MLEN_SIZE as u64) % Self::BLOCK_LEN as u64,
0
);
// NOTE: MAX_PAD_LEN is a small number, so this conversion can safely unwrap.
let plen = usize::try_from(plen).unwrap();
let mut padding: [u8; Self::MAX_PAD_LEN] = [0u8; Self::MAX_PAD_LEN];
padding[0] = 0x80;
let mlen_octets: [u8; Self::MLEN_SIZE] = mlen_bits.to_be_bytes();
padding[plen..plen + Self::MLEN_SIZE].copy_from_slice(&mlen_octets);
let data = &padding[..plen + Self::MLEN_SIZE];
self.update(data);
// NOTE: after padding, the processed message length is a multiple of
// BLOCK_LEN, so `offset` has been reset to zero here.
debug_assert_eq!(self.offset, 0);
// Serialize the five chaining words big-endian into the digest.
let mut output = [0u8; Self::DIGEST_LEN];
output[0..4].copy_from_slice(&self.state[0].to_be_bytes());
output[4..8].copy_from_slice(&self.state[1].to_be_bytes());
output[8..12].copy_from_slice(&self.state[2].to_be_bytes());
output[12..16].copy_from_slice(&self.state[3].to_be_bytes());
output[16..20].copy_from_slice(&self.state[4].to_be_bytes());
output
}
/// One-shot convenience: hash `data` with a fresh hasher.
pub fn oneshot<T: AsRef<[u8]>>(data: T) -> [u8; Self::DIGEST_LEN] {
let mut m = Self::new();
m.update(data.as_ref());
m.finalize()
}
}
// https://github.com/B-Con/crypto-algorithms/blob/master/sha1.c
/// SHA-1 compression function: folds one 64-byte block into `state`.
///
/// `state` holds the five 32-bit chaining values H0..H4 and is updated in
/// place; `block` must be exactly `Sha1::BLOCK_LEN` (64) bytes.
#[inline]
fn transform(state: &mut [u32; 5], block: &[u8]) {
    // (The previous `state.len() == 5` assert was tautological — the array
    // type already guarantees it — and has been dropped.)
    debug_assert_eq!(block.len(), Sha1::BLOCK_LEN);
    // Message schedule: 16 big-endian words from the block, expanded to 80.
    let mut w = [0u32; 80];
    for i in 0..16 {
        w[i] = u32::from_be_bytes([
            block[i * 4],
            block[i * 4 + 1],
            block[i * 4 + 2],
            block[i * 4 + 3],
        ]);
    }
    for i in 16..80 {
        w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1);
    }
    let mut a = state[0];
    let mut b = state[1];
    let mut c = state[2];
    let mut d = state[3];
    let mut e = state[4];
    // Rounds 0-19: Ch(b,c,d) = (b & c) ^ (!b & d), constant K1.
    for i in 0..20 {
        let t = a
            .rotate_left(5)
            .wrapping_add((b & c) ^ (!b & d))
            .wrapping_add(e)
            .wrapping_add(K1)
            .wrapping_add(w[i]);
        e = d;
        d = c;
        c = b.rotate_left(30);
        b = a;
        a = t;
    }
    // Rounds 20-39: Parity(b,c,d) = b ^ c ^ d, constant K2.
    for i in 20..40 {
        let t = a
            .rotate_left(5)
            .wrapping_add(b ^ c ^ d)
            .wrapping_add(e)
            .wrapping_add(K2)
            .wrapping_add(w[i]);
        e = d;
        d = c;
        c = b.rotate_left(30);
        b = a;
        a = t;
    }
    // Rounds 40-59: Maj(b,c,d) = (b & c) ^ (b & d) ^ (c & d), constant K3.
    for i in 40..60 {
        let t = a
            .rotate_left(5)
            .wrapping_add((b & c) ^ (b & d) ^ (c & d))
            .wrapping_add(e)
            .wrapping_add(K3)
            .wrapping_add(w[i]);
        e = d;
        d = c;
        c = b.rotate_left(30);
        b = a;
        a = t;
    }
    // Rounds 60-79: Parity again, constant K4.
    for i in 60..80 {
        let t = a
            .rotate_left(5)
            .wrapping_add(b ^ c ^ d)
            .wrapping_add(e)
            .wrapping_add(K4)
            .wrapping_add(w[i]);
        e = d;
        d = c;
        c = b.rotate_left(30);
        b = a;
        a = t;
    }
    // Add the working variables back into the chaining state.
    state[0] = state[0].wrapping_add(a);
    state[1] = state[1].wrapping_add(b);
    state[2] = state[2].wrapping_add(c);
    state[3] = state[3].wrapping_add(d);
    state[4] = state[4].wrapping_add(e);
}
// FIPS-180-1 example vector: SHA1("abc").
#[test]
fn test_sha1_one_block_message() {
let msg = b"abc";
let digest = [
169, 153, 62, 54, 71, 6, 129, 106, 186, 62, 37, 113, 120, 80, 194, 108, 156, 208, 216, 157,
];
assert_eq!(sha1(&msg[..]), digest);
}
// FIPS-180-1 example vector: a 56-byte message spanning two padded blocks.
#[test]
fn test_sha1_multi_block_message() {
let msg = b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
let digest = [
132, 152, 62, 68, 28, 59, 210, 110, 186, 174, 74, 161, 249, 81, 41, 229, 229, 70, 112, 241,
];
assert_eq!(sha1(&msg[..]), digest);
}
// FIPS-180-1 example vector: one million 'a' bytes; exercises many
// update/compress iterations plus the final padding path.
#[test]
fn test_sha1_long_message() {
    // `1_000_000` fixes the original's inconsistent `1000_000` digit grouping.
    let msg = vec![b'a'; 1_000_000];
    let digest = [
        52, 170, 151, 60, 212, 196, 218, 164, 246, 30, 235, 43, 219, 173, 39, 49, 101, 52, 1, 111,
    ];
    assert_eq!(sha1(&msg), digest);
}
|
// q0094_binary_tree_inorder_traversal
// Unit struct used only as a namespace for the LeetCode solution method.
struct Solution;
use crate::util::TreeNode;
use std::cell::RefCell;
use std::rc::Rc;
impl Solution {
/// Iterative inorder traversal (LeetCode 94) using an explicit stack.
///
/// Trick: when a node still has a left child, a fresh value-only node
/// (same `val`, no children) is pushed between right child and left
/// child, so the value is emitted after the left subtree is finished —
/// the classic "re-push as visited marker" approach without recursion.
pub fn inorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
match root {
Some(head) => {
let mut ret = vec![];
let mut tree_buf = vec![head];
while let Some(rf_node) = tree_buf.pop() {
// Stack order is (left, marker, right) pushed in reverse, so
// left is processed first.
match (&rf_node.borrow().left.as_ref(), &rf_node.borrow().right) {
(Some(ref ln), Some(ref rn)) => {
tree_buf.push(Rc::clone(rn));
// Value-only marker node: revisited after the left subtree.
tree_buf
.push(Rc::new(RefCell::new(TreeNode::new(rf_node.borrow().val))));
tree_buf.push(Rc::clone(ln));
}
(Some(ref ln), None) => {
tree_buf
.push(Rc::new(RefCell::new(TreeNode::new(rf_node.borrow().val))));
tree_buf.push(Rc::clone(ln));
}
(None, Some(ref rn)) => {
// No left subtree: emit now, then descend right.
tree_buf.push(Rc::clone(rn));
ret.push(rf_node.borrow().val);
}
(None, None) => {
// Leaf (or marker node): emit the value.
ret.push(rf_node.borrow().val);
}
}
}
return ret;
}
None => return vec![],
}
}
}
#[cfg(test)]
mod tests {
use super::Solution;
use crate::util::TreeNode;
// Tree [1, null, 2, 3] (2 is right child of 1, 3 is left child of 2)
// has inorder traversal 1, 3, 2.
#[test]
fn it_works() {
assert_eq!(
vec![1, 3, 2],
Solution::inorder_traversal(TreeNode::build(vec![Some(1), None, Some(2), Some(3)]))
);
}
}
|
// svd2rust-generated register accessors for MUX_SWITCH_SQ_CTRL.
#[doc = "Reader of register MUX_SWITCH_SQ_CTRL"]
pub type R = crate::R<u32, super::MUX_SWITCH_SQ_CTRL>;
#[doc = "Writer for register MUX_SWITCH_SQ_CTRL"]
pub type W = crate::W<u32, super::MUX_SWITCH_SQ_CTRL>;
#[doc = "Register MUX_SWITCH_SQ_CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::MUX_SWITCH_SQ_CTRL {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
// Generated per-bit field accessors for pin switches P0..P7 (bits 0..7).
// Each field gets a reader type alias and a write proxy whose `bit` method
// read-modify-writes exactly one bit of the register image.
// Field MUX_SQ_CTRL_P0 — bit 0.
#[doc = "Reader of field `MUX_SQ_CTRL_P0`"]
pub type MUX_SQ_CTRL_P0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P0`"]
pub struct MUX_SQ_CTRL_P0_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
// Field MUX_SQ_CTRL_P1 — bit 1.
#[doc = "Reader of field `MUX_SQ_CTRL_P1`"]
pub type MUX_SQ_CTRL_P1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P1`"]
pub struct MUX_SQ_CTRL_P1_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
// Field MUX_SQ_CTRL_P2 — bit 2.
#[doc = "Reader of field `MUX_SQ_CTRL_P2`"]
pub type MUX_SQ_CTRL_P2_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P2`"]
pub struct MUX_SQ_CTRL_P2_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
// Field MUX_SQ_CTRL_P3 — bit 3.
#[doc = "Reader of field `MUX_SQ_CTRL_P3`"]
pub type MUX_SQ_CTRL_P3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P3`"]
pub struct MUX_SQ_CTRL_P3_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
// Field MUX_SQ_CTRL_P4 — bit 4.
#[doc = "Reader of field `MUX_SQ_CTRL_P4`"]
pub type MUX_SQ_CTRL_P4_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P4`"]
pub struct MUX_SQ_CTRL_P4_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P4_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
// Field MUX_SQ_CTRL_P5 — bit 5.
#[doc = "Reader of field `MUX_SQ_CTRL_P5`"]
pub type MUX_SQ_CTRL_P5_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P5`"]
pub struct MUX_SQ_CTRL_P5_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P5_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
// Field MUX_SQ_CTRL_P6 — bit 6.
#[doc = "Reader of field `MUX_SQ_CTRL_P6`"]
pub type MUX_SQ_CTRL_P6_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P6`"]
pub struct MUX_SQ_CTRL_P6_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P6_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
// Field MUX_SQ_CTRL_P7 — bit 7.
#[doc = "Reader of field `MUX_SQ_CTRL_P7`"]
pub type MUX_SQ_CTRL_P7_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_P7`"]
pub struct MUX_SQ_CTRL_P7_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_P7_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
// Generated per-bit field accessors for the remaining switches: VSSA (bit 16),
// TEMP (bit 17), AMUXBUSA (bit 18), AMUXBUSB (bit 19), SARBUS0 (bit 22),
// SARBUS1 (bit 23). Bits 8-15 and 20-21 of the register are unused here.
// Field MUX_SQ_CTRL_VSSA — bit 16.
#[doc = "Reader of field `MUX_SQ_CTRL_VSSA`"]
pub type MUX_SQ_CTRL_VSSA_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_VSSA`"]
pub struct MUX_SQ_CTRL_VSSA_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_VSSA_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
// Field MUX_SQ_CTRL_TEMP — bit 17.
#[doc = "Reader of field `MUX_SQ_CTRL_TEMP`"]
pub type MUX_SQ_CTRL_TEMP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_TEMP`"]
pub struct MUX_SQ_CTRL_TEMP_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_TEMP_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
// Field MUX_SQ_CTRL_AMUXBUSA — bit 18.
#[doc = "Reader of field `MUX_SQ_CTRL_AMUXBUSA`"]
pub type MUX_SQ_CTRL_AMUXBUSA_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_AMUXBUSA`"]
pub struct MUX_SQ_CTRL_AMUXBUSA_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_AMUXBUSA_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
// Field MUX_SQ_CTRL_AMUXBUSB — bit 19.
#[doc = "Reader of field `MUX_SQ_CTRL_AMUXBUSB`"]
pub type MUX_SQ_CTRL_AMUXBUSB_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_AMUXBUSB`"]
pub struct MUX_SQ_CTRL_AMUXBUSB_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_AMUXBUSB_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
self.w
}
}
// Field MUX_SQ_CTRL_SARBUS0 — bit 22.
#[doc = "Reader of field `MUX_SQ_CTRL_SARBUS0`"]
pub type MUX_SQ_CTRL_SARBUS0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_SARBUS0`"]
pub struct MUX_SQ_CTRL_SARBUS0_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_SARBUS0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
self.w
}
}
// Field MUX_SQ_CTRL_SARBUS1 — bit 23.
#[doc = "Reader of field `MUX_SQ_CTRL_SARBUS1`"]
pub type MUX_SQ_CTRL_SARBUS1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MUX_SQ_CTRL_SARBUS1`"]
pub struct MUX_SQ_CTRL_SARBUS1_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_SQ_CTRL_SARBUS1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
self.w
}
}
// Generated read accessors: each method extracts one field bit from the
// captured register value.
impl R {
#[doc = "Bit 0 - for P0 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p0(&self) -> MUX_SQ_CTRL_P0_R {
MUX_SQ_CTRL_P0_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - for P1 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p1(&self) -> MUX_SQ_CTRL_P1_R {
MUX_SQ_CTRL_P1_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - for P2 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p2(&self) -> MUX_SQ_CTRL_P2_R {
MUX_SQ_CTRL_P2_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - for P3 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p3(&self) -> MUX_SQ_CTRL_P3_R {
MUX_SQ_CTRL_P3_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - for P4 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p4(&self) -> MUX_SQ_CTRL_P4_R {
MUX_SQ_CTRL_P4_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - for P5 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p5(&self) -> MUX_SQ_CTRL_P5_R {
MUX_SQ_CTRL_P5_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - for P6 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p6(&self) -> MUX_SQ_CTRL_P6_R {
MUX_SQ_CTRL_P6_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - for P7 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p7(&self) -> MUX_SQ_CTRL_P7_R {
MUX_SQ_CTRL_P7_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 16 - for vssa switch"]
#[inline(always)]
pub fn mux_sq_ctrl_vssa(&self) -> MUX_SQ_CTRL_VSSA_R {
MUX_SQ_CTRL_VSSA_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 17 - for temp switch"]
#[inline(always)]
pub fn mux_sq_ctrl_temp(&self) -> MUX_SQ_CTRL_TEMP_R {
MUX_SQ_CTRL_TEMP_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - for amuxbusa switch"]
#[inline(always)]
pub fn mux_sq_ctrl_amuxbusa(&self) -> MUX_SQ_CTRL_AMUXBUSA_R {
MUX_SQ_CTRL_AMUXBUSA_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - for amuxbusb switches"]
#[inline(always)]
pub fn mux_sq_ctrl_amuxbusb(&self) -> MUX_SQ_CTRL_AMUXBUSB_R {
MUX_SQ_CTRL_AMUXBUSB_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 22 - for sarbus0 switch"]
#[inline(always)]
pub fn mux_sq_ctrl_sarbus0(&self) -> MUX_SQ_CTRL_SARBUS0_R {
MUX_SQ_CTRL_SARBUS0_R::new(((self.bits >> 22) & 0x01) != 0)
}
#[doc = "Bit 23 - for sarbus1 switch"]
#[inline(always)]
pub fn mux_sq_ctrl_sarbus1(&self) -> MUX_SQ_CTRL_SARBUS1_R {
MUX_SQ_CTRL_SARBUS1_R::new(((self.bits >> 23) & 0x01) != 0)
}
}
// Generated write accessors: each method returns the field's write proxy,
// which mutates the pending register value through `bit`/`set_bit`/`clear_bit`.
impl W {
#[doc = "Bit 0 - for P0 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p0(&mut self) -> MUX_SQ_CTRL_P0_W {
MUX_SQ_CTRL_P0_W { w: self }
}
#[doc = "Bit 1 - for P1 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p1(&mut self) -> MUX_SQ_CTRL_P1_W {
MUX_SQ_CTRL_P1_W { w: self }
}
#[doc = "Bit 2 - for P2 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p2(&mut self) -> MUX_SQ_CTRL_P2_W {
MUX_SQ_CTRL_P2_W { w: self }
}
#[doc = "Bit 3 - for P3 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p3(&mut self) -> MUX_SQ_CTRL_P3_W {
MUX_SQ_CTRL_P3_W { w: self }
}
#[doc = "Bit 4 - for P4 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p4(&mut self) -> MUX_SQ_CTRL_P4_W {
MUX_SQ_CTRL_P4_W { w: self }
}
#[doc = "Bit 5 - for P5 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p5(&mut self) -> MUX_SQ_CTRL_P5_W {
MUX_SQ_CTRL_P5_W { w: self }
}
#[doc = "Bit 6 - for P6 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p6(&mut self) -> MUX_SQ_CTRL_P6_W {
MUX_SQ_CTRL_P6_W { w: self }
}
#[doc = "Bit 7 - for P7 switches"]
#[inline(always)]
pub fn mux_sq_ctrl_p7(&mut self) -> MUX_SQ_CTRL_P7_W {
MUX_SQ_CTRL_P7_W { w: self }
}
#[doc = "Bit 16 - for vssa switch"]
#[inline(always)]
pub fn mux_sq_ctrl_vssa(&mut self) -> MUX_SQ_CTRL_VSSA_W {
MUX_SQ_CTRL_VSSA_W { w: self }
}
#[doc = "Bit 17 - for temp switch"]
#[inline(always)]
pub fn mux_sq_ctrl_temp(&mut self) -> MUX_SQ_CTRL_TEMP_W {
MUX_SQ_CTRL_TEMP_W { w: self }
}
#[doc = "Bit 18 - for amuxbusa switch"]
#[inline(always)]
pub fn mux_sq_ctrl_amuxbusa(&mut self) -> MUX_SQ_CTRL_AMUXBUSA_W {
MUX_SQ_CTRL_AMUXBUSA_W { w: self }
}
#[doc = "Bit 19 - for amuxbusb switches"]
#[inline(always)]
pub fn mux_sq_ctrl_amuxbusb(&mut self) -> MUX_SQ_CTRL_AMUXBUSB_W {
MUX_SQ_CTRL_AMUXBUSB_W { w: self }
}
#[doc = "Bit 22 - for sarbus0 switch"]
#[inline(always)]
pub fn mux_sq_ctrl_sarbus0(&mut self) -> MUX_SQ_CTRL_SARBUS0_W {
MUX_SQ_CTRL_SARBUS0_W { w: self }
}
#[doc = "Bit 23 - for sarbus1 switch"]
#[inline(always)]
pub fn mux_sq_ctrl_sarbus1(&mut self) -> MUX_SQ_CTRL_SARBUS1_W {
MUX_SQ_CTRL_SARBUS1_W { w: self }
}
}
|
//! A parser for the arith language.
//!
//! t :=
//! true
//! false
//! if t then t else t
//! 0
//! succ t
//! pred t
//! iszero t
use std::error;
use std::fmt;
use Term;
/// Errors that may occur during parsing.
#[derive(PartialEq, Debug, Clone)]
pub enum Error {
/// An if term without a consequent
IfWithoutConsequent,
/// An if term without an alternative
IfWithoutAlternative,
/// An unknown token (carries the offending token text)
UnknownToken(String),
/// When the end of the input is encountered unexpected
UnexpectedEndOfInput,
/// When another token is found instead of the end of input
/// (carries the unexpected trailing token)
ExpectedEndOfInput(String),
}
impl error::Error for Error {
// NOTE(review): `description` has been deprecated (since Rust 1.42) in
// favour of `Display`; retained because this crate targets an older
// toolchain (it also uses pre-2018 `box` syntax).
fn description(&self) -> &str {
match *self {
Error::IfWithoutConsequent => "if statement without consequent",
Error::IfWithoutAlternative => "if statement without alternative",
Error::UnknownToken(_) => "unknown token",
Error::UnexpectedEndOfInput => "unexpected end of input",
Error::ExpectedEndOfInput(_) => "expected end of input, found another token",
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::IfWithoutConsequent => write!(f, "if statement without consequent"),
Error::IfWithoutAlternative => write!(f, "if statement without alternative"),
Error::UnknownToken(ref unknown) => write!(f, "unknown token {}", unknown),
Error::UnexpectedEndOfInput => write!(f, "unexpected end of input"),
Error::ExpectedEndOfInput(ref unknown) => write!(f, "expected end of input, found {}", unknown),
}
}
}
/// Parse a string term of the arith language into `Term`s.
///
/// # Example
///
/// ```rust
/// #![feature(box_syntax)]
/// use tyarith::{parse, Term};
///
/// let term = parse("if true then true else false");
/// assert_eq!(Ok(Term::If(box Term::True, box Term::True, box Term::False)), term);
/// ```
pub fn parse(input: &str) -> Result<Term, Error> {
let mut tokens = input.split_whitespace();
let term = parse_t(&mut tokens);
match tokens.next() {
Some(unexpected) => Err(Error::ExpectedEndOfInput(unexpected.into())),
None => term,
}
}
// Recursive-descent parser for a single term, consuming tokens as needed.
fn parse_t(mut tokens: &mut Iterator<Item=&str>) -> Result<Term, Error> {
    match tokens.next() {
        // Atomic terms.
        Some("true") => Ok(Term::True),
        Some("false") => Ok(Term::False),
        Some("0") => Ok(Term::Zero),
        // Unary constructors: parse the single argument term recursively.
        Some("succ") => parse_t(&mut tokens).map(|t| Term::Succ(box t)),
        Some("pred") => parse_t(&mut tokens).map(|t| Term::Pred(box t)),
        Some("iszero") => parse_t(&mut tokens).map(|t| Term::IsZero(box t)),
        // if <guard> then <consequent> else <alternate>
        Some("if") => {
            let guard = parse_t(&mut tokens)?;
            match tokens.next() {
                Some("then") => {
                    let consequent = parse_t(&mut tokens)?;
                    match tokens.next() {
                        Some("else") => {
                            let alternate = parse_t(&mut tokens)?;
                            Ok(Term::If(box guard, box consequent, box alternate))
                        }
                        // Anything other than "else" (or EOF) here.
                        _ => Err(Error::IfWithoutAlternative),
                    }
                }
                // Anything other than "then" (or EOF) here.
                _ => Err(Error::IfWithoutConsequent),
            }
        }
        Some(unknown) => Err(Error::UnknownToken(unknown.into())),
        None => Err(Error::UnexpectedEndOfInput),
    }
}
|
/*!
Bytecode `io` helpers.
*/
use std::io;
use std::mem;
use byteorder::{ self, LittleEndian, ReadBytesExt, WriteBytesExt };
/// Bytecode representation.
// Marker trait — no required methods yet.
pub trait Bytecode {
}
/// Serialize and deserialize a structure from `io`.
pub trait Serializer {
    /// Write contents to io `writer`, returns bytes written.
    fn serialize<O: io::Write>(&self, writer: &mut O) -> Result<u64, Error>;
    /// Read contents from `reader`, return tuple of bytes read and new structure.
    fn deserialize<I: io::Read>(reader: &mut I) -> Result<(u64, Self), Error> where Self: Sized;
}
/// Bytecode read/write error.
#[derive(Debug)]
pub enum Error {
    /// Failed to read cache header, assume this is not valid cache file.
    InvalidBinaryFormat,
    /// Input ended before a complete value could be read.
    UnexpectedEOF,
    /// Underlying I/O failure.
    Io(io::Error),
}
// Translate byteorder errors so `try!`/`?` works with the byteorder
// read/write extension methods.
impl From<byteorder::Error> for Error {
    fn from(other: byteorder::Error) -> Error {
        match other {
            byteorder::Error::UnexpectedEOF => Error::UnexpectedEOF,
            byteorder::Error::Io(e) => Error::Io(e),
        }
    }
}
impl From<io::Error> for Error {
    fn from(other: io::Error) -> Error {
        Error::Io(other)
    }
}
/// Bytecode file header.
#[derive(Eq, PartialEq, Debug)]
pub struct Header {
    magic: u32,
}

impl Header {
    /// Build a header stamped with the expected magic number.
    pub fn new() -> Header {
        let magic = Self::magic();
        Header { magic }
    }
    /// A header is valid when its magic number matches the expected value.
    pub fn is_magical(&self) -> bool {
        Self::magic() == self.magic
    }
    /// The well-known magic number identifying a bytecode file.
    fn magic() -> u32 {
        52231103
    }
}
impl Serializer for Header {
fn serialize<O: io::Write>(&self, output: &mut O) -> Result<u64, Error> {
try!(output.write_u32::<LittleEndian>(self.magic));
Ok(mem::size_of::<Header>() as u64)
}
fn deserialize<I: io::Read>(input: &mut I) -> Result<(u64, Header), Error> {
Ok((mem::size_of::<Header>() as u64, Header {
magic: try!(input.read_u32::<LittleEndian>())
}))
}
}
#[cfg(test)]
mod test {
    use super::*;
    use std::io::Cursor;

    // Round-trip: a serialized header must deserialize to an equal value.
    #[test]
    fn header() {
        let mut input: Vec<u8> = vec![];
        let a = Header::new();
        a.serialize(&mut input).unwrap();
        let mut cursor = Cursor::new(&input[..]);
        let (_, b) = Header::deserialize(&mut cursor).unwrap();
        assert_eq!(a, b);
    }
}
|
// By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13.
//
// What is the 10 001st prime number?
extern crate project_euler;

fn main() {
    // Prime source from the project's primes module.
    let mut primes = project_euler::primes::primes();
    // note that nth_prime is 0-indexed, but the problem is 1-indexed.
    // Sanity check against the example from the problem statement.
    assert_eq!(primes.nth_prime(6 - 1), 13);
    // NOTE(review): a fresh `primes()` is built here instead of reusing
    // `primes` above — presumably because nth_prime advances internal state;
    // confirm before consolidating the two.
    println!("{}", project_euler::primes::primes().nth_prime(10001 - 1));
}
|
//! This module handles connections to Content Manager Server
//! First you connect into the ip using a tcp socket
//! Then reads/writes into it
//!
//! Packets are sent at the following format: packet_len + packet_magic + data
//! packet length: u32
//! packet magic: VT01
//!
//! Apparently, bytes received are in little endian
//!
use std::convert::TryInto;
use std::error::Error;
use std::future::Future;
use bytes::{Buf, Bytes, BytesMut};
use futures::task::Context;
use tokio::{
io::{
AsyncRead,
AsyncReadExt,
AsyncWrite,
AsyncWriteExt,
},
macros::support::{Pin, Poll},
net::TcpStream,
};
use async_trait::async_trait;
/// Magic bytes that follow every packet's length prefix ("VT01").
const PACKET_MAGIC_BYTES: &[u8] = br#"VT01"#;

/// This should be an abstraction over low-level socket handlers and is not to be used directly.
/// Use [SteamClient] instead for binding and connecting.
// Should be a way to register event handlers, so we can listen to certain types of events,
// like friends logging in, or receiving trade requests.
pub struct SteamConnection<S> {
    /// Stream of data to Steam Content server. May be TCP or Websocket.
    stream: S,
    /// Address to which the connection is bound.
    endpoint: String,
}
// Common interface implemented by both the TCP and websocket transports.
#[async_trait]
trait Connection<S> {
    /// Connect to `ip_addr` and wrap the resulting stream.
    async fn new_connection(ip_addr: &str) -> Result<SteamConnection<S>, Box<dyn Error>>;
    /// Read one framed packet and return its payload bytes.
    async fn read_packets(&mut self) -> Result<Vec<u8>, Box<dyn Error>>;
    /// Frame `data` (length prefix + magic) and send it.
    async fn write_packets(&mut self, data: &[u8]) -> Result<(), Box<dyn Error>>;
}
// Placeholder: polling a connection as a Future is not implemented yet.
impl<S> Future for SteamConnection<S>
where
    S: AsyncRead + AsyncWrite + Unpin,
{
    type Output = ();

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        unimplemented!()
    }
}
#[cfg(not(feature = "websockets"))]
#[async_trait]
impl Connection<TcpStream> for SteamConnection<TcpStream> {
    /// Opens a tcp stream to specified IP.
    async fn new_connection(ip_addr: &str) -> Result<SteamConnection<TcpStream>, Box<dyn Error>> {
        trace!("Connecting to ip: {}", &ip_addr);
        let stream = TcpStream::connect(ip_addr).await?;
        Ok(SteamConnection { stream, endpoint: ip_addr.to_string() })
    }

    /// Reads one packet: u32 LE payload length, 4-byte magic, then the payload.
    #[inline]
    async fn read_packets(&mut self) -> Result<Vec<u8>, Box<dyn Error>> {
        let mut data_len: [u8; 4] = [0; 4];
        self.stream.read_exact(&mut data_len).await?;

        let mut packet_magic: [u8; 4] = [0; 4];
        self.stream.read_exact(&mut packet_magic).await?;
        if packet_magic != PACKET_MAGIC_BYTES {
            log::error!("Could not find magic packet on read.");
        }

        let data_length = u32::from_le_bytes(data_len);
        // BUGFIX: read_buf() may return after a *partial* read (the old code
        // only debug_asserted the size). read_exact() guarantees the full
        // payload announced by the length prefix is received.
        let mut incoming_data = vec![0u8; data_length as usize];
        self.stream.read_exact(&mut incoming_data).await?;

        // Sanity check
        debug!("data length: {}", data_length);
        trace!("data: {:?}", incoming_data);
        Ok(incoming_data)
    }

    /// Frames `data` as length prefix + magic + payload and writes it out.
    #[inline]
    async fn write_packets(&mut self, data: &[u8]) -> Result<(), Box<dyn Error>> {
        let mut output_buffer = BytesMut::with_capacity(1024);
        output_buffer.extend_from_slice(&(data.len() as u32).to_le_bytes());
        trace!("Data size: {} ", data.len());
        output_buffer.extend_from_slice(PACKET_MAGIC_BYTES);
        output_buffer.extend_from_slice(data);
        trace!("Data bytes: {:?}", data);
        trace!("Writing {} bytes of data to stream..", output_buffer.len());
        self.stream.write_all(output_buffer.bytes()).await?;
        Ok(())
    }
}
#[cfg(feature = "websockets")]
mod connection_method {
    use tokio_tls::TlsStream;
    use tokio_tungstenite::{connect_async, stream::Stream, WebSocketStream};

    use super::*;

    type Ws = WebSocketStream<Stream<TcpStream, TlsStream<TcpStream>>>;

    #[async_trait]
    impl Connection<Ws> for SteamConnection<Ws> {
        /// Open a TLS websocket to the CM socket endpoint of `ws_url`.
        async fn new_connection(ws_url: &str) -> Result<SteamConnection<Ws>, Box<dyn Error>> {
            let formatted_ws_url = format!("wss://{}/cmsocket/", ws_url);
            debug!("Connecting to addr: {}", formatted_ws_url);
            let (ws_stream, _) = connect_async(&formatted_ws_url).await?;
            Ok(SteamConnection { stream: ws_stream, endpoint: formatted_ws_url })
        }

        #[inline]
        async fn read_packets(&mut self) -> Result<Vec<u8>, Box<dyn Error>> {
            let mut data_len: [u8; 4] = [0; 4];
            self.stream.get_mut().read_exact(&mut data_len).await?;

            let mut packet_magic: [u8; 4] = [0; 4];
            self.stream.get_mut().read_exact(&mut packet_magic).await?;
            if packet_magic != PACKET_MAGIC_BYTES {
                log::error!("Could not find magic packet on read.");
            }

            // NOTE(review): the announced length (`data_len`) is only logged —
            // the read below fetches at most 1024 bytes regardless, so longer
            // packets would be truncated; confirm and align with the TCP path.
            let mut incoming_data = BytesMut::with_capacity(1024);
            self.stream.get_mut().read_buf(&mut incoming_data).await?;

            //sanity check
            debug!("data length: {}", u32::from_le_bytes(data_len));
            trace!("data: {:?}", incoming_data);
            Ok(incoming_data.to_vec())
        }

        #[inline]
        async fn write_packets(&mut self, data: &[u8]) -> Result<(), Box<dyn Error>> {
            unimplemented!()
        }
    }
}
#[cfg(test)]
mod tests {
    use std::thread::sleep;

    use env_logger::{Builder, Target};
    use log::LevelFilter;
    use tokio::time::Duration;

    use steam_language_gen::generated::enums::EMsg;

    use crate::cmserver::CmServerSvList;
    use crate::encrypted_connection::handle_encrypt_request;
    use crate::messages::packetmessage::PacketMessage;

    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;

    // Per-test logger setup; try_init so repeated initialization never panics.
    fn init() {
        let _ = Builder::from_default_env()
            .filter_module("steam_api", LevelFilter::Trace)
            .is_test(true)
            .try_init();
    }

    // These tests hit live Steam CM servers and need the STEAM_API env var
    // at compile time (env! macro).
    #[tokio::test]
    #[cfg(not(feature = "websockets"))]
    async fn connect_to_web_server() {
        init();
        let cm_servers = CmServerSvList::fetch_servers(env!("STEAM_API")).await;
        let dumped_cm_servers = cm_servers.unwrap().dump_tcp_servers();
        let steam_connection = SteamConnection::new_connection(&dumped_cm_servers[0]).await;
        assert!(steam_connection.is_ok());
    }

    #[tokio::test]
    #[cfg(feature = "websockets")]
    async fn connect_to_ws_server() {
        init();
        let get_results = CmServerSvList::fetch_servers(env!("STEAM_API")).await;
        let fetched_servers = get_results.unwrap().dump_ws_servers();
        let steam_connection = SteamConnection::new_connection(&fetched_servers[0]).await;
        assert!(steam_connection.is_ok())
    }

    // Full handshake: expect an encrypt request, answer it, expect the result.
    #[tokio::test]
    #[cfg(not(feature = "websockets"))]
    async fn answer_encrypt_request() {
        init();
        let cm_servers = CmServerSvList::fetch_servers(env!("STEAM_API")).await;
        let dumped_cm_servers = cm_servers.unwrap().dump_tcp_servers();
        let mut steam_connection: SteamConnection<TcpStream> = SteamConnection::new_connection(&dumped_cm_servers[0]).await.unwrap();
        let data = steam_connection.read_packets().await.unwrap();
        let message = EMsg::from_raw_message(&data);
        assert_eq!(message.unwrap(), EMsg::ChannelEncryptRequest);
        //
        handle_encrypt_request(PacketMessage::from_rawdata(&data));
        steam_connection.write_packets(b"\x18\x05\0\0\x17\x05\0\0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\0\0\0\x80\0\0\0").await.unwrap();
        let data = steam_connection.read_packets().await.unwrap();
        let message = EMsg::from_raw_message(&data);
        assert_eq!(message.unwrap(), EMsg::ChannelEncryptResult);
    }
}
|
// Implement basic function to split some generic computational work between threads
// Split should occur only on some threshold
// If computational work is shorter that this threshold
// No splitting should occur and no threads should be created
// You get as input:
// 1. Vec<T>
// 2. Function f(t: T) -> R
// You should return:
// 1. Up to you, but probably some Vec of the same length as input(1)
use num_cpus;
use std::time::{Duration, Instant};
use std::thread::{spawn};
use std::sync::atomic::{AtomicU8, Ordering};
use std::sync::{Arc, Mutex};
use std::thread::{sleep};
use std::mem;
const MAX_NO_THREADS_DURATION: Duration = Duration::from_secs(1);
fn split_work_run_threads<R, T>(input: &[T], func: fn(T) -> R, result: Vec<R>) -> Vec<R>
where
R: Clone + Send + Sync + 'static,
T: Clone + Send + 'static
{
let threads_available = num_cpus::get();
let threads_count = if input.len() < threads_available {input.len()} else {threads_available};
let calls_count_approx = input.len() / threads_count;
let mut calls_count_remainder = input.len() % threads_count;
println!("Starting {} threads for {} calls", threads_count, input.len());
let sync_push = Arc::new(AtomicU8::new(0));
let sync_result = Arc::new(Mutex::new(result));
let mut calculated = 0;
let mut threads = vec![];
while calculated < input.len()
{
let thread_id = threads.len() as u8;
let calls_count = calls_count_approx + if calls_count_remainder > 0 {1} else {0};
println!("Thread {}: prepares to start for {} calls", thread_id, calls_count);
let thread_input = input.get(calculated..calculated + calls_count).unwrap().to_owned();
let sync_push_cpy = sync_push.clone();
let sync_result_cpy = sync_result.clone();
threads.push(spawn(move ||
{
println!("Thread {}: started", thread_id);
let mut thread_result = vec![];
for arg in thread_input
{
thread_result.push(func(arg.clone()));
}
println!("Thread {}: waits", thread_id);
while sync_push_cpy.as_ref().load(Ordering::Acquire) != thread_id
{
sleep(MAX_NO_THREADS_DURATION / 100);
}
println!("Thread {}: inserts", thread_id);
let result_ref = &mut *sync_result_cpy.as_ref().lock().unwrap(); // safe unwrap cause of atomic guard
for r in thread_result
{
result_ref.push(r.clone());
}
sync_push_cpy.as_ref().store(thread_id + 1, Ordering::Release);
println!("Thread {}: finished", thread_id);
}));
calculated += calls_count;
calls_count_remainder -= if calls_count_remainder > 0 {1} else {0};
}
for t in threads
{
t.join().unwrap();
}
let result = mem::take(&mut *sync_result.as_ref().lock().unwrap());
return result;
}
fn split_work<R, T>(input: &Vec<T>, func: fn(T) -> R) -> Vec<R>
where
R: Clone + Send + Sync + 'static,
T: Clone + Send + 'static
{
println!("Splitting work for {} calls", input.len());
let mut result = Vec::<R>::with_capacity(input.capacity());
let mut calculated = 0;
let start_time = Instant::now();
while calculated < input.len() && start_time.elapsed() < MAX_NO_THREADS_DURATION
{
let arg = input.get(calculated).unwrap().clone();
result.push(func(arg));
calculated += 1;
}
println!("{} was calculated without spawning threads", calculated);
if calculated == input.len()
{
return result;
}
return split_work_run_threads(input.get(calculated..input.len()).unwrap(), func, result);
}
// Simulated expensive work item: sum 0..=end, then sleep briefly.
fn sum(end: i64) -> i64 {
    let result: i64 = (0..=end).sum();
    sleep(MAX_NO_THREADS_DURATION / 100);
    result
}
fn main() {
    // Work items 1..=100.
    let input: Vec<i64> = (1..=100).collect();
    let result = split_work(&input, sum);
    for value in result.iter().take(100) {
        println!("{}", value);
    }
}
|
mod cli;
mod ndjson;
use std::fs;
use std::io;
fn main() {
    let opts = cli::parse_opts();
    let uniq_opts = ndjson::Opts {
        group: opts.group,
        count: opts.count,
    };
    // "-" selects stdin; anything else is treated as a file path.
    let outcome = if opts.file == "-" {
        ndjson::uniq(io::BufReader::new(io::stdin()), &opts.key, io::stdout(), uniq_opts)
    } else {
        let file = fs::File::open(opts.file).unwrap();
        ndjson::uniq(io::BufReader::new(file), &opts.key, io::stdout(), uniq_opts)
    };
    outcome.unwrap();
}
|
#[macro_use]
pub mod register;
pub mod authentication;
use actix_identity::Identity;
use actix_web::error::{ErrorBadRequest, ErrorUnauthorized};
use actix_web::{dev, FromRequest, HttpRequest};
use actix_web::{web, Error, Result};
use chrono::Duration;
use csrf_token::CsrfTokenGenerator;
use futures_util::future::{err, ok, Ready};
use hex;
use crate::db_connection::{PgPool, PgPooledConnection};
use crate::utils::jwt::{decode_token, SlimUser};
pub type LoggedUser = SlimUser;
pub fn pg_pool_handler(pool: web::Data<PgPool>) -> Result<PgPooledConnection> {
pool.get()
.map_err(|e| actix_web::error::ErrorInternalServerError(e))
}
// Lets handlers take `LoggedUser` as an extractor; the actual authentication
// work happens in `get_token` below.
impl FromRequest for LoggedUser {
    type Error = Error;
    type Config = ();
    type Future = Ready<Result<Self, Self::Error>>;

    fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
        // Extraction is synchronous, so wrap the outcome in a ready future.
        match get_token(req, payload) {
            Ok(user) => ok(user),
            Err(error) => err(error),
        }
    }
}
/// Validate the request's CSRF token and decode the session identity.
///
/// Steps, each failing with a 400/401 error:
/// 1. build a CSRF verifier from the env key (tokens valid for 1 hour),
/// 2. read and hex-decode the `x-csrf-token` header,
/// 3. verify the token,
/// 4. decode the identity cookie's JWT into a `SlimUser`.
fn get_token(req: &HttpRequest, payload: &mut dev::Payload) -> Result<LoggedUser, Error> {
    let generator = CsrfTokenGenerator::new(
        dotenv!("CSRF_TOKEN_KEY").as_bytes().to_vec(),
        Duration::hours(1),
    );
    let csrf_token = req
        .headers()
        .get("x-csrf-token")
        .ok_or(ErrorBadRequest("No token provided"))?;
    // Header value is hex-encoded; decode before verification.
    let decoded_token = hex::decode(&csrf_token)
        .map_err(|_| ErrorBadRequest("An Error ocurred decoding the token"))?;
    generator
        .verify(&decoded_token)
        .map_err(|_| ErrorUnauthorized("can't verify token"))?;
    if let Some(identity) = Identity::from_request(req, payload)
        .into_inner()?
        .identity()
    {
        let user: SlimUser = decode_token(&identity)?;
        Ok(user as LoggedUser)
    } else {
        Err(ErrorUnauthorized("can't obtain token"))
    }
}
|
use spair::prelude::*;
// Render a fetch error as <div><span>error message</span></div>.
impl spair::Render<crate::App> for &spair::FetchError {
    fn render(self, nodes: spair::Nodes<crate::App>) {
        nodes.div(|d| {
            d.nodes()
                .span(|s| s.nodes().render(&self.to_string()).done());
        });
    }
}
|
use crate::error::{Error, Result, RuntimeError};
use crate::parsing::{parse_sexpr, ParseErrorKind, Span};
use crate::scm::Scm;
use std::cell::RefCell;
use std::fs::File;
use std::io::{stderr, stdin, stdout, BufRead, BufReader, BufWriter, Write};
use std::mem::size_of;
/// A Scheme port: a stateful handle for reading or writing characters/bytes.
pub struct SchemePort {
    // Interior mutability lets every port operation take `&self`.
    port: RefCell<PortState>,
}

impl std::fmt::Debug for SchemePort {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Ports are identified by address only.
        write!(f, "<Port {:p}>", self)
    }
}
impl SchemePort {
    /// Port reading from the process's standard input.
    pub fn std_input() -> Self {
        SchemePort {
            port: RefCell::new(PortState::Input(Box::new(BufReader::new(stdin())))),
        }
    }
    /// Port writing to the process's standard output.
    pub fn std_output() -> Self {
        SchemePort {
            port: RefCell::new(PortState::Output(Box::new(stdout()))),
        }
    }
    /// Port writing to the process's standard error.
    pub fn std_error() -> Self {
        SchemePort {
            port: RefCell::new(PortState::Output(Box::new(stderr()))),
        }
    }
    /// Buffered input port reading the named file.
    pub fn file_input(filename: &str) -> Result<Self> {
        let f = File::open(filename)?;
        Ok(SchemePort {
            port: RefCell::new(PortState::Input(Box::new(BufReader::new(f)))),
        })
    }
    /// Buffered output port writing the named file (created or truncated).
    pub fn file_output(filename: &str) -> Result<Self> {
        let f = File::create(filename)?;
        Ok(SchemePort {
            port: RefCell::new(PortState::Output(Box::new(BufWriter::new(f)))),
        })
    }
    /// Input port over a static string (`&[u8]` implements `BufRead`).
    pub fn string_input(s: &'static str) -> Self {
        SchemePort {
            port: RefCell::new(PortState::Input(Box::new(s.as_bytes()))),
        }
    }
    /// Input port over a static byte slice.
    pub fn bytes_input(s: &'static [u8]) -> Self {
        SchemePort {
            port: RefCell::new(PortState::Input(Box::new(s))),
        }
    }
    /// Output port accumulating written bytes in memory (see `clone_data`).
    pub fn bytes_output() -> Self {
        SchemePort {
            port: RefCell::new(PortState::Buffer(vec![])),
        }
    }
    /// A port is open unless it was explicitly closed.
    pub fn is_open(&self) -> bool {
        match &*self.port.borrow() {
            PortState::Closed => false,
            _ => true,
        }
    }
    /// Close the port, flushing output ports first.
    pub fn close(&self) -> Result<()> {
        let mut p = self.port.borrow_mut();
        match &mut *p {
            PortState::Output(port) => port.flush_output()?,
            _ => {}
        }
        *p = PortState::Closed;
        Ok(())
    }
    /// Copy out the bytes accumulated by a `bytes_output` port.
    pub fn clone_data(&self) -> Result<Vec<u8>> {
        match &*self.port.borrow() {
            PortState::Buffer(data) => Ok(data.clone()),
            _ => Err(RuntimeError::WrongPortKind.into()),
        }
    }
    // Run `func` against the underlying input port; error when the port is
    // not an open input port. The RefCell stays mutably borrowed while
    // `func` runs.
    fn with_input_port<T>(&self, func: impl FnOnce(&mut dyn InputPort) -> Result<T>) -> Result<T> {
        match &mut *self.port.borrow_mut() {
            PortState::Input(p) => func(p.as_mut()),
            PortState::Output(_) | PortState::Buffer(_) => Err(RuntimeError::WrongPortKind.into()),
            PortState::Closed => Err(RuntimeError::ClosedPort.into()),
        }
    }
    // Run `func` against the underlying output port; error when the port is
    // not an open output port.
    fn with_output_port<T>(
        &self,
        func: impl FnOnce(&mut dyn OutputPort) -> Result<T>,
    ) -> Result<T> {
        match &mut *self.port.borrow_mut() {
            PortState::Output(p) => func(p.as_mut()),
            // BUGFIX: the old code passed `&mut p.as_mut_slice()`; the Write
            // impl for `&mut [u8]` overwrites the (initially empty) slice in
            // place and never grows it, so a bytes_output port could never
            // accumulate anything. `Vec<u8>`'s Write impl appends, which is
            // what `clone_data` expects.
            PortState::Buffer(p) => func(p),
            PortState::Input(_) => Err(RuntimeError::WrongPortKind.into()),
            PortState::Closed => Err(RuntimeError::ClosedPort.into()),
        }
    }
}
// Public read/write operations. Each one delegates to the underlying
// Input/OutputPort via the with_*_port helpers.
impl SchemePort {
    /// True if the port is currently in the input state.
    pub fn is_input_port(&self) -> bool {
        match &*self.port.borrow() {
            PortState::Input(_) => true,
            _ => false,
        }
    }
    /// True if the port is currently in the output state.
    pub fn is_output_port(&self) -> bool {
        match &*self.port.borrow() {
            PortState::Output(_) => true,
            _ => false,
        }
    }
    // --- input operations; a None result (end of input) becomes Scm::Eof ---
    /// Read one S-expression.
    pub fn read(&self) -> Result<Scm> {
        self.with_input_port(|p| read_sexpr(p))
    }
    pub fn read_char(&self) -> Result<Scm> {
        self.with_input_port(|p| p.read_char())
            .map(convert_optional_value_to_scm)
    }
    pub fn peek_char(&self) -> Result<Scm> {
        self.with_input_port(|p| p.peek_char())
            .map(convert_optional_value_to_scm)
    }
    pub fn read_line(&self) -> Result<Scm> {
        self.with_input_port(|p| p.read_line())
            .map(convert_optional_value_to_scm)
    }
    pub fn read_string(&self, k: usize) -> Result<Scm> {
        self.with_input_port(|p| p.read_string(k))
            .map(convert_optional_value_to_scm)
    }
    pub fn read_u8(&self) -> Result<Scm> {
        self.with_input_port(|p| p.read_u8())
            .map(convert_optional_value_to_scm)
    }
    pub fn peek_u8(&self) -> Result<Scm> {
        self.with_input_port(|p| p.peek_u8())
            .map(convert_optional_value_to_scm)
    }
    pub fn read_bytevector(&self, k: usize) -> Result<Scm> {
        self.with_input_port(|p| p.read_bytevector(k))
            .map(convert_optional_value_to_scm)
    }
    pub fn read_into_bytevector(&self, start: usize, end: usize, vec: &mut [u8]) -> Result<Scm> {
        self.with_input_port(|p| p.read_into_bytevector(start, end, vec))
            .map(convert_optional_value_to_scm)
    }
    // --- output operations ---
    pub fn write_char(&self, ch: char) -> Result<()> {
        self.with_output_port(|p| p.write_char(ch))
    }
    pub fn write_string(&self, s: &str, start: usize, end: usize) -> Result<()> {
        self.with_output_port(|p| p.write_string(s, start, end))
    }
    pub fn write_u8(&self, x: u8) -> Result<()> {
        self.with_output_port(|p| p.write_u8(x))
    }
    /// Write the bytes `v[start..end]`.
    pub fn write_bytevector(&self, v: &[u8], start: usize, end: usize) -> Result<()> {
        self.with_output_port(|p| p.write_bytevector(&v[start..end]))
    }
    pub fn flush_output(&self) -> Result<()> {
        self.with_output_port(|p| p.flush_output())
    }
    // --- the four Scheme printing conventions ---
    pub fn write(&self, x: Scm) -> Result<()> {
        self.with_output_port(|p| p.write(x))
    }
    pub fn write_shared(&self, x: Scm) -> Result<()> {
        self.with_output_port(|p| p.write_shared(x))
    }
    pub fn write_simple(&self, x: Scm) -> Result<()> {
        self.with_output_port(|p| p.write_simple(x))
    }
    pub fn display(&self, x: Scm) -> Result<()> {
        self.with_output_port(|p| p.display(x))
    }
}
// Map None (end of input) to Scm::Eof; otherwise convert the value into Scm.
fn convert_optional_value_to_scm<T: Into<Scm>>(value: Option<T>) -> Scm {
    match value {
        None => Scm::Eof,
        Some(x) => x.into(),
    }
}
// Runtime state of a port.
enum PortState {
    // Readable port.
    Input(Box<dyn InputPort>),
    // Writable port.
    Output(Box<dyn OutputPort>),
    // In-memory output buffer (retrievable via clone_data).
    Buffer(Vec<u8>),
    // Closed port; every operation fails.
    Closed,
}
/// Operations every Scheme input port must support.
/// `Ok(None)` signals end of input throughout.
pub trait InputPort {
    /// Read the next character, consuming it.
    fn read_char(&mut self) -> Result<Option<char>>;
    /// Look at the next character without consuming it.
    fn peek_char(&mut self) -> Result<Option<char>>;
    /// Read up to and including the next newline.
    fn read_line(&mut self) -> Result<Option<String>>;
    /// Read up to `k` characters.
    fn read_string(&mut self, k: usize) -> Result<Option<String>>;
    /// Read the next byte, consuming it.
    fn read_u8(&mut self) -> Result<Option<u8>>;
    /// Look at the next byte without consuming it.
    fn peek_u8(&mut self) -> Result<Option<u8>>;
    /// Read up to `k` bytes.
    fn read_bytevector(&mut self, k: usize) -> Result<Option<Vec<u8>>>;
    /// Read bytes into `vec[start..end]`; returns the count read.
    fn read_into_bytevector(
        &mut self,
        start: usize,
        end: usize,
        vec: &mut [u8],
    ) -> Result<Option<usize>>;
}
/// Operations every Scheme output port must support.
pub trait OutputPort {
    /// Print `x` in `write` (machine-readable) notation.
    fn write(&mut self, x: Scm) -> Result<()>;
    fn write_shared(&mut self, x: Scm) -> Result<()>;
    fn write_simple(&mut self, x: Scm) -> Result<()>;
    /// Print `x` in `display` (human-readable) notation.
    fn display(&mut self, x: Scm) -> Result<()>;
    fn write_char(&mut self, ch: char) -> Result<()>;
    /// Write the characters of `s` in positions `start..end`.
    fn write_string(&mut self, s: &str, start: usize, end: usize) -> Result<()>;
    fn write_u8(&mut self, x: u8) -> Result<()>;
    fn write_bytevector(&mut self, v: &[u8]) -> Result<()>;
    fn flush_output(&mut self) -> Result<()>;
}
// Any buffered reader can act as a Scheme input port.
impl<T: BufRead> InputPort for T {
    /// Read the next character, consuming it from the stream.
    fn read_char(&mut self) -> Result<Option<char>> {
        let och = self.peek_char()?;
        if let Some(ch) = och {
            // Consume exactly the bytes of the character we peeked.
            self.consume(ch.len_utf8());
        }
        Ok(och)
    }
    /// Look at the next character without consuming it.
    fn peek_char(&mut self) -> Result<Option<char>> {
        let buf = self.fill_buf()?;
        // A UTF-8 scalar value occupies at most size_of::<char>() == 4 bytes.
        let window = &buf[..buf.len().min(size_of::<char>())];
        let s = match std::str::from_utf8(window) {
            Ok(s) => s,
            // BUGFIX: the 4-byte window can cut a *following* multi-byte
            // character in half (e.g. 3 ASCII bytes + the first byte of a
            // 2-byte char), which made the old code error on valid input.
            // Decode the longest valid prefix instead.
            Err(e) if e.valid_up_to() > 0 => std::str::from_utf8(&window[..e.valid_up_to()])
                .expect("prefix up to valid_up_to() is valid UTF-8"),
            Err(e) => return Err(e.into()),
        };
        Ok(s.chars().next())
    }
    /// Read one line (including the newline); None at end of input.
    fn read_line(&mut self) -> Result<Option<String>> {
        let mut buf = String::new();
        let n = BufRead::read_line(self, &mut buf)?;
        if n == 0 {
            Ok(None)
        } else {
            Ok(Some(buf))
        }
    }
    /// Read up to `k` characters; None if the stream is already exhausted.
    fn read_string(&mut self, k: usize) -> Result<Option<String>> {
        let buf = (0..k)
            .map(|_| self.read_char())
            .take_while(|r| match r {
                Err(_) => false,
                Ok(None) => false,
                Ok(_) => true,
            })
            .map(|r| r.map(|x| x.unwrap()))
            .collect::<Result<String>>()?;
        if buf.is_empty() && k > 0 {
            Ok(None)
        } else {
            Ok(Some(buf))
        }
    }
    /// Read the next byte, consuming it.
    fn read_u8(&mut self) -> Result<Option<u8>> {
        let ou = self.peek_u8()?;
        if ou.is_some() {
            self.consume(1);
        }
        Ok(ou)
    }
    /// Look at the next byte without consuming it.
    fn peek_u8(&mut self) -> Result<Option<u8>> {
        let buf = self.fill_buf()?;
        Ok(buf.first().copied())
    }
    /// Read up to `k` bytes; shorter at EOF, None if nothing is available.
    fn read_bytevector(&mut self, k: usize) -> Result<Option<Vec<u8>>> {
        let mut buf = vec![0; k];
        match self.read_into_bytevector(0, k, buf.as_mut_slice())? {
            None => return Ok(None),
            Some(n) if n == k => {}
            Some(n) => buf.truncate(n),
        }
        Ok(Some(buf))
    }
    /// Read into `vec[start..end]`; returns the byte count, None at EOF.
    fn read_into_bytevector(
        &mut self,
        start: usize,
        end: usize,
        vec: &mut [u8],
    ) -> Result<Option<usize>> {
        if end <= start {
            return Ok(Some(0));
        }
        let vec = &mut vec[start..end];
        self.read(vec)
            .map(|n| match n {
                0 => None,
                n => Some(n),
            })
            .map_err(Error::from)
    }
}
// Any `std::io::Write` sink can act as a Scheme output port.
impl<T: Write> OutputPort for T {
    /// Print `x` in `write` (machine-readable) notation.
    fn write(&mut self, x: Scm) -> Result<()> {
        Ok(write!(self, "{}", x.write())?)
    }
    fn write_shared(&mut self, x: Scm) -> Result<()> {
        Ok(write!(self, "{}", x.write_shared())?)
    }
    fn write_simple(&mut self, x: Scm) -> Result<()> {
        Ok(write!(self, "{}", x.write_simple())?)
    }
    /// Print `x` in `display` (human-readable) notation.
    fn display(&mut self, x: Scm) -> Result<()> {
        Ok(write!(self, "{}", x.display())?)
    }
    fn write_char(&mut self, ch: char) -> Result<()> {
        Ok(write!(self, "{}", ch)?)
    }
    /// Write the characters of `s` in positions `start..end`.
    fn write_string(&mut self, s: &str, start: usize, end: usize) -> Result<()> {
        for ch in s.chars().take(end).skip(start) {
            self.write_char(ch)?;
        }
        Ok(())
    }
    /// Write a single byte; error if it could not be written.
    fn write_u8(&mut self, x: u8) -> Result<()> {
        let n = self.write(&[x])?;
        if n != 1 {
            Err(RuntimeError::WriteError.into())
        } else {
            Ok(())
        }
    }
    /// Write all bytes of `v`.
    fn write_bytevector(&mut self, v: &[u8]) -> Result<()> {
        // BUGFIX: the old code compared the bytes-written count against 1
        // (copy/paste from write_u8), so every bytevector longer than one
        // byte was reported as a write error even when fully written.
        // write_all retries until the whole slice is out.
        self.write_all(v)?;
        Ok(())
    }
    fn flush_output(&mut self) -> Result<()> {
        Ok(self.flush()?)
    }
}
/// Read one S-expression from `port`.
///
/// Lines are accumulated until the parser either succeeds or fails with an
/// error other than "unclosed sequence" (which only means: read more input).
/// End of input yields `Scm::Eof` (any partially accumulated text is
/// discarded).
pub fn read_sexpr(port: &mut dyn InputPort) -> Result<Scm> {
    let mut buf = String::new();
    loop {
        match port.read_line()? {
            None => return Ok(Scm::Eof),
            Some(line) => buf += &line,
        }
        let span = Span::new(&buf);
        match parse_sexpr(span) {
            Ok((x, _)) => return Ok((&x).into()),
            // The expression continues on the next line; keep reading.
            Err(e) if e.kind == ParseErrorKind::UnclosedSequence => {}
            Err(e) => return Err(Error::from_parse_error_and_source(e, (&buf).into())),
        }
    }
}
|
use crate::ast::expressions;
use crate::ast::stack;
use crate::interpreter;
/// AST node for a label statement, wrapping the label's name expression.
#[derive(Debug)]
pub struct Label(pub Box<dyn expressions::Expression>);

impl interpreter::Eval for Label {}
impl expressions::Expression for Label {}

impl Label {
    /// Pop three entries from the parse stack and push a Label built from
    /// the middle one. (The discarded first/last entries are presumably the
    /// surrounding delimiter tokens — TODO confirm against the parser.)
    pub fn new(stack: &mut stack::Stack) {
        let (_, name, _) = stack_unpack!(stack, single, single, single);
        stack.push_single(Box::new(Label(name)))
    }
}
/// AST node for a goto statement, wrapping the target label expression.
#[derive(Debug)]
pub struct Goto(pub Box<dyn expressions::Expression>);

impl interpreter::Eval for Goto {}
impl expressions::Expression for Goto {}

impl Goto {
    /// Pop the target name and the (discarded) `goto` keyword entry from the
    /// parse stack, then push a Goto node.
    pub fn new(stack: &mut stack::Stack) {
        let (name, _goto) = stack_unpack!(stack, single, single);
        stack.push_single(Box::new(Goto(name)))
    }
}
|
use log::error;
use rand::Rng;
use std::{
ffi::CString,
ops::{Add, Div, Mul, Sub},
path::Path,
time::Duration,
};
/// Reinterpret a byte vector as `u32`s in native byte order.
///
/// BUGFIX: the previous `unsafe { vecin.align_to::<u32>() }` silently dropped
/// any misaligned prefix/suffix, so results depended on the allocation's
/// alignment. This version is safe and deterministic; trailing bytes that do
/// not fill a whole `u32` are dropped (matching the old suffix behaviour).
pub fn to_vec32(vecin: Vec<u8>) -> Vec<u32> {
    vecin
        .chunks_exact(4)
        .map(|b| u32::from_ne_bytes([b[0], b[1], b[2], b[3]]))
        .collect()
}
pub fn load_file(file: &Path) -> Option<Vec<u8>> {
let contents = std::fs::read(file);
match contents {
Ok(file_str) => Some(file_str),
Err(err) => {
error!("Impossible to read file {} : {}", file.display(), err);
None
},
}
}
/// Render a boolean as a check (✅) or cross (❌) mark.
pub fn tick(val: bool) -> String {
    let mark = if val { "✅" } else { "❌" };
    mark.to_string()
}
/// Convert a NUL-terminated buffer of C `char`s into a String (lossily).
///
/// BUGFIX: the previous implementation called `CString::from_raw` on memory
/// owned by a `Vec`, violating `from_raw`'s contract (the pointer must come
/// from `CString::into_raw`) and deallocating with a mismatched layout —
/// undefined behaviour. This version is entirely safe: it takes the bytes up
/// to the first NUL (or the whole buffer if there is none) and decodes them
/// as UTF-8, replacing invalid sequences.
pub fn cstr2string(cstr: Vec<i8>) -> String {
    let bytes: Vec<u8> = cstr
        .into_iter()
        .map(|c| c as u8)
        .take_while(|&b| b != 0)
        .collect();
    String::from_utf8_lossy(&bytes).into_owned()
}
/// Format a duration as its total whole milliseconds.
pub fn get_fract_s(date: Duration) -> String {
    // as_millis() == secs * 1000 + subsec millis — exactly the old manual math.
    date.as_millis().to_string()
}
// Approximate f32 comparison: true when b lies strictly within ±epsilon of a.
pub fn f32_cmp(a: f32, b: f32, epsilon: f32) -> bool {
    let under_upper = b < a + epsilon;
    let over_lower = b > a - epsilon;
    under_upper && over_lower
}
/// Build a vector of `len` values drawn uniformly from `low..high`.
pub fn rand_vec<T>(len: usize, low: T, high: T) -> Vec<T>
where
    T: rand::distributions::uniform::SampleUniform + Copy + PartialOrd,
{
    let mut rng = rand::thread_rng();
    (0..len).map(|_| rng.gen_range(low..high)).collect()
}
/// Return the minimum and maximum of `data`, or None when it is empty.
pub fn min_max<T: PartialOrd + Copy>(data: &[T]) -> Option<(T, T)> {
    if data.is_empty() {
        return None;
    } else if data.len() == 1 {
        return Some((data[0], data[0]));
    }
    let mut min_local: T = data[0];
    let mut max_local: T = data[0];
    for item in data {
        if &min_local > item {
            min_local = *item;
        }
        if &max_local < item {
            max_local = *item;
        }
    }
    Some((min_local, max_local))
}

/// Linearly remap `x` from `origin_min..origin_max` to `map_min..map_max`.
pub fn remap<T>(x: T, origin_min: T, origin_max: T, map_min: T, map_max: T) -> T
where
    T: Copy + Add<Output = T> + Sub<Output = T> + Mul<Output = T> + Div<Output = T>,
{
    map_min + (x - origin_min) * (map_max - map_min) / (origin_max - origin_min)
}

/// Render an RGB float buffer (`width * height * 3` samples) as ASCII PPM,
/// rescaling sample values into 0..=255. Returns None when the dimensions do
/// not match the data length.
pub fn to_ppm(data: &[f32], width: usize, height: usize) -> Option<String> {
    // BUGFIX: an empty buffer with width == 0 or height == 0 used to pass the
    // size check and then panic on min_max(..).unwrap().
    if data.is_empty() || width * height * 3 != data.len() {
        return None;
    }
    let (min, max) = min_max(data).expect("data is non-empty");
    // NOTE: if min == max the remap below divides by zero (NaN -> 0u8),
    // exactly as before.
    let mut ppm = String::new();
    ppm.push_str("P3\n");
    ppm.push_str(&format!("{} {}\n", width, height));
    ppm.push_str("255\n");
    for (i, item) in data.iter().enumerate() {
        ppm.push_str(&format!("{} ", remap(*item, min, max, 0.0, 255.0) as u8));
        // BUGFIX: a row holds width pixels = width * 3 samples; the old
        // `i % width == 0` broke lines after the very first sample and in
        // the middle of pixels.
        if (i + 1) % (width * 3) == 0 {
            ppm.push('\n');
        }
    }
    Some(ppm)
}
|
fn main() {
    // One instance of each shape.
    let rect = Rectange { width: 10.0, height: 20.0 };
    let tri = Triangle { base: 10.0, height: 20.0 };
    let cir = Circle { radius: 10.0 };
    // Areas alone.
    println!("rect area is {}", rect.calc_area());
    println!("tri area is {}", tri.calc_area());
    println!("cir area is {}", cir.calc_area());
    // Formula text followed by the computed area.
    println!("rect {} {}", rect.expr_str(), rect.calc_area());
    println!("tri {} {}", tri.expr_str(), tri.calc_area());
    println!("cir {} {}", cir.expr_str(), cir.calc_area());
}
// Rectangle (name kept as `Rectange` for compatibility with existing callers).
struct Rectange {
    width: f32,
    height: f32,
}
// Triangle.
struct Triangle {
    base: f32,
    height: f32,
}
// Circle.
struct Circle {
    radius: f32,
}
// Trait for computing a shape's area.
trait CalcArea {
    fn calc_area(&self) -> f32;
}
impl CalcArea for Rectange {
    fn calc_area(&self) -> f32 {
        self.width * self.height
    }
}
impl CalcArea for Triangle {
    fn calc_area(&self) -> f32 {
        self.base * self.height * 0.5
    }
}
impl CalcArea for Circle {
    fn calc_area(&self) -> f32 {
        // BUGFIX: use the standard PI constant instead of the truncated 3.14.
        self.radius * self.radius * std::f32::consts::PI
    }
}
// Default-method demonstration: the default formula text is
// "width × height = ", overridden where that formula is wrong.
trait ExprString {
    fn expr_str(&self) -> String {
        // "width × height = "
        "幅 × 高さ = ".to_string()
    }
}
// Rectangle keeps the default formula text.
impl ExprString for Rectange {}
impl ExprString for Triangle {
    fn expr_str(&self) -> String {
        // "base × height ÷ 2 = "
        "底辺 × 高さ ÷ 2 = ".to_string()
    }
}
impl ExprString for Circle {
    fn expr_str(&self) -> String {
        // "π × radius × radius = "
        "π × 半径 × 半径 = ".to_string()
    }
}
// Parse-to-integer helper trait.
trait ToNumber {
    fn to_i(&self) -> i32;
}

// Strings parse as i32, defaulting to 0 when parsing fails.
impl ToNumber for String {
    fn to_i(&self) -> i32 {
        self.parse::<i32>().unwrap_or(0)
    }
}
// Demo of the ToNumber trait.
fn _main() {
    let n = String::from("100").to_i();
    println!("n is {}", n );
}
|
use proconio::input;
// Code submitted (and accepted) for problem E of ABC180.
// Solves a travelling-salesman-style problem with bitmask DP.
fn main() {
    input! {
        number_of_cities:usize,
        xyz:[(isize,isize,isize);number_of_cities],
    }
    // distance
    let mut distance: Vec<Vec<usize>> = vec![vec![0; number_of_cities]; number_of_cities];
    for city_from in 0..number_of_cities {
        for city_to in 0..number_of_cities {
            // TODO: the distance from city_from to city_to is problem-specific.
            // Cities must be numbered 0..(number_of_cities - 1).
            distance[city_from][city_to] = ((xyz[city_from].0 - xyz[city_to].0).abs()
                + (xyz[city_from].1 - xyz[city_to].1).abs()
                + 0isize.max(xyz[city_to].2 - xyz[city_from].2))
                as usize;
        }
    }
    // dp[state][last] = minimum cost of reaching a position where the set of
    // visited cities is `state` and the last city visited is `last`.
    let mut dp = vec![vec![std::usize::MAX; number_of_cities]; 1 << number_of_cities];
    // Cost 0 for the "no city visited yet" states (probably redundant).
    for last in 0..number_of_cities {
        dp[0][last] = 0;
    }
    // TODO: give cost 0 to "exactly one start city visited" states.
    // If the start city is not fixed:
    // for last in 0.. number_of_cities
    // {
    //     dp[1 << last][last] = 0;
    // }
    // When starting from city 0:
    dp[1 << 0][0] = 0;
    // Enumerate states; for each feasible last city, try every next city and
    // relax the cost of the resulting state.
    for state in 1..(1 << number_of_cities) {
        for last in 0..number_of_cities {
            // `last` must be inside the visited set, otherwise skip.
            if (state & (1 << last)) == 0 {
                continue;
            }
            for next_city in 0..number_of_cities {
                // Moving to an already-visited city is pointless, skip.
                if (state & 1 << next_city) > 0 {
                    continue;
                }
                // Relax: update the successor state when this path is cheaper.
                if dp[state][last] < std::usize::MAX
                    && dp[state | 1 << next_city][next_city]
                        > dp[state][last] + distance[last][next_city]
                {
                    dp[state | 1 << next_city][next_city] =
                        dp[state][last] + distance[last][next_city];
                }
            }
        }
    }
    let mut result = std::usize::MAX;
    for last in 0..number_of_cities {
        // TODO: when not returning to city 0, drop the "+ distance[last][0]".
        // For a free-start round trip, starting at 0 and returning to 0 is fine.
        if result > dp[(1 << number_of_cities) - 1][last] + distance[last][0] {
            result = dp[(1 << number_of_cities) - 1][last] + distance[last][0];
        }
    }
    println!("{}", result);
}
|
use crate::types::{keyword_type::KeywordType, validation_error::ValidationError};
use failure::Fail;
use loader_rs::LoaderError;
use url::{ParseError, Url};
#[derive(Debug, Fail)]
pub(in crate) enum SchemaError {
#[fail(display = "Unknown error")]
Unknown,
#[fail(display = "Malformed Schema: path={}, detail={}", path, detail)]
Malformed { path: Url, keyword: KeywordType, detail: String },
#[fail(display = "Url Parsing error: {}", 0)]
UrlParse(ParseError),
#[fail(display = "Validation error: {}", 0)]
Validation(ValidationError),
#[fail(display = "Loader Error: {}", 0)]
LoaderError(LoaderError),
}
/// `Unknown` is the fallback when no more specific variant applies.
impl Default for SchemaError {
    fn default() -> Self {
        Self::Unknown
    }
}
// The `From` impls below let `?` lift lower-level failures into `SchemaError`.
impl From<ParseError> for SchemaError {
    fn from(value: ParseError) -> Self {
        Self::UrlParse(value)
    }
}
impl From<ValidationError> for SchemaError {
    fn from(value: ValidationError) -> Self {
        Self::Validation(value)
    }
}
impl From<LoaderError> for SchemaError {
    fn from(value: LoaderError) -> Self {
        Self::LoaderError(value)
    }
}
|
// NOTE(review): svd2rust-generated register plumbing for RCC_MP_APB3LPENSETR;
// regenerate from the SVD rather than hand-editing.
#[doc = "Reader of register RCC_MP_APB3LPENSETR"]
pub type R = crate::R<u32, super::RCC_MP_APB3LPENSETR>;
#[doc = "Writer for register RCC_MP_APB3LPENSETR"]
pub type W = crate::W<u32, super::RCC_MP_APB3LPENSETR>;
#[doc = "Register RCC_MP_APB3LPENSETR `reset()`'s with value 0x0003_290f"]
impl crate::ResetValue for super::RCC_MP_APB3LPENSETR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0003_290f
    }
}
// NOTE(review): svd2rust-generated accessors for the LPTIM2LPEN field (bit 0);
// regenerate from the SVD rather than hand-editing.
#[doc = "LPTIM2LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPTIM2LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<LPTIM2LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: LPTIM2LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `LPTIM2LPEN`"]
pub type LPTIM2LPEN_R = crate::R<bool, LPTIM2LPEN_A>;
impl LPTIM2LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPTIM2LPEN_A {
        match self.bits {
            false => LPTIM2LPEN_A::B_0X0,
            true => LPTIM2LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == LPTIM2LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == LPTIM2LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `LPTIM2LPEN`"]
pub struct LPTIM2LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> LPTIM2LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPTIM2LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(LPTIM2LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(LPTIM2LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the LPTIM3LPEN field (bit 1);
// regenerate from the SVD rather than hand-editing.
#[doc = "LPTIM3LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPTIM3LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<LPTIM3LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: LPTIM3LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `LPTIM3LPEN`"]
pub type LPTIM3LPEN_R = crate::R<bool, LPTIM3LPEN_A>;
impl LPTIM3LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPTIM3LPEN_A {
        match self.bits {
            false => LPTIM3LPEN_A::B_0X0,
            true => LPTIM3LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == LPTIM3LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == LPTIM3LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `LPTIM3LPEN`"]
pub struct LPTIM3LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> LPTIM3LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPTIM3LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(LPTIM3LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(LPTIM3LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the LPTIM4LPEN field (bit 2);
// regenerate from the SVD rather than hand-editing.
#[doc = "LPTIM4LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPTIM4LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<LPTIM4LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: LPTIM4LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `LPTIM4LPEN`"]
pub type LPTIM4LPEN_R = crate::R<bool, LPTIM4LPEN_A>;
impl LPTIM4LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPTIM4LPEN_A {
        match self.bits {
            false => LPTIM4LPEN_A::B_0X0,
            true => LPTIM4LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == LPTIM4LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == LPTIM4LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `LPTIM4LPEN`"]
pub struct LPTIM4LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> LPTIM4LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPTIM4LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(LPTIM4LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(LPTIM4LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the LPTIM5LPEN field (bit 3);
// regenerate from the SVD rather than hand-editing.
#[doc = "LPTIM5LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LPTIM5LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<LPTIM5LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: LPTIM5LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `LPTIM5LPEN`"]
pub type LPTIM5LPEN_R = crate::R<bool, LPTIM5LPEN_A>;
impl LPTIM5LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPTIM5LPEN_A {
        match self.bits {
            false => LPTIM5LPEN_A::B_0X0,
            true => LPTIM5LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == LPTIM5LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == LPTIM5LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `LPTIM5LPEN`"]
pub struct LPTIM5LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> LPTIM5LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPTIM5LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(LPTIM5LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(LPTIM5LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 3: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the SAI4LPEN field (bit 8);
// regenerate from the SVD rather than hand-editing.
#[doc = "SAI4LPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SAI4LPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<SAI4LPEN_A> for bool {
    #[inline(always)]
    fn from(variant: SAI4LPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `SAI4LPEN`"]
pub type SAI4LPEN_R = crate::R<bool, SAI4LPEN_A>;
impl SAI4LPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SAI4LPEN_A {
        match self.bits {
            false => SAI4LPEN_A::B_0X0,
            true => SAI4LPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SAI4LPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SAI4LPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `SAI4LPEN`"]
pub struct SAI4LPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> SAI4LPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SAI4LPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SAI4LPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SAI4LPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 8: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the SYSCFGLPEN field (bit 11);
// regenerate from the SVD rather than hand-editing.
#[doc = "SYSCFGLPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SYSCFGLPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<SYSCFGLPEN_A> for bool {
    #[inline(always)]
    fn from(variant: SYSCFGLPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `SYSCFGLPEN`"]
pub type SYSCFGLPEN_R = crate::R<bool, SYSCFGLPEN_A>;
impl SYSCFGLPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SYSCFGLPEN_A {
        match self.bits {
            false => SYSCFGLPEN_A::B_0X0,
            true => SYSCFGLPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SYSCFGLPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SYSCFGLPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `SYSCFGLPEN`"]
pub struct SYSCFGLPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> SYSCFGLPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SYSCFGLPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SYSCFGLPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SYSCFGLPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 11: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the VREFLPEN field (bit 13);
// regenerate from the SVD rather than hand-editing.
#[doc = "VREFLPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VREFLPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<VREFLPEN_A> for bool {
    #[inline(always)]
    fn from(variant: VREFLPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `VREFLPEN`"]
pub type VREFLPEN_R = crate::R<bool, VREFLPEN_A>;
impl VREFLPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> VREFLPEN_A {
        match self.bits {
            false => VREFLPEN_A::B_0X0,
            true => VREFLPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == VREFLPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == VREFLPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `VREFLPEN`"]
pub struct VREFLPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> VREFLPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: VREFLPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(VREFLPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(VREFLPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 13: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the TMPSENSLPEN field (bit 16);
// regenerate from the SVD rather than hand-editing.
#[doc = "TMPSENSLPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TMPSENSLPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<TMPSENSLPEN_A> for bool {
    #[inline(always)]
    fn from(variant: TMPSENSLPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `TMPSENSLPEN`"]
pub type TMPSENSLPEN_R = crate::R<bool, TMPSENSLPEN_A>;
impl TMPSENSLPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TMPSENSLPEN_A {
        match self.bits {
            false => TMPSENSLPEN_A::B_0X0,
            true => TMPSENSLPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == TMPSENSLPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == TMPSENSLPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `TMPSENSLPEN`"]
pub struct TMPSENSLPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> TMPSENSLPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: TMPSENSLPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(TMPSENSLPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(TMPSENSLPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 16: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
// NOTE(review): svd2rust-generated accessors for the PMBCTRLLPEN field (bit 17);
// regenerate from the SVD rather than hand-editing.
#[doc = "PMBCTRLLPEN\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PMBCTRLLPEN_A {
    #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are disabled in\r\n CSLEEP"]
    B_0X0 = 0,
    #[doc = "1: Writing enables the peripheral\r\n clocks in CSLEEP, reading means that the\r\n peripheral clocks are enabled in\r\n CSLEEP"]
    B_0X1 = 1,
}
impl From<PMBCTRLLPEN_A> for bool {
    #[inline(always)]
    fn from(variant: PMBCTRLLPEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `PMBCTRLLPEN`"]
pub type PMBCTRLLPEN_R = crate::R<bool, PMBCTRLLPEN_A>;
impl PMBCTRLLPEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PMBCTRLLPEN_A {
        match self.bits {
            false => PMBCTRLLPEN_A::B_0X0,
            true => PMBCTRLLPEN_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == PMBCTRLLPEN_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == PMBCTRLLPEN_A::B_0X1
    }
}
#[doc = "Write proxy for field `PMBCTRLLPEN`"]
pub struct PMBCTRLLPEN_W<'a> {
    w: &'a mut W,
}
impl<'a> PMBCTRLLPEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PMBCTRLLPEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(PMBCTRLLPEN_A::B_0X0)
    }
    #[doc = "Writing enables the peripheral clocks in CSLEEP, reading means that the peripheral clocks are enabled in CSLEEP"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(PMBCTRLLPEN_A::B_0X1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 17: mask and OR in place without touching other fields.
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
// NOTE(review): svd2rust-generated read accessors; each extracts one field at the
// bit position stated in its doc attribute. Regenerate from the SVD, don't hand-edit.
impl R {
    #[doc = "Bit 0 - LPTIM2LPEN"]
    #[inline(always)]
    pub fn lptim2lpen(&self) -> LPTIM2LPEN_R {
        LPTIM2LPEN_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - LPTIM3LPEN"]
    #[inline(always)]
    pub fn lptim3lpen(&self) -> LPTIM3LPEN_R {
        LPTIM3LPEN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - LPTIM4LPEN"]
    #[inline(always)]
    pub fn lptim4lpen(&self) -> LPTIM4LPEN_R {
        LPTIM4LPEN_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - LPTIM5LPEN"]
    #[inline(always)]
    pub fn lptim5lpen(&self) -> LPTIM5LPEN_R {
        LPTIM5LPEN_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 8 - SAI4LPEN"]
    #[inline(always)]
    pub fn sai4lpen(&self) -> SAI4LPEN_R {
        SAI4LPEN_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 11 - SYSCFGLPEN"]
    #[inline(always)]
    pub fn syscfglpen(&self) -> SYSCFGLPEN_R {
        SYSCFGLPEN_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 13 - VREFLPEN"]
    #[inline(always)]
    pub fn vreflpen(&self) -> VREFLPEN_R {
        VREFLPEN_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 16 - TMPSENSLPEN"]
    #[inline(always)]
    pub fn tmpsenslpen(&self) -> TMPSENSLPEN_R {
        TMPSENSLPEN_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - PMBCTRLLPEN"]
    #[inline(always)]
    pub fn pmbctrllpen(&self) -> PMBCTRLLPEN_R {
        PMBCTRLLPEN_R::new(((self.bits >> 17) & 0x01) != 0)
    }
}
// NOTE(review): svd2rust-generated write proxies; each returns a typed proxy that
// writes one field's bit. Regenerate from the SVD, don't hand-edit.
impl W {
    #[doc = "Bit 0 - LPTIM2LPEN"]
    #[inline(always)]
    pub fn lptim2lpen(&mut self) -> LPTIM2LPEN_W {
        LPTIM2LPEN_W { w: self }
    }
    #[doc = "Bit 1 - LPTIM3LPEN"]
    #[inline(always)]
    pub fn lptim3lpen(&mut self) -> LPTIM3LPEN_W {
        LPTIM3LPEN_W { w: self }
    }
    #[doc = "Bit 2 - LPTIM4LPEN"]
    #[inline(always)]
    pub fn lptim4lpen(&mut self) -> LPTIM4LPEN_W {
        LPTIM4LPEN_W { w: self }
    }
    #[doc = "Bit 3 - LPTIM5LPEN"]
    #[inline(always)]
    pub fn lptim5lpen(&mut self) -> LPTIM5LPEN_W {
        LPTIM5LPEN_W { w: self }
    }
    #[doc = "Bit 8 - SAI4LPEN"]
    #[inline(always)]
    pub fn sai4lpen(&mut self) -> SAI4LPEN_W {
        SAI4LPEN_W { w: self }
    }
    #[doc = "Bit 11 - SYSCFGLPEN"]
    #[inline(always)]
    pub fn syscfglpen(&mut self) -> SYSCFGLPEN_W {
        SYSCFGLPEN_W { w: self }
    }
    #[doc = "Bit 13 - VREFLPEN"]
    #[inline(always)]
    pub fn vreflpen(&mut self) -> VREFLPEN_W {
        VREFLPEN_W { w: self }
    }
    #[doc = "Bit 16 - TMPSENSLPEN"]
    #[inline(always)]
    pub fn tmpsenslpen(&mut self) -> TMPSENSLPEN_W {
        TMPSENSLPEN_W { w: self }
    }
    #[doc = "Bit 17 - PMBCTRLLPEN"]
    #[inline(always)]
    pub fn pmbctrllpen(&mut self) -> PMBCTRLLPEN_W {
        PMBCTRLLPEN_W { w: self }
    }
}
|
use super::*;
impl<T: FromPacketBytes> FromPacketBytes for Option<T> {
    type Output = Option<T::Output>;
    /// Decodes `T` when any bytes remain; an exhausted reader yields `None`.
    fn from_packet(reader: &mut PacketReader) -> Result<Self::Output, Box<PacketFormatError>> {
        if reader.is_empty() {
            return Ok(None);
        }
        T::from_packet(reader).map(Some)
    }
}
impl<T: ToPacketBytes<U>, U> ToPacketBytes<Option<U>> for Option<T> {
    /// Serializes the inner value when present; `None` writes nothing and succeeds.
    fn to_packet(value: Option<U>, packet: &mut Vec<u8>) -> Result<(), Box<PacketFormatError>> {
        match value {
            Some(inner) => T::to_packet(inner, packet),
            None => Ok(()),
        }
    }
}
|
use llvm_sys::{core::*, execution_engine::*, prelude::*, support::*, target::*, *};
use super::{to_ptr, Type, LLVMTypeCache};
use std::ptr;
/// Owns the raw LLVM handles used during code generation.
pub struct LLVMCompiler {
    pub context: LLVMContextRef,   // the (global) LLVM context everything is built in
    pub module: LLVMModuleRef,     // module named "main" that receives generated code
    pub builder: LLVMBuilderRef,   // instruction builder bound to `context`
    pub types: LLVMTypeCache,      // cache of LLVM type objects for this context
}
impl LLVMCompiler {
pub fn new() -> LLVMCompiler {
unsafe {
// It would be nice to create a new context here. However,
// earlier in the code types are already created. These types
// are built within the global context, which is the context passed
// into functions. As such, creating a new context would create a
// context mismatch between the function (global) context and the
// context used by the rest of the builder.
let context = LLVMGetGlobalContext();
// This is required to ensure that exported
// functions are available to the context.
LLVMLoadLibraryPermanently(ptr::null());
let module = LLVMModuleCreateWithNameInContext(to_ptr("main"), context);
let builder = LLVMCreateBuilderInContext(context);
// TODO: figure out the right organization
// for LLVM objects and codegen objects... strongly
// itertwined.
let types = LLVMTypeCache::new(context);
return LLVMCompiler {
context,
module,
builder,
types
};
}
}
} |
use common::BitSet;
use std::borrow::Borrow;
use std::borrow::BorrowMut;
use std::cmp::Ordering;
use DocId;
/// Expresses the outcome of a call to `DocSet`'s `.skip_next(...)`.
#[derive(PartialEq, Eq, Debug)]
pub enum SkipResult {
    /// The target was found in the docset.
    Reached,
    /// The target was not in the docset; skipping stopped on the first element
    /// greater than the target.
    OverStep,
    /// the docset was entirely consumed without finding the target, nor any
    /// element greater than the target.
    End,
}
/// Represents an iterable set of sorted doc ids.
pub trait DocSet {
    /// Goes to the next element.
    /// `.advance(...)` needs to be called a first time to point to the correct
    /// element.
    fn advance(&mut self) -> bool;
    /// After skipping, position the iterator in such a way that `.doc()`
    /// will return a value greater than or equal to target.
    ///
    /// SkipResult expresses whether the `target value` was reached, overstepped,
    /// or if the `DocSet` was entirely consumed without finding any value
    /// greater or equal to the `target`.
    ///
    /// WARNING: Calling skip always advances the docset.
    /// More specifically, if the docset is already positioned on the target
    /// skipping will advance to the next position and return `SkipResult::OverStep`.
    ///
    /// If `.skip_next()` oversteps, then the docset must be positioned correctly
    /// on an existing document. In other words, `.doc()` should return the first
    /// document greater than `target`.
    fn skip_next(&mut self, target: DocId) -> SkipResult {
        if !self.advance() {
            return SkipResult::End;
        }
        loop {
            match self.doc().cmp(&target) {
                Ordering::Less => {
                    // Not there yet: keep advancing until we reach or pass `target`.
                    if !self.advance() {
                        return SkipResult::End;
                    }
                }
                Ordering::Equal => return SkipResult::Reached,
                Ordering::Greater => return SkipResult::OverStep,
            }
        }
    }
    /// Fills a given mutable buffer with the next doc ids from the
    /// `DocSet`
    ///
    /// If that many `DocId`s are available, the method should
    /// fill the entire buffer and return the length of the buffer.
    ///
    /// If we reach the end of the `DocSet` before filling
    /// it entirely, then the buffer is filled up to this point, and
    /// return value is the number of elements that were filled.
    ///
    /// # Warning
    ///
    /// This method is only here for specific high-performance
    /// use cases where batching matters. The normal way to
    /// go through the `DocId`'s is to call `.advance()`.
    fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
        for (i, buffer_val) in buffer.iter_mut().enumerate() {
            if self.advance() {
                *buffer_val = self.doc();
            } else {
                // Docset exhausted early: exactly `i` slots were written.
                return i;
            }
        }
        buffer.len()
    }
    /// Returns the current document
    fn doc(&self) -> DocId;
    /// Returns a best-effort hint of the
    /// length of the docset.
    fn size_hint(&self) -> u32;
    /// Appends all docs to a `bitset`.
    fn append_to_bitset(&mut self, bitset: &mut BitSet) {
        while self.advance() {
            bitset.insert(self.doc());
        }
    }
    /// Returns the number of documents matching.
    ///
    /// Calling this method consumes the `DocSet`.
    fn count(&mut self) -> u32 {
        let mut count = 0u32;
        while self.advance() {
            count += 1u32;
        }
        count
    }
}
// A boxed `DocSet` forwards every call straight to the boxed value, so that
// specialized overrides (`skip_next`, `count`, ...) are not lost behind the box.
impl<TDocSet: DocSet + ?Sized> DocSet for Box<TDocSet> {
    fn advance(&mut self) -> bool {
        (**self).advance()
    }
    fn skip_next(&mut self, target: DocId) -> SkipResult {
        (**self).skip_next(target)
    }
    fn doc(&self) -> DocId {
        (**self).doc()
    }
    fn size_hint(&self) -> u32 {
        (**self).size_hint()
    }
    fn count(&mut self) -> u32 {
        (**self).count()
    }
    fn append_to_bitset(&mut self, bitset: &mut BitSet) {
        (**self).append_to_bitset(bitset);
    }
}
|
use std::fs::{create_dir_all, write as write_file, File};
use std::io::Read;
use std::path::PathBuf;
use sodiumoxide::crypto::sign;
use sodiumoxide::crypto::sign::ed25519::PublicKey;
use sodiumoxide::crypto::sign::ed25519::SecretKey;
/// An ed25519 keypair held in memory and mirrored on disk as
/// `public.key` / `secret.key` inside a keyring directory.
pub struct Keyring {
    pub public_key: PublicKey,
    pub secret_key: SecretKey,
}
impl Keyring {
    /// Loads the keypair stored under `keyring_path_buf`, generating and
    /// persisting a fresh one when the directory or the key files are missing.
    ///
    /// Panics when the path exists but is not a directory, or when key
    /// material cannot be written to / read from disk.
    pub fn new(keyring_path_buf: &str) -> Keyring {
        let mut keyring_path_buf = PathBuf::from(keyring_path_buf);
        match (keyring_path_buf.exists(), keyring_path_buf.is_dir()) {
            (true, true) => Keyring::read_keys(&mut keyring_path_buf),
            (false, false) => {
                // `.ok().expect(..)` discarded the io::Error; plain `expect`
                // keeps it in the panic message.
                create_dir_all(&keyring_path_buf).expect("Could not create path for keys!!!");
                Keyring::create_keys(&mut keyring_path_buf)
            },
            (true, false) => panic!("Given PATH does not lead to directory!!!"),
            (false, true) => panic!("This shouldn't be possible! Given PATH does not exist, but is directory!!!")
        }
    }
    /// Generates a fresh ed25519 keypair and writes both halves to disk.
    fn create_keys(keyring_path_buf: &mut PathBuf) -> Keyring {
        let (public_key, secret_key) = sign::gen_keypair();
        Keyring::write_public_key(&public_key.0, keyring_path_buf);
        Keyring::write_secret_key(&secret_key.0[..], keyring_path_buf);
        Keyring {
            public_key,
            secret_key
        }
    }
    /// Persists the public key as `<dir>/public.key`.
    fn write_public_key(public_key: &[u8; 32], keyring_path_buf: &mut PathBuf) {
        keyring_path_buf.push("public.key");
        // Was `.ok()`: a failed write was silently ignored, leaving an
        // unusable keyring on disk.
        write_file(&keyring_path_buf, public_key).expect("Could not write public key!!!");
        keyring_path_buf.pop();
    }
    /// Persists the secret key as `<dir>/secret.key`.
    fn write_secret_key(secret_key: &[u8], keyring_path_buf: &mut PathBuf) {
        keyring_path_buf.push("secret.key");
        // Was `.ok()`: see write_public_key.
        write_file(&keyring_path_buf, secret_key).expect("Could not write secret key!!!");
        keyring_path_buf.pop();
    }
    /// Reads both keys from the directory, falling back to generating a new
    /// pair when the key files are not present yet.
    fn read_keys(keyring_path_buf: &mut PathBuf) -> Keyring {
        if Keyring::contains_keys(keyring_path_buf) {
            let public_key = Keyring::load_pub_key(keyring_path_buf);
            let secret_key = Keyring::load_secret_key(keyring_path_buf);
            Keyring {
                public_key,
                secret_key,
            }
        } else {
            Keyring::create_keys(keyring_path_buf)
        }
    }
    /// Returns true when both key files exist; panics on a half-written
    /// keyring (exactly one of the two files present).
    fn contains_keys(keyring_path_buf: &mut PathBuf) -> bool {
        keyring_path_buf.push("public.key");
        let has_public = match (keyring_path_buf.exists(), keyring_path_buf.is_file()) {
            (true, true) => true,
            (false, false) => false,
            (true, false) => panic!("Given PUBLIC_KEY_PATH does not lead to file!!!"),
            (false, true) => panic!("This shouldn't be possible! Given PATH does not exist, but is file!!!")
        };
        // Here the buffer still ends in "public.key", so replacing the file
        // name component is correct.
        keyring_path_buf.set_file_name("secret.key");
        let has_secret = match (keyring_path_buf.exists(), keyring_path_buf.is_file()) {
            (true, true) => true,
            (false, false) => false,
            (true, false) => panic!("Given SECRET_KEY_PATH does not lead to file!!!"),
            (false, true) => panic!("This shouldn't be possible! Given PATH does not exist, but is file!!!")
        };
        keyring_path_buf.pop();
        match (has_public, has_secret) {
            (true, true) => true,
            (false, true) => panic!("Missing public key!!!"),
            (true, false) => panic!("Missing secret key!!!"),
            (false, false) => false
        }
    }
    /// Reads the 32-byte public key from `<dir>/public.key`.
    fn load_pub_key(keyring_path_buf: &mut PathBuf) -> PublicKey {
        keyring_path_buf.push("public.key");
        let mut pub_key_file = File::open(&keyring_path_buf).unwrap();
        let mut pub_key_arr: [u8; 32] = [0; 32];
        // Was `.ok()`: a short read silently produced an all-zero key.
        pub_key_file.read_exact(&mut pub_key_arr).expect("Could not read public key!!!");
        keyring_path_buf.pop();
        PublicKey(pub_key_arr)
    }
    /// Reads the 64-byte secret key from `<dir>/secret.key`.
    fn load_secret_key(keyring_path_buf: &mut PathBuf) -> SecretKey {
        // BUG FIX: this was `set_file_name("secret.key")`, but at this point
        // the buffer holds the keyring *directory* (load_pub_key pushed and
        // popped), so set_file_name replaced the directory component itself
        // and opened the wrong path. `push` appends the file name correctly.
        keyring_path_buf.push("secret.key");
        let mut secret_key_file = File::open(&keyring_path_buf).unwrap();
        let mut secret_key_arr: [u8; 64] = [0; 64];
        // Was `.ok()`: a short read silently produced an all-zero key.
        secret_key_file.read_exact(&mut secret_key_arr).expect("Could not read secret key!!!");
        keyring_path_buf.pop();
        SecretKey(secret_key_arr)
    }
}
|
use crate::custom_var::CustomVar;
use crate::function::Function;
use crate::name::Name;
use crate::name_map::NameMap;
use crate::operator::Operator;
use crate::runtime::Runtime;
use crate::std_type::Type;
use crate::string_var::StringVar;
use crate::variable::{FnResult, Variable};
use std::fmt::Debug;
use std::rc::Rc;
// A user-defined runtime type: a name, its superclasses, a constructor
// function and a table of static methods.
#[derive(Debug)]
pub struct CustomType {
name: StringVar,
supers: Vec<Type>,
constructor: Function,
static_methods: NameMap<Function>,
}
impl CustomType {
/// Creates a new `CustomType` from its parts; no validation is performed.
pub fn new(
name: StringVar,
supers: Vec<Type>,
constructor: Function,
static_methods: NameMap<Function>,
) -> CustomType {
CustomType {
name,
supers,
constructor,
static_methods,
}
}
}
impl CustomType {
    /// The type's declared name.
    pub fn get_name(&self) -> &StringVar {
        &self.name
    }

    /// Runs the constructor with `args`, then pops the constructed value off
    /// the runtime's return slot.
    pub fn create(&self, args: Vec<Variable>, runtime: &mut Runtime) -> Result<Variable, ()> {
        self.constructor.call((args, runtime))?;
        Result::Ok(runtime.pop_return())
    }

    /// True when any superclass (transitively) matches `other`.
    /// NOTE(review): only the supers are consulted — a type is apparently not
    /// considered a subclass of itself here; confirm intent.
    pub fn is_subclass(&self, other: &Type, runtime: &Runtime) -> bool {
        self.supers.iter().any(|s| s.is_subclass(other, runtime))
    }

    /// Looks up a static method by name.
    ///
    /// # Panics
    /// Panics when no static method with that name exists.
    pub fn index(&self, name: Name) -> Variable {
        self.static_methods
            .get(name)
            .copied()
            .unwrap_or_else(|| panic!("{}.{} does not exist", &self.name, name.as_str()))
            .into()
    }
}
// A callable wrapper around a `Type`: invoking it returns the wrapped type.
#[derive(Debug, Copy, Clone)]
pub struct TypeIdentity {
value: Type,
}
impl TypeIdentity {
/// Wraps `value` in a reference-counted identity object.
pub fn new(value: Type) -> Rc<TypeIdentity> {
Rc::new(TypeIdentity { value })
}
}
impl CustomVar for TypeIdentity {
    /// Attribute assignment is not supported on a type identity.
    fn set(self: Rc<Self>, _name: Name, _object: Variable) {
        unimplemented!()
    }

    fn get_type(&self) -> Type {
        unimplemented!()
    }

    /// Only `Operator::Call` is supported; it yields the identity itself.
    fn get_operator(self: Rc<Self>, op: Operator) -> Variable {
        match op {
            Operator::Call => self.into(),
            _ => unimplemented!(),
        }
    }

    fn get_attribute(self: Rc<Self>, _name: &str) -> Variable {
        unimplemented!()
    }

    /// Calling the identity returns the wrapped type; arguments are ignored.
    fn call(self: Rc<Self>, _args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        runtime.return_1(self.value.into())
    }

    fn call_or_goto(self: Rc<Self>, _args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        runtime.return_1(self.value.into())
    }
}
|
#![no_main]
#![no_std]
extern crate cortex_m_rt;
extern crate panic_halt;
use cortex_m_rt::{entry, exception, ExceptionFrame};
#[entry]
fn foo() -> ! { // entry point for this compile-fail fixture: spin forever
loop {}
}
#[exception]
unsafe fn HardFault(_ef: &ExceptionFrame) -> ! { // first handler; defines symbol `_HardFault`
loop {}
}
pub mod reachable { // second handler in a reachable module must collide with the one above
use cortex_m_rt::{exception, ExceptionFrame};
#[exception] //~ ERROR symbol `_HardFault` is already defined
unsafe fn HardFault(_ef: &ExceptionFrame) -> ! {
loop {}
}
}
|
use chrono;
use lazy_static::lazy_static;
use regex::Regex;
use serde_json::Value;
use crate::validator::{scope::ScopedSchema, state::ValidationState, types::validate_as_string};
lazy_static! {
// ajv v6.7.0 compatible
// https://github.com/epoberezkin/ajv/blob/v6.7.0/lib/compile/formats.js#L90
// Full-date YYYY-MM-DD; only digit counts here — ranges are checked in
// validate_as_date.
static ref DATE_REGEX: Regex =
Regex::new(r"^(\d\d\d\d)-(\d\d)-(\d\d)$").unwrap();
// ajv v6.7.0 compatible
// https://github.com/epoberezkin/ajv/blob/v6.7.0/lib/compile/formats.js#L104
// hh:mm:ss with optional fraction and offset. NOTE(review): only lowercase
// `z` is accepted by this pattern — confirm uppercase `Z` rejection is
// intentional.
static ref TIME_REGEX: Regex =
Regex::new(r"^(\d\d):(\d\d):(\d\d)(\.\d+)?(z|[+-]\d\d:\d\d)?$").unwrap();
}
/// Validates `data` as a string containing an RFC 3339 date-time.
pub fn validate_as_datetime(scope: &ScopedSchema, data: &Value) -> ValidationState {
let mut state = validate_as_string(scope, data);
// Only parse when the value already validated as a string, so the `expect`
// below cannot fire on non-string input.
if state.is_valid()
&& chrono::DateTime::parse_from_rfc3339(data.as_str().expect("invalid validate_as_string")).is_err()
{
state.push_error(scope.error("type", "unable to parse as 'datetime'"));
}
state
}
/// Gregorian leap-year rule per RFC 3339:
/// https://tools.ietf.org/html/rfc3339#appendix-C
/// Divisible by 4, except centuries, except every fourth century.
fn is_leap_year(year: usize) -> bool {
    if year % 4 != 0 {
        false
    } else {
        year % 100 != 0 || year % 400 == 0
    }
}
/// Number of days in `month` (1-based) of `year`, accounting for leap
/// February. Panics if `month` is 0 or greater than 12.
fn days(year: usize, month: usize) -> usize {
    // Month lengths for a non-leap year, indexed by month - 1.
    const MONTH_LENGTHS: [usize; 12] = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    match month {
        2 if is_leap_year(year) => 29,
        m => MONTH_LENGTHS[m - 1],
    }
}
/// Validates `data` as a string containing a full-date (YYYY-MM-DD) with a
/// calendar-correct day-of-month, including leap-day handling.
pub fn validate_as_date(scope: &ScopedSchema, data: &Value) -> ValidationState {
let s = match data.as_str() {
Some(x) => x,
None => return scope.error("type", "expected `date`").into(),
};
let captures = match DATE_REGEX.captures(s) {
Some(x) => x,
_ => return scope.error("type", "expected `date`").into(),
};
// The regex guarantees each capture is all digits, so parse cannot fail.
let year: usize = (&captures[1]).parse().expect("invalid regex");
let month: usize = (&captures[2]).parse().expect("invalid regex");
let day: usize = (&captures[3]).parse().expect("invalid regex");
// Short-circuit keeps `days` from being called with an out-of-range month.
if month >= 1 && month <= 12 && day >= 1 && day <= days(year, month) {
ValidationState::new()
} else {
scope.error("type", "invalid `date` range").into()
}
}
/// Validates `data` as a string containing an RFC 3339 partial-time.
pub fn validate_as_time(scope: &ScopedSchema, data: &Value) -> ValidationState {
let s = match data.as_str() {
Some(x) => x,
None => return scope.error("type", "expected `time`").into(),
};
let captures = match TIME_REGEX.captures(s) {
Some(x) => x,
_ => return scope.error("type", "expected `time`").into(),
};
// The regex guarantees each capture is all digits, so parse cannot fail.
let hour: usize = (&captures[1]).parse().expect("invalid regex");
let min: usize = (&captures[2]).parse().expect("invalid regex");
let sec: usize = (&captures[3]).parse().expect("invalid regex");
// The second disjunct admits the 23:59:60 leap second.
if (hour <= 23 && min <= 59 && sec <= 59) || (hour == 23 && min == 59 && sec == 60) {
ValidationState::new()
} else {
scope.error("type", "invalid `time` range").into()
}
}
|
use heck::*;
use proc_macro2::Span;
use quote::{quote, ToTokens};
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{braced, parenthesized, parse_macro_input, token, Ident, Lifetime, Path, PathArguments, PathSegment, Token, TraitBound, TraitBoundModifier, Type, TypeParamBound, TypeReference, TypeTraitObject};
use std::iter::FromIterator;
use crate::on::*;
use crate::maybe::Maybe;
/// Parses the macro input as an [`AbleTo`] spec and expands it into the
/// generated trait/impl code.
pub fn make(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let parsed = parse_macro_input!(item as AbleTo);
    let t = quote!(#parsed);
    // Removed a leftover `dbg!(format!("{:#}", t))` that dumped the generated
    // code to stderr on every macro expansion.
    proc_macro::TokenStream::from(t)
}
// Parsed form of the macro input. Grammar (all parts after the name optional):
//   Name [ (ParamTy, ...) ] [ : Extend + ... ] [ { custom items } ] [ -> Ret | (Rets...) ]
pub(crate) struct AbleTo {
name: Ident,
_paren: Option<token::Paren>, // parens around `params`, when present
params: Option<Punctuated<Type, Token![,]>>,
_colon: Option<Token![:]>, // `:` introducing `extends`, when present
extends: Option<Punctuated<Ident, Token![+]>>,
_brace: Option<token::Brace>, // braces around `custom`, when present
custom: Option<proc_macro2::TokenStream>,
ret: OnReturnParams,
}
impl Parse for AbleTo {
// NOTE: the struct-literal initializers below run top to bottom and each one
// consumes tokens from `input`; side effects on `params` / `custom` /
// `extends_present` feed later fields, so the field order is load-bearing.
fn parse(input: ParseStream) -> Result<Self> {
let mut extends_present = false;
let mut params = None;
let mut custom = None;
Ok(Self {
name: input.parse()?,
_paren: {
// Optional `(Type, ...)` parameter list.
let lookahead = input.lookahead1();
if lookahead.peek(token::Paren) {
let content;
let paren = parenthesized!(content in input);
params = Some(content);
Some(paren)
} else {
None
}
},
params: params.map(|content| content.parse_terminated(Type::parse).unwrap()),
_colon: {
// Optional `:` marking the start of the extends clause.
let lookahead = input.lookahead1();
if lookahead.peek(Token![:]) {
extends_present = true;
Some(input.parse()?)
} else {
None
}
},
extends: if extends_present {
// `Ident + Ident + ...`, terminated by a brace block or end of input.
let mut extends: Punctuated<Ident, Token![+]> = Punctuated::new();
loop {
extends.push_value(input.parse()?);
if input.peek(token::Brace) || input.is_empty() {
break;
}
extends.push_punct(input.parse()?);
if input.peek(token::Brace) || input.is_empty() {
break;
}
}
Some(extends)
} else {
None
},
_brace: {
// Optional `{ ... }` of custom trait items, kept as raw tokens.
let lookahead = input.lookahead1();
if lookahead.peek(token::Brace) {
let content;
let brace = braced!(content in input);
custom = Some(content);
Some(brace)
} else {
None
}
},
custom: custom.map(|content| content.parse().unwrap()),
ret: {
// Optional `-> Ty` or `-> (Ty, ...)` return specification.
let lookahead = input.lookahead1();
if lookahead.peek(token::RArrow) {
let arrow = input.parse()?;
let lookahead = input.lookahead1();
if lookahead.peek(token::Paren) {
let content;
OnReturnParams::Multi(arrow, parenthesized!(content in input), content.parse_terminated(Type::parse)?)
} else {
OnReturnParams::Single(arrow, input.parse()?)
}
} else {
OnReturnParams::None
}
},
})
}
}
impl ToTokens for AbleTo {
// Expands one `AbleTo` spec into: a public `...able` trait, an internal
// `...ableInner` trait, an `On...` callback type and a `Maybe...` helper.
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
// Derive all generated identifiers from the spec name.
let ident = Ident::new(&self.name.to_string().to_camel_case(), Span::call_site());
//let a_ident = Ident::new(&format!("A{}", ident).to_camel_case(), Span::call_site());
//let ident_base = Ident::new(&format!("{}Base", ident).to_camel_case(), Span::call_site());
let ident_able = Ident::new(&format!("{}able", ident).to_camel_case(), Span::call_site());
let ident_able_inner = Ident::new(&format!("{}ableInner", ident).to_camel_case(), Span::call_site());
let ident_fn = Ident::new(&format!("{}", ident).to_snake_case(), Span::call_site());
let on_ident_fn = Ident::new(&format!("on_{}", ident).to_snake_case(), Span::call_site());
let as_into = &crate::as_into::AsInto { ident_camel: &ident_able };
let params = &self.params.as_ref().map(|punct| punct.iter().map(|i| i.clone()).collect::<Vec<_>>()).unwrap_or(vec![]);
// Positional argument names arg0, arg1, ... matching `params`.
let param_names = &(0..params.len()).map(|i| Ident::new(&format!("arg{}", i), Span::call_site())).collect::<Vec<_>>();
let extends = self.extends.as_ref().map(|punct| punct.iter().map(|i| i.clone()).collect::<Vec<_>>()).unwrap_or(vec![]);
let extends_inner = self
.extends
.as_ref()
.map(|punct| punct.iter().map(|i| Ident::new(&format!("{}Inner", i.to_string().to_camel_case()), Span::call_site())).collect::<Vec<_>>())
.unwrap_or(vec![]);
let custom = &self.custom;
// Render the return spec as `-> T`, `-> (T, ...)` or nothing.
let ret = match self.ret {
OnReturnParams::None => quote!{},
OnReturnParams::Single(arrow, ref ty) => quote!{
#arrow #ty
},
OnReturnParams::Multi(arrow, _, ref params) => quote!{
#arrow (#params)
}
};
let static_ = Lifetime::new("'static", Span::call_site());
let static_inner = Lifetime::new("'static", Span::call_site());
// The `On...` callback descriptor; its first parameter is a `&mut dyn`
// trait object of the generated trait, built below as `tto`.
let mut on = crate::on::On {
name: ident.clone(),
paren: token::Paren { span: Span::call_site() },
//ident_owner_camel: ident_able.clone(),
params: Punctuated::new(),
ret: self.ret.clone(),
default_ret: Some(Ident::new("true", Span::call_site())),
};
// Hand-assembled syn AST for `&mut dyn <IdentAble>`.
let tto = TypeReference {
and_token: token::And { spans: [Span::call_site()] },
lifetime: None,
mutability: Some(token::Mut { span: Span::call_site() }),
elem: Box::new(Type::TraitObject(TypeTraitObject {
dyn_token: Some(token::Dyn { span: Span::call_site() }),
bounds: Punctuated::from_iter(
vec![TypeParamBound::Trait(TraitBound {
paren_token: None,
modifier: TraitBoundModifier::None,
lifetimes: None,
path: Path {
leading_colon: None,
segments: Punctuated::from_iter(
vec![PathSegment {
ident: ident_able.clone(),
arguments: PathArguments::None,
}]
.into_iter(),
),
},
})]
.into_iter(),
),
})),
};
on.params.push(Type::Reference(tto));
if let Some(ref params) = self.params {
params.iter().for_each(|i| on.params.push(i.clone()));
}
let maybe = Maybe {
name: ident_able.clone()
};
let on_ident = Ident::new(&format!("On{}", ident).to_camel_case(), Span::call_site());
// Final expansion: public trait, inner trait, callback type, maybe-helper.
let expr = quote! {
pub trait #ident_able: #static_ + AsAny #(+#extends)*{
fn #ident_fn(&mut self, #(#param_names: #params,)* skip_callbacks: bool) #ret;
fn #on_ident_fn(&mut self, callback: Option<#on_ident>);
#custom
#as_into
}
pub trait #ident_able_inner: #(#extends_inner+)* #static_inner {
fn #ident_fn(&mut self, #(#param_names: #params,)* skip_callbacks: bool) #ret;
fn #on_ident_fn(&mut self, callback: Option<#on_ident>);
#custom
}
#on
#maybe
/*
#[repr(C)]
pub struct #ident_base {
pub callback: Option<#on_ident>
}
#[repr(C)]
pub struct #a_ident<T: #ident_able_inner> {
pub base: #ident_base,
pub inner: T,
}*/
};
expr.to_tokens(tokens);
}
}
|
use puck_core::Vec3f;
use cgmath::Zero;
use alto;
use std::fs;
use std::path::{PathBuf, Path};
pub mod engine;
pub mod load;
pub mod context;
pub mod source;
pub mod worker;
pub use self::engine::*;
pub use self::worker::*;
// Name used to look up a loaded sound.
pub type SoundName = String;
// Id handed out per sound event.
pub type SoundEventId = u64;
pub type Gain = f32;
pub type DistanceModel = alto::DistanceModel;
// A request to play a named sound with per-event playback parameters.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SoundEvent {
pub name: String,
pub position: Vec3f,
pub gain: f32,
pub pitch: f32,
pub attenuation: f32, // unsure if this should be bool for relative, or an optional rolloff factor (within the context distance model)
pub loop_sound: bool,
}
// Position/orientation of the audio listener fed to the sound provider.
#[derive(Copy, Clone, PartialEq, Debug, Serialize, Deserialize)]
pub struct Listener {
pub position: Vec3f,
pub velocity: Vec3f,
pub orientation_up: Vec3f,
pub orientation_forward: Vec3f,
}
impl Listener {
    /// A listener at the origin; kept for backward compatibility and now
    /// delegating to the `Default` trait impl below.
    pub fn default() -> Listener {
        Default::default()
    }
}

/// Stationary listener at the origin, facing -Z with +Y up.
impl Default for Listener {
    fn default() -> Listener {
        Listener {
            position: Vec3f::zero(),
            velocity: Vec3f::zero(),
            orientation_up: Vec3f::unit_y(),
            orientation_forward: Vec3f::unit_z() * -1.0,
        }
    }
}
// Result aliases pairing each operation with its error type.
pub type LoadResult<T> = Result<T, errors::LoadError>;
pub type PreloadResult<T> = Result<T, errors::PreloadError>;
pub type SoundEventResult<T> = Result<T, errors::SoundEventError>;
pub type SoundProviderResult<T> = Result<T, alto::AltoError>;
pub type WorkerResult<T> = Result<T, alto::AltoError>;
pub fn read_directory_paths(path:&Path) -> PreloadResult<Vec<PathBuf>> {
use self::errors::{LoadError, LoadErrorReason};
let mut paths : Vec<PathBuf> = Vec::new();
for entry in try!(fs::read_dir(path).map_err(|io| LoadError {path: path.to_path_buf(), reason: LoadErrorReason::FileReadError(io) })) {
let entry = try!(entry.map_err(|io| LoadError {path: path.to_path_buf(), reason: LoadErrorReason::FileReadError(io) }));
let file_path = entry.path().to_path_buf();
paths.push(file_path);
}
Ok(paths)
}
// Error types for loading, preloading and playing sounds, with `From`
// conversions so `?` can cross the layers.
pub mod errors {
use alto;
use std::path::PathBuf;
use lewton;
use std::io;
// Failure while warming the sound cache up front.
#[derive(Debug)]
pub enum PreloadError {
LoadError(LoadError),
SoundProviderError(alto::AltoError), // this is a dupe at this point ... hrm
}
impl From<LoadError> for PreloadError {
fn from(val: LoadError) -> PreloadError {
PreloadError::LoadError(val)
}
}
impl From<alto::AltoError> for PreloadError {
fn from(val: alto::AltoError) -> PreloadError {
PreloadError::SoundProviderError(val)
}
}
// A load failure tagged with the file it occurred on.
#[derive(Debug)]
pub struct LoadError {
pub path: PathBuf,
pub reason: LoadErrorReason,
}
#[derive(Debug)]
pub enum LoadErrorReason {
FileDoesntExist,
FileReadError(io::Error),
ReadOggError(lewton::VorbisError),
TooManyChannels,
}
// Failure while servicing a single sound event at runtime.
#[derive(Debug)]
pub enum SoundEventError {
LoadSoundError(LoadError), // recoverable
SoundProviderError(alto::AltoError),
NoFreeStaticSource,
NoFreeStreamingSource,
NoSounds,
}
impl From<LoadError> for SoundEventError {
fn from(val: LoadError) -> SoundEventError {
SoundEventError::LoadSoundError(val)
}
}
impl From<alto::AltoError> for SoundEventError {
fn from(val: alto::AltoError) -> SoundEventError {
SoundEventError::SoundProviderError(val)
}
}
// Preload failures map variant-for-variant onto event failures.
impl From<PreloadError> for SoundEventError {
fn from(val: PreloadError) -> SoundEventError {
use self::SoundEventError::*;
match val {
PreloadError::LoadError(le) => LoadSoundError(le),
PreloadError::SoundProviderError(ae) => SoundProviderError(ae),
}
}
}
}
use async_graphql::*;
use futures::{Stream, StreamExt};
#[async_std::test]
// End-to-end check of subscription streams: a scalar-valued field and an
// object-valued field, each yielding one JSON payload per item and then None.
pub async fn test_subscription() {
struct QueryRoot;
#[SimpleObject]
struct Event {
#[field]
a: i32,
#[field]
b: i32,
}
#[Object]
impl QueryRoot {}
struct SubscriptionRoot;
#[Subscription]
impl SubscriptionRoot {
#[field]
async fn values(&self, start: i32, end: i32) -> impl Stream<Item = i32> {
futures::stream::iter(start..end)
}
#[field]
async fn events(&self, start: i32, end: i32) -> impl Stream<Item = Event> {
futures::stream::iter((start..end).map(|n| Event { a: n, b: n * 10 }))
}
}
let schema = Schema::new(QueryRoot, EmptyMutation, SubscriptionRoot);
{
// Scalar stream: one {"values": i} payload per item in [10, 20).
let mut stream = schema
.create_subscription_stream(
"subscription { values(start: 10, end: 20) }",
None,
Default::default(),
)
.await
.unwrap();
for i in 10..20 {
assert_eq!(
Some(serde_json::json!({ "values": i })),
stream.next().await
);
}
assert!(stream.next().await.is_none());
}
{
// Object stream: the selection set {a b} shapes each payload.
let mut stream = schema
.create_subscription_stream(
"subscription { events(start: 10, end: 20) { a b } }",
None,
Default::default(),
)
.await
.unwrap();
for i in 10..20 {
assert_eq!(
Some(serde_json::json!({ "events": {"a": i, "b": i * 10} })),
stream.next().await
);
}
assert!(stream.next().await.is_none());
}
}
#[async_std::test]
// Verifies that SimpleBroker routes published events to the subscription
// stream of the matching event type only, preserving publish order per type.
pub async fn test_simple_broker() {
struct QueryRoot;
#[SimpleObject]
#[derive(Clone)]
struct Event1 {
#[field]
value: i32,
}
#[SimpleObject]
#[derive(Clone)]
struct Event2 {
#[field]
value: i32,
}
#[Object]
impl QueryRoot {}
struct SubscriptionRoot;
#[Subscription]
impl SubscriptionRoot {
#[field]
async fn events1(&self) -> impl Stream<Item = Event1> {
SimpleBroker::<Event1>::subscribe()
}
#[field]
async fn events2(&self) -> impl Stream<Item = Event2> {
SimpleBroker::<Event2>::subscribe()
}
}
let schema = Schema::new(QueryRoot, EmptyMutation, SubscriptionRoot);
let mut stream1 = schema
.create_subscription_stream(
"subscription { events1 { value } }",
None,
Default::default(),
)
.await
.unwrap();
let mut stream2 = schema
.create_subscription_stream(
"subscription { events2 { value } }",
None,
Default::default(),
)
.await
.unwrap();
// NOTE(review): events are published before the streams are first polled —
// this assumes the broker buffers events for existing subscribers; confirm.
SimpleBroker::publish(Event1 { value: 10 });
SimpleBroker::publish(Event2 { value: 88 });
SimpleBroker::publish(Event1 { value: 15 });
SimpleBroker::publish(Event2 { value: 99 });
assert_eq!(
stream1.next().await,
Some(serde_json::json!({ "events1": {"value": 10} }))
);
assert_eq!(
stream1.next().await,
Some(serde_json::json!({ "events1": {"value": 15} }))
);
assert_eq!(
stream2.next().await,
Some(serde_json::json!({ "events2": {"value": 88} }))
);
assert_eq!(
stream2.next().await,
Some(serde_json::json!({ "events2": {"value": 99} }))
);
}
|
use std::cmp::{min, max};
use std::fs;
/// Returns every asteroid ('#') strictly between `a` and `b` that lies on the
/// segment a-b (i.e. blocks the line of sight).
///
/// A point c is on the segment iff |ac| + |cb| == |ab|. Working with squared
/// distances, that is a² + b² + 2·sqrt(a²·b²) == ab² — checked against a
/// small epsilon for floating-point error.
///
/// Idiom cleanup: takes a slice instead of `&Vec` (callers coerce) and drops
/// the trailing `return`.
fn find_crossing_points(map: &[Vec<char>], a: (i32, i32), b: (i32, i32)) -> Vec<(i32, i32)> {
    let ((x1, y1), (x2, y2)) = (a, b);
    let ab_dist_sq = ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1)) as f32;
    let mut points = vec!();
    // Only cells inside the bounding box of a and b can lie on the segment.
    for x in min(x1, x2)..=max(x1, x2) {
        for y in min(y1, y2)..=max(y1, y2) {
            let c = (x, y);
            if map[y as usize][x as usize] == '#' && c != a && c != b {
                let a_dist_sq = ((x1 - x) * (x1 - x) + (y1 - y) * (y1 - y)) as f32;
                let b_dist_sq = ((x2 - x) * (x2 - x) + (y2 - y) * (y2 - y)) as f32;
                if (a_dist_sq + b_dist_sq + 2_f32 * (a_dist_sq * b_dist_sq).sqrt() - ab_dist_sq).abs() <= 1E-6 {
                    points.push(c);
                }
            }
        }
    }
    points
}
/// Counts the asteroids with an unobstructed line of sight from `station`.
///
/// Idiom cleanup: `is_empty()` instead of `len() == 0`, merged conditions and
/// no trailing `return`. The `&Vec` parameter is kept to match the sibling
/// `find_crossing_points` signature.
fn calc_station_score(map: &Vec<Vec<char>>, station: (i32, i32)) -> u32 {
    let mut count = 0;
    let (station_x, station_y) = station;
    for i in 0..map[0].len() {
        for j in 0..map.len() {
            let asteroid = (i as i32, j as i32);
            let (x, y) = asteroid;
            // Count asteroids (skipping the station itself) whose segment to
            // the station crosses no other asteroid.
            if map[j][i] == '#'
                && (x != station_x || y != station_y)
                && find_crossing_points(map, station, asteroid).is_empty()
            {
                count += 1;
            }
        }
    }
    count
}
fn main() {
let map = fs::read_to_string("input.txt")
.unwrap()
.split("\r\n")
.map(|line| line.chars().collect::<Vec<char>>())
.collect::<Vec<Vec<char>>>();
let mut max_asteroids = 0;
let mut max_asteroids_station = (0, 0);
for i in 0..map[0].len() {
for j in 0..map.len() {
if map[j][i] == '#' {
let station_coord = (i as i32, j as i32);
println!("*** Possible station: {:?} ***", station_coord);
let score = calc_station_score(&map, station_coord);
println!("Can detect {} asteroids", score);
if score > max_asteroids {
max_asteroids = score;
max_asteroids_station = station_coord;
}
}
}
}
println!("{:?} can detect {} asteroids", max_asteroids_station, max_asteroids);
} |
/// Prints the greeting forever; the unbounded counter never terminates.
fn main() {
    (1..).for_each(|_: i32| println!("Hello, world!"));
}
|
use crate::{
node::{Node, Tickable},
status::Status,
};
/// A node that will repeat its child a specific number of times, possibly
/// infinite.
///
/// A repeat node will report that it is running until its child node has been
/// run to completion the specified number of times, upon which it will be
/// considered successful. This could also be an infinite number, in which case
/// this node will always be considered running.
///
/// # State
///
/// **Initialized:** Before being ticked after either being reset or created.
///
/// **Running:** Until the child node has been reset the specified number of
/// times. If there is no limit, always.
///
/// **Succeeded:** Once the child has been reset the specified number of times.
/// If there is no limit, never.
///
/// **Failed:** Never.
///
/// # Children
///
/// One. It is ticked or reset whenever the repeat node is ticked or reset. It
/// also may be reset multiple times before the repeat node is reset or
/// completed.
///
/// # Examples
///
/// Force the child to be reset a specific number of times:
///
/// ```
/// # use aspen::std_nodes::*;
/// # use aspen::Status;
/// # use aspen::node::Tickable;
/// let run_limit = 5;
/// let child = AlwaysFail::new();
/// let mut node = Repeat::with_limit(run_limit, child);
///
/// // Subtract one since there is a run in the assert
/// for _ in 0..(run_limit - 1) {
/// assert_eq!(node.tick(&mut ()), Status::Running);
/// }
/// assert_eq!(node.tick(&mut ()), Status::Succeeded);
/// ```
pub struct Repeat<'a, W> {
/// Child node.
child: Node<'a, W>,
/// Optional number of times to do the reset; `None` means repeat forever.
attempt_limit: Option<u32>,
/// Number of times the child has been reset.
attempts: u32,
}
impl<'a, W> Repeat<'a, W>
where
W: 'a,
{
/// Creates a new Repeat node that will repeat forever.
pub fn new(child: Node<'a, W>) -> Node<'a, W> {
let internals = Repeat {
child,
attempt_limit: None,
attempts: 0,
};
Node::new(internals)
}
/// Creates a new Repeat node that will only repeat a limited number of
/// times.
///
/// The limit specifies the number of times this node can be run.
/// NOTE(review): an earlier doc claimed a limit of zero instantly
/// succeeds, but `tick` still runs the child once and treats zero the
/// same as a limit of one — confirm which behavior is intended.
pub fn with_limit(limit: u32, child: Node<'a, W>) -> Node<'a, W> {
let internals = Repeat {
child,
attempt_limit: Some(limit),
attempts: 0,
};
Node::new(internals)
}
}
impl<'a, W> Tickable<W> for Repeat<'a, W> {
    /// Ticks the child, reporting `Running` until the child has completed
    /// `attempt_limit` times (never, when there is no limit).
    ///
    /// Idiom cleanup: the `is_none()` early-return plus `unwrap()` pair is
    /// replaced by a single `match` on the limit.
    fn tick(&mut self, world: &mut W) -> Status {
        match self.attempt_limit {
            // Infinite repeat: always tick the child, never finish.
            None => {
                self.child.tick(world);
                Status::Running
            }
            Some(limit) => {
                // Count each completed child run; succeed once the limit is
                // reached, otherwise keep running.
                if self.child.tick(world).is_done() {
                    self.attempts += 1;
                    if self.attempts < limit {
                        Status::Running
                    } else {
                        Status::Succeeded
                    }
                } else {
                    Status::Running
                }
            }
        }
    }

    /// Resets the attempt counter and the child.
    fn reset(&mut self) {
        self.attempts = 0;
        self.child.reset();
    }

    fn children(&self) -> Vec<&Node<W>> {
        vec![&self.child]
    }

    /// Returns the string "Repeat".
    fn type_name(&self) -> &'static str {
        "Repeat"
    }
}
/// Convenience macro for creating Repeat nodes.
///
/// # Examples
///
/// ```
/// # #[macro_use] extern crate aspen;
/// # fn main() {
/// let repeat = Repeat! {
/// Condition!{ |&(a, b): &(u32, u32)| a < b }
/// };
/// let limited_repeat = Repeat! { 12,
/// Condition!{ |&(a, b): &(u32, u32)| a < b }
/// };
/// # }
/// ```
#[macro_export]
macro_rules! Repeat {
// Infinite form: Repeat! { child }
( $e:expr ) => {
$crate::std_nodes::Repeat::new($e)
};
// Limited form: Repeat! { count, child }
( $c:expr, $e:expr ) => {
$crate::std_nodes::Repeat::with_limit($c, $e)
};
}
#[cfg(test)]
mod tests {
use crate::{
node::Tickable,
status::Status,
std_nodes::{CountedTick, Repeat},
};
// Ticks a limited Repeat to completion: Running for the first limit - 1
// ticks, Succeeded on the final one.
#[test]
fn repeat_finite() {
// No good way to test the infinite one
let limit = 5;
let child = CountedTick::new(Status::Failed, limit, true);
let mut node = Repeat::with_limit(limit, child);
for _ in 0..(limit - 1) {
assert_eq!(node.tick(&mut ()), Status::Running);
}
let status = node.tick(&mut ());
// Dropped before asserting — presumably CountedTick checks its expected
// tick count on drop; confirm against CountedTick's implementation.
drop(node);
assert_eq!(status, Status::Succeeded);
}
}
|
use alloc::string::String;
use core::str::FromStr;
use crate::Client;
use chain::names::{AccountName, ActionName, ParseNameError};
use rpc_codegen::Fetch;
use serde::{Deserialize, Serialize};
// Request body for `v1/chain/abi_json_to_bin`: serializes `args` against the
// ABI of `code`'s `action` into binary form.
#[derive(Fetch, Clone, Debug, Deserialize, Serialize)]
#[api(path="v1/chain/abi_json_to_bin", http_method="POST", returns="GetAbiJsonToBin")]
pub struct GetAbiJsonToBinParams<Args: serde::Serialize>
{
code: AccountName,
action: ActionName,
args: Args,
}
/// Builds the request parameters for `abi_json_to_bin`, translating the
/// `Actions` variant into its on-chain action name.
pub fn get_abi_json_to_bin<Args: serde::Serialize>(
    code: impl Into<AccountName>,
    action: Actions,
    args: Args
) -> Result<GetAbiJsonToBinParams<Args>, ParseNameError>
{
    // Map the variant onto its action-name string, then parse it once.
    let action_str = match action {
        Actions::Close => "close",
        Actions::Create => "create",
        Actions::Transfer => "transfer",
        Actions::Open => "open",
        Actions::Retire => "retire",
        Actions::Issue => "issue",
    };
    let action = ActionName::from_str(action_str)?;
    Ok(GetAbiJsonToBinParams { code: code.into(), action, args })
}
// The six token-contract actions supported by `get_abi_json_to_bin`.
pub enum Actions {
Close,
Create,
Transfer,
Retire,
Open,
Issue
}
// Argument payload for the `transfer` action.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TransferAction {
pub from: AccountName,
pub to: AccountName,
pub quantity: String,
pub memo: String
}
// Argument payload for the `close` action.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct CloseAction {
pub owner: AccountName,
pub symbol: String
}
// Argument payload for the `create` action.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct CreateAction {
pub issuer: AccountName,
pub maximum_supply: String
}
// Argument payload for the `issue` action.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct IssueAction {
pub to: AccountName,
pub quantity: String,
pub memo: String
}
// Argument payload for the `open` action.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct OpenAction {
pub owner: AccountName,
pub symbol: String,
pub ram_payer: AccountName
}
// Argument payload for the `retire` action.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct RetireAction {
pub quantity: String,
pub memo: String
}
// Response of `abi_json_to_bin`: the hex-encoded binary arguments.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct GetAbiJsonToBin {
pub binargs: String
}
|
use std::cell::RefCell;
use std::cmp::*;
use std::rc::Rc;
use crate::treenode::TreeNode;
// Shorthand for an optional / required shared tree-node handle.
type ONode = Option<Rc<RefCell<TreeNode>>>;
type RNode = Rc<RefCell<TreeNode>>;
pub fn rob(root: ONode) -> i32 {
fn search(root: RNode) -> (i32, i32) {
let val = root.borrow().val;
let left_max = root.borrow().left.clone().map_or((0, 0), |left| search(left));
let right_max = root.borrow().right.clone().map_or((0, 0), |right| search(right));
let nomask = val + left_max.0 + right_max.0;
let mask = left_max.1 + right_max.1;
(mask, max(nomask, mask))
}
root.map_or(0, |r| {
let (a, b) = search(r);
max(a, b)
})
} |
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Base crypto library:
//!
//! # dsa
//! signature algorithm:
//! - ed265519
//! - sm2
//! - custom algorithm provided by a dylib
//!
//! # hash
//! hash Algorithm:
//! - blake2b_160
//! - blake2b_256
//! - blake2b_512
//! - sm3
//! - custom algorithm provided by a dylib
//!
//! # address
//! account address format algorithm:
//! - blake2b_160
//! - original_160
//! - original_256
//! - custom algorithm provided by a dylib
use std::convert::TryFrom;
use primitives::errors::CommonError;
pub mod address;
pub mod dsa;
pub mod errors;
pub mod hash;
// Supported hash digest lengths, convertible to/from the byte count.
#[derive(PartialEq, Debug, Clone)]
pub enum HashLength {
/// 160 bits
HashLength20,
/// 256 bits
HashLength32,
/// 512 bits
HashLength64,
}
// Digest length in bytes.
impl From<HashLength> for usize {
fn from(v: HashLength) -> Self {
match v {
HashLength::HashLength20 => 20,
HashLength::HashLength32 => 32,
HashLength::HashLength64 => 64,
}
}
}
// Byte count back to the enum; unknown sizes become InvalidHashLength.
impl TryFrom<usize> for HashLength {
type Error = CommonError;
#[inline]
fn try_from(i: usize) -> Result<Self, Self::Error> {
match i {
20 => Ok(HashLength::HashLength20),
32 => Ok(HashLength::HashLength32),
64 => Ok(HashLength::HashLength64),
other => Err(errors::ErrorKind::InvalidHashLength(other).into()),
}
}
}
// Supported signature-scheme key/signature sizes, convertible to/from the
// (secret key, public key, signature) byte-length triple.
#[derive(PartialEq, Debug, Clone)]
pub enum DsaLength {
/// secret key 32, public key 32, signature 64
DsaLength32_32_64,
/// secret key 32, public key 65, signature 64
DsaLength32_65_64,
}
// (secret key, public key, signature) lengths in bytes.
impl From<DsaLength> for (usize, usize, usize) {
fn from(v: DsaLength) -> Self {
match v {
DsaLength::DsaLength32_32_64 => (32, 32, 64),
DsaLength::DsaLength32_65_64 => (32, 65, 64),
}
}
}
// Triple back to the enum; unknown combinations become InvalidDsaLength.
impl TryFrom<(usize, usize, usize)> for DsaLength {
type Error = CommonError;
#[inline]
fn try_from(i: (usize, usize, usize)) -> Result<Self, Self::Error> {
match i {
(32, 32, 64) => Ok(DsaLength::DsaLength32_32_64),
(32, 65, 64) => Ok(DsaLength::DsaLength32_65_64),
other => Err(errors::ErrorKind::InvalidDsaLength(other).into()),
}
}
}
// Supported account-address lengths, convertible to/from the byte count.
#[derive(PartialEq, Debug, Clone)]
pub enum AddressLength {
/// 160 bits
AddressLength20,
/// 256 bits
AddressLength32,
}
// Address length in bytes.
impl From<AddressLength> for usize {
fn from(v: AddressLength) -> Self {
match v {
AddressLength::AddressLength20 => 20,
AddressLength::AddressLength32 => 32,
}
}
}
// Byte count back to the enum; unknown sizes become InvalidAddressLength.
impl TryFrom<usize> for AddressLength {
type Error = CommonError;
#[inline]
fn try_from(i: usize) -> Result<Self, Self::Error> {
match i {
20 => Ok(AddressLength::AddressLength20),
32 => Ok(AddressLength::AddressLength32),
other => Err(errors::ErrorKind::InvalidAddressLength(other).into()),
}
}
}
|
extern crate hyper;
extern crate futures;
extern crate tokio_core;
use std::str;
use futures::{Future, Stream};
use hyper::Client;
use tokio_core::reactor::Handle;
/// Fetches the raw MTA service-status document, resolving to its body as a
/// UTF-8 string (or "{}" when the body is not valid UTF-8).
pub fn get_mta_status(handle: &Handle) -> Box<Future<Item = String, Error = hyper::Error>> {
// This is not a txt file, but actually a URL which returns a XML response with HTML
// embedded inside.. yuk. The purpose of this project is to take that XML/HTML
// response and convert it into a nice JSON response :)
let uri: hyper::Uri = "http://web.mta.info/status/serviceStatus.txt"
.parse()
.unwrap();
let fut_resp = Client::new(handle).get(uri)
//todo check if this succeeds with `then`
.and_then(|resp| {
// Buffer the whole body, then decode it; invalid UTF-8 degrades to
// an empty JSON object rather than failing the future.
resp.body().concat2().map(move |chunk_body: hyper::Chunk| {
match str::from_utf8(&chunk_body) {
Ok(v) => v.to_string(),
Err(_) => "{}".to_string(),
}
})
});
Box::new(fut_resp)
}
|
pub mod publisher;
|
use crate::{error::ProcessingError, fuzzers};
use globset::{Glob, GlobMatcher};
use std::{
fs::{read_dir, DirEntry},
path::Path,
};
// Convenience wrapper: builds the selection glob from the fuzzer/target/index
// filters, then lists the matching run directories.
pub(crate) fn read_runs(
directory: &Path,
fuzzers: &[fuzzers::Fuzzer],
targets: &[String],
indices: &[String],
) -> Result<Vec<DirEntry>, ProcessingError> {
let glob = compile_glob(directory, fuzzers, targets, indices)?;
read_dir_by_glob(directory, glob)
}
/// Lists the entries of `directory` whose path matches `glob`.
///
/// Entries that fail to read are silently skipped, exactly as before.
pub(crate) fn read_dir_by_glob(
    directory: &Path,
    glob: GlobMatcher,
) -> Result<Vec<DirEntry>, ProcessingError> {
    let entries = read_dir(directory)?;
    Ok(entries
        .filter_map(|entry| entry.ok().filter(|e| glob.is_match(e.path())))
        .collect())
}
// Builds and compiles the glob that selects runs by fuzzer, target and index.
pub(crate) fn compile_glob(
directory: &Path,
fuzzers: &[fuzzers::Fuzzer],
targets: &[String],
indices: &[String],
) -> Result<GlobMatcher, ProcessingError> {
// Glob pattern for collecting all items that are selected for processing
let pattern = create_pattern(directory, fuzzers, targets, indices)?;
Ok(Glob::new(&pattern)?.compile_matcher())
}
// Formats the selection pattern `<dir>/<fuzzers>*-<targets>*-<indices>*`,
// where each filter collapses to "*" (empty) or "{a,b,...}" (several items).
pub(crate) fn create_pattern(
directory: &Path,
fuzzers: &[fuzzers::Fuzzer],
targets: &[String],
indices: &[String],
) -> Result<String, ProcessingError> {
Ok(format!(
"{}/{}*-{}*-{}*",
// Non-UTF-8 directory names cannot be turned into a glob string.
directory
.to_str()
.ok_or_else(|| ProcessingError::InvalidDirectoryName(directory.to_path_buf()))?,
as_pattern(fuzzers),
as_pattern(targets),
as_pattern(indices)
))
}
/// Convert a slice of items into a glob pattern.
///
/// - empty slice    -> `*`         (match anything)
/// - single item    -> that item
/// - several items  -> `{a,b,c}`   (glob alternation)
fn as_pattern(items: &[impl ToString]) -> String {
    match items {
        [] => "*".to_string(),
        [only] => only.to_string(),
        many => {
            // `join` replaces the hand-rolled trailing-comma bookkeeping.
            let parts: Vec<String> = many.iter().map(|item| item.to_string()).collect();
            format!("{{{}}}", parts.join(","))
        }
    }
}
|
use wasm_bindgen::prelude::*;
// Bindings for the bundled JSZip library shipped at src/js_zip/js_zip.js.
#[wasm_bindgen(raw_module = "../src/js_zip/js_zip.js")]
extern "C" {
pub type JSZip;
#[wasm_bindgen(constructor)]
pub fn new() -> JSZip;
// Parses archive bytes; the promise resolves to the populated JSZip.
#[wasm_bindgen(method, js_name = "loadAsync")]
pub fn load_async(this: &JSZip, data: &JsValue) -> js_sys::Promise;
// Looks one entry up by name; None when the archive has no such file.
#[wasm_bindgen(method)]
pub fn file(this: &JSZip, name: &str) -> Option<ZipObject>;
#[wasm_bindgen(method, getter)]
pub fn files(this: &JSZip) -> js_sys::Object;
}
// A single entry inside a JSZip archive.
#[wasm_bindgen]
extern "C" {
pub type ZipObject;
// JSZip's `async(type)`: resolves to the entry content in the requested
// representation (renamed because `async` is a Rust keyword).
#[wasm_bindgen(method, js_name = "async")]
pub fn load_async(this: &ZipObject, type_: &str) -> js_sys::Promise;
}
|
use std::vec::Vec;
/// Every divisor of `number` in ascending order, including 1 and the number
/// itself. `factors(0)` yields an empty vector.
pub fn factors(number: u64) -> Vec<u64> {
    let mut divisors = Vec::new();
    for candidate in 1..=number {
        if number % candidate == 0 {
            divisors.push(candidate);
        }
    }
    divisors
}
/// Prime factorization of `number` in non-decreasing order.
/// Both `prime_factors(0)` and `prime_factors(1)` are empty.
pub fn prime_factors(number: u64) -> Vec<u64> {
    let mut remaining = number;
    let mut primes = Vec::new();
    // Strip every factor of two first; afterwards `remaining` is odd.
    while remaining > 1 && remaining % 2 == 0 {
        primes.push(2);
        remaining /= 2;
    }
    // Trial-divide by odd candidates. The candidate never needs to restart:
    // once a factor is fully divided out, no smaller factor can remain.
    let mut candidate = 3;
    while remaining > 1 {
        if remaining % candidate == 0 {
            primes.push(candidate);
            remaining /= candidate;
        } else {
            candidate += 2;
        }
    }
    primes
}
/// A perfect number equals the sum of its proper divisors (all divisors
/// except the number itself), e.g. 6 = 1 + 2 + 3.
pub fn is_perfect(number: u64) -> bool {
    let proper_sum: u64 = factors(number)
        .into_iter()
        .filter(|&divisor| divisor != number)
        .sum();
    proper_sum == number
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn factors_test() {
        for num in 1..100 {
            let factors = factors(num);
            // Every list of factors is 1 or more
            assert_ne!(factors.len(), 0);
            // Every list of factors starts at 1
            assert_eq!(factors[0], 1);
            // If a number is even, the second number in its list of factors is 2
            if num % 2 == 0 {
                assert_eq!(factors[1], 2);
            }
            // Some numbers have a specific number of factors
            match num {
                1 | 2 => assert_eq!(factors.len() as u64, num), // 1 has 1 factor (1) and 2 has 2 factors (1, 2)
                3 | 5 | 7 | 11 | 13 | 17 | 19 | 23 | 29 => assert_eq!(factors.len(), 2), // primes have two factors
                _ => {}
            }
        }
    }

    #[test]
    fn prime_factors_test() {
        for num in 1..100 {
            let prime_factors = prime_factors(num);
            // Every number that is not 1 has at least 1 prime factor.
            // 1 has 0 prime factors
            if num != 1 {
                assert_ne!(prime_factors.len(), 0);
            } else {
                assert_eq!(prime_factors.len(), 0);
            }
            // All even numbers should have their first prime factor as 2
            if num % 2 == 0 {
                assert_eq!(prime_factors[0], 2);
            }
            // If a number is prime, it should be its own only factor.
            // Consistency fix: 19 was missing from this list even though
            // factors_test above treats it as prime.
            match num {
                3 | 5 | 7 | 11 | 13 | 17 | 19 | 23 | 29 => {
                    assert_eq!(prime_factors.len(), 1);
                    assert_eq!(prime_factors[0], num);
                }
                _ => {}
            }
        }
    }

    #[test]
    fn is_perfect_test() {
        for num in 1..100 {
            let perfect = is_perfect(num);
            match num {
                // 6 and 28 are the only perfect numbers below 100.
                6 | 28 => {
                    assert!(perfect);
                }
                _ => {
                    assert!(!perfect);
                }
            }
        }
    }
}
|
use bw;
use save::{LoadError, SaveError};
use sprites;
use units;
/// Maps live engine pointers to stable numeric ids while saving, so that
/// pointer-valued fields can be written into a save file.
pub trait SaveEntityPointer {
    /// The pointee type this mapper identifies (implementation-defined).
    type Pointer;
    /// Translate `pointer` into its save-file id. Failure conditions are up
    /// to the implementor and are surfaced as `SaveError`.
    fn pointer_to_id(&self, pointer: *mut Self::Pointer) -> Result<u32, SaveError>;
}
/// Inverse of `SaveEntityPointer`: resolves save-file ids back into live
/// engine pointers while loading.
pub trait LoadEntityPointer {
    /// The pointee type this mapper resolves (implementation-defined).
    type Pointer;
    /// Translate a save-file `id` back into a pointer. Failure conditions
    /// are up to the implementor and are surfaced as `LoadError`.
    fn id_to_pointer(&self, id: u32) -> Result<*mut Self::Pointer, LoadError>;
}
/// Serde-friendly mirror of `bw::Entity` in which every pointer-valued field
/// has been replaced by a numeric id: `prev`/`next` via a
/// `SaveEntityPointer` mapping, `sprite` via the sprite-id mapping, and
/// `move_target_unit`/`target` via the unit-id mapping.
/// Field order mirrors `bw::Entity` (see `entity_serializable` below).
#[derive(Serialize, Deserialize)]
pub struct EntitySerializable {
    // Ids standing in for the prev/next entity pointers.
    prev: u32,
    next: u32,
    hitpoints: i32,
    // Id standing in for the sprite pointer.
    sprite: u32,
    move_target: bw::Point,
    // Unit id standing in for a unit reference.
    move_target_unit: u16,
    next_move_waypoint: bw::Point,
    unk_move_waypoint: bw::Point,
    flingy_flags: u8,
    facing_direction: u8,
    flingy_turn_speed: u8,
    movement_direction: u8,
    flingy_id: u16,
    unk_26: u8,
    flingy_movement_type: u8,
    position: bw::Point,
    exact_position: bw::Point32,
    flingy_top_speed: u32,
    current_speed: i32,
    next_speed: i32,
    speed: i32,
    speed2: i32,
    acceleration: u16,
    new_direction: u8,
    target_direction: u8,
    player: u8,
    order: u8,
    order_state: u8,
    order_signal: u8,
    order_fow_unit: u16,
    unused52: u16,
    order_timer: u8,
    ground_cooldown: u8,
    air_cooldown: u8,
    spell_cooldown: u8,
    order_target_pos: bw::Point,
    // Unit id standing in for a unit reference.
    target: u16,
}
/// Convert a raw in-game `bw::Entity` into its save-file representation.
///
/// Pointer-valued fields are replaced with stable ids: `prev`/`next` through
/// `save_pointer`, `sprite` through the sprite-id mapping, and the unit
/// references through `units::unit_to_id`. All plain value fields are copied
/// through unchanged.
///
/// # Safety
///
/// `entity` must be non-null and point to a valid, fully initialized
/// `bw::Entity`; this function dereferences it (`*entity`).
pub unsafe fn entity_serializable<C: SaveEntityPointer>(
    entity: *const bw::Entity,
    save_pointer: &C,
) -> Result<EntitySerializable, SaveError> {
    // Destructure the entire entity so that a field added to `bw::Entity`
    // later causes a compile error here instead of being silently dropped.
    let bw::Entity {
        prev,
        next,
        hitpoints,
        sprite,
        move_target,
        move_target_unit,
        next_move_waypoint,
        unk_move_waypoint,
        flingy_flags,
        facing_direction,
        flingy_turn_speed,
        movement_direction,
        flingy_id,
        unk_26,
        flingy_movement_type,
        position,
        exact_position,
        flingy_top_speed,
        current_speed,
        next_speed,
        speed,
        speed2,
        acceleration,
        new_direction,
        target_direction,
        player,
        order,
        order_state,
        order_signal,
        order_fow_unit,
        unused52,
        order_timer,
        ground_cooldown,
        air_cooldown,
        spell_cooldown,
        order_target_pos,
        target,
    } = *entity;
    Ok(EntitySerializable {
        // Linked-list neighbors become ids instead of raw pointers.
        prev: save_pointer.pointer_to_id(prev as *mut C::Pointer)?,
        next: save_pointer.pointer_to_id(next as *mut C::Pointer)?,
        hitpoints,
        sprite: sprites::sprite_to_id_current_mapping(sprite)?,
        move_target,
        move_target_unit: units::unit_to_id(move_target_unit),
        next_move_waypoint,
        unk_move_waypoint,
        flingy_flags,
        facing_direction,
        flingy_turn_speed,
        movement_direction,
        flingy_id,
        unk_26,
        flingy_movement_type,
        position,
        exact_position,
        flingy_top_speed,
        current_speed,
        next_speed,
        speed,
        speed2,
        acceleration,
        new_direction,
        target_direction,
        player,
        order,
        order_state,
        order_signal,
        order_fow_unit,
        unused52,
        order_timer,
        ground_cooldown,
        air_cooldown,
        spell_cooldown,
        order_target_pos,
        target: units::unit_to_id(target),
    })
}
/// Rebuild a raw `bw::Entity` from its save-file representation.
///
/// Inverse of `entity_serializable`: ids are resolved back into pointers via
/// `load_pointer`, the sprite-id mapping, and `units::unit_from_id`; every
/// plain value field is copied through unchanged. The returned struct
/// contains raw `prev`/`next` pointers — how they may be used is up to the
/// caller and the `LoadEntityPointer` implementation.
pub fn deserialize_entity<C: LoadEntityPointer>(
    entity: &EntitySerializable,
    load_pointer: &C,
) -> Result<bw::Entity, LoadError> {
    // Destructure every field so additions to `EntitySerializable` cause a
    // compile error here rather than being silently ignored.
    let EntitySerializable {
        prev,
        next,
        hitpoints,
        sprite,
        move_target,
        move_target_unit,
        next_move_waypoint,
        unk_move_waypoint,
        flingy_flags,
        facing_direction,
        flingy_turn_speed,
        movement_direction,
        flingy_id,
        unk_26,
        flingy_movement_type,
        position,
        exact_position,
        flingy_top_speed,
        current_speed,
        next_speed,
        speed,
        speed2,
        acceleration,
        new_direction,
        target_direction,
        player,
        order,
        order_state,
        order_signal,
        order_fow_unit,
        unused52,
        order_timer,
        ground_cooldown,
        air_cooldown,
        spell_cooldown,
        order_target_pos,
        target,
    } = *entity;
    Ok(bw::Entity {
        // Ids are resolved back into entity pointers.
        prev: load_pointer.id_to_pointer(prev)? as *mut bw::Entity,
        next: load_pointer.id_to_pointer(next)? as *mut bw::Entity,
        hitpoints,
        sprite: sprites::sprite_from_id_current_mapping(sprite)?,
        move_target,
        move_target_unit: units::unit_from_id(move_target_unit)?,
        next_move_waypoint,
        unk_move_waypoint,
        flingy_flags,
        facing_direction,
        flingy_turn_speed,
        movement_direction,
        flingy_id,
        unk_26,
        flingy_movement_type,
        position,
        exact_position,
        flingy_top_speed,
        current_speed,
        next_speed,
        speed,
        speed2,
        acceleration,
        new_direction,
        target_direction,
        player,
        order,
        order_state,
        order_signal,
        order_fow_unit,
        unused52,
        order_timer,
        ground_cooldown,
        air_cooldown,
        spell_cooldown,
        order_target_pos,
        target: units::unit_from_id(target)?,
    })
}
|
use parser::*;
use std::io::{Write, Error};
/// Emits Rust source (structs, enums and their Serialize/Deserialize impls)
/// from a parsed protocol specification. All customization points are boxed
/// closures so callers can override type mapping, field naming, and the
/// (de)serialization helper used per field type.
pub struct Generator {
    // Verbatim text inserted at the top of every generated module.
    prelude: Option<String>,
    // Attribute bodies (without `#[]`) emitted above every generated item.
    attrs: Vec<String>,
    // Maps a spec field type to the Rust type used in generated structs.
    typemap: Box<Fn(&SpecFieldType) -> String>,
    // Returns the path of the serialize helper for a given field type.
    ser: Box<Fn(&SpecFieldType) -> String>,
    // Returns the path of the deserialize helper for a given field type.
    de: Box<Fn(&SpecFieldType) -> String>,
    // Maps a spec field name to the generated Rust field name.
    namemap: Box<Fn(&str) -> String>
}
impl Generator {
    /// Serialize helper used when no per-type override is registered.
    const DEFAULT_SER: &'static str = "default::serialize";
    /// Deserialize helper used when no per-type override is registered.
    const DEFAULT_DE: &'static str = "default::deserialize";

    /// Create a generator with identity name mapping, spec-native type
    /// mapping, no attributes, no prelude, and the default helpers.
    pub fn new() -> Self {
        Self {
            prelude: None,
            attrs: vec![],
            typemap: Box::new(|x| x.to_str()),
            namemap: Box::new(|x| x.to_owned()),
            ser: Box::new(|_| Self::DEFAULT_SER.to_string()),
            de: Box::new(|_| Self::DEFAULT_DE.to_string())
        }
    }
    /// Override how spec field types are rendered as Rust types.
    pub fn map_type<F>(mut self, f: F) -> Self
    where F: 'static + Fn(&SpecFieldType) -> String
    {
        self.typemap = Box::new(f);
        self
    }
    /// Override how spec field names are rendered as Rust field names.
    pub fn map_name<F>(mut self, f: F) -> Self
    where F: 'static + Fn(&str) -> String
    {
        self.namemap = Box::new(f);
        self
    }
    /// Override the serialize-helper path emitted for a field type.
    pub fn type_ser<F>(mut self, f: F) -> Self
    where F: 'static + Fn(&SpecFieldType) -> String
    {
        self.ser = Box::new(f);
        self
    }
    /// Override the deserialize-helper path emitted for a field type.
    pub fn type_de<F>(mut self, f: F) -> Self
    where F: 'static + Fn(&SpecFieldType) -> String
    {
        self.de = Box::new(f);
        self
    }
    /// Add an attribute (body only, without `#[]`) to every generated item.
    pub fn def_attr(mut self, attr: &str) -> Self {
        self.attrs.push(attr.to_owned());
        self
    }
    /// Set the verbatim prelude text emitted inside each generated module.
    pub fn prelude(mut self, prelude: &str) -> Self {
        self.prelude = Some(prelude.to_owned());
        self
    }
    // Write each doc line as a `///` comment in the generated source.
    fn print_docs<'a, W>(writer: &mut W, docs: &[&'a str]) -> Result<(), Error>
    where W: Write
    {
        for doc in docs {
            writeln!(writer, "///{}", doc)?;
        }
        Ok(())
    }
    // Write the configured attributes above a generated item.
    fn print_attrs<W>(&self, writer: &mut W) -> Result<(), Error>
    where W: Write
    {
        for attr in self.attrs.iter() {
            writeln!(writer, "#[{}]", attr)?;
        }
        Ok(())
    }
    /// Parse `specdata` and write the generated Rust source to `writer`.
    ///
    /// Emits, in order: a `use` line; for every spec enum type the enum plus
    /// `From<Enum> for Base`, `try_from`, `Serialize`, and `Deserialize`
    /// impls; then one `pub mod` per spec containing a `default` helper
    /// module and, per message definition, a struct with field-by-field
    /// `Serialize`/`Deserialize` impls.
    ///
    /// NOTE(review): `parse(specdata).unwrap()` panics on malformed spec
    /// input instead of returning the error to the caller.
    pub fn build<W>(self, specdata: &[u8], writer: &mut W) -> Result<(), Error>
    where W: Write
    {
        let Protocol{ specs, types } = parse(specdata).unwrap();
        writeln!(writer, "use std::convert::From;")?;
        for ty in types {
            match ty.class {
                TypeClass::Enum(branches, base_ty) => {
                    // Enum declaration with per-branch doc comments.
                    self.print_attrs(writer)?;
                    writeln!(writer, "#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]")?;
                    writeln!(writer, "pub enum {name} {{", name=ty.name)?;
                    for branch in branches.iter() {
                        for doc in branch.docs.iter() {
                            writeln!(writer, "///{}", doc)?;
                        }
                        writeln!(writer, "{},", branch.name)?;
                    }
                    writeln!(writer, "}}")?;
                    // Infallible conversion into the underlying wire type.
                    writeln!(writer,
                        "impl From<{name}> for {base} {{",
                        name = ty.name,
                        base = base_ty.to_str()
                    )?;
                    writeln!(writer, "fn from(_v: {}) -> {} {{", ty.name, base_ty.to_str())?;
                    if branches.len() == 0 {
                        // An empty enum has no values to match on, so emit an
                        // unreachable body instead of an empty match.
                        writeln!(writer, "unimplemented!();")?;
                    }
                    else {
                        writeln!(writer, "match _v {{")?;
                        for branch in branches.iter() {
                            writeln!(writer,
                                "{name}::{branch} => {num},",
                                name = ty.name,
                                branch = branch.name,
                                num = branch.value
                            )?;
                        }
                        writeln!(writer, "}}")?;
                    }
                    writeln!(writer, "}}\n}}")?;
                    // Fallible conversion back from the wire value.
                    writeln!(writer, "impl {} {{", ty.name)?;
                    writeln!(writer, "pub fn try_from(val: {}) -> Option<Self> {{", base_ty.to_str())?;
                    writeln!(writer, "match val {{")?;
                    for branch in branches.iter() {
                        writeln!(writer,
                            "{num} => Some({name}::{branch}),",
                            num = branch.value,
                            branch = branch.name,
                            name = ty.name
                        )?;
                    }
                    writeln!(writer, "_ => None")?;
                    writeln!(writer, "}}")?;
                    writeln!(writer, "}}\n}}")?;
                    // Serialize/Deserialize delegate to the base type, with
                    // deserialization rejecting out-of-range values.
                    writeln!(writer, "impl Serialize for {name} {{", name=ty.name)?;
                    writeln!(writer, "fn serialize(&self, ser: &mut Serializer) -> Result<(), SerError> {{")?;
                    writeln!(writer, "{}::from(*self).serialize(ser)", base_ty.to_str())?;
                    writeln!(writer, "}}\n}}")?;
                    writeln!(writer, "impl<'de> Deserialize<'de> for {name} {{", name=ty.name)?;
                    writeln!(writer, "fn deserialize(de: &mut Deserializer<'de>) -> Result<Self, DeError> {{")?;
                    writeln!(writer, "let val = {base}::deserialize(de)?;", base=base_ty.to_str())?;
                    writeln!(writer, "
match Self::try_from(val) {{
Some(v) => Ok(v),
None => Err(DeError::InvalidEnumValue(\"{name}\", val as u64))
}}", name=ty.name)?;
                    writeln!(writer, "}}\n}}")?;
                }
            }
        }
        for spec in specs {
            Self::print_docs(writer, &spec.docs)?;
            writeln!(writer, "pub mod {} {{", spec.name)?;
            if let Some(ref prelude) = self.prelude {
                writeln!(writer, "{}", prelude)?
            }
            // Helper module providing the fallback (de)serialize functions
            // referenced by DEFAULT_SER / DEFAULT_DE.
            writeln!(writer, "
mod default {{
{prelude}
pub fn serialize<T>(v: &T, ser: &mut Serializer) -> Result<(), SerError>
where T: Serialize
{{
v.serialize(ser)
}}
pub fn deserialize<'de, T>(de: &mut Deserializer<'de>) -> Result<T, DeError>
where T: Deserialize<'de>
{{
T::deserialize(de)
}}
}}",
                prelude=self.prelude.clone().unwrap_or("".to_string())
            )?;
            for def in spec.defs {
                Self::print_docs(writer, &def.docs)?;
                self.print_attrs(writer)?;
                writeln!(writer,
                    "#[derive(Clone, Debug)] pub struct {name} {{",
                    name=def.name
                )?;
                for field in def.fields.iter() {
                    Self::print_docs(writer, &field.docs)?;
                    writeln!(writer,
                        "pub {name}: {type},",
                        name=(self.namemap)(&field.name),
                        type=(self.typemap)(&field.ty)
                    )?;
                }
                writeln!(writer, "}}")?;
                // Field-by-field serialization using the per-type helpers.
                writeln!(writer, "impl Serialize for {name} {{", name=def.name)?;
                writeln!(writer, "fn serialize(&self, ser: &mut Serializer) -> Result<(), SerError> {{")?;
                for field in def.fields.iter() {
                    writeln!(writer,
                        "{ser}(&self.{name}, ser)?;",
                        name=(self.namemap)(&field.name),
                        ser= (self.ser)(&field.ty)
                    )?;
                }
                writeln!(writer, "Ok(())\n}}\n}}")?;
                writeln!(writer, "impl<'de> Deserialize<'de> for {name} {{", name=def.name)?;
                writeln!(writer, "fn deserialize(de: &mut Deserializer<'de>) -> Result<Self, DeError> {{")?;
                writeln!(writer, "Ok(Self {{")?;
                for field in def.fields.iter() {
                    writeln!(writer,
                        "{name}: {de}(de)?,",
                        name=(self.namemap)(&field.name),
                        de=(self.de)(&field.ty)
                    )?;
                }
                writeln!(writer, "}})")?;
                writeln!(writer, "}}\n}}")?;
            }
            writeln!(writer, "}}")?;
        }
        Ok(())
    }
}
|
use crate::blob::blob::generate_blob_uri;
use crate::blob::blob::responses::CopyBlobResponse;
use crate::core::prelude::*;
use crate::{RehydratePriority, RehydratePriorityOption, RehydratePrioritySupport};
use azure_core::errors::AzureError;
use azure_core::lease::LeaseId;
use azure_core::prelude::*;
use azure_core::{No, ToAssign, Yes};
use hyper::{Method, StatusCode};
use std::collections::HashMap;
use std::convert::TryInto;
use std::marker::PhantomData;
/// Type-state builder for the Copy Blob operation.
///
/// The three marker parameters (`ContainerNameSet`, `BlobNameSet`,
/// `SourceUrlNameSet`) flip from `No` to `Yes` as the mandatory fields are
/// supplied; `finalize` is only implemented once all three are `Yes`.
#[derive(Debug, Clone)]
pub struct CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    client: &'a C,
    // Zero-sized markers recording which mandatory fields have been set.
    p_container_name: PhantomData<ContainerNameSet>,
    p_blob_name: PhantomData<BlobNameSet>,
    p_source_url: PhantomData<SourceUrlNameSet>,
    // Mandatory parameters; `Some` once the matching marker is `Yes`.
    container_name: Option<&'a str>,
    blob_name: Option<&'a str>,
    source_url: Option<&'a str>,
    // Optional request parameters, all unset by default.
    metadata: Option<&'a HashMap<&'a str, &'a str>>,
    timeout: Option<u64>,
    if_since_condition: Option<IfSinceCondition>,
    if_source_since_condition: Option<IfSinceCondition>,
    if_match_condition: Option<IfMatchCondition<'a>>,
    if_source_match_condition: Option<IfMatchCondition<'a>>,
    lease_id: Option<&'a LeaseId>,
    source_lease_id: Option<&'a LeaseId>,
    access_tier: Option<AccessTier>,
    rehydrate_priority: Option<RehydratePriority>,
    client_request_id: Option<&'a str>,
}
impl<'a, C> CopyBlobBuilder<'a, C, No, No, No>
where
    C: Client,
{
    /// Start a fresh builder: all three type-state markers are `No` and
    /// every parameter except the client is unset.
    #[inline]
    pub(crate) fn new(client: &'a C) -> CopyBlobBuilder<'a, C, No, No, No> {
        CopyBlobBuilder {
            client,
            p_container_name: PhantomData,
            p_blob_name: PhantomData,
            p_source_url: PhantomData,
            container_name: None,
            blob_name: None,
            source_url: None,
            metadata: None,
            timeout: None,
            if_since_condition: None,
            if_source_since_condition: None,
            if_match_condition: None,
            if_source_match_condition: None,
            lease_id: None,
            source_lease_id: None,
            access_tier: None,
            rehydrate_priority: None,
            client_request_id: None,
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> ClientRequired<'a, C>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    // The client reference is available in every builder state.
    #[inline]
    fn client(&self) -> &'a C {
        self.client
    }
}
// Accessors for the mandatory parameters. Each trait is implemented only for
// builder states whose marker is `Yes`; the matching setter stored `Some(..)`
// when it flipped that marker, which is what makes the `unwrap()` calls safe.
impl<'a, C, BlobNameSet, SourceUrlNameSet> ContainerNameRequired<'a>
    for CopyBlobBuilder<'a, C, Yes, BlobNameSet, SourceUrlNameSet>
where
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn container_name(&self) -> &'a str {
        self.container_name.unwrap()
    }
}
impl<'a, C, ContainerNameSet, SourceUrlNameSet> BlobNameRequired<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, Yes, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn blob_name(&self) -> &'a str {
        self.blob_name.unwrap()
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet> SourceUrlRequired<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, Yes>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn source_url(&self) -> &'a str {
        self.source_url.unwrap()
    }
}
// Read accessors for the optional request parameters. Each returns `None`
// until the corresponding `with_*` setter has been called; they are
// implemented for every builder state.
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> MetadataOption<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn metadata(&self) -> Option<&'a HashMap<&'a str, &'a str>> {
        self.metadata
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> TimeoutOption
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn timeout(&self) -> Option<u64> {
        self.timeout
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfSinceConditionOption
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn if_since_condition(&self) -> Option<IfSinceCondition> {
        self.if_since_condition
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfSourceSinceConditionOption
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn if_source_since_condition(&self) -> Option<IfSinceCondition> {
        self.if_source_since_condition
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfMatchConditionOption<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn if_match_condition(&self) -> Option<IfMatchCondition<'a>> {
        self.if_match_condition
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfSourceMatchConditionOption<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn if_source_match_condition(&self) -> Option<IfMatchCondition<'a>> {
        self.if_source_match_condition
    }
}
// Remaining optional-parameter accessors; `None` until the matching
// `with_*` setter has been called.
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> LeaseIdOption<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn lease_id(&self) -> Option<&'a LeaseId> {
        self.lease_id
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> SourceLeaseIdOption<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn source_lease_id(&self) -> Option<&'a LeaseId> {
        self.source_lease_id
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> AccessTierOption
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn access_tier(&self) -> Option<AccessTier> {
        self.access_tier
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> RehydratePriorityOption
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn rehydrate_priority(&self) -> Option<RehydratePriority> {
        self.rehydrate_priority
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> ClientRequestIdOption<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    #[inline]
    fn client_request_id(&self) -> Option<&'a str> {
        self.client_request_id
    }
}
impl<'a, C, BlobNameSet, SourceUrlNameSet> ContainerNameSupport<'a>
    for CopyBlobBuilder<'a, C, No, BlobNameSet, SourceUrlNameSet>
where
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, Yes, BlobNameSet, SourceUrlNameSet>;

    /// Record the container name, flipping its type-state marker from `No`
    /// to `Yes`. Struct-update syntax cannot be used here because the output
    /// type differs from `Self`, so the builder is destructured and rebuilt.
    #[inline]
    fn with_container_name(self, container_name: &'a str) -> Self::O {
        let CopyBlobBuilder {
            client,
            blob_name,
            source_url,
            metadata,
            timeout,
            if_since_condition,
            if_source_since_condition,
            if_match_condition,
            if_source_match_condition,
            lease_id,
            source_lease_id,
            access_tier,
            rehydrate_priority,
            client_request_id,
            ..
        } = self;
        CopyBlobBuilder {
            client,
            p_container_name: PhantomData,
            p_blob_name: PhantomData,
            p_source_url: PhantomData,
            container_name: Some(container_name),
            blob_name,
            source_url,
            metadata,
            timeout,
            if_since_condition,
            if_source_since_condition,
            if_match_condition,
            if_source_match_condition,
            lease_id,
            source_lease_id,
            access_tier,
            rehydrate_priority,
            client_request_id,
        }
    }
}
impl<'a, C, ContainerNameSet, SourceUrlNameSet> BlobNameSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, No, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, Yes, SourceUrlNameSet>;

    /// Record the blob name, flipping its type-state marker from `No` to
    /// `Yes`. The output type differs from `Self`, so the builder is
    /// destructured and rebuilt field by field.
    #[inline]
    fn with_blob_name(self, blob_name: &'a str) -> Self::O {
        let CopyBlobBuilder {
            client,
            container_name,
            source_url,
            metadata,
            timeout,
            if_since_condition,
            if_source_since_condition,
            if_match_condition,
            if_source_match_condition,
            lease_id,
            source_lease_id,
            access_tier,
            rehydrate_priority,
            client_request_id,
            ..
        } = self;
        CopyBlobBuilder {
            client,
            p_container_name: PhantomData,
            p_blob_name: PhantomData,
            p_source_url: PhantomData,
            container_name,
            blob_name: Some(blob_name),
            source_url,
            metadata,
            timeout,
            if_since_condition,
            if_source_since_condition,
            if_match_condition,
            if_source_match_condition,
            lease_id,
            source_lease_id,
            access_tier,
            rehydrate_priority,
            client_request_id,
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet> SourceUrlSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, No>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, Yes>;

    /// Record the copy-source URL, flipping its type-state marker from `No`
    /// to `Yes`. The output type differs from `Self`, so the builder is
    /// destructured and rebuilt field by field.
    #[inline]
    fn with_source_url(self, source_url: &'a str) -> Self::O {
        let CopyBlobBuilder {
            client,
            container_name,
            blob_name,
            metadata,
            timeout,
            if_since_condition,
            if_source_since_condition,
            if_match_condition,
            if_source_match_condition,
            lease_id,
            source_lease_id,
            access_tier,
            rehydrate_priority,
            client_request_id,
            ..
        } = self;
        CopyBlobBuilder {
            client,
            p_container_name: PhantomData,
            p_blob_name: PhantomData,
            p_source_url: PhantomData,
            container_name,
            blob_name,
            source_url: Some(source_url),
            metadata,
            timeout,
            if_since_condition,
            if_source_since_condition,
            if_match_condition,
            if_source_match_condition,
            lease_id,
            source_lease_id,
            access_tier,
            rehydrate_priority,
            client_request_id,
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> MetadataSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the metadata map; the type-state is unchanged, so struct-update
    /// syntax carries every other field over verbatim.
    #[inline]
    fn with_metadata(self, metadata: &'a HashMap<&'a str, &'a str>) -> Self::O {
        CopyBlobBuilder {
            metadata: Some(metadata),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> TimeoutSupport
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the request timeout; all other fields are carried over verbatim
    /// via struct-update syntax (the type-state is unchanged).
    #[inline]
    fn with_timeout(self, timeout: u64) -> Self::O {
        CopyBlobBuilder {
            timeout: Some(timeout),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfSinceConditionSupport
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the destination modified-since condition; everything else is
    /// carried over verbatim via struct-update syntax.
    #[inline]
    fn with_if_since_condition(self, if_since_condition: IfSinceCondition) -> Self::O {
        CopyBlobBuilder {
            if_since_condition: Some(if_since_condition),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfSourceSinceConditionSupport
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the source modified-since condition; everything else is carried
    /// over verbatim via struct-update syntax.
    #[inline]
    fn with_if_source_since_condition(
        self,
        if_source_since_condition: IfSinceCondition,
    ) -> Self::O {
        CopyBlobBuilder {
            if_source_since_condition: Some(if_source_since_condition),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfMatchConditionSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the destination ETag match condition; everything else is carried
    /// over verbatim via struct-update syntax.
    #[inline]
    fn with_if_match_condition(self, if_match_condition: IfMatchCondition<'a>) -> Self::O {
        CopyBlobBuilder {
            if_match_condition: Some(if_match_condition),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> IfSourceMatchConditionSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the source ETag match condition; everything else is carried over
    /// verbatim via struct-update syntax.
    #[inline]
    fn with_if_source_match_condition(
        self,
        if_source_match_condition: IfMatchCondition<'a>,
    ) -> Self::O {
        CopyBlobBuilder {
            if_source_match_condition: Some(if_source_match_condition),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> LeaseIdSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the destination lease id; everything else is carried over
    /// verbatim via struct-update syntax.
    #[inline]
    fn with_lease_id(self, lease_id: &'a LeaseId) -> Self::O {
        CopyBlobBuilder {
            lease_id: Some(lease_id),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> SourceLeaseIdSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the source lease id; everything else is carried over verbatim
    /// via struct-update syntax.
    #[inline]
    fn with_source_lease_id(self, source_lease_id: &'a LeaseId) -> Self::O {
        CopyBlobBuilder {
            source_lease_id: Some(source_lease_id),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> AccessTierSupport
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the target access tier; everything else is carried over verbatim
    /// via struct-update syntax.
    #[inline]
    fn with_access_tier(self, access_tier: AccessTier) -> Self::O {
        CopyBlobBuilder {
            access_tier: Some(access_tier),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> RehydratePrioritySupport
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the rehydrate priority; everything else is carried over verbatim
    /// via struct-update syntax.
    #[inline]
    fn with_rehydrate_priority(self, rehydrate_priority: RehydratePriority) -> Self::O {
        CopyBlobBuilder {
            rehydrate_priority: Some(rehydrate_priority),
            ..self
        }
    }
}
impl<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet> ClientRequestIdSupport<'a>
    for CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    SourceUrlNameSet: ToAssign,
    C: Client,
{
    type O = CopyBlobBuilder<'a, C, ContainerNameSet, BlobNameSet, SourceUrlNameSet>;

    /// Set the client request id used for request tracing; everything else
    /// is carried over verbatim via struct-update syntax.
    #[inline]
    fn with_client_request_id(self, client_request_id: &'a str) -> Self::O {
        CopyBlobBuilder {
            client_request_id: Some(client_request_id),
            ..self
        }
    }
}
// methods callable only when every mandatory field has been filled
impl<'a, C> CopyBlobBuilder<'a, C, Yes, Yes, Yes>
where
    C: Client,
{
    /// Sends the Copy Blob request.
    ///
    /// Builds the destination blob URI (appending the optional `timeout`
    /// query parameter when configured), issues a PUT with every
    /// configured optional header attached, requires HTTP 202 Accepted,
    /// and converts the response headers into a `CopyBlobResponse`.
    #[inline]
    pub async fn finalize(self) -> Result<CopyBlobResponse, AzureError> {
        let mut uri =
            generate_blob_uri(self.client(), self.container_name(), self.blob_name(), None);
        if let Some(timeout) = TimeoutOption::to_uri_parameter(&self) {
            uri = format!("{}?{}", uri, timeout);
        }
        trace!("uri == {:?}", uri);
        let (headers, _) = self
            .client()
            .perform_request(
                &uri,
                &Method::PUT,
                &|mut request| {
                    // Each Option/Required trait contributes its header only
                    // if the corresponding builder field was set.
                    request = SourceUrlRequired::add_header(&self, request);
                    request = MetadataOption::add_header(&self, request);
                    request = IfSinceConditionOption::add_header(&self, request);
                    request = IfSourceSinceConditionOption::add_header(&self, request);
                    request = IfMatchConditionOption::add_header(&self, request);
                    request = IfSourceMatchConditionOption::add_header(&self, request);
                    request = LeaseIdOption::add_header(&self, request);
                    request = SourceLeaseIdOption::add_header(&self, request);
                    request = AccessTierOption::add_header(&self, request);
                    request = RehydratePriorityOption::add_header(&self, request);
                    request = ClientRequestIdOption::add_header(&self, request);
                    request
                },
                None,
            )?
            .check_status_extract_headers_and_body(StatusCode::ACCEPTED)
            .await?;
        // The copy result (copy id, status, etc.) lives in the headers.
        (&headers).try_into()
    }
}
|
/// Fibonacci iterator state: the two most recently seen values.
struct Fib {
    first: u32,
    second: u32,
}

/// Creates a Fibonacci iterator seeded with 0 and 1.
fn fib() -> Fib {
    Fib { first: 0, second: 1 }
}

impl Iterator for Fib {
    type Item = u32;

    /// Yields the next Fibonacci number; stops cleanly when the sum
    /// would overflow a u32.
    fn next(&mut self) -> Option<Self::Item> {
        let sum = self.first.checked_add(self.second)?;
        self.first = self.second;
        self.second = sum;
        Some(sum)
    }
}
/// Iterator adaptor that multiplies every item of the inner u32
/// iterator by two.
struct Doubler<I> {
    iter: I,
}

impl<I> Iterator for Doubler<I>
where
    I: Iterator<Item = u32>,
{
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        // Delegate to the wrapped iterator and double whatever it yields.
        self.iter.next().map(|n| n * 2)
    }
}
/// Prints 2, 4, ..., 20: each value of 1..=10 doubled.
fn main() {
    let doubled = Doubler { iter: 1..11 };
    for value in doubled {
        println!("{}", value);
    }
}
|
// use failure;
use failure_derive::Fail;
/// Error type for kvs
// `failure_derive::Fail` generates Display from the `#[fail(display)]`
// attributes and wires `#[cause]` fields into the error chain.
#[derive(Debug, Fail)]
pub enum KvsError {
    /// caused by IO error
    #[fail(display = "{}", _0)]
    IoError(#[cause] std::io::Error),
    /// caused by serde error
    #[fail(display = "{}", _0)]
    SerdeError(#[cause] serde_json::error::Error),
    /// caused by key not found
    #[fail(display = "Key not found")]
    KeyNotFoundError,
}
// Lets `?` convert std I/O errors into KvsError automatically.
impl From<std::io::Error> for KvsError {
    fn from(inner: std::io::Error) -> KvsError {
        KvsError::IoError(inner)
    }
}
// Lets `?` convert serde_json errors into KvsError automatically.
impl From<serde_json::error::Error> for KvsError {
    fn from(inner: serde_json::error::Error) -> KvsError {
        KvsError::SerdeError(inner)
    }
}
/// Result type for kvs
pub type Result<T> = std::result::Result<T, KvsError>;
|
//! A library to read binary protobuf files
//!
//! This reader is developed similarly to a pull reader
#![deny(missing_docs)]
#![allow(dead_code)]
extern crate byteorder;
extern crate failure;
extern crate failure_derive;
pub mod errors;
pub mod message;
pub mod reader;
pub mod sizeofs;
pub mod writer;
pub use errors::{Error, Result};
pub use message::{MessageRead, MessageWrite};
pub use reader::{deserialize_from_slice, BytesReader, Reader};
pub use writer::{serialize_into_vec, Writer};
|
//! Some general utility functions.
use std::{iter, str::from_utf8};
use strip_ansi_escapes::strip;
/// Visible character count of `s` after stripping ANSI escape sequences.
///
/// NOTE(review): both `unwrap`s assume stripping succeeds and the result
/// is valid UTF-8 — reasonable for UTF-8 input, but worth confirming for
/// arbitrary terminal output.
fn ansi_len(s: &str) -> usize {
    from_utf8(&strip(s.as_bytes()).unwrap())
        .unwrap()
        .chars()
        .count()
}
pub fn adjust_to_size(s: &str, rows: usize, columns: usize) -> String {
s.lines()
.map(|l| {
let actual_len = ansi_len(l);
if actual_len > columns {
let mut line = String::from(l);
line.truncate(columns);
line
} else {
[l, &str::repeat(" ", columns - ansi_len(l))].concat()
}
})
.chain(iter::repeat(str::repeat(" ", columns)))
.take(rows)
.collect::<Vec<_>>()
.join("\n\r")
}
|
mod term_query;
mod term_weight;
mod term_scorer;
pub use self::term_query::TermQuery;
pub use self::term_weight::TermWeight;
pub use self::term_scorer::TermScorer;
#[cfg(test)]
mod tests {
    use docset::DocSet;
    use postings::SegmentPostings;
    use query::{Query, Scorer};
    use query::term_query::TermScorer;
    use query::TermQuery;
    use Index;
    use schema::*;
    use schema::IndexRecordOption;
    use fastfield::FastFieldReader;
    // Absolute difference, for approximate float comparisons below.
    fn abs_diff(left: f32, right: f32) -> f32 {
        (right - left).abs()
    }
    #[test]
    pub fn test_term_query_no_freq() {
        // Build a one-document index over an untokenized STRING field.
        let mut schema_builder = SchemaBuilder::default();
        let text_field = schema_builder.add_text_field("text", STRING);
        let schema = schema_builder.build();
        let index = Index::create_from_tempdir(schema).unwrap();
        {
            // writing the segment
            let mut index_writer = index.writer_with_num_threads(1, 40_000_000).unwrap();
            {
                let doc = doc!(text_field => "a");
                index_writer.add_document(doc);
            }
            assert!(index_writer.commit().is_ok());
        }
        index.load_searchers().unwrap();
        let searcher = index.searcher();
        let term_query = TermQuery::new(
            Term::from_field_text(text_field, "a"),
            IndexRecordOption::Basic,
        );
        let term_weight = term_query.weight(&searcher, true).unwrap();
        let segment_reader = searcher.segment_reader(0);
        let mut term_scorer = term_weight.scorer(segment_reader).unwrap();
        assert!(term_scorer.advance());
        assert_eq!(term_scorer.doc(), 0);
        // NOTE(review): exact float equality pins the idf formula for a
        // one-doc segment — brittle if the scoring formula changes.
        assert_eq!(term_scorer.score(), 0.30685282);
    }
    #[test]
    pub fn test_term_scorer() {
        // Hand-assembled scorer: doc 1 has fieldnorm 4, idf as above.
        let left_fieldnorms = FastFieldReader::from(vec![10, 4]);
        assert_eq!(left_fieldnorms.get(0), 10);
        assert_eq!(left_fieldnorms.get(1), 4);
        let left = SegmentPostings::create_from_docs(&[1]);
        let mut left_scorer = TermScorer {
            idf: 0.30685282,
            fieldnorm_reader_opt: Some(left_fieldnorms),
            postings: left,
        };
        left_scorer.advance();
        assert!(abs_diff(left_scorer.score(), 0.15342641) < 0.001f32);
    }
}
|
//! Test helpers
/// Parses the output of `ps aux` into structured entries.
///
/// Skips the header row. Panics (via `expect`) when the PID column of a
/// data row is not numeric.
pub fn parse_ps_aux(ps_aux: &str) -> Vec<PsAuxEntry> {
    ps_aux
        .lines()
        .skip(1) // drop the column-header line
        .map(|row| {
            let fields = row.split_ascii_whitespace().collect::<Vec<_>>();
            PsAuxEntry {
                // Everything from column 10 onward is the command line.
                command: fields[10..].join(" "),
                pid: fields[1].parse().expect("invalid PID"),
                process_state: fields[7].to_owned(),
                tty: fields[6].to_owned(),
            }
        })
        .collect()
}
/// an entry / row in `ps aux` output
#[derive(Debug)]
pub struct PsAuxEntry {
    /// command column
    pub command: String,
    /// pid column
    pub pid: u32,
    /// process state column
    pub process_state: String,
    /// tty column
    pub tty: String,
}
impl PsAuxEntry {
    /// whether the process has an associated PTY
    pub fn has_tty(&self) -> bool {
        match self.tty.as_str() {
            "?" => false,
            t if t.starts_with("pts/") => true,
            // Any other TTY spelling is unexpected in these tests.
            _ => unimplemented!(),
        }
    }
    /// whether the process is a session leader
    pub fn is_session_leader(&self) -> bool {
        self.process_state.contains('s')
    }
    /// whether the process is in the foreground process group
    pub fn is_in_the_foreground_process_group(&self) -> bool {
        self.process_state.contains('+')
    }
}
|
use ra_db::FileId;
use ra_syntax::ast;
use crate::db::RootDatabase;
/// Resolves go-to-definition at `position`: first tries a name reference
/// under the cursor, then a definition name (e.g. a module declaration).
pub fn goto_defenition(db: &RootDatabase, position: FilePosition,
) -> Cancelable<Option<Vec<NavigationTarget>>> {
    let file = db.source_file(position.file_id);
    let syntax = file.syntax();
    // Cursor on a reference: resolve it to its definition(s).
    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
        return Ok(Some(reference_defenition(db, position.file_id, name_ref)));
    }
    // Cursor on a definition name: resolve the name itself.
    if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
        // Fix: `position.file_idname` was a garbled argument list — the
        // callee needs the file id and the name node as two arguments.
        return Ok(Some(name_defenition(db, position.file_id, name)));
    }
    Ok(None)
}
// NOTE(review): this function appears to be a half-finished extraction —
// the body references `syntax`, `position`, and `self`, none of which are
// parameters here (the signature takes `db`, `file_id`, `name_ref`), and
// it returns `Ok(Some(..))`/`Ok(None)` although the declared return type
// is `Cancelable<Vec<Nav>>`. Left byte-identical; needs a real rework
// against the surrounding crate before it can compile.
fn reference_defenition(db: &RootDatabase, file_id: FileId, name_ref: ast::NameRef) -> Cancelable<Vec<Nav>> {
    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
        let mut rr = ReferenceResolution::new(name_ref.syntax().range());
        if let Some(fn_descr) =
            source_binder::function_from_child_node(self, position.file_id, name_ref.syntax())?
        {
            let scope = fn_descr.scopes(self);
            // First try to resolve the symbol locally
            if let Some(entry) = scope.resolve_local_name(name_ref) {
                rr.resolves_to.push(NavigationTarget {
                    file_id: position.file_id,
                    name: entry.name().to_string().into(),
                    range: entry.ptr().range(),
                    kind: NAME,
                    ptr: None,
                });
                return Ok(Some(rr));
            };
        }
        // If that fails try the index based approach.
        rr.resolves_to.extend(
            self.index_resolve(name_ref)?
                .into_iter()
                .map(NavigationTarget::from_symbol),
        );
        return Ok(Some(rr));
    }
    if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
        let mut rr = ReferenceResolution::new(name.syntax().range());
        // A `mod foo;` declaration resolves to the child module's file.
        if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
            if module.has_semi() {
                if let Some(child_module) =
                    source_binder::module_from_declaration(self, position.file_id, module)?
                {
                    let file_id = child_module.file_id();
                    let name = match child_module.name() {
                        Some(name) => name.to_string().into(),
                        None => "".into(),
                    };
                    let symbol = NavigationTarget {
                        file_id,
                        name,
                        // Points at the start of the target file.
                        range: TextRange::offset_len(0.into(), 0.into()),
                        kind: MODULE,
                        ptr: None,
                    };
                    rr.resolves_to.push(symbol);
                    return Ok(Some(rr));
                }
            }
        }
    }
    Ok(None)
}
|
use criterion::{criterion_group, criterion_main};
use criterion::{BenchmarkId, Criterion};
#[cfg(unix)]
use pprof::criterion::{Output, PProfProfiler};
use ppp::v1;
use std::net::{Ipv4Addr, Ipv6Addr};
/// Benchmarks PROXY protocol v1 text parsing and formatting.
fn benchmarks(c: &mut Criterion) {
    let mut group = c.benchmark_group("PPP Text");
    // Parsing inputs: one representative header per v1 address family,
    // plus a maximal-length "worst case" line.
    let inputs = [
        ("UNKNOWN", "PROXY UNKNOWN\r\n"),
        ("TCP4", "PROXY TCP4 255.255.255.255 255.255.255.255 65535 65535\r\n"),
        ("TCP6", "PROXY TCP6 ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\n"),
        ("TCP6 Compact", "PROXY TCP6 ffff::ffff ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\n"),
        ("Worst Case", "PROXY UNKNOWN ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff 65535 65535\r\n"),
    ];
    for (id, input) in inputs {
        group.bench_with_input(
            BenchmarkId::new("v1::Header::try_from", id),
            input.as_bytes(),
            |b, i| {
                b.iter(|| v1::Header::try_from(i).unwrap());
            },
        );
    }
    // Formatting inputs: pre-built headers rendered back to text.
    let headers = [
        ("TCP4", v1::Header::new(
            "PROXY TCP4 127.0.1.2 192.168.1.101 80 443\r\n",
            v1::Addresses::new_tcp4(
                Ipv4Addr::new(127, 0, 1, 2),
                Ipv4Addr::new(192, 168, 1, 101),
                80,
                443,
            ),
        )),
        ("TCP6", v1::Header::new(
            "PROXY TCP6 1234:5678:90ab:cdef:fedc:ba09:8765:4321 4321:8765:ba09:fedc:cdef:90ab:5678:1234 443 65535\r\n",
            v1::Addresses::new_tcp6(
                Ipv6Addr::from([
                    0x1234, 0x5678, 0x90AB, 0xCDEF, 0xFEDC, 0xBA09, 0x8765, 0x4321,
                ]),
                Ipv6Addr::from([
                    0x4321, 0x8765, 0xBA09, 0xFEDC, 0xCDEF, 0x90AB, 0x5678, 0x01234,
                ]),
                443,
                65535,
            ))),
        ("UNKNOWN", v1::Header::new("PROXY UNKNOWN\r\n", v1::Addresses::default())),
    ];
    for (id, header) in headers {
        group.bench_with_input(
            BenchmarkId::new("v1::Header::to_string", id),
            &header,
            |b, h| {
                b.iter(|| h.to_string());
            },
        );
        group.bench_with_input(
            BenchmarkId::new("v1::Addresses::to_string", id),
            &header.addresses,
            |b, a| {
                b.iter(|| a.to_string());
            },
        );
    }
    group.finish();
}
// On Unix, attach the pprof profiler so `--profile-time` emits protobuf
// flamegraph data; on other platforms fall back to the stock harness.
#[cfg(unix)]
criterion_group! {
    name = benches;
    config = {
        Criterion::default().with_profiler(PProfProfiler::new(100, Output::Protobuf))
    };
    targets = benchmarks
}
#[cfg(not(unix))]
criterion_group!(benches, benchmarks);
criterion_main!(benches);
|
Version {
minor: 44,
patch: 1,
channel: Stable,
}
|
use std::time::Duration;
use frp_gaming_lib::drawer::{Drawer, DrawerCommand};
use frp_gaming_lib::glium::glutin::event::Event;
use frp_gaming_lib::glium::{Frame, Surface};
use frp_gaming_lib::sodium_rust::Stream;
use frp_gaming_lib::timer::Timer;
/// Builds a stream of drawer commands: on every timer tick, sample the
/// elapsed time and emit a drawer that clears the frame to a color that
/// cycles with time (see `duration_to_color`). The event stream is unused.
pub fn create_drawer_command<CustomEvent>(
    _event: &Stream<Event<'static, CustomEvent>>,
    timer: &Timer,
) -> Stream<DrawerCommand> {
    // Sample `elapsed` at each tick; the tick payload itself is ignored.
    let elapsed = timer
        .tick
        .snapshot(&timer.elapsed, |_: &_, time: &Duration| *time);
    elapsed.map(|elapsed: &Duration| {
        let color = duration_to_color(elapsed);
        // The closure captures the color by value for the deferred draw.
        DrawerCommand::AddSingle(Drawer::new(move |frame: &mut Frame| {
            let (red, green, blue, alpha) = color;
            frame.clear_color(red, green, blue, alpha)
        }))
    })
}
/// Maps elapsed time onto an RGBA color cycling red -> green -> blue with
/// a 3-second period (one second per cross-fade phase), on a 0.25 base.
fn duration_to_color(elapsed: &Duration) -> (f32, f32, f32, f32) {
    let t = elapsed.as_secs_f32() % 3.0;
    let base = 0.25_f32;
    let (red, green, blue) = if t < 1.0 {
        // Phase 0: fade red out, green in.
        (base + (1.0 - t), base + t, base)
    } else if t < 2.0 {
        // Phase 1: fade green out, blue in.
        let t = t - 1.0;
        (base, base + (1.0 - t), base + t)
    } else {
        // Phase 2: fade blue out, red in.
        let t = t - 2.0;
        (base + t, base, base + (1.0 - t))
    };
    (red, green, blue, 1.0)
}
|
pub mod named_field;
pub mod tuple_like;
pub mod unit_like;
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsDeviceId : The device ID.
/// The device ID.
// Generated (openapi-generator): variant names mirror the wire values in
// the serde rename attributes; non-snake-case is intentional here.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SyntheticsDeviceId {
    #[serde(rename = "laptop_large")]
    LAPTOP_LARGE,
    #[serde(rename = "tablet")]
    TABLET,
    #[serde(rename = "mobile_small")]
    MOBILE_SMALL,
    #[serde(rename = "chrome.laptop_large")]
    CHROME_LAPTOP_LARGE,
    #[serde(rename = "chrome.tablet")]
    CHROME_TABLET,
    #[serde(rename = "chrome.mobile_small")]
    CHROME_MOBILE_SMALL,
    #[serde(rename = "firefox.laptop_large")]
    FIREFOX_LAPTOP_LARGE,
    #[serde(rename = "firefox.tablet")]
    FIREFOX_TABLET,
    #[serde(rename = "firefox.mobile_small")]
    FIREFOX_MOBILE_SMALL,
}
impl ToString for SyntheticsDeviceId {
    /// Returns the wire-format device id (the serde rename values).
    fn to_string(&self) -> String {
        let id = match self {
            Self::LAPTOP_LARGE => "laptop_large",
            Self::TABLET => "tablet",
            Self::MOBILE_SMALL => "mobile_small",
            Self::CHROME_LAPTOP_LARGE => "chrome.laptop_large",
            Self::CHROME_TABLET => "chrome.tablet",
            Self::CHROME_MOBILE_SMALL => "chrome.mobile_small",
            Self::FIREFOX_LAPTOP_LARGE => "firefox.laptop_large",
            Self::FIREFOX_TABLET => "firefox.tablet",
            Self::FIREFOX_MOBILE_SMALL => "firefox.mobile_small",
        };
        id.to_owned()
    }
}
|
use flate2::write::GzEncoder;
use flate2::Compression;
use iron::headers::{AcceptEncoding, ContentEncoding, ContentType, Encoding};
use iron::prelude::*;
use iron::AfterMiddleware;
use iron_staticfile_middleware::helpers;
// Iron after-middleware that gzip-compresses text responses in memory.
pub struct GzipMiddleware;
impl AfterMiddleware for GzipMiddleware {
    /// Compresses the response body with gzip when the client accepts it
    /// and the content type is text-based; otherwise passes it through.
    fn after(&self, req: &mut Request, mut resp: Response) -> IronResult<Response> {
        // Skip Gzip response on HEAD requests
        if req.method == iron::method::Head {
            return Ok(resp);
        }
        // Enable Gzip compression only for known text-based file types
        let enable_gz = helpers::is_text_mime_type(resp.headers.get::<ContentType>());
        let accept_gz = helpers::accept_gzip(req.headers.get::<AcceptEncoding>());
        if enable_gz && accept_gz {
            // Drain the existing body through an in-memory gzip encoder.
            // Errors from write_body are deliberately ignored (best-effort).
            let compressed_bytes = resp.body.as_mut().map(|b| {
                let mut encoder = GzEncoder::new(vec![], Compression::fast());
                {
                    let _ = b.write_body(&mut encoder);
                }
                // Writing gzip into a Vec should be infallible, hence unwrap.
                encoder.finish().unwrap()
            });
            if let Some(b) = compressed_bytes {
                resp.headers.set(ContentEncoding(vec![Encoding::Gzip]));
                // set_mut also updates Content-Length for the new body.
                resp.set_mut(b);
            }
        }
        Ok(resp)
    }
}
|
#[doc = "Reader of register SR"]
pub type R = crate::R<u32, super::SR>;
#[doc = "Reader of field `DCOL`"]
pub type DCOL_R = crate::R<bool, bool>;
#[doc = "Reader of field `TXE`"]
pub type TXE_R = crate::R<bool, bool>;
#[doc = "Reader of field `RFF`"]
pub type RFF_R = crate::R<bool, bool>;
#[doc = "Reader of field `RFNE`"]
pub type RFNE_R = crate::R<bool, bool>;
#[doc = "Reader of field `TFE`"]
pub type TFE_R = crate::R<bool, bool>;
#[doc = "Reader of field `TFNF`"]
pub type TFNF_R = crate::R<bool, bool>;
#[doc = "Reader of field `BUSY`"]
pub type BUSY_R = crate::R<bool, bool>;
// Generated (svd2rust-style) accessors: each method extracts one status
// bit from the raw 32-bit SR register value.
impl R {
    #[doc = "Bit 6 - Data collision error"]
    #[inline(always)]
    pub fn dcol(&self) -> DCOL_R {
        DCOL_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Transmission error"]
    #[inline(always)]
    pub fn txe(&self) -> TXE_R {
        TXE_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Receive FIFO full"]
    #[inline(always)]
    pub fn rff(&self) -> RFF_R {
        RFF_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Receive FIFO not empty"]
    #[inline(always)]
    pub fn rfne(&self) -> RFNE_R {
        RFNE_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Transmit FIFO empty"]
    #[inline(always)]
    pub fn tfe(&self) -> TFE_R {
        TFE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1 - Transmit FIFO not full"]
    #[inline(always)]
    pub fn tfnf(&self) -> TFNF_R {
        TFNF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0 - SSI busy flag"]
    #[inline(always)]
    pub fn busy(&self) -> BUSY_R {
        BUSY_R::new((self.bits & 0x01) != 0)
    }
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#![crate_type = "proc-macro"]
use quote::quote;
use syn::parse_macro_input;
use syn::DeriveInput;
extern crate proc_macro;
use proc_macro::TokenStream;
/// Derives `AbstractDomain` for an enum whose variants each wrap a single
/// abstract domain, treating the enum as their disjoint union.
///
/// `bottom()`/`top()` delegate to the first variant's domain; mixed-variant
/// comparisons fall back to false (leq) or to `top`/`bottom` (join/meet).
#[proc_macro_derive(DisjointUnion)]
pub fn derive_disjoint_union(input: TokenStream) -> TokenStream {
    let code_item: DeriveInput = parse_macro_input!(input as DeriveInput);
    // Expect to be enum.
    let data_enum = match code_item.data {
        syn::Data::Enum(data_enum) => data_enum,
        _ => panic!("#[derive(DisjointUnion)] must be applied to enums only."),
    };
    let variant_idents = data_enum
        .variants
        .iter()
        .map(|variant| &variant.ident)
        .collect::<Vec<_>>();
    assert!(
        !variant_idents.is_empty(),
        "enum needs at least one variant."
    );
    // bottom()/top() need a representative variant; use the first.
    let first_variant = variant_idents.first().unwrap();
    let enum_name = &code_item.ident;
    let (impl_generics, ty_generics, where_clause) = code_item.generics.split_for_impl();
    let expanded = quote! {
        impl #impl_generics AbstractDomain for #enum_name #ty_generics #where_clause {
            fn bottom() -> Self {
                #enum_name::#first_variant(AbstractDomain::bottom())
            }
            fn top() -> Self {
                #enum_name::#first_variant(AbstractDomain::top())
            }
            fn is_bottom(&self) -> bool {
                match self {
                    #( #enum_name::#variant_idents(dom) => dom.is_bottom(), )*
                }
            }
            fn is_top(&self) -> bool {
                match self {
                    #( #enum_name::#variant_idents(dom) => dom.is_top(), )*
                }
            }
            fn leq(&self, rhs: &Self) -> bool {
                // Extremal elements are comparable across variants; only
                // same-variant pairs are compared structurally.
                if self.is_bottom() {
                    return true;
                }
                if rhs.is_bottom() {
                    return false;
                }
                if rhs.is_top() {
                    return true;
                }
                if self.is_top() {
                    return false;
                }
                return match (self, rhs) {
                    #( (#enum_name::#variant_idents(ref ldom), #enum_name::#variant_idents(rdom)) => ldom.leq(rdom), )*
                    _ => false,
                };
            }
            fn join_with(&mut self, rhs: Self) {
                // Joining across different variants loses all precision.
                match (self, rhs) {
                    #( (#enum_name::#variant_idents(ref mut ldom), #enum_name::#variant_idents(rdom)) => ldom.join_with(rdom), )*
                    (s, _) => *s = Self::top(),
                }
            }
            fn meet_with(&mut self, rhs: Self) {
                // Meeting across different variants yields the empty set.
                match (self, rhs) {
                    #( (#enum_name::#variant_idents(ref mut ldom), #enum_name::#variant_idents(rdom)) => ldom.meet_with(rdom), )*
                    (s, _) => *s = Self::bottom(),
                }
            }
            fn widen_with(&mut self, rhs: Self) {
                self.join_with(rhs)
            }
            fn narrow_with(&mut self, rhs: Self) {
                self.meet_with(rhs)
            }
        }
    };
    TokenStream::from(expanded)
}
|
mod first_line;
mod second_line;
use std::fmt::{Display, Error, Formatter};
use zellij_tile::prelude::*;
use first_line::{ctrl_keys, superkey};
use second_line::keybinds;
// 256-color palette indices shared by both status-bar lines.
pub mod colors {
    use ansi_term::Colour::{self, Fixed};
    pub const WHITE: Colour = Fixed(255);
    pub const BLACK: Colour = Fixed(16);
    pub const GREEN: Colour = Fixed(154);
    pub const ORANGE: Colour = Fixed(166);
    pub const GRAY: Colour = Fixed(238);
    pub const BRIGHT_GRAY: Colour = Fixed(245);
    pub const RED: Colour = Fixed(88);
}
// for more of these, copy paste from: https://en.wikipedia.org/wiki/Box-drawing_character
static ARROW_SEPARATOR: &str = "";
static MORE_MSG: &str = " ... ";
// Plugin state: just the latest mode info pushed by Zellij via ModeUpdate.
#[derive(Default)]
struct State {
    mode_info: ModeInfo,
}
register_plugin!(State);
/// A rendered fragment of a status-bar line together with its visible
/// width (`len` excludes ANSI escape bytes, so it differs from
/// `part.len()`).
#[derive(Default)]
pub struct LinePart {
    part: String,
    len: usize,
}

impl Display for LinePart {
    /// Writes only the text; `len` is layout bookkeeping, not output.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        f.write_str(&self.part)
    }
}
impl ZellijPlugin for State {
    // One-time setup: a 2-row, non-selectable, borderless status pane that
    // only listens for mode changes.
    fn load(&mut self) {
        set_selectable(false);
        set_invisible_borders(true);
        set_max_height(2);
        subscribe(&[EventType::ModeUpdate]);
    }
    // Cache the latest mode info; render() reads it.
    fn update(&mut self, event: Event) {
        if let Event::ModeUpdate(mode_info) = event {
            self.mode_info = mode_info;
        }
    }
    fn render(&mut self, _rows: usize, cols: usize) {
        let superkey = superkey();
        // NOTE(review): `cols - superkey.len` underflows (usize) when the
        // pane is narrower than the superkey prefix — TODO confirm zellij
        // guarantees a minimum width here.
        let ctrl_keys = ctrl_keys(&self.mode_info, cols - superkey.len);
        let first_line = format!("{}{}", superkey, ctrl_keys);
        let second_line = keybinds(&self.mode_info, cols);
        // [48;5;238m is gray background, [0K is so that it fills the rest of the line
        // [m is background reset, [0K is so that it clears the rest of the line
        println!("{}\u{1b}[48;5;238m\u{1b}[0K", first_line);
        println!("\u{1b}[m{}\u{1b}[0K", second_line);
    }
}
|
use std::fs::File;
use std::io::prelude::*;
/// Reads the file at `filepath` into a String, with surrounding
/// whitespace trimmed.
fn read_data(filepath: &str) -> std::io::Result<String> {
    let contents = std::fs::read_to_string(filepath)?;
    Ok(contents.trim().to_string())
}
/// Part 1: earliest bus departing at or after `start_time`, scored as
/// (wait minutes) * (bus line id).
///
/// # Errors
///
/// Returns () as error for lack of a better type
fn sol1(data: &str) -> Result<usize, ()> {
    let mut datasplit = data.split('\n');
    let start_time = datasplit.next().unwrap().parse::<usize>().unwrap();
    let mut busline = datasplit.next().unwrap().split(',')
        // Parse correct bus lines. The trailing `% x` fixes the edge case
        // where start_time is an exact multiple of a line id: the wait is
        // then 0, not a full period x as the old `x - start_time % x` gave.
        .filter_map(|x| x.parse::<usize>().ok()
            .map(|x| ((x - start_time % x) % x, x)))
        .collect::<Vec<(usize,usize)>>();
    busline.sort_unstable();
    // Smallest wait wins (ties broken by smaller line id).
    let (delta, lineno) = busline[0];
    Ok(delta*lineno)
}
/// # Errors
///
/// Returns () as error for lack of a better type
// Part 2: find the earliest time t such that bus at schedule index i
// departs at t + i, via an incremental sieve (buses are pairwise coprime).
fn sol2(data: &str) -> Result<usize, ()> {
    // Collect (schedule index, line id) for every real (numeric) bus.
    let mut data = data.split('\n').skip(1).flat_map(|x|
        x.split(',').enumerate()
            .filter_map(|(idx, n)| match n.parse::<usize>() {
                Err(_) => None,
                Ok(u) => Some((idx, u))
            }).collect::<Vec<(usize,usize)>>()
    ).collect::<Vec<(usize,usize)>>();
    data.sort_unstable();
    let buslines: Vec<usize> = data.iter().map(|(_,x)| *x).collect();
    // Required residue per line: t ≡ -idx (mod k), computed as
    // (k*idx - idx) % k to stay within unsigned arithmetic.
    let offsets: Vec<usize> = data.iter().map(|(x,k)| (k*x-x)%k).collect();
    // Sieve: lock in one bus at a time. Once `attempt` satisfies a bus,
    // any later solution must advance by that bus's period, so multiply
    // it into the step size.
    let mut step = buslines[0];
    let mut attempt = step;
    for u in 1..buslines.len() {
        loop {
            // Check
            if attempt%buslines[u] == offsets[u] {
                // Ding!
                step *= buslines[u];
                break;
            }
            attempt += step;
        }
    }
    Ok(attempt)
}
/// Reads the puzzle input and prints both answers.
fn main() {
    let data = match read_data("input") {
        Ok(contents) => contents,
        Err(_) => panic!("Modular arithmetic is all the rage these days"),
    };
    println!("{:?}", sol1(&data));
    println!("{:?}", sol2(&data));
}
#[cfg(test)]
mod test {
    use super::*;
    // Worked examples from the puzzle statement (AoC 2020 day 13).
    #[test]
    fn sol1_example() {
        let data = "939\n7,13,x,x,59,x,31,19";
        assert_eq!(sol1(data), Ok(295));
    }
    #[test]
    fn sol2_example() {
        let data = "939\n7,13,x,x,59,x,31,19";
        assert_eq!(sol2(data), Ok(1068781));
    }
}
|
pub mod lexer;
pub mod parser;
pub mod runner;
pub mod helpers;
pub mod builtins;
|
// Takes a list of entities with bodies, then spits out their transformations.
/// Zero-sized physics engine stub; `step` is currently a no-op placeholder.
pub struct PhysicsEngine;

impl PhysicsEngine {
    /// Creates a new engine instance.
    pub fn new() -> Self {
        PhysicsEngine
    }

    /// Advances the simulation by one step (no-op for now).
    pub fn step(&mut self) {}
}

// `Default` mirrors `new()`, per clippy's `new_without_default`; this is a
// backward-compatible addition for generic construction contexts.
impl Default for PhysicsEngine {
    fn default() -> Self {
        Self::new()
    }
}
use crate::errors::Errcode;
use crate::ipc::{send_boolean, receive_boolean};
use std::os::unix::io::RawFd;
use nix::sched::{unshare, CloneFlags};
use nix::unistd::{Gid, Uid, setgroups, setresuid, setresgid};
/// Unshares into a new user namespace when the kernel supports it, reports
/// the outcome to the parent over `fd`, waits for the parent to write the
/// UID/GID maps, then switches to `uid` (the same value is used as GID).
///
/// # Errors
///
/// `Errcode::NamespacesError(0)` when the parent reports a mapping failure,
/// and (1..=3) when setgroups / setresgid / setresuid fail.
pub fn userns(fd: RawFd, uid: u32) -> Result<(), Errcode> {
    log::debug!("Setting up user namespace with UID {}", uid);
    // Not every kernel/config allows CLONE_NEWUSER; report the outcome to
    // the parent instead of aborting. (`.is_ok()` replaces the verbose
    // match on Ok/Err — clippy: redundant pattern matching.)
    let has_userns = unshare(CloneFlags::CLONE_NEWUSER).is_ok();
    send_boolean(fd, has_userns)?;
    if receive_boolean(fd)? {
        // Parent signalled that writing the UID/GID maps failed.
        return Err(Errcode::NamespacesError(0));
    }
    if has_userns {
        log::info!("User namespaces set up");
    } else {
        log::info!("User namespaces not supported, continuing...");
    }
    log::debug!("Switching to uid {} / gid {}...", uid, uid);
    let gid = Gid::from_raw(uid);
    let uid = Uid::from_raw(uid);
    // Each failure maps to a distinct error code for diagnosis.
    if setgroups(&[gid]).is_err() {
        return Err(Errcode::NamespacesError(1));
    }
    if setresgid(gid, gid, gid).is_err() {
        return Err(Errcode::NamespacesError(2));
    }
    if setresuid(uid, uid, uid).is_err() {
        return Err(Errcode::NamespacesError(3));
    }
    Ok(())
}
use nix::unistd::Pid;
use std::fs::File;
use std::io::Write;
const USERNS_OFFSET: u64 = 10000;
const USERNS_COUNT: u64 = 2000;
pub fn handle_child_uid_map(pid: Pid, fd: RawFd) -> Result<(), Errcode> {
if receive_boolean(fd)? {
if let Ok(mut uid_map) = File::create(format!("/proc/{}/{}", pid.as_raw(), "uid_map")) {
if let Err(_) = uid_map.write_all(format!("0 {} {}", USERNS_OFFSET, USERNS_COUNT).as_bytes()) {
return Err(Errcode::NamespacesError(4));
}
} else {
return Err(Errcode::NamespacesError(5))
}
if let Ok(mut gid_map) = File::create(format!("/proc/{}/{}", pid.as_raw(), "gid_map")) {
if let Err(_) = gid_map.write_all(format!("0 {} {}", USERNS_OFFSET, USERNS_COUNT).as_bytes()) {
return Err(Errcode::NamespacesError(6));
}
} else {
return Err(Errcode::NamespacesError(7))
}
} else {
log::info!("No user namespace set up from child process");
}
log::debug!("Child UID/GID map done, sending signal to child to continue...");
send_boolean(fd, false)
} |
// Copyright (C) 2015-2021 Swift Navigation Inc.
// Contact: https://support.swiftnav.com
//
// This source is subject to the license found in the file 'LICENSE' which must
// be be distributed together with this source. All other rights reserved.
//
// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
// EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
//****************************************************************************
// Automatically generated from yaml/swiftnav/sbp/orientation.yaml
// with generate.py. Please do not hand edit!
//****************************************************************************/
//! Orientation Messages
#[allow(unused_imports)]
use std::convert::TryFrom;
#[allow(unused_imports)]
use byteorder::{LittleEndian, ReadBytesExt};
#[allow(unused_imports)]
use crate::serialize::SbpSerialize;
#[allow(unused_imports)]
use crate::SbpString;
/// Vehicle Body Frame instantaneous angular rates
///
/// This message reports the orientation rates in the vehicle body frame. The
/// values represent the measurements a strapped down gyroscope would make and
/// are not equivalent to the time derivative of the Euler angles. The
/// orientation and origin of the user frame is specified via device settings.
/// By convention, the vehicle x-axis is expected to be aligned with the
/// forward direction, while the vehicle y-axis is expected to be aligned with
/// the right direction, and the vehicle z-axis should be aligned with the
/// down direction. This message will only be available in future INS versions
/// of Swift Products and is not produced by Piksi Multi or Duro.
///
// Generated code (orientation.yaml) — regenerate rather than hand edit.
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgAngularRate {
    // Filled in by the framer, not part of the wire payload.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// GPS Time of Week
    // Interpreted as milliseconds by gps_time() below (divided by 1000).
    pub tow: u32,
    /// angular rate about x axis
    pub x: i32,
    /// angular rate about y axis
    pub y: i32,
    /// angular rate about z axis
    pub z: i32,
    /// Status flags
    pub flags: u8,
}
// Generated code (orientation.yaml) — regenerate rather than hand edit.
impl MsgAngularRate {
    /// Decodes the payload fields from a little-endian byte buffer,
    /// advancing it; `sender_id` is filled in later by the framer.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgAngularRate, crate::Error> {
        Ok( MsgAngularRate{
            sender_id: None,
            tow: _buf.read_u32::<LittleEndian>()?,
            x: _buf.read_i32::<LittleEndian>()?,
            y: _buf.read_i32::<LittleEndian>()?,
            z: _buf.read_i32::<LittleEndian>()?,
            flags: _buf.read_u8()?,
        } )
    }
}
impl super::SBPMessage for MsgAngularRate {
    fn get_message_name(&self) -> &'static str {
        "MSG_ANGULAR_RATE"
    }
    fn get_message_type(&self) -> u16 {
        546
    }
    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }
    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }
    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }
    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
    #[cfg(feature = "swiftnav-rs")]
    fn gps_time(
        &self,
    ) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
        // tow is milliseconds; GpsTime takes seconds. The week number is
        // unknown here, so only the time-of-week part is retained.
        let tow_s = (self.tow as f64) / 1000.0;
        let gps_time = match crate::time::GpsTime::new(0, tow_s) {
            Ok(gps_time) => gps_time.tow(),
            Err(e) => return Some(Err(e.into())),
        };
        Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
    }
}
impl super::ConcreteMessage for MsgAngularRate {
    const MESSAGE_TYPE: u16 = 546;
    const MESSAGE_NAME: &'static str = "MSG_ANGULAR_RATE";
}
// Downcast from the SBP message enum; fails on any other variant.
impl TryFrom<super::SBP> for MsgAngularRate {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgAngularRate(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}
// Wire serialization: fields appended in declaration (wire) order.
impl crate::serialize::SbpSerialize for MsgAngularRate {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.tow.append_to_sbp_buffer(buf);
        self.x.append_to_sbp_buffer(buf);
        self.y.append_to_sbp_buffer(buf);
        self.z.append_to_sbp_buffer(buf);
        self.flags.append_to_sbp_buffer(buf);
    }
    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.tow.sbp_size();
        size += self.x.sbp_size();
        size += self.y.sbp_size();
        size += self.z.sbp_size();
        size += self.flags.sbp_size();
        size
    }
}
/// Heading relative to True North
///
/// This message reports the baseline heading pointing from the base station
/// to the rover relative to True North. The full GPS time is given by the
/// preceding MSG_GPS_TIME with the matching time-of-week (tow). It is
/// intended that time-matched RTK mode is used when the base station is
/// moving.
///
// Generated code (orientation.yaml) — regenerate rather than hand edit.
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgBaselineHeading {
    // Filled in by the framer, not part of the wire payload.
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// GPS Time of Week
    // Interpreted as milliseconds by gps_time() below (divided by 1000).
    pub tow: u32,
    /// Heading
    pub heading: u32,
    /// Number of satellites used in solution
    pub n_sats: u8,
    /// Status flags
    pub flags: u8,
}
// Generated code (orientation.yaml) — regenerate rather than hand edit.
impl MsgBaselineHeading {
    /// Decodes the payload fields from a little-endian byte buffer,
    /// advancing it; `sender_id` is filled in later by the framer.
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgBaselineHeading, crate::Error> {
        Ok( MsgBaselineHeading{
            sender_id: None,
            tow: _buf.read_u32::<LittleEndian>()?,
            heading: _buf.read_u32::<LittleEndian>()?,
            n_sats: _buf.read_u8()?,
            flags: _buf.read_u8()?,
        } )
    }
}
impl super::SBPMessage for MsgBaselineHeading {
    fn get_message_name(&self) -> &'static str {
        "MSG_BASELINE_HEADING"
    }
    fn get_message_type(&self) -> u16 {
        527
    }
    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }
    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }
    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }
    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
    #[cfg(feature = "swiftnav-rs")]
    fn gps_time(
        &self,
    ) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
        // tow is milliseconds; GpsTime takes seconds. The week number is
        // unknown here, so only the time-of-week part is retained.
        let tow_s = (self.tow as f64) / 1000.0;
        let gps_time = match crate::time::GpsTime::new(0, tow_s) {
            Ok(gps_time) => gps_time.tow(),
            Err(e) => return Some(Err(e.into())),
        };
        Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
    }
}
impl super::ConcreteMessage for MsgBaselineHeading {
    const MESSAGE_TYPE: u16 = 527;
    const MESSAGE_NAME: &'static str = "MSG_BASELINE_HEADING";
}
// Downcast from the SBP message enum; fails on any other variant.
impl TryFrom<super::SBP> for MsgBaselineHeading {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgBaselineHeading(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}
// Wire serialization: fields appended in declaration (wire) order.
impl crate::serialize::SbpSerialize for MsgBaselineHeading {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.tow.append_to_sbp_buffer(buf);
        self.heading.append_to_sbp_buffer(buf);
        self.n_sats.append_to_sbp_buffer(buf);
        self.flags.append_to_sbp_buffer(buf);
    }
    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.tow.sbp_size();
        size += self.heading.sbp_size();
        size += self.n_sats.sbp_size();
        size += self.flags.sbp_size();
        size
    }
}
/// Euler angles
///
/// This message reports the yaw, pitch, and roll angles of the vehicle body
/// frame. The rotations should applied intrinsically in the order yaw, pitch,
/// and roll in order to rotate the from a frame aligned with the local-level
/// NED frame to the vehicle body frame. This message will only be available
/// in future INS versions of Swift Products and is not produced by Piksi
/// Multi or Duro.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgOrientEuler {
// Filled by the framing layer, not part of the wire payload.
#[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
pub sender_id: Option<u16>,
/// GPS Time of Week
pub tow: u32,
/// rotation about the forward axis of the vehicle
pub roll: i32,
/// rotation about the rightward axis of the vehicle
pub pitch: i32,
/// rotation about the downward axis of the vehicle
pub yaw: i32,
/// Estimated standard deviation of roll
pub roll_accuracy: f32,
/// Estimated standard deviation of pitch
pub pitch_accuracy: f32,
/// Estimated standard deviation of yaw
pub yaw_accuracy: f32,
/// Status flags
pub flags: u8,
}
impl MsgOrientEuler {
// Decodes the payload field-by-field, little-endian, in declaration order.
#[rustfmt::skip]
pub fn parse(_buf: &mut &[u8]) -> Result<MsgOrientEuler, crate::Error> {
Ok( MsgOrientEuler{
sender_id: None,
tow: _buf.read_u32::<LittleEndian>()?,
roll: _buf.read_i32::<LittleEndian>()?,
pitch: _buf.read_i32::<LittleEndian>()?,
yaw: _buf.read_i32::<LittleEndian>()?,
roll_accuracy: _buf.read_f32::<LittleEndian>()?,
pitch_accuracy: _buf.read_f32::<LittleEndian>()?,
yaw_accuracy: _buf.read_f32::<LittleEndian>()?,
flags: _buf.read_u8()?,
} )
}
}
impl super::SBPMessage for MsgOrientEuler {
fn get_message_name(&self) -> &'static str {
"MSG_ORIENT_EULER"
}
fn get_message_type(&self) -> u16 {
545
}
fn get_sender_id(&self) -> Option<u16> {
self.sender_id
}
fn set_sender_id(&mut self, new_id: u16) {
self.sender_id = Some(new_id);
}
// Serializes this message into a complete SBP frame (header + payload + CRC).
fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
let mut frame = Vec::new();
self.write_frame(&mut frame)?;
Ok(frame)
}
fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
crate::write_frame(self, frame)
}
#[cfg(feature = "swiftnav-rs")]
fn gps_time(
&self,
) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
// `tow` is milliseconds on the wire; wn=0 is a placeholder because this
// message does not carry a week number — only the time-of-week is kept.
let tow_s = (self.tow as f64) / 1000.0;
let gps_time = match crate::time::GpsTime::new(0, tow_s) {
Ok(gps_time) => gps_time.tow(),
Err(e) => return Some(Err(e.into())),
};
Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
}
}
impl super::ConcreteMessage for MsgOrientEuler {
// Must match `get_message_type()` above.
const MESSAGE_TYPE: u16 = 545;
const MESSAGE_NAME: &'static str = "MSG_ORIENT_EULER";
}
impl TryFrom<super::SBP> for MsgOrientEuler {
type Error = super::TryFromSBPError;
// Downcast from the SBP message enum; fails for any other variant.
fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
match msg {
super::SBP::MsgOrientEuler(m) => Ok(m),
_ => Err(super::TryFromSBPError),
}
}
}
impl crate::serialize::SbpSerialize for MsgOrientEuler {
// Payload serialization mirrors `parse`: same fields, same order.
#[allow(unused_variables)]
fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
self.tow.append_to_sbp_buffer(buf);
self.roll.append_to_sbp_buffer(buf);
self.pitch.append_to_sbp_buffer(buf);
self.yaw.append_to_sbp_buffer(buf);
self.roll_accuracy.append_to_sbp_buffer(buf);
self.pitch_accuracy.append_to_sbp_buffer(buf);
self.yaw_accuracy.append_to_sbp_buffer(buf);
self.flags.append_to_sbp_buffer(buf);
}
// Encoded payload size in bytes (sum of the fixed-size fields).
fn sbp_size(&self) -> usize {
let mut size = 0;
size += self.tow.sbp_size();
size += self.roll.sbp_size();
size += self.pitch.sbp_size();
size += self.yaw.sbp_size();
size += self.roll_accuracy.sbp_size();
size += self.pitch_accuracy.sbp_size();
size += self.yaw_accuracy.sbp_size();
size += self.flags.sbp_size();
size
}
}
/// Quaternion 4 component vector
///
/// This message reports the quaternion vector describing the vehicle body
/// frame's orientation with respect to a local-level NED frame. The
/// components of the vector should sum to a unit vector assuming that the LSB
/// of each component as a value of 2^-31. This message will only be available
/// in future INS versions of Swift Products and is not produced by Piksi
/// Multi or Duro.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgOrientQuat {
// Filled by the framing layer, not part of the wire payload.
#[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
pub sender_id: Option<u16>,
/// GPS Time of Week
pub tow: u32,
/// Real component
pub w: i32,
/// 1st imaginary component
pub x: i32,
/// 2nd imaginary component
pub y: i32,
/// 3rd imaginary component
pub z: i32,
/// Estimated standard deviation of w
pub w_accuracy: f32,
/// Estimated standard deviation of x
pub x_accuracy: f32,
/// Estimated standard deviation of y
pub y_accuracy: f32,
/// Estimated standard deviation of z
pub z_accuracy: f32,
/// Status flags
pub flags: u8,
}
impl MsgOrientQuat {
// Decodes the payload field-by-field, little-endian, in declaration order.
#[rustfmt::skip]
pub fn parse(_buf: &mut &[u8]) -> Result<MsgOrientQuat, crate::Error> {
Ok( MsgOrientQuat{
sender_id: None,
tow: _buf.read_u32::<LittleEndian>()?,
w: _buf.read_i32::<LittleEndian>()?,
x: _buf.read_i32::<LittleEndian>()?,
y: _buf.read_i32::<LittleEndian>()?,
z: _buf.read_i32::<LittleEndian>()?,
w_accuracy: _buf.read_f32::<LittleEndian>()?,
x_accuracy: _buf.read_f32::<LittleEndian>()?,
y_accuracy: _buf.read_f32::<LittleEndian>()?,
z_accuracy: _buf.read_f32::<LittleEndian>()?,
flags: _buf.read_u8()?,
} )
}
}
impl super::SBPMessage for MsgOrientQuat {
fn get_message_name(&self) -> &'static str {
"MSG_ORIENT_QUAT"
}
fn get_message_type(&self) -> u16 {
544
}
fn get_sender_id(&self) -> Option<u16> {
self.sender_id
}
fn set_sender_id(&mut self, new_id: u16) {
self.sender_id = Some(new_id);
}
// Serializes this message into a complete SBP frame (header + payload + CRC).
fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
let mut frame = Vec::new();
self.write_frame(&mut frame)?;
Ok(frame)
}
fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
crate::write_frame(self, frame)
}
#[cfg(feature = "swiftnav-rs")]
fn gps_time(
&self,
) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
// `tow` is milliseconds on the wire; wn=0 is a placeholder because this
// message does not carry a week number — only the time-of-week is kept.
let tow_s = (self.tow as f64) / 1000.0;
let gps_time = match crate::time::GpsTime::new(0, tow_s) {
Ok(gps_time) => gps_time.tow(),
Err(e) => return Some(Err(e.into())),
};
Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
}
}
impl super::ConcreteMessage for MsgOrientQuat {
// Must match `get_message_type()` above.
const MESSAGE_TYPE: u16 = 544;
const MESSAGE_NAME: &'static str = "MSG_ORIENT_QUAT";
}
impl TryFrom<super::SBP> for MsgOrientQuat {
type Error = super::TryFromSBPError;
// Downcast from the SBP message enum; fails for any other variant.
fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
match msg {
super::SBP::MsgOrientQuat(m) => Ok(m),
_ => Err(super::TryFromSBPError),
}
}
}
impl crate::serialize::SbpSerialize for MsgOrientQuat {
// Payload serialization mirrors `parse`: same fields, same order.
#[allow(unused_variables)]
fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
self.tow.append_to_sbp_buffer(buf);
self.w.append_to_sbp_buffer(buf);
self.x.append_to_sbp_buffer(buf);
self.y.append_to_sbp_buffer(buf);
self.z.append_to_sbp_buffer(buf);
self.w_accuracy.append_to_sbp_buffer(buf);
self.x_accuracy.append_to_sbp_buffer(buf);
self.y_accuracy.append_to_sbp_buffer(buf);
self.z_accuracy.append_to_sbp_buffer(buf);
self.flags.append_to_sbp_buffer(buf);
}
// Encoded payload size in bytes (sum of the fixed-size fields).
fn sbp_size(&self) -> usize {
let mut size = 0;
size += self.tow.sbp_size();
size += self.w.sbp_size();
size += self.x.sbp_size();
size += self.y.sbp_size();
size += self.z.sbp_size();
size += self.w_accuracy.sbp_size();
size += self.x_accuracy.sbp_size();
size += self.y_accuracy.sbp_size();
size += self.z_accuracy.sbp_size();
size += self.flags.sbp_size();
size
}
}
|
use std;
// One value in the Redis Serialization Protocol (RESP).
// Bulk/Error payloads are raw bytes, not guaranteed to be valid UTF-8.
#[derive(Eq,PartialEq)]
pub enum RespValue {
Int(i64),
// RESP distinguishes a nil bulk string ("$-1") from a nil array ("*-1").
NilBulk,
NilArray,
Bulk(Vec<u8>),
Array(Vec<RespValue>),
Error(Vec<u8>),
}
impl std::fmt::Debug for RespValue {
    /// Human-readable debug rendering; byte payloads are shown lossily as UTF-8
    /// and arrays are printed recursively with ", " separators.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            RespValue::NilBulk => write!(f, "NilBulk"),
            RespValue::NilArray => write!(f, "NilArray"),
            RespValue::Int(n) => write!(f, "Int({})", n),
            RespValue::Bulk(bs) => write!(f, "Bulk('{}')", String::from_utf8_lossy(bs)),
            RespValue::Error(bs) => write!(f, "Error('{}')", String::from_utf8_lossy(bs)),
            RespValue::Array(items) => {
                write!(f, "Array([")?;
                let mut first = true;
                for item in items {
                    if !first {
                        write!(f, ", ")?;
                    }
                    first = false;
                    item.fmt(f)?;
                }
                write!(f, "])")
            }
        }
    }
}
// Failure modes of the RESP reader/parser.
#[derive(Debug)]
pub enum RespError {
// Underlying transport/file error.
IoError(std::io::Error),
// Input violated the RESP grammar.
ParseFailed(String),
// Input was valid RESP but not what the caller expected.
Unexpected(String),
Unknown
}
impl From<std::io::Error> for RespError {
fn from(err: std::io::Error) -> Self {
RespError::IoError(err)
}
}
impl std::fmt::Display for RespError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
RespError::IoError(ref err) => write!(f, "io err: {}", err),
RespError::ParseFailed(ref s) => write!(f, "parse failed: {}", s),
RespError::Unexpected(ref s) => write!(f, "unexpected: {}", s),
RespError::Unknown => write!(f, "unknown error"),
}
}
}
impl std::error::Error for RespError {
fn description(&self) -> &str {
"resp error"
}
}
|
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::fmt::Debug;
use std::fs;
use camino::Utf8Path;
use chrono::{NaiveDate, NaiveDateTime, NaiveTime};
use eyre::{eyre, Result};
use itemref_derive::ItemRef;
use lazy_static::lazy_static;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use regex::Regex;
use serde::{Deserialize, Serialize};
use tera::Context;
use yaml_front_matter::{Document, YamlFrontMatter};
use crate::content::series::SeriesRef;
use crate::content::tags::{Tag, TagPostContext, TagsMeta};
use crate::item::Item;
use crate::item::RenderContext;
use crate::markdown::find_markdown_files;
use crate::markdown::markdown_to_html;
use crate::paths::AbsPath;
use crate::{content::SeriesItem, item::TeraItem, site_url::SiteUrl, util};
/// Load every markdown post under `dir` in parallel, keyed by `PostRef`,
/// then link each post to its previous/next neighbour in date order.
pub fn load_posts(dir: AbsPath) -> Result<BTreeMap<PostRef, PostItem>> {
    let files = find_markdown_files(dir);
    let mut posts: BTreeMap<PostRef, PostItem> = files
        .par_iter()
        .map(|path| {
            let post = PostItem::from_file(path.abs_path())?;
            Ok((post.post_ref(), post))
        })
        .collect::<Result<_>>()?;
    set_post_prev_next(&mut posts);
    Ok(posts)
}
/// Wire up the prev/next links between consecutive posts, walking the map in
/// its `BTreeMap` iteration order (ascending `PostRef`, i.e. by `created`).
pub fn set_post_prev_next(posts: &mut BTreeMap<PostRef, PostItem>) {
    // Carries the previous entry's key plus a live mutable borrow of its value,
    // so the predecessor's `next` can be patched once the current post is seen.
    let mut prev: Option<(&PostRef, &mut PostItem)> = None;
    // The original called `.peekable()` here but never peeked — dropped.
    for curr in posts.iter_mut() {
        // Point the current post back at its predecessor (None for the first).
        curr.1.prev = prev.as_ref().map(|x| x.0.clone());
        // Point the predecessor forward at the current post.
        if let Some((_, prev_post)) = prev {
            prev_post.next = Some(curr.0.clone());
        }
        prev = Some(curr);
    }
}
// Lightweight, cloneable handle to a `PostItem`; the `ItemRef` derive gives
// it an ordering based on the `#[order]` field (`created`), which is what
// makes `BTreeMap<PostRef, PostItem>` iterate posts chronologically.
#[derive(ItemRef, Debug, Clone)]
#[item(PostItem)]
pub struct PostRef {
// Stable identifier: the post's URL href.
pub id: String,
#[order]
pub created: NaiveDate,
}
impl PostRef {
    /// Build a `PostRef` straight from a post's path by parsing the
    /// `YYYY-MM-DD-slug` file name; no file contents are read.
    pub fn from_path(path: &Utf8Path) -> Result<PostRef> {
        // `path` is already a reference — the original's `&path` created a
        // needless `&&Utf8Path` double borrow.
        let meta = PostDirMetadata::from_path(path)?;
        Ok(PostRef {
            id: meta.to_url()?.href().to_string(),
            created: meta.date,
        })
    }
}
// A fully loaded blog post: parsed front matter, raw markdown, rendered HTML,
// and the chronological/series links filled in by later passes.
#[derive(Debug)]
pub struct PostItem {
pub title: String,
pub tags: Vec<Tag>,
// Date from the file name, time from optional `time:` front matter.
pub created: NaiveDateTime,
// File modification time at load.
pub updated: NaiveDateTime,
pub path: AbsPath,
pub url: SiteUrl,
// Chronological neighbours; set by `set_post_prev_next`, None at the edges.
pub prev: Option<PostRef>,
pub next: Option<PostRef>,
pub recommended: bool,
// Markdown body (front matter stripped).
pub raw_content: String,
// HTML rendered from `raw_content`.
pub transformed_content: String,
// `series:` id from front matter; resolved into `series` by a later pass.
pub series_id: Option<String>,
pub series: Option<SeriesRef>,
}
impl PostItem {
    /// Load a post from disk, using the file's mtime as `updated`.
    pub fn from_file(path: AbsPath) -> Result<Self> {
        let modified = util::last_modified(&path)?;
        let raw_content = fs::read_to_string(&path)?;
        Self::from_string(path, raw_content, modified)
    }

    /// Build a post from raw markdown with a YAML front matter header.
    ///
    /// The creation date comes from the `YYYY-MM-DD-slug` file name; the
    /// optional `time` front matter field refines it to a time of day.
    /// `prev`/`next`/`series` links are left unset for later passes.
    pub fn from_string(
        path: AbsPath,
        raw_content: String,
        modified: NaiveDateTime,
    ) -> Result<Self> {
        let post_dir = PostDirMetadata::from_path(&path)?;
        let Document { metadata, content } =
            YamlFrontMatter::parse::<PostMetadata>(&raw_content)
                .map_err(|err| eyre!("Failed to parse metadata for post: {}\n{}", path, err))?;
        let time = match metadata.time {
            Some(time_str) => parse_time(&time_str)?,
            // Midnight when no explicit time is given.
            None => NaiveTime::from_hms_opt(0, 0, 0).unwrap(),
        };
        let created = NaiveDateTime::new(post_dir.date, time);
        // Propagate URL construction failures as an error instead of panicking
        // (this function already returns Result; the old `.expect(...)` aborted).
        let url = post_dir.to_url()?;
        let transformed_content = markdown_to_html(&content);
        Ok(Self {
            title: metadata.title,
            tags: metadata.tags.into(),
            created,
            updated: modified,
            path,
            url,
            prev: None,
            next: None,
            raw_content: content,
            transformed_content,
            series_id: metadata.series,
            series: None,
            recommended: metadata.recommended.unwrap_or(false),
        })
    }

    /// Cheap, orderable handle to this post.
    pub fn post_ref(&self) -> PostRef {
        PostRef {
            id: self.id().to_string(),
            created: self.created.date(),
        }
    }
}
impl TeraItem for PostItem {
    /// Assemble the Tera template context for rendering this post.
    fn context(&self, ctx: &RenderContext) -> Context {
        let series = self.series.as_ref().map(|series| {
            PostSeriesContext::new(
                self,
                ctx.content
                    .get_series(series)
                    // BUGFIX: `expect("... {:?series}")` printed the braces
                    // literally — expect() does not interpolate. Format the
                    // panic message properly instead.
                    .unwrap_or_else(|| panic!("Could not find series {:?}", series)),
                ctx,
            )
        });
        Context::from_serialize(PostContext {
            title: html_escape::encode_text(&self.title),
            url: self.url.href(),
            ymd: self.created.format("%F").to_string(),
            date: self.created.format("%B %e, %Y").to_string(),
            content: &self.transformed_content,
            tags: self.tags.iter().map(TagPostContext::from).collect(),
            meta_keywords: self.tags.iter().map(|tag| tag.name.as_str()).collect(),
            series,
            prev: self.prev.as_ref().map(|x| PostRefContext::from_ref(x, ctx)),
            next: self.next.as_ref().map(|x| PostRefContext::from_ref(x, ctx)),
        })
        .unwrap()
    }
    fn template(&self) -> &str {
        "post.html"
    }
    fn url(&self) -> &SiteUrl {
        &self.url
    }
}
// Serializable view of a post handed to the `post.html` Tera template.
#[derive(Debug, Clone, Serialize)]
struct PostContext<'a> {
// HTML-escaped title.
title: Cow<'a, str>,
url: Cow<'a, str>,
// Machine-readable date (%F → YYYY-MM-DD) for <time datetime=...>.
ymd: String,
// Human-readable date, e.g. "January 31, 2022".
date: String,
// Pre-rendered HTML body.
content: &'a str,
tags: Vec<TagPostContext<'a>>,
meta_keywords: Vec<&'a str>,
series: Option<PostSeriesContext<'a>>,
prev: Option<PostRefContext<'a>>,
next: Option<PostRefContext<'a>>,
}
// Serializable view of the series a post belongs to, as seen from that post.
#[derive(Debug, Clone, Serialize)]
struct PostSeriesContext<'a> {
// HTML-escaped series title.
title: Cow<'a, str>,
url: Cow<'a, str>,
completed: bool,
// 1-based position of this post within the series.
part_number: usize,
post_note: Option<&'a str>,
// URL of the next post in the series, if any.
next_url: Option<Cow<'a, str>>,
}
impl<'a> PostSeriesContext<'a> {
    /// Build the series context for `post`, locating it inside `series` to
    /// derive its part number and the URL of the following part.
    fn new(post: &PostItem, series: &'a SeriesItem, ctx: &'a RenderContext) -> Self {
        let posts: Vec<_> = series.posts.iter().collect();
        let post_index = posts
            .iter()
            .position(|curr| &curr.0 == post)
            .expect("post should be a member of its own series");
        // The two `.as_ref()` calls in the original were redundant: `get` and
        // `get_post` already yield Options of references.
        let next_url = posts
            .get(post_index + 1)
            .and_then(|next| ctx.content.get_post(&next.0).map(|x| x.url.href()));
        Self {
            title: html_escape::encode_text(&series.title),
            url: series.url.href(),
            completed: series.completed,
            part_number: post_index + 1,
            next_url,
            post_note: series.post_note.as_deref(),
        }
    }
}
// Minimal serializable view of a neighbouring post (prev/next links).
#[derive(Debug, Clone, Serialize)]
pub struct PostRefContext<'a> {
title: &'a str,
url: Cow<'a, str>,
// %F-formatted date (YYYY-MM-DD).
ymd: String,
// Human-readable date.
date: String,
}
impl<'a> PostRefContext<'a> {
    /// Build a reference context directly from a loaded post.
    pub fn from_post(post: &'a PostItem) -> Self {
        let created = post.created;
        PostRefContext {
            title: &post.title,
            url: post.url.href(),
            ymd: created.format("%F").to_string(),
            date: created.format("%B %e, %Y").to_string(),
        }
    }

    /// Resolve a `PostRef` through the render context, then delegate.
    pub fn from_ref(post_ref: &PostRef, ctx: &'a RenderContext) -> Self {
        Self::from_post(ctx.content.get_post(post_ref).expect("Should have post"))
    }
}
// Shape of a post's YAML front matter, deserialized by YamlFrontMatter.
#[derive(Deserialize, Debug)]
struct PostMetadata {
title: String,
tags: TagsMeta,
// Optional "HH:MM" or "HH:MM:SS" string refining the creation time.
time: Option<String>,
// Optional series id this post belongs to.
series: Option<String>,
recommended: Option<bool>,
}
// Metadata recoverable from a post's file name alone (`YYYY-MM-DD-slug`).
#[derive(Debug)]
pub struct PostDirMetadata {
pub date: NaiveDate,
pub slug: String,
}
impl PostDirMetadata {
// Parses `YYYY-MM-DD-slug` out of the file stem.
// Errors when the stem doesn't match or the date is not a real calendar day.
pub fn from_path(path: &Utf8Path) -> Result<Self> {
// Compiled once and reused across calls.
lazy_static! {
static ref RE: Regex = Regex::new(r"^(\d{4})-(\d{2})-(\d{2})-(\S+)$").unwrap();
}
let captures = RE
.captures(path.file_stem().expect("should have a file name"))
.ok_or_else(|| eyre!("Failed to parse post path: {}", path))?;
Ok(Self {
// The \d{n} groups guarantee the parses below cannot fail.
date: NaiveDate::from_ymd_opt(
captures[1].parse().unwrap(),
captures[2].parse().unwrap(),
captures[3].parse().unwrap(),
)
// from_ymd_opt rejects impossible dates like 2022-13-40.
.ok_or_else(|| eyre!("Post has invalid ymd: {}", path))?,
slug: captures[4].to_string(),
})
}
// Canonical URL for the post: /blog/YYYY/MM/DD/slug/
pub fn to_url(&self) -> Result<SiteUrl> {
SiteUrl::parse(&format!(
"/blog/{}/{}/",
self.date.format("%Y/%m/%d"),
self.slug
))
}
}
/// Parse a front matter time string, accepting "HH:MM:SS" or "HH:MM".
fn parse_time(s: &str) -> Result<NaiveTime> {
    // The format is chosen purely by string length, exactly as before.
    let format = match s.len() {
        8 => "%H:%M:%S",
        5 => "%H:%M",
        other => return Err(eyre!("Wrong time length {}", other)),
    };
    NaiveTime::parse_from_str(s, format)
        .map_err(|err| eyre!("Failed to parse time: `{}`: {}", s, err))
}
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use super::*;
use crate::tests::*;
use crate::{item::RenderContext, site::SiteContext};
use scraper::{node::Element, Html, Selector};
// Parses a real post from the test-site fixture and checks front matter,
// tag escaping, derived timestamps, and URL/output-path construction.
#[test]
fn test_post_from_string() -> Result<()> {
let path = "posts/2022-01-31-test_post.markdown";
let content = fs::read_to_string(PathBuf::from("test-site").join(path))?;
// let (path, content) = tests::raw_post1();
let post = PostItem::from_string(
path.into(),
content,
// Arbitrary mtime passed in as `updated`; distinct from `created`.
NaiveDateTime::new(
NaiveDate::from_ymd_opt(2022, 4, 30).unwrap(),
NaiveTime::from_hms_opt(1, 2, 3).unwrap(),
),
)?;
assert_eq!(post.title, "Post & Title");
assert_eq!(
post.tags,
vec![
Tag {
id: "Tag1".to_string(),
name: "Tag1".to_string(),
url: SiteUrl::parse("/blog/tags/tag1").unwrap(),
},
Tag {
id: "<Tag> 2".to_string(),
name: "<Tag> 2".to_string(),
url: SiteUrl::parse("/blog/tags/tag_2").unwrap(),
}
]
);
// Date comes from the file name; time from the `time:` front matter field.
assert_eq!(
post.created,
NaiveDateTime::new(
NaiveDate::from_ymd_opt(2022, 1, 31).unwrap(),
NaiveTime::from_hms_opt(7, 7, 0).unwrap()
)
);
assert!(post.raw_content.contains("# Header 1"));
assert_eq!(post.url.path(), "/blog/2022/01/31/test_post/");
assert_eq!(post.url.href(), "/blog/2022/01/31/test_post/");
assert_eq!(
post.output_file(".output".into()),
".output/blog/2022/01/31/test_post/index.html"
);
Ok(())
}
// First match of a CSS selector → its inner HTML, or None if absent.
fn select_inner_html(document: &Html, selectors: &str) -> Option<String> {
Some(
document
.select(&Selector::parse(selectors).unwrap())
.next()?
.inner_html(),
)
}
// First match of a CSS selector → its element node (for attribute access).
fn select_element<'a>(document: &'a Html, selectors: &str) -> Option<&'a Element> {
Some(
document
.select(&Selector::parse(selectors).unwrap())
.next()?
.value(),
)
}
// A post's PostRef must be reconstructible from its path alone.
#[test]
fn postref() -> Result<()> {
let test_site = TestSiteBuilder {
include_drafts: false,
}
.build()?;
let post = test_site
.find_post("2022-01-31-test_post.markdown")
.unwrap();
assert_eq!(
post.post_ref(),
PostRef::from_path("/posts/2022-01-31-test_post".into()).unwrap(),
);
Ok(())
}
// End-to-end render of a post through Tera, checking escaping, metadata
// tags, typography transforms, and syntax highlighting in the HTML output.
#[test]
fn test_render_post() -> Result<()> {
let test_site = TestSiteBuilder {
include_drafts: false,
}
.build()?;
let post = test_site
.find_post("2022-01-31-test_post.markdown")
.unwrap();
let rendered = post.render_to_string(&RenderContext {
parent_context: &Context::from_serialize(SiteContext::new(false)).unwrap(),
output_dir: ".test_output".into(), // Not used
tera: tests::templates(),
content: &test_site.site.content,
})?;
let document = Html::parse_document(&rendered);
// Element selection returns decoded < and >, so we make extra sure it's escaped properly
// in the raw document.
assert!(!rendered.contains("<Tag>"));
assert_eq!(
select_element(&document, r#"meta[name="keywords"]"#)
.unwrap()
.attr("content"),
Some("Tag1, <Tag> 2")
);
assert_eq!(
select_inner_html(&document, "title").unwrap(),
"Jonas Hietala: Post & Title"
);
assert!(rendered
.contains("<h1><a href=\"/blog/2022/01/31/test_post/\">Post & Title</a></h1>"));
assert!(rendered.contains("<time datetime=\"2022-01-31\">January 31, 2022</time>"));
assert!(rendered.contains(
r##"<h2 id="header-1"><a class="heading-ref" href="#header-1">Header 1</a></h2>"##
));
assert!(rendered.contains(r#"<iframe src="//www.youtube.com/embed/eoKDyhxCVm0""#));
assert!(rendered.contains("☃︎"));
// Smart punctuation: em/en dashes, ellipsis, curly quotes.
assert!(rendered.contains("—and–some…"));
assert!(rendered.contains("“Auto quote” ‘A’"));
assert!(rendered.contains(
"Posted in <a href=\"/blog/tags/tag1\">Tag1</a>, <a href=\"/blog/tags/tag_2\"><Tag> 2</a>"
));
// Just make sure that code is highlighted
let rust_code = select_inner_html(&document, r#"pre code.rust"#).unwrap();
assert!(rust_code.contains("<span class=\"storage type rust\">let</span> x "));
// FIXME git commit
Ok(())
}
}
|
#![allow(unused_variables)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(dead_code)]
use std::fs::File;
use std::io::prelude::*;
use std::env;
use std::rc::Rc;
pub use crate::configuration::Configuration;
pub use crate::globalstate::GlobalState;
pub use crate::instructions::Instruction;
pub use crate::instructionobject::InstructionObject;
pub use crate::lookuptable::LookupTable;
mod configuration;
mod globalstate;
mod instructions;
mod instructionobject;
mod lookuptable;
//macro to build ref counted boxed instruction objects
// Expands each expression argument into Rc<Instruction> wrapping a boxed
// closure: Rc::new(Instruction::new(Box::new(expr))).
// NOTE(review): the repetition emits expansions back-to-back with no
// separator, so this only produces a valid expression when called with
// exactly one argument — which is how every call site below uses it.
macro_rules! function_object {
($($x: expr),*) => {{
$(
Rc::new(instructions::Instruction::new(Box::new($x)))
)*
}}
}
/// Read the entire contents of `file` into a `String`.
///
/// Returns `None` (after printing a diagnostic) when the file cannot be
/// opened or read. The original opened the file manually, cloned the path
/// needlessly, and `unwrap()`ed the read — so a file that opened but failed
/// mid-read would panic instead of returning `None`.
fn parse_file(file: String) -> Option<String> {
    match std::fs::read_to_string(&file) {
        Ok(content) => Some(content),
        Err(error) => {
            println!("Error opening file {}: {}", file, error);
            None
        }
    }
}
/// Populate the instruction lookup table with the opcode handlers.
///
/// Each handler is a closure over `GlobalState` wrapped by `function_object!`
/// into an `Rc<Instruction>`. Only 'i' and 'd' are registered so far; the
/// remaining handlers are built but unused pending the TODOs below.
fn build_lookup_table(lookup: &mut LookupTable, config: &Configuration) {
    // (Removed the dead `nop` closure and the needless `mut`s — the bindings
    // below are never mutated, only cloned into the table.)
    // Increment/decrement the char in the current data cell, saturating to '\0'
    // when the shifted code point is not a valid char.
    let increment = function_object!(|state: &mut GlobalState| {state.data[state.current_index] = std::char::from_u32(state.data[state.current_index] as u32 + 1).unwrap_or(0 as char)});
    let decrement = function_object!(|state: &mut GlobalState| {state.data[state.current_index] = std::char::from_u32(state.data[state.current_index] as u32 - 1).unwrap_or(0 as char)});
    // Move the data pointer, clamped to the 9-cell tape [0, 8].
    let register_forward = function_object!(|state: &mut GlobalState| {if state.current_index < 8 { state.current_index+= 1}});
    let register_back = function_object!(|state: &mut GlobalState| { if state.current_index > 0 {state.current_index-= 1}});
    let print_register = function_object!(|state: &mut GlobalState| {println!("{}", state.data[state.current_index])});
    // Load the current cell into the X/Y comparison registers.
    let load_x_register = function_object!(|state: &mut GlobalState| {state.x_register = state.data[state.current_index]});
    let load_y_register = function_object!(|state: &mut GlobalState| {state.y_register = state.data[state.current_index]});
    let register_check = function_object!(|state: &mut GlobalState| {state.register_check_passed = state.y_register == state.x_register});
    let inverted_register_check = function_object!(|state: &mut GlobalState| {state.register_check_passed = state.y_register != state.x_register});
    //TODO: Implement probability gate instructions/logic
    //TODO: Implement function parser instruction/logic
    // Each opcode gets a high-probability primary action and a low-probability
    // inverse; the remainder of the probability mass is the implicit NOP.
    let min_prob = config.min_prob;
    let high_end_probability = config.max_prob - (min_prob + config.nop_prob);
    lookup.add_instruction(('i', InstructionObject::new(config.clone(), (high_end_probability, Rc::clone(&increment)), (min_prob, Rc::clone(&decrement)))));
    lookup.add_instruction(('d', InstructionObject::new(config.clone(), (high_end_probability, Rc::clone(&decrement)), (min_prob, Rc::clone(&increment)))));
}
// Smoke-test driver: builds the lookup table and exercises the 'i' opcode
// with hand-picked roll values to hit each probability band.
fn main() {
//let scriptdata = parse_file(env::args().nth(1).expect("No script given"));
let config = configuration::Configuration::new();
let mut global_state = globalstate::GlobalState::new();
let mut lookup_table = lookuptable::LookupTable::new();
build_lookup_table(&mut lookup_table, &config);
let mut op = lookup_table.fetch_instruction('i');
match op {
Some(x) =>
{
// call_fn args: (state, roll, ?, ?) — the roll selects which branch
// (primary / inverse / NOP) fires, per the trailing comments.
// NOTE(review): the meaning of the last two arguments isn't visible
// here — confirm against InstructionObject::call_fn.
println!("{}", global_state.data[0]); //A
x.call_fn(&mut global_state, 125, 0, 1.0); //inc -> B
println!("{}", global_state.data[0]); //B
x.call_fn(&mut global_state, 9, 0, 1.0); // NOP -> B
println!("{}", global_state.data[0]);//B
x.call_fn(&mut global_state, 12, 0, 1.0); //dec -> A
println!("{}", global_state.data[0]);//A
x.call_fn(&mut global_state, 125, 0, 1.0); //inc -> B
x.call_fn(&mut global_state, 125, 0, 1.0); //inc -> C
println!("{}", global_state.data[0]);//C
}
None =>
{
// 'i' was never registered — indicates build_lookup_table changed.
println!("Error!");
}
}
}
|
use date_time::{date_tuple::DateTuple, time_tuple::TimeTuple};
use log::error;
use serenity::{
framework::standard::{macros::command, CommandResult},
model::channel::Message,
prelude::*,
};
#[command]
#[description = "Display the date in format: `14:47 | 28 May 2020`."]
fn date(ctx: &mut Context, msg: &Message) -> CommandResult {
    let date = DateTuple::today().to_readable_string(); // dd mmm yyyy
    let time = TimeTuple::now().to_hhmm_string(); // 00:00, 24-hour time
    // Single formatted allocation instead of the old chained
    // `time + &" | ".to_string() + &date` concatenation.
    let date = format!("{} | {}", time, date); // example output: 14:47 | 28 May 2020
    // Send to the invoking channel; failures are logged, not propagated.
    if let Err(why) = msg.channel_id.say(&ctx.http, date) {
        error!("Error sending message: {:?}", why);
    }
    Ok(())
}
|
use maat_graphics::math;
use maat_graphics::cgmath::{Vector2, Vector3};
use maat_graphics::cgmath::InnerSpace;
use maat_graphics::cgmath::Zero;
use crate::modules::Boid;
/// Run one frame of neighbourhood bookkeeping for every boid.
///
/// A snapshot of the flock is taken first so each boid computes against the
/// same pre-update state. `delta_time` is only used by the steering steps
/// below, which are currently disabled.
pub fn boid_collision(boids: &mut Vec<Boid>, delta_time: f32) {
    let snapshot = boids.clone();
    for idx in 0..boids.len() {
        boid_math(idx, boids, &snapshot);
    }
    /*
    for i in 0..boids.len() {
    let vision_boids = boids_in_vision(i, boids, &clone_boids);
    if vision_boids.len() == 0 {
    continue;
    }
    seperation(i, boids, &vision_boids, delta_time);
    alignment(i, boids, &vision_boids, delta_time);
    cohesion(i, boids, &vision_boids, delta_time);
    }*/
}
fn boid_math(i: usize, boids: &mut Vec<Boid>, clone_boids: &Vec<Boid>) {
boids[i].mut_local_boids().num_boids = 0;
boids[i].mut_local_boids().average_dir_heading = Vector2::zero();
boids[i].mut_local_boids().center = Vector2::zero();
boids[i].mut_local_boids().average_seperation_heading = Vector2::zero();
for j in 0..clone_boids.len() {
if i == j {
continue;
}
let offset = clone_boids[j].pos() - boids[i].pos();
let sqr_dist = offset.x * offset.x + offset.y * offset.y;
if (sqr_dist < boids[i].vision_radius() * boids[i].vision_radius()) {
boids[i].mut_local_boids().num_boids += 1;
boids[i].mut_local_boids().average_dir_heading += clone_boids[j].direction();
boids[i].mut_local_boids().center += clone_boids[j].pos();
if sqr_dist < boids[i].avoid_radius() * boids[i].avoid_radius() {
boids[i].mut_local_boids().average_seperation_heading -= offset / sqr_dist;
}
}
}
}
/// Collect clones of every other boid inside boid `i`'s vision circle.
fn boids_in_vision(i: usize, boids: &mut Vec<Boid>, clone_boids: &Vec<Boid>) -> Vec<Boid> {
    // Circle encoded as (x, y, radius) via Vector2::extend.
    let vision_circle = boids[i].pos().extend(boids[i].vision_radius());
    clone_boids
        .iter()
        .enumerate()
        .filter(|&(j, _)| j != i) // skip self
        .filter(|(_, other)| math::is_point_inside_circle(other.pos(), vision_circle))
        .map(|(_, other)| other.clone())
        .collect()
}
// Steer boid `i` away from its visible neighbours.
// Turns only when the away-vector is within ±90° of the current heading;
// outside that window the boid keeps flying straight.
fn seperation(i: usize, boids: &mut Vec<Boid>, vision_boids: &Vec<Boid>, delta_time: f32) {
// Sum of unit vectors pointing from this boid towards each neighbour.
let mut total_vector = Vector2::new(0.0, 0.0);
for j in 0..vision_boids.len() {
let vector = vision_boids[j].pos() - boids[i].pos();
total_vector += math::normalise_vector2(vector);
}
// Normalise, then flip so the result points AWAY from the neighbours.
total_vector = math::normalise_vector2(total_vector);
total_vector *= -1.0;
// Current heading as a unit vector; the +90° offset maps the boid's angle
// convention onto screen axes (same convention as cohesion).
let player_vector = math::normalise_vector2(
Vector2::new(
1.0 * math::to_radians(boids[i].angle()+90.0).cos(),
1.0 * math::to_radians(boids[i].angle()+90.0).sin()
));
// Signed angle between heading and escape direction, in degrees.
let angle = math::to_degrees(player_vector.angle(total_vector).0);
if angle < 0.0 && angle > -90.0 {
boids[i].turn_left(delta_time);
} else if angle > 0.0 && angle < 90.0 {
boids[i].turn_right(delta_time);
}
}
/// Steer boid `i` towards the average heading of its visible neighbours.
fn alignment(i: usize, boids: &mut Vec<Boid>, vision_boids: &Vec<Boid>, delta_time: f32) {
    // Guard: with no neighbours the average below divided by zero, yielding
    // NaN; `angle() < NaN` is false, so the boid silently turned left every
    // frame. Do nothing instead.
    if vision_boids.is_empty() {
        return;
    }
    let mut average_angle = 0.0;
    for boid in vision_boids {
        average_angle += boid.angle();
    }
    average_angle /= vision_boids.len() as f32;
    if boids[i].angle() < average_angle {
        boids[i].turn_right(delta_time);
    } else {
        boids[i].turn_left(delta_time);
    }
}
/// Steer boid `i` towards the centre of mass of its visible neighbours.
fn cohesion(i: usize, boids: &mut Vec<Boid>, vision_boids: &Vec<Boid>, delta_time: f32) {
    // Guard against division by zero (NaN averages) when nothing is visible.
    if vision_boids.is_empty() {
        return;
    }
    let mut average_x = 0.0;
    let mut average_y = 0.0;
    for boid in vision_boids {
        average_x += boid.pos().x;
        average_y += boid.pos().y;
    }
    average_x /= vision_boids.len() as f32;
    average_y /= vision_boids.len() as f32;
    // Vector from the neighbours' centre of mass towards this boid.
    let unit_vector = math::normalise_vector2(boids[i].pos() - Vector2::new(average_x, average_y));
    // BUGFIX: the heading angle must be converted to radians before cos/sin,
    // matching the identical heading computation in `seperation`; the original
    // passed the degree value straight to cos()/sin().
    let x_angle = math::to_radians(boids[i].angle() + 90.0).cos(); // -1 to 1
    let y_angle = math::to_radians(boids[i].angle() + 90.0).sin(); // -1 to 1
    let unit_angle = Vector2::new(x_angle, y_angle);
    let angle = unit_angle.angle(unit_vector);
    if math::to_degrees(angle.0) <= 0.0 {
        boids[i].turn_left(delta_time);
    } else {
        boids[i].turn_right(delta_time);
    }
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::{ensure, format_err, Result};
use config::NodeConfig;
use crypto::{hash::PlainCryptoHash, HashValue};
use logger::prelude::*;
use starcoin_accumulator::{
node::AccumulatorStoreType, Accumulator, AccumulatorTreeStore, MerkleAccumulator,
};
use starcoin_open_block::OpenedBlock;
use starcoin_state_api::{ChainState, ChainStateReader, ChainStateWriter};
use starcoin_statedb::ChainStateDB;
use std::iter::Extend;
use std::time::{SystemTime, UNIX_EPOCH};
use std::{convert::TryInto, marker::PhantomData, sync::Arc};
use storage::Store;
use traits::{ChainReader, ChainWriter, Consensus, ExcludedTxns};
use types::{
account_address::AccountAddress,
accumulator_info::AccumulatorInfo,
block::{
Block, BlockHeader, BlockInfo, BlockNumber, BlockState, BlockTemplate,
ALLOWED_FUTURE_BLOCKTIME,
},
error::BlockExecutorError,
transaction::{SignedUserTransaction, Transaction, TransactionInfo},
U256,
};
// A view of the chain rooted at a particular head block: the head itself,
// the transaction/block Merkle accumulators, and a state DB positioned at
// the head's state root. `C` selects the consensus algorithm at the type
// level (no runtime state — see `phantom`).
pub struct BlockChain<C, S>
where
C: Consensus,
S: Store + 'static,
{
config: Arc<NodeConfig>,
// Accumulator over all transactions executed on this chain.
txn_accumulator: MerkleAccumulator,
// Accumulator over block ids; leaf index == block number.
block_accumulator: MerkleAccumulator,
head: Block,
// State DB opened at the head block's state root.
chain_state: ChainStateDB,
storage: Arc<S>,
// Marks the consensus type parameter as logically used.
phantom: PhantomData<C>,
}
impl<C, S> BlockChain<C, S>
where
C: Consensus,
S: Store,
{
pub fn new(
config: Arc<NodeConfig>,
head_block_hash: HashValue,
storage: Arc<S>,
) -> Result<Self> {
let head = storage
.get_block_by_hash(head_block_hash)?
.ok_or_else(|| format_err!("Can not find block by hash {:?}", head_block_hash))?;
let block_info = storage
.get_block_info(head_block_hash)?
.ok_or_else(|| format_err!("Can not find block info by hash {:?}", head_block_hash))?;
debug!("Init chain with block_info: {:?}", block_info);
let state_root = head.header().state_root();
let txn_accumulator_info = block_info.get_txn_accumulator_info();
let block_accumulator_info = block_info.get_block_accumulator_info();
let chain = Self {
config,
txn_accumulator: info_2_accumulator(
txn_accumulator_info,
AccumulatorStoreType::Transaction,
storage.clone(),
)?,
block_accumulator: info_2_accumulator(
block_accumulator_info.clone(),
AccumulatorStoreType::Block,
storage.clone(),
)?,
head,
chain_state: ChainStateDB::new(storage.clone(), Some(state_root)),
storage,
phantom: PhantomData,
};
Ok(chain)
}
pub fn new_chain(&self, head_block_hash: HashValue) -> Result<Self> {
Self::new(self.config.clone(), head_block_hash, self.storage.clone())
}
pub fn save_block(&self, block: &Block, block_state: BlockState) {
if let Err(e) = self.storage.commit_block(block.clone(), block_state) {
error!("save block {:?} failed : {:?}", block.id(), e);
}
}
fn get_block_info(&self, block_id: HashValue) -> Result<BlockInfo> {
Ok(self
.storage
.get_block_info(block_id)?
.ok_or_else(|| format_err!("Can not find block info by hash {}", block_id))?)
}
pub fn save_block_info(&self, block_info: BlockInfo) {
let block_id = *block_info.block_id();
if let Err(e) = self.storage.save_block_info(block_info) {
error!("save block info {:?} failed : {:?}", block_id, e);
}
}
pub fn create_block_template_inner(
&self,
author: AccountAddress,
auth_key_prefix: Option<Vec<u8>>,
previous_header: BlockHeader,
user_txns: Vec<SignedUserTransaction>,
) -> Result<(BlockTemplate, ExcludedTxns)> {
let mut opened_block = OpenedBlock::new(
self.storage.clone(),
previous_header,
self.config.miner.block_gas_limit,
author,
auth_key_prefix,
)?;
let excluded_txns = opened_block.push_txns(user_txns)?;
let template = opened_block.finalize()?;
Ok((template, excluded_txns))
}
/// Resolve a canonical block number to its hash via the block accumulator.
/// (Removed the commented-out head-height shortcut that was dead code.)
fn find_block_by_number(&self, number: u64) -> Result<HashValue> {
    self.block_accumulator
        .get_leaf(number)?
        .ok_or_else(|| format_err!("Can not find block by number {}", number))
}
/// Whether the block identified by `block_id` sits at height `block_num`
/// on the canonical chain (i.e. its hash matches the accumulator leaf).
pub fn block_exist_by_number(
    &self,
    block_id: HashValue,
    block_num: BlockNumber,
) -> Result<bool> {
    match self.get_header_by_number(block_num)? {
        Some(block_header) if block_id == block_header.id() => Ok(true),
        Some(block_header) => {
            debug!(
                "block id miss match {:?} : {:?}",
                block_id,
                block_header.id()
            );
            Ok(false)
        }
        None => Ok(false),
    }
}
pub fn append_block(
&mut self,
block_id: HashValue,
block_accumulator_info: AccumulatorInfo,
) -> Result<()> {
self.block_accumulator.append(&[block_id])?;
self.block_accumulator.flush()?;
let pivot_block_accumulator_info: AccumulatorInfo = (&self.block_accumulator).try_into()?;
assert_eq!(block_accumulator_info, pivot_block_accumulator_info);
debug!("save pivot {:?} succ.", block_id);
Ok(())
}
/// Clone of the shared storage handle.
pub fn get_storage(&self) -> Arc<S> {
self.storage.clone()
}
}
impl<C, S> ChainReader for BlockChain<C, S>
where
    C: Consensus,
    S: Store,
{
    /// Clone of the current head block.
    fn head_block(&self) -> Block {
        self.head.clone()
    }

    /// Clone of the current head block's header.
    fn current_header(&self) -> BlockHeader {
        self.head.header().clone()
    }

    /// Header for `hash`, or `None` when the block is unknown or not on
    /// the canonical chain.
    fn get_header(&self, hash: HashValue) -> Result<Option<BlockHeader>> {
        Ok(self.get_block(hash)?.map(|block| block.header().clone()))
    }

    /// Header of the canonical block at height `number`.
    fn get_header_by_number(&self, number: BlockNumber) -> Result<Option<BlockHeader>> {
        let block_id = self.find_block_by_number(number)?;
        self.storage.get_block_header_by_hash(block_id)
    }

    /// Canonical block at height `number`.
    fn get_block_by_number(&self, number: BlockNumber) -> Result<Option<Block>> {
        let block_id = self.find_block_by_number(number)?;
        self.storage.get_block_by_hash(block_id)
    }

    /// Up to `count` blocks walking backwards from height `number`
    /// (head height when `None`), stopping at genesis. Returns an empty
    /// vector when `count == 0`.
    fn get_blocks_by_number(&self, number: Option<BlockNumber>, count: u64) -> Result<Vec<Block>> {
        let mut block_vec = vec![];
        // Fix: the original loop always pushed one block and then ran
        // `tmp_count -= 1`, which underflows u64 when `count == 0`
        // (panic in debug builds, near-infinite loop in release).
        if count == 0 {
            return Ok(block_vec);
        }
        let mut current_num = match number {
            None => self.current_header().number(),
            Some(number) => number,
        };
        let mut tmp_count = count;
        loop {
            let block = self
                .get_block_by_number(current_num)?
                .ok_or_else(|| format_err!("Can not find block by number {}", current_num))?;
            block_vec.push(block);
            if current_num == 0 || tmp_count == 1 {
                break;
            }
            current_num -= 1;
            tmp_count -= 1;
        }
        Ok(block_vec)
    }

    /// Block for `hash`, but only when it is on the canonical chain
    /// (its hash matches the accumulator leaf at its height).
    fn get_block(&self, hash: HashValue) -> Result<Option<Block>> {
        match self.storage.get_block_by_hash(hash)? {
            Some(b) => {
                let on_chain = self.block_exist_by_number(b.header().id(), b.header().number())?;
                if on_chain {
                    return Ok(Some(b));
                }
            }
            None => {
                debug!("Get block {:?} from storage return none.", hash);
            }
        }
        Ok(None)
    }

    /// Transaction lookup by hash.
    fn get_transaction(&self, txn_hash: HashValue) -> Result<Option<Transaction>> {
        self.storage.get_transaction(txn_hash)
    }

    /// Transaction info at txn-accumulator leaf `version`.
    fn get_transaction_info_by_version(&self, version: u64) -> Result<Option<TransactionInfo>> {
        match self.txn_accumulator.get_leaf(version)? {
            None => Ok(None),
            Some(hash) => self.storage.get_transaction_info(hash),
        }
    }

    /// Build a block template on the given parent (head when `None`).
    ///
    /// # Panics
    ///
    /// Asserts that the chosen parent block exists on this chain.
    fn create_block_template(
        &self,
        author: AccountAddress,
        auth_key_prefix: Option<Vec<u8>>,
        parent_hash: Option<HashValue>,
        user_txns: Vec<SignedUserTransaction>,
    ) -> Result<(BlockTemplate, ExcludedTxns)> {
        let block_id = match parent_hash {
            Some(hash) => hash,
            None => self.current_header().id(),
        };
        assert!(self.exist_block(block_id));
        let previous_header = self
            .get_header(block_id)?
            // Fix: error message was missing "not".
            .ok_or_else(|| format_err!("Can not find block header by {:?}", block_id))?;
        self.create_block_template_inner(author, auth_key_prefix, previous_header, user_txns)
    }

    /// Read-only view of the chain state at the current head.
    fn chain_state_reader(&self) -> &dyn ChainStateReader {
        &self.chain_state
    }

    /// `BlockInfo` for `block_id` (head when `None`).
    fn get_block_info(&self, block_id: Option<HashValue>) -> Result<Option<BlockInfo>> {
        let id = match block_id {
            Some(hash) => hash,
            None => self.current_header().id(),
        };
        self.storage.get_block_info(id)
    }

    /// Total difficulty at the head; zero when the head has no block info.
    fn get_total_difficulty(&self) -> Result<U256> {
        let block_info = self.storage.get_block_info(self.head.header().id())?;
        Ok(block_info.map_or(U256::zero(), |info| info.total_difficulty))
    }

    /// Whether `block_id` is a canonical block on this chain; any storage
    /// error is treated as "does not exist".
    fn exist_block(&self, block_id: HashValue) -> bool {
        if let Ok(Some(header)) = self.storage.get_block_header_by_hash(block_id) {
            if let Ok(exist) = self.block_exist_by_number(block_id, header.number()) {
                return exist;
            }
        }
        false
    }
}
impl<C, S> BlockChain<C, S>
where
C: Consensus,
S: Store,
{
/// Persist a block's transactions and their execution infos.
///
/// Stores, in order: the block -> txn-id index, the transactions
/// themselves, the block -> txn-info-id index, and the txn infos.
/// Requires exactly one `TransactionInfo` per transaction.
fn save(
&mut self,
block_id: HashValue,
transactions: Vec<Transaction>,
txn_infos: Vec<TransactionInfo>,
) -> Result<()> {
ensure!(
transactions.len() == txn_infos.len(),
"block txns' length should be equal to txn infos' length"
);
let txn_id_vec = transactions
.iter()
.cloned()
.map(|user_txn| user_txn.id())
.collect::<Vec<HashValue>>();
// save block's transactions
self.storage.save_block_transactions(block_id, txn_id_vec)?;
// save transactions
self.storage.save_transaction_batch(transactions)?;
// Txn infos are keyed by their crypto hash.
let txn_info_ids: Vec<_> = txn_infos.iter().map(|info| info.crypto_hash()).collect();
self.storage
.save_block_txn_info_ids(block_id, txn_info_ids)?;
self.storage.save_transaction_infos(txn_infos)?;
Ok(())
}
}
impl<C, S> ChainWriter for BlockChain<C, S>
where
C: Consensus,
S: Store,
{
/// Verify and execute `block` on top of the current head, committing it
/// as the new head on success.
///
/// Checks, in order: parent linkage (asserts the block extends the
/// head), timestamp window, declared gas limit, consensus header
/// verification (returns `Ok(false)`, not an error, on failure),
/// execution state root, gas used, txn-info count, and txn accumulator
/// root. Only after all checks pass are the accumulators and state
/// flushed and the block data persisted.
fn apply(&mut self, block: Block) -> Result<bool> {
let header = block.header();
let pre_hash = header.parent_hash();
// The block must directly extend the current head.
assert_eq!(self.head.header().id(), pre_hash);
// do not check genesis block timestamp check
if let Some(pre_block) = self.get_block(pre_hash)? {
ensure!(
pre_block.header().timestamp() <= header.timestamp(),
"Invalid block: block timestamp too old"
);
let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs();
// Reject blocks too far in the future relative to the local clock.
ensure!(
header.timestamp() <= ALLOWED_FUTURE_BLOCKTIME + now,
"Invalid block: block timestamp too new"
);
}
ensure!(
block.header().gas_used() <= block.header().gas_limit(),
"invalid block: gas_used should not greater than gas_limit"
);
// Consensus failure is reported as Ok(false), not as an Err.
if let Err(e) = C::verify(self.config.clone(), self, header) {
error!("verify header failed : {:?}", e);
return Ok(false);
}
// Prepend the block-metadata transaction to the user transactions.
let txns = {
let block_metadata = header.clone().into_metadata();
let mut t = vec![Transaction::BlockMetadata(block_metadata)];
t.extend(
block
.transactions()
.iter()
.cloned()
.map(Transaction::UserTransaction),
);
t
};
let (state_root, vec_transaction_info) =
executor::block_execute(&self.chain_state, txns.clone(), block.header().gas_limit())?;
// The executed state root must match the header's claim.
assert_eq!(
block.header().state_root(),
state_root,
"verify block:{:?} state_root fail.",
block.header().id()
);
let block_gas_used = vec_transaction_info
.iter()
.fold(0u64, |acc, i| acc + i.gas_used());
ensure!(
block_gas_used == block.header().gas_used(),
"invalid block: gas_used is not match"
);
ensure!(
vec_transaction_info.len() == txns.len(),
"invalid txn num in the block"
);
// txn accumulator verify.
let executed_accumulator_root = {
let included_txn_info_hashes: Vec<_> = vec_transaction_info
.iter()
.map(|info| info.crypto_hash())
.collect();
let (accumulator_root, _first_leaf_idx) =
self.txn_accumulator.append(&included_txn_info_hashes)?;
accumulator_root
};
ensure!(
executed_accumulator_root == block.header().accumulator_root(),
"verify block: txn accumulator root mismatch"
);
// If chain state is matched, and accumulator is matched,
// then, we save flush states, and save block data.
self.txn_accumulator
.flush()
.map_err(|_err| BlockExecutorError::BlockAccumulatorFlushErr)?;
self.chain_state
.flush()
.map_err(BlockExecutorError::BlockChainStateErr)?;
// Cumulative difficulty = parent's total + this header's difficulty.
let total_difficulty = {
let pre_total_difficulty = self
.get_block_info(block.header().parent_hash())?
.total_difficulty;
pre_total_difficulty + header.difficulty()
};
self.block_accumulator.append(&[block.id()])?;
self.block_accumulator.flush()?;
let txn_accumulator_info: AccumulatorInfo = (&self.txn_accumulator).try_into()?;
let block_accumulator_info: AccumulatorInfo = (&self.block_accumulator).try_into()?;
let block_info = BlockInfo::new_with_accumulator_info(
header.id(),
txn_accumulator_info,
block_accumulator_info,
total_difficulty,
);
// save block's transaction relationship and save transaction
self.save(header.id(), txns, vec_transaction_info)?;
self.commit(block.clone(), block_info, BlockState::Executed)?;
Ok(true)
}
/// Persist `block` and its info, advance the in-memory head, and rebase
/// the chain state onto the new head's state root.
fn commit(
&mut self,
block: Block,
block_info: BlockInfo,
block_state: BlockState,
) -> Result<()> {
let block_id = block.id();
self.save_block(&block, block_state);
self.head = block;
self.save_block_info(block_info);
// Re-open the state DB at the new head's state root.
self.chain_state =
ChainStateDB::new(self.storage.clone(), Some(self.head.header().state_root()));
debug!("save block {:?} succ.", block_id);
Ok(())
}
/// Chain state at the current head.
fn chain_state(&mut self) -> &dyn ChainState {
&self.chain_state
}
}
/// Reconstruct a `MerkleAccumulator` from a persisted `AccumulatorInfo`
/// snapshot (root, frozen subtree roots, leaf and node counts), backed by
/// `node_store`.
pub(crate) fn info_2_accumulator(
accumulator_info: AccumulatorInfo,
store_type: AccumulatorStoreType,
node_store: Arc<dyn AccumulatorTreeStore>,
) -> Result<MerkleAccumulator> {
MerkleAccumulator::new(
*accumulator_info.get_accumulator_root(),
accumulator_info.get_frozen_subtree_roots().clone(),
accumulator_info.get_num_leaves(),
accumulator_info.get_num_nodes(),
store_type,
node_store,
)
}
|
use crate::Server;
use log::info;
use notify_rust;
use notify_rust::{Notification, NotificationHandle};
use std::{error::Error, io};
/// Dotfiles importer: couples the persisted sync `state` with the user's
/// `config`.
pub struct Importer {
// Persisted state (initialization flag, suggested files, differences).
pub state: State,
// User configuration loaded from settings.
pub config: Config,
}
pub mod config;
mod link;
mod sync;
use config::Config;
pub mod state;
use state::State;
impl Importer {
/// Build an importer from persisted state and settings-derived config.
pub fn new() -> Result<Importer, Box<dyn Error>> {
let state = State::get()?;
let config = Config::from_settings()?;
Ok(Importer { state, config })
}
/// Build an importer with an explicit config; state is still loaded
/// from its persisted location.
pub fn from_config(config: Config) -> Result<Importer, Box<dyn Error>> {
let state = State::get()?;
Ok(Importer { state, config })
}
/// Run forever: sync + notify, then service server messages.
/// Only returns when an iteration errors.
pub fn listen(&mut self) -> Result<(), Box<dyn Error>> {
let server = Server::new()?;
loop {
self.sync_and_notify()?;
server.check_messages_for_300(self)?;
}
}
/// Show a desktop notification with the given body text (also logged).
pub fn notify(&self, body: &str) -> notify_rust::error::Result<NotificationHandle> {
info!("Notify: {}", body);
Notification::new()
.summary("Dotfiles Importer")
.body(body)
.show()
}
/// One-time initialization: back up existing files, link everything,
/// then record the initialized state. No-op when already initialized.
///
/// If linking fails, a restore from backup is attempted; a restore
/// failure takes precedence over the original linking error.
pub fn setup(&mut self) -> Result<(), Box<dyn Error>> {
if !self.state.initialized {
info!("Setting up...");
self.backup().map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("Could not backup files: {}", e),
)
})?;
self.link_all().map_err(|e| {
info!("Could not link files: {}", e);
// Try to roll back; keep the restore error if the rollback fails too.
let err = self
.restore()
.map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("Could not restore from backup: {}", e),
)
})
.err();
if let Some(err) = err {
return err;
} else {
return io::Error::new(io::ErrorKind::Other, format!("Linking failed: {}", e));
}
})?;
self.intitialize_mapped()?;
self.state.suggested_files = vec![];
self.state.differences = vec![];
// NOTE(review): state is saved twice — once before and once after
// setting `initialized`. Presumably the first save persists the
// cleared lists even if the second fails; confirm one save isn't enough.
self.state.save()?;
self.state.initialized = true;
self.state.save()?;
return Ok(());
}
info!("Already setup");
Ok(())
}
}
|
/// Generates floats (f64) between 0.0 (inclusive) and 1.0 (exclusive).
///
/// Uses [Xorshift](https://en.wikipedia.org/wiki/Xorshift)
pub struct PseudorandomFloatGenerator {
    // Internal xorshift32 state; must never be zero (zero is a fixed
    // point of the xorshift step).
    state: u32,
}

impl PseudorandomFloatGenerator {
    /// Creates a generator seeded with `seed`.
    ///
    /// Fix: xorshift maps a zero state to zero forever, so `new(0)`
    /// previously produced a generator that only ever returned 0.0.
    /// A zero seed is now remapped to an arbitrary nonzero constant.
    pub fn new(seed: u32) -> PseudorandomFloatGenerator {
        PseudorandomFloatGenerator {
            // 0x9E3779B9 is the 32-bit golden-ratio constant; any
            // nonzero value would do.
            state: if seed == 0 { 0x9E37_79B9 } else { seed },
        }
    }

    /// Returns the next pseudorandom float in [0.0, 1.0).
    pub fn next(&mut self) -> f64 {
        // xorshift32 step: https://en.wikipedia.org/wiki/Xorshift
        let mut x = self.state;
        x ^= x << 13;
        x ^= x >> 17;
        x ^= x << 5;
        self.state = x;
        // Keep the top 16 bits and divide by 2^16 for a float in [0, 1).
        (x >> 16) as f64 / 65536.0
    }
}
|
/// Toy IP address type: V4 as four numeric octets, V6 as a raw string.
#[derive(Debug)]
enum Ipaddress {
// NOTE(review): octets stored as i32; real octets fit in u8 — kept as-is
// since callers pattern-match on this shape.
Ipaddrv4(i32,i32,i32,i32),
Ipaddrv6(String)
}
/// Hand-rolled re-implementation of the standard `Option` type, defined
/// for demonstration. This shadows the *type name* `Option` in this
/// module, but the prelude's bare `Some` constructor (used in `main`)
/// still refers to the standard library's variant.
#[derive(Debug)]
enum Option<T>{
Some(T),
None
}
/// Demonstrates enums: builds both address variants, dispatches on the
/// V4 value, and exercises the hand-rolled `Option`.
fn main() {
    let v4 = Ipaddress::Ipaddrv4(0, 0, 0, 0);
    // Fix: "::!" was a typo for the IPv6 loopback literal "::1". The
    // value is only constructed (never printed), so output is unchanged;
    // the leading underscore acknowledges it is intentionally unused.
    let _v6 = Ipaddress::Ipaddrv6(String::from("::1"));
    println!("{:?}", v4);
    println!("This address:: {}", which_address(v4));
    println!("This enum:: {:?}", handle_null(Option::Some(3)));
    // `Some` here is the std prelude constructor, not the local enum's.
    let some_value = Some(4);
    if let Some(3) = some_value {
        println!("this is a test");
    }
}
/// Maps an address variant to a numeric tag: 1 for V4, 2 for V6.
/// Prints a confirmation line for the V4 arm as a side effect.
fn which_address(ipaddress: Ipaddress) -> u32 {
    match ipaddress {
        Ipaddress::Ipaddrv4(_, _, _, _) => {
            // Fix: message typo "Compete" -> "Complete".
            println!("Complete");
            1
        }
        Ipaddress::Ipaddrv6(_) => 2,
    }
}
/// Increments the wrapped value of the custom `Option`, passing `None`
/// through unchanged (equivalent to `map(|i| i + 1)` on the std type).
fn handle_null(option: Option<i32>)->Option<i32>{
match option {
Option::Some(i) => Option::Some(i+1),
Option::None => Option::None,
}
}
|
use std::process::Command;
/// Runs a platform-appropriate command and prints its stdout.
///
/// On Windows this echoes "hello" via `cmd /C`; elsewhere it runs `ls`
/// in the current directory. (Removed the commented-out exploratory
/// variants that were dead code.)
fn main() {
    let output = if cfg!(target_os = "windows") {
        Command::new("cmd")
            .args(&["/C", "echo hello"])
            .output()
            .expect("failed to execute process")
    } else {
        Command::new("ls")
            .output()
            .expect("failed to execute process")
    };
    // stdout is raw bytes; lossy conversion tolerates invalid UTF-8.
    let hello = output.stdout;
    println!("{:?}", String::from_utf8_lossy(&hello));
}
|
extern crate iref;
use std::convert::TryInto;
use iref::{Iri, IriRef, IriRefBuf};
/// Demonstrates `iref` conversions: an empty IRI reference becomes a
/// valid IRI once a scheme is set, and an IRI converts back into an IRI
/// reference infallibly.
fn main() -> Result<(), iref::Error> {
let mut iri_ref = IriRefBuf::default(); // an IRI reference can be empty.
// An IRI reference with a scheme is a valid IRI.
iri_ref.set_scheme(Some("https".try_into()?));
let iri: Iri = iri_ref.as_iri()?;
// An IRI can be safely converted into an IRI reference.
let _iri_ref: IriRef = iri.into();
Ok(())
}
|
//! Based on http://www.gc-forever.com/yagcd/chap14.html#sec14.1
use encoding_rs::{UTF_8, SHIFT_JIS};
use failure::{err_msg, Error};
// Image tiling: the banner image is stored as 4x4-pixel tiles,
// ROWS x COLUMNS tiles covering the whole 96x32 image.
const COLUMNS: usize = 24;
const ROWS: usize = 8;
const PIXELS_PER_COLUMN: usize = 4;
const PIXELS_PER_ROW: usize = 4;
const WIDTH: usize = 96;
const HEIGHT: usize = 32;
// Pixel formats: 4 bytes per pixel (RGBA) in memory, 2 bytes (A1RGB5)
// on disk.
const UNCOMPRESSED_BYTES_PER_PIXEL: usize = 4;
const COMPRESSED_BYTES_PER_PIXEL: usize = 2;
const UNCOMPRESSED_IMAGE_SIZE: usize = WIDTH * HEIGHT * UNCOMPRESSED_BYTES_PER_PIXEL;
const COMPRESSED_IMAGE_SIZE: usize = WIDTH * HEIGHT * COMPRESSED_BYTES_PER_PIXEL;
// Fixed sizes of the NUL-padded text fields.
const SHORT_TEXT_LEN: usize = 0x20;
const LONG_TEXT_LEN: usize = 0x40;
const DESCRIPTION_LEN: usize = 0x80;
const MAGIC_LEN: usize = 4;
// On-disk layout: magic at 0, image at 0x20, then the text fields
// back-to-back.
const OFFSET_IMAGE: usize = 0x20;
const OFFSET_GAME_NAME: usize = OFFSET_IMAGE + COMPRESSED_IMAGE_SIZE;
const OFFSET_DEVELOPER_NAME: usize = OFFSET_GAME_NAME + SHORT_TEXT_LEN;
const OFFSET_FULL_GAME_NAME: usize = OFFSET_DEVELOPER_NAME + SHORT_TEXT_LEN;
const OFFSET_FULL_DEVELOPER_NAME: usize = OFFSET_FULL_GAME_NAME + LONG_TEXT_LEN;
const OFFSET_GAME_DESCRIPTION: usize = OFFSET_FULL_DEVELOPER_NAME + LONG_TEXT_LEN;
const BANNER_LEN: usize = OFFSET_GAME_DESCRIPTION + DESCRIPTION_LEN;
/// In-memory GameCube banner: magic, the image decompressed to linear
/// RGBA, and the five text fields.
pub struct Banner {
/// Magic bytes; `parse` accepts only `b"BNR1"` or `b"BNR2"`.
pub magic: [u8; MAGIC_LEN],
/// Linear RGBA pixels, WIDTH x HEIGHT x 4 bytes.
pub image: [u8; UNCOMPRESSED_IMAGE_SIZE],
pub game_name: String,
pub developer_name: String,
pub full_game_name: String,
pub full_developer_name: String,
pub game_description: String,
}
/// Decodes one big-endian A1RGB5 pixel (2 bytes) into an RGBA quadruple.
fn a1rgb5_to_rgba(v: &[u8]) -> [u8; 4] {
    // Bit layout across the two bytes: ARRRRRGG GGGBBBBB
    let hi = v[0];
    let lo = v[1];
    let alpha = hi >> 7;
    let red5 = (hi >> 2) & 0b11111;
    let green5 = ((hi & 0b11) << 3) | (lo >> 5);
    let blue5 = lo & 0b11111;
    // Expand each 5-bit channel to the full 0..=255 range.
    let expand = |c: u8| (c as f32 * (255.0 / 31.0)) as u8;
    [expand(red5), expand(green5), expand(blue5), alpha * 255]
}
/// Encodes an RGBA quadruple into one big-endian A1RGB5 pixel (2 bytes).
fn rgba_to_a1rgb5(v: &[u8]) -> [u8; 2] {
    // Quantize each 8-bit color channel to 5 bits (rounded); alpha
    // becomes a single on/off bit.
    let quantize = |c: u8| (c as f32 * (31.0 / 255.0)).round() as u8;
    let (r5, g5, b5) = (quantize(v[0]), quantize(v[1]), quantize(v[2]));
    let a1 = (v[3] >= 128) as u8;
    // Bit layout across the two bytes: ARRRRRGG GGGBBBBB
    let hi = (a1 << 7) | (r5 << 2) | (g5 >> 3);
    let lo = (g5 << 5) | b5;
    [hi, lo]
}
/// Decodes a NUL-terminated fixed-width text field as Shift-JIS
/// (Japanese banners) or UTF-8, erroring on undecodable bytes.
fn read_string(is_japanese: bool, bytes: &[u8]) -> Result<String, Error> {
// Stop at the first NUL; the remainder of the field is padding.
let end = bytes.iter().position(|&x| x == 0).unwrap_or(bytes.len());
let bytes = &bytes[..end];
let encoding = if is_japanese { SHIFT_JIS } else { UTF_8 };
Ok(encoding
.decode_without_bom_handling_and_without_replacement(bytes)
.ok_or_else(|| err_msg("Couldn't parse string"))?
.into_owned())
}
/// Encodes `text` into the fixed-width field `bytes` as Shift-JIS
/// (Japanese banners) or UTF-8.
///
/// NOTE(review): the encoder's result is discarded, so text longer than
/// the field is silently truncated — confirm callers validate lengths.
fn write_string(is_japanese: bool, text: &str, bytes: &mut [u8]) {
let encoding = if is_japanese { SHIFT_JIS } else { UTF_8 };
encoding.new_encoder().encode_from_utf8(text, bytes, true);
}
impl Banner {
    /// Parses a raw banner file.
    ///
    /// The on-disk image is A1RGB5 arranged as 4x4-pixel tiles (ROWS x
    /// COLUMNS tiles, row-major); it is decoded here into a linear RGBA
    /// buffer. Text fields are decoded as Shift-JIS when `is_japanese`,
    /// UTF-8 otherwise.
    ///
    /// # Errors
    ///
    /// Returns an error when the magic is not `BNR1`/`BNR2` or a text
    /// field cannot be decoded. (The magic check previously `panic!`ed;
    /// a malformed input file is a recoverable condition in a function
    /// that already returns `Result`, so it now returns `Err`.)
    ///
    /// # Panics
    ///
    /// NOTE(review): slicing still panics when `data` is shorter than a
    /// full banner — callers must supply at least `BANNER_LEN` bytes.
    pub fn parse(is_japanese: bool, data: &[u8]) -> Result<Self, Error> {
        let mut magic = [0; MAGIC_LEN];
        magic.copy_from_slice(&data[..MAGIC_LEN]);
        if &magic != b"BNR1" && &magic != b"BNR2" {
            return Err(err_msg("Invalid Banner File"));
        }
        let image_data = &data[OFFSET_IMAGE..][..COMPRESSED_IMAGE_SIZE];
        let mut rgba_image = [0; UNCOMPRESSED_IMAGE_SIZE];
        let mut image_data = image_data.chunks(COMPRESSED_BYTES_PER_PIXEL);
        // Walk the tiled layout: tiles row-major, then pixels within a tile.
        for row in 0..ROWS {
            let row_y = row * PIXELS_PER_ROW;
            for column in 0..COLUMNS {
                let column_x = column * PIXELS_PER_COLUMN;
                for y in 0..PIXELS_PER_ROW {
                    let y = row_y + y;
                    for x in 0..PIXELS_PER_COLUMN {
                        let x = column_x + x;
                        let pixel_index = UNCOMPRESSED_BYTES_PER_PIXEL * (y * WIDTH + x);
                        let dst = &mut rgba_image[pixel_index..][..UNCOMPRESSED_BYTES_PER_PIXEL];
                        dst.copy_from_slice(&a1rgb5_to_rgba(image_data.next().unwrap()));
                    }
                }
            }
        }
        let game_name = read_string(is_japanese, &data[OFFSET_GAME_NAME..][..SHORT_TEXT_LEN])?;
        let developer_name = read_string(
            is_japanese,
            &data[OFFSET_DEVELOPER_NAME..][..SHORT_TEXT_LEN],
        )?;
        let full_game_name =
            read_string(is_japanese, &data[OFFSET_FULL_GAME_NAME..][..LONG_TEXT_LEN])?;
        let full_developer_name = read_string(
            is_japanese,
            &data[OFFSET_FULL_DEVELOPER_NAME..][..LONG_TEXT_LEN],
        )?;
        let game_description = read_string(
            is_japanese,
            &data[OFFSET_GAME_DESCRIPTION..][..DESCRIPTION_LEN],
        )?;
        Ok(Self {
            magic,
            image: rgba_image,
            game_name,
            developer_name,
            full_game_name,
            full_developer_name,
            game_description,
        })
    }

    /// Serializes the banner back to its on-disk representation,
    /// re-tiling and re-encoding the RGBA image to A1RGB5 and writing the
    /// text fields at their fixed offsets.
    pub fn to_bytes(&self, is_japanese: bool) -> [u8; BANNER_LEN] {
        let mut data = [0; BANNER_LEN];
        data[..MAGIC_LEN].copy_from_slice(&self.magic);
        {
            let image_data = &mut data[OFFSET_IMAGE..][..COMPRESSED_IMAGE_SIZE];
            let mut image_data = image_data.chunks_mut(COMPRESSED_BYTES_PER_PIXEL);
            // Inverse of `parse`: emit pixels in tile order.
            for row in 0..ROWS {
                let row_y = row * PIXELS_PER_ROW;
                for column in 0..COLUMNS {
                    let column_x = column * PIXELS_PER_COLUMN;
                    for y in 0..PIXELS_PER_ROW {
                        let y = row_y + y;
                        for x in 0..PIXELS_PER_COLUMN {
                            let x = column_x + x;
                            let pixel_index = UNCOMPRESSED_BYTES_PER_PIXEL * (y * WIDTH + x);
                            let src = &self.image[pixel_index..];
                            image_data
                                .next()
                                .unwrap()
                                .copy_from_slice(&rgba_to_a1rgb5(src));
                        }
                    }
                }
            }
        }
        write_string(
            is_japanese,
            &self.game_name,
            &mut data[OFFSET_GAME_NAME..][..SHORT_TEXT_LEN],
        );
        write_string(
            is_japanese,
            &self.developer_name,
            &mut data[OFFSET_DEVELOPER_NAME..][..SHORT_TEXT_LEN],
        );
        write_string(
            is_japanese,
            &self.full_game_name,
            &mut data[OFFSET_FULL_GAME_NAME..][..LONG_TEXT_LEN],
        );
        write_string(
            is_japanese,
            &self.full_developer_name,
            &mut data[OFFSET_FULL_DEVELOPER_NAME..][..LONG_TEXT_LEN],
        );
        write_string(
            is_japanese,
            &self.game_description,
            &mut data[OFFSET_GAME_DESCRIPTION..][..DESCRIPTION_LEN],
        );
        data
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.