text stringlengths 8 4.13M |
|---|
use std::borrow::Cow;
use std::error::Error;
use std::fmt;
/// A minimal error type that carries only a human-readable message.
///
/// The message is stored as a [`Cow`] so both `&'a str` and owned
/// `String` messages are accepted without forcing an allocation.
#[derive(Debug, PartialEq)]
pub struct JustTextError<'a> {
    message: Cow<'a, str>,
}

impl<'a> JustTextError<'a> {
    /// Builds a new error from anything convertible into a `Cow<str>`.
    pub fn new<S>(message: S) -> Self
    where
        S: Into<Cow<'a, str>>,
    {
        Self {
            message: message.into(),
        }
    }
}

impl fmt::Display for JustTextError<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // The display form is exactly the stored message.
        f.write_str(&self.message)
    }
}

impl Error for JustTextError<'_> {}
|
// error-pattern:explicit failure
// NOTE(review): this is a pre-1.0 Rust compiler-test snippet (circa 2011):
// `import` instead of `use`, `alt` instead of `match`, `fail` instead of
// `panic!`, and bracketed type arguments like `some[int]`. It cannot compile
// with any modern rustc and is preserved verbatim as historical data.
use std;
import std::option::*;
// `str` was a by-value string type in this era.
fn foo(s: str) { }
fn main() {
// Every `alt` arm calls `fail`, matching the expected "explicit failure".
let i = alt some[int](3) { none[int]. { fail } some[int](_) { fail } };
foo(i);
} |
///
/// The general trait to process opcodes.
///
/// ## A NOTE ABOUT CERTAIN OPCODES
/// There are actually 2 different Chip8 specifications which behave differently
/// for certain opcodes. These specifications go by many different names, but
/// here the more popular and well-known will be called the "CowGod" specification
/// (after the website from which many have come to know it) while the other
/// will be referred to as the "Legacy" specification.
/// Note that in this code base the opcodes are processed via the "CowGod"
/// specification by default.
pub trait OpcodeExecuter {
    // Instruction process functions
    ///
    /// Sets the display buffer to all 0 and redraws the screen.
    fn clear_screen(&mut self);
    ///
    /// Pops the top value off the stack and puts it into the program counter.
    fn ret(&mut self);
    ///
    /// Jumps to ```addr```.
    fn jump(&mut self, addr: u16);
    ///
    /// Stores the current program counter in the stack and jumps to ```addr```.
    fn call(&mut self, addr: u16);
    ///
    /// If the value in register ```register``` is equal to ```byte```, then
    /// the program counter is incremented, skipping over the next instruction.
    fn skip_if_equal_const(&mut self, register: usize, byte: u8);
    ///
    /// If the value in register ```register``` is NOT equal to ```byte```, then
    /// the program counter is incremented, skipping over the next instruction.
    fn skip_if_unequal_const(&mut self, register: usize, byte: u8);
    ///
    /// If the value in register ```register1``` is equal to the value in register ```register2```
    /// then the program counter is incremented, skipping over the next instruction.
    fn skip_if_equal_reg(&mut self, register1: usize, register2: usize);
    ///
    /// Sets the value of register ```register``` to the value ```byte```.
    fn load_const(&mut self, register: usize, byte: u8);
    ///
    /// Adds the value ```byte``` to the value in register ```register``` and
    /// stores the new value into ```register```.
    fn add_const(&mut self, register: usize, byte: u8);
    ///
    /// Copies the value in register ```reg``` into register ```acc```.
    fn load_register(&mut self, acc: usize, reg: usize);
    ///
    /// Sets the value in register ```acc``` to itself bitwise-ORed with the
    /// value in register ```reg```.
    fn or_register(&mut self, acc: usize, reg: usize);
    ///
    /// Sets the value in register ```acc``` to itself bitwise-ANDed with the
    /// value in register ```reg```.
    fn and_register(&mut self, acc: usize, reg: usize);
    ///
    /// Sets the value in register ```acc``` to itself bitwise-XORed with the
    /// value in register ```reg```.
    fn xor_register(&mut self, acc: usize, reg: usize);
    /// Sets the value in register ```acc``` to itself plus the
    /// value in register ```reg```.
    ///
    /// Register ```0xF``` is then set to 1 if the add caused an overflow, 0 otherwise.
    fn add_register(&mut self, acc: usize, reg: usize);
    /// Sets the value in register ```acc``` to itself minus the
    /// value in register ```reg```.
    ///
    /// Register ```0xF``` is then set to 0 if the subtraction caused an underflow, 1 otherwise.
    fn sub_register(&mut self, acc: usize, reg: usize);
    ///
    /// In the "CowGod" spec the value in register ```0xF``` (15) is
    /// set to the lowest bit of the value of register ```acc``` and then the value
    /// in register ```acc``` is divided by 2.
    /// The "Legacy" spec states that instead of register ```acc```,
    /// the value in register ```0xF``` is set to the lowest bit of the value in
    /// register ```reg```, the value of ```reg``` is divided by 2, and the freshly
    /// divided number is then copied into register ```acc```.
    fn right_shift_register(&mut self, acc: usize, reg: usize);
    /// Sets the value in register ```acc``` to
    /// value in register ```reg``` minus itself.
    ///
    /// Register ```0xF``` is then set to 0 if the subtraction caused an underflow, 1 otherwise.
    fn rev_sub_register(&mut self, acc: usize, reg: usize);
    ///
    /// In the "CowGod" spec the value in register ```0xF``` (15) is
    /// set to 1 if the highest bit of the value of register ```acc``` is 1, and 0 if it isn't.
    /// Then the value in register ```acc``` is multiplied by 2 (shifted left).
    /// The "Legacy" spec states that instead of register ```acc```,
    /// the value in register ```0xF``` is set to 1 if the highest bit of the value in
    /// register ```reg``` is 1 and 0 otherwise. The value of ```reg``` is then
    /// multiplied by 2, and the freshly shifted number is then copied into register ```acc```.
    fn left_shift_register(&mut self, acc: usize, reg: usize);
    ///
    /// If the value in register ```register1``` is NOT equal to the value in register ```register2```
    /// then the program counter is incremented, skipping over the next instruction.
    fn skip_if_unequal_reg(&mut self, register1: usize, register2: usize);
    ///
    /// Sets the value in the address pointer ```I``` to the constant value
    /// ```addr```.
    fn load_addr_const(&mut self, addr: u16);
    ///
    /// Sets the program counter to the value in register ```0x0```
    /// plus the value ```addr```.
    fn add_jump_v0(&mut self, addr_offset: u16);
    ///
    /// Sets the value stored in register ```reg``` to a pseudo-random number
    /// which is then bitwise-ANDed with ```mask```.
    fn randomize(&mut self, reg: usize, mask: u8);
    ///
    /// Draws a sprite whose top left corner starts offset from the left
    /// edge by the value stored in ```xreg``` pixels, offset from the top
    /// by the value stored in ```yreg``` pixel, whose width is 8 pixels
    /// and whose height is equal to ```length```.
    ///
    /// The sprite is drawn by reading ```length``` bytes of memory, starting
    /// at the address pointer ```I```. Each byte represents a row of pixels,
    /// and each bit within the byte represents a single pixel; for example,
    /// if we wanted to draw a box 8 pixels wide and 5 high in the bottom right corner,
    /// we would set the value in ```xreg``` to 55 (63 - 8 for the box), the value
    /// in ```yreg``` to 26 (31 - 5 for the height), and length to 5. We would
    /// then make sure that ```self.I[0 .. 5]``` is equal to ```[0xFF, 0x81, 0x81, 0x81, 0xFF]```,
    /// since ```0xFF``` has all bits set to 1 and ```0x81``` has only the outer bits set to 1.
    fn draw_sprite(&mut self, xreg: usize, yreg: usize, length: u8);
    ///
    /// Skips the next instruction if the key whose value is stored in ```reg``` is pressed;
    /// otherwise do nothing.
    fn skip_if_key_pressed(&mut self, reg: usize);
    ///
    /// Skips the next instruction if the key whose value is stored in ```reg``` is NOT pressed;
    /// otherwise do nothing.
    fn skip_if_key_not_pressed(&mut self, reg: usize);
    ///
    /// Sets the value in register ```reg``` to the current number stored in the
    /// external, non-audio timer.
    fn load_timer(&mut self, reg: usize);
    ///
    /// Pauses all CPU instructions until a key is pressed. The value of this
    /// key is then stored into register ```reg```.
    fn wait_for_key(&mut self, reg: usize);
    ///
    /// Sets the value in the external timer to the value stored in register ```reg```.
    fn set_timer(&mut self, reg: usize);
    ///
    /// Sets the audio timer to the value stored in register ```reg```.
    fn set_audio(&mut self, reg: usize);
    ///
    /// Adds the value stored in register ```reg``` to the address pointer ```I```.
    fn add_addr_reg(&mut self, reg: usize);
    ///
    /// Sets the address pointer ```I``` to 5 * the value stored in register ```reg```.
    /// The chip 8 is set to automatically load in a simplistic fontset for the hex
    /// characters ```0 - F```, in order, at memory locations ```0x000 - 0x050```
    /// (16 characters of 5 bytes each), meaning after this instruction ```I```
    /// will point to the sprite corresponding to the value stored in ```reg```.
    fn set_addr_to_char(&mut self, reg: usize);
    ///
    /// Stores the binary-coded-decimal representation of the value in register
    /// ```reg``` at the memory locations ```I```, ```I + 1``` and ```I + 2```
    /// (the hundreds, tens and ones digits respectively). The address pointer
    /// itself is not modified.
    fn store_digits(&mut self, reg: usize);
    ///
    /// Stores the values currently stored in registers ```0``` to ```reg```,
    /// inclusive, at a block of memory starting at the address
    /// pointer ```I```.
    ///
    /// In the ```LEGACY``` spec ```I``` is then incremented by ```reg + 1```,
    /// which is the first byte *after* the stored values.
    fn save_registers(&mut self, reg: usize);
    ///
    /// Sets the values in registers ```0``` to ```reg``` to the values at
    /// memory location ```self.I + 0```, ```self.I + 1```, ... ```self.I + reg```,
    /// where ```I``` is the CPU's 16-bit address pointer.
    ///
    /// In the "Legacy" spec, the address pointer is then increased by ```reg + 1```.
    fn restore_registers(&mut self, reg: usize);
    // Once per frame helper functions
    ///
    /// If necessary, decrements the values in the audio timer and other timer
    /// registers.
    fn tick(&mut self, delta_t_in_ns: u64);
    ///
    /// Gets the value of the next instruction to be passed to ```process_instruction```.
    /// Runs at the beginning of the clock cycle.
    fn get_next_instr(&self) -> u16;
    ///
    /// Ran at the end of the clock cycle. Does things like increment the program
    /// counter, etc.
    fn end_frame(&mut self);
    // Misc management functions
    ///
    /// Halts the CPU and stops processing all instruction.
    /// The regular Chip8 does not have this as a specific instruction, but
    /// this may be called in the case of a bad opcode value being read.
    fn die(&mut self);
    ///
    /// Checks if the CPU has had the method ```die()``` called.
    /// Not used in the virtual Chip8 itself, but by the emulator to close
    /// the program if the CPU hangs.
    fn has_died(&self) -> bool;
    ///
    /// Loads a ROM into memory. The rom will be placed into memory offset
    /// by ```0x200```, or 512, due to the default fontset; in effect, this means
    /// that ```self.memory[0x200] = rom[0]```, ```self.memory[0x200 + 1] = rom[1]```, etc.
    fn load_rom(&mut self, rom: &[u8]);
    ///
    /// Resets the CPU to its initial state.
    /// All registers, timers, stack values, and screen buffer values will be set to 0.
    /// The memory will be cleared except for the default fontset at the beginning of the memory.
    fn reset(&mut self);
    ///
    /// Decodes a raw opcode value into its constituent parts,
    /// figures out what function call and parameters that corresponds to,
    /// and calls the function with the correct parameters.
    fn process_instruction(&mut self, op: u16) {
        if op == 0 {
            return;
        }
        else if op == 0x00E0 {
            self.clear_screen()
        }
        else if op == 0x00EE {
            self.ret()
        }
        else if op & 0xF000 == 0 {
            error_log!("GOT A ZERO OP: {:#X}", op);
        }
        else if op & 0xF000 == 0x1000 {
            self.jump(addr(op))
        }
        else if op & 0xF000 == 0x2000 {
            self.call(addr(op))
        }
        else if op & 0xF000 == 0x3000 {
            self.skip_if_equal_const(acc(op), num(op))
        }
        else if op & 0xF000 == 0x4000 {
            self.skip_if_unequal_const(acc(op), num(op))
        }
        else if op & 0xF00F == 0x5000 {
            // 5XY0 is only valid when the low nibble is 0, so mask with
            // 0xF00F — this matches the 9XY0 handling below. Invalid 5XYN
            // opcodes now fall through to the bad-opcode branch instead of
            // being silently executed as skip_if_equal_reg.
            self.skip_if_equal_reg(acc(op), reg(op))
        }
        else if op & 0xF000 == 0x6000 {
            self.load_const(acc(op), num(op))
        }
        else if op & 0xF000 == 0x7000 {
            self.add_const(acc(op), num(op))
        }
        else if op & 0xF00F == 0x8000 {
            self.load_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8001 {
            self.or_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8002 {
            self.and_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8003 {
            self.xor_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8004 {
            self.add_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8005 {
            self.sub_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8006 {
            self.right_shift_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x8007 {
            self.rev_sub_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x800E {
            self.left_shift_register(acc(op), reg(op))
        }
        else if op & 0xF00F == 0x9000 {
            self.skip_if_unequal_reg(acc(op), reg(op))
        }
        else if op & 0xF000 == 0xA000 {
            self.load_addr_const(addr(op))
        }
        else if op & 0xF000 == 0xB000 {
            self.add_jump_v0(addr(op))
        }
        else if op & 0xF000 == 0xC000 {
            self.randomize(acc(op), num(op))
        }
        else if op & 0xF000 == 0xD000 {
            // DXYN: the low nibble is the sprite height in rows.
            let rows = (op & 0x000F) as u8;
            self.draw_sprite(acc(op), reg(op), rows)
        }
        else if op & 0xF0FF == 0xE09E {
            self.skip_if_key_pressed(acc(op))
        }
        else if op & 0xF0FF == 0xE0A1 {
            self.skip_if_key_not_pressed(acc(op))
        }
        else if op & 0xF0FF == 0xF007 {
            self.load_timer(acc(op))
        }
        else if op & 0xF0FF == 0xF00A {
            self.wait_for_key(acc(op))
        }
        else if op & 0xF0FF == 0xF015 {
            self.set_timer(acc(op))
        }
        else if op & 0xF0FF == 0xF018 {
            self.set_audio(acc(op))
        }
        else if op & 0xF0FF == 0xF01E {
            self.add_addr_reg(acc(op))
        }
        else if op & 0xF0FF == 0xF029 {
            self.set_addr_to_char(acc(op))
        }
        else if op & 0xF0FF == 0xF033 {
            self.store_digits(acc(op))
        }
        else if op & 0xF0FF == 0xF055 {
            self.save_registers(acc(op))
        }
        else if op & 0xF0FF == 0xF065 {
            self.restore_registers(acc(op))
        }
        else {
            // Unknown opcode: log and halt the virtual CPU.
            error_log!("BAD OPCODE: {:#X}", op);
            self.die();
        }
    }
}
///
/// An enum to set the CPU's instruction set between COWGOD, which is more
/// popular, and LEGACY, which is official.
///
/// Derives are added so the chosen specification can be copied around,
/// compared, and printed in debug/log output.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InstructionSet {
    LEGACY,
    COWGOD,
}
/// Extracts the 12-bit address operand (low three nibbles) of an opcode.
#[inline(always)]
fn addr(instruction: u16) -> u16 {
    let target = instruction & 0x0FFF;
    // CHIP-8 instructions are two bytes wide, so an odd jump target is
    // suspicious — log it for debugging.
    if target % 2 == 1 {
        debug_log!("Got odd jump!");
    }
    target
}
/// Extracts the second nibble of an opcode (the "X" register index).
#[inline(always)]
fn acc(instruction: u16) -> usize {
    usize::from((instruction >> 8) & 0x000F)
}
/// Extracts the third nibble of an opcode (the "Y" register index).
#[inline(always)]
fn reg(instruction: u16) -> usize {
    usize::from((instruction >> 4) & 0x000F)
}
/// Extracts the low byte of an opcode (the 8-bit immediate operand).
#[inline(always)]
fn num(instruction: u16) -> u8 {
    // Truncating `u16 -> u8` keeps exactly the low 8 bits,
    // identical to masking with 0x00FF.
    instruction as u8
}
/// Texture sampling filter: nearest-neighbour or linear interpolation.
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]
pub enum FilterMode {
    Nearest,
    Linear,
}
/// Complete texture filtering configuration (minification, magnification,
/// and optional mipmapping).
#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]
pub struct Filter {
    /// Minification filter (texture rendered smaller than native size).
    pub min: FilterMode,
    /// Magnification filter (texture rendered larger than native size).
    pub mag: FilterMode,
    /// Mipmap interpolation mode; `None` disables mipmapping entirely.
    pub mipmap: Option<FilterMode>,
}
impl Filter {
pub fn new(min: FilterMode, mag: FilterMode, mipmap: Option<FilterMode>) -> Self {
Self { min, mag, mipmap }
}
pub(crate) fn to_min_flag(&self) -> u32 {
match (self.min, self.mipmap) {
(FilterMode::Nearest, None) => glow::NEAREST,
(FilterMode::Linear, None) => glow::LINEAR,
(FilterMode::Nearest, Some(FilterMode::Nearest)) => glow::NEAREST_MIPMAP_NEAREST,
(FilterMode::Linear, Some(FilterMode::Nearest)) => glow::LINEAR_MIPMAP_NEAREST,
(FilterMode::Nearest, Some(FilterMode::Linear)) => glow::NEAREST_MIPMAP_LINEAR,
(FilterMode::Linear, Some(FilterMode::Linear)) => glow::LINEAR_MIPMAP_LINEAR,
}
}
pub(crate) fn to_mag_flag(&self) -> u32 {
match self.mag {
FilterMode::Nearest => glow::NEAREST,
FilterMode::Linear => glow::LINEAR,
}
}
}
impl Default for Filter {
fn default() -> Self {
Self::new(FilterMode::Nearest, FilterMode::Linear, Some(FilterMode::Linear))
}
}
|
use rodio::{Decoder, OutputStream, source::Source};
use std::fs::File;
use std::io::BufReader;
// Plays the given .ogg file on the primary (default) sound device.
//
// NOTE(review): every step uses `unwrap()`, so a missing file, bad audio
// device, or undecodable stream will panic; playback is also capped at a
// fixed 2 seconds regardless of the clip's length — confirm whether a
// Sink with sleep_until_end() is wanted instead.
pub fn play_ogg(ogg_file:&str){
// Grab a handle to the default audio output device.
let (_stream, stream_handle) = OutputStream::try_default().unwrap();
// Open the .ogg file for buffered reading.
let ogg_file = BufReader::new(File::open(ogg_file).unwrap());
// Decode the ogg stream from the BufReader.
let source = Decoder::new(ogg_file).unwrap();
// Play the decoded samples through the device handle obtained above.
stream_handle.play_raw(source.convert_samples()).unwrap();
// Keep the main thread alive while sound plays on a different thread.
std::thread::sleep(std::time::Duration::from_secs(2));
} |
use rocket_include_static_resources::StaticResponse;
/// Returns the on-disk path of the bundled favicon resource.
pub fn favicon_dir() -> std::string::String {
    String::from("src/static_content/favicon.ico")
}
/// Returns the on-disk path of the bundled index page resource.
pub fn index_dir() -> std::string::String {
    String::from("src/static_content/index.html")
}
/// Serves the pre-registered "index_page" static resource at the site root.
#[get("/")]
pub fn index() -> StaticResponse {
    static_response!("index_page")
}
/// Serves the pre-registered "favicon" static resource at /favicon.ico.
#[get("/favicon.ico")]
pub fn favicon() -> StaticResponse {
    static_response!("favicon")
}
|
//!
//! methods to directly interact with the bdev layer
use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
use colored_json::prelude::*;
use tonic::Status;
use rpc::mayastor::{BdevShareRequest, BdevUri, CreateReply, Null};
use crate::context::Context;
/// Dispatches a parsed `bdev` subcommand to its matching handler, returning
/// `NOT_FOUND` for any unrecognised command name.
pub async fn handler(
    ctx: Context,
    matches: &ArgMatches<'_>,
) -> Result<(), Status> {
    match matches.subcommand() {
        ("create", Some(args)) => create(ctx, args).await,
        ("destroy", Some(args)) => destroy(ctx, args).await,
        ("list", Some(args)) => list(ctx, args).await,
        ("share", Some(args)) => share(ctx, args).await,
        ("unshare", Some(args)) => unshare(ctx, args).await,
        (other, _) => {
            Err(Status::not_found(format!("command {} does not exist", other)))
        }
    }
}
/// Builds the `bdev` command tree. Subcommands are attached in the same
/// order as before so the generated help output is unchanged.
pub fn subcommands<'a, 'b>() -> App<'a, 'b> {
    let protocol = Arg::with_name("protocol")
        .short("p")
        .help("the protocol to used to share the given bdev")
        .required(false)
        .possible_values(&["nvmf", "iscsi"])
        .default_value("nvmf");
    SubCommand::with_name("bdev")
        .settings(&[
            AppSettings::SubcommandRequiredElseHelp,
            AppSettings::ColoredHelp,
            AppSettings::ColorAlways,
        ])
        .about("Block device management")
        .subcommand(SubCommand::with_name("list").about("List all bdevs"))
        .subcommand(
            SubCommand::with_name("share")
                .about("share the given bdev")
                .arg(Arg::with_name("name").required(true).index(1))
                .arg(protocol),
        )
        .subcommand(
            SubCommand::with_name("unshare")
                .about("unshare the given bdev")
                .arg(Arg::with_name("name").required(true).index(1)),
        )
        .subcommand(
            SubCommand::with_name("create")
                .about("Create a new bdev by specifying a URI")
                .arg(Arg::with_name("uri").required(true).index(1)),
        )
        .subcommand(
            SubCommand::with_name("destroy")
                .about("destroy the given bdev")
                .arg(Arg::with_name("name").required(true).index(1)),
        )
}
/// Lists all bdevs and pretty-prints the reply as colored JSON.
async fn list(mut ctx: Context, _args: &ArgMatches<'_>) -> Result<(), Status> {
    let reply = ctx.bdev.list(Null {}).await?.into_inner();
    let json = serde_json::to_string_pretty(&reply).unwrap();
    println!("{}", json.to_colored_json_auto().unwrap());
    Ok(())
}
/// Creates a bdev from the URI given on the command line and pretty-prints
/// the reply as colored JSON.
async fn create(mut ctx: Context, args: &ArgMatches<'_>) -> Result<(), Status> {
    let uri = args.value_of("uri").unwrap().to_owned();
    let reply = ctx.bdev.create(BdevUri { uri }).await?.into_inner();
    let json = serde_json::to_string_pretty(&reply).unwrap();
    println!("{}", json.to_colored_json_auto().unwrap());
    Ok(())
}
/// Destroys the named bdev. The bdev is looked up in the current listing
/// (to resolve its URI), unshared first, then destroyed; the destroy reply
/// is pretty-printed as colored JSON.
async fn destroy(
    mut ctx: Context,
    args: &ArgMatches<'_>,
) -> Result<(), Status> {
    let name = args.value_of("name").unwrap().to_owned();
    let listing = ctx.bdev.list(Null {}).await?.into_inner();
    match listing.bdevs.iter().find(|b| b.name == name) {
        Some(bdev) => {
            let uri = bdev.uri.clone();
            // Make sure the bdev is unshared before destroying it.
            let _ = ctx.bdev.unshare(CreateReply { name }).await?;
            let reply = ctx.bdev.destroy(BdevUri { uri }).await?.into_inner();
            let json = serde_json::to_string_pretty(&reply).unwrap();
            println!("{}", json.to_colored_json_auto().unwrap());
            Ok(())
        }
        None => Err(Status::not_found(name)),
    }
}
/// Shares the named bdev over the requested protocol and pretty-prints
/// the reply as colored JSON.
async fn share(mut ctx: Context, args: &ArgMatches<'_>) -> Result<(), Status> {
    let request = BdevShareRequest {
        name: args.value_of("name").unwrap().to_owned(),
        proto: args.value_of("protocol").unwrap().to_owned(),
    };
    let reply = ctx.bdev.share(request).await?.into_inner();
    let json = serde_json::to_string_pretty(&reply).unwrap();
    println!("{}", json.to_colored_json_auto().unwrap());
    Ok(())
}
/// Unshares the named bdev and pretty-prints the reply as colored JSON.
async fn unshare(
    mut ctx: Context,
    args: &ArgMatches<'_>,
) -> Result<(), Status> {
    let name = args.value_of("name").unwrap().to_owned();
    let reply = ctx.bdev.unshare(CreateReply { name }).await?.into_inner();
    let json = serde_json::to_string_pretty(&reply).unwrap();
    println!("{}", json.to_colored_json_auto().unwrap());
    Ok(())
}
|
use std::str::Chars;
use std::collections::HashMap;
mod file_reader;
/// Lexical token kinds produced by the `Lexer`; each variant carries the
/// source text it was built from.
#[derive(Debug, PartialEq)]
enum Token {
    // Arithmetic operators.
    PLUS(String),
    MINUS(String),
    NUMBER(String),
    IDENTIFIER(String),
    MOD(String),
    MULTIPLY(String),
    // Comparison operators.
    LESSTHAN(String),
    GREATERTHAN(String),
    // Punctuation and grouping.
    DOT(String),
    COMMA(String),
    OPENPARENTHESES(String),
    CLOSEPARENTHESES(String),
    STARTBRACES(String),
    ENDBRACES(String),
    STARTSQUAREBRACKETS(String),
    ENDSQUAREBRACKETS(String),
    // `"` — opens/closes string literals.
    INVERTEDCOMMAS(String),
    APOSTROPHE(String),
    EQUALS(String),
    ASSIGNMENT(String),
    KEYWORD(String),
    REFERENCE(String),
    SEMICOLON(String),
    COLON(String),
    // Compound assignment operators.
    PLUSEQUALS(String),
    MINUSEQUALS(String),
    MULTIPLYEQUALS(String),
    MODEQUALS(String),
    // Logical / bitwise operators.
    AND(String),
    OR(String),
    NOT(String),
    ABSOLUTEVALUE(String),
    LESSTHANEQUALS(String),
    GREATERTHANEQUALS(String),
    SHIFTLEFT(String),
    SHIFTRIGHT(String),
    // `**`.
    STARTSTAR(String),
    // The contents of a string literal.
    STRING(String),
}
/// Streaming lexer over a source string.
struct Lexer<'a> {
    // Iterator over the not-yet-consumed characters of the source.
    iter: Chars<'a>,
}
impl<'a> Lexer<'a> {
    /// Creates a lexer positioned at the start of `code`.
    fn new(code: &str) -> Lexer {
        Lexer { iter: code.chars() }
    }
    /// Produces the next token, or `None` at end of input (or on an
    /// unrecognised character).
    ///
    /// * `mapping` — keyword table; any lexeme found in it becomes KEYWORD.
    /// * `append` — scratch buffer carried between calls (used to stash a
    ///   lookahead character from a previous `<` token).
    /// * `token_map` — output vector; some branches (strings, numbers ending
    ///   in `;`, `>`-terminated identifiers) push extra tokens directly here
    ///   in addition to the returned token.
    ///
    /// NOTE(review): the one-character lookahead for two-character operators
    /// (`+=`, `-=`, `==`, …) consumes the next char from `iter` even when it
    /// does not form a compound operator; only the `<` branch saves that
    /// char into `append`, so other branches appear to drop it — confirm
    /// whether this is intended.
    fn next(
        &mut self,
        mapping: &HashMap<&str, &str>,
        append: &mut String,
        token_map: &mut Vec<Token>,
    ) -> Option<Token> {
        // `start` always points at the first un-skipped character; lexeme
        // lengths are computed by subtracting remaining-slice lengths.
        let mut start: &str = self.iter.as_str();
        let mut index = self.iter.next();
        // Skip leading whitespace.
        while let Some(chr) = index {
            if !chr.is_whitespace() {
                break;
            }
            start = self.iter.as_str();
            index = self.iter.next();
        }
        if let Some(chr) = index {
            match chr {
                // Single-character tokens.
                '.' => Some(Token::DOT(String::from(chr))),
                '!' => Some(Token::NOT(String::from(chr))),
                ',' => Some(Token::COMMA(String::from(chr))),
                '\'' => Some(Token::APOSTROPHE(String::from(chr))),
                '(' => Some(Token::OPENPARENTHESES(String::from(chr))),
                ')' => Some(Token::CLOSEPARENTHESES(String::from(chr))),
                '[' => Some(Token::STARTSQUAREBRACKETS(String::from(chr))),
                ']' => Some(Token::ENDSQUAREBRACKETS(String::from(chr))),
                '{' => Some(Token::STARTBRACES(String::from(chr))),
                '}' => Some(Token::ENDBRACES(String::from(chr))),
                ';' => Some(Token::SEMICOLON(String::from(chr))),
                ':' => Some(Token::COLON(String::from(chr))),
                // String literal: pushes the opening quote and the contents
                // into `token_map` directly, then returns the closing quote.
                '"' => {
                    token_map.push(Token::INVERTEDCOMMAS(String::from("\"")));
                    let mut end = self.iter.as_str();
                    while let Some(c) = self.iter.next() {
                        if c == '\"' {
                            break;
                        }
                        end = self.iter.as_str();
                    }
                    let len = start.len() - end.len();
                    // Slice off the opening quote, trim, and prepend any
                    // stashed lookahead in `append`.
                    let word = String::from(start[1..len].trim().to_string());
                    append.push_str(&word);
                    let temp = String::from(append.as_mut_str());
                    append.clear();
                    token_map.push(Token::STRING(temp));
                    Some(Token::INVERTEDCOMMAS(String::from("\"")))
                }
                // Two-character operator candidates: peek one char ahead.
                '+' => {
                    if let Some(c) = self.iter.next() {
                        if c == '=' {
                            Some(Token::PLUSEQUALS(String::from("+=")))
                        } else {
                            Some(Token::PLUS(String::from(chr)))
                        }
                    } else {
                        Some(Token::PLUS(String::from(chr)))
                    }
                }
                '-' => {
                    if let Some(c) = self.iter.next() {
                        if c == '=' {
                            Some(Token::MINUSEQUALS(String::from("-=")))
                        } else {
                            Some(Token::MINUS(String::from(chr)))
                        }
                    } else {
                        Some(Token::MINUS(String::from(chr)))
                    }
                }
                '*' => {
                    if let Some(c) = self.iter.next() {
                        if c == '=' {
                            Some(Token::MULTIPLYEQUALS(String::from("*=")))
                        } else if c == '*' {
                            Some(Token::STARTSTAR(String::from("**")))
                        } else {
                            Some(Token::MULTIPLY(String::from(chr)))
                        }
                    } else {
                        Some(Token::MULTIPLY(String::from(chr)))
                    }
                }
                '%' => {
                    if let Some(c) = self.iter.next() {
                        if c == '=' {
                            Some(Token::MODEQUALS(String::from("%=")))
                        } else {
                            Some(Token::MOD(String::from(chr)))
                        }
                    } else {
                        Some(Token::MOD(String::from(chr)))
                    }
                }
                '<' => {
                    if let Some(c) = self.iter.next() {
                        if c == '<' {
                            Some(Token::SHIFTLEFT(String::from("<<")))
                        } else if c == '=' {
                            Some(Token::LESSTHANEQUALS(String::from("<=")))
                        } else {
                            // Stash the consumed lookahead so the next
                            // lexeme can pick it back up via `append`.
                            *append = String::from(c);
                            Some(Token::LESSTHAN(String::from(chr)))
                        }
                    } else {
                        Some(Token::LESSTHAN(String::from(chr)))
                    }
                }
                '>' => {
                    if let Some(c) = self.iter.next() {
                        if c == '>' {
                            Some(Token::SHIFTRIGHT(String::from(">>")))
                        } else if c == '=' {
                            Some(Token::GREATERTHANEQUALS(String::from(">=")))
                        } else {
                            Some(Token::GREATERTHAN(String::from(chr)))
                        }
                    } else {
                        Some(Token::GREATERTHAN(String::from(chr)))
                    }
                }
                // Preprocessor-style word starting with '#': read the whole
                // alphanumeric/underscore run (the '#' itself is included
                // since slicing starts at 0).
                '#' => {
                    let mut end = self.iter.as_str();
                    while let Some(c) = self.iter.next() {
                        if !c.is_ascii_alphanumeric() && c != '_' {
                            break;
                        }
                        end = self.iter.as_str();
                    }
                    let len = start.len() - end.len();
                    let word = String::from(start[0..len].trim().to_string());
                    if mapping.contains_key(&start[0..len]) {
                        Some(Token::KEYWORD(word))
                    } else {
                        Some(Token::IDENTIFIER(start[0..len].trim().to_string()))
                    }
                }
                '&' => {
                    if let Some(c) = self.iter.next() {
                        if c == '&' {
                            Some(Token::AND(String::from("&&")))
                        } else {
                            Some(Token::REFERENCE(String::from(chr)))
                        }
                    } else {
                        Some(Token::REFERENCE(String::from(chr)))
                    }
                }
                '|' => {
                    if let Some(c) = self.iter.next() {
                        if c == '|' {
                            Some(Token::OR(String::from("||")))
                        } else {
                            Some(Token::ABSOLUTEVALUE(String::from(chr)))
                        }
                    } else {
                        Some(Token::ABSOLUTEVALUE(String::from(chr)))
                    }
                }
                '=' => {
                    if let Some(c) = self.iter.next() {
                        if c == '=' {
                            // NOTE(review): stores "=" as the lexeme for the
                            // two-character "==" operator — likely meant "==".
                            Some(Token::EQUALS(String::from("=")))
                        } else {
                            Some(Token::ASSIGNMENT(String::from("=")))
                        }
                    } else {
                        Some(Token::ASSIGNMENT(String::from("=")))
                    }
                }
                // Number literal: consume digits; if terminated by ';' the
                // number is pushed into `token_map` and the ';' is returned.
                '0'..='9' => {
                    let mut checker = false;
                    let mut end = self.iter.as_str();
                    while let Some(c) = self.iter.next() {
                        if !c.is_ascii_digit() {
                            if c == ';' {
                                checker = true;
                                let len = start.len() - end.len();
                                let word = String::from(start[0..len].trim().to_string());
                                append.push_str(&word);
                                let temp = String::from(append.as_mut_str());
                                append.clear();
                                let temp_slice: &str = &temp[..];
                                if mapping.contains_key(&temp_slice) {
                                    token_map.push(Token::KEYWORD(temp));
                                } else {
                                    token_map.push(Token::NUMBER(temp));
                                }
                            }
                            break;
                        }
                        end = self.iter.as_str();
                    }
                    if !checker {
                        let len = start.len() - end.len();
                        let word = start[0..len].trim().to_string();
                        Some(Token::NUMBER(word))
                    } else {
                        Some(Token::SEMICOLON(String::from(";")))
                    }
                }
                // Identifier / keyword: consume word chars; a '>' terminator
                // (e.g. `#include <stdio.h>`) pushes the word and returns
                // GREATERTHAN instead.
                'a'..='z' | 'A'..='Z' | '_' => {
                    let mut library = false;
                    let mut end = self.iter.as_str();
                    while let Some(c) = self.iter.next() {
                        if !c.is_ascii_alphanumeric() && c != '_' {
                            if c != '.' {
                                if c == '>' {
                                    library = true;
                                    let len = start.len() - end.len();
                                    let word = String::from(start[0..len].trim().to_string());
                                    append.push_str(&word);
                                    let temp = String::from(append.as_mut_str());
                                    append.clear();
                                    let temp_slice: &str = &temp[..];
                                    if mapping.contains_key(&temp_slice) {
                                        token_map.push(Token::KEYWORD(temp));
                                    }
                                }
                                break;
                            }
                        }
                        end = self.iter.as_str();
                    }
                    if !library {
                        let len = start.len() - end.len();
                        let word = String::from(start[0..len].trim().to_string());
                        append.push_str(&word);
                        let temp = String::from(append.as_mut_str());
                        append.clear();
                        let temp_slice: &str = &temp[..];
                        if mapping.contains_key(&temp_slice) {
                            // NOTE(review): `library == true` can never hold
                            // here (we are inside `if !library`), so this
                            // branch is dead code.
                            if library == true {
                                Some(Token::GREATERTHAN(String::from(">")))
                            } else {
                                Some(Token::KEYWORD(temp))
                            }
                        } else {
                            Some(Token::IDENTIFIER(start[0..len].trim().to_string()))
                        }
                    } else {
                        Some(Token::GREATERTHAN(String::from(">")))
                    }
                }
                // Any other character ends tokenization.
                _ => None,
            }
        } else {
            None
        }
    }
}
/// Builds a keyword lookup table from "name,kind" entries.
///
/// For each element, the text before the first comma becomes the key and
/// the text between the first and second comma becomes the value (exactly
/// what indexing the comma-split at [0] and [1] produced before).
///
/// Entries without a comma are now skipped instead of panicking — the
/// original indexed `one_tok[1]` unconditionally, which panicked on any
/// malformed entry in TOKENS.txt.
fn build_tokens(token_vector: Vec<&str>) -> HashMap<&str, &str> {
    let mut token_map = HashMap::new();
    for elem in token_vector {
        let mut parts = elem.split(',');
        if let (Some(key), Some(value)) = (parts.next(), parts.next()) {
            token_map.insert(key, value);
        }
    }
    token_map
}
/// Reads the token table and source file, runs the lexer to exhaustion,
/// and prints every generated token.
fn main() {
    // Keyword table: space-separated "name,kind" entries.
    let tokens_str = String::from(file_reader::read_tokens("src/TOKENS.txt"));
    let token_map: HashMap<&str, &str> = build_tokens(tokens_str.split(" ").collect());
    // Source to tokenize.
    let code = String::from(file_reader::read_source_code("src/source_code.c")).to_owned();
    let mut generated_tokens = Vec::new();
    let mut append = String::new();
    let mut lex = Lexer::new(&code[..]);
    loop {
        match lex.next(&token_map, &mut append, &mut generated_tokens) {
            Some(token) => generated_tokens.push(token),
            None => break,
        }
    }
    println!("\nGENERATED TOKENS\n {:?}", generated_tokens);
}
|
extern crate wast_spec;
use std::path::Path;
use wast_spec::WastContext;
/// Expands to a `#[test]` function named `$func_name` that runs the spec
/// suite file `$file` through `run_spectest`.
macro_rules! run_wast {
    ($file:expr, $func_name:ident) => {
        #[test]
        fn $func_name() {
            run_spectest($file)
        }
    };
}
/// Runs a single .wast file from the `testsuite` directory (resolved
/// relative to this source file) and panics on any failure.
fn run_spectest(filename: &str) {
    let path = Path::new(file!())
        .parent()
        .unwrap()
        .join("testsuite")
        .join(filename);
    if let Err(err) = WastContext::new().run_file(&path) {
        panic!("{}", err);
    }
}
// One #[test] per upstream WebAssembly spec-suite .wast file; each macro
// invocation expands to a test that runs the file through `run_spectest`.
run_wast!("address.wast", test_wast_address);
run_wast!("align.wast", test_wast_align);
run_wast!("binary-leb128.wast", test_wast_binary_leb128);
run_wast!("binary.wast", test_wast_binary);
run_wast!("block.wast", test_wast_block);
run_wast!("br.wast", test_wast_br);
run_wast!("br_if.wast", test_wast_br_if);
run_wast!("br_table.wast", test_wast_br_table);
run_wast!("break-drop.wast", test_wast_break_drop);
run_wast!("call.wast", test_wast_call);
run_wast!("call_indirect.wast", test_wast_call_indirect);
run_wast!("comments.wast", test_wast_comments);
run_wast!("const.wast", test_wast_const);
run_wast!("conversions.wast", test_wast_conversions);
run_wast!("custom.wast", test_wast_custom);
run_wast!("data.wast", test_wast_data);
run_wast!("elem.wast", test_wast_elem);
run_wast!("endianness.wast", test_wast_endianness);
run_wast!("exports.wast", test_wast_exports);
run_wast!("f32.wast", test_wast_f32_orig);
run_wast!("f32_bitwise.wast", test_wast_f32_bitwise);
run_wast!("f32_cmp.wast", test_wast_f32_cmp);
run_wast!("f64.wast", test_wast_f64);
run_wast!("f64_bitwise.wast", test_wast_f64_bitwise);
run_wast!("f64_cmp.wast", test_wast_f64_cmp);
run_wast!("fac.wast", test_wast_fac);
run_wast!("float_exprs.wast", test_wast_float_exprs);
run_wast!("float_literals.wast", test_wast_float_literals);
run_wast!("float_memory.wast", test_wast_float_memory);
run_wast!("float_misc.wast", test_wast_float_misc);
run_wast!("forward.wast", test_wast_forward);
run_wast!("func.wast", test_wast_func);
run_wast!("func_ptrs.wast", test_wast_func_ptrs);
run_wast!("globals.wast", test_wast_globals);
run_wast!("i32.wast", test_wast_i32);
run_wast!("i64.wast", test_wast_i64);
run_wast!("if.wast", test_wast_if);
run_wast!("imports.wast", test_wast_imports);
run_wast!("inline-module.wast", test_wast_inline_module);
run_wast!("int_exprs.wast", test_wast_int_exprs);
run_wast!("int_literals.wast", test_wast_int_literals);
run_wast!("labels.wast", test_wast_labels);
run_wast!("left-to-right.wast", test_wast_left_to_right);
run_wast!("linking.wast", test_wast_linking);
run_wast!("load.wast", test_wast_load);
run_wast!("local_get.wast", test_wast_local_get);
run_wast!("local_set.wast", test_wast_local_set);
run_wast!("local_tee.wast", test_wast_local_tee);
run_wast!("loop.wast", test_wast_loop);
run_wast!("memory.wast", test_wast_memory);
run_wast!("memory_grow.wast", test_wast_memory_grow);
run_wast!("memory_redundancy.wast", test_wast_memory_redundancy);
run_wast!("memory_size.wast", test_wast_memory_size);
run_wast!("memory_trap.wast", test_wast_memory_trap);
run_wast!("names.wast", test_wast_names);
run_wast!("nop.wast", test_wast_nop);
run_wast!("return.wast", test_wast_return);
run_wast!("select.wast", test_wast_select);
run_wast!(
"skip-stack-guard-page.wast",
test_wast_skip_stack_guard_page
);
run_wast!("stack.wast", test_wast_stack);
run_wast!("start.wast", test_wast_start);
run_wast!("store.wast", test_wast_store);
run_wast!("switch.wast", test_wast_switch);
run_wast!("token.wast", test_wast_token);
run_wast!("traps.wast", test_wast_traps);
run_wast!("type.wast", test_wast_type);
run_wast!("typecheck.wast", test_wast_typecheck);
run_wast!("unreachable.wast", test_wast_unreachable);
run_wast!("unreached-invalid.wast", test_wast_unreached_invalid);
run_wast!("unwind.wast", test_wast_unwind);
run_wast!(
"utf8-custom-section-id.wast",
test_wast_utf8_custom_section_id
);
run_wast!("utf8-import-field.wast", test_wast_utf8_import_field);
run_wast!("utf8-import-module.wast", test_wast_utf8_import_module);
run_wast!(
"utf8-invalid-encoding.wast",
test_wast_utf8_invalid_encoding
);
|
/// In-memory view of a Microsoft Access (Jet/MDB) database file header.
/// NOTE(review): field meanings are inferred from the reader code below —
/// confirm against MDB file-format documentation.
#[derive(Debug)]
pub struct MDatabase{
// Path of the file this database was read from.
filename: String,
// First 4 bytes of the file (header magic).
magic_number: u32,
// File-format identifier string read at offset 0x04.
file_format_id: String,
// Jet engine version read at offset 0x14; a value of 0 selects
// different field offsets later in `read_db_info`.
jet_version: u32,
// Extended info; `None` until populated by `read_db_info`.
db_info: Option<DBInfo>,
}
use std::io::Seek;
use std::fs::File;
use std::io::{Read, SeekFrom, Error};
use std::mem::transmute;
impl MDatabase {
    /// Opens a database file and parses its header and database-info block.
    ///
    /// # Errors
    /// Propagates any I/O error raised while opening or reading the file
    /// (the previous version `unwrap()`ed internally and could never
    /// actually return `Err`).
    pub fn open_database(filename: &str) -> Result<MDatabase, std::io::Error>{
        let mut file = File::open(filename)?;
        let db = MDatabase::read_headers(filename, &mut file)?;
        MDatabase::read_db_info(db, &mut file)
    }

    /// Reads the fixed-offset header fields: magic number (0x00),
    /// format id (0x04) and Jet version (0x14).
    fn read_headers(filename: &str, file: &mut File) -> Result<MDatabase, std::io::Error>{
        let magic_number = MDatabase::seek_and_read_u32(0x00, file)?;
        let file_format_id = MDatabase::seek_and_read_string(0x04, file)?;
        let jet_version = MDatabase::seek_and_read_u32(0x14, file)?;
        Ok(MDatabase{
            filename: filename.to_string(),
            magic_number,
            file_format_id,
            jet_version,
            db_info: None
        })
    }

    /// Reads the DBInfo block and attaches it to `db`.
    ///
    /// The collation offset depends on the Jet version (0 presumably means
    /// the older Jet3 layout — TODO confirm); the remaining offsets are
    /// read from the same locations for both versions.
    fn read_db_info(mut db: MDatabase, file: &mut File) -> Result<MDatabase, std::io::Error>{
        let system_collation = if db.jet_version == 0 {
            MDatabase::seek_and_read_u16(0x22, file)?
        }else{
            MDatabase::seek_and_read_u16(0x56, file)?
        };
        let system_code_page = MDatabase::seek_and_read_u16(0x24, file)?;
        let database_key = MDatabase::seek_and_read_u32(0x26, file)?;
        let creation_date = MDatabase::seek_and_read_f64(0x5A, file)?;
        let info = DBInfo{
            system_collation,
            system_code_page,
            database_key,
            database_password: None,
            creation_date,
        };
        db.db_info = Some(info);
        Ok(db)
    }

    /// Reads a little-endian u32 at `position`.
    ///
    /// Uses `read_exact` + `from_le_bytes` instead of `read` + `transmute`:
    /// short reads now error out instead of yielding garbage, and the
    /// decoding is explicitly little-endian rather than native-endian.
    fn seek_and_read_u32(position: u64, file: &mut File) -> Result<u32, Error>{
        file.seek(SeekFrom::Start(position))?;
        let mut buf = [0u8; 4];
        file.read_exact(&mut buf)?;
        Ok(u32::from_le_bytes(buf))
    }

    /// Reads a little-endian u16 at `position`.
    fn seek_and_read_u16(position: u64, file: &mut File) -> Result<u16, Error>{
        file.seek(SeekFrom::Start(position))?;
        let mut buf = [0u8; 2];
        file.read_exact(&mut buf)?;
        Ok(u16::from_le_bytes(buf))
    }

    /// Reads a little-endian u64 at `position`.
    fn seek_and_read_u64(position: u64, file: &mut File) -> Result<u64, Error>{
        file.seek(SeekFrom::Start(position))?;
        let mut buf = [0u8; 8];
        file.read_exact(&mut buf)?;
        Ok(u64::from_le_bytes(buf))
    }

    /// Reads a little-endian f64 at `position`.
    fn seek_and_read_f64(position: u64, file: &mut File) -> Result<f64, Error>{
        file.seek(SeekFrom::Start(position))?;
        let mut buf = [0u8; 8];
        file.read_exact(&mut buf)?;
        Ok(f64::from_le_bytes(buf))
    }

    /// Reads 16 bytes at `position` and decodes them lossily as UTF-8
    /// (invalid bytes become U+FFFD).
    fn seek_and_read_string(position: u64, file: &mut File) -> Result<String, Error>{
        file.seek(SeekFrom::Start(position))?;
        let mut buf = [0u8; 16];
        file.read_exact(&mut buf)?;
        Ok(String::from_utf8_lossy(&buf).to_string())
    }
}
/// Database-information block parsed by `MDatabase::read_db_info`.
#[derive(Debug)]
struct DBInfo{
    system_collation: u16,
    system_code_page: u16,
    database_key: u32, // 0 means not encoded
    database_password: Option<Vec<u8>>, // TODO: Add a working code for Jet4's 40byte array
    creation_date: f64
}
/// Layout of a table-definition page. Currently unused by the code above.
#[derive(Debug)]
struct DBTableDefinition{
    page_signature: u16, // always 0x0102
    unknown: u16, // always 'VC' for JET3, number of free bytes on this page for JET4
    next_page: u32 // if the page is too long, this contains a pointer to the next page
}
|
use thiserror::Error;
/// Crate-level error type: wraps failures from the HTTP client and the
/// XML deserializer, with automatic conversion via `#[from]`.
#[derive(Error, Debug)]
pub enum NSError {
    #[error("error from the http client")]
    HTTPClient(#[from] reqwest::Error),
    #[error("error from the deserializer")]
    Deserializer(#[from] quick_xml::de::DeError),
}
|
#![allow(non_snake_case)]
use glutin::dpi::PhysicalSize;
use glutin::event::{Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use glutin::event_loop::ControlFlow;
use glutin::event_loop::EventLoop;
use glutin::window::Window;
use glutin::window::WindowBuilder;
use glutin::Api;
use glutin::ContextBuilder;
use glutin::ContextWrapper;
use glutin::GlRequest;
use glutin::PossiblyCurrent;
mod shader;
use shader::Shader;
mod renderer;
use renderer::Renderer;
mod generator;
/// Initial window width in physical pixels.
pub const WIDTH: i32 = 1920;
/// Initial window height in physical pixels.
pub const HEIGHT: i32 = 1080;
/// Shorthand for a windowed GL context that has been made current.
pub type Ctx = ContextWrapper<PossiblyCurrent, Window>;
/// Creates the window and OpenGL 4.4 context, then runs the event loop:
/// redraws at most ~60 fps, handles resize and keyboard input.
fn main() {
    let el = EventLoop::new();
    let wb = WindowBuilder::new()
        .with_title("dev")
        .with_visible(true)
        .with_resizable(true)
        .with_inner_size(PhysicalSize::new(WIDTH, HEIGHT));
    // Request a specific OpenGL 4.4 context.
    let context = ContextBuilder::new()
        .with_gl(GlRequest::Specific(Api::OpenGl, (4, 4)))
        .build_windowed(wb, &el);
    let context = match context {
        Ok(c) => c,
        Err(e) => {
            // Context creation failed (e.g. unsupported GL version): report and bail.
            println!("{}", e);
            return;
        }
    };
    // SAFETY-sensitive GL setup: context must be current before loading symbols.
    let context = unsafe { context.make_current().expect("Make context current") };
    gl::load_with(|symbol| context.get_proc_address(symbol) as *const _);
    unsafe { gl::Viewport(0, 0, WIDTH, HEIGHT) }
    let mut renderer = Renderer::new(context);
    // Frame budget in milliseconds (~60 fps).
    let frame_time = 1000 / 60;
    let mut timer = std::time::Instant::now();
    el.run(move |event, _, control_flow| {
        // Throttle drawing: only redraw once the frame budget has elapsed.
        if timer.elapsed().as_millis() > frame_time {
            renderer.draw();
            timer = std::time::Instant::now();
        }
        *control_flow = match event {
            Event::WindowEvent { event, .. } => match event {
                WindowEvent::Resized(size) => {
                    // Keep the GL viewport in sync with the window size.
                    renderer.context.resize(size);
                    let rounded = size.cast::<i32>();
                    unsafe { gl::Viewport(0, 0, rounded.width, rounded.height) }
                    ControlFlow::Poll
                }
                WindowEvent::KeyboardInput { input, .. } => handle_keycodes(input, &mut renderer),
                _ => ControlFlow::Poll,
            },
            Event::RedrawRequested(_) => {
                renderer.draw();
                ControlFlow::Poll
            }
            _ => ControlFlow::Poll,
        };
    });
}
/// Maps a pressed key to a renderer action.
///
/// Key releases and keys without a virtual keycode are ignored.
/// Escape/Q request exit; every other handled key mutates the renderer
/// and keeps the event loop polling.
fn handle_keycodes(input: KeyboardInput, renderer: &mut Renderer) -> ControlFlow {
    use glutin::event::ElementState;
    if matches!(input.state, ElementState::Released) {
        return ControlFlow::Poll;
    }
    let pressed = match input.virtual_keycode {
        Some(key) => key,
        None => return ControlFlow::Poll,
    };
    match pressed {
        VirtualKeyCode::Escape | VirtualKeyCode::Q => return ControlFlow::Exit,
        VirtualKeyCode::Subtract => renderer.diminish_precision(),
        VirtualKeyCode::Add => renderer.augment_precision(),
        VirtualKeyCode::W => renderer.augment_zoom(),
        VirtualKeyCode::S => renderer.diminish_zoom(),
        VirtualKeyCode::Left => renderer.move_left(),
        VirtualKeyCode::Right => renderer.move_right(),
        VirtualKeyCode::Down => renderer.move_down(),
        VirtualKeyCode::Up => renderer.move_up(),
        VirtualKeyCode::A => renderer.change_n(-1),
        VirtualKeyCode::D => renderer.change_n(1),
        VirtualKeyCode::C => renderer.next_color(),
        VirtualKeyCode::T => renderer.switch_automation(),
        VirtualKeyCode::X => renderer.next_fractal_type(),
        _ => {}
    }
    ControlFlow::Poll
}
|
use crate::abst::{Controller, Input, Presenter};
use crate::exp::SyntaxError;
/// Console front end: reads expressions from stdin (`Controller`) and
/// prints results or errors to stdout (`Presenter`).
pub struct Console;
/// Tokenizes a character slice into calculator inputs.
///
/// Digits and `.` accumulate into a number buffer that is flushed as a
/// `Num` token when a non-numeric character (or the end of input) is
/// reached. Spaces are skipped; unknown characters abort via
/// `unimplemented!`.
fn string_to_tokens(string: &[char]) -> Vec<Input> {
    use Input::*;
    let mut out = Vec::new();
    let mut digits = String::new();
    for &ch in string {
        if ch.is_digit(10) || ch == '.' {
            digits.push(ch);
            continue;
        }
        // A non-numeric char terminates any pending number literal.
        if !digits.is_empty() {
            out.push(Num(digits.parse().unwrap()));
            digits.clear();
        }
        let token = match ch {
            '+' => Plus,
            '-' => Minus,
            '*' => Cross,
            '/' => Division,
            '(' => LParen,
            ')' => RParen,
            ' ' => continue,
            other => unimplemented!("Unimplemented token: {}", other),
        };
        out.push(token);
    }
    // Flush a trailing number literal, if any.
    if !digits.is_empty() {
        out.push(Num(digits.parse().unwrap()));
        digits.clear();
    }
    out
}
impl Controller for Console {
    /// Prompts on stdout, reads one line from stdin, and tokenizes it.
    /// Panics if stdin cannot be read.
    fn get_inputs(&self) -> Vec<Input> {
        println!("Input the expression:");
        let mut line = String::new();
        std::io::stdin().read_line(&mut line).expect("no input");
        let chars: Vec<char> = line.trim().chars().collect();
        string_to_tokens(&chars)
    }
}
impl Presenter for Console {
    /// Reports a syntax error; the error details are discarded.
    fn show_error(&self, _: SyntaxError) {
        println!("Syntax error");
    }
    /// Prints the evaluated result on its own line.
    fn show_result(&self, result: f64) {
        println!("{}", result);
    }
}
|
// Copyright (c) 2018 tomlenv developers
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
//! `tomlenv` default environment hierarchy implementation.
use crate::error::{Error, Result};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use std::convert::TryFrom;
use std::fmt;
/// A fairly standard environment hierarchy for use with `Environments`.
/// Prod -> Stage -> Test -> Dev -> Local
///
/// The derived `Ord` follows declaration order, so `Prod` compares
/// lowest and `Local` highest.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Environment {
    /// Production
    Prod,
    /// Stage
    Stage,
    /// Test
    Test,
    /// Development
    Dev,
    /// Local
    Local,
}
impl<'de> Deserialize<'de> for Environment {
    /// Deserializes an environment from its lowercase string name,
    /// delegating validation to `TryFrom<&str>`.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Visitor that accepts any string and parses it via TryFrom.
        struct EnvironmentVisitor;
        impl de::Visitor<'_> for EnvironmentVisitor {
            type Value = Environment;
            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                formatter.write_str("any valid environment")
            }
            fn visit_str<E>(self, value: &str) -> std::result::Result<Environment, E>
            where
                E: de::Error,
            {
                // Invalid names surface as custom serde errors.
                TryFrom::try_from(value).map_err(de::Error::custom)
            }
        }
        deserializer.deserialize_string(EnvironmentVisitor)
    }
}
impl Serialize for Environment {
    /// Serializes as the lowercase name produced by `Display`.
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
impl fmt::Display for Environment {
    /// Writes the lowercase name used in serialized form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Environment::Prod => "prod",
            Environment::Stage => "stage",
            Environment::Test => "test",
            Environment::Dev => "dev",
            Environment::Local => "local",
        })
    }
}
impl TryFrom<&str> for Environment {
    type Error = Error;
    /// Parses a lowercase environment name; unknown names yield
    /// `Error::invalid_runtime_environment`.
    fn try_from(env: &str) -> Result<Self> {
        match env {
            "prod" => Ok(Environment::Prod),
            "stage" => Ok(Environment::Stage),
            "test" => Ok(Environment::Test),
            "dev" => Ok(Environment::Dev),
            "local" => Ok(Environment::Local),
            _ => Err(Error::invalid_runtime_environment(env)),
        }
    }
}
impl TryFrom<String> for Environment {
    type Error = Error;
    /// Parses a lowercase environment name.
    ///
    /// Delegates to the `TryFrom<&str>` implementation so the two
    /// conversions share a single name table and cannot drift apart.
    fn try_from(env: String) -> Result<Self> {
        Environment::try_from(env.as_str())
    }
}
#[cfg(test)]
mod test {
    use super::Environment;
    use std::convert::TryFrom;

    /// Display renders each variant as its lowercase name.
    #[test]
    fn display() {
        assert_eq!(Environment::Prod.to_string(), "prod");
        assert_eq!(Environment::Stage.to_string(), "stage");
        assert_eq!(Environment::Test.to_string(), "test");
        assert_eq!(Environment::Dev.to_string(), "dev");
        assert_eq!(Environment::Local.to_string(), "local");
    }

    /// Each known name converts to its variant; unknown names fail.
    /// (Replaces the `assert!(false, ..)` / `assert!(true, ..)` pattern
    /// and adds the previously missing "local" case.)
    #[test]
    fn convert() {
        assert_eq!(Environment::try_from("prod").ok(), Some(Environment::Prod));
        assert_eq!(Environment::try_from("stage").ok(), Some(Environment::Stage));
        assert_eq!(Environment::try_from("test").ok(), Some(Environment::Test));
        assert_eq!(Environment::try_from("dev").ok(), Some(Environment::Dev));
        assert_eq!(Environment::try_from("local").ok(), Some(Environment::Local));
        assert!(Environment::try_from("blah").is_err());
    }
}
|
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate clap;
extern crate serde;
extern crate serde_json;
extern crate time;
#[macro_use]
extern crate serde_derive;
pub mod manifest;
pub mod options;
pub mod profiler;
pub mod processor;
pub mod types;
|
use super::Interrupts;
/// Whether a button is currently released (`Up`) or pressed (`Down`).
#[derive(Debug)]
pub enum ButtonState {
    Up,
    Down,
}
/// The eight gamepad buttons: four directions plus A/B/Start/Select.
#[derive(Debug,Copy,Clone)]
pub enum Button {
    Up,
    Down,
    Left,
    Right,
    A,
    B,
    Start,
    Select,
}
impl Button {
    /// Bit for this button inside its port's low nibble. Directions and
    /// action buttons live on different ports, so the bit values overlap.
    fn flag(self) -> u8 {
        use self::Button::*;
        let bit = match self {
            A | Right => 0,
            B | Left => 1,
            Select | Up => 2,
            Start | Down => 3,
        };
        1 << bit
    }
}
/// A button together with its new up/down state.
#[derive(Debug)]
pub struct InputEvent {
    button: Button,
    state: ButtonState,
}
impl InputEvent {
    /// Bundles a button with its new up/down state.
    pub fn new(button: Button, state: ButtonState) -> InputEvent {
        // Field-init shorthand replaces the redundant `button: button` form.
        InputEvent { button, state }
    }
}
/// Gamepad register state.
pub struct Gamepad {
    // Low nibble holds the direction buttons; a cleared bit = pressed.
    input_port_1: u8,
    // Low nibble holds A/B/Start/Select; a cleared bit = pressed.
    input_port_2: u8,
    // Last written port-select bits (only 0x30 is retained by `write`).
    port: u8,
}
impl Gamepad {
    /// Creates a gamepad with no buttons pressed (pressed = bit cleared,
    /// so both ports start with the low nibble set).
    pub fn new() -> Gamepad {
        Gamepad {
            input_port_1: 0x0f,
            input_port_2: 0x0f,
            port: 0xf0,
        }
    }

    /// Returns the register value: the stored select bits, the two high
    /// bits forced on, and the button nibble of each selected port.
    /// NOTE(review): a *set* select bit (0x10/0x20) enables a port here —
    /// confirm this polarity against the emulated hardware.
    pub fn read(&mut self) -> u8 {
        let mut input = self.port | 0b1100_0000;
        if (self.port & 0x10) != 0 {
            input |= self.input_port_2 & 0x0f;
        }
        if (self.port & 0x20) != 0 {
            input |= self.input_port_1 & 0x0f;
        }
        input
    }

    /// Stores only the two port-select bits of `val`.
    pub fn write(&mut self, val: u8) {
        self.port = val & 0b0011_0000;
    }

    /// This gamepad never raises interrupts.
    pub fn cycle_flush(&mut self, _cycle_count: u32) -> Interrupts {
        Interrupts::empty()
    }

    /// Applies a press/release to the matching input port.
    /// Pressing clears the button's bit; releasing sets it.
    pub fn handle_event(&mut self, event: InputEvent) {
        use self::Button::*;
        match event.state {
            ButtonState::Down => {
                let mask = !event.button.flag();
                // Compound assignment replaces `x = x & mask`.
                match event.button {
                    Up | Down | Left | Right => self.input_port_1 &= mask,
                    A | B | Start | Select => self.input_port_2 &= mask,
                }
            }
            ButtonState::Up => {
                let flag = event.button.flag();
                match event.button {
                    Up | Down | Left | Right => self.input_port_1 |= flag,
                    A | B | Start | Select => self.input_port_2 |= flag,
                }
            }
        }
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_arrow::arrow::chunk::Chunk;
use common_arrow::arrow::datatypes::DataType as ArrowDataType;
use common_arrow::arrow::io::parquet::write::transverse;
use common_arrow::arrow::io::parquet::write::RowGroupIterator;
use common_arrow::arrow::io::parquet::write::WriteOptions;
use common_arrow::parquet::encoding::Encoding;
use common_arrow::parquet::metadata::ThriftFileMetaData;
use common_arrow::parquet::write::Version;
use common_arrow::write_parquet_file;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::serialize::col_encoding;
use common_expression::DataBlock;
use common_expression::TableSchema;
use storages_common_table_meta::table::TableCompression;
/// Serialize data blocks to parquet format.
///
/// Returns the number of bytes written together with the parquet file
/// metadata. Statistics are disabled and format V2 is used for both the
/// row groups and the file itself.
pub fn blocks_to_parquet(
    schema: impl AsRef<TableSchema>,
    blocks: Vec<DataBlock>,
    write_buffer: &mut Vec<u8>,
    compression: TableCompression,
) -> Result<(u64, ThriftFileMetaData)> {
    let arrow_schema = schema.as_ref().to_arrow();
    let row_group_write_options = WriteOptions {
        write_statistics: false,
        version: Version::V2,
        compression: compression.into(),
        data_pagesize_limit: None,
    };
    let batches = blocks
        .into_iter()
        .map(Chunk::try_from)
        .collect::<Result<Vec<_>>>()?;
    // Dictionary columns must use RLE-dictionary encoding; everything else
    // falls back to the default column encoding.
    let encoding_map = |data_type: &ArrowDataType| match data_type {
        ArrowDataType::Dictionary(..) => Encoding::RleDictionary,
        _ => col_encoding(data_type),
    };
    let encodings: Vec<Vec<_>> = arrow_schema
        .fields
        .iter()
        .map(|f| transverse(&f.data_type, encoding_map))
        .collect();
    let row_groups = RowGroupIterator::try_new(
        batches.into_iter().map(Ok),
        &arrow_schema,
        row_group_write_options,
        encodings,
    )?;
    use common_arrow::parquet::write::WriteOptions as FileWriteOption;
    let options = FileWriteOption {
        write_statistics: false,
        version: Version::V2,
    };
    // map_err replaces the hand-written match-and-rewrap.
    write_parquet_file(write_buffer, row_groups, arrow_schema, options)
        .map_err(|cause| ErrorCode::Internal(format!("write_parquet_file: {:?}", cause)))
}
|
use amethyst::core::{Transform};
use amethyst::derive::SystemDesc;
use amethyst::ecs::{ReadStorage, System, SystemData, WriteStorage, WriteExpect};
use crate::ball::component::Ball;
use crate::taunt::Taunt;
use amethyst::core::ecs::{Join, Read};
use amethyst::renderer::SpriteRender;
use crate::persistence::Settings;
/// Updates the taunting face sprite based on which third of the arena
/// the ball currently occupies.
#[derive(SystemDesc)]
pub struct TauntSystem;
impl<'s> System<'s> for TauntSystem {
    type SystemData = (
        ReadStorage<'s, Ball>,
        ReadStorage<'s, Transform>,
        // NOTE(review): `taunt` is never mutated in `run` — this could
        // likely be ReadExpect; confirm nothing else requires write access.
        WriteExpect<'s, Taunt>,
        WriteStorage<'s, SpriteRender>,
        Read<'s, Settings>,
    );
    fn run(&mut self, (balls, trans, taunt, mut sprites, settings): Self::SystemData) {
        for (_ball, transform) in (&balls, &trans).join() {
            // Normalize x into thirds of the arena: pos in [0,1), [1,2), [2,..).
            let md = settings.window_settings.arena_width() / 3.0;
            let pos = transform.translation().x / md;
            // Panics if the taunt face entity or its sprite is missing.
            let sprite = sprites.get_mut(taunt.face.unwrap()).unwrap();
            // Sprite indices 2 / 4 / 3 correspond to the left / middle / right band.
            let sprite_ind = if pos < 1.0 {
                2
            } else if pos < 2.0 {
                4
            } else {
                3
            };
            // Only write when the index changes, to avoid redundant mutation.
            if sprite.sprite_number != sprite_ind {
                sprite.sprite_number = sprite_ind;
            }
        }
    }
}
|
/// Any password hasher should implement this trait.
///
/// Note that password hashers are the only ones that
/// are able to read a clear text password, so try not
/// to leak any kind of information about it.
///
/// If your hasher can be configured to have different
/// time or memory costs, then make sure to store those
/// parameters next to your users' password hashes and
/// salts in your data base, so that you are able to
/// upgrade your parameters at any time.
pub trait PasswordHasher {
    /// Hashes a clear text password.
    ///
    /// Make sure to use all of the given parameters as described
    /// below. Additionally, you should choose a strong hashing
    /// algorithm. As of mid 2016, Argon2i, bcrypt, and scrypt
    /// seem to be very good choices.
    ///
    /// - `clear_text_password` are the given password's raw bytes.
    ///   Do not store or write this data, just hash it.
    /// - `hash_salt` is a random salt that should be used.
    /// - `hashed_password` receives your hash function's result.
    ///   Use every single byte of `hashed_password`, i.e. produce
    ///   `hashed_password.len()` bytes of hash data.
    ///
    /// # Safety
    /// NOTE(review): the trait does not state why this method is
    /// `unsafe`; document the exact caller-upheld contract (buffer
    /// sizing? secret handling?) — confirm with the trait's author.
    unsafe fn hash(&self, hashed_password: &mut [u8], clear_text_password: &[u8], hash_salt: &[u8]);
}
|
use crate::HdbResult;
#[cfg(feature = "sync")]
use byteorder::{LittleEndian, WriteBytesExt};
/// A request to write a chunk of LOB data at `offset` into the LOB
/// identified by `locator_id`; `last_data` marks the final chunk.
#[derive(Debug)]
pub struct WriteLobRequest<'a> {
    locator_id: u64,
    offset: i64,
    buf: &'a [u8],
    last_data: bool,
}
impl<'a> WriteLobRequest<'a> {
    /// Creates a request; logs offset/length/last_data at trace level.
    pub fn new(locator_id: u64, offset: i64, buf: &[u8], last_data: bool) -> WriteLobRequest {
        trace!(
            "Offset = {offset}, buffer length = {}, last_data: {last_data}",
            buf.len()
        );
        WriteLobRequest {
            locator_id,
            offset,
            buf,
            last_data,
        }
    }
    /// Wire size: 8 (locator) + 1 (options) + 8 (offset) + 4 (length)
    /// = 21 header bytes plus the payload.
    pub fn size(&self) -> usize {
        21 + self.buf.len()
    }
    /// Writes the request in wire format using blocking I/O.
    #[cfg(feature = "sync")]
    pub fn sync_emit(&self, w: &mut dyn std::io::Write) -> HdbResult<()> {
        // 1: NULL (not used here), 2: DATA_INCLUDED, 4: LASTDATA
        let options = if self.last_data { 6 } else { 2 };
        w.write_u64::<LittleEndian>(self.locator_id)?;
        w.write_u8(options)?;
        w.write_i64::<LittleEndian>(self.offset)?;
        #[allow(clippy::cast_possible_truncation)]
        w.write_u32::<LittleEndian>(self.buf.len() as u32)?;
        w.write_all(self.buf)?;
        Ok(())
    }
    /// Writes the request in wire format using async I/O.
    /// Must stay byte-identical to `sync_emit`'s output.
    #[cfg(feature = "async")]
    pub async fn async_emit<W: std::marker::Unpin + tokio::io::AsyncWriteExt>(
        &self,
        w: &mut W,
    ) -> HdbResult<()> {
        // 1: NULL (not used here), 2: DATA_INCLUDED, 4: LASTDATA
        let options = if self.last_data { 6 } else { 2 };
        w.write_u64_le(self.locator_id).await?;
        w.write_u8(options).await?;
        w.write_i64_le(self.offset).await?;
        #[allow(clippy::cast_possible_truncation)]
        w.write_u32_le(self.buf.len() as u32).await?;
        w.write_all(self.buf).await?;
        Ok(())
    }
}
|
// Generate bindings from the `sample` WIT world.
wit_bindgen::generate!("sample");
/// Implementation of the generated `Sample` interface.
struct SampleHost;
impl Sample for SampleHost {
    fn run() {
        log(LogLevel::Info, "test log");
    }
}
export_sample!(SampleHost);
|
use pyo3::exceptions::PyValueError;
use pyo3::ffi::Py_uintptr_t;
use pyo3::prelude::*;
use polars_core::utils::accumulate_dataframes_vertical;
use polars_core::POOL;
use ukis_h3cellstore::export::polars::export::arrow::datatypes::DataType as ArrowDataType;
use ukis_h3cellstore::export::polars::export::arrow::ffi;
use ukis_h3cellstore::export::polars::export::rayon::iter::{
IndexedParallelIterator, IntoParallelIterator, ParallelIterator,
};
use ukis_h3cellstore::export::polars::prelude::{ArrayRef, DataFrame, Series};
use crate::error::{IntoPyResult, ToCustomPyErr};
/// from https://github.com/pola-rs/polars/blob/d1e5b1062c6872cd030b04b96505d2fac36b5376/py-polars/src/arrow_interop/to_rust.rs
///
/// Converts a PyArrow array object into a Rust arrow `ArrayRef` via the
/// Arrow C data interface.
pub fn array_to_rust(obj: &PyAny) -> PyResult<ArrayRef> {
    // prepare a pointer to receive the Array struct
    let array = Box::new(ffi::ArrowArray::empty());
    let schema = Box::new(ffi::ArrowSchema::empty());
    let array_ptr = &*array as *const ffi::ArrowArray;
    let schema_ptr = &*schema as *const ffi::ArrowSchema;
    // make the conversion through PyArrow's private API
    // this changes the pointer's memory and is thus unsafe. In particular, `_export_to_c` can go out of bounds
    obj.call_method1(
        "_export_to_c",
        (array_ptr as Py_uintptr_t, schema_ptr as Py_uintptr_t),
    )?;
    // SAFETY: relies on `_export_to_c` above having populated both structs
    // with valid Arrow C-interface data; ownership is transferred to Rust here.
    unsafe {
        let field = ffi::import_field_from_c(schema.as_ref()).into_pyresult()?;
        let array = ffi::import_array_from_c(*array, field.data_type).into_pyresult()?;
        Ok(array)
    }
}
/// Converts a slice of PyArrow record batches into a single polars
/// `DataFrame`, concatenating the batches vertically.
///
/// Errors if `rb` is empty or any column fails to convert.
pub fn to_rust_df(rb: &[&PyAny]) -> PyResult<DataFrame> {
    // Column names are taken from the first batch's schema.
    let schema = rb
        .get(0)
        .ok_or_else(|| PyValueError::new_err("empty table"))?
        .getattr("schema")?;
    let names = schema.getattr("names")?.extract::<Vec<String>>()?;
    let dfs = rb
        .iter()
        .map(|rb| {
            let mut run_parallel = false;
            let columns = (0..names.len())
                .map(|i| {
                    let array = rb.call_method1("column", (i,))?;
                    let arr = array_to_rust(array)?;
                    // Utf8/dictionary columns need a real conversion
                    // (not zero-copy), so they benefit from parallelism.
                    run_parallel |= matches!(
                        arr.data_type(),
                        ArrowDataType::Utf8 | ArrowDataType::Dictionary(_, _, _)
                    );
                    Ok(arr)
                })
                .collect::<PyResult<Vec<_>>>()?;
            // we parallelize this part because we can have dtypes that are not zero copy
            // for instance utf8 -> large-utf8
            // dict encoded to categorical
            let columns = if run_parallel {
                POOL.install(|| {
                    columns
                        .into_par_iter()
                        .enumerate()
                        .map(|(i, arr)| Series::try_from((names[i].as_str(), arr)).into_pyresult())
                        .collect::<PyResult<Vec<_>>>()
                })
            } else {
                columns
                    .into_iter()
                    .enumerate()
                    .map(|(i, arr)| Series::try_from((names[i].as_str(), arr)).into_pyresult())
                    .collect::<PyResult<Vec<_>>>()
            }?;
            DataFrame::new(columns).into_pyresult()
        })
        .collect::<PyResult<Vec<_>>>()?;
    accumulate_dataframes_vertical(dfs).map_err(|e| e.to_custom_pyerr())
}
|
use crate::utils::*;
// Trait name(s) this derive handles.
pub(crate) const NAME: &[&str] = &["AsMut"];
/// Generates a `core::convert::AsMut` impl for the type described by
/// `data` and pushes it into `items`.
pub(crate) fn derive(data: &Data, items: &mut Vec<ItemImpl>) -> Result<()> {
    derive_trait!(
        data,
        parse_quote!(::core::convert::AsMut)?,
        parse_quote! {
            trait AsMut<__T: ?Sized> {
                #[inline]
                fn as_mut(&mut self) -> &mut __T;
            }
        }?,
    )
    .map(|item| items.push(item))
}
|
/// Entry point: mounts the yew application's root component.
fn main() {
    yew::start_app::<froovie_front::Model>();
}
|
use ggez::graphics;
use ggez::graphics::*;
use specs;
use std::path;
use std::sync::{Arc, RwLock};
use components::*;
use map::*;
use resources::*;
use sprite::*;
use state::*;
use storyboard::*;
use systems::UpdateCharacters;
/// Builds a `Story` that, when set up, loads the tile map at
/// `tilemap_src`, registers its tileset sprites, makes it the current
/// map, and hands control to a `SceneStory`.
pub fn create_scene(tilemap_src: &'static str) -> Story {
    let tilemap_src = tilemap_src.to_owned();
    return Story::Setup(Box::new(move |ctx| {
        let state = &mut *ctx.state.borrow_mut();
        let ctx = &mut *ctx.ctx.borrow_mut();
        // Panics if the map file cannot be loaded.
        let map_def = load_tile_map(ctx, tilemap_src.as_ref()).unwrap();
        {
            // Tileset image paths are resolved relative to the map file.
            let src = path::Path::new(&tilemap_src);
            let dir = src.parent().unwrap();
            let mut s_map = state.world.specs_world.write_resource::<SpriteMap>();
            for tileset in map_def.tilesets.iter() {
                for image in tileset.images.iter() {
                    let i_src = dir.join(&image.source);
                    let i = Image::new(ctx, i_src.to_str().unwrap()).unwrap();
                    let sprite = Sprite::new(i, tileset.tile_width, tileset.tile_height);
                    // Sprites are keyed by the image's source path.
                    s_map.0.insert(image.source.clone(), sprite);
                }
            }
        }
        // Register the loaded map and make it current.
        let map = Map::new(map_def);
        let mut maps = state.world.specs_world.write_resource::<Maps>();
        let mut current = state.world.specs_world.write_resource::<CurrentMap>();
        maps.0.insert(tilemap_src.to_owned(), map);
        current.0 = tilemap_src.to_owned();
        Story::Run(Box::new(SceneStory::new(tilemap_src.clone())))
    }));
}
/// Story wrapper around a scene: pushes a `SceneState` once, then waits
/// for the shared `done` flag before popping itself.
pub struct SceneStory {
    tilemap_src: String,
    // Whether the underlying SceneState has been pushed yet.
    started: bool,
    // Completion flag shared with the pushed SceneState.
    done: Arc<RwLock<bool>>,
}
impl SceneStory {
    /// Creates a not-yet-started story for the given tile map path.
    pub fn new(tilemap_src: String) -> Self {
        SceneStory {
            tilemap_src,
            started: false,
            done: Arc::new(RwLock::new(false)),
        }
    }
}
impl State<StoryboardContext> for SceneStory {
    /// Human-readable state name including the map path.
    fn state_name(&self) -> String {
        format!("SceneStory: {}", self.tilemap_src)
    }
    /// First call pushes the scene state; afterwards waits for the shared
    /// `done` flag and pops once it is set.
    fn update(&mut self, _dt: f32, _ctx: StateData<StoryboardContext>) -> StoryTrans {
        if !self.started {
            self.started = true;
            Trans::Push(Box::new(SceneState::new(self.done.clone())))
        } else if *self.done.read().unwrap() {
            Trans::Pop
        } else {
            Trans::None
        }
    }
    /// Never blocks states below it on the stack.
    fn is_blocking(&self) -> bool {
        false
    }
}
/// The state that actually runs and draws a scene.
struct SceneState {
    // Shared completion flag; currently never written by this state.
    #[allow(dead_code)]
    done: Arc<RwLock<bool>>,
}
impl SceneState {
    /// Creates a scene state sharing the caller's completion flag.
    pub fn new(done: Arc<RwLock<bool>>) -> Self {
        SceneState { done }
    }
    /// Builds the ECS dispatcher with the scene's systems.
    fn register_systems() -> specs::Dispatcher<'static, 'static> {
        specs::DispatcherBuilder::new()
            .with(UpdateCharacters, "update_characters", &[])
            .build()
    }
}
impl State<StoryboardContext> for SceneState {
    fn state_name(&self) -> String {
        "SceneState".to_owned()
    }
    /// Runs the scene's ECS systems for this frame.
    fn update(&mut self, _dt: f32, ctx: StateData<StoryboardContext>) -> StoryTrans {
        let state = &mut *ctx.data.state.borrow_mut();
        // NOTE(review): a fresh dispatcher is built every frame; consider
        // caching it on SceneState if this shows up in profiles.
        let mut disp = SceneState::register_systems();
        disp.dispatch(&state.world.specs_world.res);
        Trans::None
    }
    /// Draws the current map's layers, then every character entity.
    fn draw(&mut self, ctx: StateData<StoryboardContext>) {
        use specs::Join;
        let state = &mut *ctx.data.state.borrow_mut();
        let ctx = &mut *ctx.data.ctx.borrow_mut();
        // Drawing map
        let mut sprite_map = state.world.specs_world.write_resource::<SpriteMap>();
        let current_map = state.world.specs_world.read_resource::<CurrentMap>();
        let mut maps = state.world.specs_world.write_resource::<Maps>();
        {
            // Fixed: this line previously contained the HTML-entity-mangled
            // token `¤t_map.0` instead of `&current_map.0`.
            let map = maps.0.get_mut(&current_map.0).unwrap();
            let i_src = map.map_def.tilesets[0].images[0].source.clone();
            let map_sprite = sprite_map.0.get_mut(&i_src).unwrap();
            for i in 0..map.map_def.layers.len() / 3 {
                map.layer_index = i;
                let s = map_sprite.with_context(&*map);
                graphics::draw(ctx, &s, Point2::new(0.0, 0.0), 0.0).unwrap();
            }
        }
        // Drawing characters
        let mut render = state.world.specs_world.write_storage::<EntityRender>();
        let pos = state.world.specs_world.read_storage::<Position>();
        let anim = state.world.specs_world.read_storage::<Animation>();
        // Advance each animated entity to its current frame.
        for (render, anim) in (&mut render, &anim).join() {
            warn!("anim {:?}", anim);
            warn!("render {:?}", render);
            render.frame = anim.frame();
        }
        for (pos, render) in (&pos, &render).join() {
            let renderable = Renderable::new(pos.clone(), render.clone());
            if let Some(sprite) = sprite_map.0.get_mut(&render.sprite_id) {
                let s = sprite.with_context(&renderable);
                graphics::draw(ctx, &s, Point2::new(0.0, 0.0), 0.0).unwrap();
            } else {
                panic!("Sprite Doesn't Exist");
            }
        }
    }
}
|
use lazy_static::lazy_static;
use nalgebra::{Point3, Vector3};
use std::collections::HashMap;
/// Three vertices of a triangle.
type Face = [Point3<f32>; 3];
/// A triangle with an explicit (not necessarily unit) normal vector.
#[derive(Clone, Copy, Debug)]
pub struct Triangle {
    pub normal: Vector3<f32>,
    pub vertices: Face,
}
/// Renders a string as STL triangles: each non-space character (after
/// uppercasing) contributes its glyph mesh, shifted 6 units along -X
/// per character position.
pub fn to_stl(s: &str) -> Vec<Triangle> {
    let mut triangles = Vec::new();
    for (i, c) in s.to_uppercase().chars().enumerate() {
        if c == ' ' {
            continue;
        }
        let shift = Vector3::new(-6.0 * i as f32, 0.0, 0.0);
        triangles.extend(char_to_stl_letter(&c).into_iter().map(|tri| Triangle {
            normal: tri.normal,
            vertices: [
                tri.vertices[0] + shift,
                tri.vertices[1] + shift,
                tri.vertices[2] + shift,
            ],
        }));
    }
    triangles
}
/// Looks up the prebuilt triangle mesh for `c`.
/// Panics if the character has no `LETTERS` entry (callers uppercase
/// input and skip spaces first).
fn char_to_stl_letter(c: &char) -> Vec<Triangle> {
    LETTERS[&c].clone()
}
/// Face normal via the cross product of two edge vectors (not normalized).
fn normal(face: &Face) -> Vector3<f32> {
    let edge_a = face[1] - face[0];
    let edge_b = face[2] - face[0];
    edge_a.cross(&edge_b)
}
/// assumes:
/// X = left/right
/// Y = up/down
/// Z = in/out
///
/// Builds a unit cube at the origin as 12 triangles (2 per face), each
/// with its normalized face normal.
fn cube() -> Vec<Triangle> {
    // Eight corners of the unit cube.
    let vertices = [
        Point3::new(0.0, 0.0, 0.0),
        Point3::new(0.0, 1.0, 0.0),
        Point3::new(1.0, 1.0, 0.0),
        Point3::new(1.0, 0.0, 0.0),
        Point3::new(0.0, 0.0, 1.0),
        Point3::new(0.0, 1.0, 1.0),
        Point3::new(1.0, 1.0, 1.0),
        Point3::new(1.0, 0.0, 1.0),
    ];
    // Vertex-index triples, two triangles per face.
    let indices = [
        [3, 0, 1],
        [3, 1, 2],
        [5, 1, 0],
        [4, 5, 0],
        [2, 1, 5],
        [5, 6, 2],
        [2, 6, 7],
        [3, 2, 7],
        [0, 3, 7],
        [0, 7, 4],
        [4, 6, 5],
        [7, 6, 4],
    ];
    indices
        .iter()
        .map(|group| {
            let face = [vertices[group[0]], vertices[group[1]], vertices[group[2]]];
            let normal = normal(&face).normalize();
            // Field-init shorthand replaces the redundant `normal: normal`.
            Triangle {
                normal,
                vertices: face,
            }
        })
        .collect()
}
/// Defines a glyph-builder function `$name()`: `$def` is a bitmap of
/// 0/1 cells, and every set cell contributes a translated unit cube's
/// triangles (columns shift along -X, rows along -Z).
macro_rules! letter {
    ($name:ident, $def:expr) => {
        fn $name() -> Vec<Triangle> {
            let mut cubes = vec![];
            for (y, row) in $def.iter().enumerate() {
                for (x, column) in row.iter().enumerate() {
                    if *column == 1 {
                        let translation = Vector3::new(-1.0 * x as f32, 0.0, -1.0 * y as f32);
                        for triangle in cube().iter() {
                            let t = Triangle {
                                normal: triangle.normal,
                                vertices: [
                                    triangle.vertices[0] + translation,
                                    triangle.vertices[1] + translation,
                                    triangle.vertices[2] + translation,
                                ],
                            };
                            cubes.push(t);
                        }
                    }
                }
            }
            cubes
        }
    };
}
lazy_static! {
    /// Lookup table from uppercase character to its glyph mesh,
    /// populated from the `letter!` definitions below.
    static ref LETTERS: HashMap<char, Vec<Triangle>> = {
        let mut characters = HashMap::new();
        // numbers
        characters.insert('0', zero());
        characters.insert('1', one());
        characters.insert('2', two());
        characters.insert('3', three());
        characters.insert('4', four());
        characters.insert('5', five());
        characters.insert('6', six());
        characters.insert('7', seven());
        characters.insert('8', eight());
        characters.insert('9', nine());
        // letters
        characters.insert('A', a_upper());
        characters.insert('B', b_upper());
        characters.insert('C', c_upper());
        characters.insert('D', d_upper());
        characters.insert('E', e_upper());
        characters.insert('F', f_upper());
        characters.insert('G', g_upper());
        characters.insert('H', h_upper());
        characters.insert('I', i_upper());
        characters.insert('J', j_upper());
        characters.insert('K', k_upper());
        characters.insert('L', l_upper());
        characters.insert('M', m_upper());
        characters.insert('N', n_upper());
        characters.insert('O', o_upper());
        characters.insert('P', p_upper());
        characters.insert('Q', q_upper());
        characters.insert('R', r_upper());
        characters.insert('S', s_upper());
        characters.insert('T', t_upper());
        characters.insert('U', u_upper());
        characters.insert('V', v_upper());
        characters.insert('W', w_upper());
        characters.insert('X', x_upper());
        characters.insert('Y', y_upper());
        characters.insert('Z', z_upper());
        // symbols
        characters.insert('-', hyphen());
        characters
    };
}
// Each `letter!` invocation below encodes one glyph as a 5x5 bitmap;
// a 1 places a unit cube at that cell.
letter! {
    zero,
    [
        [0, 1, 1, 1, 0],
        [1, 0, 0, 1, 1],
        [1, 0, 1, 0, 1],
        [1, 1, 0, 0, 1],
        [0, 1, 1, 1, 0]
    ]
}
letter! {
    one,
    [
        [0, 1, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 1, 1, 1, 0]
    ]
}
letter! {
    two,
    [
        [0, 1, 1, 0, 0],
        [1, 0, 0, 1, 0],
        [0, 0, 1, 0, 0],
        [0, 1, 0, 0, 0],
        [1, 1, 1, 1, 0]
    ]
}
letter! {
    three,
    [
        [1, 1, 1, 0, 0],
        [0, 0, 0, 1, 0],
        [0, 1, 1, 0, 0],
        [0, 0, 0, 1, 0],
        [1, 1, 1, 0, 0],
    ]
}
letter! {
    four,
    [
        [1, 0, 0, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [0, 0, 0, 1, 0],
        [0, 0, 0, 1, 0],
    ]
}
letter! {
    five,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 0, 0],
        [0, 0, 0, 1, 0],
        [1, 1, 1, 0, 0],
    ]
}
letter! {
    six,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
    ]
}
letter! {
    seven,
    [
        [1, 1, 1, 1, 0],
        [0, 0, 0, 1, 0],
        [0, 0, 1, 0, 0],
        [0, 1, 0, 0, 0],
        [0, 1, 0, 0, 0],
    ]
}
letter! {
    eight,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
    ]
}
letter! {
    nine,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [0, 0, 0, 1, 0],
        [1, 1, 1, 1, 0]
    ]
}
letter! {
    a_upper,
    [
        [0, 1, 1, 0, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 0, 0, 1, 0]
    ]
}
letter! {
    b_upper,
    [
        [1, 1, 1, 0, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 0, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 0, 0]
    ]
}
letter! {
    c_upper,
    [
        [0, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0],
        [0, 1, 1, 1, 0]
    ]
}
letter! {
    d_upper,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 1, 1, 1, 0]
    ]
}
letter! {
    e_upper,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 1, 0],
    ]
}
letter! {
    f_upper,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0],
    ]
}
letter! {
    g_upper,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
    ]
}
letter! {
    h_upper,
    [
        [1, 0, 0, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 0, 0, 1, 0],
    ]
}
letter! {
    i_upper,
    [
        [1, 1, 1, 1, 1],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [1, 1, 1, 1, 1],
    ]
}
letter! {
    j_upper,
    [
        [1, 1, 1, 1, 1],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [1, 0, 1, 0, 0],
        [1, 1, 1, 0, 0]
    ]
}
letter! {
    k_upper,
    [
        [1, 0, 0, 1, 0],
        [1, 0, 1, 0, 0],
        [1, 1, 0, 0, 0],
        [1, 0, 1, 0, 0],
        [1, 0, 0, 1, 0],
    ]
}
letter! {
    l_upper,
    [
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 1, 0],
    ]
}
letter! {
    m_upper,
    [
        [1, 0, 0, 0, 1],
        [1, 1, 0, 1, 1],
        [1, 0, 1, 0, 1],
        [1, 0, 1, 0, 1],
        [1, 0, 0, 0, 1],
    ]
}
letter! {
    n_upper,
    [
        [1, 0, 0, 0, 1],
        [1, 1, 0, 0, 1],
        [1, 0, 1, 0, 1],
        [1, 0, 0, 1, 1],
        [1, 0, 0, 0, 1]
    ]
}
letter! {
    o_upper,
    [
        [0, 1, 1, 1, 0],
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [0, 1, 1, 1, 0]
    ]
}
letter! {
    p_upper,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0]
    ]
}
letter! {
    q_upper,
    [
        [0, 1, 1, 1, 0],
        [1, 0, 0, 0, 1],
        [1, 0, 1, 0, 1],
        [1, 0, 0, 1, 0],
        [0, 1, 1, 0, 1]
    ]
}
letter! {
    r_upper,
    [
        [1, 1, 1, 1, 0],
        [1, 0, 0, 1, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 1, 0, 0],
        [1, 0, 0, 1, 0]
    ]
}
letter! {
    s_upper,
    [
        [1, 1, 1, 1, 1],
        [1, 0, 0, 0, 0],
        [1, 1, 1, 1, 1],
        [0, 0, 0, 0, 1],
        [1, 1, 1, 1, 1]
    ]
}
letter! {
    t_upper,
    [
        [1, 1, 1, 1, 1],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
    ]
}
letter! {
    u_upper,
    [
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 1, 1, 1, 1],
    ]
}
letter! {
    v_upper,
    [
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 1, 0, 1, 1],
        [0, 1, 0, 1, 0],
        [0, 0, 1, 0, 0],
    ]
}
letter! {
    w_upper,
    [
        [1, 0, 0, 0, 1],
        [1, 0, 0, 0, 1],
        [1, 0, 1, 0, 1],
        [1, 1, 0, 1, 1],
        [1, 0, 0, 0, 1]
    ]
}
letter! {
    x_upper,
    [
        [1, 0, 0, 0, 1],
        [0, 1, 0, 1, 0],
        [0, 0, 1, 0, 0],
        [0, 1, 0, 1, 0],
        [1, 0, 0, 0, 1],
    ]
}
letter! {
    y_upper,
    [
        [1, 0, 0, 0, 1],
        [0, 1, 0, 1, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
        [0, 0, 1, 0, 0],
    ]
}
letter! {
    z_upper,
    [
        [1, 1, 1, 1, 1],
        [0, 0, 0, 1, 0],
        [0, 0, 1, 0, 0],
        [0, 1, 0, 0, 0],
        [1, 1, 1, 1, 1]
    ]
}
letter! {
    hyphen,
    [
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0],
        [0, 1, 1, 1, 0],
        [0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0]
    ]
}
#[cfg(test)]
mod tests {
    use super::*;
    use byteorder::{LittleEndian, WriteBytesExt};
    use std::fs::File;
    use std::io::BufWriter;
    use std::io::Write;

    /// Writes `triangles` to `path` in binary STL format:
    ///
    /// UINT8[80] – Header
    /// UINT32 – Number of triangles
    /// foreach triangle
    /// REAL32[3] – Normal vector
    /// REAL32[3] – Vertex 1
    /// REAL32[3] – Vertex 2
    /// REAL32[3] – Vertex 3
    /// UINT16 – Attribute byte count
    /// end
    fn write_stl(path: &str, triangles: &[Triangle]) -> std::io::Result<()> {
        let output_file = File::create(path)?;
        let mut output_buf = BufWriter::new(output_file);
        // Write the 80-byte header (content is ignored by readers; zero-filled).
        let header = [0u8; 80];
        output_buf.write_all(&header)?;
        // Write the triangle count.
        output_buf.write_u32::<LittleEndian>(triangles.len() as u32)?;
        // Write one record per triangle.
        for triangle in triangles {
            // Normal vector.
            output_buf.write_f32::<LittleEndian>(triangle.normal[0])?;
            output_buf.write_f32::<LittleEndian>(triangle.normal[1])?;
            output_buf.write_f32::<LittleEndian>(triangle.normal[2])?;
            // Each coordinate of the 3 vertices.
            for vertex in triangle.vertices.iter() {
                output_buf.write_f32::<LittleEndian>(vertex[0])?;
                output_buf.write_f32::<LittleEndian>(vertex[1])?;
                output_buf.write_f32::<LittleEndian>(vertex[2])?;
            }
            // Attribute byte count (always zero).
            output_buf.write_u16::<LittleEndian>(0)?;
        }
        // Flush explicitly: BufWriter's Drop also flushes but silently
        // swallows any I/O error, so surface it to the caller here.
        output_buf.flush()?;
        Ok(())
    }

    #[test]
    fn it_uses_the_high_level_api() {
        let s = "abcdefghijklmnopqrstuvwxyz-0123456789";
        let stl = to_stl(s);
        // A non-empty input must yield at least one triangle.
        assert!(!stl.is_empty());
        write_stl("hello.stl", &stl).unwrap();
    }

    #[test]
    fn it_does_a_cube() {
        let cube = cube();
        assert!(!cube.is_empty());
        write_stl("cube.stl", &cube).unwrap();
    }

    #[test]
    fn it_does_letters() {
        let input = [
            a_upper(),
            b_upper(),
            c_upper(),
            d_upper(),
            e_upper(),
            f_upper(),
        ];
        let mut letters = vec![];
        // Lay the glyphs out left to right, 6 units apart on the X axis.
        for (i, letter) in input.iter().enumerate() {
            let translation = Vector3::new(6.0 * i as f32, 0.0, 0.0);
            for triangle in letter {
                let t = Triangle {
                    normal: triangle.normal,
                    vertices: [
                        triangle.vertices[0] + translation,
                        triangle.vertices[1] + translation,
                        triangle.vertices[2] + translation,
                    ],
                };
                letters.push(t);
            }
        }
        assert!(!letters.is_empty());
        write_stl("letters.stl", &letters).unwrap();
    }

    #[test]
    fn it_does_numbers() {
        let nums = vec![
            zero(),
            one(),
            two(),
            three(),
            four(),
            five(),
            six(),
            seven(),
            eight(),
            nine(),
            hyphen(),
        ];
        let mut numbers = vec![];
        // Same left-to-right layout as `it_does_letters`.
        for (i, letter) in nums.iter().enumerate() {
            let translation = Vector3::new(6.0 * i as f32, 0.0, 0.0);
            for triangle in letter {
                let t = Triangle {
                    normal: triangle.normal,
                    vertices: [
                        triangle.vertices[0] + translation,
                        triangle.vertices[1] + translation,
                        triangle.vertices[2] + translation,
                    ],
                };
                numbers.push(t);
            }
        }
        assert!(!numbers.is_empty());
        write_stl("numbers.stl", &numbers).unwrap();
    }
}
|
use std::cell::Ref;
/// Entry point of Bot
fn main() -> lux_ai::LuxAiResult<()> {
    // Initialize Lux AI I/O environment
    let mut environment = lux_ai::Environment::new();
    // Create agent [lux_ai_api::Agent]
    let mut agent = lux_ai::Agent::new(&mut environment)?;
    // For every turn
    loop {
        // Update Agent state for current turn; empty input means the game ended.
        match agent.update_turn(&mut environment) {
            Err(lux_ai::LuxAiError::EmptyInput) => break,
            result => result?,
        };
        let game_map = &agent.game_map;
        #[allow(unused_variables)]
        let opponent = &agent.players[(agent.team as usize + 1) % 2];
        let player = agent.player();
        // Cache only the cells that actually hold a resource, so the
        // per-unit search below does not scan every map cell.
        let mut resource_cells: Vec<&lux_ai::Cell> = vec![];
        for y in 0..game_map.height() {
            for x in 0..game_map.width() {
                let position = lux_ai::Position::new(x, y);
                let cell = &game_map[position];
                if cell.resource.is_some() {
                    resource_cells.push(cell);
                }
            }
        }
        // For every one of our units
        for unit in player.units.iter() {
            match unit.unit_type {
                // If worker has zero cooldown
                lux_ai::UnitType::Worker if unit.can_act() => {
                    if unit.get_cargo_space_left().is_positive() {
                        // If it has cargo space left:
                        let mut closest_distance = f32::MAX;
                        let mut closest_resource_cell: Option<&lux_ai::Cell> = None;
                        // Find the closest already-researched resource cell.
                        for &resource_cell in resource_cells.iter() {
                            let distance = resource_cell.pos.distance_to(&unit.pos);
                            if let Some(resource) = &resource_cell.resource {
                                if player.is_researched(resource.resource_type) &&
                                    distance < closest_distance
                                {
                                    closest_distance = distance;
                                    closest_resource_cell = Some(resource_cell);
                                }
                            }
                        }
                        // And if there is one, move in its direction.
                        if let Some(closest_resource_cell) = closest_resource_cell {
                            let direction = unit.pos.direction_to(&closest_resource_cell.pos);
                            environment.write_action(unit.move_(direction));
                        }
                    } else {
                        // Else (no cargo space left):
                        let mut closest_distance = f32::MAX;
                        let mut closest_city_tile: Option<Ref<lux_ai::CityTile>> = None;
                        // Find the nearest city tile to drop resources at.
                        for city in player.cities.values() {
                            for city_tile in city.citytiles.iter() {
                                let city_tile = city_tile.borrow();
                                let distance = city_tile.pos.distance_to(&unit.pos);
                                if distance < closest_distance {
                                    closest_distance = distance;
                                    closest_city_tile = Some(city_tile);
                                }
                            }
                        }
                        // And if there is one, move in its direction.
                        if let Some(closest_city_tile) = closest_city_tile {
                            let direction = unit.pos.direction_to(&closest_city_tile.pos);
                            environment.write_action(unit.move_(direction));
                        }
                    }
                },
                _ => {},
            }
        }
        // Flush all performed actions
        environment.flush_actions()?;
        // End our turn
        environment.write_raw_action(lux_ai::Commands::FINISH.to_string())?;
        // Flush I/O buffering
        environment.flush()?;
    }
    Ok(())
}
|
use std::env;
use std::process;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use tokio::prelude::*;
use tokio::runtime::Runtime;
use tokio::timer::Interval;
use modio::error::Error;
use modio::filter::prelude::*;
use modio::QueryString;
use modio::{auth::Credentials, Modio};
/// Returns the current Unix time as whole seconds since 1970-01-01 UTC.
fn current_timestamp() -> u64 {
    let now = SystemTime::now();
    let since_epoch = now.duration_since(UNIX_EPOCH).unwrap();
    since_epoch.as_secs()
}
/// Polls `/me/events` of the mod.io API every 10 seconds, printing any
/// events added since the previous poll (tokio 0.1 / futures 0.1 style).
fn main() -> Result<(), Error> {
    // Load `.env` (if present) and logger configuration; both are optional.
    dotenv::dotenv().ok();
    env_logger::init();

    // Fetch the access token / api key from the environment of the current process.
    // A token takes precedence over an api key.
    let creds = match (env::var("MODIO_TOKEN"), env::var("MODIO_API_KEY")) {
        (Ok(token), _) => Credentials::Token(token),
        (_, Ok(apikey)) => Credentials::ApiKey(apikey),
        _ => {
            eprintln!("missing MODIO_TOKEN or MODIO_API_KEY environment variable");
            process::exit(1);
        }
    };
    let host = env::var("MODIO_HOST").unwrap_or_else(|_| "https://api.test.mod.io/v1".to_string());

    // tokio runtime to execute the modio futures.
    let mut rt = Runtime::new().expect("new rt");

    // Creates a `Modio` endpoint for the test environment.
    let modio = Modio::host(host, creds)?;

    // Creates an `Interval` task that yields every 10 seconds starting now.
    // `fold` threads the timestamp of the previous tick through to the next,
    // so each poll only asks for events newer than the last poll.
    let task = Interval::new_interval(Duration::from_secs(10))
        .fold(current_timestamp(), move |tstamp, _| {
            // Create an event filter for `date_added` > time.
            let filter = DateAdded::gt(tstamp);

            // Create the call for `/me/events` and wait for the result.
            let print = modio
                .me()
                .events(&filter)
                .collect()
                .and_then(move |list| {
                    println!("event filter: {}", filter.to_query_string());
                    println!("event count: {}", list.len());
                    println!("{:#?}", list);
                    Ok(())
                })
                .map_err(|e| println!("{:?}", e));
            // Spawn the request onto the runtime; it runs concurrently
            // with the interval timer.
            rt.spawn(print);

            // timestamp for the next run.
            Ok(current_timestamp())
        })
        .map(|_| ())
        .map_err(|e| panic!("interval errored; err={:?}", e));

    // Blocks until the interval task finishes (i.e. only on error).
    tokio::run(task);
    Ok(())
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Maximum number of value arguments the generated `register_*` /
/// `vectorize_*` / `erase_*` helpers are emitted for.
const MAX_ARGS: usize = 5;
use std::fmt::Write as _;
use std::fs::File;
use std::io::Write;
use std::process::Command;
use itertools::Itertools;
/// Generates `src/query/expression/src/register.rs`: the
/// `FunctionRegistry::register_*` methods plus the `vectorize_*`,
/// `passthrough_nullable_*`, `combine_nullable_*` and `erase_*` free
/// functions, each emitted once per arity up to [`MAX_ARGS`].
pub fn codegen_register() {
    let mut source = String::new();

    // Write imports.
    writeln!(
        source,
        "
#![allow(unused_parens)]
#![allow(unused_variables)]
#![allow(clippy::redundant_closure)]
use crate::FunctionEval;
use crate::Function;
use crate::EvalContext;
use crate::FunctionDomain;
use crate::FunctionRegistry;
use crate::FunctionSignature;
use crate::property::Domain;
use crate::types::nullable::NullableColumn;
use crate::types::nullable::NullableDomain;
use crate::types::*;
use crate::values::Value;
use crate::values::ValueRef;
"
    )
    .unwrap();

    // Write `impl FunctionRegistry`.
    writeln!(source, "impl FunctionRegistry {{").unwrap();

    // Write `register_x_arg`.
    for n_args in 1..=MAX_ARGS {
        // `(0..n_args).map(|n| n + 1)` yields the 1-based argument
        // indices used in generated identifiers (I1, arg1, ...).
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_f_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&I{n}::Domain, "))
            .join("");
        let arg_g_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}::ScalarRef<'_>, "))
            .join("");
        let arg_generics = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}, "))
            .join("");
        writeln!(
            source,
            "
    pub fn register_{n_args}_arg<{arg_generics_bound} O: ArgType, F, G>(
        &mut self,
        name: &str,
        calc_domain: F,
        func: G,
    ) where
        F: Fn({arg_f_closure_sig}) -> FunctionDomain<O> + 'static + Clone + Copy + Send + Sync,
        G: Fn({arg_g_closure_sig} &mut EvalContext) -> O::Scalar + 'static + Clone + Copy + Send + Sync,
    {{
        self.register_passthrough_nullable_{n_args}_arg::<{arg_generics} O, _, _>(
            name,
            calc_domain,
            vectorize_{n_args}_arg(func),
        )
    }}
"
        )
        .unwrap();
    }

    // Write `register_passthrough_nullable_x_arg`.
    for n_args in 1..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_f_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&I{n}::Domain, "))
            .join("");
        let arg_g_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, I{n}>, "))
            .join("");
        let arg_sig_type = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}::data_type(), "))
            .join("");
        let arg_generics = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}, "))
            .join("");
        let arg_nullable_generics = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("NullableType<I{n}>, "))
            .join("");
        let closure_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}"))
            .join(",");
        let closure_args_value = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&arg{n}.value"))
            .join(",");
        let some_values = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("Some(value{n})"))
            .join(",");
        let values = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("value{n}"))
            .join(",");
        let any_arg_has_null = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}.has_null"))
            .join("||");
        writeln!(
            source,
            "
    pub fn register_passthrough_nullable_{n_args}_arg<{arg_generics_bound} O: ArgType, F, G>(
        &mut self,
        name: &str,
        calc_domain: F,
        func: G,
    ) where
        F: Fn({arg_f_closure_sig}) -> FunctionDomain<O> + 'static + Clone + Copy + Send + Sync,
        G: for<'a> Fn({arg_g_closure_sig} &mut EvalContext) -> Value<O> + 'static + Clone + Copy + Send + Sync,
    {{
        let has_nullable = &[{arg_sig_type} O::data_type()]
            .iter()
            .any(|ty| ty.as_nullable().is_some() || ty.is_null());
        assert!(
            !has_nullable,
            \"Function {{}} has nullable argument or output, please use register_{n_args}_arg_core instead\",
            name
        );
        self.register_{n_args}_arg_core::<{arg_generics} O, _, _>(name, calc_domain, func);
        self.register_{n_args}_arg_core::<{arg_nullable_generics} NullableType<O>, _, _>(
            name,
            move |{closure_args}| {{
                match ({closure_args_value}) {{
                    ({some_values}) => {{
                        if let Some(domain) = calc_domain({values}).normalize() {{
                            FunctionDomain::Domain(NullableDomain {{
                                has_null: {any_arg_has_null},
                                value: Some(Box::new(domain)),
                            }})
                        }} else {{
                            FunctionDomain::MayThrow
                        }}
                    }},
                    _ => {{
                        FunctionDomain::Domain(NullableDomain {{
                            has_null: true,
                            value: None,
                        }})
                    }},
                }}
            }},
            passthrough_nullable_{n_args}_arg(func),
        );
    }}
"
        )
        .unwrap();
    }

    // Write `register_combine_nullable_x_arg`.
    for n_args in 1..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_f_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&I{n}::Domain, "))
            .join("");
        let arg_g_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, I{n}>, "))
            .join("");
        let arg_sig_type = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}::data_type(), "))
            .join("");
        let arg_generics = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}, "))
            .join("");
        let arg_nullable_generics = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("NullableType<I{n}>, "))
            .join("");
        let closure_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}"))
            .join(",");
        let closure_args_value = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&arg{n}.value"))
            .join(",");
        let some_values = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("Some(value{n})"))
            .join(",");
        let values = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("value{n}"))
            .join(",");
        let any_arg_has_null = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}.has_null"))
            .join("||");
        writeln!(
            source,
            "
    pub fn register_combine_nullable_{n_args}_arg<{arg_generics_bound} O: ArgType, F, G>(
        &mut self,
        name: &str,
        calc_domain: F,
        func: G,
    ) where
        F: Fn({arg_f_closure_sig}) -> FunctionDomain<NullableType<O>> + 'static + Clone + Copy + Send + Sync,
        G: for<'a> Fn({arg_g_closure_sig} &mut EvalContext) -> Value<NullableType<O>> + 'static + Clone + Copy + Send + Sync,
    {{
        let has_nullable = &[{arg_sig_type} O::data_type()]
            .iter()
            .any(|ty| ty.as_nullable().is_some() || ty.is_null());
        assert!(
            !has_nullable,
            \"Function {{}} has nullable argument or output, please use register_{n_args}_arg_core instead\",
            name
        );
        self.register_{n_args}_arg_core::<{arg_generics} NullableType<O>, _, _>(
            name,
            calc_domain,
            func
        );
        self.register_{n_args}_arg_core::<{arg_nullable_generics} NullableType<O>, _, _>(
            name,
            move |{closure_args}| {{
                match ({closure_args_value}) {{
                    ({some_values}) => {{
                        if let Some(domain) = calc_domain({values}).normalize() {{
                            FunctionDomain::Domain(NullableDomain {{
                                has_null: {any_arg_has_null} || domain.has_null,
                                value: domain.value,
                            }})
                        }} else {{
                            FunctionDomain::MayThrow
                        }}
                    }}
                    _ => {{
                        FunctionDomain::Domain(NullableDomain {{
                            has_null: true,
                            value: None,
                        }})
                    }},
                }}
            }},
            combine_nullable_{n_args}_arg(func),
        );
    }}
"
        )
        .unwrap();
    }

    // Write `register_x_arg_core`.
    for n_args in 0..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_f_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&I{n}::Domain, "))
            .join("");
        let arg_g_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, I{n}>, "))
            .join("");
        let arg_sig_type = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}::data_type()"))
            .join(", ");
        let arg_generics = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}, "))
            .join("");
        writeln!(
            source,
            "
    pub fn register_{n_args}_arg_core<{arg_generics_bound} O: ArgType, F, G>(
        &mut self,
        name: &str,
        calc_domain: F,
        func: G,
    ) where
        F: Fn({arg_f_closure_sig}) -> FunctionDomain<O> + 'static + Clone + Copy + Send + Sync,
        G: for <'a> Fn({arg_g_closure_sig} &mut EvalContext) -> Value<O> + 'static + Clone + Copy + Send + Sync,
    {{
        let func = Function {{
            signature: FunctionSignature {{
                name: name.to_string(),
                args_type: vec![{arg_sig_type}],
                return_type: O::data_type(),
            }},
            eval: FunctionEval::Scalar {{
                calc_domain: Box::new(erase_calc_domain_generic_{n_args}_arg::<{arg_generics} O>(calc_domain)),
                eval: Box::new(erase_function_generic_{n_args}_arg(func)),
            }},
        }};
        self.register_function(func);
    }}
"
        )
        .unwrap();
    }

    writeln!(source, "}}").unwrap();

    // Write `vectorize_x_arg`.
    for n_args in 1..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_input_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}::ScalarRef<'_>, "))
            .join("");
        let arg_output_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<I{n}>, "))
            .join("");
        let func_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}, "))
            .join("");
        let args_tuple = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}"))
            .join(", ");
        let arg_scalar = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef::Scalar(arg{n})"))
            .join(", ");
        // One match arm per non-empty subset of arguments passed as
        // columns: bit i of `idx` marks argument i+1 as a column.
        let match_arms = (1..(1 << n_args))
            .map(|idx| {
                let columns = (0..n_args)
                    .filter(|n| idx & (1 << n) != 0)
                    .collect::<Vec<_>>();
                let arm_pat = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("ValueRef::Column(arg{})", n + 1)
                        } else {
                            format!("ValueRef::Scalar(arg{})", n + 1)
                        }
                    })
                    .join(", ");
                let arg_iter = (0..n_args)
                    .filter(|n| columns.contains(n))
                    .map(|n| n + 1)
                    .map(|n| format!("let arg{n}_iter = I{n}::iter_column(&arg{n});"))
                    .join("");
                let zipped_iter = columns
                    .iter()
                    .map(|n| format!("arg{}_iter", n + 1))
                    .reduce(|acc, item| format!("{acc}.zip({item})"))
                    .unwrap();
                let col_arg = columns
                    .iter()
                    .map(|n| format!("arg{}", n + 1))
                    .reduce(|acc, item| format!("({acc}, {item})"))
                    .unwrap();
                let func_arg = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("arg{}, ", n + 1)
                        } else {
                            format!("arg{}.clone(), ", n + 1)
                        }
                    })
                    .join("");
                format!(
                    "({arm_pat}) => {{
                        let generics = &(ctx.generics.to_owned());
                        {arg_iter}
                        let iter = {zipped_iter}.map(|{col_arg}| func({func_arg} ctx));
                        let col = O::column_from_iter(iter, generics);
                        Value::Column(col)
                    }}"
                )
            })
            .join("");
        writeln!(
            source,
            "
pub fn vectorize_{n_args}_arg<{arg_generics_bound} O: ArgType>(
    func: impl Fn({arg_input_closure_sig} &mut EvalContext) -> O::Scalar + Copy + Send + Sync,
) -> impl Fn({arg_output_closure_sig} &mut EvalContext) -> Value<O> + Copy + Send + Sync {{
    move |{func_args} ctx| match ({args_tuple}) {{
        ({arg_scalar}) => Value::Scalar(func({func_args} ctx)),
        {match_arms}
    }}
}}
"
        )
        .unwrap();
    }

    // Write `vectorize_with_builder_x_arg`.
    for n_args in 1..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_input_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}::ScalarRef<'_>, "))
            .join("");
        let arg_output_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<I{n}>, "))
            .join("");
        let func_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}, "))
            .join("");
        let args_tuple = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}"))
            .join(", ");
        let arg_scalar = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef::Scalar(arg{n})"))
            .join(", ");
        // Same column-subset enumeration as in `vectorize_x_arg`, but the
        // generated arm pushes into an output builder instead of mapping.
        let match_arms = (1..(1 << n_args))
            .map(|idx| {
                let columns = (0..n_args)
                    .filter(|n| idx & (1 << n) != 0)
                    .collect::<Vec<_>>();
                let arm_pat = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("ValueRef::Column(arg{})", n + 1)
                        } else {
                            format!("ValueRef::Scalar(arg{})", n + 1)
                        }
                    })
                    .join(", ");
                let arg_iter = (0..n_args)
                    .filter(|n| columns.contains(n))
                    .map(|n| n + 1)
                    .map(|n| format!("let arg{n}_iter = I{n}::iter_column(&arg{n});"))
                    .join("");
                let zipped_iter = columns
                    .iter()
                    .map(|n| format!("arg{}_iter", n + 1))
                    .reduce(|acc, item| format!("{acc}.zip({item})"))
                    .unwrap();
                let col_arg = columns
                    .iter()
                    .map(|n| format!("arg{}", n + 1))
                    .reduce(|acc, item| format!("({acc}, {item})"))
                    .unwrap();
                let func_arg = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("arg{}, ", n + 1)
                        } else {
                            format!("arg{}.clone(), ", n + 1)
                        }
                    })
                    .join("");
                format!(
                    "({arm_pat}) => {{
                        let generics = &(ctx.generics.to_owned());
                        {arg_iter}
                        let iter = {zipped_iter};
                        let mut builder = O::create_builder(iter.size_hint().0, generics);
                        for {col_arg} in iter {{
                            func({func_arg} &mut builder, ctx);
                        }}
                        Value::Column(O::build_column(builder))
                    }}"
                )
            })
            .join("");
        writeln!(
            source,
            "
pub fn vectorize_with_builder_{n_args}_arg<{arg_generics_bound} O: ArgType>(
    func: impl Fn({arg_input_closure_sig} &mut O::ColumnBuilder, &mut EvalContext) + Copy + Send + Sync,
) -> impl Fn({arg_output_closure_sig} &mut EvalContext) -> Value<O> + Copy + Send + Sync {{
    move |{func_args} ctx| match ({args_tuple}) {{
        ({arg_scalar}) => {{
            let generics = &(ctx.generics.to_owned());
            let mut builder = O::create_builder(1, generics);
            func({func_args} &mut builder, ctx);
            Value::Scalar(O::build_scalar(builder))
        }}
        {match_arms}
    }}
}}
"
        )
        .unwrap();
    }

    // Write `passthrough_nullable_x_arg`.
    for n_args in 1..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_input_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, I{n}>, "))
            .join("");
        let arg_output_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, NullableType<I{n}>>, "))
            .join("");
        let closure_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}, "))
            .join("");
        let args_tuple = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}"))
            .join(", ");
        let arg_scalar = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef::Scalar(Some(arg{n}))"))
            .join(", ");
        let scalar_func_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef::Scalar(arg{n}), "))
            .join("");
        // A pattern matching "any argument is a null scalar", which
        // short-circuits the whole call to a null scalar result.
        let scalar_nones_pats = (0..n_args)
            .map(|n| {
                let pat = (0..n_args)
                    .map(|nth| {
                        if nth == n {
                            "ValueRef::Scalar(None)"
                        } else {
                            "_"
                        }
                    })
                    .join(",");
                format!("({pat})")
            })
            .reduce(|acc, item| format!("{acc} | {item}"))
            .unwrap();
        // One arm per non-empty subset of column arguments; the validities
        // of all column arguments are AND-ed together.
        let match_arms = (1..(1 << n_args))
            .map(|idx| {
                let columns = (0..n_args)
                    .filter(|n| idx & (1 << n) != 0)
                    .collect::<Vec<_>>();
                let arm_pat = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("ValueRef::Column(arg{})", n + 1)
                        } else {
                            format!("ValueRef::Scalar(Some(arg{}))", n + 1)
                        }
                    })
                    .join(", ");
                let and_validity = columns
                    .iter()
                    .map(|n| format!("arg{}.validity", n + 1))
                    .reduce(|acc, item| {
                        format!("common_arrow::arrow::bitmap::and(&{acc}, &{item})")
                    })
                    .unwrap();
                let func_arg = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("ValueRef::Column(arg{}.column), ", n + 1)
                        } else {
                            format!("ValueRef::Scalar(arg{}), ", n + 1)
                        }
                    })
                    .join("");
                format!(
                    "({arm_pat}) => {{
                        let and_validity = {and_validity};
                        let validity = ctx.validity.as_ref().map(|valid| valid & (&and_validity)).unwrap_or(and_validity);
                        ctx.validity = Some(validity.clone());
                        let column = func({func_arg} ctx).into_column().unwrap();
                        Value::Column(NullableColumn {{ column, validity }})
                    }}"
                )
            })
            .join("");
        writeln!(
            source,
            "
pub fn passthrough_nullable_{n_args}_arg<{arg_generics_bound} O: ArgType>(
    func: impl for <'a> Fn({arg_input_closure_sig} &mut EvalContext) -> Value<O> + Copy + Send + Sync,
) -> impl for <'a> Fn({arg_output_closure_sig} &mut EvalContext) -> Value<NullableType<O>> + Copy + Send + Sync {{
    move |{closure_args} ctx| match ({args_tuple}) {{
        {scalar_nones_pats} => Value::Scalar(None),
        ({arg_scalar}) => Value::Scalar(Some(
            func({scalar_func_args} ctx)
                .into_scalar()
                .unwrap(),
        )),
        {match_arms}
    }}
}}
"
        )
        .unwrap();
    }

    // Write `combine_nullable_x_arg`.
    for n_args in 1..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_input_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, I{n}>, "))
            .join("");
        let arg_output_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, NullableType<I{n}>>, "))
            .join("");
        let closure_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}, "))
            .join("");
        let args_tuple = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}"))
            .join(", ");
        let arg_scalar = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef::Scalar(Some(arg{n}))"))
            .join(", ");
        let scalar_func_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef::Scalar(arg{n}), "))
            .join("");
        let scalar_nones_pats = (0..n_args)
            .map(|n| {
                let pat = (0..n_args)
                    .map(|nth| {
                        if nth == n {
                            "ValueRef::Scalar(None)"
                        } else {
                            "_"
                        }
                    })
                    .join(",");
                format!("({pat})")
            })
            .reduce(|acc, item| format!("{acc} | {item}"))
            .unwrap();
        // Like `passthrough_nullable_x_arg`, but the function itself also
        // returns a nullable value, so both validities are AND-ed.
        let match_arms = (1..(1 << n_args))
            .map(|idx| {
                let columns = (0..n_args)
                    .filter(|n| idx & (1 << n) != 0)
                    .collect::<Vec<_>>();
                let arm_pat = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("ValueRef::Column(arg{})", n + 1)
                        } else {
                            format!("ValueRef::Scalar(Some(arg{}))", n + 1)
                        }
                    })
                    .join(", ");
                let and_validity = columns
                    .iter()
                    .map(|n| format!("arg{}.validity", n + 1))
                    .reduce(|acc, item| {
                        format!("common_arrow::arrow::bitmap::and(&{acc}, &{item})")
                    })
                    .unwrap();
                let func_arg = (0..n_args)
                    .map(|n| {
                        if columns.contains(&n) {
                            format!("ValueRef::Column(arg{}.column), ", n + 1)
                        } else {
                            format!("ValueRef::Scalar(arg{}), ", n + 1)
                        }
                    })
                    .join("");
                format!(
                    "({arm_pat}) => {{
                        let and_validity = {and_validity};
                        let validity = ctx.validity.as_ref().map(|valid| valid & (&and_validity)).unwrap_or(and_validity);
                        ctx.validity = Some(validity.clone());
                        let nullable_column = func({func_arg} ctx).into_column().unwrap();
                        let combine_validity = common_arrow::arrow::bitmap::and(&validity, &nullable_column.validity);
                        Value::Column(NullableColumn {{ column: nullable_column.column, validity: combine_validity }})
                    }}"
                )
            })
            .join("");
        writeln!(
            source,
            "
pub fn combine_nullable_{n_args}_arg<{arg_generics_bound} O: ArgType>(
    func: impl for <'a> Fn({arg_input_closure_sig} &mut EvalContext) -> Value<NullableType<O>> + Copy + Send + Sync,
) -> impl for <'a> Fn({arg_output_closure_sig} &mut EvalContext) -> Value<NullableType<O>> + Copy + Send + Sync {{
    move |{closure_args} ctx| match ({args_tuple}) {{
        {scalar_nones_pats} => Value::Scalar(None),
        ({arg_scalar}) => Value::Scalar(
            func({scalar_func_args} ctx)
                .into_scalar()
                .unwrap(),
        ),
        {match_arms}
    }}
}}
"
        )
        .unwrap();
    }

    // Write `erase_calc_domain_generic_x_arg`.
    for n_args in 0..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_f_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&I{n}::Domain, "))
            .join("");
        // `args[]` is 0-indexed while generated names are 1-based,
        // hence `n - 1` inside the index.
        let let_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| {
                format!(
                    "let arg{n} = I{n}::try_downcast_domain(&args[{}]).unwrap();",
                    n - 1
                )
            })
            .join("");
        let func_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("&arg{n}"))
            .join(",");
        writeln!(
            source,
            "
fn erase_calc_domain_generic_{n_args}_arg<{arg_generics_bound} O: ArgType>(
    func: impl Fn({arg_f_closure_sig}) -> FunctionDomain<O>,
) -> impl Fn(&[Domain]) -> FunctionDomain<AnyType> {{
    move |args| {{
        {let_args}
        func({func_args}).map(O::upcast_domain)
    }}
}}
"
        )
        .unwrap();
    }

    // Write `erase_function_generic_x_arg`.
    for n_args in 0..=MAX_ARGS {
        let arg_generics_bound = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("I{n}: ArgType, "))
            .join("");
        let arg_g_closure_sig = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("ValueRef<'a, I{n}>, "))
            .join("");
        let let_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("let arg{n} = args[{}].try_downcast().unwrap();", n - 1))
            .join("");
        let func_args = (0..n_args)
            .map(|n| n + 1)
            .map(|n| format!("arg{n}, "))
            .join("");
        writeln!(
            source,
            "
fn erase_function_generic_{n_args}_arg<{arg_generics_bound} O: ArgType>(
    func: impl for <'a> Fn({arg_g_closure_sig} &mut EvalContext) -> Value<O>,
) -> impl Fn(&[ValueRef<AnyType>], &mut EvalContext) -> Value<AnyType> {{
    move |args, ctx| {{
        {let_args}
        Value::upcast(func({func_args} ctx))
    }}
}}
"
        )
        .unwrap();
    }

    format_and_save("src/query/expression/src/register.rs", &source);
}
/// Writes the license header plus the generated `src` to `path`, then runs
/// `cargo fmt` on the result so the generated file is properly formatted.
fn format_and_save(path: &str, src: &str) {
    let mut file = File::create(path).expect("open");
    // Write the head.
    // `file!()` names this generator file so the "DO NOT EDIT" note stays
    // accurate even if the generator is moved.
    let codegen_src_path = file!();
    writeln!(
        file,
        "// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the \"License\");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an \"AS IS\" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This code is generated by {codegen_src_path}. DO NOT EDIT.
"
    )
    .unwrap();
    writeln!(file, "{src}").unwrap();
    // Flush before shelling out so rustfmt sees the complete file.
    file.flush().unwrap();
    // `exit_ok()` turns a non-zero rustfmt exit status into a panic.
    Command::new("cargo")
        .arg("fmt")
        .arg("--")
        .arg(path)
        .status()
        .unwrap()
        .exit_ok()
        .unwrap();
}
|
// NOTE(review): presumably the RISC-V RV64I base integer instruction set —
// confirm against the module's contents (rv64i.rs).
pub mod rv64i;
|
use serde::{
Serialize,
Serializer
};
use super::helpers::serialize_currency;
////////////////////////////////////////////////////////////////////////////////////////////////////
/// A single virtual item within a purchase.
#[derive(Debug, Serialize)]
pub struct VirtualItem {
    // Number of units of this item.
    pub amount: i32,
    // Groups the item is available in — presumably Xsolla item groups; confirm.
    pub available_groups: Vec<String>,
    // Item identifier (SKU).
    pub sku: String
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// A set of virtual items priced in one currency.
#[derive(Debug, Serialize)]
pub struct VirtualItems {
    // Serialized as its ISO 4217 letter code via `serialize_currency`.
    #[serde(serialize_with = "serialize_currency")]
    pub currency: &'static iso4217::CurrencyCode,
    pub items: Vec<VirtualItem>
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// A quantity of currency to purchase.
#[derive(Debug, Serialize)]
pub struct VirtualCurrency {
    // Serialized as its ISO 4217 letter code via `serialize_currency`.
    #[serde(serialize_with = "serialize_currency")]
    pub currency: &'static iso4217::CurrencyCode,
    pub quantity: i32
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// Purchase contents. Fields left as `None` are omitted from the
/// serialized output entirely.
#[derive(Debug, Serialize)]
pub struct PurchaseInfo {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub virtual_items: Option<VirtualItems>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub virtual_currency: Option<VirtualCurrency>
    // Not yet modeled:
    // coupon_code
    // gift
    // pin_codes
    // subscription
}
////////////////////////////////////////////////////////////////////////////////////////////////////
#[derive(Debug)]
pub enum SandboxMode{
Normal,
Sandbox
}
impl Serialize for SandboxMode {
    /// `Sandbox` serializes as the string "sandbox"; `Normal` serializes
    /// as none (null/absent, depending on the output format).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if matches!(self, SandboxMode::Sandbox) {
            serializer.serialize_str("sandbox")
        } else {
            serializer.serialize_none()
        }
    }
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// Payment settings passed when requesting a token.
#[derive(Debug, Serialize)]
pub struct Settings {
    // Serialized as its ISO 4217 letter code via `serialize_currency`.
    #[serde(serialize_with = "serialize_currency")]
    pub currency: &'static iso4217::CurrencyCode,
    // pub external_id: String,
    // pub language: String, // TODO: 2 lowercase letters; convert to an ISO type?
    pub mode: SandboxMode, // In sandbox mode the URL must also be https://sandbox-secure.xsolla.com,
    // pub payment_method: i32
    // pub payment_widget
    pub project_id: i32,
    // pub redirect_policy
    pub return_url: String // TODO: could this be a URL type?
    // pub ui
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// Wrapper for a user identifier, serialized as an object with a
/// single `value` field.
#[derive(Debug, Serialize)]
pub struct UserId {
    pub value: String
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// The paying user.
#[derive(Debug, Serialize)]
pub struct User {
    // pub age: u32,
    // pub attributes: serde_json::Value,
    // pub country
    // pub email
    pub id: UserId
    // pub is_legal
    // TODO: the remaining parameters are listed at
    // https://developers.xsolla.com/ru/pay-station-api/current/token/create-token/
}
////////////////////////////////////////////////////////////////////////////////////////////////////
/// Request-body parameters for a token request.
/// https://developers.xsolla.com/ru/pay-station-api/current/token/create-token/
#[derive(Debug, Serialize)]
pub struct Body {
    // pub custom_parameters: serde_json::Value,
    pub purchase: PurchaseInfo,
    pub settings: Settings,
    pub user: User
}
use itertools::{IntoChunks, Itertools};
use crate::raw;
use crate::FlannError;
use crate::Indexable;
use crate::Neighbor;
use crate::Parameters;
/// A FLANN index built over points borrowed from a flat slice of
/// components (see [`SliceIndex::new`]).
pub struct SliceIndex<'a, T: Indexable> {
    // Raw FLANN index handle; freed in `Drop`.
    index: raw::flann_index_t,
    // Parameters handed to FLANN at build time and on later FFI calls.
    parameters: raw::FLANNParameters,
    // Forwarded to `T::add_points`; presumably controls when FLANN
    // rebuilds the index after insertions — confirm against FLANN docs.
    rebuild_threshold: f32,
    // Number of components per point.
    pub(crate) point_len: usize,
    // Ties the index's lifetime to the borrowed point data.
    _phantom: std::marker::PhantomData<&'a T>,
}
impl<'a, T: Indexable> Drop for SliceIndex<'a, T> {
    fn drop(&mut self) {
        // SAFETY: `self.index` was produced by `T::build_index` and is
        // freed exactly once, here.
        unsafe {
            T::free_index(self.index, &mut self.parameters);
        }
    }
}
impl<'a, T: Indexable> SliceIndex<'a, T> {
    /// Makes a new index from points that are already in a slice of memory
    /// in component order where there are `point_len` components.
    ///
    /// This borrows the slice internally in FLANN.
    ///
    /// # Errors
    ///
    /// * `ZeroInputPoints` if `points` is empty.
    /// * `InvalidFlatPointsLen` if `points.len()` is not a multiple of
    ///   `point_len`.
    /// * `FailedToBuildIndex` if FLANN returns a null index.
    ///
    /// NOTE(review): `point_len == 0` panics on the `%` below (remainder
    /// by zero) — consider validating it explicitly.
    pub fn new(
        point_len: usize,
        points: &'a [T],
        parameters: Parameters,
    ) -> Result<Self, FlannError> {
        if points.is_empty() {
            return Err(FlannError::ZeroInputPoints);
        }
        if points.len() % point_len != 0 {
            return Err(FlannError::InvalidFlatPointsLen {
                expected: point_len,
                got: points.len(),
            });
        }
        // This stores how much faster FLANN executed compared to linear, which we discard.
        let mut speedup = 0.0;
        let rebuild_threshold = parameters.rebuild_threshold;
        let mut flann_params = parameters.into();
        // SAFETY: `points` is valid for `points.len()` elements and stays
        // borrowed for 'a. The `as *mut T` cast presumably does not lead to
        // mutation by FLANN — TODO confirm against the FLANN C API.
        let index = unsafe {
            T::build_index(
                points.as_ptr() as *mut T,
                (points.len() / point_len) as i32,
                point_len as i32,
                &mut speedup,
                &mut flann_params,
            )
        };
        if index.is_null() {
            return Err(FlannError::FailedToBuildIndex);
        }
        Ok(Self {
            index,
            parameters: flann_params,
            rebuild_threshold,
            point_len,
            _phantom: Default::default(),
        })
    }
/// Adds a point to the index.
pub fn add_slice(&mut self, point: &'a [T]) -> Result<(), FlannError> {
if point.len() != self.point_len {
return Err(FlannError::InvalidPointDimensionality {
expected: self.point_len,
got: point.len(),
});
}
let retval = unsafe {
T::add_points(
self.index,
point.as_ptr() as *mut T,
1,
self.point_len as i32,
self.rebuild_threshold,
)
};
assert_eq!(retval, 0);
Ok(())
}
/// Adds multiple points to the index.
pub fn add_many_slices(&mut self, points: &'a [T]) -> Result<(), FlannError> {
// Don't run FLANN if we add no points.
if points.is_empty() {
return Ok(());
}
if points.len() % self.point_len != 0 {
return Err(FlannError::InvalidFlatPointsLen {
expected: self.point_len,
got: points.len(),
});
}
let retval = unsafe {
T::add_points(
self.index,
points.as_ptr() as *mut T,
(points.len() / self.point_len) as i32,
self.point_len as i32,
self.rebuild_threshold,
)
};
assert_eq!(retval, 0);
Ok(())
}
/// Get the point that corresponds to this index `idx`.
pub fn get(&self, idx: usize) -> Option<&'a [T]> {
if idx < self.len() {
let point = unsafe { T::get_point(self.index, idx as u32) };
assert!(!point.is_null());
Some(unsafe { std::slice::from_raw_parts(point, self.point_len) })
} else {
None
}
}
/// Removes a point at index `idx`.
pub fn remove(&mut self, idx: usize) {
let retval = unsafe { T::remove_point(self.index, idx as u32) };
assert_eq!(retval, 0);
}
pub fn len(&self) -> usize {
unsafe { T::size(self.index) as usize }
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Performs a search to find only the closest neighbor.
pub fn find_nearest_neighbor(
&mut self,
point: &[T],
) -> Result<Neighbor<T::ResultType>, FlannError> {
if point.len() != self.point_len {
return Err(FlannError::InvalidPointDimensionality {
expected: self.point_len,
got: point.len(),
});
}
let mut index = -1;
let mut distance_squared = T::ResultType::default();
let retval = unsafe {
T::find_nearest_neighbors_index(
self.index,
point.as_ptr() as *mut T,
1,
&mut index,
&mut distance_squared,
1,
&mut self.parameters,
)
};
assert_eq!(retval, 0);
Ok(Neighbor {
index: index as usize,
distance_squared,
})
}
/// Performs k-NN search for `num` neighbors.
/// If there are less points in the set than `num` it returns that many neighbors.
pub fn find_nearest_neighbors(
&mut self,
num: usize,
point: &[T],
) -> Result<impl Iterator<Item = Neighbor<T::ResultType>>, FlannError> {
if point.len() != self.point_len {
return Err(FlannError::InvalidPointDimensionality {
expected: self.point_len,
got: point.len(),
});
}
let num = num.min(self.len());
let mut indices: Vec<i32> = vec![-1; num];
let mut distances_squared: Vec<T::ResultType> = vec![T::ResultType::default(); num];
let retval = unsafe {
T::find_nearest_neighbors_index(
self.index,
point.as_ptr() as *mut T,
1,
indices.as_mut_ptr(),
distances_squared.as_mut_ptr(),
num as i32,
&mut self.parameters,
)
};
assert_eq!(retval, 0);
Ok(indices.into_iter().zip(distances_squared.into_iter()).map(
|(index, distance_squared)| Neighbor {
index: index as usize,
distance_squared,
},
))
}
/// Performs k-NN search for `num` neighbors, limiting the search to `radius` distance.
/// If there are less points in the set than `num` it returns that many neighbors.
///
/// The returned iterator is sorted by closest to furthest.
pub fn find_nearest_neighbors_radius(
&mut self,
num: usize,
radius_squared: f32,
point: &[T],
) -> Result<impl Iterator<Item = Neighbor<T::ResultType>>, FlannError> {
if point.len() != self.point_len {
return Err(FlannError::InvalidPointDimensionality {
expected: self.point_len,
got: point.len(),
});
}
let num = num.min(self.len());
let mut indices: Vec<i32> = vec![-1; num];
let mut distances_squared: Vec<T::ResultType> = vec![T::ResultType::default(); num];
let retval = unsafe {
T::radius_search(
self.index,
point.as_ptr() as *mut T,
indices.as_mut_ptr(),
distances_squared.as_mut_ptr(),
num as i32,
radius_squared,
&mut self.parameters,
)
};
assert!(retval >= 0);
Ok(indices
.into_iter()
.zip(distances_squared.into_iter())
.take(retval as usize)
.map(|(index, distance_squared)| Neighbor {
index: index as usize,
distance_squared,
}))
}
/// Performs k-NN search for `num` neighbors for several points.
///
/// If there are less points in the set than `num` it returns that many
/// neighbors for each point.
pub fn find_many_nearest_neighbors<I, P>(
&mut self,
num: usize,
points: I,
) -> Result<IntoChunks<impl Iterator<Item = Neighbor<T::ResultType>>>, FlannError>
where
I: IntoIterator<Item = P>,
P: IntoIterator<Item = T>,
{
let mut points_vec = Vec::new();
for point in points {
let count = point.into_iter().map(|d| points_vec.push(d)).count();
if count != self.point_len {
return Err(FlannError::InvalidPointDimensionality {
expected: self.point_len,
got: count,
});
}
}
self.find_many_nearest_neighbors_flat(num, &points_vec)
}
/// Performs k-NN search on `num` neighbors for several points.
///
/// If there are less points in the set than `num` it returns that many
/// neighbors for each point.
///
/// This assumes points are already in a slice of memory
/// in component order where there are `point_len` components
/// (as specified in `new` or `new_flat`).
pub fn find_many_nearest_neighbors_flat(
&mut self,
num: usize,
points: &[T],
) -> Result<IntoChunks<impl Iterator<Item = Neighbor<T::ResultType>>>, FlannError> {
let neighbor_from_index_distance = |(index, distance_squared)| Neighbor {
index: index as usize,
distance_squared,
};
if points.is_empty() {
let indices: Vec<i32> = Vec::new();
let distances: Vec<T::ResultType> = Vec::new();
return Ok(indices
.into_iter()
.zip(distances.into_iter())
.map(neighbor_from_index_distance)
.chunks(num));
}
if points.len() % self.point_len != 0 {
return Err(FlannError::InvalidFlatPointsLen {
expected: self.point_len,
got: points.len(),
});
}
let num = num.min(self.len());
let total_points = points.len() / self.point_len;
let mut indices: Vec<i32> = vec![-1; num * total_points];
let mut distances_squared: Vec<T::ResultType> =
vec![T::ResultType::default(); num * total_points];
let retval = unsafe {
T::find_nearest_neighbors_index(
self.index,
points.as_ptr() as *mut T,
total_points as i32,
indices.as_mut_ptr(),
distances_squared.as_mut_ptr(),
num as i32,
&mut self.parameters,
)
};
assert_eq!(retval, 0);
Ok(indices
.into_iter()
.zip(distances_squared.into_iter())
.map(neighbor_from_index_distance)
.chunks(num))
}
}
|
//! IOx Compactor Layout tests
//!
//! These tests do almost everything the compactor would do in a
//! production system *except* for reading/writing parquet data.
//!
//! The input to each test is the parquet file layout of a partition.
//!
//! The output is a representation of the steps the compactor chose to
//! take and the final layout of the parquet files in the partition.
//!
//! # Interpreting test lines
//!
//! This test uses `insta` to compare an inlined string representation of
//! what the compactor did.
//!
//! Each line in the representation represents either some metadata or
//! a parquet file, with a visual depiction of its `min_time` and
//! `max_time` (the minimum timestamp and maximum timestamp for data
//! in the file).
//!
//! For example:
//!
//! ```text
//! - L0.3[300,350] 5kb |-L0.3-|
//! ```
//!
//! Represents the following [`ParquetFile`]:
//!
//! ```text
//! ParquetFile {
//! id: 3,
//! compaction_level: L0
//! min_time: 300,
//! max_time: 350
//! file_size_bytes: 5*1024
//! }
//! ```
//!
//! The `|-L0.3-|` shows the relative location of `min_time` (`|-`)
//! and `max_time` (`-|`) on a time line to help visualize the output
//!
//! A file with `?` represents a `ParquetFileParam` (aka a file that
//! will be added to the catalog but is not yet and thus has no id
//! assigned). So the following represents the same file as above, but
//! without an entry in the catalog:
//!
//!
//! ```text
//! - L0.?[300,350] 5kb |-L0.3-|
//! ```
mod accumulated_size;
mod backfill;
mod common_use_cases;
mod core;
mod created_at;
mod knobs;
mod large_files;
mod large_overlaps;
mod many_files;
mod single_timestamp;
mod stuck;
use std::{sync::atomic::Ordering, time::Duration};
use compactor_test_utils::{display_size, format_files, TestSetup, TestSetupBuilder};
use data_types::{CompactionLevel, ParquetFile};
use iox_tests::TestParquetFileBuilder;
use iox_time::Time;
/// One mebibyte, used to express file sizes in the layout tests.
pub(crate) const ONE_MB: u64 = 1024 * 1024;
/// Creates a `TestParquetFileBuilder` preconfigured for layout tests.
pub(crate) fn parquet_builder() -> TestParquetFileBuilder {
    // Layout-test files start life at L0; the line protocol only exists to
    // give the file a schema.
    let builder = TestParquetFileBuilder::default()
        .with_compaction_level(CompactionLevel::Initial);
    builder.with_line_protocol("table,tag1=A,tag2=B,tag3=C field_int=1i 100")
}
/// Creates the default `TestSetupBuilder` for layout tests.
///
/// NOTE: The builder is configured with parameters that are intended
/// to be as close as possible to what is configured on production
/// systems so that we can predict and reason about what the compactor
/// will do in production.
pub(crate) async fn layout_setup_builder() -> TestSetupBuilder<false> {
    let base = TestSetup::builder().await;
    base.with_percentage_max_file_size(20)
        .with_split_percentage(80)
        .with_max_num_files_per_plan(200)
        .with_min_num_l1_files_to_compact(10)
        .with_max_desired_file_size_bytes(100 * ONE_MB)
        .simulate_without_object_store()
}
/// Creates a scenario with ten 9 * 1MB overlapping L0 files.
pub(crate) async fn all_overlapping_l0_files(setup: TestSetup) -> TestSetup {
    // Ten files covering the identical time range, whose
    // `max_l0_created_at` timestamps run 1..=10.
    for created_at in 1..=10 {
        let file = parquet_builder()
            .with_min_time(100)
            .with_max_time(200000)
            .with_file_size_bytes(9 * ONE_MB)
            .with_max_l0_created_at(Time::from_timestamp_nanos(created_at));
        setup.partition.create_parquet_file(file).await;
    }
    setup
}
/// runs the scenario and returns a string based output for comparison
///
/// The output contains, in order: the input files, warnings the inputs
/// trigger, the compactor's run log (unless suppressed), any skipped
/// compactions, the final files with total bytes written, an optional
/// per-plan write breakdown, and warnings the outputs trigger.
pub(crate) async fn run_layout_scenario(setup: &TestSetup) -> Vec<String> {
    // verify the files are ok to begin with
    setup.verify_invariants().await;
    // advance the shared mock clock before running the compactor
    setup.catalog.time_provider.inc(Duration::from_nanos(200));
    let input_files = setup.list_by_table_not_to_delete().await;
    let mut output = format_files("**** Input Files ", &sort_files(input_files));
    // check if input files trip warnings (situations may be deliberate)
    output.extend(setup.generate_warnings().await);
    // run the actual compaction
    let compact_result = setup.run_compact().await;
    // record what the compactor actually did
    if !setup.suppress_run_output {
        output.extend(compact_result.run_log);
    }
    // Record any skipped compactions (is after what the compactor actually did)
    output.extend(get_skipped_compactions(setup).await);
    // record the final state of the catalog
    let output_files = setup.list_by_table_not_to_delete().await;
    let bytes_written = setup.bytes_written.load(Ordering::Relaxed) as i64;
    output.extend(format_files(
        format!(
            "**** Final Output Files ({} written)",
            display_size(bytes_written)
        ),
        &sort_files(output_files),
    ));
    if !setup.suppress_writes_breakdown {
        output.extend(vec![
            "**** Breakdown of where bytes were written".to_string()
        ]);
        // Sort the per-plan lines so the output is deterministic.
        let mut breakdown = Vec::new();
        for (op, written) in setup.bytes_written_per_plan.lock().unwrap().iter() {
            let written = *written as i64;
            breakdown.push(format!("{} written by {}", display_size(written), op));
        }
        breakdown.sort();
        output.extend(breakdown);
    }
    // verify that the output of the compactor was valid as well
    setup.verify_invariants().await;
    // check if output files trip warnings (warnings here deserve scrutiny, but may be justifiable)
    output.extend(setup.generate_warnings().await);
    output
}
/// Sorts files by ascending parquet file id so display output is stable.
fn sort_files(mut files: Vec<ParquetFile>) -> Vec<ParquetFile> {
    files.sort_by(|a, b| a.id.cmp(&b.id));
    files
}
/// Lists the compactions the compactor decided to skip, one formatted
/// line per skipped partition.
async fn get_skipped_compactions(setup: &TestSetup) -> Vec<String> {
    let skipped = setup
        .catalog
        .catalog
        .repositories()
        .await
        .partitions()
        .list_skipped_compactions()
        .await
        .unwrap();
    let mut lines = Vec::with_capacity(skipped.len());
    for entry in &skipped {
        lines.push(format!(
            "SKIPPED COMPACTION for {:?}: {}",
            entry.partition_id, entry.reason
        ));
    }
    lines
}
|
mod stack;
pub use crate::stack::Stack;
|
use byteorder::{ByteOrder, NetworkEndian};
use std::future::Future;
use thiserror::Error;
use crate::io::*;
use crate::v5::{
SocksV5AddressType, SocksV5Command, SocksV5Host, SocksV5RequestError, SocksV5RequestStatus,
};
use crate::SocksVersion;
/// Writes a SOCKSv5 request with the specified command, host and port.
///
/// The whole message (VER, CMD, RSV, ATYP, address, 2-byte big-endian port)
/// is assembled into one buffer and sent with a single `write_all` call.
///
/// # Errors
///
/// If writing to `writer` fails, this function will return the I/O error.
///
/// # Panics
///
/// If `host` is a domain name, and its length is greater than 255 bytes.
/// The SOCKSv5 specification leaves only a single octet for encoding the domain name length,
/// so a target longer than 255 bytes cannot be properly encoded.
pub async fn write_request<Writer>(
    mut writer: Writer,
    command: SocksV5Command,
    host: SocksV5Host,
    port: u16,
) -> std::io::Result<()>
where
    Writer: AsyncWrite + Unpin,
{
    let mut message = Vec::<u8>::with_capacity(6 + host.repr_len());
    // Fixed header: version, command, reserved octet.
    message.extend_from_slice(&[SocksVersion::V5.to_u8(), command.to_u8(), 0u8]);
    match &host {
        SocksV5Host::Ipv4(octets) => {
            message.push(SocksV5AddressType::Ipv4.to_u8());
            message.extend_from_slice(octets);
        }
        SocksV5Host::Ipv6(octets) => {
            message.push(SocksV5AddressType::Ipv6.to_u8());
            message.extend_from_slice(octets);
        }
        SocksV5Host::Domain(domain) => {
            assert!(
                domain.len() < 256,
                "domain name must be shorter than 256 bytes"
            );
            // Domains are length-prefixed with a single octet.
            message.push(SocksV5AddressType::Domain.to_u8());
            message.push(domain.len() as u8);
            message.extend_from_slice(domain);
        }
    }
    // Append the port in network (big-endian) byte order.
    let port_field = message.len();
    message.extend_from_slice(&[0, 0]);
    NetworkEndian::write_u16(&mut message[port_field..], port);
    writer.write_all(&message).await
}
/// A parsed SOCKSv5 command response.
#[derive(Debug)]
pub struct SocksV5Response {
    /// Status code the server replied with.
    pub status: SocksV5RequestStatus,
    /// Host carried in the response.
    pub host: SocksV5Host,
    /// Port carried in the response.
    pub port: u16,
}
/// Result of reading a SOCKSv5 command response: the parsed response or a request error.
pub type SocksV5ResponseResult = Result<SocksV5Response, SocksV5RequestError>;
/// Reads and parses a SOCKSv5 command response message.
///
/// Depending on the data (in case of parsing errors),
/// this function may not consume the whole response from the server.
///
/// # Errors
///
/// If reading from `reader` fails, including if a premature EOF is encountered,
/// this function will return the I/O error (wrapped in `SocksV5RequestError::Io`).
///
/// If the first byte read from `reader` is not `05`, as required by the SOCKSv5 specification,
/// then this function will return `SocksV5RequestError::InvalidVersion` with the actual "version number".
///
/// If the status byte or the address type byte are not from the respective lists in the specification,
/// then this function will return `SocksV5RequestError::InvalidRequest`
/// with a human-readable description of the error.
pub async fn read_request_status<Reader>(mut reader: Reader) -> SocksV5ResponseResult
where
    Reader: AsyncRead + Unpin,
{
    let mut buf = [0u8; 2];
    // Version octet: must be 0x05.
    reader.read_exact(&mut buf[0..1]).await?;
    if buf[0] != SocksVersion::V5.to_u8() {
        return Err(SocksV5RequestError::InvalidVersion(buf[0]));
    }
    // Status octet.
    reader.read_exact(&mut buf[0..1]).await?;
    let status = SocksV5RequestStatus::from_u8(buf[0]).ok_or_else(|| {
        SocksV5RequestError::InvalidRequest(format!("invalid status {:02X}", buf[0]))
    })?;
    // Reserved octet followed by the address type octet.
    reader.read_exact(&mut buf).await?;
    // ignore a reserved octet, use the following one
    let atyp = SocksV5AddressType::from_u8(buf[1]).ok_or_else(|| {
        SocksV5RequestError::InvalidRequest(format!(
            "invalid address type {:02X}, expected {:02X} (IP V4), {:02X} (DOMAINNAME), or {:02X} (IP V6)",
            buf[1],
            SocksV5AddressType::Ipv4.to_u8(),
            SocksV5AddressType::Domain.to_u8(),
            SocksV5AddressType::Ipv6.to_u8(),
        ))
    })?;
    // Variable-length address, depending on the address type.
    let host = SocksV5Host::read(&mut reader, atyp).await?;
    // Two-octet port, big-endian.
    reader.read_exact(&mut buf).await?;
    let port = NetworkEndian::read_u16(&buf);
    Ok(SocksV5Response { status, port, host })
}
/// Errors returned by the client-side request helpers in this module.
#[derive(Debug, Error)]
pub enum SocksV5ConnectError {
    /// The server replied with a version octet other than `05`.
    #[error("invalid SOCKS version {0:02X}, expected {:02X}", SocksVersion::V5.to_u8())]
    InvalidVersion(u8),
    /// The server's reply could not be parsed.
    #[error("invalid server response: {0}")]
    InvalidResponse(String),
    /// The server answered with a non-success status code.
    #[error("server returned an error: {0:?}")]
    ServerError(SocksV5RequestStatus),
    /// An underlying I/O operation failed.
    #[error("{0}")]
    Io(
        #[from]
        #[source]
        std::io::Error,
    ),
}
impl From<SocksV5RequestError> for SocksV5ConnectError {
fn from(he: SocksV5RequestError) -> Self {
use SocksV5ConnectError::*;
match he {
SocksV5RequestError::InvalidVersion(v) => InvalidVersion(v),
SocksV5RequestError::InvalidRequest(msg) => InvalidResponse(msg),
SocksV5RequestError::Io(e) => Io(e),
}
}
}
/// As a client, send a CONNECT request to a stream and process the response.
///
/// # Returns
///
/// If the server accepts the command, this function returns the stream (that can now be used
/// to communicate with the target through the proxy), as well as the host and port that the proxy
/// server used to connect to the target socket.
///
/// # Errors
///
/// - `Io` if either sending the request or receiving the response fails due to I/O error, including a premature EOF.
/// - `InvalidVersion` if the server returns an unexpected version number.
/// - `InvalidResponse` if the server's reply cannot be interpreted (because, for example, it uses
///   an unsupported status code or address type).
/// - `ServerError` if the server returns a non-success status.
pub async fn request_connect<Stream, Host>(
    mut stream: Stream,
    target_host: Host,
    target_port: u16,
) -> Result<(Stream, SocksV5Host, u16), SocksV5ConnectError>
where
    Stream: AsyncRead + AsyncWrite + Unpin,
    Host: Into<SocksV5Host>,
{
    write_request(
        &mut stream,
        SocksV5Command::Connect,
        target_host.into(),
        target_port,
    )
    .await?;
    let response = read_request_status(&mut stream).await?;
    // Only a Success status hands the stream back to the caller.
    match response.status {
        SocksV5RequestStatus::Success => Ok((stream, response.host, response.port)),
        status => Err(SocksV5ConnectError::ServerError(status)),
    }
}
/// As a client, send a BIND request to a stream and process the response.
///
/// # Returns
///
/// If the server accepts the command, this function returns a triple consisting of:
/// - a future (that can be used to accept an incoming connection through the proxy);
/// - as well as the host and port that the proxy is listening on.
///
/// Once an incoming connection to the proxy is made, that "accept" future will resolve into another triple:
/// - a read-write stream,
/// - as well as the host and port of the client that connected to the proxy.
///
/// The stream can then be used to communicate with the client.
///
/// # Errors
///
/// Errors can be returned from both this function and the "accept" future:
/// - `Io` if either sending the request or receiving the response fails due to I/O error, including a premature EOF.
/// - `InvalidVersion` if the server returns an unexpected version number.
/// - `InvalidResponse` if the server's reply cannot be interpreted (because, for example, it uses
///   an unsupported status code or address type).
/// - `ServerError` if the server returns a non-success status.
pub async fn request_bind<Stream, Host>(
    mut stream: Stream,
    host: Host,
    port: u16,
) -> Result<
    (
        impl Future<Output = Result<(Stream, SocksV5Host, u16), SocksV5ConnectError>>,
        SocksV5Host,
        u16,
    ),
    SocksV5ConnectError,
>
where
    Stream: AsyncRead + AsyncWrite + Unpin,
    Host: Into<SocksV5Host>,
{
    write_request(&mut stream, SocksV5Command::Bind, host.into(), port).await?;
    // First reply: the address/port the proxy is now listening on.
    let response1 = read_request_status(&mut stream).await?;
    if response1.status != SocksV5RequestStatus::Success {
        return Err(SocksV5ConnectError::ServerError(response1.status));
    }
    // Second reply arrives only once a peer connects to the proxy; the
    // caller decides when to await it, so it is returned as a future.
    let accepted_fut = async move {
        let response2 = read_request_status(&mut stream).await?;
        if response2.status != SocksV5RequestStatus::Success {
            return Err(SocksV5ConnectError::ServerError(response2.status));
        }
        Ok((stream, response2.host, response2.port))
    };
    Ok((accepted_fut, response1.host, response1.port))
}
#[cfg(test)]
mod tests {
    use futures::executor::block_on;
    use super::*;
    /// A CONNECT request to 127.0.0.1:1080 must serialize to the exact
    /// SOCKSv5 byte sequence; 1080 = 0x0438, hence the trailing `4, 56`.
    #[test]
    fn write_request_ipv4() {
        let mut buf = Vec::<u8>::new();
        block_on(write_request(
            &mut buf,
            SocksV5Command::Connect,
            SocksV5Host::Ipv4([127, 0, 0, 1]),
            1080,
        ))
        .unwrap();
        assert_eq!(buf, &[5, 1, 0, 1, 127, 0, 0, 1, 4, 56]);
    }
    /// Same as above but with a 16-octet IPv6 address (ATYP = 4).
    #[test]
    fn write_request_ipv6() {
        let mut buf = Vec::<u8>::new();
        block_on(write_request(
            &mut buf,
            SocksV5Command::Connect,
            SocksV5Host::Ipv6([1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6]),
            1080,
        ))
        .unwrap();
        assert_eq!(
            buf,
            &[5, 1, 0, 4, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 4, 56]
        );
    }
}
|
use std::collections::HashMap;
use std::convert::TryFrom;
use amethyst::ui::{UiText, UiTransform};
use super::system_prelude::*;
// UiTransform id of the label showing the selected level's description.
const DIFFICULTY_DESCRIPTION_TRANSFORM_ID: &str =
    "label_difficulty_description";
// UiTransform id prefix marking selectable menu entries.
const PREFIX_SELECTION_TRANSFORM_ID: &str = "selection_";
/// System that eases menu selector entities towards the position of the
/// currently selected menu entry and keeps the level description label
/// up to date.
#[derive(Default)]
pub struct MenuSelectionSystem;
impl<'a> System<'a> for MenuSelectionSystem {
    type SystemData = (
        Read<'a, Time>,
        ReadExpect<'a, Settings>,
        Read<'a, SavefileDataRes>,
        ReadStorage<'a, MenuSelector>,
        WriteStorage<'a, UiTransform>,
        WriteStorage<'a, UiText>,
    );

    /// Moves each selector transform towards its selected entry's position
    /// and updates the level description text/color for the selection.
    fn run(
        &mut self,
        (
            time,
            settings,
            savefile_data,
            menu_selectors,
            mut transforms,
            mut texts,
        ): Self::SystemData,
    ) {
        let dt = time.delta_seconds();
        // Scale the configured animation speed by the frame delta so the
        // easing is frame-rate independent.
        let (selector_animation_speed, selector_animation_deadzone) = {
            let anim_speed = &settings.misc.menu_selector_animation_speed;
            (
                (anim_speed.0 * dt, anim_speed.1 * dt),
                settings.misc.menu_selector_animation_deadzone.into(),
            )
        };
        let level_manager_settings = &settings.level_manager;
        // Map each selectable entry (transform ids with the selection
        // prefix) to its current on-screen position.
        let selections_positions: HashMap<MenuSelection, (f32, f32)> =
            (&transforms)
                .join()
                .filter_map(|transform| {
                    let transform_id = transform.id.as_str();
                    if transform_id.starts_with(PREFIX_SELECTION_TRANSFORM_ID) {
                        if let Ok(selection) =
                            MenuSelection::try_from(transform_id)
                        {
                            Some((
                                selection,
                                (transform.local_x, transform.local_y),
                            ))
                        } else {
                            None
                        }
                    } else {
                        None
                    }
                })
                .collect();
        // Ease every selector towards its target entry; remember the last
        // selected level so its description can be shown below.
        let mut selected_level_opt = None;
        for (selector, selector_transform) in
            (&menu_selectors, &mut transforms).join()
        {
            selected_level_opt = Some(selector.selection.0.clone());
            if let Some(selection_pos) =
                selections_positions.get(&selector.selection)
            {
                move_selector_towards(
                    selector_transform,
                    selection_pos,
                    selector_animation_speed,
                    selector_animation_deadzone,
                );
            }
        }
        if let Some(selected_level) = selected_level_opt {
            let level_settings = level_manager_settings.level(&selected_level);
            // Locked levels show their locked description (or the default
            // one) in the dedicated locked text color.
            let (level_description, level_description_color) =
                if is_level_locked(
                    &selected_level,
                    level_manager_settings,
                    &savefile_data.0,
                ) {
                    (
                        level_settings
                            .locked_description
                            .as_ref()
                            .map(String::as_str)
                            .unwrap_or(
                                level_manager_settings
                                    .default_locked_description
                                    .as_str(),
                            ),
                        level_manager_settings.locked_description_text_color,
                    )
                } else {
                    (
                        level_settings.description.as_str(),
                        level_manager_settings.description_text_color,
                    )
                };
            // Update the level description label, writing only when the
            // content actually changed. (`if let` instead of the previous
            // `Option::map`, whose closure was used purely for its side
            // effect — a pattern clippy's `option_map_unit_fn` flags.)
            if let Some((_, description)) = (&transforms, &mut texts)
                .join()
                .find(|(transform, _)| {
                    transform.id == DIFFICULTY_DESCRIPTION_TRANSFORM_ID
                })
            {
                if description.text.as_str() != level_description {
                    description.text = level_description.to_string();
                    description.color = level_description_color;
                }
            }
        }
    }
}
/// Eases a selector transform towards `target_pos`, one axis at a time.
///
/// Within `deadzone` of the target, the position snaps to it exactly;
/// otherwise it moves proportionally to the remaining distance.
fn move_selector_towards(
    selector_transform: &mut UiTransform,
    target_pos: &(f32, f32),
    speed: (f32, f32),
    deadzone: (f32, f32),
) {
    // Single-axis step shared by both axes.
    fn step_axis(current: &mut f32, target: f32, speed: f32, deadzone: f32) {
        let dist = target - *current;
        if dist.abs() <= deadzone {
            *current = target;
        } else {
            *current += speed * dist;
        }
    }
    step_axis(&mut selector_transform.local_x, target_pos.0, speed.0, deadzone.0);
    step_axis(&mut selector_transform.local_y, target_pos.1, speed.1, deadzone.1);
}
|
//! EVM module types.
use oasis_runtime_sdk::types::{address::Address, token};
/// Transaction body for creating an EVM contract.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct Create {
    // Amount sent along with the create.
    pub value: U256,
    // Contract initialization bytecode.
    pub init_code: Vec<u8>,
}
/// Transaction body for calling an EVM contract.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct Call {
    // Address of the contract being called.
    pub address: H160,
    // Amount sent along with the call.
    pub value: U256,
    // Call input data.
    pub data: Vec<u8>,
}
/// Transaction body for depositing caller's SDK account tokens into EVM account.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct Deposit {
    // EVM account receiving the deposit.
    pub to: H160,
    // Amount to deposit, in base units.
    pub amount: token::BaseUnits,
}
/// Transaction body for withdrawing SDK account tokens from the caller's EVM account.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct Withdraw {
    // SDK account receiving the withdrawn tokens.
    pub to: Address,
    // Amount to withdraw, in base units.
    pub amount: token::BaseUnits,
}
/// Transaction body for peeking into EVM storage.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct StorageQuery {
    // Contract whose storage is inspected.
    pub address: H160,
    // Storage slot to read.
    pub index: H256,
}
/// Transaction body for peeking into EVM code storage.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct CodeQuery {
    // Account whose code is fetched.
    pub address: H160,
}
/// Transaction body for fetching EVM account's balance.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct BalanceQuery {
    // Account whose balance is fetched.
    pub address: H160,
}
/// Transaction body for simulating an EVM call.
#[derive(Clone, Debug, cbor::Encode, cbor::Decode)]
pub struct SimulateCallQuery {
    // Gas price used for the simulated call.
    pub gas_price: U256,
    // Gas limit of the simulated call.
    pub gas_limit: u64,
    // Simulated caller account.
    pub caller: H160,
    // Contract being called.
    pub address: H160,
    // Amount sent along with the call.
    pub value: U256,
    // Call input data.
    pub data: Vec<u8>,
}
// The rest of the file contains wrappers for primitive_types::{H160, H256, U256},
// so that we can implement cbor::{Encode, Decode} for them, ugh.
// Remove this once oasis-cbor#8 is implemented.
//
// Thanks to Nick for providing the fancy macros below :)
// This `mod` exists solely to place an `#[allow(warnings)]` around the generated code.
#[allow(warnings)]
mod eth {
    use std::convert::TryFrom;
    use thiserror::Error;
    use super::*;
    /// Error type for the infallible `TryFrom` conversions below; it has no
    /// variants, so it can never actually be constructed.
    #[derive(Error, Debug)]
    pub enum NoError {}
    // Generates a fixed-size hash type (via `fixed_hash`) plus CBOR
    // encode/decode as a raw byte string and an infallible `TryFrom<&[u8]>`.
    macro_rules! construct_fixed_hash {
        ($name:ident($num_bytes:literal)) => {
            fixed_hash::construct_fixed_hash! {
                pub struct $name($num_bytes);
            }
            impl cbor::Encode for $name {
                fn into_cbor_value(self) -> cbor::Value {
                    cbor::Value::ByteString(self.as_bytes().to_vec())
                }
            }
            impl cbor::Decode for $name {
                fn try_from_cbor_value(value: cbor::Value) -> Result<Self, cbor::DecodeError> {
                    match value {
                        cbor::Value::ByteString(v) => Ok(Self::from_slice(&v)),
                        _ => Err(cbor::DecodeError::UnexpectedType),
                    }
                }
            }
            impl TryFrom<&[u8]> for $name {
                type Error = NoError;
                fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
                    Ok(Self::from_slice(bytes))
                }
            }
        };
    }
    // Generates an unsigned big-integer type (via `uint`) plus CBOR
    // encode/decode as a 32-byte big-endian byte string.
    macro_rules! construct_uint {
        ($name:ident($num_words:tt)) => {
            uint::construct_uint! {
                pub struct $name($num_words);
            }
            impl cbor::Encode for $name {
                fn into_cbor_value(self) -> cbor::Value {
                    let mut out = [0u8; 32];
                    self.to_big_endian(&mut out);
                    cbor::Value::ByteString(out.to_vec())
                }
            }
            impl cbor::Decode for $name {
                fn try_from_cbor_value(value: cbor::Value) -> Result<Self, cbor::DecodeError> {
                    match value {
                        cbor::Value::ByteString(v) => Ok(Self::from_big_endian(&v)),
                        _ => Err(cbor::DecodeError::UnexpectedType),
                    }
                }
            }
        };
    }
    construct_fixed_hash!(H160(20));
    construct_fixed_hash!(H256(32));
    construct_uint!(U256(4));
    // Lossless conversions between these wrappers and the upstream
    // `primitive_types` equivalents (both are thin tuples over the same data).
    macro_rules! impl_upstream_conversions {
        ($($ty:ident),* $(,)?) => {
            $(
                impl From<$ty> for primitive_types::$ty {
                    fn from(t: $ty) -> Self {
                        Self(t.0)
                    }
                }
                impl From<primitive_types::$ty> for $ty {
                    fn from(t: primitive_types::$ty) -> Self {
                        Self(t.0)
                    }
                }
            )*
        }
    }
    impl_upstream_conversions!(H160, H256, U256);
}
pub use eth::{H160, H256, U256};
|
use std::collections::HashMap;
use ahash::RandomState;
use crossfont::{
Error as RasterizerError, FontDesc, FontKey, GlyphKey, Metrics, Rasterize, RasterizedGlyph,
Rasterizer, Size, Slant, Style, Weight,
};
use log::{error, info};
use unicode_width::UnicodeWidthChar;
use crate::config::font::{Font, FontDescription};
use crate::config::ui_config::Delta;
use crate::gl::types::*;
use super::builtin_font;
/// `LoadGlyph` allows for copying a rasterized glyph into graphics memory.
pub trait LoadGlyph {
    /// Load the rasterized glyph into GPU memory, returning its cache entry.
    fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> Glyph;
    /// Clear any state accumulated from previous loaded glyphs.
    ///
    /// This can, for instance, be used to reset the texture Atlas.
    fn clear(&mut self);
}
/// A glyph that has been uploaded to graphics memory, ready for rendering.
#[derive(Copy, Clone, Debug)]
pub struct Glyph {
    // Id of the GL texture holding this glyph.
    pub tex_id: GLuint,
    // Whether the glyph carries multiple colors (e.g. emoji) — TODO confirm.
    pub multicolor: bool,
    // Glyph placement and size in pixels.
    pub top: i16,
    pub left: i16,
    pub width: i16,
    pub height: i16,
    // Texture coordinates of the glyph within `tex_id`.
    pub uv_bot: f32,
    pub uv_left: f32,
    pub uv_width: f32,
    pub uv_height: f32,
}
/// Naïve glyph cache.
///
/// Keyed by [`GlyphKey`] (font, size and character), so it cannot hold
/// different representations of the same code point beyond those axes.
pub struct GlyphCache {
    /// Cache of buffered glyphs.
    cache: HashMap<GlyphKey, Glyph, RandomState>,
    /// Rasterizer for loading new glyphs.
    rasterizer: Rasterizer,
    /// Regular font.
    pub font_key: FontKey,
    /// Bold font.
    pub bold_key: FontKey,
    /// Italic font.
    pub italic_key: FontKey,
    /// Bold italic font.
    pub bold_italic_key: FontKey,
    /// Font size.
    pub font_size: crossfont::Size,
    /// Font offset.
    font_offset: Delta<i8>,
    /// Glyph offset.
    glyph_offset: Delta<i8>,
    /// Font metrics.
    metrics: Metrics,
    /// Whether to use the built-in font for box drawing characters.
    builtin_box_drawing: bool,
}
impl GlyphCache {
    /// Creates a glyph cache for `font`, loading all four style variants
    /// and computing the font metrics up front.
    pub fn new(mut rasterizer: Rasterizer, font: &Font) -> Result<GlyphCache, crossfont::Error> {
        let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
        // Need to load at least one glyph for the face before calling metrics.
        // The glyph requested here ('m' at the time of writing) has no special
        // meaning.
        rasterizer.get_glyph(GlyphKey { font_key: regular, character: 'm', size: font.size() })?;
        let metrics = rasterizer.metrics(regular, font.size())?;
        Ok(Self {
            cache: Default::default(),
            rasterizer,
            font_size: font.size(),
            font_key: regular,
            bold_key: bold,
            italic_key: italic,
            bold_italic_key: bold_italic,
            font_offset: font.offset,
            glyph_offset: font.glyph_offset,
            metrics,
            builtin_box_drawing: font.builtin_box_drawing,
        })
    }
fn load_glyphs_for_font<L: LoadGlyph>(&mut self, font: FontKey, loader: &mut L) {
let size = self.font_size;
// Cache all ascii characters.
for i in 32u8..=126u8 {
self.get(GlyphKey { font_key: font, character: i as char, size }, loader, true);
}
}
    /// Computes font keys for (Regular, Bold, Italic, Bold Italic).
    ///
    /// Non-regular variants silently fall back to the regular font if they
    /// fail to load or resolve to the same description.
    fn compute_font_keys(
        font: &Font,
        rasterizer: &mut Rasterizer,
    ) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
        let size = font.size();
        // Load regular font.
        let regular_desc = Self::make_desc(font.normal(), Slant::Normal, Weight::Normal);
        let regular = Self::load_regular_font(rasterizer, &regular_desc, size)?;
        // Helper to load a description if it is not the `regular_desc`.
        let mut load_or_regular = |desc: FontDesc| {
            if desc == regular_desc {
                regular
            } else {
                rasterizer.load_font(&desc, size).unwrap_or(regular)
            }
        };
        // Load bold font.
        let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
        let bold = load_or_regular(bold_desc);
        // Load italic font.
        let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
        let italic = load_or_regular(italic_desc);
        // Load bold italic font.
        let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
        let bold_italic = load_or_regular(bold_italic_desc);
        Ok((regular, bold, italic, bold_italic))
    }
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
    /// Get a glyph from the font.
    ///
    /// If the glyph has never been loaded before, it will be rasterized and inserted into the
    /// cache.
    ///
    /// When `show_missing` is set, a glyph the rasterizer reports as missing is
    /// replaced by a shared placeholder cached under `'\0'`; otherwise a
    /// default (empty) glyph is loaded instead.
    ///
    /// # Errors
    ///
    /// This will fail when the glyph could not be rasterized. Usually this is due to the glyph
    /// not being present in any font.
    pub fn get<L: ?Sized>(
        &mut self,
        glyph_key: GlyphKey,
        loader: &mut L,
        show_missing: bool,
    ) -> Glyph
    where
        L: LoadGlyph,
    {
        // Try to load glyph from cache.
        if let Some(glyph) = self.cache.get(&glyph_key) {
            return *glyph;
        };
        // Rasterize the glyph using the built-in font for special characters or the user's font
        // for everything else.
        let rasterized = self
            .builtin_box_drawing
            .then(|| {
                builtin_font::builtin_glyph(
                    glyph_key.character,
                    &self.metrics,
                    &self.font_offset,
                    &self.glyph_offset,
                )
            })
            .flatten()
            // Fall through to the rasterizer when the built-in font does not
            // cover this character (or built-in drawing is disabled).
            .map_or_else(|| self.rasterizer.get_glyph(glyph_key), Ok);
        let glyph = match rasterized {
            Ok(rasterized) => self.load_glyph(loader, rasterized),
            // Load fallback glyph.
            Err(RasterizerError::MissingGlyph(rasterized)) if show_missing => {
                // Use `\0` as "missing" glyph to cache it only once.
                let missing_key = GlyphKey { character: '\0', ..glyph_key };
                if let Some(glyph) = self.cache.get(&missing_key) {
                    *glyph
                } else {
                    // If no missing glyph was loaded yet, insert it as `\0`.
                    let glyph = self.load_glyph(loader, rasterized);
                    self.cache.insert(missing_key, glyph);
                    glyph
                }
            },
            Err(_) => self.load_glyph(loader, Default::default()),
        };
        // Cache rasterized glyph.
        *self.cache.entry(glyph_key).or_insert(glyph)
    }
    /// Load glyph into the atlas.
    ///
    /// This will apply all transforms defined for the glyph cache to the rasterized glyph before
    /// uploading it through `loader`.
    pub fn load_glyph<L: ?Sized>(&self, loader: &mut L, mut glyph: RasterizedGlyph) -> Glyph
    where
        L: LoadGlyph,
    {
        // Apply the user-configured per-glyph offset.
        glyph.left += i32::from(self.glyph_offset.x);
        glyph.top += i32::from(self.glyph_offset.y);
        // NOTE(review): shifts by the font descent — presumably converting
        // baseline-relative coordinates to cell-relative ones; confirm.
        glyph.top -= self.metrics.descent as i32;
        // The metrics of zero-width characters are based on rendering
        // the character after the current cell, with the anchor at the
        // right side of the preceding character. Since we render the
        // zero-width characters inside the preceding character, the
        // anchor has been moved to the right by one cell.
        if glyph.character.width() == Some(0) {
            glyph.left += self.metrics.average_advance as i32;
        }
        // Add glyph to cache.
        loader.load_glyph(&glyph)
    }
    /// Reset currently cached data in both GL and the registry to default state.
    pub fn reset_glyph_cache<L: LoadGlyph>(&mut self, loader: &mut L) {
        // Clear the GL-side storage first, then the CPU-side cache.
        loader.clear();
        self.cache = Default::default();
        // Re-warm the cache with the glyphs that will be needed immediately.
        self.load_common_glyphs(loader);
    }
    /// Update the inner font size.
    ///
    /// NOTE: To reload the renderers's fonts [`Self::reset_glyph_cache`] should be called
    /// afterwards.
    pub fn update_font_size(
        &mut self,
        font: &Font,
        scale_factor: f64,
    ) -> Result<(), crossfont::Error> {
        // Update dpi scaling.
        self.rasterizer.update_dpr(scale_factor as f32);
        self.font_offset = font.offset;
        self.glyph_offset = font.glyph_offset;
        // Recompute font keys.
        let (regular, bold, italic, bold_italic) =
            Self::compute_font_keys(font, &mut self.rasterizer)?;
        // Rasterize a reference glyph up front so a broken font fails here
        // (propagated via `?`) instead of later during rendering.
        self.rasterizer.get_glyph(GlyphKey {
            font_key: regular,
            character: 'm',
            size: font.size(),
        })?;
        let metrics = self.rasterizer.metrics(regular, font.size())?;
        info!("Font size changed to {:?} with scale factor of {}", font.size(), scale_factor);
        // Only commit the new state after every fallible step has succeeded.
        self.font_size = font.size();
        self.font_key = regular;
        self.bold_key = bold;
        self.italic_key = italic;
        self.bold_italic_key = bold_italic;
        self.metrics = metrics;
        self.builtin_box_drawing = font.builtin_box_drawing;
        Ok(())
    }
    /// Metrics of the currently loaded regular font.
    pub fn font_metrics(&self) -> crossfont::Metrics {
        self.metrics
    }
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
pub fn load_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L) {
self.load_glyphs_for_font(self.font_key, loader);
self.load_glyphs_for_font(self.bold_key, loader);
self.load_glyphs_for_font(self.italic_key, loader);
self.load_glyphs_for_font(self.bold_italic_key, loader);
}
}
|
use std::env;
use std::path::PathBuf;
use std::process::Command;
fn main() {
    // Build the vendored K12 C library with its Makefile and tell Cargo where
    // to find the resulting static archive.
    let manifest_dir: PathBuf = env::var("CARGO_MANIFEST_DIR")
        .expect("CARGO_MANIFEST_DIR is always set by Cargo")
        .into();
    let k12_dir = manifest_dir.join("K12");
    let build_dir = k12_dir.join("bin/Haswell");
    let status = Command::new("make")
        .arg("Haswell/libk12.a")
        .current_dir(&k12_dir)
        .status()
        .expect("failed to spawn `make`");
    // `status()` only errors on spawn failure; the exit code must be checked
    // explicitly or a failed C build would be silently ignored and linking
    // would fail later with a confusing error.
    assert!(status.success(), "make failed with {}", status);
    println!("cargo:rustc-link-search={}", build_dir.to_str().unwrap());
    println!("cargo:rustc-link-lib=static=k12");
}
|
use std::prelude::v1::*;
use ring::digest;
/// SHA-256 applied twice to `data` (the "SHA256d" construction).
pub fn double_sha256(data: &[u8]) -> Vec<u8> {
    let first = digest::digest(&digest::SHA256, data);
    let second = digest::digest(&digest::SHA256, first.as_ref());
    second.as_ref().to_vec()
}
/// Single SHA-256 digest of `data` as an owned byte vector.
pub fn sha256(data: &[u8]) -> Vec<u8> {
    digest::digest(&digest::SHA256, data).as_ref().to_vec()
}
|
use crate::device::Context;
use crate::explorer::candidate::Candidate;
use crate::explorer::choice::ActionEx;
use rpds::List;
use serde::Serialize;
/// A Trait defining a structure containing the candidates, meant to explore the
/// search space
pub trait Store: Sync {
    /// Transmits the information needed to update the store after a `Candidate` is
    /// evaluated.
    type PayLoad: Send;
    /// The type of events this store can emit during search.
    type Event: Send + Serialize;
    /// Updates the value that will be used to prune the search space
    fn update_cut(&self, new_cut: f64);
    /// Immediately stops the exploration.
    ///
    /// Default implementation sets the cut to `0.0`.
    fn stop_exploration(&self) {
        self.update_cut(0.0);
    }
    /// Commit the result of an evaluation back to Store
    fn commit_evaluation(
        &self,
        actions: &List<ActionEx>,
        payload: Self::PayLoad,
        eval: f64,
    );
    /// Retrieve a Candidate for evaluation, returns `None` if no candidate remains.
    fn explore(&self, context: &dyn Context) -> Option<(Candidate, Self::PayLoad)>;
    /// Displays statistics about the candidate store.
    ///
    /// No-op by default.
    fn print_stats(&self) {}
    /// Resets the store to restart evaluation.
    ///
    /// No-op by default.
    fn restart(&self) {}
}
|
/// Total addressable Chip-8 memory (4 KiB).
const MEMORY_SIZE: usize = 4096;
/// 8-bit memory cell.
type Byte = u8;
/// 16-bit value used for opcodes and addresses.
type Word = u16;
/// Flat Chip-8 address space backed by a fixed-size array.
pub struct Memory {
    // Raw byte storage; `new_memory` pre-loads the font sprites at 0x50.
    pub data: [Byte; MEMORY_SIZE],
}
/// Create a zeroed memory image with the standard hexadecimal font sprites
/// pre-loaded at the conventional offset `0x50`.
pub fn new_memory() -> Memory {
    // Each glyph is 5 bytes tall, one bit per pixel (MSB first), digits 0-F.
    let font = [
        0xF0, 0x90, 0x90, 0x90, 0xF0, // 0
        0x20, 0x60, 0x20, 0x20, 0x70, // 1
        0xF0, 0x10, 0xF0, 0x80, 0xF0, // 2
        0xF0, 0x10, 0xF0, 0x10, 0xF0, // 3
        0x90, 0x90, 0xF0, 0x10, 0x10, // 4
        0xF0, 0x80, 0xF0, 0x10, 0xF0, // 5
        0xF0, 0x80, 0xF0, 0x90, 0xF0, // 6
        0xF0, 0x10, 0x20, 0x40, 0x40, // 7
        0xF0, 0x90, 0xF0, 0x90, 0xF0, // 8
        0xF0, 0x90, 0xF0, 0x10, 0xF0, // 9
        0xF0, 0x90, 0xF0, 0x90, 0x90, // A
        0xE0, 0x90, 0xE0, 0x90, 0xE0, // B
        0xF0, 0x80, 0x80, 0x80, 0xF0, // C
        0xE0, 0x90, 0x90, 0x90, 0xE0, // D
        0xF0, 0x80, 0xF0, 0x80, 0xF0, // E
        0xF0, 0x80, 0xF0, 0x80, 0x80, // F
    ];
    let mut mem = Memory {
        data: [0; MEMORY_SIZE],
    };
    mem.write_data(&font, 0x50);
    // Idiomatic tail expression instead of an explicit `return`.
    mem
}
impl Memory {
    /// Fetch the big-endian 16-bit word (opcode) starting at index `i`.
    ///
    /// Panics if `i + 1` is out of bounds.
    // i -> index
    pub fn fetch(&self, i: usize) -> Word {
        // Two bytes are read, so only `i + 1` must be in range. The previous
        // `i < MEMORY_SIZE - 2` check was off by one and rejected the valid
        // final word at index MEMORY_SIZE - 2.
        assert!(i + 1 < MEMORY_SIZE);
        let hi = self.data[i] as Word;
        let lo = self.data[i + 1] as Word;
        // After the shift the low byte is already zero, so no mask is needed.
        (hi << 8) | lo
    }
    /// Read the single byte at index `i`.
    ///
    /// Panics if `i` is out of bounds.
    // i -> index
    pub fn get(&self, i: usize) -> Byte {
        // The previous `i < MEMORY_SIZE - 1` check made the last byte
        // unreadable; any index inside the array is valid.
        assert!(i < MEMORY_SIZE);
        self.data[i]
    }
}
// * interface
impl Memory {
    /// Hex-dump `c` bytes of memory starting at `s`, eight bytes per row with
    /// address labels.
    // s -> start
    // c -> count
    pub fn _print_mem(&self, s: usize, c: usize) {
        let mut i = s;
        loop {
            if i == s {
                print!("{:#05x}\t", i);
                print!("{:02x} ", self.data[i]);
                i += 1;
            }
            if i >= c + s {
                print!("\n");
                return;
            }
            print!("{:02x} ", self.data[i]);
            if i % 8 == 7 || i == s {
                print!("\n");
                print!("{:#05x}\t", i);
            }
            i += 1;
        }
    }
    /// Copy the slice `d` into memory starting at offset `s`.
    ///
    /// Panics if the slice would run past the end of memory.
    // s -> start
    // d -> data
    pub fn write_data(&mut self, d: &[Byte], s: usize) {
        // Validate once up front instead of asserting per byte; this also
        // avoids leaving a partial write behind when the range is too long.
        assert!(s + d.len() <= self.data.len());
        self.data[s..s + d.len()].copy_from_slice(d);
    }
}
|
/// Reset terminal formatting
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Reset;
impl Reset {
    /// Render the ANSI code
    ///
    /// Returns a zero-sized adapter whose `Display` impl writes the SGR reset
    /// sequence (`ESC [ 0 m`).
    #[inline]
    pub fn render(self) -> impl core::fmt::Display {
        ResetDisplay
    }
}
/// Zero-sized helper whose `Display` impl emits [`RESET`].
struct ResetDisplay;
impl core::fmt::Display for ResetDisplay {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // Delegate to `str`'s own `Display` implementation so any formatter
        // flags are handled exactly as for a plain string.
        core::fmt::Display::fmt(RESET, f)
    }
}
/// ANSI SGR sequence that clears all styling (`CSI 0 m`).
pub(crate) const RESET: &str = "\x1B[0m";
|
// NOTE(review): svd2rust-style auto-generated accessors for the `IMC`
// interrupt-mask register — presumably regenerated from the device SVD, so
// the bit positions below should be changed there rather than edited by hand.
#[doc = "Reader of register IMC"]
pub type R = crate::R<u32, super::IMC>;
#[doc = "Writer for register IMC"]
pub type W = crate::W<u32, super::IMC>;
#[doc = "Register IMC `reset()`'s with value 0"]
impl crate::ResetValue for super::IMC {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `BOR1IM`"]
pub type BOR1IM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BOR1IM`"]
pub struct BOR1IM_W<'a> {
    w: &'a mut W,
}
impl<'a> BOR1IM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `MOFIM`"]
pub type MOFIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MOFIM`"]
pub struct MOFIM_W<'a> {
    w: &'a mut W,
}
impl<'a> MOFIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `PLLLIM`"]
pub type PLLLIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PLLLIM`"]
pub struct PLLLIM_W<'a> {
    w: &'a mut W,
}
impl<'a> PLLLIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `USBPLLLIM`"]
pub type USBPLLLIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `USBPLLLIM`"]
pub struct USBPLLLIM_W<'a> {
    w: &'a mut W,
}
impl<'a> USBPLLLIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `MOSCPUPIM`"]
pub type MOSCPUPIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MOSCPUPIM`"]
pub struct MOSCPUPIM_W<'a> {
    w: &'a mut W,
}
impl<'a> MOSCPUPIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `VDDAIM`"]
pub type VDDAIM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `VDDAIM`"]
pub struct VDDAIM_W<'a> {
    w: &'a mut W,
}
impl<'a> VDDAIM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `BOR0IM`"]
pub type BOR0IM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BOR0IM`"]
pub struct BOR0IM_W<'a> {
    w: &'a mut W,
}
impl<'a> BOR0IM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
impl R {
    #[doc = "Bit 1 - VDD under BOR1 Interrupt Mask"]
    #[inline(always)]
    pub fn bor1im(&self) -> BOR1IM_R {
        BOR1IM_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Main Oscillator Failure Interrupt Mask"]
    #[inline(always)]
    pub fn mofim(&self) -> MOFIM_R {
        MOFIM_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 6 - PLL Lock Interrupt Mask"]
    #[inline(always)]
    pub fn plllim(&self) -> PLLLIM_R {
        PLLLIM_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - USB PLL Lock Interrupt Mask"]
    #[inline(always)]
    pub fn usbplllim(&self) -> USBPLLLIM_R {
        USBPLLLIM_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - MOSC Power Up Interrupt Mask"]
    #[inline(always)]
    pub fn moscpupim(&self) -> MOSCPUPIM_R {
        MOSCPUPIM_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 10 - VDDA Power OK Interrupt Mask"]
    #[inline(always)]
    pub fn vddaim(&self) -> VDDAIM_R {
        VDDAIM_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - VDD under BOR0 Interrupt Mask"]
    #[inline(always)]
    pub fn bor0im(&self) -> BOR0IM_R {
        BOR0IM_R::new(((self.bits >> 11) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 1 - VDD under BOR1 Interrupt Mask"]
    #[inline(always)]
    pub fn bor1im(&mut self) -> BOR1IM_W {
        BOR1IM_W { w: self }
    }
    #[doc = "Bit 3 - Main Oscillator Failure Interrupt Mask"]
    #[inline(always)]
    pub fn mofim(&mut self) -> MOFIM_W {
        MOFIM_W { w: self }
    }
    #[doc = "Bit 6 - PLL Lock Interrupt Mask"]
    #[inline(always)]
    pub fn plllim(&mut self) -> PLLLIM_W {
        PLLLIM_W { w: self }
    }
    #[doc = "Bit 7 - USB PLL Lock Interrupt Mask"]
    #[inline(always)]
    pub fn usbplllim(&mut self) -> USBPLLLIM_W {
        USBPLLLIM_W { w: self }
    }
    #[doc = "Bit 8 - MOSC Power Up Interrupt Mask"]
    #[inline(always)]
    pub fn moscpupim(&mut self) -> MOSCPUPIM_W {
        MOSCPUPIM_W { w: self }
    }
    #[doc = "Bit 10 - VDDA Power OK Interrupt Mask"]
    #[inline(always)]
    pub fn vddaim(&mut self) -> VDDAIM_W {
        VDDAIM_W { w: self }
    }
    #[doc = "Bit 11 - VDD under BOR0 Interrupt Mask"]
    #[inline(always)]
    pub fn bor0im(&mut self) -> BOR0IM_W {
        BOR0IM_W { w: self }
    }
}
|
//! # Naia Client
//! A cross-platform client that can send/receive events to/from a server, and
//! has a pool of in-scope actors that are synced with the server.
#![deny(
missing_docs,
missing_debug_implementations,
trivial_casts,
trivial_numeric_casts,
unsafe_code,
unstable_features,
unused_import_braces
)]
mod client_actor_manager;
mod client_actor_message;
mod client_config;
mod client_connection_state;
mod client_event;
mod client_packet_writer;
mod client_tick_manager;
mod command_receiver;
mod command_sender;
mod error;
mod interpolation_manager;
mod naia_client;
mod ping_manager;
mod server_connection;
mod tick_queue;
pub use naia_shared::{find_my_ip_address, Instant, LinkConditionerConfig, Random};
pub use client_config::ClientConfig;
pub use client_event::ClientEvent;
pub use naia_client::NaiaClient;
pub use naia_client_socket::Packet;
|
use semver::{Identifier, Version};
use crate::error::FatalError;
// Pre-release identifiers recognized by the bump logic below.
static VERSION_ALPHA: &'static str = "alpha";
static VERSION_BETA: &'static str = "beta";
static VERSION_RC: &'static str = "rc";
// NOTE(review): `arg_enum!` (presumably clap's) additionally derives
// `FromStr`/`variants()` so the level can be parsed from a CLI argument —
// confirm at the crate root. Line comments are used here because doc
// attributes could upset the macro's token matching.
arg_enum! {
    #[derive(Debug, Clone, Copy)]
    pub enum BumpLevel {
        Major,
        Minor,
        Patch,
        // Pre-release channels (rc is the closest to release).
        Rc,
        Beta,
        Alpha,
        // Strips any pre-release tag without touching major/minor/patch.
        Release,
    }
}
impl BumpLevel {
    /// Whether this level produces a pre-release version (alpha/beta/rc).
    pub fn is_pre_release(self) -> bool {
        match self {
            BumpLevel::Alpha | BumpLevel::Beta | BumpLevel::Rc => true,
            _ => false,
        }
    }
    /// Apply this bump level to `version` in place.
    ///
    /// Returns `Ok(true)` when the version actually changed (a commit is
    /// needed), `Ok(false)` otherwise. `metadata`, when provided, replaces the
    /// build-metadata component.
    pub fn bump_version(
        self,
        version: &mut Version,
        metadata: Option<&String>,
    ) -> Result<bool, FatalError> {
        // Expression form instead of a mutable `need_commit` flag: every arm
        // except `Release` always changes the version.
        let need_commit = match self {
            BumpLevel::Major => {
                version.increment_major();
                true
            }
            BumpLevel::Minor => {
                version.increment_minor();
                true
            }
            BumpLevel::Patch => {
                // Bumping patch on a pre-release finalizes it
                // (1.2.3-rc.1 -> 1.2.3) instead of moving to 1.2.4.
                if !version.is_prerelease() {
                    version.increment_patch();
                } else {
                    version.pre.clear();
                }
                true
            }
            BumpLevel::Rc => {
                version.increment_rc()?;
                true
            }
            BumpLevel::Beta => {
                version.increment_beta()?;
                true
            }
            BumpLevel::Alpha => {
                version.increment_alpha()?;
                true
            }
            // `Release` is a no-op unless there is a pre-release tag to drop.
            BumpLevel::Release => {
                if version.is_prerelease() {
                    version.pre.clear();
                    true
                } else {
                    false
                }
            }
        };
        if let Some(metadata) = metadata {
            version.metadata(metadata)?;
        }
        Ok(need_commit)
    }
}
/// Pre-release-aware bump helpers layered on top of `semver::Version`.
trait VersionExt {
    /// Bump to the next alpha; errors when already at beta or rc.
    fn increment_alpha(&mut self) -> Result<(), FatalError>;
    /// Bump to the next beta; errors when already at rc.
    fn increment_beta(&mut self) -> Result<(), FatalError>;
    /// Bump to the next release candidate.
    fn increment_rc(&mut self) -> Result<(), FatalError>;
    /// Split the current pre-release tag into `(identifier, number)`, if any.
    fn prerelease_id_version(&self) -> Result<Option<(String, Option<u64>)>, FatalError>;
    /// Replace the build-metadata component.
    fn metadata(&mut self, metadata: &str) -> Result<(), FatalError>;
}
impl VersionExt for Version {
    // Only the `name.N` pre-release scheme (e.g. `alpha.1`) is supported; a
    // purely numeric first identifier or a non-numeric second one is rejected.
    fn prerelease_id_version(&self) -> Result<Option<(String, Option<u64>)>, FatalError> {
        if !self.pre.is_empty() {
            let e = match self.pre[0] {
                Identifier::AlphaNumeric(ref s) => s.to_owned(),
                Identifier::Numeric(_) => {
                    return Err(FatalError::UnsupportedPrereleaseVersionScheme);
                }
            };
            let v = if let Some(v) = self.pre.get(1) {
                if let Identifier::Numeric(v) = *v {
                    Some(v)
                } else {
                    return Err(FatalError::UnsupportedPrereleaseVersionScheme);
                }
            } else {
                None
            };
            Ok(Some((e, v)))
        } else {
            Ok(None)
        }
    }
    fn increment_alpha(&mut self) -> Result<(), FatalError> {
        if let Some((pre_ext, pre_ext_ver)) = self.prerelease_id_version()? {
            // Cannot move "backwards" from beta/rc to alpha.
            if pre_ext == VERSION_BETA || pre_ext == VERSION_RC {
                Err(FatalError::InvalidReleaseLevel(VERSION_ALPHA.to_owned()))
            } else {
                // Already an alpha: bump its number; any other tag (e.g.
                // `dev`) restarts at alpha.1.
                let new_ext_ver = if pre_ext == VERSION_ALPHA {
                    pre_ext_ver.unwrap_or(0) + 1
                } else {
                    1
                };
                self.pre = vec![
                    Identifier::AlphaNumeric(VERSION_ALPHA.to_owned()),
                    Identifier::Numeric(new_ext_ver),
                ];
                Ok(())
            }
        } else {
            // No pre-release tag yet: pre-release the *next* patch version.
            self.increment_patch();
            self.pre = vec![
                Identifier::AlphaNumeric(VERSION_ALPHA.to_owned()),
                Identifier::Numeric(1),
            ];
            Ok(())
        }
    }
    fn increment_beta(&mut self) -> Result<(), FatalError> {
        if let Some((pre_ext, pre_ext_ver)) = self.prerelease_id_version()? {
            // Cannot move "backwards" from rc to beta.
            if pre_ext == VERSION_RC {
                Err(FatalError::InvalidReleaseLevel(VERSION_BETA.to_owned()))
            } else {
                let new_ext_ver = if pre_ext == VERSION_BETA {
                    pre_ext_ver.unwrap_or(0) + 1
                } else {
                    1
                };
                self.pre = vec![
                    Identifier::AlphaNumeric(VERSION_BETA.to_owned()),
                    Identifier::Numeric(new_ext_ver),
                ];
                Ok(())
            }
        } else {
            self.increment_patch();
            self.pre = vec![
                Identifier::AlphaNumeric(VERSION_BETA.to_owned()),
                Identifier::Numeric(1),
            ];
            Ok(())
        }
    }
    // `rc` is the highest pre-release channel, so any existing tag may be
    // promoted to it — no error case here.
    fn increment_rc(&mut self) -> Result<(), FatalError> {
        if let Some((pre_ext, pre_ext_ver)) = self.prerelease_id_version()? {
            let new_ext_ver = if pre_ext == VERSION_RC {
                pre_ext_ver.unwrap_or(0) + 1
            } else {
                1
            };
            self.pre = vec![
                Identifier::AlphaNumeric(VERSION_RC.to_owned()),
                Identifier::Numeric(new_ext_ver),
            ];
            Ok(())
        } else {
            self.increment_patch();
            self.pre = vec![
                Identifier::AlphaNumeric(VERSION_RC.to_owned()),
                Identifier::Numeric(1),
            ];
            Ok(())
        }
    }
    // Replaces (not appends to) the build-metadata component.
    fn metadata(&mut self, build: &str) -> Result<(), FatalError> {
        self.build = vec![Identifier::AlphaNumeric(build.to_owned())];
        Ok(())
    }
}
/// Rewrite the dependency requirement `req` so that it matches `version`,
/// preserving the operators and precision of the original requirement.
///
/// Returns `Ok(None)` when the requirement is unchanged (or `*`), and
/// `Ok(Some(text))` with the updated requirement otherwise.
pub fn set_requirement(
    req: &semver::VersionReq,
    version: &semver::Version,
) -> Result<Option<String>, FatalError> {
    let req_text = req.to_string();
    let raw_req = semver_parser::range::parse(&req_text)
        .expect("semver to generate valid version requirements");
    if raw_req.predicates.is_empty() {
        // Empty matches everything, no-change.
        Ok(None)
    } else {
        let predicates: Result<Vec<_>, _> = raw_req
            .predicates
            .into_iter()
            .map(|p| set_predicate(p, version))
            .collect();
        let predicates = predicates?;
        let new_req = semver_parser::range::VersionReq { predicates };
        let new_req_text = display::DisplayVersionReq::new(&new_req).to_string();
        // Validate contract.
        // FIX: the attribute was `cfg(debug_assert)`, a cfg that is never set,
        // so this sanity check silently never ran (and would not even have
        // type-checked, since `parse` needs `&str`, not `String`).
        #[cfg(debug_assertions)]
        {
            let req = semver::VersionReq::parse(&new_req_text).unwrap();
            assert!(
                req.matches(version),
                "Invalid req created: {}",
                new_req_text
            )
        }
        if new_req_text == req_text {
            Ok(None)
        } else {
            Ok(Some(new_req_text))
        }
    }
}
/// Re-point a single requirement predicate at `version`, keeping the original
/// operator and level of precision.
fn set_predicate(
    mut pred: semver_parser::range::Predicate,
    version: &semver::Version,
) -> Result<semver_parser::range::Predicate, FatalError> {
    use semver_parser::range::{Op, WildcardVersion};
    match pred.op {
        // `1.*` only pins the major component.
        Op::Wildcard(WildcardVersion::Minor) => {
            pred.major = version.major;
            Ok(pred)
        }
        // `1.2.*` pins major and (when present) minor.
        Op::Wildcard(WildcardVersion::Patch) => {
            pred.major = version.major;
            if pred.minor.is_some() {
                pred.minor = Some(version.minor);
            }
            Ok(pred)
        }
        // Exact, tilde and caret requirements keep their shape and are simply
        // re-pointed at the new version.
        Op::Ex | Op::Tilde | Op::Compatible => assign_partial_req(version, pred),
        // Ordering comparators cannot be rewritten mechanically.
        Op::Gt | Op::GtEq | Op::Lt | Op::LtEq => {
            let user_pred = display::DisplayPredicate::new(&pred).to_string();
            Err(FatalError::UnsupportedVersionReq(user_pred))
        }
    }
}
/// Point `pred` at `version`, overwriting only the components the predicate
/// already specified, and mirroring the version's pre-release identifiers.
fn assign_partial_req(
    version: &semver::Version,
    mut pred: semver_parser::range::Predicate,
) -> Result<semver_parser::range::Predicate, FatalError> {
    pred.major = version.major;
    // Preserve the user's level of precision: only set minor/patch when they
    // were present in the original requirement.
    if let Some(minor) = pred.minor.as_mut() {
        *minor = version.minor;
    }
    if let Some(patch) = pred.patch.as_mut() {
        *patch = version.patch;
    }
    // Mirror the pre-release identifiers, converting between the `semver` and
    // `semver_parser` identifier types.
    pred.pre.clear();
    for id in &version.pre {
        let converted = match id {
            semver::Identifier::Numeric(n) => {
                semver_parser::version::Identifier::Numeric(*n)
            }
            semver::Identifier::AlphaNumeric(s) => {
                semver_parser::version::Identifier::AlphaNumeric(s.clone())
            }
        };
        pred.pre.push(converted);
    }
    Ok(pred)
}
// imo this should be moved to semver_parser, see
// https://github.com/steveklabnik/semver-parser/issues/45
/// `Display` adapters for `semver_parser` requirement types, used to render
/// the rewritten requirement back into Cargo.toml-compatible text.
mod display {
    use std::fmt;
    use semver_parser::range::Op::{Compatible, Ex, Gt, GtEq, Lt, LtEq, Tilde, Wildcard};
    use semver_parser::range::WildcardVersion::{Minor, Patch};
    pub(crate) struct DisplayVersionReq<'v>(&'v semver_parser::range::VersionReq);
    impl<'v> DisplayVersionReq<'v> {
        pub(crate) fn new(req: &'v semver_parser::range::VersionReq) -> Self {
            Self(req)
        }
    }
    impl<'v> fmt::Display for DisplayVersionReq<'v> {
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            if self.0.predicates.is_empty() {
                // An empty requirement matches everything: render as `*`.
                write!(fmt, "*")?;
            } else {
                for (i, ref pred) in self.0.predicates.iter().enumerate() {
                    if i == 0 {
                        write!(fmt, "{}", DisplayPredicate(pred))?;
                    } else {
                        write!(fmt, ", {}", DisplayPredicate(pred))?;
                    }
                }
            }
            Ok(())
        }
    }
    pub(crate) struct DisplayPredicate<'v>(&'v semver_parser::range::Predicate);
    impl<'v> DisplayPredicate<'v> {
        pub(crate) fn new(pred: &'v semver_parser::range::Predicate) -> Self {
            Self(pred)
        }
    }
    impl<'v> fmt::Display for DisplayPredicate<'v> {
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            match &self.0.op {
                // Wildcards place the `*` inside the version, so they cannot
                // share the generic operator-prefix path below.
                Wildcard(Minor) => write!(fmt, "{}.*", self.0.major)?,
                Wildcard(Patch) => {
                    if let Some(minor) = self.0.minor {
                        write!(fmt, "{}.{}.*", self.0.major, minor)?
                    } else {
                        write!(fmt, "{}.*.*", self.0.major)?
                    }
                }
                _ => {
                    write!(fmt, "{}{}", DisplayOp(&self.0.op), self.0.major)?;
                    if let Some(v) = self.0.minor {
                        write!(fmt, ".{}", v)?;
                    }
                    if let Some(v) = self.0.patch {
                        write!(fmt, ".{}", v)?;
                    }
                    if !self.0.pre.is_empty() {
                        // Pre-release identifiers: `-alpha.1` style.
                        write!(fmt, "-")?;
                        for (i, x) in self.0.pre.iter().enumerate() {
                            if i != 0 {
                                write!(fmt, ".")?
                            }
                            write!(fmt, "{}", x)?;
                        }
                    }
                }
            }
            Ok(())
        }
    }
    pub(crate) struct DisplayOp<'v>(&'v semver_parser::range::Op);
    impl<'v> fmt::Display for DisplayOp<'v> {
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self.0 {
                Ex => write!(fmt, "= ")?,
                Gt => write!(fmt, "> ")?,
                GtEq => write!(fmt, ">= ")?,
                Lt => write!(fmt, "< ")?,
                LtEq => write!(fmt, "<= ")?,
                Tilde => write!(fmt, "~")?,
                Compatible => write!(fmt, "^")?,
                // gets handled specially in Predicate::fmt
                Wildcard(_) => write!(fmt, "")?,
            }
            Ok(())
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Covers the `VersionExt` pre-release increment rules, including the
    // "cannot go backwards" error cases.
    mod increment {
        use super::*;
        #[test]
        fn alpha() {
            let mut v = Version::parse("1.0.0").unwrap();
            let _ = v.increment_alpha();
            assert_eq!(v, Version::parse("1.0.1-alpha.1").unwrap());
            let mut v2 = Version::parse("1.0.1-dev").unwrap();
            let _ = v2.increment_alpha();
            assert_eq!(v2, Version::parse("1.0.1-alpha.1").unwrap());
            let mut v3 = Version::parse("1.0.1-alpha.1").unwrap();
            let _ = v3.increment_alpha();
            assert_eq!(v3, Version::parse("1.0.1-alpha.2").unwrap());
            let mut v4 = Version::parse("1.0.1-beta.1").unwrap();
            assert!(v4.increment_alpha().is_err());
            let mut v5 = Version::parse("1.0.1-1").unwrap();
            assert!(v5.increment_alpha().is_err());
        }
        #[test]
        fn beta() {
            let mut v = Version::parse("1.0.0").unwrap();
            let _ = v.increment_beta();
            assert_eq!(v, Version::parse("1.0.1-beta.1").unwrap());
            let mut v2 = Version::parse("1.0.1-dev").unwrap();
            let _ = v2.increment_beta();
            assert_eq!(v2, Version::parse("1.0.1-beta.1").unwrap());
            let mut v2 = Version::parse("1.0.1-alpha.1").unwrap();
            let _ = v2.increment_beta();
            assert_eq!(v2, Version::parse("1.0.1-beta.1").unwrap());
            let mut v3 = Version::parse("1.0.1-beta.1").unwrap();
            let _ = v3.increment_beta();
            assert_eq!(v3, Version::parse("1.0.1-beta.2").unwrap());
            let mut v4 = Version::parse("1.0.1-rc.1").unwrap();
            assert!(v4.increment_beta().is_err());
            let mut v5 = Version::parse("1.0.1-1").unwrap();
            assert!(v5.increment_beta().is_err());
        }
        #[test]
        fn rc() {
            let mut v = Version::parse("1.0.0").unwrap();
            let _ = v.increment_rc();
            assert_eq!(v, Version::parse("1.0.1-rc.1").unwrap());
            let mut v2 = Version::parse("1.0.1-dev").unwrap();
            let _ = v2.increment_rc();
            assert_eq!(v2, Version::parse("1.0.1-rc.1").unwrap());
            let mut v3 = Version::parse("1.0.1-rc.1").unwrap();
            let _ = v3.increment_rc();
            assert_eq!(v3, Version::parse("1.0.1-rc.2").unwrap());
        }
        #[test]
        fn metadata() {
            let mut v = Version::parse("1.0.0").unwrap();
            let _ = v.metadata("git.123456");
            assert_eq!(v, Version::parse("1.0.0+git.123456").unwrap());
        }
    }
    // Covers requirement rewriting for every supported operator shape.
    mod set_requirement {
        use super::*;
        // Asserts that rewriting `req` for `version` yields `expected`
        // (`None` meaning "no change needed").
        fn assert_req_bump<'a, O: Into<Option<&'a str>>>(version: &str, req: &str, expected: O) {
            let version = Version::parse(version).unwrap();
            let req = semver::VersionReq::parse(req).unwrap();
            let actual = set_requirement(&req, &version).unwrap();
            let expected = expected.into();
            assert_eq!(actual.as_ref().map(|s| s.as_str()), expected);
        }
        #[test]
        fn wildcard_major() {
            assert_req_bump("1.0.0", "*", None);
        }
        #[test]
        fn wildcard_minor() {
            assert_req_bump("1.0.0", "1.*", None);
            assert_req_bump("1.1.0", "1.*", None);
            assert_req_bump("2.0.0", "1.*", "2.*");
        }
        #[test]
        fn wildcard_patch() {
            assert_req_bump("1.0.0", "1.0.*", None);
            assert_req_bump("1.1.0", "1.0.*", "1.1.*");
            assert_req_bump("1.1.1", "1.0.*", "1.1.*");
            assert_req_bump("2.0.0", "1.0.*", "2.0.*");
        }
        #[test]
        fn caret_major() {
            assert_req_bump("1.0.0", "1", None);
            assert_req_bump("1.0.0", "^1", None);
            assert_req_bump("1.1.0", "1", None);
            assert_req_bump("1.1.0", "^1", None);
            assert_req_bump("2.0.0", "1", "^2");
            assert_req_bump("2.0.0", "^1", "^2");
        }
        #[test]
        fn caret_minor() {
            assert_req_bump("1.0.0", "1.0", None);
            assert_req_bump("1.0.0", "^1.0", None);
            assert_req_bump("1.1.0", "1.0", "^1.1");
            assert_req_bump("1.1.0", "^1.0", "^1.1");
            assert_req_bump("1.1.1", "1.0", "^1.1");
            assert_req_bump("1.1.1", "^1.0", "^1.1");
            assert_req_bump("2.0.0", "1.0", "^2.0");
            assert_req_bump("2.0.0", "^1.0", "^2.0");
        }
        #[test]
        fn caret_patch() {
            assert_req_bump("1.0.0", "1.0.0", None);
            assert_req_bump("1.0.0", "^1.0.0", None);
            assert_req_bump("1.1.0", "1.0.0", "^1.1.0");
            assert_req_bump("1.1.0", "^1.0.0", "^1.1.0");
            assert_req_bump("1.1.1", "1.0.0", "^1.1.1");
            assert_req_bump("1.1.1", "^1.0.0", "^1.1.1");
            assert_req_bump("2.0.0", "1.0.0", "^2.0.0");
            assert_req_bump("2.0.0", "^1.0.0", "^2.0.0");
        }
        #[test]
        fn tilde_major() {
            assert_req_bump("1.0.0", "~1", None);
            assert_req_bump("1.1.0", "~1", None);
            assert_req_bump("2.0.0", "~1", "~2");
        }
        #[test]
        fn tilde_minor() {
            assert_req_bump("1.0.0", "~1.0", None);
            assert_req_bump("1.1.0", "~1.0", "~1.1");
            assert_req_bump("1.1.1", "~1.0", "~1.1");
            assert_req_bump("2.0.0", "~1.0", "~2.0");
        }
        #[test]
        fn tilde_patch() {
            assert_req_bump("1.0.0", "~1.0.0", None);
            assert_req_bump("1.1.0", "~1.0.0", "~1.1.0");
            assert_req_bump("1.1.1", "~1.0.0", "~1.1.1");
            assert_req_bump("2.0.0", "~1.0.0", "~2.0.0");
        }
        #[test]
        fn equal_major() {
            assert_req_bump("1.0.0", "= 1", None);
            assert_req_bump("1.1.0", "= 1", None);
            assert_req_bump("2.0.0", "= 1", "= 2");
        }
        #[test]
        fn equal_minor() {
            assert_req_bump("1.0.0", "= 1.0", None);
            assert_req_bump("1.1.0", "= 1.0", "= 1.1");
            assert_req_bump("1.1.1", "= 1.0", "= 1.1");
            assert_req_bump("2.0.0", "= 1.0", "= 2.0");
        }
        #[test]
        fn equal_patch() {
            assert_req_bump("1.0.0", "= 1.0.0", None);
            assert_req_bump("1.1.0", "= 1.0.0", "= 1.1.0");
            assert_req_bump("1.1.1", "= 1.0.0", "= 1.1.1");
            assert_req_bump("2.0.0", "= 1.0.0", "= 2.0.0");
        }
    }
}
|
use crate::{
aabb::AABB,
hittable::{HitRecord, HitTable},
ray::Ray,
vec3::Point3,
};
use std::{sync::Arc, vec};
/// An aggregate of hittable objects, itself usable wherever a single
/// `HitTable` is expected.
pub struct HitTableList {
    // Shared trait objects so geometry can be reused across lists/threads.
    pub objects: vec::Vec<Arc<dyn HitTable>>,
}
impl HitTableList {
    /// Create an empty list.
    pub fn new() -> Self {
        Self { objects: Vec::new() }
    }
    /*pub fn clear(&mut self) {
        self.objects.clear();
    }*/
    /// Append an object to the list.
    pub fn add(&mut self, object: Arc<dyn HitTable>) {
        self.objects.push(object);
    }
}
impl Default for HitTableList {
    // Delegates to `new()` so the two constructors cannot drift apart.
    fn default() -> Self {
        Self::new()
    }
}
impl HitTable for HitTableList {
    // Reports the closest hit among all contained objects, writing it into
    // `rec`; returns whether anything was hit at all.
    fn hit(&self, r: &Ray, t_min: f64, t_max: f64, rec: &mut HitRecord) -> bool {
        let mut tmp_rec = rec.clone();
        let mut hit_anything = false;
        // Shrink the search interval to the nearest hit found so far so later
        // objects only register when they are closer.
        let mut closest_so_far = t_max;
        for object in &self.objects {
            if object.hit(r, t_min, closest_so_far, &mut tmp_rec) {
                hit_anything = true;
                closest_so_far = tmp_rec.t;
                *rec = tmp_rec.clone();
            }
        }
        hit_anything
    }
    // Computes the union AABB of every object; returns false when the list is
    // empty or any member has no bounding box.
    fn bounding_box(&self, t0: f64, t1: f64, output_box: &mut AABB) -> bool {
        if self.objects.is_empty() {
            return false;
        }
        let mut tmp_box = AABB::new(Point3::zero(), Point3::zero());
        let mut first_box = true;
        for object in &self.objects {
            if !object.bounding_box(t0, t1, &mut tmp_box) {
                return false;
            }
            // The first box seeds the union; subsequent boxes grow it.
            if first_box {
                *output_box = tmp_box.clone();
            } else {
                *output_box = AABB::surrounding_box(output_box, &tmp_box);
            }
            first_box = false;
        }
        true
    }
    // A list has no single center, so distance is not meaningful here.
    fn distance(&self, _other_center: &Point3) -> f64 {
        0.0
    }
}
|
/* use std::env;
use std::fs;
fn main() {
let args: Vec<String> = env::args().collect();
let config = Config::new(&args);
println!("Searching for {}", config.query);
println!("In file {}", config.filename);
//use BufReader instead: https://doc.rust-lang.org/1.39.0/std/io/struct.BufReader.html
let contents = fs::read_to_string(config.filename)
.expect("Something went wrong reading the file");
println!("With text:\n{}", contents);
}
struct Config {
query: String,
filename: String,
}
impl Config {
fn new(args: &[String]) -> Config {
let query = args[1].clone();
let filename = args[2].clone();
Config { query, filename }
}
}
pub fn add(a: i32, b: i32) -> i32 {
a + b
}
// This is a really bad adding function, its purpose is to fail in this
// example.
#[allow(dead_code)]
fn bad_add(a: i32, b: i32) -> i32 {
a - b
}
*/ |
//! A simple library for *fast* inspection of binary buffers to guess the type of content.
//!
//! This is mainly intended to quickly determine whether a given buffer contains "binary"
//! or "text" data. Programs like `grep` or `git diff` use similar mechanisms to decide whether
//! to treat some files as "binary data" or not.
//!
//! The analysis is based on a very simple heuristic: Searching for NULL bytes
//! (indicating "binary" content) and the detection of special [byte order
//! marks](https://en.wikipedia.org/wiki/Byte_order_mark) (indicating a particular kind of textual
//! encoding). Note that **this analysis can fail**. For example, even if unlikely, UTF-8-encoded
//! text can legally contain NULL bytes. Conversely, some particular binary formats (like binary
//! [PGM](https://en.wikipedia.org/wiki/Netpbm_format)) may not contain NULL bytes. Also, for
//! performance reasons, only the first 1024 bytes are checked for the NULL-byte (if no BOM was
//! detected).
//!
//! If this library reports a certain type of encoding (say `UTF_16LE`), there is **no guarantee**
//! that the binary buffer can *actually* be decoded as UTF-16LE.
//!
//! # Example
//! ```
//! use content_inspector::{ContentType, inspect};
//!
//! assert_eq!(ContentType::UTF_8, inspect(b"Hello"));
//! assert_eq!(ContentType::BINARY, inspect(b"\xFF\xE0\x00\x10\x4A\x46\x49\x46\x00"));
//!
//! assert!(inspect(b"Hello").is_text());
//! ```
extern crate memchr;
use memchr::memchr;
use std::cmp::min;
use std::fmt;
const MAX_SCAN_SIZE: usize = 1024;
/// The type of encoding that was detected (for "text" data) or `BINARY` for "binary" data.
// Variant names mirror the conventional encoding names (e.g. "UTF-16LE"),
// hence the `non_camel_case_types` allowance.
#[allow(non_camel_case_types)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum ContentType {
    /// "binary" data
    BINARY,
    /// UTF-8 encoded "text" data
    UTF_8,
    /// UTF-8 encoded "text" data with a byte order mark.
    UTF_8_BOM,
    /// UTF-16 encoded "text" data (little endian)
    UTF_16LE,
    /// UTF-16 encoded "text" data (big endian)
    UTF_16BE,
    /// UTF-32 encoded "text" data (little endian)
    UTF_32LE,
    /// UTF-32 encoded "text" data (big endian)
    UTF_32BE,
}
impl ContentType {
    /// Returns `true`, if the `ContentType` is `BINARY`.
    pub fn is_binary(self) -> bool {
        matches!(self, ContentType::BINARY)
    }

    /// Returns `true` for every textual variant, i.e. anything but `BINARY`.
    pub fn is_text(self) -> bool {
        !matches!(self, ContentType::BINARY)
    }
}
impl fmt::Display for ContentType {
    /// Writes the conventional name of the encoding (e.g. "UTF-16LE"),
    /// or "binary" for binary data.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use ContentType::*;
        f.write_str(match self {
            BINARY => "binary",
            UTF_8 => "UTF-8",
            UTF_8_BOM => "UTF-8-BOM",
            UTF_16LE => "UTF-16LE",
            UTF_16BE => "UTF-16BE",
            UTF_32LE => "UTF-32LE",
            UTF_32BE => "UTF-32BE",
        })
    }
}
/// Common byte order marks
/// (see https://en.wikipedia.org/wiki/Byte_order_mark)
static BYTE_ORDER_MARKS: &[(&[u8], ContentType)] = &[
    (&[0xEF, 0xBB, 0xBF], ContentType::UTF_8_BOM),
    // UTF-32 needs to be checked before UTF-16 (overlapping BOMs)
    (&[0x00, 0x00, 0xFE, 0xFF], ContentType::UTF_32BE),
    (&[0xFF, 0xFE, 0x00, 0x00], ContentType::UTF_32LE),
    (&[0xFE, 0xFF], ContentType::UTF_16BE),
    (&[0xFF, 0xFE], ContentType::UTF_16LE),
];
/// Magic numbers for some filetypes that could otherwise be characterized as text.
// `%PDF` (PDF) and `\x89PNG` (PNG) file prefixes.
static MAGIC_NUMBERS: [&[u8]; 2] = [b"%PDF", b"\x89PNG"];
/// Try to determine the type of content in the given buffer. See the crate documentation for a
/// usage example and for more details on how this analysis is performed.
///
/// If the buffer is empty, the content type will be reported as `UTF_8`.
pub fn inspect(buffer: &[u8]) -> ContentType {
use ContentType::*;
for &(bom, content_type) in BYTE_ORDER_MARKS {
if buffer.starts_with(bom) {
return content_type;
}
}
// Scan the first few bytes for zero-bytes
let scan_size = min(buffer.len(), MAX_SCAN_SIZE);
let has_zero_bytes = memchr(0x00, &buffer[..scan_size]).is_some();
if has_zero_bytes {
return BINARY;
}
if MAGIC_NUMBERS.iter().any(|magic| buffer.starts_with(magic)) {
return BINARY;
}
UTF_8
}
#[cfg(test)]
mod tests {
    use {inspect, ContentType::*};
    // NOTE(review): the fixture tests below assume files under ../testdata
    // exist with the encodings their names describe.
    #[test]
    fn test_empty_buffer_utf_8() {
        assert_eq!(UTF_8, inspect(b""));
    }
    #[test]
    fn test_text_simple() {
        assert_eq!(UTF_8, inspect("Simple UTF-8 string ☔".as_bytes()));
    }
    #[test]
    fn test_text_utf8() {
        assert_eq!(UTF_8, inspect(include_bytes!("../testdata/text_UTF-8.txt")));
    }
    #[test]
    fn test_text_utf8_bom() {
        assert_eq!(
            UTF_8_BOM,
            inspect(include_bytes!("../testdata/text_UTF-8-BOM.txt"))
        );
    }
    #[test]
    fn test_text_utf16le() {
        assert_eq!(
            UTF_16LE,
            inspect(include_bytes!("../testdata/text_UTF-16LE-BOM.txt"))
        );
    }
    #[test]
    fn test_text_utf16be() {
        assert_eq!(
            UTF_16BE,
            inspect(include_bytes!("../testdata/text_UTF-16BE-BOM.txt"))
        );
    }
    #[test]
    fn test_text_utf32le() {
        assert_eq!(
            UTF_32LE,
            inspect(include_bytes!("../testdata/text_UTF-32LE-BOM.txt"))
        );
    }
    #[test]
    fn test_text_utf32be() {
        assert_eq!(
            UTF_32BE,
            inspect(include_bytes!("../testdata/text_UTF-32BE-BOM.txt"))
        );
    }
    // Binary fixtures: PNG and PDF hit the magic-number check; JPEG is
    // caught by the NULL-byte scan.
    #[test]
    fn test_png() {
        assert_eq!(BINARY, inspect(include_bytes!("../testdata/test.png")));
    }
    #[test]
    fn test_jpg() {
        assert_eq!(BINARY, inspect(include_bytes!("../testdata/test.jpg")));
    }
    #[test]
    fn test_pdf() {
        assert_eq!(BINARY, inspect(include_bytes!("../testdata/test.pdf")));
    }
    #[test]
    fn test_is_text() {
        assert!(UTF_8.is_text());
        assert!(UTF_32LE.is_text());
    }
    #[test]
    fn test_is_binary() {
        assert!(BINARY.is_binary());
    }
}
|
use async_std::{io, task};
use futures::{future, prelude::*};
use libp2p::{
Multiaddr,
PeerId,
Swarm,
NetworkBehaviour,
identity,
floodsub::{self, Floodsub, FloodsubEvent},
mdns::{Mdns, MdnsEvent},
swarm::NetworkBehaviourEventProcess
};
use std::{error::Error, task::{Context, Poll}};
/// Entry point for a minimal libp2p chat example.
///
/// Builds a swarm combining floodsub (messaging) with mDNS (peer discovery),
/// optionally dials a peer given as the first CLI argument, then forwards
/// lines read from stdin to the "chat" topic until stdin closes.
fn main() -> Result<(), Box<dyn Error>> {
    // Start logging everything that's happening
    env_logger::init();
    // Generate a unique, random peer ID
    let local_key = identity::Keypair::generate_ed25519();
    let local_peer_id = PeerId::from(local_key.public());
    println!("Local peer id: {:?}", local_peer_id);
    // Setup a transport to actually connect with the world
    let transport = libp2p::build_development_transport(local_key)?;
    // Set the topic to "chat" to find others on the same topic
    let floodsub_topic = floodsub::Topic::new("chat");
    // Combine the floodsub and mDNS protocols into one behaviour
    #[derive(NetworkBehaviour)]
    struct MyBehaviour {
        floodsub: Floodsub, // For the actual messaging
        mdns: Mdns, // For tracking active users
        // Excluded from the derived behaviour; kept purely as an example field.
        #[behaviour(ignore)]
        #[allow(dead_code)]
        ignored_member: bool,
    }
    // Print out message when received over floodsub
    impl NetworkBehaviourEventProcess<FloodsubEvent> for MyBehaviour {
        fn inject_event(&mut self, message: FloodsubEvent) {
            if let FloodsubEvent::Message(message) = message {
                println!("Received: '{:?}' from {:?}", String::from_utf8_lossy(&message.data), message.source);
            }
        }
    }
    // Add and remove users from active list when they are updated over mDNS
    impl NetworkBehaviourEventProcess<MdnsEvent> for MyBehaviour {
        fn inject_event(&mut self, event: MdnsEvent) {
            match event {
                MdnsEvent::Discovered(list) =>
                    for (peer, _) in list {
                        self.floodsub.add_node_to_partial_view(peer);
                    }
                MdnsEvent::Expired(list) =>
                    for (peer, _) in list {
                        // Only drop peers that mDNS no longer knows about.
                        if !self.mdns.has_node(&peer) {
                            self.floodsub.remove_node_from_partial_view(&peer);
                        }
                    }
            }
        }
    }
    // Glob together peers and events into a single swarm
    let mut swarm = {
        let mdns = task::block_on(Mdns::new())?;
        let mut behaviour = MyBehaviour {
            floodsub: Floodsub::new(local_peer_id.clone()),
            mdns,
            ignored_member: false,
        };
        behaviour.floodsub.subscribe(floodsub_topic.clone());
        Swarm::new(transport, behaviour, local_peer_id)
    };
    // Connect to a specified p2p access point
    if let Some(to_dial) = std::env::args().nth(1) {
        let addr: Multiaddr = to_dial.parse()?;
        Swarm::dial_addr(&mut swarm, addr)?;
        println!("Dialed {:?}", to_dial)
    }
    // Read full lines from the terminal
    let mut stdin = io::BufReader::new(io::stdin()).lines();
    // Attach the network swarm to whatever port the OS wants
    Swarm::listen_on(&mut swarm, "/ip4/0.0.0.0/tcp/0".parse()?)?;
    // Actually do things
    let mut listening = false;
    task::block_on(future::poll_fn(move |cx: &mut Context<'_>| {
        // Drain any complete lines from stdin, publishing each to the topic.
        loop {
            match stdin.try_poll_next_unpin(cx)? {
                Poll::Ready(Some(line)) => swarm.floodsub.publish(floodsub_topic.clone(), line.as_bytes()),
                Poll::Ready(None) => panic!("Stdin closed"),
                Poll::Pending => break
            }
        }
        // Drive the swarm until it has no more ready events.
        loop {
            match swarm.poll_next_unpin(cx) {
                Poll::Ready(Some(event)) => println!("{:?}", event),
                Poll::Ready(None) => return Poll::Ready(Ok(())),
                Poll::Pending => {
                    // Report the listen addresses once, the first time we idle.
                    if !listening {
                        for addr in Swarm::listeners(&swarm) {
                            println!("Listening on {:?}", addr);
                            listening = true;
                        }
                    }
                    break
                }
            }
        }
        Poll::Pending
    }))
}
use crate::intcode::{IntCodeEmulator, YieldReason};
use itertools::Itertools;
use std::collections::VecDeque;
const INPUT: &str = include_str!("../input/2019/day7.txt");
/// Finds the maximum signal over all orderings of phase settings 0..5,
/// with the amplifiers run once each in series.
pub fn part1() -> i64 {
    let program = IntCodeEmulator::parse_input(INPUT);
    (0..5)
        .permutations(5)
        .map(|phases| run_in_series(&program, &phases))
        .max()
        .expect("Unable to run IntCode VMs")
}
/// Finds the maximum signal over all orderings of phase settings 5..10,
/// with the amplifiers wired in a feedback loop.
pub fn part2() -> i64 {
    let program = IntCodeEmulator::parse_input(INPUT);
    (5..10)
        .permutations(5)
        .map(|phases| run_feedback_loop(&program, &phases))
        .max()
        .expect("Unable to run IntCode VMs")
}
/// runs intcode VMs with the given IDs in series and returns the output from the final one
fn run_in_series(program: &[i64], ids: &[i64]) -> i64 {
ids.iter().fold(0, |current, &id| {
let mut vm = IntCodeEmulator::new(program.to_vec());
vm.stdin().push_back(id);
vm.stdin().push_back(current);
vm.execute();
vm.stdout().pop_back().expect("No output produced")
})
}
/// runs intcode VMs with the given IDs in a feedback loop until none require any more input
/// then returns the final result
fn run_feedback_loop(program: &[i64], ids: &[i64]) -> i64 {
let mut waiting: VecDeque<IntCodeEmulator> = ids
.iter()
.map(|&id| {
let mut vm = IntCodeEmulator::new(program.to_vec());
vm.stdin().push_back(id);
vm
})
.collect();
let mut value = 0;
while let Some(mut vm) = waiting.pop_front() {
vm.stdin().push_back(value);
let result = vm.execute_until_yield();
value = vm.stdout().pop_back().expect("No output produced");
if let YieldReason::InputRequired = result {
waiting.push_back(vm);
}
}
value
}
#[cfg(test)]
mod tests {
    use super::*;
    // Regression tests pinning the known-correct puzzle answers for this input.
    #[test]
    fn day07_part1() {
        assert_eq!(part1(), 272_368);
    }
    #[test]
    fn day07_part2() {
        assert_eq!(part2(), 19_741_286);
    }
}
|
use super::{Client, Structure};
use crate::field::{ForeignKey, LineItem};
use chrono::{DateTime, Utc};
use retriever::traits::record::Record;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
/// An expense report issued to a client: a numbered, dated collection of
/// line items.
#[derive(Debug, Deserialize, Serialize, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ExpenseReport {
    /// The client this report was issued to.
    pub client: ForeignKey<Client>,
    /// Report number; also used as the item key in storage.
    pub number: usize,
    pub date_issued: DateTime<Utc>,
    /// The individual charges on this report.
    pub lines: Vec<LineItem>,
}
impl Structure for ExpenseReport {
    // Storage namespace for expense reports.
    const STORE: &'static str = "expense-reports";
    // Reports are chunked by their client's full key (the client's chunk
    // key plus its item key).
    type ChunkKeys = (
        <Client as Structure>::ChunkKeys,
        <Client as Structure>::ItemKeys,
    );
    // Within a chunk, a report is identified by its number.
    type ItemKeys = usize;
}
impl Record<<Self as Structure>::ChunkKeys, <Self as Structure>::ItemKeys> for ExpenseReport {
    fn chunk_key(&self) -> Cow<<Self as Structure>::ChunkKeys> {
        // Owned: the key is built fresh by converting the client foreign key.
        Cow::Owned(self.client.clone().into())
    }
    fn item_key(&self) -> Cow<<Self as Structure>::ItemKeys> {
        // Borrowed: the report number is stored directly on the record.
        Cow::Borrowed(&self.number)
    }
}
|
use std::{
collections::HashMap,
iter::FromIterator,
};
use serde::{
Deserialize,
Serialize,
};
use rnc_core::{
urs::Urs,
urs_taxid::UrsTaxid,
};
use crate::normalize::utils;
use crate::normalize::ds::{
basic::Basic,
cross_reference::{
AccessionVec,
CrossReference,
},
crs::{
Crs,
CrsVec,
},
feedback::{
Feedback,
FeedbackVec,
},
go_annotation::{
GoAnnotation,
GoAnnotationVec,
},
interacting_protein::{
InteractingProtein,
InteractingProteinVec,
},
interacting_rna::{
InteractingRna,
InteractingRnaVec,
},
precompute::{
Precompute,
PrecomputeSummary,
},
qa_status::QaStatus,
r2dt::R2dt,
reference::{
Reference,
ReferenceVec,
},
rfam_hit::{
RfamHit,
RfamHitVec,
},
so_tree,
};
/// The raw per-sequence record as deserialized from JSON; `id` is a
/// URS_taxid string and every other field is a list that may be empty.
/// (A sample document appears in the comment below.)
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Raw {
    /// URS_taxid identifier, e.g. "URS0000614226_291828".
    id: String,
    /// Basic sequence info; normalization expects exactly one entry.
    base: Vec<Basic>,
    cross_references: Vec<CrossReference>,
    crs: Vec<Crs>,
    feedback: Vec<Feedback>,
    go_annotations: Vec<GoAnnotation>,
    interacting_proteins: Vec<InteractingProtein>,
    interacting_rnas: Vec<InteractingRna>,
    /// Precomputed summary data; normalization expects exactly one entry.
    precompute: Vec<Precompute>,
    /// QA flags; normalization expects exactly one entry.
    qa_status: Vec<QaStatus>,
    /// Secondary-structure data; normalization expects zero or one entry.
    r2dt: Vec<R2dt>,
    references: Vec<Reference>,
    rfam_hits: Vec<RfamHit>,
}
/*
{
"interacting_proteins": [],
"base": [
{
"id": "URS0000614226_291828",
"length": 181,
"md5": "1b40575dabf9994947faba61876fc1a6",
"urs": "URS0000614226"
}
],
"id": "URS0000614226_291828",
"crs": [],
"cross_references": [],
"qa_status": [
{
"has_issue": true,
"id": "URS0000614226_291828",
"incomplete_sequence": true,
"missing_rfam_match": false,
"possible_contamination": false
}
],
"go_annotations": [],
"feedback": [],
"interacting_rnas": [],
"references": [],
"precompute": [
{
"databases": "ENA",
"description": "uncultured Parvibaculum sp. partial 16S ribosomal RNA",
"has_coordinates": false,
"id": "URS0000614226_291828",
"rna_type": "rRNA",
"so_rna_type": "SO:0000650"
}
],
"r2dt": [],
"rfam_hits": [
{
"id": "URS0000614226_291828",
"rfam_clans": "CL00111",
"rfam_family_names": "SSU_rRNA_bacteria",
"rfam_ids": "RF00177",
"urs": "URS0000614226"
}
]
}
*/
/// The flattened, denormalized form of [`Raw`] that is serialized for
/// output: single-element lists are unwrapped, and the remaining vectors
/// are converted to their aggregate `*Vec` forms.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Normalized {
    urs: String,
    taxid: u64,
    urs_taxid: String,
    short_urs: String,
    /// Always "N" for freshly normalized entries.
    deleted: String,
    so_rna_type_tree: so_tree::SoTree,
    #[serde(flatten)]
    pre_summary: PrecomputeSummary,
    #[serde(flatten)]
    basic: Basic,
    // #[serde(flatten)]
    // dates: Dates,
    qa_status: QaStatus,
    /// Present only when the raw record carried an R2DT entry.
    secondary_structure: Option<R2dt>,
    accessions: AccessionVec,
    crs: CrsVec,
    feedback: FeedbackVec,
    go_annotations: GoAnnotationVec,
    interacting_proteins: InteractingProteinVec,
    interacting_rnas: InteractingRnaVec,
    references: ReferenceVec,
    rfam_hits: RfamHitVec,
}
impl Raw {
    /// The bare URS accession (without the taxid), parsed from `id`.
    pub fn urs(&self) -> anyhow::Result<String> {
        let urs_taxid: UrsTaxid = self.id.parse()?;
        let urs: Urs = urs_taxid.into();
        Ok(urs.to_string())
    }

    /// The taxonomy ID, parsed from `id`.
    pub fn taxid(&self) -> anyhow::Result<u64> {
        let urs_taxid: UrsTaxid = self.id.parse()?;
        Ok(urs_taxid.taxid())
    }

    /// The shortened display form of the URS accession, parsed from `id`.
    pub fn short_urs(&self) -> anyhow::Result<String> {
        let urs_taxid: UrsTaxid = self.id.parse()?;
        let urs: Urs = urs_taxid.into();
        Ok(urs.short_urs())
    }
}
impl Normalized {
    /// Builds a `Normalized` record from a `Raw` one.
    ///
    /// `so_info` maps SO term ids (e.g. "SO:0000650") to their trees; the
    /// raw record's precomputed SO RNA type must be present in it.
    ///
    /// # Errors
    /// Fails if `id` does not parse as a URS_taxid; if `base`, `precompute`,
    /// or `qa_status` do not contain exactly one element; if `r2dt` contains
    /// more than one element; or if the SO RNA type is missing from `so_info`.
    pub fn new(raw: &Raw, so_info: &HashMap<String, so_tree::SoTree>) -> anyhow::Result<Self> {
        let basic = utils::expect_single(&raw.base, "base")?;
        let precompute = utils::expect_single(&raw.precompute, "precompute")?;
        let qa_status = utils::expect_single(&raw.qa_status, "qa_status")?;
        // Look the SO type up fallibly: indexing the map (`so_info[...]`)
        // would panic on an unknown term, but we already return a Result.
        let so_rna_type_tree = so_info
            .get(precompute.so_rna_type())
            .ok_or_else(|| {
                anyhow::anyhow!("unknown SO RNA type: {}", precompute.so_rna_type())
            })?
            .clone();
        let pre_summary = PrecomputeSummary::from(precompute);
        let secondary_structure = utils::maybe_single(&raw.r2dt, "r2dt")?;
        Ok(Self {
            urs_taxid: raw.id.clone(),
            urs: raw.urs()?,
            taxid: raw.taxid()?,
            short_urs: raw.short_urs()?,
            // Freshly normalized entries are never deleted.
            deleted: String::from("N"),
            so_rna_type_tree,
            pre_summary,
            basic,
            qa_status,
            secondary_structure,
            accessions: AccessionVec::from_iter(raw.cross_references.clone()),
            crs: CrsVec::from_iter(raw.crs.clone()),
            feedback: FeedbackVec::from_iter(raw.feedback.clone()),
            go_annotations: GoAnnotationVec::from_iter(raw.go_annotations.clone()),
            interacting_proteins: InteractingProteinVec::from_iter(
                raw.interacting_proteins.clone(),
            ),
            interacting_rnas: InteractingRnaVec::from_iter(raw.interacting_rnas.clone()),
            references: ReferenceVec::from_iter(raw.references.clone()),
            rfam_hits: RfamHitVec::from_iter(raw.rfam_hits.clone()),
        })
    }
}
|
pub mod bezier;
pub mod ds;
pub mod mesh;
pub mod plane;
pub mod sphere;
pub use bezier::BezierRotate;
pub use mesh::Mesh;
pub use plane::Plane;
pub use sphere::Sphere;
|
pub mod base_project_dependency;
pub mod flatten_project_dependency;
|
/// A deterministic pseudo-"random" source that cycles through a fixed
/// table of ten values — useful for reproducible behavior in tests.
pub struct SimpleRnd {
    /// The fixed cycle of values handed out by `next_rnd`.
    values: [i32; 10],
    /// Index of the most recently returned value.
    next: usize,
}

impl SimpleRnd {
    /// Creates a generator positioned at the start of the cycle.
    ///
    /// Note: `next_rnd` advances *before* reading, so the first call
    /// returns `values[1]` (3); `values[0]` is returned only after a
    /// full wrap-around.
    pub fn new() -> Self {
        SimpleRnd {
            values: [1, 3, 5, 7, 2, 4, 6, 8, 9, 0],
            next: 0,
        }
    }

    /// Advances to the next position in the cycle (wrapping around) and
    /// returns the value stored there.
    pub fn next_rnd(&mut self) -> i32 {
        // Modulo wrap replaces the manual compare-and-reset; identical
        // behavior since `next` never exceeds `values.len()` here.
        self.next = (self.next + 1) % self.values.len();
        self.values[self.next]
    }
}

// `new()` without `Default` trips clippy's `new_without_default`; provide it.
impl Default for SimpleRnd {
    fn default() -> Self {
        Self::new()
    }
}
|
use druid::kurbo::{BezPath, Circle, Insets, Point, Rect, Vec2};
use druid::piet::{RenderContext, StrokeStyle};
use druid::{Data, Env, EventCtx, HotKey, KbKey, KeyEvent, MouseEvent, PaintCtx, RawMods};
use crate::edit_session::EditSession;
use crate::mouse::{Drag, Mouse, MouseDelegate, TaggedEvent};
use crate::path::Segment;
use crate::point::EntityId;
use crate::tools::{EditType, Tool, ToolId};
use crate::{
design_space::{DPoint, DVec2, ViewPort},
quadrant::Quadrant,
selection::Selection,
theme,
};
// distance from edges of the selection bbox to where we draw the handles
const SELECTION_BBOX_HANDLE_PADDING: Insets = Insets::uniform(6.0);
// radius of the circular selection handles, in screen units
const SELECTION_HANDLE_RADIUS: f64 = 4.;
/// An item that can be selected.
#[derive(Debug, Clone)]
enum Item {
    /// A drag handle on the selection bounding box.
    SelectionHandle(Quadrant),
    /// A point, identified by its entity id.
    Point(EntityId),
    /// A guide, identified by its entity id.
    Guide(EntityId),
    /// A path segment.
    Segment(Box<Segment>),
}
/// The internal state of the mouse, advanced by this tool's
/// `MouseDelegate` implementation as events arrive.
#[derive(Debug, Clone)]
enum MouseState {
    /// The mouse is idle; it may be hovering on an item.
    Idle(Option<Item>),
    /// The mouse has clicked, and may have clicked an item.
    Down(Option<Item>),
    /// The mouse is down but we should not transition to a drag if one
    /// begins.
    SuppressDrag,
    /// A drag gesture is in progress.
    Drag(DragState),
    /// The mouse is up after clicking an item; if a double-click occurs
    /// it will modify that item
    WaitDoubleClick(Item),
    /// Internal: the state is in transition. This should only be present
    /// during event handling.
    Transition,
}
/// The state of an in-progress drag gesture.
#[derive(Debug, Clone)]
enum DragState {
    /// State for a drag that is a rectangular selection.
    Select {
        /// The selection as it was when the drag began.
        previous: Selection,
        /// The current rubber-band rectangle, in screen space.
        rect: Rect,
        /// Whether shift is held; if so, the rect's contents are XOR-ed
        /// with `previous` instead of replacing the selection.
        toggle: bool,
    },
    /// State for a drag that is moving a selected object.
    Move { previous: EditSession, delta: DVec2 },
    /// State for a drag that scales the selection via one of its
    /// bounding-box handles.
    TransformSelection {
        quadrant: Quadrant,
        previous: EditSession,
        delta: DVec2,
        /// The paths before this transform; we want to draw these faintly
        /// until the gesture completes
        pre_paths: BezPath,
    },
}
/// The state of the selection tool.
#[derive(Debug, Default, Clone)]
pub struct Select {
    /// state preserved between events.
    state: MouseState,
    /// The most recently seen mouse position, in screen space.
    last_pos: Point,
    /// The edit type produced by the current event, if any.
    ///
    /// This is stashed here because we can't return anything from the methods in
    /// `MouseDelegate`.
    ///
    /// It is an invariant that this is always `None`, except while we are in
    /// a `key_down`, `key_up`, or `mouse_event` method.
    this_edit_type: Option<EditType>,
}
impl Tool for Select {
    /// Aborts any in-progress gesture and reports the resulting edit, if any.
    fn cancel(
        &mut self,
        mouse: &mut Mouse,
        _ctx: &mut EventCtx,
        data: &mut EditSession,
    ) -> Option<EditType> {
        mouse.cancel(data, self);
        self.this_edit_type.take()
    }

    /// Draws tool overlays: the selection bbox and hover indicator while
    /// idle, and the rubber-band rect or transform preview while dragging.
    fn paint(&mut self, ctx: &mut PaintCtx, data: &EditSession, env: &Env) {
        let selection_stroke = env.get(theme::SELECTION_RECT_STROKE_COLOR);
        match &self.state {
            MouseState::Idle(item) => {
                // Highlight the hovered selection handle, if any.
                let quad = match &item {
                    Some(Item::SelectionHandle(quad)) => Some(*quad),
                    _ => None,
                };
                paint_selection_bbox(ctx, data, env, quad);
                match item {
                    Some(Item::Point(id)) => {
                        if let Some(pp) = data.path_point_for_id(*id) {
                            let point = data.viewport.to_screen(pp.point);
                            paint_hover_indicator(ctx, data, point, env);
                        }
                    }
                    Some(Item::Segment(seg)) => {
                        // Indicate the spot on the segment nearest the cursor.
                        let seg_point = data.viewport.affine()
                            * seg.nearest_point(data.viewport.from_screen(self.last_pos));
                        paint_hover_indicator(ctx, data, seg_point, env);
                    }
                    Some(Item::Guide(id)) => {
                        if let Some(point) =
                            data.guides.iter().find(|g| g.id == *id).map(|guide| {
                                guide.nearest_screen_point(data.viewport, self.last_pos)
                            })
                        {
                            paint_hover_indicator(ctx, data, point, env);
                        }
                    }
                    Some(Item::SelectionHandle(_)) | None => (),
                }
            }
            MouseState::Drag(drag_state) => match drag_state {
                DragState::Select { rect, .. } => {
                    ctx.fill(rect, &env.get(theme::SELECTION_RECT_FILL_COLOR));
                    ctx.stroke(rect, &selection_stroke, 1.0);
                }
                // draw the selection bounding box
                DragState::TransformSelection { pre_paths, .. } => {
                    // The pre-transform outline is drawn faintly until the
                    // gesture completes.
                    ctx.stroke(
                        data.viewport.affine() * pre_paths,
                        &env.get(theme::PLACEHOLDER_GLYPH_COLOR),
                        1.0,
                    );
                    let bbox = data.viewport.rect_to_screen(data.selection_dpoint_bbox());
                    let style = StrokeStyle::new().dash(vec![2.0, 4.0], 0.0);
                    ctx.stroke_styled(&bbox, &selection_stroke, 0.5, &style);
                    for (_loc, circle) in iter_handle_circles(data) {
                        //FIXME: we don't fill while dragging because we would
                        //fill the wrong handle when scale goes negative. Lots of
                        //ways to be fancy here, but for now we can just leave it.
                        //if loc == *quadrant {
                        //ctx.fill(circle, &selection_stroke);
                        //}
                        ctx.stroke(circle, &selection_stroke, 0.5);
                    }
                }
                _ => (),
            },
            _ => (),
        }
    }

    /// Handles arrow-key nudges, deletion, and tab-based selection cycling.
    fn key_down(
        &mut self,
        event: &KeyEvent,
        _ctx: &mut EventCtx,
        data: &mut EditSession,
        _: &Env,
    ) -> Option<EditType> {
        // Invariant: `this_edit_type` is only set while handling an event.
        assert!(self.this_edit_type.is_none());
        match event {
            e if e.key == KbKey::ArrowLeft
                || e.key == KbKey::ArrowDown
                || e.key == KbKey::ArrowUp
                || e.key == KbKey::ArrowRight =>
            {
                self.nudge(data, event);
            }
            e if e.key == KbKey::Backspace => {
                data.delete_selection();
                self.this_edit_type = Some(EditType::Normal);
            }
            e if HotKey::new(None, KbKey::Tab).matches(e) => data.select_next(),
            //TODO: add Shift to SysMods
            e if HotKey::new(RawMods::Shift, KbKey::Tab).matches(e) => data.select_prev(),
            _ => return None,
        }
        self.this_edit_type.take()
    }

    /// Forwards mouse events to the delegate machinery and requests a
    /// repaint whenever the rubber-band selection rect changes.
    fn mouse_event(
        &mut self,
        event: TaggedEvent,
        mouse: &mut Mouse,
        ctx: &mut EventCtx,
        data: &mut EditSession,
        _: &Env,
    ) -> Option<EditType> {
        assert!(self.this_edit_type.is_none());
        let pre_rect = self.state.drag_rect();
        mouse.mouse_event(event, data, self);
        if !pre_rect.same(&self.state.drag_rect()) {
            ctx.request_paint();
        }
        self.this_edit_type.take()
    }

    fn name(&self) -> ToolId {
        "Select"
    }
}
impl Select {
    /// Moves the selection one unit in the direction of the arrow key in
    /// `event`; meta multiplies the nudge by 100, shift by 10. Only
    /// single-unit nudges keep their specific edit type (so undo can
    /// coalesce them); larger nudges are recorded as `Normal`.
    fn nudge(&mut self, data: &mut EditSession, event: &KeyEvent) {
        let (mut nudge, edit_type) = match event.key {
            KbKey::ArrowLeft => (Vec2::new(-1.0, 0.), EditType::NudgeLeft),
            KbKey::ArrowRight => (Vec2::new(1.0, 0.), EditType::NudgeRight),
            KbKey::ArrowUp => (Vec2::new(0.0, 1.0), EditType::NudgeUp),
            KbKey::ArrowDown => (Vec2::new(0.0, -1.0), EditType::NudgeDown),
            // `nudge` is only called for arrow keys (see `key_down`).
            _ => unreachable!(),
        };
        if event.mods.meta() {
            nudge *= 100.;
        } else if event.mods.shift() {
            nudge *= 10.;
        }
        data.nudge_selection(DVec2::from_raw(nudge));
        // for the purposes of undo, we only combine single-unit nudges
        if nudge.hypot().abs() > 1.0 {
            self.this_edit_type = Some(EditType::Normal);
        } else {
            self.this_edit_type = Some(edit_type);
        }
    }

    /// Returns the quadrant of the selection-bbox handle nearest `pos`,
    /// if it is within the handle radius. Selections of one point or
    /// fewer have no bbox handles, so `None` is returned for them.
    fn selection_handle_hit(&self, data: &EditSession, pos: Point) -> Option<Quadrant> {
        if data.selection.len() <= 1 {
            return None;
        }
        // Fold to the closest handle and its distance from `pos`.
        let (handle, handle_dist) = iter_handle_circles(data)
            .map(|(loc, circ)| (loc, circ.center.distance(pos)))
            .fold(
                (Quadrant::Center, f64::MAX),
                |(best_loc, closest), (this_loc, this_dist)| {
                    let best_loc = if this_dist < closest {
                        this_loc
                    } else {
                        best_loc
                    };
                    (best_loc, this_dist.min(closest))
                },
            );
        if handle_dist <= SELECTION_HANDLE_RADIUS {
            Some(handle)
        } else {
            None
        }
    }

    /// Finds the item under `pos`, checking selection handles first, then
    /// points/guides, then segments.
    // NOTE(review): "mos" is presumably a typo for "mouse"; renaming would
    // touch every call site, so the name is left as-is here.
    fn hover_item_for_mos_pos(&self, data: &EditSession, pos: Point) -> Option<Item> {
        if let Some(quadrant) = self.selection_handle_hit(data, pos) {
            return Some(Item::SelectionHandle(quadrant));
        }
        if let Some(id) = data.hit_test_all(pos, None) {
            if id.is_guide() {
                Some(Item::Guide(id))
            } else {
                Some(Item::Point(id))
            }
        } else if let Some((seg, _t)) =
            data.hit_test_segments(pos, Some(crate::edit_session::SEGMENT_CLICK_DISTANCE))
        {
            Some(Item::Segment(seg.into()))
        } else {
            None
        }
    }
}
impl MouseDelegate<EditSession> for Select {
    /// Tracks the hovered item (for hover indicators in `paint`) and the
    /// last known cursor position.
    fn mouse_moved(&mut self, event: &MouseEvent, data: &mut EditSession) {
        let hover_item = self.hover_item_for_mos_pos(data, event.pos);
        self.state = MouseState::Idle(hover_item);
        self.last_pos = event.pos;
    }

    /// Handles single and double clicks: updates the selection for the
    /// clicked item and chooses the next mouse state.
    fn left_down(&mut self, event: &MouseEvent, data: &mut EditSession) {
        if event.pos != self.last_pos {
            log::info!(
                "left_down pos != mouse_move pos: {:.2}/{:.2}",
                event.pos,
                self.last_pos
            );
        }
        // Shift-click appends to / toggles membership in the selection.
        let append_mode = event.mods.shift();
        if event.count == 1 {
            let item = match self.state.transition() {
                MouseState::Idle(item) => item,
                MouseState::WaitDoubleClick(item) => Some(item),
                _ => None,
            };
            self.state = match item {
                Some(Item::SelectionHandle(_)) => MouseState::Down(item),
                Some(Item::Point(id)) | Some(Item::Guide(id)) => {
                    if !append_mode {
                        // Plain click: select just this item (unless it is
                        // already part of the selection).
                        if !data.selection.contains(&id) {
                            data.selection.select_one(id);
                        }
                    } else if !data.selection.remove(&id) {
                        data.selection.insert(id);
                    }
                    MouseState::Down(item)
                }
                // toggle segment type
                Some(Item::Segment(seg)) if event.mods.alt() => {
                    if seg.is_line() {
                        if let Some(path) = data.path_for_point_mut(seg.start_id()) {
                            path.upgrade_line_seg(&seg, false);
                            self.this_edit_type = Some(EditType::Normal);
                        }
                    }
                    MouseState::SuppressDrag
                }
                Some(Item::Segment(seg)) => {
                    let all_selected = seg
                        .raw_segment()
                        .iter_ids()
                        .all(|id| data.selection.contains(&id));
                    if !append_mode {
                        data.selection.clear();
                        data.selection.extend(seg.raw_segment().iter_ids());
                        MouseState::Down(Some(Item::Segment(seg)))
                    } else if all_selected {
                        // Shift-clicking a fully selected segment deselects it.
                        for id in seg.raw_segment().iter_ids() {
                            data.selection.remove(&id);
                        }
                        MouseState::SuppressDrag
                    } else {
                        data.selection.extend(seg.raw_segment().iter_ids());
                        MouseState::Down(Some(Item::Segment(seg)))
                    }
                }
                None => MouseState::Down(None),
            };
        } else if event.count == 2 {
            self.state = match self.state.transition() {
                MouseState::WaitDoubleClick(item) => {
                    match &item {
                        Item::Point(id) => {
                            data.toggle_point_type(*id);
                            self.this_edit_type = Some(EditType::Normal);
                        }
                        Item::Guide(id) => {
                            data.toggle_guide(*id, event.pos);
                            self.this_edit_type = Some(EditType::Normal);
                        }
                        Item::Segment(seg) => {
                            // Double-clicking a segment selects its whole path.
                            data.select_path(seg.start_id().parent(), append_mode);
                        }
                        Item::SelectionHandle(_) => (),
                    };
                    MouseState::WaitDoubleClick(item)
                }
                other => {
                    log::debug!("double-click mouse state: {:?}", other);
                    MouseState::SuppressDrag
                }
            }
        }
    }

    /// Resolves a click on release: arms double-click handling for a
    /// clicked item, or clears the selection for a click on empty space.
    fn left_up(&mut self, event: &MouseEvent, data: &mut EditSession) {
        self.state = match self.state.transition() {
            MouseState::Down(Some(Item::SelectionHandle(handle))) => {
                MouseState::Idle(Some(Item::SelectionHandle(handle)))
            }
            MouseState::Down(Some(item)) => MouseState::WaitDoubleClick(item),
            MouseState::Down(None) => {
                data.selection.clear();
                MouseState::Idle(self.hover_item_for_mos_pos(data, event.pos))
            }
            _ => MouseState::Idle(self.hover_item_for_mos_pos(data, event.pos)),
        }
    }

    /// Chooses the drag kind based on what was under the initial click:
    /// empty space → rubber-band select, a bbox handle → transform,
    /// any other item → move.
    fn left_drag_began(&mut self, drag: Drag, data: &mut EditSession) {
        self.state = match self.state.transition() {
            // starting a rectangular selection
            MouseState::Down(None) => MouseState::Drag(DragState::Select {
                previous: data.selection.clone(),
                rect: Rect::from_points(drag.start.pos, drag.current.pos),
                toggle: drag.current.mods.shift(),
            }),
            MouseState::Down(Some(Item::SelectionHandle(handle))) => {
                MouseState::Drag(DragState::TransformSelection {
                    quadrant: handle,
                    previous: data.clone(),
                    delta: DVec2::ZERO,
                    pre_paths: data.to_bezier(),
                })
            }
            MouseState::Down(Some(_)) => MouseState::Drag(DragState::Move {
                previous: data.clone(),
                delta: DVec2::ZERO,
            }),
            MouseState::SuppressDrag => MouseState::SuppressDrag,
            other => {
                log::debug!("unexpected drag_began state: {:?}", other);
                MouseState::SuppressDrag
            }
        };
    }

    /// Updates the current drag: grows the rubber-band rect, nudges the
    /// moved selection, or re-applies the scale transform.
    fn left_drag_changed(&mut self, drag: Drag, data: &mut EditSession) {
        self.last_pos = drag.current.pos;
        if let Some(state) = self.state.drag_state_mut() {
            match state {
                DragState::Select {
                    previous,
                    rect,
                    toggle,
                } => {
                    *rect = Rect::from_points(drag.current.pos, drag.start.pos);
                    update_selection_for_drag(data, previous, *rect, *toggle);
                }
                DragState::Move { delta, .. } => {
                    let mut new_delta = delta_for_drag_change(&drag, data.viewport);
                    // Shift constrains movement to a single axis.
                    if drag.current.mods.shift() {
                        new_delta = new_delta.axis_locked();
                    }
                    // Only nudge by the design-space change since the last event.
                    let drag_delta = new_delta - *delta;
                    if drag_delta.hypot() > 0. {
                        data.nudge_selection(drag_delta);
                        *delta = new_delta;
                    }
                }
                DragState::TransformSelection {
                    quadrant,
                    previous,
                    delta,
                    ..
                } => {
                    let new_delta = delta_for_drag_change(&drag, data.viewport);
                    let new_delta = quadrant.lock_delta(new_delta);
                    if new_delta.hypot() > 0.0 && new_delta != *delta {
                        *delta = new_delta;
                        // Rebuild from the pre-drag session so the scale is
                        // always applied to the original geometry.
                        let mut new_data = previous.clone();
                        let sel_rect = previous.selection_dpoint_bbox();
                        let scale = quadrant.scale_dspace_rect(sel_rect, new_delta);
                        let anchor = quadrant.inverse().point_in_dspace_rect(sel_rect);
                        new_data.scale_selection(scale, DPoint::from_raw(anchor));
                        *data = new_data;
                    }
                }
            }
            if matches!(
                state,
                DragState::Move { .. } | DragState::TransformSelection { .. }
            ) {
                self.this_edit_type = Some(EditType::Drag);
            }
        } else {
            log::debug!("unexpected state in drag_changed: {:?}", self.state);
        }
    }

    /// Marks the end of a move/transform drag so undo gets a `DragUp` edit.
    fn left_drag_ended(&mut self, _drag: Drag, _data: &mut EditSession) {
        if let MouseState::Drag(state) = &self.state {
            if matches!(
                state,
                DragState::Move { .. } | DragState::TransformSelection { .. }
            ) {
                self.this_edit_type = Some(EditType::DragUp);
            }
        }
    }

    //FIXME: this is never actually called? :thinking:
    /// Restores the pre-drag selection or session, undoing the gesture.
    fn cancel(&mut self, data: &mut EditSession) {
        let old_state = std::mem::replace(&mut self.state, MouseState::Idle(None));
        if let MouseState::Drag(state) = old_state {
            match state {
                DragState::Select { previous, .. } => data.selection = previous,
                DragState::Move { previous, .. }
                | DragState::TransformSelection { previous, .. } => {
                    *data = previous;
                    // we use 'Drag' and not 'DragUp' because we want this all to combine
                    // with the previous undo group, and be a no-op?
                    self.this_edit_type = Some(EditType::Drag);
                }
            }
        }
    }
}
/// When dragging, we only update positions when they change in design-space,
/// so we keep track of the current total design-space delta.
fn delta_for_drag_change(drag: &Drag, viewport: ViewPort) -> DVec2 {
    viewport.from_screen(drag.current.pos) - viewport.from_screen(drag.start.pos)
}
/// Yields the screen-space circle for each active selection-bbox handle,
/// skipping the center and any handle on an axis the bbox has collapsed
/// to zero size.
fn iter_handle_circles(session: &EditSession) -> impl Iterator<Item = (Quadrant, Circle)> {
    let bbox = session
        .viewport
        .rect_to_screen(session.selection_dpoint_bbox());
    let frame = bbox + SELECTION_BBOX_HANDLE_PADDING;
    let zero_width = bbox.width() == 0.;
    let zero_height = bbox.height() == 0.;
    Quadrant::all()
        .iter()
        .filter(move |quad| {
            if matches!(quad, Quadrant::Center) {
                return false;
            }
            // Suppress handles that would act on a collapsed axis.
            !(zero_width && quad.modifies_x_axis()) && !(zero_height && quad.modifies_y_axis())
        })
        .map(move |quad| {
            let circle = Circle::new(quad.point_in_rect(frame), SELECTION_HANDLE_RADIUS);
            (*quad, circle)
        })
}
/// Recomputes the selection during a rubber-band drag: points inside
/// `rect` become the selection, or — when `toggle` is set — are XOR-ed
/// with the selection as it was when the drag began.
fn update_selection_for_drag(
    data: &mut EditSession,
    prev_sel: &Selection,
    rect: Rect,
    //corresponds to shift being held
    toggle: bool,
) {
    let inside = data
        .iter_points()
        .filter(|p| rect.contains(p.to_screen(data.viewport)))
        .map(|p| p.id)
        .collect();
    data.selection = match toggle {
        true => prev_sel.symmetric_difference(&inside),
        false => inside,
    };
}
impl MouseState {
    /// Swaps in the `Transition` marker and hands back the prior state.
    fn transition(&mut self) -> Self {
        std::mem::replace(self, MouseState::Transition)
    }

    /// A mutable view of the drag state, when a drag is in progress.
    fn drag_state_mut(&mut self) -> Option<&mut DragState> {
        match self {
            MouseState::Drag(state) => Some(state),
            _ => None,
        }
    }

    /// The rubber-band selection rect, when one is being dragged.
    fn drag_rect(&self) -> Option<Rect> {
        match self {
            MouseState::Drag(state) => state.drag_rect(),
            _ => None,
        }
    }
}
impl Default for MouseState {
fn default() -> Self {
MouseState::Idle(None)
}
}
impl DragState {
    /// The selection rectangle, when this is a rubber-band select drag.
    fn drag_rect(&self) -> Option<Rect> {
        match self {
            DragState::Select { rect, .. } => Some(*rect),
            _ => None,
        }
    }
}
/// Paints the dashed bounding box and handles around the current selection
/// (only when more than one point is selected). `hot_quad`, when given,
/// names the handle under the mouse, which is drawn filled.
fn paint_selection_bbox(
    ctx: &mut PaintCtx,
    data: &EditSession,
    env: &Env,
    hot_quad: Option<Quadrant>,
) {
    if data.selection.len() > 1 {
        let selection_stroke = env.get(theme::SELECTION_RECT_STROKE_COLOR);
        let bbox = data.viewport.rect_to_screen(data.selection_dpoint_bbox());
        let style = StrokeStyle::new().dash(vec![2.0, 4.0], 0.0);
        ctx.stroke_styled(&bbox, &selection_stroke, 0.5, &style);
        for (quad, circle) in iter_handle_circles(data) {
            if Some(quad) == hot_quad {
                ctx.fill(circle, &selection_stroke);
            }
            ctx.stroke(circle, &selection_stroke, 0.5);
        }
    }
}
const HOVER_ACCENT_COLOR: druid::Color = druid::Color::rgba8(0, 0, 0, 0x58);
/// Draws a small filled circle at `point` to indicate the hovered item.
/// (Call sites pass screen-space points, e.g. via `viewport.to_screen`.)
fn paint_hover_indicator(ctx: &mut PaintCtx, _: &EditSession, point: Point, _env: &Env) {
    ctx.fill(Circle::new(point, 3.0), &HOVER_ACCENT_COLOR);
}
|
use std::{
collections::HashSet,
marker::PhantomData,
};
use crate::{
pack::*,
class::*,
shape::*,
};
/// A shape stub for tests: it carries no geometry, emits no source, and
/// packs no data. `PhantomData<T>` ties the stub to `T` without storing
/// a value.
#[derive(Clone, Debug, Default)]
pub struct TestShape<T: 'static> {
    phantom: PhantomData<T>,
}
impl<T> TestShape<T> {
pub fn new() -> Self {
Self { phantom: PhantomData }
}
}
// Marker implementation: the stub has no shape-specific behavior.
impl<T> Shape for TestShape<T> {}
impl<T> Instance<ShapeClass> for TestShape<T> {
    // The stub contributes no generated source text.
    fn source(_: &mut HashSet<u64>) -> String { String::new() }
    fn inst_name() -> String { "test_shape".to_string() }
}
impl<T> Pack for TestShape<T> {
    // The stub occupies no slots in either buffer and writes nothing.
    fn size_int() -> usize { 0 }
    fn size_float() -> usize { 0 }
    fn pack_to(&self, _buffer_int: &mut [i32], _buffer_float: &mut [f32]) {}
}
|
// NOTE(review): this looks like machine-generated (svd2rust-style) register
// API code — edits here may be overwritten by regeneration.
#[doc = "Reader of register PSSI"]
pub type R = crate::R<u32, super::PSSI>;
#[doc = "Writer for register PSSI"]
pub type W = crate::W<u32, super::PSSI>;
#[doc = "Register PSSI `reset()`'s with value 0"]
impl crate::ResetValue for super::PSSI {
    type Type = u32;
    // The register's value after reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
/// Reader of field `SS0`
pub type SS0_R = crate::R<bool, bool>;
/// Write proxy for field `SS0`
pub struct SS0_W<'a> {
    w: &'a mut W,
}
impl<'a> SS0_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SS0 occupies bit 0 of PSSI.
        const OFFSET: u32 = 0;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
}
/// Reader of field `SS1`
pub type SS1_R = crate::R<bool, bool>;
/// Write proxy for field `SS1`
pub struct SS1_W<'a> {
    w: &'a mut W,
}
impl<'a> SS1_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SS1 occupies bit 1 of PSSI.
        const OFFSET: u32 = 1;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
}
/// Reader of field `SS2`
pub type SS2_R = crate::R<bool, bool>;
/// Write proxy for field `SS2`
pub struct SS2_W<'a> {
    w: &'a mut W,
}
impl<'a> SS2_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SS2 occupies bit 2 of PSSI.
        const OFFSET: u32 = 2;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
}
/// Reader of field `SS3`
pub type SS3_R = crate::R<bool, bool>;
/// Write proxy for field `SS3`
pub struct SS3_W<'a> {
    w: &'a mut W,
}
impl<'a> SS3_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SS3 occupies bit 3 of PSSI.
        const OFFSET: u32 = 3;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
}
/// Reader of field `SYNCWAIT`
pub type SYNCWAIT_R = crate::R<bool, bool>;
/// Write proxy for field `SYNCWAIT`
pub struct SYNCWAIT_W<'a> {
    w: &'a mut W,
}
impl<'a> SYNCWAIT_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SYNCWAIT occupies bit 27 of PSSI.
        const OFFSET: u32 = 27;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
}
/// Reader of field `GSYNC`
pub type GSYNC_R = crate::R<bool, bool>;
/// Write proxy for field `GSYNC`
pub struct GSYNC_W<'a> {
    w: &'a mut W,
}
impl<'a> GSYNC_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // GSYNC occupies bit 31 of PSSI.
        const OFFSET: u32 = 31;
        self.w.bits = (self.w.bits & !(0x01 << OFFSET)) | (((value as u32) & 0x01) << OFFSET);
        self.w
    }
}
impl R {
    /// Bit 0 - SS0 Initiate
    #[inline(always)]
    pub fn ss0(&self) -> SS0_R {
        SS0_R::new((self.bits & (1 << 0)) != 0)
    }
    /// Bit 1 - SS1 Initiate
    #[inline(always)]
    pub fn ss1(&self) -> SS1_R {
        SS1_R::new((self.bits & (1 << 1)) != 0)
    }
    /// Bit 2 - SS2 Initiate
    #[inline(always)]
    pub fn ss2(&self) -> SS2_R {
        SS2_R::new((self.bits & (1 << 2)) != 0)
    }
    /// Bit 3 - SS3 Initiate
    #[inline(always)]
    pub fn ss3(&self) -> SS3_R {
        SS3_R::new((self.bits & (1 << 3)) != 0)
    }
    /// Bit 27 - Synchronize Wait
    #[inline(always)]
    pub fn syncwait(&self) -> SYNCWAIT_R {
        SYNCWAIT_R::new((self.bits & (1 << 27)) != 0)
    }
    /// Bit 31 - Global Synchronize
    #[inline(always)]
    pub fn gsync(&self) -> GSYNC_R {
        GSYNC_R::new((self.bits & (1 << 31)) != 0)
    }
}
impl W {
    /// Bit 0 - SS0 Initiate
    #[inline(always)]
    pub fn ss0(&mut self) -> SS0_W {
        SS0_W { w: self }
    }
    /// Bit 1 - SS1 Initiate
    #[inline(always)]
    pub fn ss1(&mut self) -> SS1_W {
        SS1_W { w: self }
    }
    /// Bit 2 - SS2 Initiate
    #[inline(always)]
    pub fn ss2(&mut self) -> SS2_W {
        SS2_W { w: self }
    }
    /// Bit 3 - SS3 Initiate
    #[inline(always)]
    pub fn ss3(&mut self) -> SS3_W {
        SS3_W { w: self }
    }
    /// Bit 27 - Synchronize Wait
    #[inline(always)]
    pub fn syncwait(&mut self) -> SYNCWAIT_W {
        SYNCWAIT_W { w: self }
    }
    /// Bit 31 - Global Synchronize
    #[inline(always)]
    pub fn gsync(&mut self) -> GSYNC_W {
        GSYNC_W { w: self }
    }
}
|
mod batch_id;
mod block_height;
mod block_time_height;
mod block_timestamp;
mod config;
mod contract_balances;
pub mod contract_state;
mod epoch_height;
mod gas;
mod lock;
mod redeem_stake_batch;
mod redeem_stake_batch_receipt;
mod stake_account;
mod stake_batch;
mod stake_batch_receipt;
mod stake_token_value;
mod storage_usage;
mod timestamped_near_balance;
mod timestamped_stake_balance;
mod yocto_near;
mod yocto_stake;
pub use batch_id::*;
pub use block_height::*;
pub use block_time_height::*;
pub use block_timestamp::*;
pub use config::*;
pub use contract_balances::*;
pub use epoch_height::*;
pub use gas::*;
pub use redeem_stake_batch::RedeemStakeBatch;
pub use redeem_stake_batch_receipt::RedeemStakeBatchReceipt;
pub use stake_account::StakeAccount;
pub use stake_batch::StakeBatch;
pub use stake_batch_receipt::StakeBatchReceipt;
pub use stake_token_value::StakeTokenValue;
pub use storage_usage::*;
pub use timestamped_near_balance::TimestampedNearBalance;
pub use timestamped_stake_balance::TimestampedStakeBalance;
pub use yocto_near::*;
pub use yocto_stake::*;
|
use blisp::embedded;
use num_bigint::{BigInt, ToBigInt};
#[embedded]
fn test_fun(
    _z: BigInt,
    _a: Vec<BigInt>,
    _b: (BigInt, BigInt),
    _c: Option<BigInt>,
    _d: Result<BigInt, String>,
) -> Option<BigInt> {
    // Every argument is ignored; the function always evaluates to Some(5).
    5.to_bigint()
}
#[embedded]
fn add_four_ints(a: BigInt, b: (BigInt, BigInt), c: Option<BigInt>) -> Result<BigInt, String> {
    // Sum the scalar, both tuple elements, and the optional fourth addend.
    let (b0, b1) = b;
    let base = a + b0 + b1;
    Ok(match c {
        Some(n) => base + n,
        None => base,
    })
}
// Embedded function with no parameters and no return value (maps to `[]`).
#[embedded]
fn no_return() {}
#[test]
fn test_embedded() {
    // test_fun: exercised with every supported argument kind (int, list,
    // tuple, Option, Result); always evaluates to (Some 5).
    let code = "(export call_test_fun ()
    (IO (-> () (Option Int)))
    (test_fun 1 '(2 3) [4 5] (Some 6) (Ok 7))
)";
    let exprs = blisp::init(code, vec![Box::new(TestFun)]).unwrap();
    let ctx = blisp::typing(exprs).unwrap();
    let result = blisp::eval("(call_test_fun)", &ctx).unwrap();
    let front = result.front().unwrap().as_ref().unwrap();
    assert_eq!(front, "(Some 5)");
    // add_four_ints: 1 + 2 + 3 + 4 = 10.
    let code = "(export call_add_four_ints (n)
    (IO (-> ((Option Int)) (Result Int String)))
    (add_four_ints 1 [2 3] n)
)";
    let exprs = blisp::init(code, vec![Box::new(AddFourInts)]).unwrap();
    let ctx = blisp::typing(exprs).unwrap();
    let result = blisp::eval("(call_add_four_ints (Some 4))", &ctx).unwrap();
    let front = result.front().unwrap().as_ref().unwrap();
    assert_eq!(front, "(Ok 10)");
    // no_return: just checks that a unit-returning function evaluates cleanly.
    let code = "(export call_no_return ()
    (IO (-> () []))
    (no_return)
)";
    let exprs = blisp::init(code, vec![Box::new(NoReturn)]).unwrap();
    let ctx = blisp::typing(exprs).unwrap();
    let result = blisp::eval("(call_no_return)", &ctx).unwrap();
    result.front().unwrap().as_ref().unwrap();
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::semaphore;
//use super::shm;
use super::super::qlib::auth::userns::*;
/// An IPC namespace: scopes System V IPC objects to a user namespace.
#[derive(Clone, Default)]
pub struct IPCNamespace {
    pub userNS: UserNameSpace,
    // NOTE(review): field name is misspelled ("semphores" -> "semaphores"),
    // but renaming this public field would break external users.
    pub semphores: semaphore::Registry,
    //pub shms: shm::Registry,
}
impl IPCNamespace {
    /// Creates an IPC namespace owned by the given user namespace.
    pub fn New(userNS: &UserNameSpace) -> Self {
        return Self {
            userNS: userNS.clone(),
            semphores: semaphore::Registry::New(userNS),
            //shms: shm::Registry::New(userNS)
        }
    }
    /// Returns a clone of this namespace's semaphore registry handle.
    pub fn SemaphoreRegistry(&self) -> semaphore::Registry {
        return self.semphores.clone()
    }
    /*pub fn ShmRegistry(&self) -> shm::Registry {
        return self.shms.clone()
    }*/
} |
extern crate rand;
use super::{data, Color, RngExt};
/// A description of a treatment for a shield.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ShieldIconTreatment {
    /// A single, solid shield color, aka no treatment.
    SingleColor,
    /// A treatment that results in a two-color shield pattern, by applying
    /// another color at an angle.
    TwoColor {
        /// The color of the pattern.
        pattern_color: Color,
        /// The treatment's angle.
        angle: u16,
    },
    /// A treatment that results in a two-color striped shield pattern.
    Stripes {
        /// The color of the stripes we are adding.
        pattern_color: Color,
        /// The stripes' stride.
        stride: f32,
        /// X coordinates for the stripes.
        stripe_xs: Vec<f32>,
        /// Angle of the stripes.
        angle: u16,
    },
}
/// A description of a shield icon.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ShieldIconData {
    // How the shield field is patterned (solid, two-color, or striped).
    treatment: ShieldIconTreatment,
    // Base color of the shield.
    field_color: Color,
    // Emoji drawn on top of the shield.
    emoji: char,
}
impl ShieldIconData {
    /// A blank icon: untreated black field with a blank (space) emoji.
    fn empty() -> Self {
        Self {
            emoji: ' ',
            field_color: Color::black(),
            treatment: ShieldIconTreatment::SingleColor,
        }
    }
}
impl rand::Rand for ShieldIconData {
    // Generates a random icon from `rng`.
    //
    // WARNING: the exact sequence of RNG draws below is load-bearing — the
    // seeded tests in this file pin icon output per seed. Do not reorder or
    // remove any `rng` call.
    fn rand<R: rand::Rng>(rng: &mut R) -> Self {
        let mut rv = ShieldIconData::empty();
        // Angles are restricted to multiples of 45 degrees (0..=315).
        let angle_choices: Vec<u16> = (0..8).map(|a| a * 45).collect();
        rv.field_color = *rng.choose(&data::COLORS).unwrap();
        // Only colors that contrast well with the field are pattern candidates.
        let contrasting_colors: Vec<Color> = data::COLORS.iter()
            .filter(|c| rv.field_color.contrasts_well(c))
            .map(|c| *c)
            .collect();
        rv.emoji = *rng.choose(&data::EMOJIS).unwrap();
        // Drawn unconditionally (even for SingleColor) to keep the RNG
        // sequence stable across treatment choices.
        let pattern_color = *rng.choose(&contrasting_colors).unwrap();
        // Weighted pick: Stripes most likely, then TwoColor, then SingleColor.
        let treatment_name = rng.weighted_choice(vec![
            ("SingleColor", 1),
            ("TwoColor", 4),
            ("Stripes", 6),
        ]);
        match treatment_name {
            "SingleColor" => (),
            "TwoColor" => {
                let angle = *rng.choose(&angle_choices).unwrap();
                rv.treatment = ShieldIconTreatment::TwoColor { angle, pattern_color };
            },
            "Stripes" => {
                // 1..=3 stripes, padded at both edges; stride splits the
                // remaining width into alternating stripe/gap bands.
                let count: u8 = rng.gen_range(1, 4);
                let padding = rng.gen_range(0.1, 0.4);
                let stride = (1.0 - 2.0 * padding) / (2.0 * count as f32 + 1.0);
                let stripe_xs: Vec<f32> = (0..count)
                    .map(|i| padding + stride * (2 * i + 1) as f32)
                    .collect();
                let angle = *rng.choose(&angle_choices).unwrap();
                rv.treatment = ShieldIconTreatment::Stripes { stride, stripe_xs, pattern_color, angle };
            },
            _ => panic!("Unexpected treatment name"),
        }
        rv
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use rand::{Rng, SeedableRng};
    /// Test that certain seeds always generate the same icon
    /// data. This is to make sure that icons don't change overtime,
    /// since they are supposed to always be the same for a particular
    /// hash.
    #[test]
    fn test_consistent_icons() {
        // Seed [1,2,3,4] must always yield this exact two-color bug icon.
        let mut rng = rand::XorShiftRng::from_seed([1, 2, 3, 4]);
        let expected = ShieldIconData {
            emoji: '🐛',
            field_color: Color { r: 164, g: 0, b: 15 },
            treatment: ShieldIconTreatment::TwoColor {
                pattern_color: Color { r: 0, g: 254, b: 255 },
                angle: 45,
            },
        };
        let actual = rng.gen();
        assert_eq!(expected, actual);
        // ----
        // Seed [42,42,42,42] must always yield this exact striped tiger icon.
        let expected = ShieldIconData {
            emoji: '🐅',
            field_color: Color { r: 68, g: 0, b: 113 },
            treatment: ShieldIconTreatment::Stripes {
                pattern_color: Color { r: 215, g: 110, b: 0 },
                stride: 0.10725436,
                stripe_xs: vec![0.2318641, 0.44637284, 0.6608815],
                angle: 45,
            },
        };
        let mut rng = rand::XorShiftRng::from_seed([42, 42, 42, 42]);
        let actual = rng.gen();
        assert_eq!(expected, actual);
    }
}
|
use nom::combinator::map_opt;
use nom::number::complete::be_u8;
use nom::IResult;
use num_traits::FromPrimitive;
// DHCPv6 message type codes as carried in the first byte of a message.
// NOTE(review): values 1..=13 match the IANA-assigned DHCPv6 message types —
// confirm against RFC 8415 before extending.
#[derive(Debug, Clone, Eq, PartialEq, Primitive)]
#[repr(u8)]
pub enum DHCPv6MessageType {
    Solicit = 1,
    Advertise = 2,
    Request = 3,
    Confirm = 4,
    Renew = 5,
    Rebind = 6,
    Reply = 7,
    Release = 8,
    Decline = 9,
    Reconfigure = 10,
    InformationRequest = 11,
    RelayForw = 12,
    RelayRepl = 13,
}
/// Parses one byte into a `DHCPv6MessageType`; unknown codes fail the parse.
pub fn parse_dhcpv6_message_type(input: &[u8]) -> IResult<&[u8], DHCPv6MessageType> {
    map_opt(be_u8, |code| DHCPv6MessageType::from_u8(code))(input)
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_valid_dhcpv6_message_type() {
        // Every assigned code, 1 (Solicit) through 13 (RelayRepl), parses and
        // consumes its single input byte.
        let cases: [(u8, DHCPv6MessageType); 13] = [
            (1, DHCPv6MessageType::Solicit),
            (2, DHCPv6MessageType::Advertise),
            (3, DHCPv6MessageType::Request),
            (4, DHCPv6MessageType::Confirm),
            (5, DHCPv6MessageType::Renew),
            (6, DHCPv6MessageType::Rebind),
            (7, DHCPv6MessageType::Reply),
            (8, DHCPv6MessageType::Release),
            (9, DHCPv6MessageType::Decline),
            (10, DHCPv6MessageType::Reconfigure),
            (11, DHCPv6MessageType::InformationRequest),
            (12, DHCPv6MessageType::RelayForw),
            (13, DHCPv6MessageType::RelayRepl),
        ];
        for (byte, expected) in cases.iter() {
            assert_eq!(
                parse_dhcpv6_message_type(&[*byte][..]),
                Ok((&b""[..], expected.clone()))
            );
        }
    }
    #[test]
    fn test_invalid_dhcpv6_message_type() {
        // 0 and 14 sit just outside the assigned range and must be rejected.
        assert!(parse_dhcpv6_message_type(&[0u8][..]).is_err());
        assert!(parse_dhcpv6_message_type(&[14u8][..]).is_err());
    }
}
|
use yew::prelude::*;
use yew_functional::function_component;
// Profile card component: avatar, display names, and a short note.
#[function_component(User)]
pub fn user() -> Html {
    html! {
        <>
            <div class="clearfix card-user">
                <div class="card-avatar-container">
                    <img class="card-avatar" width="260" height="260" src="https://yuchanns.xyz/yuchanns.jpg" alt="yuchanns" />
                </div>
                <div class="card-name-container">
                    <h1 class="card-names">
                        <span class="card-name">{"科学搜查官"}</span>
                        <span class="card-nickname">{"yuchanns"}</span>
                    </h1>
                </div>
            </div>
            <div class="card-note">
                <div>{"magnum opus. Gopher / Rustacean"}</div>
            </div>
        </>
    }
}
|
use crate::graphics::Format;
use crate::graphics::SampleCount;
// What to do with an attachment's contents when a render pass begins.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LoadOp {
    Load,
    Clear,
    DontCare
}
// What to do with an attachment's contents when a render pass ends.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum StoreOp {
    Store,
    DontCare
}
/// The layout/usage state an image is in at a given point in the frame.
///
/// Derives are brought in line with the neighboring enums (`LoadOp`,
/// `StoreOp`, `RenderpassRecordingMode`): `Debug` for diagnostics, plus
/// `Eq`/`Hash` so layouts can be used as map keys. Purely additive, so
/// existing callers are unaffected.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ImageLayout {
    Undefined,
    Common,
    RenderTarget,
    DepthWrite,
    DepthRead,
    ShaderResource,
    CopySrcOptimal,
    CopyDstOptimal,
    Present
}
// Whether a render pass records commands inline or via secondary command
// buffers.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum RenderpassRecordingMode {
    Commands,
    CommandBuffers
}
// Format/sample-count pair describing one render-pass attachment.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct AttachmentInfo {
    pub format: Format,
    pub samples: SampleCount,
}
// One subpass: which attachments it reads, writes, and depth-tests against.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SubpassInfo<'a> {
    pub input_attachments: &'a [AttachmentRef],
    pub output_color_attachments: &'a [OutputAttachmentRef],
    pub depth_stencil_attachment: Option<DepthStencilAttachmentRef>
}
// Depth/stencil attachment reference; `read_only` disables depth writes.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct DepthStencilAttachmentRef {
    pub index: u32,
    pub read_only: bool
}
// Color output reference with an optional MSAA resolve target.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct OutputAttachmentRef {
    pub index: u32,
    pub resolve_attachment_index: Option<u32>
}
bitflags! {
    // Which shader stages of a subpass access an input attachment.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub struct RenderPassPipelineStage: u32 {
        const VERTEX = 0b1;
        const FRAGMENT = 0b10;
        const BOTH = 0b11;
    }
}
// Input attachment reference plus the pipeline stages that read it.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct AttachmentRef {
    pub index: u32,
    pub pipeline_stage: RenderPassPipelineStage
}
// Full render-pass description: all attachments and the subpasses using them.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct RenderPassInfo<'a> {
    pub attachments: &'a [AttachmentInfo],
    pub subpasses: &'a [SubpassInfo<'a>]
}
|
#[macro_use]
extern crate log;
mod config;
mod connection_handler;
mod request_handler;
mod server;
pub use self::config::Config;
pub use self::request_handler::RequestHandler;
pub use self::server::Server;
|
use super::*;
/// A single play: the card thrown, which player threw it, and how many cards
/// that player has left in hand afterwards.
pub struct Jugada {
    pub carta: mazo::Carta,
    pub numero_jugador: usize,
    pub cartas_restantes: usize,
}
/// Round summary: points earned per player, which player was suspended for the
/// next round (0 = nobody), and whether some player has run out of cards.
pub struct ResumenRonda {
    pub jugadores_puntos: Vec<(usize, f64)>,
    pub jugador_suspendido: usize,
    pub ultima_ronda: bool
}
// Initial state: spawns the player threads and deals out the cards.
pub fn iniciar_juego(log : &std::sync::Arc<std::sync::Mutex<std::fs::File>>, n_jugadores: usize) -> sinc::SincronizadorCoordinador {
    let mut jugadores = vec![];
    let mut jugadores_channels_sender = vec![];
    let mut jugadores_channels_ronda = vec![];
    let mazo = mazo::nuevo();
    // One extra slot so the coordinator itself also waits on the barrier.
    let barrier = Arc::new(Barrier::new(n_jugadores + 1));
    let (pilon_central_sender, pilon_central_receiver) = channel::<Jugada>();
    // Integer division: leftover cards are simply not dealt.
    let cartas_por_jugador = mazo.cartas.len() / n_jugadores;
    // Spawn one thread per player (numbered starting at 1).
    for i in 1..n_jugadores + 1 {
        let (sender_jugador, receiver_jugador) = channel::<mazo::Carta>();
        let (sender_ronda, receiver_ronda) = channel::<bool>();
        jugadores_channels_sender.push(sender_jugador);
        jugadores_channels_ronda.push(sender_ronda);
        let sinc = sinc::SincronizadorJugador {
            cartas_receiver: receiver_jugador,
            pilon_central_cartas: pilon_central_sender.clone(),
            barrier: barrier.clone(),
            ronda_receiver: receiver_ronda
        };
        let log = Arc::clone(&log);
        jugadores.push( thread::spawn(move ||
            {
                jugador::jugador(&log, i, sinc, cartas_por_jugador);
            }
        ));
    }
    let mut rng = thread_rng();
    let mut cartas = mazo.cartas.clone();
    cartas.shuffle(&mut rng); // Shuffle the deck
    // Deal the shuffled cards round-robin across the player channels.
    for i in 0..(cartas_por_jugador * n_jugadores) {
        let carta = cartas[i].clone();
        jugadores_channels_sender[i % n_jugadores].send(carta).unwrap();
    }
    // Rendezvous with the player threads before the game starts.
    barrier.wait();
    return sinc::SincronizadorCoordinador {
        jugadores_handler: jugadores,
        pilon_central_cartas: pilon_central_receiver,
        jugadores_channels: jugadores_channels_sender,
        barrier: barrier,
        jugadores_ronda: jugadores_channels_ronda
    };
}
// Plays one round: randomly picks a normal vs. "rustica" round, collects the
// plays, and turns them into a per-player score summary.
pub fn iniciar_ronda(log : &std::sync::Arc<std::sync::Mutex<std::fs::File>>, sinc: &sinc::SincronizadorCoordinador, jugador_suspendido: usize) -> ResumenRonda {
    let jugadas;
    let puntos;
    // 0 means "nobody suspended"; only a rustic round suspends a player.
    let mut jugador_a_suspender: usize = 0;
    // Coin flip: > 0.5 plays a normal round, otherwise a rustic round.
    if sortear_ronda() > 0.5 {
        logger::log(&log, " -- Iniciando ronda normal --\n".to_string());
        jugadas = ronda_normal(&log, &sinc, jugador_suspendido);
        puntos = contabilizar_puntos(&jugadas);
    } else {
        logger::log(&log, "-- Iniciando ronda rustica --\n".to_string());
        jugadas = ronda_rustica(&log, &sinc, jugador_suspendido);
        let result = contabilizar_puntos_ronda_rustica(&jugadas);
        puntos = result.0;
        jugador_a_suspender = result.1;
        logger::log(&log, format!("Jugador {} suspendido\n", jugador_a_suspender));
    }
    let resumen = ResumenRonda {
        jugadores_puntos: puntos,
        jugador_suspendido: jugador_a_suspender,
        ultima_ronda: ultima_ronda(&jugadas)
    };
    return resumen;
}
// Sequential round: each non-suspended player is granted a turn, and the
// coordinator waits for that player's card before moving to the next one.
fn ronda_normal(log : &std::sync::Arc<std::sync::Mutex<std::fs::File>>, sinc: &sinc::SincronizadorCoordinador, jugador_suspendido: usize) -> Vec<Jugada> {
    let mut cartas_jugadores: Vec<Jugada> = vec![];
    for i in 0..sinc.jugadores_channels.len() {
        // Grant the player permission to play this turn (skip if suspended).
        // logger::log(&log, format!("Dandole permiso a {}\n", i + 1));
        if !(i+1 == jugador_suspendido) {
            sinc.jugadores_ronda[i].send(true).unwrap();
            // Receive the card that player just threw.
            let jugada = sinc.pilon_central_cartas.recv().unwrap();
            logger::log(&log, format!("Coordinador recibi: {} de {} del jugador {}\n", jugada.carta.numero, jugada.carta.palo, jugada.numero_jugador));
            cartas_jugadores.push(jugada);
        }
    }
    return cartas_jugadores;
}
// Concurrent ("rustic") round: all non-suspended players are released at once,
// and cards are collected in arrival order. That arrival order matters — the
// first/last entries drive the bonus/penalty in the rustic scoring.
fn ronda_rustica(log : &std::sync::Arc<std::sync::Mutex<std::fs::File>>, sinc: &sinc::SincronizadorCoordinador, jugador_suspendido: usize) -> Vec<Jugada>{
    let mut cartas_jugadores: Vec<Jugada> = vec![];
    for i in 0..sinc.jugadores_channels.len() {
        // Release every non-suspended player simultaneously.
        if !(i+1 == jugador_suspendido) {
            //logger::log(&log, format!("Dandole permiso a {}\n", i + 1));
            sinc.jugadores_ronda[i].send(true).unwrap();
        }
    }
    for i in 0..sinc.jugadores_channels.len() {
        // Collect one card per playing player, in whatever order they arrive.
        if !(i+1 == jugador_suspendido) {
            let jugada = sinc.pilon_central_cartas.recv().unwrap();
            logger::log(&log, format!("Coordinador recibi: {} de {} del jugador {}\n", jugada.carta.numero, jugada.carta.palo, jugada.numero_jugador));
            cartas_jugadores.push(jugada);
        }
    }
    return cartas_jugadores;
}
// Tells every player the game is over so their threads can exit.
pub fn terminar_juego(log : &std::sync::Arc<std::sync::Mutex<std::fs::File>>, sinc: &sinc::SincronizadorCoordinador) {
    for i in 0..sinc.jugadores_channels.len() {
        // `false` on the round channel signals "no more rounds".
        logger::log(&log, format!("Avisandole a {} que se termino el juego\n", i + 1));
        sinc.jugadores_ronda[i].send(false).unwrap();
    }
}
/// Draws a uniform random number in [0, 1) used to decide the round type.
fn sortear_ronda() -> f64 {
    thread_rng().gen_range(0., 1.0)
}
// Returns a vector of (player_number, points_won) tuples.
// The 10 points at stake are split evenly among every play that matches the
// highest card value of the round; ties share the pot. An empty round now
// yields no winners instead of panicking (the original `first().unwrap()`
// would crash on an empty `jugadas`).
fn contabilizar_puntos(jugadas: &Vec<Jugada>) -> Vec<(usize, f64)> {
    let puntos_a_repartir = 10.;
    // Highest card value on the table; bail out early when nobody played.
    let valor_maximo = match jugadas.iter().map(|j| j.carta.valor).max() {
        Some(v) => v,
        None => return Vec::new(),
    };
    // How many plays share that top value (they split the pot).
    let cantidad_ganadores =
        jugadas.iter().filter(|j| j.carta.valor == valor_maximo).count() as f64;
    // Winners are reported in play order, same as the original loop.
    jugadas
        .iter()
        .filter(|j| j.carta.valor == valor_maximo)
        .map(|j| (j.numero_jugador, puntos_a_repartir / cantidad_ganadores))
        .collect()
}
// Rustic-round scoring: normal scoring plus a +1 bonus for the first play and
// a -5 penalty for the last; the last player is also the one suspended next
// round (returned as the second tuple element).
fn contabilizar_puntos_ronda_rustica(jugadas: &Vec<Jugada>) -> (Vec<(usize, f64)>, usize) {
    const PUNTOS_POR_SALIR_PRIMERO: f64 = 1.0;
    const PUNTOS_POR_SALIR_ULTIMO: f64 = -5.0;
    let mut ganadores = contabilizar_puntos(&jugadas);
    let primer_jugador = jugadas.first().unwrap();
    let ultimo_jugador = jugadas.last().unwrap();
    // Credit the first player to have played; add a new entry if they weren't
    // already among the winners.
    let idx_primero = ganadores.iter().position(|j| j.0 == primer_jugador.numero_jugador );
    match idx_primero {
        Some(idx_primero) => ganadores[idx_primero].1 += PUNTOS_POR_SALIR_PRIMERO,
        None => ganadores.push((primer_jugador.numero_jugador, PUNTOS_POR_SALIR_PRIMERO))
    }
    // Penalize the last player to have played, same add-or-update logic.
    let idx_ultimo = ganadores.iter().position(|j| j.0 == ultimo_jugador.numero_jugador );
    match idx_ultimo {
        Some(idx_ultimo) => ganadores[idx_ultimo].1 += PUNTOS_POR_SALIR_ULTIMO,
        None => ganadores.push((ultimo_jugador.numero_jugador, PUNTOS_POR_SALIR_ULTIMO))
    }
    return (ganadores, ultimo_jugador.numero_jugador);
}
/// True when any player has just played their final card.
fn ultima_ronda(jugadas: &Vec<Jugada>) -> bool {
    jugadas.iter().any(|j| j.cartas_restantes == 0)
}
// Single highest card (valor 8): player 1 takes all 10 points.
#[test]
fn contabilizador_puntos_1() {
    let mut jugadas = Vec::new();
    jugadas.push(Jugada { carta: mazo::Carta { numero: "1".to_string(), palo: "picas".to_string(), valor: 8 }, numero_jugador: 1, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "2".to_string(), palo: "picas".to_string(), valor: 2 }, numero_jugador: 2, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "3".to_string(), palo: "picas".to_string(), valor: 3 }, numero_jugador: 3, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "4".to_string(), palo: "picas".to_string(), valor: 4 }, numero_jugador: 4, cartas_restantes: 0 });
    let resultado = contabilizar_puntos(&jugadas);
    assert!(resultado.contains(&(1, 10.)));
}
// Tied highest cards: players 1 and 2 split the pot, 5 points each.
#[test]
fn contabilizador_puntos_2() {
    let mut jugadas = Vec::new();
    jugadas.push(Jugada { carta: mazo::Carta { numero: "1".to_string(), palo: "picas".to_string(), valor: 8 }, numero_jugador: 1, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "2".to_string(), palo: "picas".to_string(), valor: 8 }, numero_jugador: 2, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "3".to_string(), palo: "picas".to_string(), valor: 3 }, numero_jugador: 3, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "4".to_string(), palo: "picas".to_string(), valor: 4 }, numero_jugador: 4, cartas_restantes: 0 });
    let resultado = contabilizar_puntos(&jugadas);
    assert!(resultado.contains(&(1, 5.)));
}
// Rustic round: the last player to play (player 2 here) gets suspended.
#[test]
fn contabilizador_puntos_rustica_1() {
    let mut jugadas = Vec::new();
    jugadas.push(Jugada { carta: mazo::Carta { numero: "1".to_string(), palo: "picas".to_string(), valor: 8 }, numero_jugador: 1, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "2".to_string(), palo: "picas".to_string(), valor: 7 }, numero_jugador: 4, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "3".to_string(), palo: "picas".to_string(), valor: 3 }, numero_jugador: 3, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "4".to_string(), palo: "picas".to_string(), valor: 4 }, numero_jugador: 2, cartas_restantes: 0 });
    let resultado = contabilizar_puntos_ronda_rustica(&jugadas);
    assert!(resultado.1 == 2);
}
// Rustic round: winner who also played first earns 10 + 1 = 11 points.
#[test]
fn contabilizador_puntos_rustica_2() {
    let mut jugadas = Vec::new();
    jugadas.push(Jugada { carta: mazo::Carta { numero: "1".to_string(), palo: "picas".to_string(), valor: 8 }, numero_jugador: 1, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "2".to_string(), palo: "picas".to_string(), valor: 7 }, numero_jugador: 4, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "3".to_string(), palo: "picas".to_string(), valor: 3 }, numero_jugador: 3, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "4".to_string(), palo: "picas".to_string(), valor: 4 }, numero_jugador: 2, cartas_restantes: 0 });
    let resultado = contabilizar_puntos_ronda_rustica(&jugadas);
    assert!(resultado.0.contains(&(1, 11.)));
}
// Rustic round: the last player (non-winner) takes the -5 penalty.
#[test]
fn contabilizador_puntos_rustica_3() {
    let mut jugadas = Vec::new();
    jugadas.push(Jugada { carta: mazo::Carta { numero: "1".to_string(), palo: "picas".to_string(), valor: 8 }, numero_jugador: 1, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "2".to_string(), palo: "picas".to_string(), valor: 7 }, numero_jugador: 4, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "3".to_string(), palo: "picas".to_string(), valor: 3 }, numero_jugador: 3, cartas_restantes: 0 });
    jugadas.push(Jugada { carta: mazo::Carta { numero: "4".to_string(), palo: "picas".to_string(), valor: 4 }, numero_jugador: 2, cartas_restantes: 0 });
    let resultado = contabilizar_puntos_ronda_rustica(&jugadas);
    assert!(resultado.0.contains(&(2, -5.)));
}
|
use super::math::*;
use super::vec3;
use super::collide::*;
/// Scene data laid out struct-of-arrays style: sphere centers are split into
/// separate x/y/z coordinate vectors (plus radius, a center copy, and a
/// material id), with index i of every vector describing the same sphere.
#[derive(Default)]
pub struct World {
    pub sphere_x: Vec<f32>,
    pub sphere_y: Vec<f32>,
    pub sphere_z: Vec<f32>,
    pub sphere_r: Vec<f32>,
    pub sphere_c: Vec<Vec3>,
    pub material_ids: Vec<u32>,
}
impl World {
pub fn construct(objects: &[Sphere]) -> World {
let mut world = World::default();
for sphere in objects {
world.sphere_x.push(sphere.center.x);
world.sphere_y.push(sphere.center.y);
world.sphere_z.push(sphere.center.z);
world.sphere_r.push(sphere.radius);
world.sphere_c.push(sphere.center);
world.material_ids.push(sphere.material_id);
}
world
}
}
|
extern crate sdl2;
use sdl2::event::{Event, WindowEvent};
use sdl2::keyboard::{Keycode, Mod};
use sdl2::mouse::MouseButton;
use sdl2::pixels::Color;
use sdl2::pixels::PixelFormatEnum;
use sdl2::rect::{Point, Rect};
use sdl2::render::{Texture, WindowCanvas};
use sdl2::surface::Surface;
extern crate rusttype;
use rusttype::{point, FontCollection, Scale};
extern crate image;
use image::{DynamicImage, ImageBuffer, Rgba};
mod xclip_wrapper;
use xclip_wrapper::get_clipboard_image;
// A textured rectangle that can be drawn and dragged around the canvas.
struct Layer {
    texture: Texture,
    rect: Rect,
}
// Editable text anchored at (x, y); `layer` holds the rendered texture and is
// regenerated whenever the text changes.
struct TextLayer {
    layer: Option<Layer>,
    text: String,
    x: i32,
    y: i32,
}
// The layer currently being dragged, plus the grab offset within that layer.
#[derive(Debug)]
struct Selection {
    layer_index: usize,
    x_offset: i32,
    y_offset: i32,
}
// Input mode: ordinary mouse interaction vs. typing into a text layer.
#[derive(Debug)]
enum Mode {
    Normal,
    TextInput,
}
// Aggregate UI state tracked across frames.
#[derive(Debug)]
struct State {
    mouse: Point,
    w: i32,
    h: i32,
    selection: Option<Selection>,
    mode: Mode,
}
impl State {
fn new() -> Self {
Self {
mouse: Point::new(0, 0),
w: 0,
h: 0,
selection: None,
mode: Mode::Normal,
}
}
}
impl TextLayer {
    /// Creates an empty text layer anchored at (`x`, `y`); the rendered
    /// `layer` is produced lazily once text is typed in.
    fn new(x: i32, y: i32) -> Self {
        Self {
            layer: None,
            text: String::new(),
            // Field-init shorthand (was the redundant `x: x, y: y`).
            x,
            y,
        }
    }
}
// Rasterizes `text` with the bundled Roboto font into a new layer at (x, y).
fn layer_from_text(canvas: &mut WindowCanvas, text: &str, x: i32, y: i32) -> Option<Layer> {
    let font_data = include_bytes!("../data/Roboto-Regular.ttf");
    let collection = FontCollection::from_bytes(font_data as &[u8]).unwrap();
    let font = collection.into_font().unwrap();
    let scale = Scale::uniform(32.0);
    // Dark red text; per-pixel alpha comes from glyph coverage below.
    let color = (150, 0, 0);
    let v_metrics = font.v_metrics(scale);
    // Lay out a single line, offset by (20, 20) for padding.
    let glyphs: Vec<_> = font
        .layout(text, scale, point(20.0, 20.0 + v_metrics.ascent))
        .collect();
    // work out the layout size
    let glyphs_height = (v_metrics.ascent - v_metrics.descent).ceil() as u32;
    // Width = right edge of the final glyph (0 for empty text).
    let glyphs_width = glyphs
        .iter()
        .rev()
        .map(|g| g.position().x as f32 + g.unpositioned().h_metrics().advance_width)
        .next()
        .unwrap_or(0.0)
        .ceil() as u32;
    // Create a new rgba image with some padding
    let mut image = DynamicImage::new_rgba8(glyphs_width + 40, glyphs_height + 40).to_rgba();
    // Loop through the glyphs in the text, positioning each one on a line
    for glyph in glyphs {
        if let Some(bounding_box) = glyph.pixel_bounding_box() {
            // Draw the glyph into the image per-pixel by using the draw closure
            glyph.draw(|x, y, v| {
                image.put_pixel(
                    // Offset the position by the glyph bounding box
                    x + bounding_box.min.x as u32,
                    y + bounding_box.min.y as u32,
                    // Turn the coverage into an alpha value
                    Rgba([color.0, color.1, color.2, (v * 255.0) as u8]),
                )
            });
        }
    }
    layer_from_image(canvas, image, x, y)
}
// Decodes an image from the clipboard (if any) into a layer at (x, y).
fn layer_from_clipboard(canvas: &mut WindowCanvas, x: i32, y: i32) -> Option<Layer> {
    let bytes = get_clipboard_image()?;
    let image = image::load_from_memory(&bytes).unwrap().to_rgba();
    layer_from_image(canvas, image, x, y)
}
// Wraps a raw RGBA image in an SDL texture-backed layer positioned at (x, y).
fn layer_from_image(
    canvas: &mut WindowCanvas,
    mut image: ImageBuffer<Rgba<u8>, Vec<u8>>,
    x: i32,
    y: i32,
) -> Option<Layer> {
    let (w, h) = image.dimensions();
    // Pitch is 4 bytes per pixel (RGBA32).
    let surface = Surface::from_data(&mut image, w, h, 4 * w, PixelFormatEnum::RGBA32).unwrap();
    let texture = canvas
        .texture_creator()
        .create_texture_from_surface(&surface)
        .unwrap();
    let rect = Rect::new(x, y, w, h);
    Some(Layer { texture, rect })
}
// Event loop for the image editor: handles drag-to-move, Ctrl+V paste from the
// clipboard, and 't'-to-start text entry, then redraws all layers each frame.
fn main() {
    let sdl_context = sdl2::init().unwrap();
    let video_subsys = sdl_context.video().unwrap();
    let window = video_subsys
        .window("Image Editor", 800, 600)
        .resizable()
        .opengl()
        .build()
        .unwrap();
    let mut canvas = window.into_canvas().present_vsync().build().unwrap();
    // Image layers and text layers are kept in separate lists; text layers
    // re-render their texture whenever their text changes.
    let mut layers: Vec<Layer> = Vec::new();
    let mut text_layers: Vec<TextLayer> = Vec::new();
    let mut active_text_layer_index: Option<usize> = None;
    let mut state = State::new();
    'mainloop: loop {
        for event in sdl_context.event_pump().unwrap().poll_iter() {
            match event {
                Event::Quit { .. } => break 'mainloop,
                // Dragging with the left button moves the selected layer,
                // keeping the original grab point under the cursor.
                Event::MouseMotion {
                    x, y, mousestate, ..
                } => {
                    state.mouse = Point::new(x, y);
                    if mousestate.left() {
                        if let Some(selection) = &state.selection {
                            if let Some(layer) = layers.get_mut(selection.layer_index) {
                                layer.rect.x = x - selection.x_offset;
                                layer.rect.y = y - selection.y_offset;
                            }
                        }
                    }
                }
                // Left click selects a layer under the cursor; because the
                // loop keeps overwriting, the last matching layer wins.
                Event::MouseButtonDown {
                    x, y, mouse_btn, ..
                } => {
                    state.mouse = Point::new(x, y);
                    if mouse_btn == MouseButton::Left {
                        for (i, layer) in &mut layers.iter().enumerate() {
                            if layer.rect.contains_point(state.mouse) {
                                let x_offset = x - layer.rect.x;
                                let y_offset = y - layer.rect.y;
                                state.selection = Some(Selection {
                                    layer_index: i,
                                    x_offset,
                                    y_offset,
                                });
                            }
                        }
                    }
                }
                // Releasing the left button ends any drag.
                Event::MouseButtonUp { mouse_btn, .. } => {
                    if mouse_btn == MouseButton::Left {
                        state.selection = None;
                    }
                }
                Event::Window { win_event, .. } => {
                    if let WindowEvent::Resized(w, h) = win_event {
                        state.w = w;
                        state.h = h;
                    }
                }
                Event::TextInput { text, .. } => match state.mode {
                    // In text mode, typed characters append to the active text
                    // layer and its texture is re-rendered.
                    Mode::TextInput => {
                        if let Some(i) = active_text_layer_index {
                            if let Some(mut text_layer) = text_layers.get_mut(i) {
                                text_layer.text.push_str(&text);
                                text_layer.layer = layer_from_text(
                                    &mut canvas,
                                    &text_layer.text,
                                    text_layer.x,
                                    text_layer.y,
                                );
                            }
                        }
                    }
                    // In normal mode, 't' starts a new text layer at the
                    // mouse position and switches to text mode.
                    Mode::Normal => {
                        if &text == "t" {
                            text_layers.push(TextLayer::new(state.mouse.x, state.mouse.y));
                            active_text_layer_index = Some(text_layers.len() - 1);
                            state.mode = Mode::TextInput;
                        }
                    }
                },
                // Ctrl+V pastes the clipboard image as a new layer.
                Event::KeyDown {
                    keycode: Some(kc),
                    keymod,
                    ..
                } => {
                    if keymod.contains(Mod::LCTRLMOD) || keymod.contains(Mod::RCTRLMOD) {
                        if kc == Keycode::V {
                            if let Some(layer) =
                                layer_from_clipboard(&mut canvas, state.mouse.x, state.mouse.y)
                            {
                                layers.push(layer);
                            }
                        }
                    }
                }
                _ => {}
            }
        }
        // Render: clear to the background color, then draw image layers
        // followed by text layers (text always on top).
        canvas.set_draw_color(Color::RGB(200, 180, 100));
        canvas.clear();
        for layer in &layers {
            let src_rect = Rect::new(0, 0, layer.rect.width(), layer.rect.height());
            canvas
                .copy(&layer.texture, Some(src_rect), Some(layer.rect))
                .unwrap();
        }
        for text_layer in &text_layers {
            if let Some(layer) = &text_layer.layer {
                let src_rect = Rect::new(0, 0, layer.rect.width(), layer.rect.height());
                canvas
                    .copy(&layer.texture, Some(src_rect), Some(layer.rect))
                    .unwrap();
            }
        }
        canvas.present();
    }
}
|
use proconio::input;
/// Competitive-programming solution: simulates tokens moving along a track of
/// length 4 and counts how many reach (or pass) the end.
// NOTE(review): semantics below are inferred from the code, not from a problem
// statement — confirm against the original task before relying on them.
fn main() {
    input! {
        n: usize,
        a: [usize; n],
    };
    // b[j] == true means a token currently sits j steps along the track;
    // p counts tokens that have moved off the end.
    let mut b = vec![false; 4];
    let mut p = 0;
    for a in a {
        // Invariant: slot 0 must be free at the start of each turn.
        assert_eq!(b[0], false);
        // A new token enters at position 0 every turn.
        b[0] = true;
        let mut c = vec![false; 4];
        for j in 0..=3 {
            if b[j] {
                if a + j >= 4 {
                    // Advancing by `a` pushes this token off the end: score it.
                    p += 1;
                } else {
                    c[a + j] = b[j];
                }
            }
        }
        // All surviving tokens advanced by `a` simultaneously.
        b = c;
    }
    println!("{}", p);
}
|
use rltk::{Algorithm2D, field_of_view, Point};
use specs::prelude::*;
use crate::{IsVisible, Map, Player, Position, Viewshed};
/// ECS system that recomputes fields of view for entities whose [`Viewshed`]
/// is marked dirty, and — for the player entity only — updates the map's
/// revealed/visible tile flags and tags entities on visible tiles with
/// [`IsVisible`].
pub struct VisibilitySystem;
impl<'a> System<'a> for VisibilitySystem {
    type SystemData = (
        WriteExpect<'a, Map>,
        Entities<'a>,
        WriteStorage<'a, Viewshed>,
        WriteStorage<'a, Position>,
        ReadStorage<'a, Player>,
        WriteStorage<'a, IsVisible>,
    );
    fn run(&mut self, data: Self::SystemData) {
        let (
            mut map,
            entities,
            mut viewsheds,
            positions,
            player,
            mut is_visible) = data;
        for (entity, viewshed, position) in (&entities, &mut viewsheds, &positions).join() {
            // Only recompute when something invalidated the cached viewshed.
            if viewshed.dirty {
                viewshed.dirty = false;
                viewshed.visible_tiles.clear();
                viewshed.visible_tiles = field_of_view(Point::new(position.x, position.y), viewshed.range, &*map);
                // field_of_view can return out-of-map points; clip them.
                viewshed.visible_tiles.retain(|p| map.in_bounds(*p));
                // Player-specific bookkeeping: reset map visibility, then mark
                // revealed/visible tiles and tag their occupants.
                let player_entity_or_none = player.get(entity);
                if let Some(_) = player_entity_or_none {
                    for is_visible in map.visible_tiles.iter_mut() {
                        *is_visible = false;
                    };
                    is_visible.clear();
                    for visible_tile in viewshed.visible_tiles.iter() {
                        let idx = map.xy_idx(visible_tile.x, visible_tile.y);
                        map.revealed_tiles[idx] = true;
                        map.visible_tiles[idx] = true;
                        for tile_entity in map.tile_content[idx].iter() {
                            is_visible.insert(*tile_entity, IsVisible).expect("Unable to insert");
                        }
                    }
                }
            }
        }
    }
}
/// Prints each element of a fixed five-element integer array, one per line.
fn main() {
    let numbers: [i32; 5] = [1, 2, 3, 4, 5];
    // Iterate by reference so the array stays usable afterwards.
    numbers.iter().for_each(|value| println!("{}", value));
}
use proconio::input;
#[allow(unused_imports)]
use proconio::marker::*;
#[allow(unused_imports)]
use std::cmp::*;
#[allow(unused_imports)]
use std::collections::*;
#[allow(unused_imports)]
use std::f64::consts::*;
#[allow(unused)]
const INF: usize = std::usize::MAX / 4;
#[allow(unused)]
const M: usize = 1000000007;
/// Competitive-programming solution: answers "how many distinct letters occur
/// in s[l..=r]?" queries interleaved with point character updates, using
/// prefix sums over the ORIGINAL string plus lazy delta sets for updates.
fn main() {
    input! {
        n: usize,
        s: Chars,
        q: usize,
        query: [(usize, usize, String); q],
    }
    // Map each character to 0..26.
    let s = s
        .into_iter()
        .map(|c| c as usize - 'a' as usize)
        .collect::<Vec<_>>();
    // acc[i][j] = occurrences of letter j in the original s[..i] (1-indexed).
    let mut acc = vec![vec![0; 26]; n + 1];
    for i in 1..=n {
        for j in 0..26 {
            acc[i][j] = acc[i - 1][j];
        }
        acc[i][s[i - 1]] += 1;
    }
    // eprintln!("{:?}", acc);
    // Updates are stored as deltas: plus[c] holds positions whose CURRENT
    // letter is c; minus[d] holds positions whose ORIGINAL letter d no longer
    // applies. Prefix sums stay untouched.
    let mut plus = vec![BTreeSet::new(); 26];
    let mut minus = vec![BTreeSet::new(); 26];
    'outer: for (t, i, c) in query {
        // eprintln!("{} {} {}", t, i, c);
        if t == 1 {
            // Type 1: set position i to character c.
            let c = c.chars().nth(0).unwrap() as usize - 'a' as usize;
            // If position i was already overridden, just move it to its new
            // letter set (the `minus` entry for the original letter remains).
            for d in 0..26 {
                if plus[d].contains(&i) {
                    plus[d].remove(&i);
                    plus[c].insert(i);
                    continue 'outer;
                }
            }
            // First override of this position: record both delta directions.
            let d = s[i - 1];
            if c != d {
                plus[c].insert(i);
                minus[d].insert(i);
            }
        } else {
            // Type 2: count distinct letters currently present in s[l..=r].
            let l = i;
            let r = c.parse::<usize>().ok().unwrap();
            let mut count = 0;
            for j in 0..26 {
                // Restrict the delta sets to positions inside [l, r].
                let pr = plus[j].range(..=r).count();
                let mr = minus[j].range(..=r).count();
                let pl = plus[j].range(..l).count();
                let ml = minus[j].range(..l).count();
                let pj = pr - pl;
                let mj = mr - ml;
                // Original count, corrected by additions minus removals.
                let cj = acc[r][j] - acc[l - 1][j];
                // eprintln!("{} {} {} {}", (j + 'a' as usize) as u8 as char, pj, mj, cj);
                if cj + pj - mj > 0 {
                    count += 1;
                }
            }
            println!("{}", count);
        }
    }
}
|
//! # fn-search-backend-cache
//!
//! Caching the functions found on [packages.elm-lang.org](https://packages.elm-lang.org)
//! is performed with the following algorithm
//!
//! * Download the list of packages on [packages.elm-lang.org](https://packages.elm-lang.org)
//! * Iterate over each repository in parallel
//! * Check if the repository already is cached
//! * If yes, spawn a subprocess and run git pull to update the repository
//! * If no, spawn a subprocess and run git clone to download the repository
//! * Run a Elm parser on the source code to find all exported functions/variables/etc...
//! * Insert exported functions and types into the database
pub mod chromium_dl;
pub mod db_queries;
pub mod elm_package;
pub mod git_repo;
pub mod repo_cache;
mod subprocess;
use crate::db_queries::{insert_functions, refresh_repo_func_mat_view};
use crate::elm_package::{ElmFile, ElmPackage, ElmPackageError};
use crate::repo_cache::{sync_repo, RepoCacheOptions, SyncRepoError, SyncResult};
use clap::{clap_app, crate_authors, crate_description, crate_version, ArgMatches};
use fn_search_backend::{get_config, Config};
use rayon::prelude::*;
use std::collections::HashMap;
use std::error::Error;
/// Clones or updates every Elm package repository listed on
/// packages.elm-lang.org into the local cache, in parallel via rayon.
///
/// Repositories whose page fetch failed with `CantFindUrl` are retried once
/// (headless Chrome sometimes does not finish loading the page); a second
/// failure is logged and skipped.
fn sync(cfg: &Config, cache_config: &RepoCacheOptions) -> Result<(), Box<Error>> {
    let elm_libs = elm_package::get_elm_libs()?;
    // First pass: sync everything, collecting only the retryable failures.
    let failed_libs: Vec<&ElmPackage> = elm_libs
        .par_iter()
        .map(|i| (i, sync_repo(i, &cache_config, cfg)))
        .filter_map(|r| match r.1 {
            Ok(res) => {
                match res {
                    SyncResult::Clone => println!("cloned repo {}", r.0.name),
                    SyncResult::Update => println!("updated repo {}", r.0.name),
                }
                None
            }
            Err(e) => {
                match &e {
                    // chrome doesn't finish downloading the page sometimes, try again
                    SyncRepoError::ElmPackageError(ElmPackageError::CantFindUrl(_)) => Some(r.0),
                    _ => {
                        eprintln!("error syncing repo {}: {}", r.0.name, e);
                        None
                    }
                }
            }
        })
        .collect();
    // try failed libs again
    failed_libs
        .par_iter()
        .map(|i| (i, sync_repo(i, &cache_config, cfg)))
        .for_each(|r| match r.1 {
            Ok(res) => {
                match res {
                    SyncResult::Clone => println!("cloned repo {}", r.0.name),
                    SyncResult::Update => println!("updated repo {}", r.0.name),
                };
            }
            Err(e) => {
                // Second failure is terminal: report it and move on.
                eprintln!("error syncing repo {}: {}", r.0.name, e);
            }
        });
    Ok(())
}
/// Parses every cached Elm repository for exported functions/types and inserts
/// them into the database, then refreshes the materialized view.
// NOTE(review): the accumulator is looked up by `repo_path` but inserted under
// `res.0.name` — the map key is inconsistent, so the Some(...) merge branch may
// never fire. Confirm which key is intended.
fn parse(cfg: &Config, cache_config: &RepoCacheOptions) -> Result<(), Box<Error>> {
    let elm_libs = elm_package::get_elm_libs()?;
    // try to parse each elm file
    let repo_exports: HashMap<String, Vec<ElmFile>> = HashMap::new();
    println!("parsing elm source code for exports...");
    // collect exported stuff from source code
    let exports: Vec<_> = elm_libs
        .par_iter()
        .map(|i| (i, i.get_exports(&cache_config)))
        .collect();
    println!("reducing exports...");
    // convert the exports into a more usable format
    let reduced_exports: Vec<_> = exports
        .into_iter()
        .fold(repo_exports, |mut repo_exports, res| match res.1 {
            Ok(file_res_vec) => {
                for file_res in file_res_vec {
                    match file_res {
                        Ok(elm_file) => match res.0.get_repo_path(cache_config) {
                            Ok(repo_path) => match repo_exports.get(&repo_path) {
                                Some(elm_files) => {
                                    // Merge the new file into the existing list.
                                    let mut new_elm_files = elm_files.clone();
                                    new_elm_files.push(elm_file);
                                    repo_exports
                                        .insert(res.0.name.to_string(), new_elm_files.to_vec());
                                }
                                None => {
                                    repo_exports.insert(res.0.name.to_string(), vec![elm_file]);
                                }
                            },
                            Err(e) => {
                                eprintln!("error while finding repository path: {}", e);
                            }
                        },
                        Err(e) => {
                            eprintln!("error while parsing file: {}", e);
                        }
                    }
                }
                repo_exports
            }
            Err(e) => {
                // Whole-repo parse failure: log and keep the accumulator as-is.
                eprintln!("error while trying to parse elm files: {}", e);
                repo_exports
            }
        })
        .into_iter()
        .collect();
    println!("inserting functions into db...");
    // insert the exported functions into the database
    reduced_exports.into_par_iter().for_each(|(name, exports)| {
        match insert_functions(&cfg.db, &name, &exports) {
            Ok(_) => {}
            Err(e) => eprintln!("error while inserting functions: {}", e),
        }
    });
    println!("refreshing materialized views...");
    refresh_repo_func_mat_view(&cfg.db)?;
    Ok(())
}
/// CLI entry point: parses arguments, builds the cache options, and dispatches
/// to the `sync` or `parse` subcommand.
fn main() -> Result<(), Box<Error>> {
    let matches: ArgMatches = clap_app!(fn_search_backend_scrape =>
        (version: crate_version!())
        (author: crate_authors!())
        (about: crate_description!())
        (@arg CACHE_DIR: -d --("cache-dir") +takes_value +required "directory for repositories to be cached in")
        (@arg CHROME: -h --chrome +takes_value +required default_value("chromium") "google chrome or chromium executable")
        (@arg GIT: -g --git +takes_value +required default_value("git") "git executable")
        (@arg CONFIG: -c --config +takes_value +required "configuration file")
        (@subcommand sync =>
            (about: "sync repositories")
        )
        (@subcommand parse =>
            (about: "parse elm files")
        )
    ).get_matches();
    // All four args are +required (or have defaults), so unwrap cannot fail
    // once clap has accepted the command line.
    let cache_dir = matches
        .value_of("CACHE_DIR")
        .expect("error, no cache directory specified");
    let chrome = matches.value_of("CHROME").unwrap();
    let git = matches.value_of("GIT").unwrap();
    let config = matches.value_of("CONFIG").unwrap();
    let config = get_config(&config).map_err(|e| e as Box<Error>)?;
    let cache_config = RepoCacheOptions {
        cache_path: String::from(cache_dir),
        chromium_bin_path: chrome.to_string(),
        git_bin_path: git.to_string(),
    };
    // Dispatch on subcommand; no subcommand prints usage help.
    if let Some(_) = matches.subcommand_matches("sync") {
        sync(&config, &cache_config)?;
    } else if let Some(_) = matches.subcommand_matches("parse") {
        parse(&config, &cache_config)?;
    } else {
        eprintln!("usage: fn_search_backend_scrape --help");
    }
    Ok(())
}
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test method calls with self as an argument (cross-crate)
#![feature(box_syntax)]
// aux-build:method_self_arg2.rs
extern crate method_self_arg2;
use method_self_arg2::{Foo, Bar};
/// Exercises every way of passing `self` across a crate boundary, then checks
/// a counter maintained by the aux crate.
// NOTE(review): the expected product of primes suggests each method multiplies
// the counter by a distinct prime — confirm against method_self_arg2's source.
fn main() {
    let x = Foo;
    // Test external call.
    Bar::foo1(&x);
    Bar::foo2(x);
    Bar::foo3(box x);
    Bar::bar1(&x);
    Bar::bar2(x);
    Bar::bar3(box x);
    x.run_trait();
    assert_eq!(method_self_arg2::get_count(), 2*2*3*3*5*5*7*7*11*11*13*13*17);
}
|
/*
Computer the number of times a pattern appears in a text
http://rosalind.info/problems/ba1a/
NOTE: Take care of overlapping. i.e. pattern_count("ATATA", "ATA") == 2
Input file should contain two lines; the first is a text, and the second is a pattern to look for.
*/
extern crate rosalind_rust;
use rosalind_rust::Cli;
use std::io::{prelude::*, BufReader};
use structopt::StructOpt;
/// Counts the (possibly overlapping) occurrences of `pattern` in `text`.
///
/// Overlapping matches count separately: `pattern_count(b"ATATA", b"ATA") == 2`.
///
/// Returns 0 for an empty pattern or a pattern longer than `text`.
/// (The empty-pattern guard is a fix: `slice::windows(0)` panics, so the
/// previous implementation crashed on an empty pattern line.)
pub fn pattern_count(text: &[u8], pattern: &[u8]) -> i32 {
    if pattern.is_empty() {
        return 0;
    }
    text.windows(pattern.len())
        .filter(|&window| window == pattern)
        .count() as i32
}
#[test]
fn test_ba1a() {
    // Sample from the Rosalind BA1A problem: "GCG" occurs at offsets 0 and 2
    // of "GCGCG" (overlapping occurrences count).
    let seq = b"GCGCG";
    let pat = b"GCG";
    assert_eq!(pattern_count(seq, pat), 2);
}
/// Reads a text (line 1) and a pattern (line 2) from the file named on the
/// command line, then prints "<pattern>: <count>".
// NOTE(review): `iter.next().unwrap().unwrap()` panics on a file with fewer
// than two lines — confirm the input contract guarantees two lines.
fn main() -> std::io::Result<()> {
    let args = Cli::from_args();
    let f = std::fs::File::open(&args.path)?;
    let reader = BufReader::new(f);
    let mut iter = reader.lines();
    let content = iter.next().unwrap().unwrap();
    let pattern = iter.next().unwrap().unwrap();
    let count = pattern_count(&content.as_bytes(), &pattern.as_bytes());
    println!("{}: {}", &pattern, count);
    Ok(())
}
|
//! EV3 specific features
use std::fs;
use std::path::Path;
use crate::driver::DRIVER_PATH;
use crate::utils::OrErr;
use crate::{Attribute, Ev3Result};
/// Color type: a brightness value in 0..=255 written to the led's sysfs
/// `brightness` attribute.
pub type Color = u8;
/// The led's on top of the EV3 brick.
#[derive(Debug, Clone)]
pub struct Led {
    // Sysfs `brightness` attribute of the matched brick-status led.
    led: Attribute,
}
impl Led {
    /// Led off.
    pub const COLOR_OFF: Color = 0;
    /// Led color amber (full brightness).
    pub const COLOR_AMBER: Color = 255;
    /// Create a new instance of the `Led` struct.
    ///
    /// Scans the sysfs `leds` directory for the first `led0:` brick-status
    /// entry and binds to its `brightness` attribute.
    // NOTE(review): if no matching entry exists, `led_name` stays empty and
    // the `Attribute::from_sys_class` call below is expected to error out —
    // confirm that failure mode is acceptable.
    pub fn new() -> Ev3Result<Led> {
        let mut led_name = String::new();
        let paths = fs::read_dir(Path::new(DRIVER_PATH).join("leds"))?;
        for path in paths {
            let file_name = path?.file_name();
            let name = file_name.to_str().or_err()?.to_owned();
            if name.contains(":brick-status") && name.contains("led0:") {
                led_name = name;
            }
        }
        let led = Attribute::from_sys_class("leds", led_name.as_str(), "brightness")?;
        Ok(Led { led })
    }
    /// Returns the current brightness value of led.
    pub fn get_led(&self) -> Ev3Result<Color> {
        self.led.get()
    }
    /// Sets the brightness value of led.
    pub fn set_led(&self, brightness: Color) -> Ev3Result<()> {
        self.led.set(brightness)
    }
}
|
use crate::spec::{Target, TargetOptions};
// See https://developer.android.com/ndk/guides/abis.html#arm64-v8a
// for target ABI requirements.
/// Target specification for `aarch64-linux-android` (the arm64-v8a Android ABI),
/// built on top of the shared Android base options.
pub fn target() -> Target {
    Target {
        llvm_target: "aarch64-linux-android".into(),
        pointer_width: 64,
        data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(),
        arch: "aarch64".into(),
        options: TargetOptions {
            max_atomic_width: Some(128),
            // As documented in http://developer.android.com/ndk/guides/cpu-features.html
            // the neon (ASIMD) and FP must exist on all android aarch64 targets.
            features: "+neon,+fp-armv8".into(),
            ..super::android_base::opts()
        },
    }
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Serialization and deserialization of wire formats.
//!
//! This module provides efficient serialization and deserialization of the
//! various wire formats used by this program. Where possible, it uses lifetimes
//! and immutability to allow for safe zero-copy parsing.
//!
//! # Endianness
//!
//! All values exposed or consumed by this crate are in host byte order, so the
//! caller does not need to worry about it. Any necessary conversions are
//! performed under the hood.
// TODO(joshlf): Move into debug_err! and debug_err_fn! definitions once
// attributes are allowed on expressions
// (https://github.com/rust-lang/rust/issues/15701).
#![allow(clippy::block_in_if_condition_stmt)]
/// Emit a debug message and return an error.
///
/// Invoke the `debug!` macro on all but the first argument. A call to
/// `debug_err!(err, ...)` is an expression whose value is the expression `err`.
macro_rules! debug_err {
    ($err:expr, $($arg:tt)*) => (
        // TODO(joshlf): Uncomment once attributes are allowed on expressions
        // #[cfg_attr(feature = "cargo-clippy", allow(block_in_if_condition_stmt))]
        {
            use ::log::debug;
            debug!($($arg)*);
            // The whole block evaluates to `$err`, so the macro can be used
            // directly as an expression (e.g. `return debug_err!(e, "...")`).
            $err
        }
    )
}
/// Create a closure which emits a debug message and returns an error.
///
/// Create a closure which, when called, invokes the `debug!` macro on all but
/// the first argument, and returns the first argument.
macro_rules! debug_err_fn {
    ($err:expr, $($arg:tt)*) => (
        // TODO(joshlf): Uncomment once attributes are allowed on expressions
        // #[cfg_attr(feature = "cargo-clippy", allow(block_in_if_condition_stmt))]
        // Unlike `debug_err!`, this expands to a zero-argument closure, so the
        // logging is deferred until (and unless) the closure is called —
        // suitable for `ok_or_else`/`unwrap_or_else`-style call sites.
        || {
            use ::log::debug;
            debug!($($arg)*);
            $err
        }
    )
}
#[macro_use]
mod macros;
pub(crate) mod arp;
pub(crate) mod ethernet;
pub(crate) mod icmp;
pub(crate) mod igmp;
pub(crate) mod ipv4;
pub(crate) mod ipv6;
pub(crate) mod records;
pub(crate) mod tcp;
#[cfg(test)]
pub(crate) mod testdata;
pub(crate) mod udp;
use std::convert::TryInto;
use std::ops::Deref;
use byteorder::{ByteOrder, NetworkEndian};
use internet_checksum::Checksum;
use net_types::ip::IpAddress;
use packet::{BufferView, SerializeBuffer};
use specialize_ip_macro::specialize_ip_address;
use zerocopy::ByteSlice;
use crate::ip::IpProto;
pub(crate) type U16 = zerocopy::U16<NetworkEndian>;
pub(crate) type U32 = zerocopy::U32<NetworkEndian>;
/// Adds the IPv4 or IPv6 pseudo-header for (`src_ip`, `dst_ip`, `proto`,
/// `transport_len`) to `checksum`.
///
/// Returns `Err` when `transport_len` does not fit the pseudo-header's length
/// field — a `u16` for IPv4, a `u32` for IPv6.
#[specialize_ip_address]
fn update_transport_checksum_pseudo_header<A: IpAddress>(
    checksum: &mut Checksum,
    src_ip: A,
    dst_ip: A,
    proto: IpProto,
    transport_len: usize,
) -> Result<(), std::num::TryFromIntError> {
    #[ipv4addr]
    let pseudo_header = {
        // 4 bytes for src_ip + 4 bytes for dst_ip + 1 byte of zeros + 1 byte
        // for protocol + 2 bytes for total_len
        let mut pseudo_header = [0u8; 12];
        (&mut pseudo_header[..4]).copy_from_slice(src_ip.bytes());
        (&mut pseudo_header[4..8]).copy_from_slice(dst_ip.bytes());
        pseudo_header[9] = proto.into();
        NetworkEndian::write_u16(&mut pseudo_header[10..12], transport_len.try_into()?);
        pseudo_header
    };
    #[ipv6addr]
    let pseudo_header = {
        // 16 bytes for src_ip + 16 bytes for dst_ip + 4 bytes for total_len + 3
        // bytes of zeroes + 1 byte for next header
        let mut pseudo_header = [0u8; 40];
        (&mut pseudo_header[..16]).copy_from_slice(src_ip.bytes());
        (&mut pseudo_header[16..32]).copy_from_slice(dst_ip.bytes());
        NetworkEndian::write_u32(&mut pseudo_header[32..36], transport_len.try_into()?);
        pseudo_header[39] = proto.into();
        pseudo_header
    };
    // add_bytes contains some branching logic at the beginning which is a bit
    // more expensive than the main loop of the algorithm. In order to make sure
    // we go through that logic as few times as possible, we construct the
    // entire pseudo-header first, and then add it to the checksum all at once.
    checksum.add_bytes(&pseudo_header[..]);
    Ok(())
}
/// Compute the checksum used by TCP and UDP over a packet split into parts.
///
/// `compute_transport_checksum_parts` computes the checksum used by TCP and
/// UDP over the concatenation of `parts`. For IPv4, the total packet length
/// must fit in a `u16`, and for IPv6, a `u32`. If the provided packet is too
/// big, `compute_transport_checksum_parts` returns `None`.
pub(crate) fn compute_transport_checksum_parts<'a, A: IpAddress, I>(
    src_ip: A,
    dst_ip: A,
    proto: IpProto,
    parts: I,
) -> Option<[u8; 2]>
where
    I: Iterator<Item = &'a &'a [u8]> + Clone,
{
    // See for details:
    // https://en.wikipedia.org/wiki/Transmission_Control_Protocol#Checksum_computation
    let mut checksum = Checksum::new();
    // The iterator is cloned so the parts can be walked twice: once to sum
    // their lengths for the pseudo-header, once to feed the checksum.
    let transport_len = parts.clone().map(|b| b.len()).sum();
    update_transport_checksum_pseudo_header(&mut checksum, src_ip, dst_ip, proto, transport_len)
        .ok()?;
    for p in parts {
        checksum.add_bytes(p);
    }
    Some(checksum.checksum())
}
/// Compute the checksum used by TCP and UDP.
///
/// Same as [`compute_transport_checksum_parts`] but gets the parts from a
/// `SerializeBuffer`.
pub(crate) fn compute_transport_checksum_serialize<A: IpAddress>(
    src_ip: A,
    dst_ip: A,
    proto: IpProto,
    buffer: &mut SerializeBuffer,
) -> Option<[u8; 2]> {
    // See for details:
    // https://en.wikipedia.org/wiki/Transmission_Control_Protocol#Checksum_computation
    let mut checksum = Checksum::new();
    let transport_len = buffer.len();
    update_transport_checksum_pseudo_header(&mut checksum, src_ip, dst_ip, proto, transport_len)
        .ok()?;
    // Feed header, each body fragment, then footer, in wire order.
    checksum.add_bytes(buffer.header());
    for p in buffer.body().iter_fragments() {
        checksum.add_bytes(p);
    }
    checksum.add_bytes(buffer.footer());
    Some(checksum.checksum())
}
/// Compute the checksum used by TCP and UDP.
///
/// Same as [`compute_transport_checksum_parts`] but with a single part.
/// Test-only convenience wrapper.
#[cfg(test)]
pub(crate) fn compute_transport_checksum<A: IpAddress>(
    src_ip: A,
    dst_ip: A,
    proto: IpProto,
    packet: &[u8],
) -> Option<[u8; 2]> {
    let mut checksum = Checksum::new();
    update_transport_checksum_pseudo_header(&mut checksum, src_ip, dst_ip, proto, packet.len())
        .ok()?;
    checksum.add_bytes(packet);
    Some(checksum.checksum())
}
/// A frame or packet that can be created from a raw form.
///
/// `FromRaw` provides a common interface for packets or frames that have can be
/// created from an "unchecked" form, that is, that are parsed in raw without
/// any higher-order validation.
///
/// The type parameter `R` is the raw type that an implementer can be converted
/// from, given some arguments of type `A`.
pub(crate) trait FromRaw<R, A>: Sized {
    /// The type of error that may happen during validation.
    type Error;
    /// Attempts to create `Self` from the raw form in `raw` with `args`.
    fn try_from_raw_with(raw: R, args: A) -> Result<Self, Self::Error>;
    /// Attempts to create `Self` from the raw form in `raw`.
    ///
    /// Provided convenience method: delegates to [`FromRaw::try_from_raw_with`]
    /// with unit args, available when `Self` implements `FromRaw<R, ()>` with
    /// the same error type.
    fn try_from_raw(raw: R) -> Result<Self, <Self as FromRaw<R, A>>::Error>
    where
        Self: FromRaw<R, (), Error = <Self as FromRaw<R, A>>::Error>,
    {
        Self::try_from_raw_with(raw, ())
    }
}
/// A type that encapsulates a complete or incomplete parsing operation.
///
/// `MaybeParsed` is a common utility to provide partial/incomplete parsing
/// results. The type parameters `C` and `I` are the types for a "complete" and
/// "incomplete" parsing result, respectively.
pub(crate) enum MaybeParsed<C, I> {
    /// A fully-parsed value.
    Complete(C),
    /// A partially-parsed value (e.g. the input was shorter than required).
    Incomplete(I),
}
impl<T> MaybeParsed<T, T> {
    /// Creates a `MaybeParsed` instance taking `n` bytes from the front of
    /// `buff`.
    ///
    /// Returns [`MaybeParsed::Complete`] with `n` bytes if `buff` contains at
    /// least `n` bytes. Otherwise returns [`MaybeParsed::Incomplete`] greedily
    /// taking all the remaining bytes from `buff`
    #[cfg(test)]
    pub(crate) fn take_from_buffer<BV: BufferView<T>>(buff: &mut BV, n: usize) -> Self
    where
        T: ByteSlice,
    {
        if let Some(v) = buff.take_front(n) {
            MaybeParsed::Complete(v)
        } else {
            // Not enough bytes for `n`; consume whatever is left instead.
            MaybeParsed::Incomplete(buff.take_rest_front())
        }
    }
    /// Creates a `MaybeParsed` instance with `bytes` observing a minimum
    /// length `min_len`.
    ///
    /// Returns [`MaybeParsed::Complete`] if `bytes` is at least `min_len` long,
    /// otherwise returns [`MaybeParsed::Incomplete`]. In both cases, `bytes`
    /// is moved into one of the two `MaybeParsed` variants.
    pub(crate) fn new_with_min_len(bytes: T, min_len: usize) -> Self
    where
        T: ByteSlice,
    {
        if bytes.len() >= min_len {
            MaybeParsed::Complete(bytes)
        } else {
            MaybeParsed::Incomplete(bytes)
        }
    }
    /// Consumes this `MaybeParsed` and return its contained value if both the
    /// `Complete` and `Incomplete` variants contain the same type.
    pub(crate) fn into_inner(self) -> T {
        match self {
            MaybeParsed::Complete(c) => c,
            MaybeParsed::Incomplete(i) => i,
        }
    }
}
impl<C, I> MaybeParsed<C, I> {
    /// Creates a `MaybeParsed` instance taking `n` bytes from the front of
    /// `buff` and mapping with `map`.
    ///
    /// Returns [`MaybeParsed::Complete`] with the result of `map` of `n` bytes
    /// if `buff` contains at least `n` bytes. Otherwise returns
    /// [`MaybeParsed::Incomplete`] greedily
    /// taking all the remaining bytes from `buff`.
    /// `map` is only invoked in the complete case.
    pub(crate) fn take_from_buffer_with<BV: BufferView<I>, F>(
        buff: &mut BV,
        n: usize,
        map: F,
    ) -> Self
    where
        F: FnOnce(I) -> C,
        I: ByteSlice,
    {
        if let Some(v) = buff.take_front(n) {
            MaybeParsed::Complete(map(v))
        } else {
            MaybeParsed::Incomplete(buff.take_rest_front())
        }
    }
    /// Maps a [`MaybeParsed::Complete`] variant to another type. Otherwise
    /// returns the containing [`MaybeParsed::Incomplete`] value.
    pub(crate) fn map<M, F>(self, f: F) -> MaybeParsed<M, I>
    where
        F: FnOnce(C) -> M,
    {
        match self {
            MaybeParsed::Incomplete(v) => MaybeParsed::Incomplete(v),
            MaybeParsed::Complete(v) => MaybeParsed::Complete(f(v)),
        }
    }
    /// Maps a [`MaybeParsed::Incomplete`] variant to another type. Otherwise
    /// returns the containing [`MaybeParsed::Complete`] value.
    pub(crate) fn map_incomplete<M, F>(self, f: F) -> MaybeParsed<C, M>
    where
        F: FnOnce(I) -> M,
    {
        match self {
            MaybeParsed::Incomplete(v) => MaybeParsed::Incomplete(f(v)),
            MaybeParsed::Complete(v) => MaybeParsed::Complete(v),
        }
    }
    /// Converts from `&MaybeParsed<C,I>` to `MaybeParsed<&C,&I>`.
    pub(crate) fn as_ref(&self) -> MaybeParsed<&C, &I> {
        match self {
            MaybeParsed::Incomplete(v) => MaybeParsed::Incomplete(v),
            MaybeParsed::Complete(v) => MaybeParsed::Complete(v),
        }
    }
    /// Returns `true` if `self` is [`MaybeParsed::Complete`].
    pub(crate) fn is_complete(&self) -> bool {
        match self {
            MaybeParsed::Incomplete { .. } => false,
            MaybeParsed::Complete(_) => true,
        }
    }
    /// Returns `true` if `self` is [`MaybeParsed::Incomplete`].
    #[cfg(test)]
    pub(crate) fn is_incomplete(&self) -> bool {
        match self {
            MaybeParsed::Incomplete { .. } => true,
            MaybeParsed::Complete(_) => false,
        }
    }
    /// Unwraps the complete value of `self`.
    ///
    /// # Panics
    ///
    /// Panics if `self` is not [`MaybeParsed::Complete`].
    #[cfg(test)]
    pub(crate) fn unwrap(self) -> C {
        match self {
            MaybeParsed::Incomplete { .. } => panic!("Called unwrap on incomplete MaybeParsed"),
            MaybeParsed::Complete(v) => v,
        }
    }
    /// Unwraps the incomplete value of `self`.
    ///
    /// # Panics
    ///
    /// Panics if `self` is not [`MaybeParsed::Incomplete`].
    #[cfg(test)]
    pub(crate) fn unwrap_incomplete(self) -> I {
        match self {
            MaybeParsed::Incomplete(v) => v,
            MaybeParsed::Complete(_) => panic!("Called unwrap_incomplete on complete MaybeParsed"),
        }
    }
    /// Transforms this `MaybeIncomplete` into a `Result` where the `Complete`
    /// variant becomes `Ok` and the `Incomplete` variant is passed through `f`
    /// and mapped to `Err`.
    pub(crate) fn ok_or_else<F, E>(self, f: F) -> Result<C, E>
    where
        F: FnOnce(I) -> E,
    {
        match self {
            MaybeParsed::Complete(v) => Ok(v),
            MaybeParsed::Incomplete(e) => Err(f(e)),
        }
    }
}
impl<C, I> MaybeParsed<C, I>
where
    C: Deref<Target = [u8]>,
    I: Deref<Target = [u8]>,
{
    /// Returns the length in bytes of the contained data.
    ///
    /// Available whenever both variants dereference to byte slices; the length
    /// reported is that of whichever variant is held.
    fn len(&self) -> usize {
        match self {
            MaybeParsed::Incomplete(v) => v.deref().len(),
            MaybeParsed::Complete(v) => v.deref().len(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_maybe_parsed_take_from_buffer() {
        // First take fits (2 of 4 bytes) -> Complete; second take (3 of the 2
        // remaining) falls short -> Incomplete with the rest.
        let buff = [1_u8, 2, 3, 4];
        let mut bv = &mut &buff[..];
        let mp = MaybeParsed::take_from_buffer(&mut bv, 2);
        assert_eq!(mp.unwrap(), &buff[..2]);
        let mp = MaybeParsed::take_from_buffer(&mut bv, 3);
        assert_eq!(mp.unwrap_incomplete(), &buff[2..]);
    }
    #[test]
    fn test_maybe_parsed_min_len() {
        // min_len at or below the slice length -> Complete; above -> Incomplete.
        let buff = [1_u8, 2, 3, 4];
        let mp = MaybeParsed::new_with_min_len(&buff[..], 3);
        assert_eq!(mp.unwrap(), &buff[..]);
        let mp = MaybeParsed::new_with_min_len(&buff[..], 5);
        assert_eq!(mp.unwrap_incomplete(), &buff[..]);
    }
    #[test]
    fn test_maybe_parsed_take_from_buffer_with() {
        // The mapper runs only on the Complete path; the Incomplete path must
        // not invoke it (the panicking closure proves that).
        let buff = [1_u8, 2, 3, 4];
        let mut bv = &mut &buff[..];
        let mp = MaybeParsed::take_from_buffer_with(&mut bv, 2, |x| Some(usize::from(x[0] + x[1])));
        assert_eq!(mp.unwrap(), Some(3));
        let mp =
            MaybeParsed::take_from_buffer_with(&mut bv, 3, |_| panic!("map shouldn't be called"));
        assert_eq!(mp.unwrap_incomplete(), &buff[2..]);
    }
    #[test]
    fn test_maybe_parsed_map() {
        // `map` transforms Complete and leaves Incomplete untouched.
        assert_eq!(
            MaybeParsed::<&str, ()>::Complete("hello").map(|x| format!("{} you", x)).unwrap(),
            "hello you".to_string()
        );
        assert_eq!(
            MaybeParsed::<(), &str>::Incomplete("hello")
                .map(|_| panic!("map shouldn't be called"))
                .unwrap_incomplete(),
            "hello"
        );
    }
    #[test]
    fn test_maybe_parsed_len() {
        // `len` reports the held slice's length regardless of variant.
        let buff = [1_u8, 2, 3, 4];
        let mp1 = MaybeParsed::new_with_min_len(&buff[..], 2);
        let mp2 = MaybeParsed::new_with_min_len(&buff[..], 10);
        assert_eq!(mp1.len(), 4);
        assert_eq!(mp2.len(), 4);
    }
    #[test]
    fn test_maybe_parsed_complete_incomplete() {
        // is_complete / is_incomplete are exact opposites per variant.
        let complete = MaybeParsed::<(), ()>::Complete(());
        let incomplete = MaybeParsed::<(), ()>::Incomplete(());
        assert!(complete.is_complete());
        assert!(!complete.is_incomplete());
        assert!(!incomplete.is_complete());
        assert!(incomplete.is_incomplete());
    }
}
|
/// Build-script entry point when the frontend is embedded into the binary:
/// runs the shared setup, then generates `data.rs` containing the assets from
/// `TAURI_DIST_DIR`.
#[cfg(any(feature = "embedded-server", feature = "no-server"))]
pub fn main() {
    shared();
    match std::env::var_os("TAURI_DIST_DIR") {
        Some(dist_path) => {
            let dist_path_string = dist_path.into_string().unwrap();
            // Rebuild whenever the dist directory changes.
            println!("cargo:rerun-if-changed={}", dist_path_string);
            // TAURI_INLINED_ASSETS is a '|'-separated list of asset paths.
            let inlined_assets = match std::env::var_os("TAURI_INLINED_ASSETS") {
                Some(assets) => assets
                    .into_string()
                    .unwrap()
                    .split('|')
                    .map(|s| s.to_string())
                    .collect(),
                None => Vec::new(),
            };
            // include assets
            tauri_includedir_codegen::start("ASSETS")
                .dir(
                    dist_path_string,
                    tauri_includedir_codegen::Compression::None,
                )
                .build("data.rs", inlined_assets)
                .expect("failed to build data.rs")
        }
        None => println!("Build error: Couldn't find ENV: TAURI_DIST_DIR"),
    }
}
/// Build-script entry point when no assets are embedded: only the shared
/// setup runs.
#[cfg(not(any(feature = "embedded-server", feature = "no-server")))]
pub fn main() {
    shared();
}
/// Common build-script setup: ask Cargo to re-run the build whenever
/// `tauri.conf.json` (inside `TAURI_DIR`, if set) changes.
fn shared() {
    match std::env::var_os("TAURI_DIR") {
        Some(dir) => {
            let conf_path = std::path::PathBuf::from(dir).join("tauri.conf.json");
            println!("cargo:rerun-if-changed={:?}", conf_path);
        }
        None => {}
    }
}
|
//! Hostcall endpoints exposed to guests.
use super::context::EventBuffer;
use crate::wasm::context::RaisedError;
use crate::wasm::WasmModuleConfig;
use crate::Event;
use lucet_runtime::vmctx::Vmctx;
use std::convert::TryInto;
use vector_wasm::Registration;
pub use wrapped_for_ffi::ensure_linked;
// Also add any new functions to the `ffi::ensure_linked` function!
/// Names of every hostcall exported to guest modules; keep in sync with the
/// functions defined in `wrapped_for_ffi`.
pub const HOSTCALL_LIST: [&str; 5] = ["emit", "register", "raise", "config_size", "config"];
/// Hostcall: reads `length` bytes of JSON from guest heap offset `data`,
/// converts the object's fields into a new log event, and buffers it.
/// Returns the number of events currently buffered.
pub fn emit(vmctx: &Vmctx, data: u32, length: u32) -> crate::Result<u32> {
    let mut event_buffer = vmctx.get_embed_ctx_mut::<EventBuffer>();
    let heap = vmctx.heap_mut();
    let slice = &heap[data as usize..(length as usize + data as usize)];
    // TODO: Add some usability around `LogEvent` for this.
    let value: serde_json::Value = serde_json::from_slice(slice)?;
    let mut event = Event::new_empty_log();
    // Only top-level object keys are copied; non-object JSON is an error.
    for (key, value) in value.as_object().ok_or("Passed JSON was not object.")? {
        event.as_mut_log().insert(key, value.clone());
    }
    event_buffer.push_back(event);
    Ok(event_buffer.events.len().try_into()?)
}
/// Hostcall: deserializes a [`Registration`] from guest heap memory at
/// `data..data+length` and stores it in the embed context.
// NOTE(review): unlike `emit`, the JSON parse here uses `unwrap` instead of
// `?` — a malformed registration panics rather than erroring. Confirm intended.
fn register(vmctx: &Vmctx, data: u32, length: u32) -> crate::Result<()> {
    let heap = vmctx.heap_mut();
    let slice = &heap[data as usize..(length as usize + data as usize)];
    let value: Registration = serde_json::from_slice(slice).unwrap();
    let mut maybe_registration = vmctx.get_embed_ctx_mut::<Option<Registration>>();
    *maybe_registration = Some(value);
    Ok(())
}
/// Hostcall: reads a UTF-8 error message from guest heap memory and records it
/// as the raised error. Returns 1 if an error is now stored, 0 otherwise
/// (after the assignment above it is always 1 on success).
fn raise(vmctx: &Vmctx, data: u32, length: u32) -> crate::Result<u32> {
    let heap = vmctx.heap_mut();
    let slice = &heap[data as usize..(length as usize + data as usize)];
    let value = String::from_utf8(slice.into())?;
    let mut maybe_error = vmctx.get_embed_ctx_mut::<RaisedError>();
    maybe_error.error = Some(value);
    Ok(if maybe_error.error.is_some() { 1 } else { 0 })
}
/// Hostcall: returns the byte length of the JSON-serialized module config, so
/// the guest can allocate an exactly-sized buffer before calling `config`.
fn config_size(vmctx: &Vmctx) -> crate::Result<u32> {
    let config = vmctx.get_embed_ctx::<WasmModuleConfig>();
    let buf = serde_json::to_vec(&*config)?;
    Ok(buf.len().try_into()?)
}
/// Hostcall: JSON-serializes the module config into guest heap memory at
/// `buffer..buffer+length`.
// NOTE(review): `copy_from_slice` panics when `length` differs from the
// serialized size — guests are expected to call `config_size` first. Confirm
// whether a graceful error would be preferable here.
fn config(vmctx: &Vmctx, buffer: u32, length: u32) -> crate::Result<()> {
    let config = vmctx.get_embed_ctx::<WasmModuleConfig>();
    let buf = serde_json::to_vec(&*config)?;
    let mut heap = vmctx.heap_mut();
    let slice = &mut heap[buffer as usize..(length as usize + buffer as usize)];
    slice.copy_from_slice(buf.as_ref());
    Ok(())
}
/// All functions here must be fully C ABI compatible for wasm32-wasi.
mod wrapped_for_ffi {
    use crate::internal_events;
    use lucet_runtime::{lucet_hostcall, vmctx::Vmctx};
    use std::sync::Once;
    use vector_wasm::Role;
    static HOSTCALL_API_INIT: Once = Once::new();
    /// Force the linker to retain every hostcall symbol.
    ///
    /// This is pretty hackish; we will hopefully be able to avoid this altogether once [this
    /// issue](https://github.com/rust-lang/rust/issues/58037) is addressed.
    #[no_mangle]
    #[doc(hidden)]
    pub extern "C" fn ensure_linked() {
        use std::ptr::read_volatile;
        // Also add any new functions to the `super::HOSTCALL_LIST` const!
        HOSTCALL_API_INIT.call_once(|| unsafe {
            read_volatile(emit as *const extern "C" fn());
            // Fix: `register` is in `HOSTCALL_LIST` but was missing from this
            // list, so the linker could strip it despite the comment above.
            read_volatile(register as *const extern "C" fn());
            read_volatile(raise as *const extern "C" fn());
            read_volatile(config as *const extern "C" fn());
            read_volatile(config_size as *const extern "C" fn());
            lucet_wasi::export_wasi_funcs();
            lucet_runtime::lucet_internal_ensure_linked();
        });
    }
    /// FFI wrapper around [`super::register`] with instrumentation.
    #[lucet_hostcall]
    #[no_mangle]
    pub extern "C" fn register(vmctx: &Vmctx, data: u32, length: u32) {
        // Fix: this label previously said "emit"; it now matches the wrapped
        // hostcall so the instrumentation reports the right call.
        let internal_event = internal_events::Hostcall::begin(Role::Transform, "register");
        // TODO: Handle error.
        let ret = super::register(vmctx, data, length).unwrap();
        internal_event.complete();
        ret
    }
    /// FFI wrapper around [`super::emit`] with instrumentation.
    #[lucet_hostcall]
    #[no_mangle]
    pub extern "C" fn emit(vmctx: &Vmctx, data: u32, length: u32) -> u32 {
        // Fix: this label previously said "register" (swapped with the one in
        // `register` above).
        let internal_event = internal_events::Hostcall::begin(Role::Transform, "emit");
        // TODO: Handle error.
        let ret = super::emit(vmctx, data, length).unwrap();
        internal_event.complete();
        ret
    }
    /// FFI wrapper around [`super::raise`] with instrumentation.
    #[lucet_hostcall]
    #[no_mangle]
    pub extern "C" fn raise(vmctx: &Vmctx, data: u32, length: u32) -> u32 {
        let internal_event = internal_events::Hostcall::begin(Role::Transform, "raise");
        // TODO: Handle error.
        let ret = super::raise(vmctx, data, length).unwrap();
        internal_event.complete();
        ret
    }
    /// FFI wrapper around [`super::config_size`] with instrumentation.
    #[lucet_hostcall]
    #[no_mangle]
    pub extern "C" fn config_size(vmctx: &Vmctx) -> u32 {
        let internal_event = internal_events::Hostcall::begin(Role::Transform, "config_size");
        // TODO: Handle error.
        let ret = super::config_size(vmctx).unwrap();
        internal_event.complete();
        ret
    }
    /// FFI wrapper around [`super::config`] with instrumentation.
    #[lucet_hostcall]
    #[no_mangle]
    pub extern "C" fn config(vmctx: &Vmctx, buffer: u32, length: u32) {
        let internal_event = internal_events::Hostcall::begin(Role::Transform, "config");
        // TODO: Handle error.
        let ret = super::config(vmctx, buffer, length).unwrap();
        internal_event.complete();
        ret
    }
}
|
use std::{
fs,
io::{Read, Write},
net::{TcpListener, TcpStream},
thread::{self, sleep},
time::Duration,
};
use hello::ThreadPool;
/// Accepts up to 7 connections on 127.0.0.1:7878, dispatching each to a
/// 3-worker thread pool, then sleeps briefly so in-flight handlers can
/// finish before the process exits.
fn main() {
    let listener = TcpListener::bind("127.0.0.1:7878").unwrap();
    let pool = ThreadPool::new(3);
    // `take(7)` bounds the demo so the server shuts down on its own.
    for stream in listener.incoming().take(7) {
        let stream = stream.unwrap();
        println!("Connection established!");
        // The stray `&` here borrowed the unit return value of `execute`
        // for no effect — dropped.
        pool.execute(move || handle_connection(&stream));
    }
    println!("shutdown");
    thread::sleep(Duration::from_secs(1));
}
/// Serves one HTTP connection: `GET /` gets `index.html` with 200, anything
/// else gets `404.html` with 404 after a simulated 2-second delay.
fn handle_connection(mut stream: &TcpStream) {
    let mut request = [0u8; 1024];
    stream.read(&mut request).unwrap();
    // Match only the exact request line for the site root.
    let is_root = request.starts_with(b"GET / HTTP/1.1\r\n");
    if is_root {
        send_file(stream, 200, "OK", "index.html");
    } else {
        // Simulate a slow path for every other request.
        sleep(Duration::from_secs(2));
        send_file(stream, 404, "Not Found", "404.html");
    }
}
/// Sends an HTTP response with status line `status phase` and the contents
/// of `file_path` as the body (with a correct Content-Length header).
///
/// # Panics
/// Panics if the file cannot be read or the socket write fails.
fn send_file(mut stream: &TcpStream, status: u32, phase: &str, file_path: &str) {
    let html = fs::read_to_string(file_path).unwrap();
    let response = format!(
        "HTTP/1.1 {} {}\r\nContent-Length: {}\r\n\r\n{}",
        status,
        phase,
        html.len(),
        html
    );
    // `write` may perform a partial write; `write_all` guarantees the whole
    // response is sent (or errors out).
    stream.write_all(response.as_bytes()).unwrap();
    stream.flush().unwrap();
}
|
// This sub-crate exists to make sure that everything works well with the `no_alloc` flag enabled
use core::fmt::Write;
use humansize::{SizeFormatter, DECIMAL};
/// Fixed-capacity byte sink: `.0` is the storage, `.1` the number of bytes
/// written so far. Lets `write!` format into a stack array with no heap.
struct Buffer<const N: usize>([u8; N], usize);
impl<const N: usize> Write for Buffer<N> {
    /// Appends `text` if it fits in the remaining space, otherwise fails
    /// without modifying the buffer.
    fn write_str(&mut self, text: &str) -> core::fmt::Result {
        let bytes = text.as_bytes();
        let end = self.1 + bytes.len();
        if end > self.0.len() {
            return Err(core::fmt::Error);
        }
        self.0[self.1..end].copy_from_slice(bytes);
        self.1 = end;
        Ok(())
    }
}
/// Formats 1000 bytes through `humansize` into a fixed stack buffer,
/// proving the formatter works with the `no_alloc` flag (no heap use).
#[test]
fn test() {
    // 4 bytes exactly fits the expected rendering "1 kB".
    let mut result = Buffer([0u8; 4], 0);
    write!(&mut result, "{}", SizeFormatter::new(1000usize, DECIMAL)).unwrap();
    assert_eq!(core::str::from_utf8(&result.0).unwrap(), "1 kB");
}
use piston_window::*;
/// An `[x, y]` pair used for both positions and sizes.
pub type Point = [f64; 2];
/// RGBA color components, each in 0.0–1.0.
pub type Color = [f32; 4];
/// Axis-aligned rectangle: top-left `origin` plus `size` (`[width, height]`).
#[derive(Copy, Clone, Debug)]
pub struct Rect {
    pub origin: Point,
    pub size: Point
}
/// Minimal UI element interface: layout, bounds query, child management,
/// and drawing with piston_window.
pub trait Widget {
    /// Recomputes this widget's bounds (and its children's) within `bounds`.
    fn layout(&mut self, bounds: Rect);
    /// Returns the bounds established by the last `layout` call.
    fn get_bounds(&self) -> Rect;
    /// Appends an owned child widget.
    fn add_child(&mut self, child: Box<Widget>);
    /// Draws this widget and its children with the given context and glyph cache.
    fn draw(&self, ctx: Context, gl: &mut G2d, glyphs: &mut Glyphs);
}
/// Default `Widget` storage: a bounds rectangle plus owned children.
pub struct WidgetImpl {
    pub bounds: Rect,
    pub children: Vec<Box<Widget>>
}
impl WidgetImpl {
    /// Creates an empty widget: no children, zero-sized bounds at the origin.
    pub fn new() -> Self {
        WidgetImpl {
            bounds: Rect {
                origin: [0.0, 0.0],
                size: [0.0, 0.0]
            },
            children: Vec::new()
        }
    }
}
impl Widget for WidgetImpl {
    /// Stores the new bounds and gives every child this widget's full size,
    /// anchored at the child's local origin.
    fn layout(&mut self, bounds: Rect) {
        self.bounds = bounds;
        self.children.iter_mut().for_each(|child| {
            let child_bounds = Rect {
                origin: [0.0, 0.0],
                size: bounds.size
            };
            child.layout(child_bounds);
        });
    }
    /// Takes ownership of `child` and appends it to the draw list.
    fn add_child(&mut self, child: Box<Widget>) {
        self.children.push(child)
    }
    fn get_bounds(&self) -> Rect {
        return self.bounds;
    }
    /// Draws each child translated to its own origin, with the scissor
    /// rectangle clipped to the child's bounds. Scissor coordinates are
    /// scaled from window to framebuffer pixels (hi-dpi aware).
    fn draw(&self, ctx: Context, gl: &mut G2d, glyphs: &mut Glyphs) {
        self.children.iter().for_each(|child| {
            let bounds = child.get_bounds();
            let trans = ctx.transform.trans(bounds.origin[0], bounds.origin[1]);
            let viewport = ctx.viewport.unwrap();
            // Window-to-framebuffer scale factors.
            let scale_x = viewport.draw_size[0] as f64 / viewport.window_size[0];
            let scale_y = viewport.draw_size[1] as f64 / viewport.window_size[1];
            //println!("view={:?} trans={:?}", ctx.view, trans);
            // Scissor rect in physical pixels, offset by the parent viewport.
            let clip_rect = [
                ((bounds.origin[0] + viewport.rect[0] as f64) * scale_x) as u32,
                ((bounds.origin[1] + viewport.rect[1] as f64) * scale_y) as u32,
                (bounds.size[0] * scale_x) as u32,
                (bounds.size[1] * scale_y) as u32
            ];
            // Child viewport in logical coordinates, nested in the parent's.
            let vp = Viewport {
                rect: [
                    bounds.origin[0] as i32 + viewport.rect[0],
                    bounds.origin[1] as i32 + viewport.rect[1],
                    bounds.size[0] as i32,
                    bounds.size[1] as i32
                ],
                draw_size: viewport.draw_size,
                window_size: viewport.window_size
            };
            let clipped = Context {
                viewport: Some(vp),
                view: ctx.view,
                transform: trans,
                draw_state: ctx.draw_state.scissor(clip_rect)
            };
            child.draw(clipped, gl, glyphs);
        })
    }
}
use na::{DVector, DMatrix};
use rand::distributions::{Normal, IndependentSample};
use rand::thread_rng;
use kalmanfilter::systems::continuous_to_discrete;
use kalmanfilter::nt;
use super::types::*;
/// Inputs needed to build a `ContinuousLinearModel`:
/// x' = A x + B u + w, measured as y = C x + r.
pub struct ContinuousLinearModelBuilder {
    pub vec_x_init : SystemState,
    pub mat_a : ContinuousSystemMatrix,
    pub mat_b : ContinuousInputMatrix,
    pub vec_w : SystemNoiseVariances,
    pub mat_c : MeasurementMatrix,
    pub vec_r : MeasurementNoiseVariances,
}
/// Continuous-time linear system x' = A x + B u + w with measurement
/// y = C x + r, simulated forward in time by `step`.
pub struct ContinuousLinearModel {
    vec_x : SystemState,
    mat_a : ContinuousSystemMatrix,
    mat_b : ContinuousInputMatrix,
    vec_w : SystemNoiseVariances, // will also be used to hold the drawn samples
    mat_c : MeasurementMatrix,
    vec_r : MeasurementNoiseVariances, // will also be used to hold the drawn samples
    num_inputs : usize,
    num_states : usize,
    // One Gaussian sampler per state / per measurement.
    system_noise_gen : Vec<Normal>,
    measurement_noise_gen : Vec<Normal>,
}
impl From<ContinuousLinearModelBuilder> for ContinuousLinearModel {
    /// Validates that all matrix/vector dimensions are mutually consistent,
    /// then builds the model with one noise generator per state/measurement.
    fn from(builder : ContinuousLinearModelBuilder) -> ContinuousLinearModel {
        let num_states = builder.mat_a.nrows();
        let num_measurements = builder.mat_c.nrows();
        let num_inputs = builder.mat_b.ncols();
        // A must be square; B, w, C must agree with the state count and
        // r with the measurement count.
        assert_eq!(num_states, builder.mat_a.ncols());
        assert_eq!(num_states, builder.mat_b.nrows());
        assert_eq!(num_states, builder.vec_w.nrows());
        assert_eq!(num_states, builder.mat_c.ncols());
        assert_eq!(num_measurements, builder.vec_r.nrows());
        let system_noise_gen = mk_noise_generators(&builder.vec_w);
        let measurement_noise_gen = mk_noise_generators(&builder.vec_r);
        ContinuousLinearModel {
            vec_x : builder.vec_x_init,
            mat_a : builder.mat_a,
            mat_b : builder.mat_b,
            vec_w : builder.vec_w,
            mat_c : builder.mat_c,
            vec_r : builder.vec_r,
            num_inputs : num_inputs,
            num_states : num_states,
            system_noise_gen : system_noise_gen,
            measurement_noise_gen : measurement_noise_gen,
        }
    }
}
impl ContinuousLinearModel {
    /// Advances the state by one step of length `dt` using explicit (forward)
    /// Euler integration of x' = A x + B u + w, then returns a noisy
    /// measurement y = C x + r.
    pub fn step(&mut self, u : &InputVector, dt : f64) -> Measurements {
        assert_eq!(self.num_inputs, u.nrows());
        // Draw fresh noise samples in place — the variance vectors double as
        // sample storage (see the struct field comments).
        for (gen, n) in self.system_noise_gen.iter().zip(self.vec_w.iter_mut()) {
            *n = gen.ind_sample(&mut thread_rng());
        }
        for (gen, n) in self.measurement_noise_gen.iter().zip(self.vec_r.iter_mut()) {
            *n = gen.ind_sample(&mut thread_rng());
        }
        self.vec_x.0 += ( &self.mat_a.0 * &self.vec_x.0 + &self.mat_b.0 * &u.0 + &self.vec_w.0 ) * dt;
        Measurements( &self.mat_c.0 * &self.vec_x.0 + &self.vec_r.0 )
    }
    pub fn get_system_matrix(&self) -> &ContinuousSystemMatrix {
        &self.mat_a
    }
    pub fn get_input_matrix(&self) -> &ContinuousInputMatrix {
        &self.mat_b
    }
    pub fn get_state(&self) -> &SystemState {
        &self.vec_x
    }
    pub fn get_measurement_matrix(&self) -> &MeasurementMatrix {
        &self.mat_c
    }
    pub fn get_num_states(&self) -> usize {
        self.num_states
    }
    pub fn get_num_inputs(&self) -> usize {
        self.num_inputs
    }
}
/// Inputs needed to build a `DiscreteLinearModel`:
/// x[k+1] = F x[k] + H u[k] + w, measured as y = C x + r.
pub struct DiscreteLinearModelBuilder {
    pub vec_x_init : SystemState,
    pub mat_f : DiscreteSystemMatrix,
    pub mat_h : DiscreteInputMatrix,
    pub vec_w : SystemNoiseVariances,
    pub mat_c : MeasurementMatrix,
    pub vec_r : MeasurementNoiseVariances,
}
impl ContinuousLinearModelBuilder {
    /// Discretizes the continuous pair (A, B) with sample time `dt` and
    /// tolerance `eps` (passed through to `continuous_to_discrete`); the
    /// initial state, noise, and measurement parts carry over unchanged.
    pub fn into_discrete(self, dt : TimeStep, eps : f64) -> DiscreteLinearModelBuilder {
        let disc_sys = continuous_to_discrete(&self.mat_a, &self.mat_b, dt, eps);
        DiscreteLinearModelBuilder {
            vec_x_init : self.vec_x_init,
            mat_f : disc_sys.mat_f,
            mat_h : disc_sys.mat_h,
            vec_w : self.vec_w,
            mat_c : self.mat_c,
            vec_r : self.vec_r,
        }
    }
}
/// Discrete-time linear system x[k+1] = F x[k] + H u[k] + w with measurement
/// y = C x + r, simulated one sample at a time by `step`.
pub struct DiscreteLinearModel {
    vec_x : SystemState,
    mat_f : DiscreteSystemMatrix,
    mat_h : DiscreteInputMatrix,
    vec_w : SystemNoiseVariances, // will also be used to hold the drawn samples
    mat_c : MeasurementMatrix,
    vec_r : MeasurementNoiseVariances, // will also be used to hold the drawn samples
    num_inputs : usize,
    num_states : usize,
    // One Gaussian sampler per state / per measurement.
    system_noise_gen : Vec<Normal>,
    measurement_noise_gen : Vec<Normal>,
}
/// Builds one zero-mean Gaussian sampler per entry of `variances`.
///
/// `rand::distributions::Normal::new(mean, std_dev)` takes a *standard
/// deviation*, while these vectors hold *variances* (see the
/// `SystemNoiseVariances` / `MeasurementNoiseVariances` names), so the square
/// root is taken before constructing each distribution. The previous code
/// passed the variance directly, overstating the noise whenever it differed
/// from 1 (the bundled examples all use 0, where both agree).
fn mk_noise_generators(variances : &DVector<f64>) -> Vec<Normal> {
    variances
        .iter()
        .map(|w| Normal::new(0.0, w.sqrt()))
        .collect()
}
impl From<DiscreteLinearModelBuilder> for DiscreteLinearModel {
    /// Validates that all matrix/vector dimensions are mutually consistent,
    /// then builds the model with one noise generator per state/measurement.
    fn from(builder : DiscreteLinearModelBuilder) -> DiscreteLinearModel {
        let num_states = builder.mat_f.nrows();
        let num_measurements = builder.mat_c.nrows();
        let num_inputs = builder.mat_h.ncols();
        // F must be square; H, w, C must agree with the state count and
        // r with the measurement count.
        assert_eq!(num_states, builder.mat_f.ncols());
        assert_eq!(num_states, builder.mat_h.nrows());
        assert_eq!(num_states, builder.vec_w.nrows());
        assert_eq!(num_states, builder.mat_c.ncols());
        assert_eq!(num_measurements, builder.vec_r.nrows());
        let system_noise_gen = mk_noise_generators(&builder.vec_w);
        let measurement_noise_gen = mk_noise_generators(&builder.vec_r);
        DiscreteLinearModel {
            vec_x : builder.vec_x_init,
            mat_f : builder.mat_f,
            mat_h : builder.mat_h,
            vec_w : builder.vec_w,
            mat_c : builder.mat_c,
            vec_r : builder.vec_r,
            num_inputs : num_inputs,
            num_states : num_states,
            system_noise_gen : system_noise_gen,
            measurement_noise_gen : measurement_noise_gen,
        }
    }
}
impl DiscreteLinearModel {
    /// Advances the state by one sample: x = F x + H u + w, then returns a
    /// noisy measurement y = C x + r.
    pub fn step(&mut self, u : &InputVector) -> Measurements {
        assert_eq!(self.num_inputs, u.nrows());
        // Draw fresh noise samples in place — the variance vectors double as
        // sample storage (see the struct field comments).
        for (gen, n) in self.system_noise_gen.iter().zip(self.vec_w.iter_mut()) {
            *n = gen.ind_sample(&mut thread_rng());
        }
        for (gen, n) in self.measurement_noise_gen.iter().zip(self.vec_r.iter_mut()) {
            *n = gen.ind_sample(&mut thread_rng());
        }
        self.vec_x.0 = &self.mat_f.0 * &self.vec_x.0 + &self.mat_h.0 * &u.0 + &self.vec_w.0;
        Measurements( &self.mat_c.0 * &self.vec_x.0 + &self.vec_r.0 )
    }
    pub fn get_system_matrix(&self) -> &DiscreteSystemMatrix {
        &self.mat_f
    }
    pub fn get_input_matrix(&self) -> &DiscreteInputMatrix {
        &self.mat_h
    }
    pub fn get_state(&self) -> &SystemState {
        &self.vec_x
    }
    pub fn get_measurement_matrix(&self) -> &MeasurementMatrix {
        &self.mat_c
    }
    pub fn get_num_states(&self) -> usize {
        self.num_states
    }
    pub fn get_num_inputs(&self) -> usize {
        self.num_inputs
    }
}
/// Example: two-state stable system (full-rank A), no process or measurement
/// noise, single input on the first state, measuring twice the second state.
pub fn example_model_2states_regular_stable() -> ContinuousLinearModelBuilder {
    ContinuousLinearModelBuilder {
        vec_x_init : nt::StateVector(DVector::from_row_slice(2, &[
            0.,
            0.,
        ])),
        // has eigenvalues -3.5 and -1.5
        mat_a : nt::ContinuousSystemMatrix(DMatrix::from_row_slice(2, 2, &[
            -3., 1.5,
            0.5, -2.,
        ])),
        mat_b : nt::ContinuousInputMatrix(DMatrix::from_row_slice(2, 1, &[
            1.,
            0.,
        ])),
        vec_w : SystemNoiseVariances(DVector::from_row_slice(2, &[0., 0.,])),
        mat_c : MeasurementMatrix(DMatrix::from_row_slice(1, 2, &[0., 2.,])),
        vec_r : MeasurementNoiseVariances(DVector::from_row_slice(1, &[0.])),
        // mat_c : MeasurementMatrix::from_row_slice(2, 2, &[0., 2., 1., 0.]),
        // vec_r : MeasurementNoiseVariances::from_row_slice(2, &[0., 0.]),
    }
}
/// Example: two-state system with a singular A matrix (det = 0), no noise,
/// single input on the first state, measuring twice the second state.
pub fn example_model_2states_singular_stable() -> ContinuousLinearModelBuilder {
    ContinuousLinearModelBuilder {
        vec_x_init : nt::StateVector(DVector::from_row_slice(2, &[
            0.,
            0.,
        ])),
        // singular: eigenvalues 0 and -3.75 (trace -3.75, det 0)
        // (the previous comment "-3.5 and -1.5" was copy-pasted from the
        // regular example above)
        mat_a : nt::ContinuousSystemMatrix(DMatrix::from_row_slice(2, 2, &[
            -3., 1.5,
            1.5, -0.75,
        ])),
        mat_b : nt::ContinuousInputMatrix(DMatrix::from_row_slice(2, 1, &[
            1.,
            0.,
        ])),
        vec_w : SystemNoiseVariances(DVector::from_row_slice(2, &[0., 0.,])),
        mat_c : MeasurementMatrix(DMatrix::from_row_slice(1, 2, &[0., 2.,])),
        vec_r : MeasurementNoiseVariances(DVector::from_row_slice(1, &[0.])),
        // mat_c : MeasurementMatrix::from_row_slice(2, 2, &[0., 2., 1., 0.]),
        // vec_r : MeasurementNoiseVariances::from_row_slice(2, &[0., 0.]),
    }
}
use lex::Lex;
use lex::Token;
use std::iter::Peekable;
use std::result;
// Parser-local result type with plain-string error messages.
type Result<T> = result::Result<T, String>;
/// A parsed command line: the command plus its raw argument tokens.
#[derive(Debug)]
pub struct Invocation {
    command: Invocable,
    expression: Vec<Token>,
}
/// The command position of an invocation (a single morpheme token).
#[derive(Debug)]
pub struct Invocable {
    token: Token,
}
/// Recursive-descent parser over a peekable lexer token stream.
pub struct Parse<'a> {
    token_stream: Peekable<Lex<'a>>,
}
impl<'a> Parse<'a> {
    /// Wraps a lexer in a peekable stream ready for parsing.
    pub fn new(tokenizer: Lex) -> Parse {
        Parse {
            token_stream: tokenizer.peekable(),
        }
    }
    /// Parses a complete invocation and verifies all input was consumed.
    pub fn parse(&mut self) -> Result<Invocation> {
        let invocation = self.parse_invocation()?;
        // Token stream should be empty, or there's unexpected garbage at end
        // of input.
        if let Some(token) = self.token_stream.peek() {
            return Err(format!("Unexpected {:?}", token));
        }
        Ok(invocation)
    }
    /// An invocation is a command followed by its argument list.
    fn parse_invocation(&mut self) -> Result<Invocation> {
        let invocable = self.parse_invocable()?;
        let arguments = self.parse_list()?;
        Ok(Invocation {
            command: invocable,
            expression: arguments,
        })
    }
    /// The command position must be a single morpheme token.
    fn parse_invocable(&mut self) -> Result<Invocable> {
        match self.token_stream.next() {
            Some(token @ Token::Morpheme(_)) => Ok(Invocable { token }),
            Some(other) => Err(format!("Unexpected {:?}", other)),
            None => Err("Unexpected end of input".to_string()),
        }
    }
    /// Collects consecutive morpheme/string tokens as the argument list;
    /// stops (without consuming) at the first other token or end of input.
    fn parse_list(&mut self) -> Result<Vec<Token>> {
        let mut expr = Vec::new();
        loop {
            match self.token_stream.peek() {
                Some(&Token::Morpheme(_)) | Some(&Token::Str(_)) => {
                    if let Some(token) = self.token_stream.next() {
                        expr.push(token)
                    }
                }
                _ => return Ok(expr),
            }
        }
    }
}
/// Placeholder test: constructs a parser over "ls -al" but the assertion is
/// still commented out.
#[test]
fn test_parse() {
    let input = "ls -al";
    let parser = Parse::new(Lex::new(input));
    use lex::Token::*;
    // NOTE(review): `parse::ASTNode` does not appear in this module and the
    // assertion below references it — this import and the commented-out
    // assert look stale; confirm or remove.
    use parse::ASTNode::*;
    // assert_eq!(Ok(Invocation(Invocable(Morpheme("ls")), Expression(Some([Morpheme("-al")])))), parser.parse());
}
|
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
use whiteread::parse_line;
// 1e9+7, a common competitive-programming modulus (unused in this solution).
const ten97: usize = 1000000007;
/// Reads n and m, then two integer arrays, and prints the minimum absolute
/// difference |a_i - b_j| over all pairs, via a two-pointer walk over the
/// sorted arrays (only pairs on the merge path need to be considered).
fn main() {
    let (n, m): (usize, usize) = parse_line().unwrap();
    let mut aa: Vec<isize> = parse_line().unwrap();
    let mut bb: Vec<isize> = parse_line().unwrap();
    aa.sort();
    bb.sort();
    let mut ai = 0;
    let mut bi = 0;
    // Best difference seen so far, starting from the first pair.
    let mut ans = (aa[ai] - bb[bi]).abs();
    loop {
        // Both pointers at the end: every candidate pair has been seen.
        if ai == n - 1 && bi == m - 1 {
            break;
        }
        if ai == n - 1 {
            bi += 1;
            ans = min(ans, (aa[ai] - bb[bi]).abs());
        } else if bi == m - 1 {
            ai += 1;
            ans = min(ans, (aa[ai] - bb[bi]).abs());
        } else if aa[ai] > bb[bi] {
            // Advance the pointer at the smaller value to shrink the gap.
            bi += 1;
            ans = min(ans, (aa[ai] - bb[bi]).abs());
        } else if bb[bi] > aa[ai] {
            ai += 1;
            ans = min(ans, (aa[ai] - bb[bi]).abs());
        } else {
            // Equal values: the zero difference was already recorded; pick
            // the advance whose next pair stays closer.
            if aa[ai + 1] > bb[bi + 1] {
                bi += 1;
                ans = min(ans, (aa[ai] - bb[bi]).abs());
            } else {
                ai += 1;
                ans = min(ans, (aa[ai] - bb[bi]).abs());
            }
        }
    }
    println!("{}", ans);
}
|
mod test_env;
use rustcommon::redisaccessor_async;
use tokio;
use log::kv::Source;
/// Builds a Redis client from `test_env::ENV_CONFIG` when all four connection
/// keys are present; otherwise falls back to localhost defaults.
///
/// # Panics
/// Panics if a present `redis.port` / `redis.db` value fails to parse.
fn get_redis_client_test<'a>() -> redisaccessor_async::RedisAccessorAsync<'a> {
    let get_default = || redisaccessor_async::RedisAccessorAsync::new()
        .host("localhost")
        .port(6379)
        .passwd("")
        .db(0);
    // Idiomatic borrow instead of the old `let ref` binding.
    let env_map = &test_env::ENV_CONFIG;
    if env_map.contains_key("redis.host") &&
        env_map.contains_key("redis.port") &&
        env_map.contains_key("redis.passwd") &&
        env_map.contains_key("redis.db") {
        return redisaccessor_async::RedisAccessorAsync::new()
            .host(env_map.get("redis.host").unwrap())
            .port(env_map.get("redis.port").unwrap().parse::<u16>().unwrap())
            .passwd(env_map.get("redis.passwd").unwrap())
            .db(env_map.get("redis.db").unwrap().parse::<i64>().unwrap());
    }
    get_default()
}
/// Verifies `multi_setex` succeeds against a live Redis instance.
#[tokio::test]
async fn test_redis_async_multi_set() -> Result<(), String> {
    let mut redisaccessor = get_redis_client_test();
    let _ = redisaccessor.open_connection().await.unwrap();
    // (key, value, ttl-seconds) triples.
    let dataset = vec![("test1".to_string(), "jjj".to_string(), 300), ("test2".to_string(), "kkk".to_string(), 300)];
    let rst = redisaccessor.multi_setex(dataset).await;
    match rst {
        Ok(_) => Ok(()),
        Err(_) => Err(String::from("do redis_async_multi_set fail"))
    }
}
/// Verifies `multi_setnx_expire` succeeds against a live Redis instance.
#[tokio::test]
async fn test_redis_async_multi_setex_expire() -> Result<(), String> {
    let mut redisaccessor = get_redis_client_test();
    let _ = redisaccessor.open_connection().await.unwrap();
    // (key, value, ttl-seconds) triples.
    let dataset = vec![("test1_nx".to_string(), "jjj2".to_string(), 300), ("test2_nx".to_string(), "kkk2".to_string(), 360)];
    let rst = redisaccessor.multi_setnx_expire(dataset).await;
    match rst {
        Ok(_) => Ok(()),
        // Name the operation that actually ran (the previous message was
        // copy-pasted from the multi_set test above).
        Err(_) => Err(String::from("do redis_async_multi_setnx_expire fail"))
    }
}
|
extern crate chrono;
use chrono::{DateTime, TimeZone, Duration};
// One gigasecond, expressed in seconds.
const GIGASECOND: i64 = 1_000_000_000;
/// Returns the moment exactly one gigasecond (10^9 seconds) after
/// `datetime`, preserving the input's time zone.
pub fn after<T: TimeZone>(datetime: DateTime<T>) -> DateTime<T> {
    datetime + Duration::seconds(GIGASECOND)
}
|
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - RNG control register"]
    pub rng_cr: RNG_CR,
    #[doc = "0x04 - RNG status register"]
    pub rng_sr: RNG_SR,
    #[doc = "0x08 - The RNG_DR register is a read-only register that delivers a 32-bit random value when read. The content of this register is valid when DRDY= 1, even if RNGEN=0."]
    pub rng_dr: RNG_DR,
    _reserved3: [u8; 4usize],
    #[doc = "0x10 - RNG health test control register. NOTE(review): the generated SVD description duplicated RNG_DR's text; confirm against the device reference manual."]
    pub rng_htcr: RNG_HTCR,
}
#[doc = "RNG control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rng_cr](rng_cr) module"]
pub type RNG_CR = crate::Reg<u32, _RNG_CR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RNG_CR;
#[doc = "`read()` method returns [rng_cr::R](rng_cr::R) reader structure"]
impl crate::Readable for RNG_CR {}
#[doc = "`write(|w| ..)` method takes [rng_cr::W](rng_cr::W) writer structure"]
impl crate::Writable for RNG_CR {}
#[doc = "RNG control register"]
pub mod rng_cr;
#[doc = "RNG status register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rng_sr](rng_sr) module"]
pub type RNG_SR = crate::Reg<u32, _RNG_SR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RNG_SR;
#[doc = "`read()` method returns [rng_sr::R](rng_sr::R) reader structure"]
impl crate::Readable for RNG_SR {}
#[doc = "`write(|w| ..)` method takes [rng_sr::W](rng_sr::W) writer structure"]
impl crate::Writable for RNG_SR {}
#[doc = "RNG status register"]
pub mod rng_sr;
#[doc = "The RNG_DR register is a read-only register that delivers a 32-bit random value when read. The content of this register is valid when DRDY= 1, even if RNGEN=0.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rng_dr](rng_dr) module"]
pub type RNG_DR = crate::Reg<u32, _RNG_DR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RNG_DR;
#[doc = "`read()` method returns [rng_dr::R](rng_dr::R) reader structure"]
impl crate::Readable for RNG_DR {}
#[doc = "The RNG_DR register is a read-only register that delivers a 32-bit random value when read. The content of this register is valid when DRDY= 1, even if RNGEN=0."]
pub mod rng_dr;
#[doc = "RNG health test control register (RNG_HTCR). NOTE(review): the generated SVD description duplicated RNG_DR's text; confirm against the device reference manual.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rng_htcr](rng_htcr) module"]
pub type RNG_HTCR = crate::Reg<u32, _RNG_HTCR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RNG_HTCR;
#[doc = "`read()` method returns [rng_htcr::R](rng_htcr::R) reader structure"]
impl crate::Readable for RNG_HTCR {}
#[doc = "`write(|w| ..)` method takes [rng_htcr::W](rng_htcr::W) writer structure"]
impl crate::Writable for RNG_HTCR {}
#[doc = "RNG health test control register. NOTE(review): the generated SVD description duplicated RNG_DR's text; confirm against the reference manual."]
pub mod rng_htcr;
|
use std::error;
use std::fmt;
use std::fmt::Debug;
/// A FAT 8.3 root-directory entry, matching the 32-byte on-disk layout
/// (`repr(C)`; all multi-byte fields are stored as written).
#[derive(Clone)]
#[repr(C)]
pub struct RootEntry {
    pub filename: [u8; 8],  // base name, space-padded
    pub extension: [u8; 3], // extension, space-padded
    attrs: u8,              // attribute bit flags, see the is_* accessors
    reserved: u16,
    pub creation_time: u16,
    pub creation_date: u16,
    pub last_access_date: u16,
    pub hi_first_lcluster: u16, // high 16 bits of first cluster (FAT32 only)
    pub last_write_time: u16,
    pub last_write_date: u16,
    pub lo_first_lcluster: u16, // low 16 bits of first cluster
    pub file_size: u32, // in bytes
}
#[allow(dead_code)]
impl RootEntry {
    /// Create a new empty FAT root directory entry: name and extension
    /// blank (space-filled, per the FAT convention), everything else zero.
    pub fn new() -> RootEntry {
        RootEntry {
            filename: [b' '; 8],
            extension: [b' '; 3],
            attrs: 0,
            reserved: 0,
            creation_time: 0,
            creation_date: 0,
            last_access_date: 0,
            hi_first_lcluster: 0,
            last_write_time: 0,
            last_write_date: 0,
            lo_first_lcluster: 0,
            file_size: 0,
        }
    }
    /// Get the filename as "NAME.EXT" with the space padding stripped from
    /// both parts; the dot is omitted when the extension is blank.
    ///
    /// (Previously the raw space-padded extension bytes were appended after
    /// an unconditional dot, so an entry set from "a.b" read back as
    /// "A.B  ".)
    pub fn filename(&self) -> Result<String, Box<error::Error>> {
        let mut name: Vec<u8> = self
            .filename
            .iter()
            .cloned()
            .take_while(|&c| c != b' ')
            .collect();
        let ext: Vec<u8> = self
            .extension
            .iter()
            .cloned()
            .take_while(|&c| c != b' ')
            .collect();
        if !ext.is_empty() {
            name.push(b'.');
            name.extend(ext);
        }
        match String::from_utf8(name) {
            Ok(s) => Ok(s),
            Err(err) => Err(From::from(err)),
        }
    }
    /// Set the filename from a "name.ext" string (at most 8+3 characters);
    /// both parts are uppercased and space-padded.
    ///
    /// NOTE(review): non-ASCII input may change byte length under
    /// `to_uppercase` and panic in `copy_from_slice` — confirm callers only
    /// pass ASCII names.
    pub fn set_filename(&mut self, filename: String)
        -> Result<(), Box<error::Error>>
    {
        let parts: Vec<_> = filename.split('.').collect();
        if parts.len() != 2 || parts[0].len() > 8 || parts[1].len() > 3 {
            return Err(From::from(format!("bad filename: \"{}\"", filename)));
        }
        // Pad out short filenames to proper length
        let new_fn = format!("{:8}", parts[0].to_uppercase());
        let new_ext = format!("{:3}", parts[1].to_uppercase());
        self.filename.copy_from_slice(new_fn.as_bytes());
        self.extension.copy_from_slice(new_ext.as_bytes());
        Ok(())
    }
    /// Set the file size in bytes.
    pub fn set_size(&mut self, bytes: u32)
        -> Result<(), Box<error::Error>>
    {
        self.file_size = bytes;
        Ok(())
    }
    /// Gets the logical entry cluster (high and low halves combined).
    pub fn entry_cluster(&self) -> u32 {
        (self.hi_first_lcluster as u32) << 16
            | self.lo_first_lcluster as u32
    }
    /// Sets the logical entry cluster; the high half is only written for
    /// values that do not fit in 16 bits.
    pub fn set_entry_cluster(&mut self, cluster_num: u32)
        -> Result<(), Box<error::Error>>
    {
        self.lo_first_lcluster = (cluster_num & 0xFFFF) as u16;
        if cluster_num > u16::max_value() as u32 {
            // Only supported on FAT32
            self.hi_first_lcluster = (cluster_num >> 16) as u16;
        }
        Ok(())
    }
    // Attribute flag accessors — bit positions per the FAT directory spec.
    pub fn is_read_only(&self) -> bool { self.attrs & 0x01 == 0x01 }
    pub fn is_hidden(&self) -> bool { self.attrs & 0x02 == 0x02 }
    pub fn is_system(&self) -> bool { self.attrs & 0x04 == 0x04 }
    pub fn is_volume_label(&self) -> bool { self.attrs & 0x08 == 0x08 }
    pub fn is_subdir(&self) -> bool { self.attrs & 0x10 == 0x10 }
    pub fn is_archive(&self) -> bool { self.attrs & 0x20 == 0x20 }
    /// An entry is free when its first name byte is 0x00 (never used) or
    /// 0xE5 (deleted).
    pub fn is_free(&self) -> bool {
        self.filename[0] == 0 || self.filename[0] == 0xe5
    }
    /// A leading 0x00 also means no later entry in the directory is in use.
    pub fn rest_are_free(&self) -> bool {
        self.filename[0] == 0
    }
    pub fn set_is_read_only(&mut self, on: bool) {
        self.attrs = (self.attrs & !0x01) | if on { 0x01 } else { 0 }
    }
    pub fn set_is_hidden(&mut self, on: bool) {
        self.attrs = (self.attrs & !0x02) | if on { 0x02 } else { 0 }
    }
    pub fn set_is_system(&mut self, on: bool) {
        self.attrs = (self.attrs & !0x04) | if on { 0x04 } else { 0 }
    }
    pub fn set_is_volume_label(&mut self, on: bool) {
        self.attrs = (self.attrs & !0x08) | if on { 0x08 } else { 0 }
    }
    pub fn set_is_subdir(&mut self, on: bool) {
        self.attrs = (self.attrs & !0x10) | if on { 0x10 } else { 0 }
    }
    pub fn set_is_archive(&mut self, on: bool) {
        self.attrs = (self.attrs & !0x20) | if on { 0x20 } else { 0 }
    }
    /// Raw space-padded "FILENAME.EXT" with no trimming, or "BAD FILENAME"
    /// if the stored bytes are not valid UTF-8.
    pub fn filename_full(&self) -> String {
        let filename = String::from_utf8(
            Vec::from(&self.filename[..])
        );
        let extension = String::from_utf8(
            Vec::from(&self.extension[..])
        );
        if filename.is_ok() && extension.is_ok() {
            format!(
                "{}.{}",
                filename.unwrap(),
                extension.unwrap()
            )
        } else {
            "BAD FILENAME".to_string()
        }
    }
}
impl Debug for RootEntry {
    /// Struct-style debug output; the name falls back to "bad filename" when
    /// the stored bytes are not valid UTF-8, and the size prints in hex.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RootEntry")
            .field(
                "filename",
                &self.filename().unwrap_or("bad filename".to_string())
            )
            .field("attrs", &self.attrs)
            .field("creation_time", &self.creation_time)
            .field("creation_date", &self.creation_date)
            .field("last_access_date", &self.last_access_date)
            .field("hi_first_lcluster", &self.hi_first_lcluster)
            .field("last_write_time", &self.last_write_time)
            .field("last_write_date", &self.last_write_date)
            .field("lo_first_lcluster", &self.lo_first_lcluster)
            .field("file_size", &format!("{:#x}", self.file_size))
            .finish()
    }
}
|
use serde_json;
pub use serde_json::Value;
pub use self::grammar::{expression, ParseError};
//# format_string := <text> [ format <text> ] *
// format := '{' [ argument ] [ ':' format_spec ] '}'
// argument := integer | identifier
//
// format_spec := [[fill]align][sign]['#'][0][width]['.' precision][type]
// fill := character
// align := '<' | '^' | '>'
// sign := '+' | '-'
// width := count
// precision := count | '*'
// type := identifier | ''
// count := parameter | integer
// parameter := integer '$'
// Literal brace tokens produced when un-escaping `{{` / `}}`.
const OPENED_BRACE: &'static str = "{";
const CLOSED_BRACE: &'static str = "}";
// PEG grammar for the format-string mini-language sketched above: a pattern
// is a sequence of literal text runs and `{key.path}` placeholders, where
// each path segment is a numeric array index or an identifier.
peg! grammar(r#"
use super::{Key, Token, OPENED_BRACE, CLOSED_BRACE};
#[pub]
expression -> Vec<Token<'input>>
    = (format / text)+
text -> Token<'input>
    = "{{" { Token::Literal(OPENED_BRACE) }
    / "}}" { Token::Literal(CLOSED_BRACE) }
    / [^{}]+ { Token::Literal(match_str) }
format -> Token<'input>
    = "{" keys:(name ++ ".") "}" {
        Token::Placeholder(&match_str[1..match_str.len() - 1], keys)
    }
name -> Key<'input>
    = [0-9]+ { Key::Id(match_str.parse().expect("expect number")) }
    / [a-zA-Z][a-zA-Z0-9]* { Key::Name(match_str) }
"#);
// TODO: Format spec.
/// One segment of a placeholder path: a numeric array index or a member name.
#[derive(Debug, Clone, PartialEq)]
pub enum Key<'a> {
    Id(usize),
    Name(&'a str),
}
/// A parsed pattern element: literal text, or a placeholder carrying its
/// original text (between the braces) and the parsed key path.
#[derive(Debug, Clone, PartialEq)]
pub enum Token<'a> {
    Literal(&'a str),
    Placeholder(&'a str, Vec<Key<'a>>),
}
/// A compiled pattern: the source text plus its parsed token list.
pub struct Generator<'a> {
    pattern: &'a str,
    tokens: Vec<Token<'a>>,
}
/// Errors produced while rendering a pattern against a JSON value.
#[derive(Debug)]
pub enum Error<'a> {
    /// A placeholder path did not resolve in the input value.
    KeyNotFound(&'a str),
    Serialization(serde_json::Error),
}
impl<'a> From<serde_json::Error> for Error<'a> {
    fn from(err: serde_json::Error) -> Error<'a> {
        Error::Serialization(err)
    }
}
/// Resolves one path segment against a JSON value: array index lookup for
/// `Key::Id`, object member lookup for `Key::Name`.
/// NOTE(review): `Value::find` is the pre-1.0 serde_json accessor (later
/// renamed `get`) — this file appears pinned to an old serde_json.
fn find<'r>(value: &'r Value, key: &Key) -> Option<&'r Value> {
    match *key {
        Key::Id(id) => {
            if let &Value::Array(ref value) = value {
                value.get(id)
            } else {
                None
            }
        }
        Key::Name(name) => value.find(name),
    }
}
impl<'a> Generator<'a> {
    /// Parses `pattern` into tokens; fails on malformed placeholder syntax.
    pub fn new(pattern: &'a str) -> Result<Generator<'a>, ParseError> {
        let result = Generator {
            pattern: pattern,
            tokens: expression(pattern)?,
        };
        Ok(result)
    }
    /// Returns the original pattern text.
    pub fn pattern(&self) -> &str {
        self.pattern
    }
    /// Renders the pattern against `val`: literals are copied through,
    /// placeholders are resolved key-by-key into `val` and the final value
    /// is serialized as JSON.
    pub fn consume(&self, val: &Value) -> Result<String, Error> {
        let mut res = String::new();
        for token in &self.tokens {
            match *token {
                Token::Literal(literal) => res.push_str(literal),
                Token::Placeholder(name, ref keys) => {
                    // Walk the key path down into the JSON value.
                    let mut cur = val;
                    for key in keys {
                        match find(&cur, key) {
                            Some(val) => {
                                cur = val;
                            }
                            None => {
                                return Err(Error::KeyNotFound(name));
                            }
                        }
                    }
                    res.push_str(&serde_json::to_string(&cur)?[..]);
                }
            }
        }
        Ok(res)
    }
}
#[cfg(test)]
mod test {
    use super::{Generator, Key, Token};
    /// A pattern with no braces parses to a single literal token.
    #[test]
    fn literal_ast() {
        let generator = Generator::new("hello").unwrap();
        assert_eq!(vec![Token::Literal("hello")], generator.tokens);
    }
    /// A bare `{name}` parses to one placeholder with a single name key.
    #[test]
    fn placeholder_ast() {
        let generator = Generator::new("{hello}").unwrap();
        let expected = vec![Token::Placeholder("hello", vec![Key::Name("hello")])];
        assert_eq!(expected, generator.tokens);
    }
}
|
pub mod cloud;
pub mod device;
pub mod emeter;
pub mod sys;
pub mod sysinfo;
pub mod time;
pub mod wlan;
|
// render_system.rs
//
// Copyright (c) 2019, University of Minnesota
//
// Author: Bridger Herman (herma582@umn.edu)
//! Renders with WebGL, using wasm-bindgen and web-sys
use std::usize;
use wasm_bindgen::prelude::JsValue;
use wasm_bindgen::JsCast;
use web_sys::WebGl2RenderingContext;
use crate::frame_buffer::FrameBuffer;
use crate::scene::Scene;
use crate::shader::{load_shader, Shader};
use crate::window::DEFAULT_WINDOW_SIZE;
/// Newtype owning the WebGL2 rendering context so it can live in shared state.
pub struct WebGlContextWrapper {
    pub gl: WebGl2RenderingContext,
}
impl Default for WebGlContextWrapper {
    /// Looks up the page's `#canvas` element and extracts its WebGL2 context.
    ///
    /// # Panics
    /// Panics if the document, the canvas element, or a WebGL2 context is
    /// unavailable.
    fn default() -> Self {
        let document = web_sys::window().unwrap().document().unwrap();
        let canvas = document.get_element_by_id("canvas").unwrap();
        let canvas: web_sys::HtmlCanvasElement = canvas
            .dyn_into::<web_sys::HtmlCanvasElement>()
            .expect("Unable to get canvas");
        let gl: WebGl2RenderingContext = canvas
            .get_context("webgl2")
            .unwrap()
            .unwrap()
            .dyn_into::<WebGl2RenderingContext>()
            .expect("Unable to get WebGL context");
        Self { gl }
    }
}
// SAFETY(review): browser WebGL contexts are not thread-safe in general;
// this presumably relies on the wasm build running single-threaded — confirm.
unsafe impl Send for WebGlContextWrapper {}
/// Owns the scene, the optional post-processing shader, and the offscreen
/// frame buffer used by the two-pass render in `render`.
pub struct RenderSystem {
    scene: Option<Scene>,
    post_processing_shader: Option<Shader>,
    frame_buffer: FrameBuffer,
}
// SAFETY(review): presumably sound only because wasm runs single-threaded
// here — confirm.
unsafe impl Send for RenderSystem {}
impl RenderSystem {
    /// Two-pass render: draw all meshes into the offscreen frame buffer,
    /// then splat that texture onto the viewport through the post-processing
    /// shader. No-op until both a scene and the shader are loaded.
    pub fn render(&self) {
        if self.scene.is_none() || self.post_processing_shader.is_none() {
            return;
        }
        self.frame_buffer.bind();
        wre_gl!().clear_color(0.0, 0.0, 0.0, 1.0);
        wre_gl!().clear(
            WebGl2RenderingContext::COLOR_BUFFER_BIT
                | WebGl2RenderingContext::DEPTH_BUFFER_BIT,
        );
        let (width, height) = DEFAULT_WINDOW_SIZE;
        wre_gl!().viewport(0, 0, width as i32, height as i32);
        // Save a texture image of rendered meshes to the frame buffer
        self.render_meshes();
        self.frame_buffer.unbind();
        // Splat that texture onto the viewport
        if let Some(shader) = &self.post_processing_shader {
            self.frame_buffer.render(&shader.program, &self.scene);
        }
    }
/// Pass 1: Forward render all the meshes
///
/// For every mesh in the scene, draws one instance per entity the mesh is
/// attached to: binds the entity material's shader and the mesh's VAO,
/// uploads camera / transform / lighting / material / texture uniforms, then
/// issues a single `draw_arrays` call. Does nothing when no scene is loaded.
fn render_meshes(&self) {
    if let Some(scene) = &self.scene {
        for (_path, mesh) in &scene.meshes {
            // Don't render anything that's not attached to an entity
            for eid in &mesh.attached_to {
                // Load the shader and VAO for this material and model
                let shader_id = wre_entities!(*eid).material().shader_id;
                let shader = scene
                    .get_shader_by_id(shader_id)
                    .unwrap_or_else(|| {
                        // A missing shader is unrecoverable for this draw.
                        error_panic!("No shader with id: {}", shader_id);
                    });
                wre_gl!().use_program(Some(&shader.program));
                wre_gl!().bind_vertex_array(Some(&mesh.vao));
                // Send the camera position for lighting information
                let camera_position: Vec<f32> =
                    wre_camera!().transform().position().into();
                let camera_position_location = wre_gl!()
                    .get_uniform_location(
                        &shader.program,
                        "uni_camera_position",
                    );
                wre_gl!().uniform3fv_with_f32_array(
                    camera_position_location.as_ref(),
                    &camera_position,
                );
                // Send the model matrix to the GPU
                let model_matrix = wre_entities!(*eid).transform().matrix();
                let model_uniform_location = wre_gl!()
                    .get_uniform_location(&shader.program, "uni_model");
                wre_gl!().uniform_matrix4fv_with_f32_array(
                    model_uniform_location.as_ref(),
                    false,
                    &model_matrix.to_flat_vec(),
                );
                // Send the normal matrix (inverse transpose of model matrix) to the
                // GPU for calculating transform of normals
                let normal_matrix = model_matrix.inverse().transpose();
                let normal_uniform_location = wre_gl!()
                    .get_uniform_location(&shader.program, "uni_normal");
                wre_gl!().uniform_matrix4fv_with_f32_array(
                    normal_uniform_location.as_ref(),
                    false,
                    &normal_matrix.to_flat_vec(),
                );
                // Send the camera's view/projection matrix to the GPU
                // (premultiplied on the CPU so the shader does one multiply).
                let view_matrix = wre_camera!().view_matrix();
                let projection_matrix = wre_camera!().projection_matrix();
                let projection_view = projection_matrix * view_matrix;
                let pv_uniform_location = wre_gl!().get_uniform_location(
                    &shader.program,
                    "uni_projection_view",
                );
                wre_gl!().uniform_matrix4fv_with_f32_array(
                    pv_uniform_location.as_ref(),
                    false,
                    &projection_view.to_flat_vec(),
                );
                // Send all the point lights over to the shader. The light
                // count is always uploaded; the attribute arrays only when
                // at least one point light exists.
                let num_point_light_location = wre_gl!()
                    .get_uniform_location(
                        &shader.program,
                        "uni_num_point_lights",
                    );
                wre_gl!().uniform1i(
                    num_point_light_location.as_ref(),
                    scene.point_lights.len() as i32,
                );
                if scene.point_lights.len() > 0 {
                    // Flatten per-light vectors into one contiguous f32
                    // array, as expected by the *v uniform uploads.
                    let point_light_positions: Vec<f32> = scene
                        .point_lights
                        .iter()
                        .map(|light| -> Vec<f32> { light.position.into() })
                        .flatten()
                        .collect();
                    let point_light_positions_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_point_light_positions",
                        );
                    wre_gl!().uniform3fv_with_f32_array(
                        point_light_positions_location.as_ref(),
                        &point_light_positions,
                    );
                    let point_light_colors: Vec<f32> = scene
                        .point_lights
                        .iter()
                        .map(|light| -> Vec<f32> { light.color.into() })
                        .flatten()
                        .collect();
                    let point_light_colors_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_point_light_colors",
                        );
                    wre_gl!().uniform3fv_with_f32_array(
                        point_light_colors_location.as_ref(),
                        &point_light_colors,
                    );
                    let point_light_halo_intensity: Vec<f32> = scene
                        .point_lights
                        .iter()
                        .map(|light| -> f32 { light.halo_intensity })
                        .collect();
                    let point_light_halo_intensity_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_point_light_halo_intensity",
                        );
                    wre_gl!().uniform1fv_with_f32_array(
                        point_light_halo_intensity_location.as_ref(),
                        &point_light_halo_intensity,
                    );
                }
                // Send all the spot lights over to the shader (same
                // upload pattern as the point lights above).
                let num_spot_light_location = wre_gl!()
                    .get_uniform_location(
                        &shader.program,
                        "uni_num_spot_lights",
                    );
                wre_gl!().uniform1i(
                    num_spot_light_location.as_ref(),
                    scene.spot_lights.len() as i32,
                );
                if scene.spot_lights.len() > 0 {
                    let spot_light_positions: Vec<f32> = scene
                        .spot_lights
                        .iter()
                        .map(|light| -> Vec<f32> { light.position.into() })
                        .flatten()
                        .collect();
                    let spot_light_positions_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_spot_light_positions",
                        );
                    wre_gl!().uniform3fv_with_f32_array(
                        spot_light_positions_location.as_ref(),
                        &spot_light_positions,
                    );
                    let spot_light_directions: Vec<f32> = scene
                        .spot_lights
                        .iter()
                        .map(|light| -> Vec<f32> { light.direction.into() })
                        .flatten()
                        .collect();
                    let spot_light_directions_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_spot_light_directions",
                        );
                    wre_gl!().uniform3fv_with_f32_array(
                        spot_light_directions_location.as_ref(),
                        &spot_light_directions,
                    );
                    let spot_light_colors: Vec<f32> = scene
                        .spot_lights
                        .iter()
                        .map(|light| -> Vec<f32> { light.color.into() })
                        .flatten()
                        .collect();
                    let spot_light_colors_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_spot_light_colors",
                        );
                    wre_gl!().uniform3fv_with_f32_array(
                        spot_light_colors_location.as_ref(),
                        &spot_light_colors,
                    );
                    // Inner/outer cone angles for the spot falloff.
                    let spot_light_angle_inside: Vec<f32> = scene
                        .spot_lights
                        .iter()
                        .map(|light| -> f32 { light.angle_inside })
                        .collect();
                    let spot_light_angle_inside_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_spot_light_angle_inside",
                        );
                    wre_gl!().uniform1fv_with_f32_array(
                        spot_light_angle_inside_location.as_ref(),
                        &spot_light_angle_inside,
                    );
                    let spot_light_angle_outside: Vec<f32> = scene
                        .spot_lights
                        .iter()
                        .map(|light| -> f32 { light.angle_outside })
                        .collect();
                    let spot_light_angle_outside_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_spot_light_angle_outside",
                        );
                    wre_gl!().uniform1fv_with_f32_array(
                        spot_light_angle_outside_location.as_ref(),
                        &spot_light_angle_outside,
                    );
                    let spot_light_halo_intensity: Vec<f32> = scene
                        .spot_lights
                        .iter()
                        .map(|light| -> f32 { light.halo_intensity })
                        .collect();
                    let spot_light_halo_intensity_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_spot_light_halo_intensity",
                        );
                    wre_gl!().uniform1fv_with_f32_array(
                        spot_light_halo_intensity_location.as_ref(),
                        &spot_light_halo_intensity,
                    );
                }
                // Send the material's color to the GPU
                let color: [f32; 4] =
                    wre_entities!(*eid).material().color.into();
                let color_uniform_location = wre_gl!()
                    .get_uniform_location(&shader.program, "uni_color");
                wre_gl!().uniform4fv_with_f32_array(
                    color_uniform_location.as_ref(),
                    &color,
                );
                // Send the material's specularity to the GPU
                let specular: [f32; 4] =
                    wre_entities!(*eid).material().specular.into();
                let specular_uniform_location = wre_gl!()
                    .get_uniform_location(&shader.program, "uni_specular");
                wre_gl!().uniform4fv_with_f32_array(
                    specular_uniform_location.as_ref(),
                    &specular,
                );
                // If there's a texture, send it to the GPU on texture
                // unit 0 and flag its presence to the shader via
                // `uni_use_texture`; otherwise clear the flag.
                if let Some(texture_id) =
                    wre_entities!(*eid).material().texture_id()
                {
                    wre_gl!()
                        .active_texture(WebGl2RenderingContext::TEXTURE0);
                    wre_gl!().bind_texture(
                        WebGl2RenderingContext::TEXTURE_2D,
                        Some(
                            &scene
                                .get_texture_by_id(texture_id)
                                .unwrap()
                                .tex,
                        ),
                    );
                    let tex_uniform_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_texture",
                        );
                    // Sampler uniform points at texture unit 0.
                    wre_gl!().uniform1i(tex_uniform_location.as_ref(), 0);
                    let use_tex_uniform_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_use_texture",
                        );
                    wre_gl!()
                        .uniform1i(use_tex_uniform_location.as_ref(), 1);
                } else {
                    let use_tex_uniform_location = wre_gl!()
                        .get_uniform_location(
                            &shader.program,
                            "uni_use_texture",
                        );
                    wre_gl!()
                        .uniform1i(use_tex_uniform_location.as_ref(), 0);
                }
                // Draw the geometry
                wre_gl!().draw_arrays(
                    WebGl2RenderingContext::TRIANGLES,
                    0,
                    mesh.num_vertices,
                );
                // Unbind the program so stale state doesn't leak into the
                // next draw.
                wre_gl!().use_program(None);
            }
        }
    }
}
/// Immutable access to the currently loaded scene, if any.
pub fn scene(&self) -> &Option<Scene> {
    &self.scene
}
/// Mutable access to the currently loaded scene, if any.
pub fn mut_scene(&mut self) -> &mut Option<Scene> {
    &mut self.scene
}
/// Add a scene to the rendering system, consuming scene
///
/// Any previously loaded scene is replaced and dropped.
pub fn add_scene(&mut self, scene: Scene) {
    self.scene = Some(scene);
}
/// Load the post-processing shaders
///
/// Fetches and compiles the "volumetric" post-processing shader program and
/// stores it on the render system, replacing any previous one.
///
/// # Errors
/// Propagates any `JsValue` error raised while loading/compiling the shader.
pub async fn load_post_processing_shader(&mut self) -> Result<(), JsValue> {
    let post_pro_name = "volumetric";
    // `usize::MAX` (the modern spelling of the deprecated
    // `usize::max_value()`) is used as a sentinel id so the
    // post-processing shader can never collide with a material shader id.
    let shader = load_shader(post_pro_name, usize::MAX).await?;
    self.post_processing_shader = Some(shader);
    info!("Initialized post processing shader {}", post_pro_name);
    Ok(())
}
}
impl Default for RenderSystem {
    /// Builds an empty render system (no scene, no post-processing shader)
    /// with a default frame buffer.
    fn default() -> Self {
        // Enable depth testing for proper object occlusion
        // NOTE(review): mutating global GL state inside `Default::default`
        // assumes the GL context already exists when the first RenderSystem
        // is constructed — confirm this initialization ordering.
        wre_gl!().enable(WebGl2RenderingContext::DEPTH_TEST);
        Self {
            scene: None,
            post_processing_shader: None,
            frame_buffer: FrameBuffer::default(),
        }
    }
}
|
use rand::rngs::SmallRng;
use rand::{Rng, SeedableRng};
/// Roll two dice with the given RNG, each producing a value in `1..7`
/// (i.e. 1 through 6), and return the pair in roll order.
fn roll2die<T: Rng>(mut rng: T) -> (i32, i32) {
    // One die roll; `gen_range(low, high)` excludes the upper bound.
    let mut roll = || rng.gen_range(1, 7);
    let first = roll();
    let second = roll();
    (first, second)
}
/// Simulate one million rounds of the dice game with a fixed seed and print
/// the accumulated results in one go.
fn main() {
    // Seeded RNG so every execution produces identical output.
    let mut rng = SmallRng::seed_from_u64(1);
    // Pre-size the output buffer to avoid repeated reallocation.
    let mut output = String::with_capacity(10_000_000);
    (0..1_000_000).for_each(|_| run(&mut rng, &mut output));
    print!("{}", output);
}
/// Play one full round of the dice game, appending the outcome to `buffer`.
///
/// Come-out roll: 7 or 11 wins, 2/3/12 loses, any other total becomes the
/// "point". Point phase: re-rolling the point wins, a 7 loses, and any other
/// total becomes the new point.
fn run<T: Rng>(mut thread_rng: T, buffer: &mut String) {
    let mut pt = match game(&mut thread_rng, &[7, 11], &[2, 3, 12]) {
        GameStates::Win => {
            // NOTE(review): this message contains a trailing space
            // ("you win! \n"), unlike the point-phase win message below.
            // It pads the string to the same width as "you lose!\n" —
            // confirm whether the inconsistency is intentional.
            buffer.push_str("you win! \n");
            return;
        }
        GameStates::Lose => {
            buffer.push_str("you lose!\n");
            return;
        }
        GameStates::Neither(num) => num,
    };
    loop {
        match game(&mut thread_rng, &[pt], &[7]) {
            GameStates::Win => {
                buffer.push_str("you win!\n");
                break;
            }
            GameStates::Lose => {
                buffer.push_str("you lose!\n");
                break;
            }
            // NOTE(review): every non-terminal total replaces the point;
            // in standard craps the point stays fixed once established —
            // confirm this rule variation is intended.
            GameStates::Neither(num) => pt = num,
        }
    }
}
/// Outcome of a single roll resolution in the dice game.
enum GameStates {
    // The roll total matched a winning value.
    Win,
    // The roll total matched a losing value.
    Lose,
    // Neither win nor lose; carries the roll total forward as the point.
    Neither(i32),
}
/// Resolve a single two-die roll: `Win` if the total appears in `win`,
/// `Lose` if it appears in `lose`, otherwise `Neither` carrying the total.
fn game<T: Rng>(rng: T, win: &[i32], lose: &[i32]) -> GameStates {
    let (first_roll, second_roll) = roll2die(rng);
    let total = first_roll + second_roll;
    if win.contains(&total) {
        GameStates::Win
    } else if lose.contains(&total) {
        GameStates::Lose
    } else {
        GameStates::Neither(total)
    }
}
|
pub mod base_types;
pub mod core_state;
pub mod core_types;
pub mod crypto;
pub mod driver;
pub mod mempool;
pub mod messages;
pub mod net;
#[macro_use]
extern crate serde_big_array;
big_array! { BigArray; }
#[macro_use]
extern crate failure;
/// Placeholder binary entry point; the real functionality lives in the
/// public modules declared above.
fn main() {
    println!("Hello, world!");
}
|
use std::borrow::Cow;
use anyhow::Context;
use pathfinder_common::StarknetVersion;
/// Version string of the Sierra→CASM compiler, injected at build time via
/// the `SIERRA_CASM_COMPILER_VERSION` environment variable.
pub const COMPILER_VERSION: &str = env!("SIERRA_CASM_COMPILER_VERSION");
/// Compile a Sierra class definition into CASM.
///
/// The class representation expected by the compiler doesn't match the representation used
/// by the feeder gateway for Sierra classes, so we have to convert the JSON to something
/// that can be parsed into the expected input format for the compiler.
///
/// The compiler version is selected from the Starknet `version` the class
/// was declared with; see the strict-comparison notes in the match below.
pub fn compile_to_casm(
    sierra_definition: &[u8],
    version: &StarknetVersion,
) -> anyhow::Result<Vec<u8>> {
    let definition = serde_json::from_slice::<FeederGatewayContractClass<'_>>(sierra_definition)
        .context("Parsing Sierra class")?;
    const V_0_11_0: semver::Version = semver::Version::new(0, 11, 0);
    const V_0_11_1: semver::Version = semver::Version::new(0, 11, 1);
    const V_0_11_2: semver::Version = semver::Version::new(0, 11, 2);
    // All comparisons are strict (`>`), so each boundary version itself
    // falls into the NEXT-older compiler: e.g. exactly 0.11.2 selects
    // `v1_1_1`, and exactly 0.11.0 falls through to `v1_0_0_alpha6`.
    match version
        .parse_as_semver()
        .context("Deciding on compiler version")?
    {
        Some(v) if v > V_0_11_2 => v2::compile(definition),
        Some(v) if v > V_0_11_1 => v1_1_1::compile(definition),
        Some(v) if v > V_0_11_0 => v1_0_0_rc0::compile(definition),
        // Older versions, or a version that did not parse as semver, use
        // the oldest supported compiler.
        _ => v1_0_0_alpha6::compile(definition),
    }
}
/// Compile a Sierra class definition to CASM _with the latest compiler we support_.
///
/// Execution depends on our ability to compile a Sierra class to CASM for which we
/// always want to use the latest compiler.
pub fn compile_to_casm_with_latest_compiler(sierra_definition: &[u8]) -> anyhow::Result<Vec<u8>> {
    // Parse the feeder-gateway JSON, then hand it straight to the newest
    // (v2) compiler backend.
    let definition: FeederGatewayContractClass<'_> =
        serde_json::from_slice(sierra_definition).context("Parsing Sierra class")?;
    v2::compile(definition)
}
/// CASM compilation backend pinned to compiler v1.0.0-alpha6 (classes
/// declared up to and including Starknet 0.11.0).
mod v1_0_0_alpha6 {
    use anyhow::Context;
    use casm_compiler_v1_0_0_alpha6::allowed_libfuncs::{
        validate_compatible_sierra_version, ListSelector,
    };
    use casm_compiler_v1_0_0_alpha6::casm_contract_class::CasmContractClass;
    use casm_compiler_v1_0_0_alpha6::contract_class::ContractClass;
    use super::FeederGatewayContractClass;
    /// Rebuild the feeder-gateway JSON into the shape this compiler
    /// expects. The ABI is intentionally dropped (replaced by `[]`).
    impl<'a> TryFrom<FeederGatewayContractClass<'a>> for ContractClass {
        type Error = serde_json::Error;
        fn try_from(value: FeederGatewayContractClass<'a>) -> Result<Self, Self::Error> {
            let json = serde_json::json!({
                "abi": [],
                "sierra_program": value.sierra_program,
                "contract_class_version": value.contract_class_version,
                "entry_points_by_type": value.entry_points_by_type,
            });
            serde_json::from_value::<ContractClass>(json)
        }
    }
    /// Compile `definition` to CASM, returning the serialized CASM class.
    pub(super) fn compile(definition: FeederGatewayContractClass<'_>) -> anyhow::Result<Vec<u8>> {
        let sierra_class: ContractClass = definition
            .try_into()
            .context("Converting to Sierra class")?;
        // Reject classes that use libfuncs outside the allowed list
        // before attempting compilation.
        validate_compatible_sierra_version(
            &sierra_class,
            ListSelector::ListName(
                casm_compiler_v1_0_0_alpha6::allowed_libfuncs::DEFAULT_EXPERIMENTAL_LIBFUNCS_LIST
                    .to_string(),
            ),
        )
        .context("Validating Sierra class")?;
        let casm_class = CasmContractClass::from_contract_class(sierra_class, true)
            .context("Compiling to CASM")?;
        let casm_definition = serde_json::to_vec(&casm_class)?;
        Ok(casm_definition)
    }
}
/// CASM compilation backend pinned to compiler v1.0.0-rc0 (classes
/// declared after Starknet 0.11.0, up to and including 0.11.1).
mod v1_0_0_rc0 {
    use anyhow::Context;
    use casm_compiler_v1_0_0_rc0::allowed_libfuncs::{
        validate_compatible_sierra_version, ListSelector,
    };
    use casm_compiler_v1_0_0_rc0::casm_contract_class::CasmContractClass;
    use casm_compiler_v1_0_0_rc0::contract_class::ContractClass;
    use super::FeederGatewayContractClass;
    /// Rebuild the feeder-gateway JSON into the shape this compiler
    /// expects. The ABI is intentionally dropped (replaced by `[]`).
    impl<'a> TryFrom<FeederGatewayContractClass<'a>> for ContractClass {
        type Error = serde_json::Error;
        fn try_from(value: FeederGatewayContractClass<'a>) -> Result<Self, Self::Error> {
            let json = serde_json::json!({
                "abi": [],
                "sierra_program": value.sierra_program,
                "contract_class_version": value.contract_class_version,
                "entry_points_by_type": value.entry_points_by_type,
            });
            serde_json::from_value::<ContractClass>(json)
        }
    }
    /// Compile `definition` to CASM, returning the serialized CASM class.
    pub(super) fn compile(definition: FeederGatewayContractClass<'_>) -> anyhow::Result<Vec<u8>> {
        let sierra_class: ContractClass = definition
            .try_into()
            .context("Converting to Sierra class")?;
        // Reject classes that use libfuncs outside the allowed list
        // before attempting compilation.
        validate_compatible_sierra_version(
            &sierra_class,
            ListSelector::ListName(
                casm_compiler_v1_0_0_rc0::allowed_libfuncs::DEFAULT_EXPERIMENTAL_LIBFUNCS_LIST
                    .to_string(),
            ),
        )
        .context("Validating Sierra class")?;
        let casm_class = CasmContractClass::from_contract_class(sierra_class, true)
            .context("Compiling to CASM")?;
        let casm_definition = serde_json::to_vec(&casm_class)?;
        Ok(casm_definition)
    }
}
/// CASM compilation backend pinned to compiler v1.1.1 (classes declared
/// after Starknet 0.11.1, up to and including 0.11.2).
mod v1_1_1 {
    use anyhow::Context;
    use casm_compiler_v1_1_1::allowed_libfuncs::{
        validate_compatible_sierra_version, ListSelector,
    };
    use casm_compiler_v1_1_1::casm_contract_class::CasmContractClass;
    use casm_compiler_v1_1_1::contract_class::ContractClass;
    use super::FeederGatewayContractClass;
    /// Rebuild the feeder-gateway JSON into the shape this compiler
    /// expects. The ABI is intentionally dropped (replaced by `[]`).
    impl<'a> TryFrom<FeederGatewayContractClass<'a>> for ContractClass {
        type Error = serde_json::Error;
        fn try_from(value: FeederGatewayContractClass<'a>) -> Result<Self, Self::Error> {
            let json = serde_json::json!({
                "abi": [],
                "sierra_program": value.sierra_program,
                "contract_class_version": value.contract_class_version,
                "entry_points_by_type": value.entry_points_by_type,
            });
            serde_json::from_value::<ContractClass>(json)
        }
    }
    /// Compile `definition` to CASM, returning the serialized CASM class.
    pub(super) fn compile(definition: FeederGatewayContractClass<'_>) -> anyhow::Result<Vec<u8>> {
        let sierra_class: ContractClass = definition
            .try_into()
            .context("Converting to Sierra class")?;
        validate_compatible_sierra_version(
            &sierra_class,
            ListSelector::ListName(
                // NOTE(review): this references the *v1_0_0_rc0* crate's
                // list-name constant, unlike every other module here which
                // uses its own crate's constant — confirm this is
                // intentional (e.g. the constant was renamed/removed in
                // v1.1.1) rather than a copy-paste slip.
                casm_compiler_v1_0_0_rc0::allowed_libfuncs::DEFAULT_EXPERIMENTAL_LIBFUNCS_LIST
                    .to_string(),
            ),
        )
        .context("Validating Sierra class")?;
        let casm_class = CasmContractClass::from_contract_class(sierra_class, true)
            .context("Compiling to CASM")?;
        let casm_definition = serde_json::to_vec(&casm_class)?;
        Ok(casm_definition)
    }
}
// This compiler is backwards compatible with v1.1.
/// CASM compilation backend using the v2 compiler (classes declared after
/// Starknet 0.11.2); also the "latest" backend used by
/// `compile_to_casm_with_latest_compiler`.
mod v2 {
    use anyhow::Context;
    use casm_compiler_v2::allowed_libfuncs::{validate_compatible_sierra_version, ListSelector};
    use casm_compiler_v2::casm_contract_class::CasmContractClass;
    use casm_compiler_v2::contract_class::ContractClass;
    use super::FeederGatewayContractClass;
    /// Rebuild the feeder-gateway JSON into the shape this compiler
    /// expects. The ABI is intentionally dropped (replaced by `[]`).
    impl<'a> TryFrom<FeederGatewayContractClass<'a>> for ContractClass {
        type Error = serde_json::Error;
        fn try_from(value: FeederGatewayContractClass<'a>) -> Result<Self, Self::Error> {
            let json = serde_json::json!({
                "abi": [],
                "sierra_program": value.sierra_program,
                "contract_class_version": value.contract_class_version,
                "entry_points_by_type": value.entry_points_by_type,
            });
            serde_json::from_value::<ContractClass>(json)
        }
    }
    /// Compile `definition` to CASM, returning the serialized CASM class.
    pub(super) fn compile(definition: FeederGatewayContractClass<'_>) -> anyhow::Result<Vec<u8>> {
        let sierra_class: ContractClass = definition
            .try_into()
            .context("Converting to Sierra class")?;
        // Note: v2 validates against the built-in "all libfuncs" list
        // rather than the experimental list used by the older backends.
        validate_compatible_sierra_version(
            &sierra_class,
            ListSelector::ListName(
                casm_compiler_v2::allowed_libfuncs::BUILTIN_ALL_LIBFUNCS_LIST.to_string(),
            ),
        )
        .context("Validating Sierra class")?;
        let casm_class = CasmContractClass::from_contract_class(sierra_class, true)
            .context("Compiling to CASM")?;
        let casm_definition = serde_json::to_vec(&casm_class)?;
        Ok(casm_definition)
    }
}
/// Subset of the feeder gateway's Sierra class JSON needed for compilation.
///
/// Fields other than `abi` are kept as raw JSON values so they can be
/// re-emitted unchanged into each compiler's expected input format.
#[derive(serde::Deserialize, serde::Serialize)]
#[serde(deny_unknown_fields)]
struct FeederGatewayContractClass<'a> {
    /// ABI text; parsed here but discarded during conversion (each
    /// backend replaces it with an empty array).
    #[serde(borrow)]
    pub abi: Cow<'a, str>,
    /// Sierra bytecode program, passed through verbatim.
    #[serde(borrow)]
    pub sierra_program: &'a serde_json::value::RawValue,
    /// Declared contract class version, passed through verbatim.
    #[serde(borrow)]
    pub contract_class_version: &'a serde_json::value::RawValue,
    /// Entry point selectors grouped by type, passed through verbatim.
    #[serde(borrow)]
    pub entry_points_by_type: &'a serde_json::value::RawValue,
}
#[cfg(test)]
mod tests {
    use super::{compile_to_casm, FeederGatewayContractClass};
    use pathfinder_common::StarknetVersion;
    /// Fixtures exercising the oldest compiler path (default/0.11.0-era).
    mod starknet_v0_11_0 {
        use super::*;
        use starknet_gateway_test_fixtures::class_definitions::CAIRO_1_0_0_ALPHA5_SIERRA;
        #[test]
        fn test_feeder_gateway_contract_conversion() {
            let class =
                serde_json::from_slice::<FeederGatewayContractClass<'_>>(CAIRO_1_0_0_ALPHA5_SIERRA)
                    .unwrap();
            let _: casm_compiler_v1_0_0_rc0::contract_class::ContractClass =
                class.try_into().unwrap();
        }
        #[test]
        fn test_compile() {
            compile_to_casm(CAIRO_1_0_0_ALPHA5_SIERRA, &StarknetVersion::default()).unwrap();
        }
    }
    /// Fixtures exercising the 0.11.1-era compiler path.
    mod starknet_v0_11_1 {
        use super::*;
        use starknet_gateway_test_fixtures::class_definitions::CAIRO_1_0_0_RC0_SIERRA;
        #[test]
        fn test_feeder_gateway_contract_conversion() {
            let class =
                serde_json::from_slice::<FeederGatewayContractClass<'_>>(CAIRO_1_0_0_RC0_SIERRA)
                    .unwrap();
            let _: casm_compiler_v1_0_0_rc0::contract_class::ContractClass =
                class.try_into().unwrap();
        }
        #[test]
        fn test_compile() {
            compile_to_casm(CAIRO_1_0_0_RC0_SIERRA, &StarknetVersion::new(0, 11, 1)).unwrap();
        }
    }
    /// Fixtures exercising 0.11.2 and later compiler paths.
    mod starknet_v0_11_2_onwards {
        use super::*;
        use starknet_gateway_test_fixtures::class_definitions::{
            CAIRO_1_1_0_RC0_SIERRA, CAIRO_2_0_0_STACK_OVERFLOW,
        };
        #[test]
        fn test_feeder_gateway_contract_conversion() {
            let class =
                serde_json::from_slice::<FeederGatewayContractClass<'_>>(CAIRO_1_1_0_RC0_SIERRA)
                    .unwrap();
            let _: casm_compiler_v2::contract_class::ContractClass = class.try_into().unwrap();
        }
        #[test]
        fn test_compile() {
            compile_to_casm(CAIRO_1_1_0_RC0_SIERRA, &StarknetVersion::new(0, 11, 2)).unwrap();
        }
        #[test]
        fn regression_stack_overflow() {
            // This class caused a stack-overflow in v2 compilers <= v2.0.1
            compile_to_casm(CAIRO_2_0_0_STACK_OVERFLOW, &StarknetVersion::new(0, 12, 0)).unwrap();
        }
    }
}
|
use std::io;

/// Print `prompt` on its own line, read one line from stdin, and return it
/// trimmed of surrounding whitespace.
///
/// # Panics
/// Panics with a clear message if reading from stdin fails (the original
/// code silently ignored the `read_line` Result).
fn read_input(prompt: &str) -> String {
    println!("{}", prompt);
    let mut line = String::new();
    io::stdin()
        .read_line(&mut line)
        .expect("failed to read from stdin");
    line.trim().to_string()
}

/// Simple interactive calculator: reads two numbers and an operator from
/// stdin, then prints the result of applying the operator.
///
/// # Panics
/// Panics if either number fails to parse as `f32`.
fn main() {
    let a: f32 = read_input("Enter 1st Number = ")
        .parse()
        .expect("first input was not a valid number");
    let b: f32 = read_input("Enter 2nd Number = ")
        .parse()
        .expect("second input was not a valid number");
    let op = read_input("Enter Operation ");
    // Dispatch on the operator; division by zero follows IEEE-754 f32
    // semantics (inf/NaN), matching the original behavior.
    match op.as_str() {
        "+" => println!("{} + {} = {}", a, b, a + b),
        "-" => println!("{} - {} = {}", a, b, a - b),
        "/" => println!("{} / {} = {}", a, b, a / b),
        "*" => println!("{} * {} = {}", a, b, a * b),
        _ => println!("invalid operation"),
    }
}
|
#[cfg(test)]
#[macro_use]
extern crate approx; // For the macro relative_eq!
extern crate nalgebra as na;
extern crate float_cmp as flcmp;
extern crate num;
extern crate num_traits as numt;
extern crate uuid;
extern crate rand;
pub mod defs;
pub mod tools;
pub mod core;
pub mod basic;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.