text stringlengths 8 4.13M |
|---|
use chacha;
use chacha::KeyStream;
use rand::{thread_rng, Rng};
use sha3::{Digest, Sha3_256};
/// Fills `buf` with 25 cryptographically-random bytes from the thread RNG.
///
/// # Panics
/// Panics if `buf.len() != 25` (the `copy_from_slice` length contract).
pub fn random_fill_25(buf: &mut [u8]) {
    // Generate into a fixed-size temporary so the 25-byte length
    // contract is enforced by the final copy.
    let mut tmp = [0u8; 25];
    thread_rng().fill(&mut tmp);
    buf.copy_from_slice(&tmp);
}
/// Returns `true` if the first 8 bytes of `buf` differ from the next 8.
///
/// The 16-byte marker is considered intact only when bytes `0..8`
/// equal bytes `8..16`.
///
/// # Panics
/// Panics if `buf.len() < 16`, as the original index loop did.
pub fn is_tampered_16(buf: &[u8]) -> bool {
    // Slice comparison short-circuits on the first mismatch; the original
    // loop always scanned all 8 pairs even after finding a difference.
    buf[..8] != buf[8..16]
}
/// Optional XChaCha20 stream cipher: a pass-through when encryption is off.
pub struct ChaCha {
    /// `None` when encryption is disabled; `xor` is then a no-op.
    internal: Option<chacha::ChaCha>,
}

impl ChaCha {
    /// Builds an XChaCha20 keystream keyed by the SHA3-256 hash of
    /// `password`, or a no-op cipher when `should_be_encrypted` is false.
    pub fn new(should_be_encrypted: bool, password: &str, nonce: &[u8; 24]) -> ChaCha {
        if !should_be_encrypted {
            return ChaCha { internal: None };
        }
        // Derive a 256-bit key from the password with SHA3-256.
        let secret_key = {
            let mut hasher = Sha3_256::default();
            hasher.input(password.as_bytes());
            let mut result = [0u8; 32];
            result.copy_from_slice(hasher.result().as_slice());
            result
        };
        ChaCha {
            internal: Some(chacha::ChaCha::new_xchacha20(&secret_key, nonce)),
        }
    }

    /// XORs the keystream into `dest` in place (encrypt == decrypt);
    /// no-op when encryption is disabled.
    pub fn xor(&mut self, dest: &mut [u8]) {
        if let Some(ref mut chacha) = self.internal {
            // `xor_read` fails only when the keystream is exhausted;
            // make the deliberate discard explicit instead of calling
            // `.is_ok()` and ignoring the returned bool.
            let _ = chacha.xor_read(dest);
        }
    }
}
|
use std::cmp::max;
use std::fs::File;
use std::io::{Seek, SeekFrom};
use objdump::Elf;
use crate::action;
use crate::action::{ExecuteInfo, Instruction};
use crate::cache;
use crate::cache::{Storage};
use crate::memory::{Memory, MemorySegment};
use crate::register::{RegisterFile, from_name};
use crate::statistic::Statistic;
const STACK_ADDRESS: u64 = 0x3f3f3f_fffff;
const STACK_SIZE: usize = 4096;
/// Whole-machine state for the instruction-set simulator.
pub struct Simulator {
    pub memory: Memory,
    pub regs: RegisterFile,
    /// Loaded binary image (symbols are used to locate the entry point).
    pub elf: Elf,
    /// Program counter; 0 means "halted" (see `run`).
    pub pc: u64,
    pub stat: Statistic,
    pub cache: Box<dyn Storage>,
    /// Sliding window of in-flight instructions; index 0 is the newest,
    /// higher indices are older pipeline stages (ID, EX, MEM, WB).
    pub instr: [ExecuteInfo; 5],
}
impl Simulator {
    /// Creates an empty simulator with a 3-level cache hierarchy.
    pub fn new() -> Self {
        Simulator {
            memory: Memory::new(),
            regs: RegisterFile::new(),
            pc: 0,
            elf: Elf::default(),
            stat: Statistic::default(),
            cache: cache::new_3_levels(),
            instr: [ExecuteInfo::default(); 5],
        }
    }

    /// Loads `filename` as an ELF image: maps every program segment into
    /// memory, allocates a stack and points `sp` at its top, then sets
    /// `pc` to the first symbol whose name contains "main".
    pub fn load_from_elf(&mut self, filename: &str) {
        let elf: Elf = Elf::open(filename)
            .expect("can not open the binary file");
        let mut f = File::open(filename).unwrap();
        elf.programs.iter().for_each(|segment| {
            let _ = f.seek(SeekFrom::Start(segment.off)).unwrap();
            debug_assert!(segment.memsz >= segment.filesz);
            let mut seg = MemorySegment::new(
                segment.vaddr, segment.memsz as usize);
            println!("load segment {:x} ~ {:x}",
                segment.vaddr,
                segment.vaddr + segment.memsz);
            // Only `filesz` bytes come from the file; the remainder
            // (e.g. .bss) stays zeroed.
            seg.load_from(&mut f, segment.filesz as usize);
            self.memory.push(seg);
        });
        // The stack grows downward from STACK_ADDRESS.
        self.memory.push(MemorySegment::new(
            STACK_ADDRESS - STACK_SIZE as u64, STACK_SIZE));
        self.regs.set(from_name("sp"), STACK_ADDRESS);
        // NOTE(review): this is a substring match, so a symbol such as
        // "__main" would also hit — confirm symbol naming if entry
        // selection ever misbehaves.
        let main = elf.symbol_entries.iter()
            .find(|x| x.0.contains("main"))
            .unwrap();
        self.pc = main.1;
        self.elf = elf;
    }

    /// Fetches and decodes the 32-bit instruction at `pc`.
    ///
    /// # Panics
    /// Panics on compressed (16-bit) or longer-than-32-bit encodings.
    fn decode(&mut self) -> Instruction {
        let inst: u32 = self.memory.load_u32(self.pc);
        // Low two bits != 0b11 marks a 16-bit compressed encoding.
        if (inst & 0b11) != 0b11 {
            panic!("oa, it's a 16bit instruction");
        }
        // Bits [4:2] all set marks an encoding wider than 32 bits.
        if (inst & 0b11100) == 0b11100 {
            panic!("it's an instruction that is longer than 32bit");
        }
        action::matching(inst)
    }

    /// Executes one instruction; returns `false` once `pc` reaches 0 (halt).
    pub fn run(&mut self) -> bool {
        if self.pc == 0 {
            return false
        }
        print!("{:<7x}", self.pc);
        let inst = self.decode();
        println!("{:?}", inst);
        self.single_step(inst);
        true
    }

    /// Advances the modeled 5-stage pipeline by one instruction and
    /// updates the cycle / hazard / branch statistics.
    fn single_step(&mut self, inst: Instruction) {
        self.stat.num_inst += 1;
        // Shift the pipeline window; the oldest entry falls off at WB.
        self.instr[4] = self.instr[3]; // WB
        self.instr[3] = self.instr[2]; // MEM
        self.instr[2] = self.instr[1]; // EX
        self.instr[1] = self.instr[0]; // ID
        self.instr[0] = action::execute(self, inst);
        // This step costs the slower of the MEM access and the EX stage.
        let mut cycles = max(
            self.instr[3].mem_access,
            self.instr[2].exe_cycles);
        // Load-use hazard: EX reads a register MEM is still loading.
        let load_reg = self.instr[3].load_reg;
        if load_reg.not_zero() {
            if self.instr[2].reg_read[0] == load_reg
                || self.instr[2].reg_read[1] == load_reg {
                cycles = max(cycles, 2);
                self.stat.num_data_hazard += 1;
            }
        }
        if self.instr[2].is_branch {
            self.stat.num_branch += 1;
            // A branch that is not taken counts as mispredicted
            // (presumably a predict-taken model — confirm upstream).
            if !self.instr[2].taken_branch {
                self.stat.num_mis_pred += 1;
            }
        }
        self.stat.cycle += cycles;
    }
}
|
///// chapter 4 "structuring data and matching patterns"
///// program section:
//
/// Chapter-4 demo: string-slice bindings and formatted printing.
fn main() {
    let magician1 = "merlin";
    let magician2: &'static str = "gandalf";
    let greeting = "hello, you, wizard, you!";
    // Build the line first, then print it — identical output.
    let line = format!(
        "magician {} greets magician {} with such words: \"{}\"",
        magician1, magician2, greeting
    );
    println!("{}", line);
}
///// output should be:
/*
*/// end of output
|
use core::*;
use core::hash::*;
use common::*;
/// One hash bucket: the key/value pairs that hashed to the same index,
/// plus a cursor so the bucket can be iterated.
pub struct Entry<T, U> {
    pub keys: Vec<(T, U)>,
    // Iteration cursor used by the `Iterator` impl.
    cur: usize,
}
impl<T, U> Iterator<Item = (T, U)> for Entry<T, U> {
fn next(&mut self) -> Option<(T, U)> {
self.usize += 1;
self.keys.get(self.usize)
}
}
/// A hashmap (a simple implementation)
pub struct HashMap<T, U>
where T: Hash
{
    // 247 fixed buckets; keys are assigned to a bucket by `get_pos`
    // (hash of the key modulo the bucket count).
    values: [Entry<T, U>; 247],
}
impl<T, U> HashMap<T, U>
where T: Hash {
/// Get the position of an entry
pub fn get_pos(key: &T) -> u8 {
let hash = SipHasher::new();
key.hash(hash);
hash.finish() % 248
}
/// Get a refference to an entry
pub fn get(&self, key: &T) -> Option<&U> {
&self.values[self.get_pos(key)].find(|(k, v)| key == k)
}
/// Get a mutable refference to an entry
pub fn get_mut(&mut self, key: &T) -> Option<&mut U> {
&mut self.values[self.get_pos(key)].find(|(k, v)| key == k)
}
/// Set the value of an entry
pub fn set(&mut self, key: &T, val: &U) {
match self.get_mut(key) {
Some(e) => e,
None => {
self.values[self.get_pos(key)]
.keys
.push((*key, *val));
}
}
}
}
|
use regex::Regex;
use std::collections::HashMap;
const MEMORY_SIZE: usize = 100000; // Is there an address bigger than this in the input?
const MAX_36_BITS: u64 = u64::max_value() >> (64 - 36);
/// A single `mem[address] = value` write parsed from the input.
#[derive(Debug, PartialEq)]
pub struct Instruction {
    value: u64,
    address: usize,
}
/// One mask block: the decoded 36-bit mask plus the writes that follow it.
#[derive(Debug, PartialEq)]
pub struct Program {
    /// Bits the mask forces to 1.
    ones_mask: u64,
    /// Bits the mask forces to 0.
    zeroes_mask: u64,
    /// `ones | zeroes`: the non-floating positions, i.e. an inverted
    /// mask of the floating ("X") bits.
    xs_mask: u64,
    instructions: Vec<Instruction>,
}
/// Parses the puzzle input into one `Program` per `mask = …` block.
#[aoc_generator(day14)]
pub fn input_generator(input: &str) -> Vec<Program> {
    let instruction_re = Regex::new(r"mem\[(.*)\] = (.*)").unwrap();
    let mut program_vec = Vec::new();
    for block in input.split("mask = ") {
        let mut ones_mask = 0;
        let mut zeroes_mask = 0;
        let mut instructions = Vec::new();
        for l in block.lines() {
            if !instruction_re.is_match(l) {
                // Non-`mem` line: the 36-character mask, most significant
                // bit first (index 0 maps to bit 35).
                for (i, c) in l.chars().enumerate() {
                    match c {
                        '0' => zeroes_mask |= 1 << (35 - i),
                        '1' => ones_mask |= 1 << (35 - i),
                        'X' => (),
                        _ => panic!("Invalid mask input"),
                    }
                }
            } else {
                let cap = instruction_re.captures(l).unwrap();
                instructions.push(Instruction {
                    value: cap[2].parse::<u64>().unwrap(),
                    address: cap[1].parse::<usize>().unwrap(),
                });
            }
        }
        // `split` yields an empty chunk before the first mask; skip it.
        if !instructions.is_empty() {
            program_vec.push(Program {
                ones_mask,
                zeroes_mask,
                xs_mask: ones_mask | zeroes_mask, // This is an inverted bit mask
                instructions,
            });
        }
    }
    program_vec
}
/// Part 1: apply each block's forced-bit mask to the written values and
/// sum all of memory.
#[aoc(day14, part1)]
pub fn part1(programs: &Vec<Program>) -> u64 {
    // Heap-allocate: a 100_000-element u64 array is ~800 KB, which risks
    // overflowing the stack (and the old `check_sums` helper copied the
    // whole array again by value).
    let mut memory = vec![0u64; MEMORY_SIZE];
    for p in programs {
        for i in p.instructions.iter() {
            // Force the mask's 1 bits on and its 0 bits off.
            memory[i.address] = (i.value | p.ones_mask) & !p.zeroes_mask;
        }
    }
    memory.iter().sum()
}
/// Part 2: the mask's X bits float, so every write fans out to each
/// combination of the floating bits.
#[aoc(day14, part2)]
pub fn part2(programs: &Vec<Program>) -> u64 {
    // Addresses span the full 36 mask bits — far beyond MEMORY_SIZE —
    // so memory must be sparse (a HashMap, not a dense array).
    let mut memory = HashMap::new();
    for p in programs {
        for inst in p.instructions.iter() {
            // Fixed part of the address: forced 1s applied, X bits zeroed.
            let base_addr = (inst.address as u64 | p.ones_mask) & p.xs_mask;
            // Enumerate every floating-bit pattern: adding 1 to a value
            // whose non-floating bits are all set carries through exactly
            // the X positions.
            let mut curr_mask = p.xs_mask;
            loop {
                memory.insert(base_addr | (!curr_mask & MAX_36_BITS), inst.value);
                if curr_mask & MAX_36_BITS == MAX_36_BITS {
                    break;
                }
                curr_mask = (curr_mask + 1) | p.xs_mask;
            }
        }
    }
    check_sums_hash(memory)
}
// Helpers
/// Sums every cell of the fixed-size memory image.
fn check_sums(memory: [u64; MEMORY_SIZE]) -> u64 {
    memory.iter().sum()
}
/// Sums every stored value of the sparse memory image.
fn check_sums_hash(memory: HashMap<u64, u64>) -> u64 {
    memory.values().sum()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Parsing: two mask blocks, each with its decoded masks and writes.
    #[test]
    fn test_input() {
        assert_eq!(
            input_generator(
                "mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\n\
                 mem[8] = 11\n\
                 mem[7] = 101\n\
                 mem[8] = 0\n\
                 mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\n\
                 mem[8] = 11"
            ),
            vec![
                Program {
                    ones_mask: 64,
                    zeroes_mask: 2,
                    xs_mask: 66,
                    instructions: vec![
                        Instruction {
                            value: 11,
                            address: 8
                        },
                        Instruction {
                            value: 101,
                            address: 7
                        },
                        Instruction {
                            value: 0,
                            address: 8
                        }
                    ]
                },
                Program {
                    ones_mask: 64,
                    zeroes_mask: 2,
                    xs_mask: 66,
                    instructions: vec![Instruction {
                        value: 11,
                        address: 8
                    }]
                }
            ]
        );
    }
    // Worked example from the puzzle statement (value masking).
    #[test]
    fn test_part1() {
        assert_eq!(
            part1(&input_generator(
                "mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\n\
                 mem[8] = 11\n\
                 mem[7] = 101\n\
                 mem[8] = 0"
            )),
            165
        );
    }
    // Worked example from the puzzle statement (address floating).
    #[test]
    fn test_part2() {
        assert_eq!(
            part2(&input_generator(
                "mask = 000000000000000000000000000000X1001X\n\
                 mem[42] = 100\n\
                 mask = 00000000000000000000000000000000X0XX\n\
                 mem[26] = 1"
            )),
            208
        );
    }
}
|
/// Build script: generates Rust gRPC code from `proto/paxos.proto`
/// directly into `src/` (instead of tonic's default OUT_DIR).
fn main() {
    tonic_build::configure()
        .out_dir("src")
        .compile(&["proto/paxos.proto"], &["proto"])
        .expect("Failed to compile proto")
}
|
use std::rc::Rc;
use std::fmt;
use super::*;
/// The kinds of statement the parser produces.
#[derive(Debug, Clone, PartialEq)]
pub enum StatementNode<'s> {
    /// A bare expression used as a statement.
    Expression(Expression<'s>),
    /// `Variable(declared_type, name, optional_initializer)`.
    Variable(Type<'s>, String, Option<Expression<'s>>),
    /// `Assignment(lhs, rhs)`.
    Assignment(Expression<'s>, Expression<'s>),
    /// `return` with an optional value.
    Return(Option<Rc<Expression<'s>>>),
}
/// A statement node together with its source position.
#[derive(Debug, Clone, PartialEq)]
pub struct Statement<'s> {
    pub node: StatementNode<'s>,
    /// Token the statement originated from (for error reporting).
    pub pos: TokenElement<'s>,
}
impl<'s> Statement<'s> {
    /// Wraps a node with its source position.
    pub fn new(node: StatementNode<'s>, pos: TokenElement<'s>) -> Self {
        Statement {
            node,
            pos,
        }
    }
}
/// The kinds of expression the parser produces.
#[derive(Debug, Clone, PartialEq)]
pub enum ExpressionNode<'e> {
    Int(u64),
    Float(f64),
    Str(String),
    Char(char),
    Bool(bool),
    /// Postfix unwrap of an optional value.
    Unwrap(Rc<Expression<'e>>),
    Identifier(String),
    /// `Binary(lhs, operator, rhs)`.
    Binary(Rc<Expression<'e>>, Operator, Rc<Expression<'e>>),
    /// A block of statements used as an expression.
    Block(Vec<Statement<'e>>),
    /// `Cast(value, target_type)`.
    Cast(Rc<Expression<'e>>, Type<'e>),
    Array(Vec<Expression<'e>>),
    /// `Index(collection, index)`.
    Index(Rc<Expression<'e>>, Rc<Expression<'e>>),
    /// `Function(params, return_type, body, optional_captures)`.
    Function(Vec<(String, Type<'e>)>, Type<'e>, Rc<Expression<'e>>, Option<Vec<String>>),
    /// `Call(callee, arguments)`.
    Call(Rc<Expression<'e>>, Vec<Expression<'e>>),
    /// `If(condition, then_branch, optional_else/elif_arms)`.
    If(Rc<Expression<'e>>, Rc<Expression<'e>>, Option<Vec<(Option<Expression<'e>>, Expression<'e>, TokenElement<'e>)>>),
    EOF,
    Empty,
}
/// An expression node together with its source position.
#[derive(Debug, Clone, PartialEq)]
pub struct Expression<'e> {
    pub node: ExpressionNode<'e>,
    /// Token the expression originated from (for error reporting).
    pub pos: TokenElement<'e>,
}
impl<'e> Expression<'e> {
    /// Wraps a node with its source position.
    pub fn new(node: ExpressionNode<'e>, pos: TokenElement<'e>) -> Self {
        Expression {
            node,
            pos,
        }
    }
}
/// Binary operators, with parsing (`from_str`) and printing (`as_str`).
#[derive(Debug, Clone, PartialEq)]
pub enum Operator {
    Add, Sub, Mul, Div, Mod, Pow, Concat, Eq, Lt, Gt, NEq, LtEq, GtEq,
}

impl Operator {
    /// Maps an operator token to `(Operator, precedence)`;
    /// returns `None` for an unknown token.
    ///
    /// Precedence: comparisons (1) < additive (2) < multiplicative (3)
    /// < power (4).
    pub fn from_str(operator: &str) -> Option<(Operator, u8)> {
        use self::Operator::*;
        match operator {
            "==" => Some((Eq, 1)),
            "!=" => Some((NEq, 1)),
            "<" => Some((Lt, 1)),
            ">" => Some((Gt, 1)),
            "<=" => Some((LtEq, 1)),
            ">=" => Some((GtEq, 1)),
            "+" => Some((Add, 2)),
            "-" => Some((Sub, 2)),
            "++" => Some((Concat, 2)),
            "*" => Some((Mul, 3)),
            "/" => Some((Div, 3)),
            "%" => Some((Mod, 3)),
            "^" => Some((Pow, 4)),
            _ => None,
        }
    }

    /// The textual form of this operator — the inverse of `from_str`.
    pub fn as_str(&self) -> &str {
        use self::Operator::*;
        match self {
            Eq => "==",
            NEq => "!=",
            Lt => "<",
            Gt => ">",
            LtEq => "<=",
            GtEq => ">=",
            Add => "+",
            Sub => "-",
            Concat => "++",
            Mul => "*",
            Div => "/",
            Mod => "%",
            Pow => "^",
        }
    }
}
impl fmt::Display for Operator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.as_str())
}
} |
extern crate gui;
use gui::*;
use math::*;
/// Example state that draws one ellipse and one green rectangle per frame.
struct PixelExample;
impl State for PixelExample {
    fn draw(&mut self, frame: &mut Frame, _state_data: &StateData) {
        // 50x50 ellipse with the frame's default color.
        frame.ellipse()
            .size(Vec2::new(50.0, 50.0))
            .draw();
        // 50x50 rectangle in pure green.
        frame.rect()
            .size(Vec2::new(50.0, 50.0))
            .color(color::rgb(0.0, 1.0, 0.0))
            .draw();
    }
}
/// Opens a 600x400 window (with a matching pixel buffer) running the
/// `PixelExample` state.
fn main() {
    Application::new()
        .with_title("Pixel Example")
        .with_window_size(600, 400)
        .with_pixel_window_size(600, 400)
        .run(|_loader| {
            Box::new(PixelExample)
        });
}
|
use crate::models::{LogEntry, Token};
use crate::util::ensure_is_authorized;
use actix_web::{error, web, HttpResponse, Result};
use actix_web_grants::permissions::AuthDetails;
use database::models::LogEntry as DatabaseLogEntry;
use database::DbPool;
use serde::{Deserialize, Serialize};
use shared::token_permissions;
/// Mounts the log endpoint: GET on this scope's root serves `get`.
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(get)));
}
const PAGE_SIZE: u16 = 10;
/// GET handler: returns one page of log entries plus paging metadata.
async fn get(
    pool: web::Data<DbPool>,
    query: web::Query<LogQuery>,
    auth: AuthDetails,
) -> Result<HttpResponse> {
    // Any valid token may read the log; otherwise respond 403.
    ensure_is_authorized(&auth, token_permissions::HAS_VALID_TOKEN)
        .map_err(error::ErrorForbidden)?;
    let mut conn = pool
        .acquire()
        .await
        .map_err(error::ErrorInternalServerError)?;
    let log_entry_count = DatabaseLogEntry::count(&mut conn)
        .await
        .map_err(error::ErrorInternalServerError)?;
    let log_entries =
        DatabaseLogEntry::by_page_number_with_mapping(&mut conn, query.page, LogEntry::from)
            .await
            .map_err(error::ErrorInternalServerError)?;
    Ok(HttpResponse::Ok().json(LogResponse {
        log_entries,
        page: query.page,
        log_entry_count,
        // NOTE(review): assumes the database layer also pages by
        // PAGE_SIZE (10) — confirm against by_page_number_with_mapping.
        page_count: (log_entry_count as f64 / f64::from(PAGE_SIZE)).ceil() as u16,
    }))
}
/// Query string for the log endpoint: `?token=…&page=…`.
#[derive(Debug, Deserialize)]
struct LogQuery {
    // Deserialized for validation elsewhere; not read by this handler.
    #[allow(unused)]
    token: Token,
    page: u16,
}
/// JSON response body: one page of entries plus paging totals.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct LogResponse {
    log_entries: Vec<LogEntry>,
    page: u16,
    page_count: u16,
    log_entry_count: i64,
}
|
use std::cell::RefCell;
use std::cmp::Reverse;
use std::collections::{BinaryHeap, HashMap, HashSet};
use std::env;
/// Cave region types (AoC 2018 day 22), derived from erosion level mod 3.
#[derive(Clone, Copy)]
enum RegionType {
    Rocky,
    Narrow,
    Wet,
}
/// An (x, y) cave coordinate.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
struct Coord(u64, u64);

impl Coord {
    /// L1 (taxicab) distance between two coordinates.
    fn manhattan_distance(&self, other: Coord) -> u64 {
        // Go through i64 so either operand may be the larger one.
        let dx = (self.0 as i64 - other.0 as i64).abs();
        let dy = (self.1 as i64 - other.1 as i64).abs();
        (dx + dy) as u64
    }
}
/// Equipment the explorer can hold; region type restricts which are valid.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
enum Tool {
    Torch,
    Gear,
    Neither,
}
/// Search state: (Reverse(distance so far), tool held, position).
/// `Reverse` makes the max-heap behave as a min-heap on distance.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
struct State(Reverse<u64>, Tool, Coord);
/// The cave model: target coordinate, depth, and a memo of erosion levels.
struct Cave {
    target: Coord,
    depth: u64,
    // RefCell so the read-only query methods can fill the memo lazily.
    erosion_cache: RefCell<HashMap<Coord, u64>>,
}
impl Cave {
    /// Geologic index per the puzzle rules: 0 at the mouth and target,
    /// fixed multipliers along the axes, otherwise the product of the
    /// two neighbors' erosion levels.
    fn geologic_index(&self, coord: Coord) -> u64 {
        let Coord(x, y) = coord;
        if coord == Coord(0, 0) || coord == self.target {
            0
        } else if y == 0 {
            x * 16807
        } else if x == 0 {
            y * 48271
        } else {
            self.erosion_level(Coord(x - 1, y)) * self.erosion_level(Coord(x, y - 1))
        }
    }
    /// Erosion level = (geologic index + depth) mod 20183, memoized
    /// (the recursion in `geologic_index` would otherwise be exponential).
    fn erosion_level(&self, coord: Coord) -> u64 {
        if let Some(&level) = self.erosion_cache.borrow().get(&coord) {
            return level;
        }
        let level = (self.geologic_index(coord) + self.depth) % 20183;
        self.erosion_cache.borrow_mut().insert(coord, level);
        level
    }
    /// Region type is the erosion level mod 3.
    fn region_type(&self, coord: Coord) -> RegionType {
        match self.erosion_level(coord) % 3 {
            0 => RegionType::Rocky,
            1 => RegionType::Wet,
            2 => RegionType::Narrow,
            _ => unreachable!(),
        }
    }
    /// Part 1: sum of risk values over the rectangle `tl..=br`.
    fn risk_level(&self, tl: Coord, br: Coord) -> u64 {
        let mut sum = 0;
        for x in tl.0..=br.0 {
            for y in tl.1..=br.1 {
                sum += match self.region_type(Coord(x, y)) {
                    RegionType::Rocky => 0,
                    RegionType::Wet => 1,
                    RegionType::Narrow => 2,
                };
            }
        }
        sum
    }
    /// The two tools usable in a region (each type forbids exactly one).
    fn region_tools(&self, coord: Coord) -> &[Tool; 2] {
        match self.region_type(coord) {
            RegionType::Rocky => &[Tool::Gear, Tool::Torch],
            RegionType::Wet => &[Tool::Gear, Tool::Neither],
            RegionType::Narrow => &[Tool::Torch, Tool::Neither],
        }
    }
    /// Pushes a 1-minute move into `state`'s coordinate onto the frontier,
    /// but only if the currently-held tool is legal in that region.
    /// The heap key is distance + manhattan heuristic (A*).
    fn explore(&self, state: State, frontier: &mut BinaryHeap<(Reverse<u64>, State)>) {
        let State(Reverse(distance), tool, coord) = state;
        if self.region_tools(coord).contains(&tool) {
            frontier.push((
                Reverse(distance + 1 + coord.manhattan_distance(self.target)),
                State(Reverse(distance + 1), tool, coord),
            ));
        }
    }
    /// A* from the mouth (torch equipped) to the target holding the torch.
    /// Moves cost 1 minute; switching tools in place costs 7.
    fn astar(&self) -> u64 {
        let mut visited = HashSet::new();
        let mut frontier = BinaryHeap::new();
        frontier.push((
            Reverse(Coord(0, 0).manhattan_distance(self.target)),
            State(Reverse(0), Tool::Torch, Coord(0, 0)),
        ));
        while let Some((_, state)) = frontier.pop() {
            let State(Reverse(distance), tool, Coord(x, y)) = state;
            // (tool, position) is the real search node; skip if settled.
            if !visited.insert((tool, Coord(x, y))) {
                continue;
            }
            if Coord(x, y) == self.target && tool == Tool::Torch {
                return distance;
            }
            // Four neighbor moves (guarding the u64 underflow at 0).
            self.explore(
                State(Reverse(distance), tool, Coord(x + 1, y)),
                &mut frontier,
            );
            self.explore(
                State(Reverse(distance), tool, Coord(x, y + 1)),
                &mut frontier,
            );
            if x > 0 {
                self.explore(
                    State(Reverse(distance), tool, Coord(x - 1, y)),
                    &mut frontier,
                );
            }
            if y > 0 {
                self.explore(
                    State(Reverse(distance), tool, Coord(x, y - 1)),
                    &mut frontier,
                );
            }
            // Tool switches in place (includes a no-op re-push of the
            // current tool, which `visited` filters out later).
            for &switch_tool in self.region_tools(Coord(x, y)) {
                frontier.push((
                    Reverse(distance + 7 + Coord(x, y).manhattan_distance(self.target)),
                    State(Reverse(distance + 7), switch_tool, Coord(x, y)),
                ));
            }
        }
        unreachable!();
    }
}
/// Reads `depth target_x target_y` from argv, prints the part-1 risk
/// level and the part-2 shortest rescue time.
fn main() {
    let args: Vec<_> = env::args().collect();
    let (depth, target_x, target_y) = match &args.as_slice() {
        &[_, depth, target_x, target_y] => (
            depth.parse().unwrap(),
            target_x.parse().unwrap(),
            target_y.parse().unwrap(),
        ),
        _ => panic!("expected 3 args"),
    };
    let cave = Cave {
        target: Coord(target_x, target_y),
        depth,
        erosion_cache: RefCell::new(HashMap::new()),
    };
    println!("{}", cave.risk_level(Coord(0, 0), cave.target));
    println!("{}", cave.astar());
}
|
use serde::Serialize;
/// A single raw sensor reading as serialized to the API (camelCase JSON).
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RawMeasurement {
    pub kit_serial: String,
    // NOTE(review): presumably a unix timestamp — confirm units (s vs ms)
    // against the producer.
    pub datetime: u64,
    pub peripheral: i32,
    pub quantity_type: i32,
    pub value: f64,
}
|
use anyhow::anyhow;
use moebius_program::instruction::update_data;
use solana_client::{
client_error::ClientError, rpc_client::RpcClient, rpc_config::RpcSendTransactionConfig,
};
use solana_sdk::{
commitment_config::CommitmentConfig,
instruction::Instruction,
message::Message,
pubkey::Pubkey,
signature::{Keypair, Signature, Signer},
transaction::Transaction,
};
use tokio::task::JoinHandle;
type BroadcastHandle = JoinHandle<Result<Signature, ClientError>>;
/// Sends Moebius `update_data` transactions to a Solana RPC endpoint,
/// signing with the held authority key.
pub struct Broadcaster {
    authority: Keypair,
    moebius_account: Pubkey,
    rpc_url: String,
}
/// Builds the single Moebius `update_data` instruction, deriving the
/// caller PDA from `(target_program, target_account)`.
fn instruction_update_data(
    moebius_account: &Pubkey,
    authority: &Pubkey,
    target_program: &Pubkey,
    target_account: &Pubkey,
    data: Vec<u8>,
) -> Vec<Instruction> {
    // Program-derived address that Moebius expects as the caller account.
    let (caller_account, _) = Pubkey::find_program_address(
        &[&target_program.to_bytes(), &target_account.to_bytes()],
        &moebius_program::id(),
    );
    vec![update_data(
        &moebius_program::id(),
        moebius_account,
        authority,
        &caller_account,
        target_program,
        target_account,
        data,
    )
    .unwrap()]
}
impl Broadcaster {
    /// Creates a broadcaster; no connection is made until `broadcast`.
    pub async fn new(
        rpc_url: String,
        authority: Keypair,
        moebius_account: Pubkey,
    ) -> anyhow::Result<Broadcaster> {
        Ok(Self {
            authority,
            moebius_account,
            rpc_url,
        })
    }
    /// Builds, signs and submits one `update_data` transaction; returns
    /// the submitted transaction's signature. The blocking RPC client is
    /// run on a blocking task so the async runtime is not stalled.
    pub async fn broadcast(
        &self,
        program_id: [u8; 32],
        account_id: [u8; 32],
        data: Vec<u8>,
    ) -> anyhow::Result<Signature> {
        // Data that will be moved into the blocking task.
        let rpc_url = self.rpc_url.clone();
        let moebius_account = self.moebius_account;
        // Keypair is not Clone; round-trip through bytes to copy it.
        let authority = Keypair::from_bytes(&self.authority.to_bytes()[..])?;
        let commitment_config = CommitmentConfig::single_gossip();
        let program_id = Pubkey::new_from_array(program_id);
        let account_id = Pubkey::new_from_array(account_id);
        let broadcast_task: BroadcastHandle = tokio::task::spawn_blocking(move || {
            // Initialize RPC client.
            let rpc_client = RpcClient::new(rpc_url);
            // Get the recent blockhash.
            let (recent_blockhash, _, _) = rpc_client
                .get_recent_blockhash_with_commitment(commitment_config)?
                .value;
            // Construct the instruction for updating data via Moebius.
            let instructions = instruction_update_data(
                &moebius_account,
                &authority.pubkey(),
                &program_id,
                &account_id,
                data,
            );
            // Construct transaction message.
            let message = Message::new(&instructions, Some(&authority.pubkey()));
            // Construct transaction.
            let mut transaction = Transaction::new_unsigned(message);
            // Sign the transaction using authority's key.
            transaction.try_sign(&[&authority], recent_blockhash)?;
            // Send transaction.
            Ok(rpc_client.send_transaction_with_config(
                &transaction,
                RpcSendTransactionConfig {
                    preflight_commitment: Some(commitment_config.commitment),
                    ..RpcSendTransactionConfig::default()
                },
            )?)
        });
        Ok(broadcast_task
            .await?
            .map_err(|e| anyhow!("Broadcast tx: {}", e.to_string()))?)
    }
}
|
$NetBSD: patch-library_std_src_sys_unix_mod.rs,v 1.7 2023/01/23 18:49:04 he Exp $
Add libexecinfo for backtrace() on NetBSD.
--- library/std/src/sys/unix/mod.rs.orig 2021-02-10 17:36:44.000000000 +0000
+++ library/std/src/sys/unix/mod.rs
@@ -269,6 +269,7 @@ cfg_if::cfg_if! {
#[link(name = "pthread")]
extern "C" {}
} else if #[cfg(target_os = "netbsd")] {
+ #[link(name = "execinfo")]
#[link(name = "pthread")]
#[link(name = "rt")]
extern "C" {}
|
use cgmath;
/// Up to 10 sprites to render this frame; only the first `count`
/// entries of each array are meaningful.
pub struct SpritesViewModel
{
    pub sizes: [cgmath::Vector2<f32>;10],
    pub positions: [cgmath::Vector2<f32>;10],
    pub tile_map_indices: [i32;10],
    pub count: i32,
}
/// Tile data for the level plus its dimensions.
/// NOTE(review): `data` is presumably row-major of width*height tiles —
/// confirm against the renderer.
pub struct LevelViewModel
{
    pub data: std::vec::Vec<i32>,
    pub width: f32,
    pub height: f32,
}
/// Up to 10 live particles; only the first `count` entries of each
/// array are meaningful.
pub struct ParticlesViewModel
{
    pub positions: [cgmath::Vector2<f32>;10],
    pub max_speeds: [f32;10],
    pub running_times: [f32;10],
    pub max_running_times: [f32;10],
    pub count: i32
}
/// One active post-process effect and its elapsed/total lifetime.
pub struct PostProcessEffect
{
    pub name: PostProcessEffects,
    pub running_time: f32,
    pub max_running_time: f32,
}
/// All currently-running post-process effects.
pub struct PostProcessViewModel
{
    pub effects: std::vec::Vec<PostProcessEffect>,
}
/// Identifiers for the available post-process effects.
#[derive(Copy, Clone, Hash)]
pub enum PostProcessEffects
{
    VIGNETTE = 0,
}

impl PartialEq for PostProcessEffects
{
    /// Two effects are equal when they are the same variant.
    fn eq(&self, other: &Self) -> bool {
        // The original body was `self == other`, which re-entered this
        // very impl and recursed until stack overflow. Compare the enum
        // discriminants instead (what `#[derive(PartialEq)]` would do).
        std::mem::discriminant(self) == std::mem::discriminant(other)
    }
}

impl std::cmp::Eq for PostProcessEffects { }
use crate::{
cursor::CursorInformation,
operation::{GetMore, Operation},
options::ServerAddress,
sdam::{ServerDescription, ServerInfo, ServerType},
Namespace,
};
/// getMore must run on exactly the server the cursor was created on:
/// its selection criteria is a predicate matching that address only.
#[test]
fn op_selection_criteria() {
    let address = ServerAddress::Tcp {
        host: "myhost.com".to_string(),
        port: Some(1234),
    };
    let info = CursorInformation {
        ns: Namespace::empty(),
        address: address.clone(),
        id: 123,
        batch_size: None,
        max_time: None,
        comment: None,
    };
    let get_more = GetMore::new(info, None);
    let server_description = ServerDescription {
        address,
        server_type: ServerType::Unknown,
        reply: Ok(None),
        last_update_time: None,
        average_round_trip_time: None,
    };
    let server_info = ServerInfo::new_borrowed(&server_description);
    let predicate = get_more
        .selection_criteria()
        .expect("should not be none")
        .as_predicate()
        .expect("should be predicate");
    // Matching address: the predicate accepts the server.
    assert!(predicate(&server_info));
    // Same description but a different (default) address: rejected.
    let server_description = ServerDescription {
        address: ServerAddress::default(),
        ..server_description
    };
    let server_info = ServerInfo::new_borrowed(&server_description);
    assert!(!predicate(&server_info));
}
|
/**********************************************
> File Name : length_longest_path.rs
> Author : lunar
> Email : lunar_ubuntu@qq.com
> Created Time : Wed 20 Apr 2022 11:06:50 PM CST
> Location : Shanghai
> Copyright@ https://github.com/xiaoqixian
**********************************************/
struct Solution;

impl Solution {
    /// LeetCode 388: length of the longest absolute path to a *file* in a
    /// serialized filesystem ("dir\n\tsubdir\n\t\tfile.ext"), counting one
    /// separator between components. Returns 0 when there is no file.
    pub fn length_longest_path(input: String) -> i32 {
        // levels[d] = name length of the entry currently open at depth d.
        let mut levels = Vec::<usize>::new();
        let mut res: usize = 0;
        // Number of '\t' seen since the last '\n' = depth of current entry.
        let mut level_index: usize = 0;
        // Sum of the name lengths along the current path.
        let mut curr_path_len: usize = 0;
        let mut first_letter: bool = true;
        // True when the current entry's name contains a '.'.
        let mut is_file: bool = false;
        for c in input.chars() {
            match c {
                '\n' => {
                    level_index = 0;
                    if is_file {
                        if curr_path_len + levels.len() - 1 > res {
                            res = curr_path_len + levels.len() - 1;
                        }
                        // Reset unconditionally. The original cleared this
                        // only when a new maximum was found, so after a
                        // short file the NEXT entry was also treated as a
                        // file (a directory could then win incorrectly).
                        is_file = false;
                    }
                    first_letter = true;
                },
                '\t' => {
                    level_index += 1;
                },
                _ if first_letter => {
                    // New entry: drop components deeper than its depth.
                    while level_index < levels.len() {
                        curr_path_len -= match levels.pop() {
                            None => break,
                            Some(v) => v,
                        };
                    }
                    levels.push(1);
                    curr_path_len += 1;
                    first_letter = false;
                    // The original never checked the first character,
                    // so a name starting with '.' was missed.
                    if c == '.' {
                        is_file = true;
                    }
                },
                c => {
                    levels[level_index] += 1;
                    curr_path_len += 1;
                    if c == '.' {
                        is_file = true;
                    }
                }
            }
        }
        // Account for the final line, which has no trailing '\n'.
        if is_file && curr_path_len + levels.len() - 1 > res {
            res = curr_path_len + levels.len() - 1;
        }
        res as i32
    }
}
/// Demo type showing `Add` implemented for references.
#[derive(Debug)]
struct A {
    i: i32
}

impl std::ops::Add for &A {
    type Output = A;

    /// Adds two borrowed `A`s, producing an owned sum.
    fn add(self, rhs: Self) -> Self::Output {
        let total = self.i + rhs.i;
        A { i: total }
    }
}
/// Runs `length_longest_path` on a trivial input (a single directory,
/// expected 0); the larger sample case is kept below for reference.
fn main() {
    println!("{}", Solution::length_longest_path(String::from("a")));
    //println!("{}", Solution::length_longest_path(String::from("dir\n\tsubdir1\n\t\tfile1.ext\n\t\tsubsubdir1\n\tsubdir2\n\t\tsubsubdir2\n\t\t\tfile2.ext")));
}
|
//! Callbacks for the `button` object in the Lua libraries
use ::luaA::{self, pushmodifiers};
use ::lua::Lua;
use ::object::signal::Signal;
use ::object::class::{Class, Object};
use libc::c_int;
use lua_sys::*;
use xcb::ffi::xproto::xcb_button_t;
// Generates the `button_new` constructor for the button class.
LUA_OBJECT_FUNCS!(luaA::BUTTON_CLASS, Class, button_new);
// Generates the class-level signal/instance/miss-handler functions
// wrapped by the `Button` trait below.
LUA_CLASS_FUNCS!(luaA::BUTTON_CLASS,
                 button_class_add_signal,
                 button_class_connect_signal,
                 button_class_disconnect_signal,
                 button_class_emit_signal,
                 button_class_instances,
                 button_set_index_miss_handler,
                 button_set_newindex_miss_handler);
// Property getters pushing the button number and its modifier mask.
LUA_OBJECT_EXPORT_PROPERTY!(button_get_button, ButtonState, button, lua_pushinteger);
LUA_OBJECT_EXPORT_PROPERTY!(button_get_modifiers, ButtonState, modifiers, pushmodifiers);
/// State of the button
#[repr(C)]
pub struct ButtonState {
    /// Signals connected to this button instance.
    pub signals: Vec<Signal>,
    /// Modifier-key mask that must be held for the binding.
    pub modifiers: u16,
    /// X11 button number.
    pub button: xcb_button_t
}
/// Safe-ish wrappers around the macro-generated Lua C-ABI entry points
/// for the button class. Each method forwards the raw `lua_State`
/// pointer held by `Lua` and returns the Lua result count.
#[allow(non_snake_case)]
pub trait Button {
    /* Methods */
    fn button_add_signal(&self, lua: &Lua) -> c_int {
        unsafe {
            button_class_add_signal(lua.0)
        }
    }
    fn button_connect_signal(&self, lua: &Lua) -> c_int {
        unsafe {
            button_class_connect_signal(lua.0)
        }
    }
    fn button_disconnect_signal(&self, lua: &Lua) -> c_int {
        unsafe {
            button_class_disconnect_signal(lua.0)
        }
    }
    fn button_emit_signal(&self, lua: &Lua) -> c_int {
        unsafe {
            button_class_emit_signal(lua.0)
        }
    }
    fn button_instances(&self, lua: &Lua) -> c_int {
        unsafe {
            button_class_instances(lua.0)
        }
    }
    fn button_set_index_miss_handler(&self, lua: &Lua) -> c_int {
        unsafe {
            button_set_index_miss_handler(lua.0)
        }
    }
    fn button_set_newindex_miss_handler(&self, lua: &Lua) -> c_int {
        unsafe {
            button_set_newindex_miss_handler(lua.0)
        }
    }
    // Constructor invoked when Lua calls the class table.
    fn button___call(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::button_new(lua.0)
        }
    }
    /* Meta */
    fn button___tostring_meta(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::object_tostring(lua.0)
        }
    }
    fn button_connect_signal_meta(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::object_connect_signal_simple(lua.0)
        }
    }
    fn button_disconnect_signal_meta(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::object_disconnect_signal_simple(lua.0)
        }
    }
    fn button_emit_signal_meta(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::object_emit_signal_simple(lua.0)
        }
    }
    /* LUA_CLASS_META methods */
    fn button___index_meta(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::class_index(lua.0)
        }
    }
    fn button___newindex_meta(&self, lua: &Lua) -> c_int {
        unsafe {
            luaA::class_newindex(lua.0)
        }
    }
}
|
use super::{Pusherator, PusheratorBuild};
/// Pusherator adaptor that forwards only items satisfying `func` to `next`.
pub struct Filter<Next, Func> {
    next: Next,
    func: Func,
}
impl<Next, Func> Pusherator for Filter<Next, Func>
where
    Next: Pusherator,
    Func: FnMut(&Next::Item) -> bool,
{
    type Item = Next::Item;
    /// Forwards `item` downstream only when the predicate accepts it.
    fn give(&mut self, item: Self::Item) {
        if (self.func)(&item) {
            self.next.give(item);
        }
    }
}
impl<Next, Func> Filter<Next, Func>
where
    Next: Pusherator,
    Func: FnMut(&Next::Item) -> bool,
{
    /// Creates a filter pushing accepted items into `next`.
    pub fn new(func: Func, next: Next) -> Self {
        Self { next, func }
    }
}
/// Builder stage for `Filter`; the bounds stay on the struct because
/// `Func`'s signature refers to `Prev::ItemOut`.
pub struct FilterBuild<Prev, Func>
where
    Prev: PusheratorBuild,
    Func: FnMut(&Prev::ItemOut) -> bool,
{
    prev: Prev,
    func: Func,
}
impl<Prev, Func> FilterBuild<Prev, Func>
where
    Prev: PusheratorBuild,
    Func: FnMut(&Prev::ItemOut) -> bool,
{
    /// Appends a filter stage onto the chain being built.
    pub fn new(prev: Prev, func: Func) -> Self {
        Self { prev, func }
    }
}
impl<Prev, Func> PusheratorBuild for FilterBuild<Prev, Func>
where
    Prev: PusheratorBuild,
    Func: FnMut(&Prev::ItemOut) -> bool,
{
    // Filtering does not change the item type.
    type ItemOut = Prev::ItemOut;
    type Output<Next: Pusherator<Item = Self::ItemOut>> = Prev::Output<Filter<Next, Func>>;
    /// Finalizes this stage by wrapping `input` in a `Filter` and
    /// handing it to the upstream builder.
    fn push_to<Next>(self, input: Next) -> Self::Output<Next>
    where
        Next: Pusherator<Item = Self::ItemOut>,
    {
        self.prev.push_to(Filter::new(self.func, input))
    }
}
|
/// Returns the static string `"bar"`.
pub fn bar() -> &'static str {
    "bar"
}
|
//! Submodule for organizing user command help text.
/// Topics for which help text exists.
enum Topic {
    Connect,
    Disconnect,
    Help,
    Host,
    Stop,
    Quit
}

impl Topic {
    /// Parse a string (with or without a leading '/') into a Topic.
    fn from_string(s: &str) -> Option<Topic> {
        // `strip_prefix` removes at most one leading '/' without the
        // `to_string` + `remove(0)` allocation the original performed.
        let s = s.strip_prefix('/').unwrap_or(s);
        match s {
            "connect" => Some(Topic::Connect),
            "disconnect" => Some(Topic::Disconnect),
            "help" => Some(Topic::Help),
            "host" => Some(Topic::Host),
            "stop" => Some(Topic::Stop),
            "quit" => Some(Topic::Quit),
            _ => None
        }
    }
    /// The help text associated with the topic.
    fn description(&self) -> &str {
        match *self {
            Topic::Connect =>
                "Connects to the server at hostname or IP <host> with the username <name>.",
            Topic::Disconnect =>
                "Disconnects from the server.",
            Topic::Help =>
                "Displays help, optionally for a topic.",
            Topic::Host =>
                "Starts running a server locally on port <port>.",
            Topic::Stop =>
                "Stops the running server.",
            Topic::Quit =>
                "Exits the program."
        }
    }
    /// Usage examples for the topic.
    fn usage(&self) -> &str {
        match *self {
            Topic::Connect => "/connect <host> <port> <name>",
            Topic::Disconnect => "/disconnect",
            Topic::Help => "/help [topic]",
            Topic::Host => "/host <port>",
            Topic::Stop => "/stop",
            Topic::Quit => "/quit"
        }
    }
}
/// Prints help to the user, optionally about a given topic.
/// Prints help to the user, optionally about a given topic.
pub fn print_help(topic_string: Option<String>) {
    match topic_string {
        Some(ts) => match Topic::from_string(&ts) {
            Some(topic) => {
                println!("{}", topic.description());
                println!("Usage: {}", topic.usage());
            }
            None => println!("Unknown command {:?}.", ts),
        },
        None => println!("Commands: /connect, /disconnect, /help, /host, /quit, /stop"),
    }
}
|
#![no_std]
pub mod elfloader;
pub mod rsrvmalloc; |
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// DistributionWidgetYAxis : Y Axis controls for the distribution widget.
// All fields are optional and omitted from the JSON when unset.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DistributionWidgetYAxis {
    /// True includes zero.
    #[serde(rename = "include_zero", skip_serializing_if = "Option::is_none")]
    pub include_zero: Option<bool>,
    /// The label of the axis to display on the graph.
    #[serde(rename = "label", skip_serializing_if = "Option::is_none")]
    pub label: Option<String>,
    /// Specifies the maximum value to show on the y-axis. It takes a number, or auto for default behavior.
    #[serde(rename = "max", skip_serializing_if = "Option::is_none")]
    pub max: Option<String>,
    /// Specifies minimum value to show on the y-axis. It takes a number, or auto for default behavior.
    #[serde(rename = "min", skip_serializing_if = "Option::is_none")]
    pub min: Option<String>,
    /// Specifies the scale type. Possible values are `linear` or `log`.
    #[serde(rename = "scale", skip_serializing_if = "Option::is_none")]
    pub scale: Option<String>,
}
impl DistributionWidgetYAxis {
    /// Y Axis controls for the distribution widget.
    // All-None: every field falls back to the API's defaults.
    pub fn new() -> DistributionWidgetYAxis {
        DistributionWidgetYAxis {
            include_zero: None,
            label: None,
            max: None,
            min: None,
            scale: None,
        }
    }
}
|
#[macro_use]
extern crate nom;
use std::cell::RefCell;
use std::str;
use nom::{alphanumeric, space};
use nom::IResult::Done;
use std::fs::File;
use std::io::Read;
use std::collections::{HashMap, HashSet};
// Parses a program name: a run of alphanumeric bytes, decoded as &str.
named!(name<&str>, map_res!(alphanumeric, str::from_utf8));
// Parses a parenthesized weight, e.g. "(66)", into an i32.
named!(
    weight<i32>,
    map_res!(
        map_res!(
            delimited!(char!('('), is_not!(")"), char!(')')),
            str::from_utf8
        ),
        str::parse::<i32>
    )
);
// Parses a ", "-separated, non-empty list of child names.
named!(
    children<Vec<&str>>,
    map!(
        separated_nonempty_list_complete!(tag!(", "), alphanumeric),
        |vec: Vec<_>| vec.into_iter()
            .map(|v| str::from_utf8(v).unwrap())
            .collect()
    )
);
/// One tower line (AoC 2017 day 7): a program's name, weight, and the
/// names of the programs standing on it (if any).
#[derive(Debug, Eq, PartialEq)]
struct Program<'a> {
    name: &'a str,
    weight: i32,
    children: Option<Vec<&'a str>>,
}
// Parses a full line: `name (weight)` optionally followed by
// ` -> child, child, …`.
named!(
    program<Program>,
    do_parse!(
        n: name >> space >> w: weight >> alt!(eof!() | tag!(" -> ")) >> c: opt!(children)
            >> (Program {
                name: n,
                weight: w,
                children: c,
            })
    )
);
#[cfg(test)]
mod test {
    use super::*;
    // Each parser is checked on a fragment of the puzzle's sample input.
    #[test]
    fn test_name() {
        assert_eq!(name(b"pbga (66)"), Done(&b" (66)"[..], "pbga"));
    }
    #[test]
    fn test_weight() {
        assert_eq!(weight(b"(66)"), Done(&b""[..], 66));
    }
    #[test]
    fn test_children() {
        assert_eq!(
            children(b"gyxo, ebii, jptl"),
            Done(&b""[..], vec!["gyxo", "ebii", "jptl"])
        );
    }
    #[test]
    fn test_single_program() {
        assert_eq!(
            program(b"gyxo (61)"),
            Done(
                &b""[..],
                Program {
                    name: "gyxo",
                    weight: 61,
                    children: None,
                }
            )
        );
    }
    #[test]
    fn test_single_program_with_children() {
        assert_eq!(
            program(b"ugml (68) -> gyxo, ebii, jptl"),
            Done(
                &b""[..],
                Program {
                    name: "ugml",
                    weight: 68,
                    children: Some(vec!["gyxo", "ebii", "jptl"]),
                }
            )
        );
    }
}
/// Reads `input.txt`, parses one `Program` per line, then prints the
/// tower's bottom program (part 1) and the corrected weight (part 2).
fn main() {
    let path = "input.txt";
    let mut input = File::open(path).expect("Unable to open file!");
    let mut input_txt = String::new();
    match input.read_to_string(&mut input_txt) {
        Err(_) => return,
        Ok(n) => println!("Read {} bytes", n),
    }
    let mut programs = Vec::new();
    for line in input_txt.lines() {
        // Lines that fail to parse are silently skipped.
        if let Done(_, o) = program(line.as_bytes()) {
            programs.push(o);
        }
    }
    let bottom_prog = bottom_program(&programs);
    println!("Bottom program: {:?}", bottom_prog);
    println!(
        "Needed weight is: {:?}",
        calculate_needed_weight(&bottom_prog, &programs)
    );
}
/// Finds the bottom of the tower: the only program that never appears as a
/// child of another program.
///
/// Builds the set of all names and the set of all child names; their
/// difference contains exactly the root. Panics if the input has no root.
fn bottom_program(programs: &[Program]) -> String {
    let all_programs: HashSet<_> = programs.iter().map(|p| p.name).collect();
    // Collect every child name once; `filter_map` skips leaf programs.
    let all_children: HashSet<_> = programs
        .iter()
        .filter_map(|p| p.children.as_ref())
        .flat_map(|c| c.iter().cloned())
        .collect();
    all_programs
        .difference(&all_children)
        .next()
        .expect("input must contain exactly one root program")
        .to_string()
}
/// Recursively adds the weights of every program beneath `child` (excluding
/// `child` itself) into `sum`.
fn sum_all_children<'a>(
    child: &str,
    name_program_map: &'a HashMap<&'a str, &'a Program>,
    sum: &mut i32,
) {
    // Borrow the child list instead of cloning the whole Vec on every call.
    if let Some(children) = name_program_map[child].children.as_ref() {
        for &c in children {
            sum_all_children(c, name_program_map, sum);
            *sum += name_program_map[c].weight;
        }
    }
}
/// Collects (depth-first, root first) the names of every program under
/// `child` that itself carries children, i.e. every sub-tower root.
fn get_sub_towers<'a>(
    child: &'a str,
    name_program_map: &'a HashMap<&'a str, &'a Program>,
    sub_towers: &mut RefCell<Vec<&'a str>>,
) {
    // Borrow the child list instead of cloning it on every recursive call;
    // leaves (no children) are not recorded.
    if let Some(children) = name_program_map[child].children.as_ref() {
        sub_towers.borrow_mut().push(child);
        for &c in children {
            get_sub_towers(c, name_program_map, sub_towers);
        }
    }
}
/// Computes the weight the single unbalanced program would need to have for
/// every sub-tower to balance. Returns 0 if the whole tower is balanced.
fn calculate_needed_weight<'a>(bottom_prog: &str, programs: &'a [Program]) -> i32 {
    // Index programs by name for O(1) lookups below.
    let name_prog_map = programs.iter().fold(HashMap::new(), |mut acc, v| {
        acc.insert(v.name, v);
        acc
    });
    let mut sub_towers = RefCell::new(Vec::new());
    get_sub_towers(bottom_prog, &name_prog_map, &mut sub_towers);
    // check all sub-towers and see which one is not balanced
    // For each sub-tower root, pair every direct child with the total weight
    // of the tower that child carries (its own weight + all descendants).
    let sub_towers_with_weights = sub_towers
        .into_inner()
        .into_iter()
        .map(|t| (t, name_prog_map[t].children.clone().unwrap()))
        .map(|t| {
            (
                t.0,
                t.1
                    .into_iter()
                    .map(|t| {
                        let mut sum = 0;
                        sum_all_children(t, &name_prog_map, &mut sum);
                        sum += name_prog_map[t].weight;
                        (t, sum)
                    })
                    .collect::<Vec<_>>(),
            )
        })
        .collect::<Vec<_>>();
    // we need to go from bottom to up, first tower that has the first wrong weight is
    // the unbalanced tower
    for (_, t) in sub_towers_with_weights.into_iter().rev() {
        if let Some(unbalanced_tower) = get_unbalanced_data(t.as_slice()) {
            // Own weight of the unbalanced program, followed by the total
            // weights of each of its child towers.
            let mut weights = vec![name_prog_map[(unbalanced_tower.0).0].weight];
            weights.extend(
                name_prog_map[(unbalanced_tower.0).0]
                    .children
                    .clone()
                    .unwrap()
                    .into_iter()
                    .map(|v| {
                        let mut sum = 0;
                        sum_all_children(v, &name_prog_map, &mut sum);
                        sum += name_prog_map[v].weight;
                        sum
                    })
                    .collect::<Vec<_>>(),
            );
            // get the tower with unbalanced weight: the one whose weight
            // occurs exactly once in the tally.
            let unbalanced_tower_weigth = weights
                .iter()
                .fold(HashMap::new(), |mut acc, v| {
                    {
                        let counter = acc.entry(v).or_insert(0);
                        *counter += 1;
                    }
                    acc
                })
                .into_iter()
                .find(|&(_, v)| v == 1)
                .unwrap()
                .0;
            // Corrected weight = odd weight minus the sub-tower imbalance.
            return unbalanced_tower_weigth - ((unbalanced_tower.0).1 - unbalanced_tower.1).abs();
        }
    }
    0
}
/// Given sibling towers as `(name, total_weight)` pairs, finds the odd one out.
///
/// Returns `Some(((name, odd_weight), common_weight))` when exactly one
/// tower's weight differs from the rest, or `None` when all towers weigh the
/// same — or when there is no majority weight at all (e.g. two towers that
/// disagree), where the previous version would have panicked.
fn get_unbalanced_data<'a>(towers: &'a [(&'a str, i32)]) -> Option<((&'a str, i32), i32)> {
    // Tally how many towers carry each weight.
    let mut weight_counts = HashMap::new();
    for &(_, w) in towers {
        *weight_counts.entry(w).or_insert(0) += 1;
    }
    // Single pass over the tally: a weight seen exactly once is the outlier,
    // any other weight is the common (balanced) one.
    let mut odd = None;
    let mut common = None;
    for (&w, &count) in &weight_counts {
        if count == 1 {
            odd = Some(w);
        } else {
            common = Some(w);
        }
    }
    let (odd, common) = match (odd, common) {
        (Some(o), Some(c)) => (o, c),
        // Balanced, or no majority to compare against.
        _ => return None,
    };
    // Recover the name of the tower carrying the odd weight.
    let name = towers.iter().find(|&&(_, w)| w == odd).unwrap().0;
    Some(((name, odd), common))
}
|
use thiserror::Error;
/// Validation failures produced by `OverworldGenerationParamsBuilder::build`.
#[derive(Debug, Clone, Error)]
pub enum OverworldGenerationParamsError {
    /// The bridge lengths violate `1 <= min <= max <= room_radius`.
    #[error(
        "max_bridge_len {max_bridge_len} has to be less than or equal to room radius {room_radius} and greater than or equal to min_bridge_len {min_bridge_len}. Bridges must be at least 1 long"
    )]
    BadBridgeLen {
        min_bridge_len: u32,
        max_bridge_len: u32,
        room_radius: u32,
    },
    /// `radius` or `room_radius` was zero.
    #[error("Radius must be non-zero")]
    BadRadius,
}
/// Validated parameters for overworld generation. Construct via
/// `OverworldGenerationParams::builder()`; fields are crate-private so
/// invalid combinations cannot be created directly.
#[derive(Debug, Clone)]
pub struct OverworldGenerationParams {
    pub(crate) radius: u32,
    pub(crate) room_radius: u32,
    pub(crate) min_bridge_len: u32,
    pub(crate) max_bridge_len: u32,
}
/// Unvalidated builder for `OverworldGenerationParams`; call `build()` to
/// validate and obtain the final params.
#[derive(Debug, Clone, Default)]
pub struct OverworldGenerationParamsBuilder {
    pub radius: u32,
    pub room_radius: u32,
    pub min_bridge_len: u32,
    pub max_bridge_len: u32,
}
impl OverworldGenerationParams {
pub fn builder() -> OverworldGenerationParamsBuilder {
Default::default()
}
}
impl OverworldGenerationParamsBuilder {
    /// Validates the collected fields and produces the final parameters.
    ///
    /// Bridge lengths must satisfy
    /// `1 <= min_bridge_len <= max_bridge_len <= room_radius`, and both
    /// radii must be non-zero; otherwise the matching error is returned.
    pub fn build(self) -> Result<OverworldGenerationParams, OverworldGenerationParamsError> {
        let bridge_len_valid = self.min_bridge_len >= 1
            && self.min_bridge_len <= self.max_bridge_len
            && self.min_bridge_len <= self.room_radius
            && self.max_bridge_len <= self.room_radius;
        if !bridge_len_valid {
            return Err(OverworldGenerationParamsError::BadBridgeLen {
                min_bridge_len: self.min_bridge_len,
                max_bridge_len: self.max_bridge_len,
                room_radius: self.room_radius,
            });
        }
        if self.radius == 0 || self.room_radius == 0 {
            return Err(OverworldGenerationParamsError::BadRadius);
        }
        Ok(OverworldGenerationParams {
            radius: self.radius,
            room_radius: self.room_radius,
            min_bridge_len: self.min_bridge_len,
            max_bridge_len: self.max_bridge_len,
        })
    }
    /// Sets the overworld radius.
    pub fn with_radius(mut self, radius: u32) -> Self {
        self.radius = radius;
        self
    }
    /// Sets the radius of each room.
    pub fn with_room_radius(mut self, room_radius: u32) -> Self {
        self.room_radius = room_radius;
        self
    }
    /// Sets the minimum bridge length.
    pub fn with_min_bridge_len(mut self, min_bridge_len: u32) -> Self {
        self.min_bridge_len = min_bridge_len;
        self
    }
    /// Sets the maximum bridge length.
    pub fn with_max_bridge_len(mut self, max_bridge_len: u32) -> Self {
        self.max_bridge_len = max_bridge_len;
        self
    }
}
|
pub(crate) mod version;
pub(crate) mod version_revision_resolver;
pub(crate) mod vtable;
use std::{cell::RefCell, rc::Rc};
use self::{
version::repository_impl::VersionRepositoryImpl, vtable::repository_impl::VTableRepositoryImpl,
};
use crate::{
error::InfraError,
sqlite::{
database::SqliteDatabase, rows::from_sqlite_rows::FromSqliteRows, sqlite_rowid::SqliteRowid,
},
};
use apllodb_shared_components::{ApllodbResult, DatabaseName};
use apllodb_storage_engine_interface::{ColumnDataType, ColumnName, Rows, TableName};
use log::debug;
/// Many transactions share 1 SQLite connection in `Database`.
#[derive(Debug)]
pub(crate) struct SqliteTx {
    database_name: DatabaseName,
    // Taken (`Option::take()`) exactly once on commit/abort, so a second
    // commit/abort panics rather than double-finishing the transaction.
    sqlx_tx: Option<sqlx::Transaction<'static, sqlx::sqlite::Sqlite>>,
}
impl SqliteTx {
    /// Creates a `VTable` repository that shares this transaction.
    pub(crate) fn vtable_repo(slf: Rc<RefCell<Self>>) -> VTableRepositoryImpl {
        VTableRepositoryImpl::new(slf)
    }
    /// Creates a `Version` repository that shares this transaction.
    pub(crate) fn version_repo(slf: Rc<RefCell<Self>>) -> VersionRepositoryImpl {
        VersionRepositoryImpl::new(slf)
    }
}
impl SqliteTx {
    /// Opens a new SQLite transaction on `db`'s connection pool.
    ///
    /// # Failures
    ///
    /// - [IoError](apllodb_shared_components::SqlState::IoError) when:
    ///   - sqlx raises an error.
    pub(crate) async fn begin(db: &SqliteDatabase) -> ApllodbResult<Rc<RefCell<SqliteTx>>> {
        let database_name = { db.name().clone() };
        let tx = db.sqlite_pool().begin().await.map_err(InfraError::from)?;
        Ok(Rc::new(RefCell::new(Self {
            database_name,
            sqlx_tx: Some(tx),
        })))
    }
    /// Commits the transaction. Must be called at most once (panics otherwise).
    ///
    /// # Failures
    ///
    /// If any of the following error is returned, transaction has already been aborted.
    ///
    /// - [IoError](apllodb_shared_components::SqlState::IoError) when:
    ///   - sqlx raises an error.
    pub(crate) async fn commit(&mut self) -> ApllodbResult<()> {
        self.sqlx_tx
            .take()
            .expect("SqliteTx::commit() / SqliteTx::abort() must be called only once")
            .commit()
            .await
            .map_err(InfraError::from)?;
        Ok(())
    }
    /// Rolls the transaction back. Must be called at most once (panics otherwise).
    ///
    /// # Failures
    ///
    /// - [IoError](apllodb_shared_components::SqlState::IoError) when:
    ///   - sqlx raises an error.
    pub(crate) async fn abort(&mut self) -> ApllodbResult<()> {
        self.sqlx_tx
            .take()
            .expect("SqliteTx::commit() / SqliteTx::abort() must be called only once")
            .rollback()
            .await
            .map_err(InfraError::from)?;
        Ok(())
    }
    /// Name of the database this transaction operates on.
    pub(crate) fn database_name(&self) -> &DatabaseName {
        &self.database_name
    }
}
impl SqliteTx {
    // FIXME should take placeholder argument to prevent SQL-i
    /// Runs a SELECT-style statement inside this transaction and converts the
    /// raw SQLite rows into engine `Rows` using the given column metadata.
    pub(in crate::sqlite::transaction::sqlite_tx) async fn query(
        &mut self,
        sql: &str,
        table_name: &TableName,
        column_data_types: &[&ColumnDataType],
        void_projection: &[ColumnName],
    ) -> ApllodbResult<Rows> {
        debug!("SqliteTx::query():\n    {}", sql);
        let rows = sqlx::query(sql)
            .fetch_all(self.sqlx_tx.as_mut().unwrap())
            .await
            .map_err(InfraError::from)?;
        Rows::from_sqlite_rows(&rows, table_name, column_data_types, void_projection)
    }
    /// Runs a write statement inside this transaction and returns the rowid
    /// of the last inserted row.
    pub(in crate::sqlite::transaction::sqlite_tx) async fn execute(
        &mut self,
        sql: &str,
    ) -> ApllodbResult<SqliteRowid> {
        debug!("SqliteTx::execute():\n    {}", sql);
        let done = sqlx::query(sql)
            .execute(self.sqlx_tx.as_mut().unwrap())
            .await
            .map_err(InfraError::from)?;
        Ok(SqliteRowid(done.last_insert_rowid()))
    }
}
|
//! This example shows how to use and define a multi-batch dispatcher.
//!
//! It allows to influence how many times a set of systems gets dispatched.
//!
//! Specifically here we have three Systems
//! - `SayHelloSystem`: Which is directly registered under the main dispatcher.
//! - `BuyTomatoSystem` and `BuyPotatoSystem` are registered to the batch.
//!
//! Notice that none of these systems are directly depending on others.
//! The `SayHelloSystem` is requesting the resources `TomatoStore` and
//! `PotatoStore`, which are also requested by the other two systems inside
//! the batch and by the batch controller itself.
//!
//! This is done by defining `Run3Times` which decides that the inner systems
//! should be run 3 times. This is similar to the `batch_dispatching.rs`
//! example, but that one uses a more flexible (but also more verbose) way of
//! doing it.
use shred::{
DispatcherBuilder, MultiDispatchController, MultiDispatcher, Read, System, World, Write,
};
use std::{thread::sleep, time::Duration};
/// Builds a dispatcher holding one plain system plus a batch whose
/// `Run3Times` controller dispatches the inner systems three times per
/// frame, then runs ten frames.
fn main() {
    let mut dispatcher = DispatcherBuilder::new()
        .with(SayHelloSystem, "say_hello_system", &[])
        .with_batch(
            MultiDispatcher::new(Run3Times),
            DispatcherBuilder::new()
                .with(BuyTomatoSystem, "buy_tomato_system", &[])
                .with(BuyPotatoSystem, "buy_potato_system", &[]),
            "BatchSystemTest",
            &[],
        )
        .build();
    let mut world = World::empty();
    dispatcher.setup(&mut world);
    // Dispatch ten frames, pausing 100ms between each.
    for frame in 0..10 {
        println!("Dispatching {} ", frame);
        dispatcher.dispatch(&world);
        sleep(Duration::new(0, 100_000_000));
    }
    println!("Execution finished");
}
// Resources
// Shared store resources requested by the systems below.
#[derive(Default)]
pub struct PotatoStore(i32);
#[derive(Default)]
pub struct TomatoStore(f32);
/// System that says "Hello!"
pub struct SayHelloSystem;
impl<'a> System<'a> for SayHelloSystem {
    // Requests both stores, overlapping with the batched systems' data.
    type SystemData = (Write<'a, PotatoStore>, Write<'a, TomatoStore>);
    fn run(&mut self, _data: Self::SystemData) {
        println!("Hello!")
    }
}
/// System that says "Buy Potato"
pub struct BuyPotatoSystem;
impl<'a> System<'a> for BuyPotatoSystem {
    type SystemData = Write<'a, PotatoStore>;
    fn run(&mut self, _data: Self::SystemData) {
        println!("Buy Potato")
    }
}
/// System that says "Buy Tomato"
pub struct BuyTomatoSystem;
impl<'a> System<'a> for BuyTomatoSystem {
    type SystemData = Write<'a, TomatoStore>;
    fn run(&mut self, _data: Self::SystemData) {
        println!("Buy Tomato")
    }
}
/// Batch controller: makes the batched systems run three times per dispatch.
#[derive(Default)]
struct Run3Times;
impl<'a> MultiDispatchController<'a> for Run3Times {
    type SystemData = Read<'a, TomatoStore>;
    fn plan(&mut self, _data: Self::SystemData) -> usize {
        3
    }
}
|
use std::path::PathBuf;
use anyhow::{bail, Result};
use glob::Pattern;
use serde::Deserialize;
use toml::{self, Value as Toml};
/// A named group of files, with optional glob patterns to ignore.
#[derive(Clone, Debug)]
pub struct Item {
    pub name: String,
    pub files: Vec<PathBuf>,
    // Glob patterns (as raw strings) for paths to skip; compiled on demand
    // by `ignore_patterns()`.
    pub ignore: Option<Vec<String>>,
}
/// Convenience constructor for `Item`.
///
/// `item!(name, files)` builds an item without ignore patterns;
/// `item!(name, files, ignore)` also sets the ignore globs. `files` and
/// `ignore` may be any slice/array of string-like values.
#[macro_export]
macro_rules! item {
    ($name:expr, $files:expr) => {{
        let files = {
            let v: Vec<String> = $files.to_vec().iter().map(|s| s.to_string()).collect();
            v
        };
        $crate::data::item::Item::new($name.to_string(), files, None)
    }};
    ($name:expr, $files:expr, $ignore:expr) => {{
        let files = {
            let v: Vec<String> = $files.to_vec().iter().map(|s| s.to_string()).collect();
            v
        };
        let ignore = {
            let v: Vec<String> = $ignore.to_vec().iter().map(|s| s.to_string()).collect();
            v
        };
        $crate::data::item::Item::new($name.to_string(), files, Some(ignore))
    }};
}
impl Item {
    /// Creates an `Item`, converting each file string into a `PathBuf`.
    pub fn new(name: String, files: Vec<String>, ignore: Option<Vec<String>>) -> Self {
        Self {
            name,
            ignore,
            // `into_iter` consumes the owned strings, so each `PathBuf` is
            // built without cloning (the previous `iter()` cloned every one).
            files: files.into_iter().map(PathBuf::from).collect(),
        }
    }
    /// Builds an item holding a single file.
    pub fn from_str(name: String, file: String) -> Self {
        Self::new(name, vec![file], None)
    }
    /// Builds an item holding several files.
    pub fn from_list(name: String, files: Vec<String>) -> Self {
        Self::new(name, files, None)
    }
    /// Parses an item from a TOML value: either a single path string, a
    /// non-empty list of path strings, or a table with `files` and an
    /// optional `ignore` key. Any other shape is an error.
    pub fn from_toml(name: String, value: Toml) -> Result<Self> {
        let item = match value {
            Toml::String(s) => {
                if s.trim().is_empty() {
                    bail!("{}: string must not be empty", name);
                }
                Self::from_str(name, s)
            }
            Toml::Array(arr) => {
                if arr.is_empty() {
                    bail!("{}: list must not be empty", name);
                }
                let mut files = Vec::new();
                for value in arr {
                    match value {
                        Toml::String(s) => files.push(s),
                        _ => bail!("invalid type for {}", name),
                    }
                }
                Self::from_list(name, files)
            }
            Toml::Table(t) => {
                // Round-trip through a string so serde can deserialize the
                // table into the typed `Obj` helper.
                let s = toml::to_string(&t)?;
                let obj: Obj = toml::from_str(&s)?;
                Self::new(name, obj.files, obj.ignore)
            }
            _ => bail!("invalid type for {}", name),
        };
        Ok(item)
    }
    /// Returns the item with every file path prefixed by `suffix`.
    pub fn with_suffix(mut self, suffix: &str) -> Self {
        let root = PathBuf::from(suffix);
        self.files = self.files.iter().map(|p| root.join(p)).collect();
        self
    }
    /// Compiles the `ignore` globs, if any; errors on an invalid pattern.
    pub fn ignore_patterns(&self) -> Result<Option<Vec<Pattern>>> {
        let patterns = match &self.ignore {
            None => None,
            Some(v) => {
                let mut ps = Vec::new();
                for s in v {
                    ps.push(Pattern::new(s)?);
                }
                Some(ps)
            }
        };
        Ok(patterns)
    }
}
// Deserialization target for the TOML-table form of an item
// (`{ files = [...], ignore = [...] }`).
#[derive(Deserialize)]
struct Obj {
    ignore: Option<Vec<String>>,
    files: Vec<String>,
}
#[cfg(test)]
impl Item {
    /// Test helper: an item with a single file and no ignore globs.
    pub fn simple_new(name: &str, file: &str) -> Item {
        Item::from_str(name.to_string(), file.to_string())
    }
    /// Test helper: an item with several files and optional ignore globs.
    pub fn object_new(name: &str, files: &[&str], ignore: Option<&[&str]>) -> Item {
        // Shared conversion from a borrowed slice to owned strings.
        let owned = |xs: &[&str]| xs.iter().map(|s| s.to_string()).collect::<Vec<String>>();
        Item::new(name.to_string(), owned(files), ignore.map(|f| owned(f)))
    }
}
|
use crate::util::base_types::PvmBaseUnit;
// Opcode value of the "no operation" instruction.
const nop: u8 = 0;
// BitCode
// A single encoded instruction: an opcode byte plus its operand word.
pub struct BitCode {
    op_code : u8,
    op_num : PvmBaseUnit,
}
//! This example demonstrates using the [`Format`] [`CellOption`] factory to alter
//! the cells of a [`Table`].
//!
//! * Note how [`Format::content()`] gives access to the respective cell content for replacement.
//! And [`Format::positioned()`] additionally provides the index coordinates of that cell.
//!
//! * Note how the [std] [`format!`] macro is used to update the values of the affected cells.
use tabled::{
settings::{
object::{Columns, Object, Rows},
Format, Modify, Style,
},
Table, Tabled,
};
/// One table row: a commit hash, subject line, and body summary.
#[derive(Tabled)]
struct Commit {
    id: &'static str,
    header: &'static str,
    message: &'static str,
}
/// Renders a commit table where the header row is replaced by column
/// indices and every commit id is suffixed with an ellipsis.
fn main() {
    let commits = [
        Commit {
            id: "8ae4e8957caeaa467acbce963701e227af00a1c7",
            header: "bypass open-source transmitter",
            message: "index neural panel",
        },
        Commit {
            id: "48c76de71bd685486d97dc8f4f05aa6fcc0c3f86",
            header: "program online alarm",
            message: "copy bluetooth card",
        },
        Commit {
            id: "6ffc2a2796229fc7bf59471ad907f58b897005d0",
            header: "CSV",
            message: "reboot mobile capacitor",
        },
    ];
    // Header row: each cell becomes its column index.
    let number_header = Modify::new(Rows::first())
        .with(Format::positioned(|_, (_, column)| column.to_string()));
    // First column, header excluded: append "..." to every id.
    let ellipsize_ids = Modify::new(Columns::first().not(Rows::first()))
        .with(Format::content(|s| format!("{s}...")));
    let table = Table::new(commits)
        .with(Style::psql())
        .with(number_header)
        .with(ellipsize_ids)
        .to_string();
    println!("{table}");
}
|
extern crate bindgen;
use std::env;
use std::path::PathBuf;
/// Build script: locates the PHP library/headers (overridable via env vars),
/// emits the link directives, and generates bindings for the selected Zend
/// API symbols into `$OUT_DIR/bindings.rs`.
fn main () {
    // Re-run when any of the PHP configuration env vars change.
    println!("cargo:rerun-if-env-changed=PHP_LIB_DIR");
    println!("cargo:rerun-if-env-changed=PHP_INCLUDE_DIR");
    println!("cargo:rerun-if-env-changed=PHP_LINK_STATIC");
    let default_link_static = false;
    // Build the fallback paths lazily so they are only allocated when the
    // corresponding env var is absent.
    let lib_dir = env::var_os("PHP_LIB_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("/usr/lib"));
    let include_dir = env::var_os("PHP_INCLUDE_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("/usr/include/php"));
    // Unparseable PHP_LINK_STATIC values fall back to dynamic linking.
    let link_static = match env::var_os("PHP_LINK_STATIC") {
        Some(val) => val.to_string_lossy().parse::<bool>().unwrap_or(default_link_static),
        None => default_link_static,
    };
    // Fail fast with a readable message if the directories are missing.
    if !lib_dir.exists() {
        panic!(
            "PHP library directory does not exist: {}",
            lib_dir.to_string_lossy()
        );
    }
    if !include_dir.exists() {
        panic!(
            "PHP include directory does not exist: {}",
            include_dir.to_string_lossy()
        );
    }
    let link_type = if link_static {
        "=static"
    } else {
        "=dylib"
    };
    println!("cargo:rustc-link-lib{}=php7", link_type);
    println!("cargo:rustc-link-search=native={}", lib_dir.to_string_lossy());
    // The PHP headers live in several subdirectories of the include root.
    let includes = ["/", "/TSRM", "/Zend", "/main"].iter().map(|d| {
        format!("-I{}{}", include_dir.to_string_lossy(), d)
    }).collect::<Vec<String>>();
    // Only whitelist the symbols the crate actually uses, to keep the
    // generated bindings small.
    let bindings = bindgen::Builder::default()
        .rustfmt_bindings(true)
        .clang_args(includes)
        .whitelist_function("zend_error")
        .whitelist_function("php_info_print_table_start")
        .whitelist_function("php_info_print_table_row")
        .whitelist_function("php_info_print_table_end")
        .whitelist_function("php_printf")
        .whitelist_function("_zend_new_array")
        .whitelist_function("add_index_zval")
        .whitelist_function("add_assoc_zval_ex")
        .whitelist_function("zval_ptr_dtor")
        .whitelist_function("zend_strpprintf")
        .whitelist_type("zval")
        .whitelist_type("zend_execute_data")
        .whitelist_type("zend_string")
        .whitelist_type("zend_module_entry")
        .whitelist_type("zend_function_entry")
        .derive_default(true)
        .header("wrapper.h")
        .generate()
        .expect("Unable to generate bindings");
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings
        .write_to_file(out_path.join("bindings.rs"))
        .expect("Couldn't write bindings!");
}
|
// This file is part of Darwinia.
//
// Copyright (C) 2018-2021 Darwinia Network
// SPDX-License-Identifier: GPL-3.0
//
// Darwinia is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Darwinia is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Darwinia. If not, see <https://www.gnu.org/licenses/>.
pub mod crab_parachain;
pub use crab_parachain::{
self as crab_parachain_service, RuntimeExecutor as CrabParachainRuntimeExecutor,
};
pub use crab_parachain_runtime::{self, RuntimeApi as CrabParachainRuntimeApi};
pub mod darwinia_parachain;
pub use darwinia_parachain::{
self as darwinia_parachain_service, RuntimeExecutor as DarwiniaParachainRuntimeExecutor,
};
pub use darwinia_parachain_runtime::{self, RuntimeApi as DarwiniaParachainRuntimeApi};
// --- std ---
use std::sync::Arc;
// --- crates.io ---
use futures::lock::Mutex;
// --- paritytech ---
use cumulus_client_consensus_common::{ParachainCandidate, ParachainConsensus};
use cumulus_client_network::build_block_announce_validator;
use cumulus_client_service::{
prepare_node_config, start_collator, start_full_node, StartCollatorParams, StartFullNodeParams,
};
use cumulus_primitives_core::{
relay_chain::v1::{Hash as PHash, PersistedValidationData},
ParaId,
};
use polkadot_service::NativeExecutionDispatch;
use sc_consensus::{import_queue::Verifier as VerifierT, BlockImportParams};
use sc_executor::NativeElseWasmExecutor;
use sc_network::NetworkService;
use sc_service::{
ChainSpec, Configuration, PartialComponents, Role, TFullBackend, TFullClient, TaskManager,
};
use sc_telemetry::{Telemetry, TelemetryHandle, TelemetryWorker, TelemetryWorkerHandle};
use sp_api::{ApiExt, ConstructRuntimeApi, HeaderT};
use sp_consensus::CacheKeyId;
use sp_consensus_aura::{sr25519::AuthorityId as AuraId, AuraApi};
use sp_keystore::SyncCryptoStorePtr;
use sp_runtime::{generic::BlockId, traits::BlakeTwo256};
use substrate_prometheus_endpoint::Registry;
// --- darwinia-network ---
use darwinia_collator_primitives::{AccountId, Balance, Hash, Header, Nonce, OpaqueBlock as Block};
use darwinia_collator_rpc::FullDeps;
/// Can be called for a `Configuration` to check if it is a configuration for the `Crab Parachain` network.
pub trait IdentifyVariant {
    /// Returns true if this is a configuration for the `Crab Parachain` network.
    fn is_crab_parachain(&self) -> bool;
    /// Returns true if this configuration is for a development network.
    fn is_dev(&self) -> bool;
}
impl IdentifyVariant for Box<dyn ChainSpec> {
    fn is_crab_parachain(&self) -> bool {
        // Crab Parachain chain-spec ids share this prefix.
        let chain_id = self.id();
        chain_id.starts_with("crab-parachain")
    }
    fn is_dev(&self) -> bool {
        // Development chain-spec ids end with "dev".
        let chain_id = self.id();
        chain_id.ends_with("dev")
    }
}
// Lazily-built value: holds a constructor until first access, then the
// constructed value.
enum BuildOnAccess<R> {
    Uninitialized(Option<Box<dyn FnOnce() -> R + Send + Sync>>),
    Initialized(R),
}
impl<R> BuildOnAccess<R> {
    /// Returns the inner value, building it on first call.
    fn get_mut(&mut self) -> &mut R {
        // The loop runs at most twice: once to replace `Uninitialized` with
        // `Initialized`, then once to return the reference.
        loop {
            match self {
                Self::Uninitialized(f) => {
                    *self = Self::Initialized((f.take().unwrap())());
                }
                Self::Initialized(ref mut r) => return r,
            }
        }
    }
}
// Verifier that picks between an Aura verifier (built lazily) and a plain
// relay-chain verifier depending on the runtime's advertised APIs.
struct Verifier<Client> {
    client: Arc<Client>,
    aura_verifier: BuildOnAccess<Box<dyn VerifierT<Block>>>,
    relay_chain_verifier: Box<dyn VerifierT<Block>>,
}
#[async_trait::async_trait]
impl<Client> VerifierT<Block> for Verifier<Client>
where
    Client: sp_api::ProvideRuntimeApi<Block> + Send + Sync,
    Client::Api: AuraApi<Block, AuraId>,
{
    async fn verify(
        &mut self,
        block_import: BlockImportParams<Block, ()>,
    ) -> Result<
        (
            BlockImportParams<Block, ()>,
            Option<Vec<(CacheKeyId, Vec<u8>)>>,
        ),
        String,
    > {
        // Probe the parent block's runtime: if it exposes the Aura API the
        // chain has upgraded to a full parachain runtime, so verify with
        // Aura; otherwise fall back to the relay-chain verifier.
        let block_id = BlockId::hash(*block_import.header.parent_hash());
        if self
            .client
            .runtime_api()
            .has_api::<dyn AuraApi<Block, AuraId>>(&block_id)
            .unwrap_or(false)
        {
            self.aura_verifier.get_mut().verify(block_import).await
        } else {
            self.relay_chain_verifier.verify(block_import).await
        }
    }
}
/// Special [`ParachainConsensus`] implementation that waits for the upgrade from
/// shell to a parachain runtime that implements Aura.
struct WaitForAuraConsensus<Client> {
    client: Arc<Client>,
    // Aura consensus is built lazily, only once the runtime exposes AuraApi.
    aura_consensus: Arc<Mutex<BuildOnAccess<Box<dyn ParachainConsensus<Block>>>>>,
    relay_chain_consensus: Arc<Mutex<Box<dyn ParachainConsensus<Block>>>>,
}
// Manual Clone: `Client` itself need not be `Clone`, only the `Arc`s are cloned.
impl<Client> Clone for WaitForAuraConsensus<Client> {
    fn clone(&self) -> Self {
        Self {
            client: self.client.clone(),
            aura_consensus: self.aura_consensus.clone(),
            relay_chain_consensus: self.relay_chain_consensus.clone(),
        }
    }
}
#[async_trait::async_trait]
impl<Client> ParachainConsensus<Block> for WaitForAuraConsensus<Client>
where
    Client: sp_api::ProvideRuntimeApi<Block> + Send + Sync,
    Client::Api: AuraApi<Block, AuraId>,
{
    async fn produce_candidate(
        &mut self,
        parent: &Header,
        relay_parent: PHash,
        validation_data: &PersistedValidationData,
    ) -> Option<ParachainCandidate<Block>> {
        // Same runtime-API probe as the verifier: use Aura once available,
        // otherwise produce candidates with the relay-chain consensus.
        let block_id = BlockId::hash(parent.hash());
        if self
            .client
            .runtime_api()
            .has_api::<dyn AuraApi<Block, AuraId>>(&block_id)
            .unwrap_or(false)
        {
            self.aura_consensus
                .lock()
                .await
                .get_mut()
                .produce_candidate(parent, relay_parent, validation_data)
                .await
        } else {
            self.relay_chain_consensus
                .lock()
                .await
                .produce_candidate(parent, relay_parent, validation_data)
                .await
        }
    }
}
/// Starts a `ServiceBuilder` for a full service.
///
/// Use this macro if you don't actually need the full service, but just the builder in order to
/// be able to perform chain operations.
pub fn new_partial<RuntimeApi, Executor, BIQ>(
    config: &Configuration,
    build_import_queue: BIQ,
) -> Result<
    PartialComponents<
        TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
        TFullBackend<Block>,
        (),
        sc_consensus::DefaultImportQueue<
            Block,
            TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
        >,
        sc_transaction_pool::FullPool<
            Block,
            TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
        >,
        (Option<Telemetry>, Option<TelemetryWorkerHandle>),
    >,
    sc_service::Error,
>
where
    RuntimeApi: 'static
        + Send
        + Sync
        + ConstructRuntimeApi<Block, TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
    RuntimeApi::RuntimeApi: sp_api::ApiExt<
        Block,
        StateBackend = sc_client_api::StateBackendFor<TFullBackend<Block>, Block>,
    > + sp_api::Metadata<Block>
        + sp_block_builder::BlockBuilder<Block>
        + sp_offchain::OffchainWorkerApi<Block>
        + sp_session::SessionKeys<Block>
        + sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block>,
    sc_client_api::StateBackendFor<TFullBackend<Block>, Block>: sp_api::StateBackend<BlakeTwo256>,
    Executor: 'static + NativeExecutionDispatch,
    BIQ: FnOnce(
        Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
        &Configuration,
        Option<TelemetryHandle>,
        &TaskManager,
    ) -> Result<
        sc_consensus::DefaultImportQueue<
            Block,
            TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
        >,
        sc_service::Error,
    >,
{
    // Set up telemetry only when endpoints are configured; the worker is
    // spawned onto the task manager further below.
    let telemetry = config
        .telemetry_endpoints
        .clone()
        .filter(|x| !x.is_empty())
        .map(|endpoints| -> Result<_, sc_telemetry::Error> {
            let worker = TelemetryWorker::new(16)?;
            let telemetry = worker.handle().new_telemetry(endpoints);
            Ok((worker, telemetry))
        })
        .transpose()?;
    let executor = sc_executor::NativeElseWasmExecutor::<Executor>::new(
        config.wasm_method,
        config.default_heap_pages,
        config.max_runtime_instances,
    );
    let (client, backend, keystore_container, task_manager) =
        sc_service::new_full_parts::<Block, RuntimeApi, _>(
            &config,
            telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),
            executor,
        )?;
    let client = Arc::new(client);
    // Keep a worker handle so the relay-chain node can reuse this telemetry.
    let telemetry_worker_handle = telemetry.as_ref().map(|(worker, _)| worker.handle());
    let telemetry = telemetry.map(|(worker, telemetry)| {
        task_manager.spawn_handle().spawn("telemetry", worker.run());
        telemetry
    });
    let transaction_pool = sc_transaction_pool::BasicPool::new_full(
        config.transaction_pool.clone(),
        config.role.is_authority().into(),
        config.prometheus_registry(),
        task_manager.spawn_essential_handle(),
        client.clone(),
    );
    // The import queue is runtime-specific, so the caller supplies it.
    let import_queue = build_import_queue(
        client.clone(),
        config,
        telemetry.as_ref().map(|telemetry| telemetry.handle()),
        &task_manager,
    )?;
    let params = PartialComponents {
        backend,
        client,
        import_queue,
        keystore_container,
        task_manager,
        transaction_pool,
        select_chain: (),
        other: (telemetry, telemetry_worker_handle),
    };
    Ok(params)
}
/// Start a node with the given parachain `Configuration` and relay chain `Configuration`.
///
/// This is the actual implementation that is abstract over the executor and the runtime api.
#[sc_tracing::logging::prefix_logs_with("Parachain")]
async fn start_node_impl<RuntimeApi, Executor, RB, BIQ, BIC>(
    parachain_config: Configuration,
    polkadot_config: Configuration,
    id: ParaId,
    _rpc_ext_builder: RB,
    build_import_queue: BIQ,
    build_consensus: BIC,
) -> sc_service::error::Result<(
    TaskManager,
    Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
)>
where
    RuntimeApi: 'static
        + Send
        + Sync
        + ConstructRuntimeApi<Block, TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
    RuntimeApi::RuntimeApi: cumulus_primitives_core::CollectCollationInfo<Block>
        + sp_api::ApiExt<
            Block,
            StateBackend = sc_client_api::StateBackendFor<TFullBackend<Block>, Block>,
        > + sp_api::Metadata<Block>
        + sp_block_builder::BlockBuilder<Block>
        + sp_offchain::OffchainWorkerApi<Block>
        + sp_session::SessionKeys<Block>
        + sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block>
        + pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>
        + substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Nonce>,
    sc_client_api::StateBackendFor<TFullBackend<Block>, Block>: sp_api::StateBackend<BlakeTwo256>,
    Executor: 'static + sc_executor::NativeExecutionDispatch,
    RB: 'static
        + Send
        + Fn(
            Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
        ) -> Result<jsonrpc_core::IoHandler<sc_rpc::Metadata>, sc_service::Error>,
    BIQ: FnOnce(
        Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
        &Configuration,
        Option<TelemetryHandle>,
        &TaskManager,
    ) -> Result<
        sc_consensus::DefaultImportQueue<
            Block,
            TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
        >,
        sc_service::Error,
    >,
    BIC: FnOnce(
        Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
        Option<&Registry>,
        Option<TelemetryHandle>,
        &TaskManager,
        &polkadot_service::NewFull<polkadot_service::Client>,
        Arc<
            sc_transaction_pool::FullPool<
                Block,
                TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
            >,
        >,
        Arc<NetworkService<Block, Hash>>,
        SyncCryptoStorePtr,
        bool,
    ) -> Result<Box<dyn ParachainConsensus<Block>>, sc_service::Error>,
{
    if matches!(parachain_config.role, Role::Light) {
        return Err("Light client not supported!".into());
    }
    let parachain_config = prepare_node_config(parachain_config);
    // NOTE(review): the original text contained a mojibake `¶chain_config`
    // (HTML-entity corruption of `&para`); restored to `&parachain_config`.
    let params = new_partial::<RuntimeApi, Executor, BIQ>(&parachain_config, build_import_queue)?;
    let (mut telemetry, telemetry_worker_handle) = params.other;
    // Start the embedded relay-chain (Polkadot) full node, reusing telemetry.
    let relay_chain_full_node =
        cumulus_client_service::build_polkadot_full_node(polkadot_config, telemetry_worker_handle)
            .map_err(|e| match e {
                polkadot_service::Error::Sub(x) => x,
                s => format!("{}", s).into(),
            })?;
    let client = params.client.clone();
    let backend = params.backend.clone();
    // Block announcements are validated against the relay chain.
    let block_announce_validator = build_block_announce_validator(
        relay_chain_full_node.client.clone(),
        id,
        Box::new(relay_chain_full_node.network.clone()),
        relay_chain_full_node.backend.clone(),
    );
    let force_authoring = parachain_config.force_authoring;
    let validator = parachain_config.role.is_authority();
    let prometheus_registry = parachain_config.prometheus_registry().cloned();
    let transaction_pool = params.transaction_pool.clone();
    let mut task_manager = params.task_manager;
    let import_queue = cumulus_client_service::SharedImportQueue::new(params.import_queue);
    let (network, system_rpc_tx, start_network) =
        sc_service::build_network(sc_service::BuildNetworkParams {
            config: &parachain_config,
            client: client.clone(),
            transaction_pool: transaction_pool.clone(),
            spawn_handle: task_manager.spawn_handle(),
            import_queue: import_queue.clone(),
            on_demand: None,
            block_announce_validator_builder: Some(Box::new(|_| block_announce_validator)),
            warp_sync: None,
        })?;
    let rpc_extensions_builder = {
        let client = client.clone();
        let transaction_pool = transaction_pool.clone();
        Box::new(move |deny_unsafe, _| {
            let deps = FullDeps {
                client: client.clone(),
                pool: transaction_pool.clone(),
                deny_unsafe,
            };
            Ok(darwinia_collator_rpc::create_full(deps))
        })
    };
    sc_service::spawn_tasks(sc_service::SpawnTasksParams {
        on_demand: None,
        remote_blockchain: None,
        rpc_extensions_builder,
        client: client.clone(),
        transaction_pool: transaction_pool.clone(),
        task_manager: &mut task_manager,
        config: parachain_config,
        keystore: params.keystore_container.sync_keystore(),
        backend: backend.clone(),
        network: network.clone(),
        system_rpc_tx,
        telemetry: telemetry.as_mut(),
    })?;
    let announce_block = {
        let network = network.clone();
        Arc::new(move |hash, data| network.announce_block(hash, data))
    };
    // Authorities run a collator (producing candidates); everyone else runs
    // a plain parachain full node.
    if validator {
        let parachain_consensus = build_consensus(
            client.clone(),
            prometheus_registry.as_ref(),
            telemetry.as_ref().map(|t| t.handle()),
            &task_manager,
            &relay_chain_full_node,
            transaction_pool,
            network,
            params.keystore_container.sync_keystore(),
            force_authoring,
        )?;
        let spawner = task_manager.spawn_handle();
        let params = StartCollatorParams {
            para_id: id,
            block_status: client.clone(),
            announce_block,
            client: client.clone(),
            task_manager: &mut task_manager,
            relay_chain_full_node,
            spawner,
            parachain_consensus,
            import_queue,
        };
        start_collator(params).await?;
    } else {
        let params = StartFullNodeParams {
            client: client.clone(),
            announce_block,
            task_manager: &mut task_manager,
            para_id: id,
            relay_chain_full_node,
        };
        start_full_node(params)?;
    }
    start_network.start_network();
    Ok((task_manager, client))
}
|
use pretty_assertions::assert_eq;
use super::*;
use crate::ast::{self, tests::Locator};
#[test]
// Parses a function type with two required parameters sharing a type
// variable `T`, plus a `where` clause constraining `T` to two kinds, and
// checks the full AST including every node's source location.
fn test_parse_type_expression() {
    let mut p = Parser::new(r#"(a:T, b:T) => T where T: Addable + Divisible"#);
    let parsed = p.parse_type_expression();
    let loc = Locator::new(&p.source[..]);
    assert_eq!(
        parsed,
        TypeExpression {
            base: BaseNode {
                location: loc.get(1, 1, 1, 45),
                ..BaseNode::default()
            },
            monotype: MonoType::Function(Box::new(FunctionType {
                base: BaseNode {
                    location: loc.get(1, 1, 1, 16),
                    ..BaseNode::default()
                },
                parameters: vec![
                    ParameterType::Required {
                        base: BaseNode {
                            location: loc.get(1, 2, 1, 5),
                            ..BaseNode::default()
                        },
                        name: Identifier {
                            base: BaseNode {
                                location: loc.get(1, 2, 1, 3),
                                ..BaseNode::default()
                            },
                            name: "a".to_string(),
                        },
                        monotype: MonoType::Tvar(TvarType {
                            base: BaseNode {
                                location: loc.get(1, 4, 1, 5),
                                ..BaseNode::default()
                            },
                            name: Identifier {
                                base: BaseNode {
                                    location: loc.get(1, 4, 1, 5),
                                    ..BaseNode::default()
                                },
                                name: "T".to_string(),
                            },
                        }),
                    },
                    ParameterType::Required {
                        base: BaseNode {
                            location: loc.get(1, 7, 1, 10),
                            ..BaseNode::default()
                        },
                        name: Identifier {
                            base: BaseNode {
                                location: loc.get(1, 7, 1, 8),
                                ..BaseNode::default()
                            },
                            name: "b".to_string(),
                        },
                        monotype: MonoType::Tvar(TvarType {
                            base: BaseNode {
                                location: loc.get(1, 9, 1, 10),
                                ..BaseNode::default()
                            },
                            name: Identifier {
                                base: BaseNode {
                                    location: loc.get(1, 9, 1, 10),
                                    ..BaseNode::default()
                                },
                                name: "T".to_string(),
                            },
                        }),
                    },
                ],
                // Return type of the function: the type variable `T`.
                monotype: MonoType::Tvar(TvarType {
                    base: BaseNode {
                        location: loc.get(1, 15, 1, 16),
                        ..BaseNode::default()
                    },
                    name: Identifier {
                        base: BaseNode {
                            location: loc.get(1, 15, 1, 16),
                            ..BaseNode::default()
                        },
                        name: "T".to_string(),
                    },
                }),
            })),
            // `where T: Addable + Divisible`
            constraints: vec![TypeConstraint {
                base: BaseNode {
                    location: loc.get(1, 23, 1, 45),
                    ..BaseNode::default()
                },
                tvar: Identifier {
                    base: BaseNode {
                        location: loc.get(1, 23, 1, 24),
                        ..BaseNode::default()
                    },
                    name: "T".to_string(),
                },
                kinds: vec![
                    Identifier {
                        base: BaseNode {
                            location: loc.get(1, 26, 1, 33),
                            ..BaseNode::default()
                        },
                        name: "Addable".to_string(),
                    },
                    Identifier {
                        base: BaseNode {
                            location: loc.get(1, 36, 1, 45),
                            ..BaseNode::default()
                        },
                        name: "Divisible".to_string(),
                    },
                ],
            }],
        },
    )
}
#[test]
fn test_parse_type_expression_tvar() {
    // A lone uppercase identifier parses as a type variable (Tvar);
    // expression, monotype, and name all span the same single column.
    let mut p = Parser::new(r#"A"#);
    let parsed = p.parse_type_expression();
    let loc = Locator::new(&p.source[..]);
    assert_eq!(
        parsed,
        TypeExpression {
            base: BaseNode {
                location: loc.get(1, 1, 1, 2),
                ..BaseNode::default()
            },
            monotype: MonoType::Tvar(TvarType {
                base: BaseNode {
                    location: loc.get(1, 1, 1, 2),
                    ..BaseNode::default()
                },
                name: Identifier {
                    base: BaseNode {
                        location: loc.get(1, 1, 1, 2),
                        ..BaseNode::default()
                    },
                    name: "A".to_string(),
                }
            }),
            constraints: vec![],
        },
    )
}
#[test]
fn test_parse_type_expression_int() {
let mut p = Parser::new(r#"int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 4),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 4),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 1, 1, 4),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_uint() {
let mut p = Parser::new(r#"uint"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
name: Identifier {
name: "uint".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_float() {
let mut p = Parser::new(r#"float"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
name: Identifier {
name: "float".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_string() {
let mut p = Parser::new(r#"string"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 7),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 7),
..BaseNode::default()
},
name: Identifier {
name: "string".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 7),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_bool() {
let mut p = Parser::new(r#"bool"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
name: Identifier {
name: "bool".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_time() {
let mut p = Parser::new(r#"time"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
name: Identifier {
name: "time".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 5),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_duration() {
let mut p = Parser::new(r#"duration"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 9),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 9),
..BaseNode::default()
},
name: Identifier {
name: "duration".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 9),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_bytes() {
let mut p = Parser::new(r#"bytes"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
name: Identifier {
name: "bytes".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_regexp() {
let mut p = Parser::new(r#"regexp"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 7),
..BaseNode::default()
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 1, 1, 7),
..BaseNode::default()
},
name: Identifier {
name: "regexp".to_string(),
base: BaseNode {
location: loc.get(1, 1, 1, 7),
..BaseNode::default()
},
}
}),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_array_int() {
let mut p = Parser::new(r#"[int]"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
monotype: MonoType::Array(Box::new(ArrayType {
base: BaseNode {
location: loc.get(1, 1, 1, 6),
..BaseNode::default()
},
element: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 2, 1, 5),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 2, 1, 5),
..BaseNode::default()
},
name: "int".to_string(),
}
})
})),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_array_string() {
let mut p = Parser::new(r#"[string]"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 9),
..BaseNode::default()
},
monotype: MonoType::Array(Box::new(ArrayType {
base: BaseNode {
location: loc.get(1, 1, 1, 9),
..BaseNode::default()
},
element: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 2, 1, 8),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 2, 1, 8),
..BaseNode::default()
},
name: "string".to_string(),
}
})
})),
constraints: vec![],
}
)
}
#[test]
fn test_parse_type_expression_dict() {
let mut p = Parser::new(r#"[string:int]"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 13),
..BaseNode::default()
},
monotype: MonoType::Dict(Box::new(DictType {
base: BaseNode {
location: loc.get(1, 1, 1, 13),
..BaseNode::default()
},
key: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 2, 1, 8),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 2, 1, 8),
..BaseNode::default()
},
name: "string".to_string(),
}
}),
val: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 9, 1, 12),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 9, 1, 12),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
}
)
}
#[test]
fn test_parse_record_type_only_properties() {
let mut p = Parser::new(r#"{a:int, b:uint}"#);
let parsed = p.parse_record_type();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
MonoType::Record(RecordType {
base: BaseNode {
location: loc.get(1, 1, 1, 16),
..BaseNode::default()
},
tvar: None,
properties: vec![
PropertyType {
base: BaseNode {
location: loc.get(1, 2, 1, 7),
..BaseNode::default()
},
name: Identifier {
name: "a".to_string(),
base: BaseNode {
location: loc.get(1, 2, 1, 3),
..BaseNode::default()
},
}
.into(),
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 4, 1, 7),
..BaseNode::default()
},
name: Identifier {
name: "int".to_string(),
base: BaseNode {
location: loc.get(1, 4, 1, 7),
..BaseNode::default()
},
}
})
},
PropertyType {
base: BaseNode {
location: loc.get(1, 9, 1, 15),
..BaseNode::default()
},
name: Identifier {
name: "b".to_string(),
base: BaseNode {
location: loc.get(1, 9, 1, 10),
..BaseNode::default()
},
}
.into(),
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 11, 1, 15),
..BaseNode::default()
},
name: Identifier {
name: "uint".to_string(),
base: BaseNode {
location: loc.get(1, 11, 1, 15),
..BaseNode::default()
},
}
})
}
]
},)
)
}
#[test]
fn test_parse_record_type_string_literal_property() {
let mut p = Parser::new(r#"{"a":int, b:uint}"#);
let parsed = p.parse_record_type();
expect_test::expect![[r#"
Record(
RecordType {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 1,
},
end: Position {
line: 1,
column: 18,
},
source: Some(
"{\"a\":int, b:uint}",
),
},
comments: [],
errors: [],
},
tvar: None,
properties: [
PropertyType {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 2,
},
end: Position {
line: 1,
column: 9,
},
source: Some(
"\"a\":int",
),
},
comments: [],
errors: [],
},
name: StringLit(
StringLit {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 2,
},
end: Position {
line: 1,
column: 5,
},
source: Some(
"\"a\"",
),
},
comments: [],
errors: [],
},
value: "a",
},
),
monotype: Basic(
NamedType {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 6,
},
end: Position {
line: 1,
column: 9,
},
source: Some(
"int",
),
},
comments: [],
errors: [],
},
name: Identifier {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 6,
},
end: Position {
line: 1,
column: 9,
},
source: Some(
"int",
),
},
comments: [],
errors: [],
},
name: "int",
},
},
),
},
PropertyType {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 11,
},
end: Position {
line: 1,
column: 17,
},
source: Some(
"b:uint",
),
},
comments: [],
errors: [],
},
name: Identifier(
Identifier {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 11,
},
end: Position {
line: 1,
column: 12,
},
source: Some(
"b",
),
},
comments: [],
errors: [],
},
name: "b",
},
),
monotype: Basic(
NamedType {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 13,
},
end: Position {
line: 1,
column: 17,
},
source: Some(
"uint",
),
},
comments: [],
errors: [],
},
name: Identifier {
base: BaseNode {
location: SourceLocation {
file: None,
start: Position {
line: 1,
column: 13,
},
end: Position {
line: 1,
column: 17,
},
source: Some(
"uint",
),
},
comments: [],
errors: [],
},
name: "uint",
},
},
),
},
],
},
)
"#]]
.assert_debug_eq(&parsed);
}
#[test]
fn test_parse_record_type_trailing_comma() {
let mut p = Parser::new(r#"{a:int,}"#);
let parsed = p.parse_record_type();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
MonoType::Record(RecordType {
base: BaseNode {
location: loc.get(1, 1, 1, 9),
..BaseNode::default()
},
tvar: None,
properties: vec![PropertyType {
base: BaseNode {
location: loc.get(1, 2, 1, 7),
..BaseNode::default()
},
name: Identifier {
name: "a".to_string(),
base: BaseNode {
location: loc.get(1, 2, 1, 3),
..BaseNode::default()
},
}
.into(),
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 4, 1, 7),
..BaseNode::default()
},
name: Identifier {
name: "int".to_string(),
base: BaseNode {
location: loc.get(1, 4, 1, 7),
..BaseNode::default()
},
}
})
},]
},)
)
}
#[test]
fn test_parse_record_type_invalid() {
    // `{a b}` is missing the `:` separator: the parser should attach an
    // error to the record node and yield no properties, not panic.
    let mut p = Parser::new(r#"{a b}"#);
    let parsed = p.parse_record_type();
    let loc = Locator::new(&p.source[..]);
    assert_eq!(
        parsed,
        MonoType::Record(RecordType {
            base: BaseNode {
                location: loc.get(1, 1, 1, 5),
                errors: vec!["expected RBRACE, got IDENT".to_string()],
                ..BaseNode::default()
            },
            tvar: None,
            properties: vec![],
        })
    )
}
#[test]
fn test_parse_constraint_one_ident() {
    // A single constraint `A : date` binds tvar `A` to one kind, `date`.
    let mut p = Parser::new(r#"A : date"#);
    let parsed = p.parse_constraints();
    let loc = Locator::new(&p.source[..]);
    assert_eq!(
        parsed,
        vec![TypeConstraint {
            base: BaseNode {
                location: loc.get(1, 1, 1, 9),
                ..BaseNode::default()
            },
            tvar: Identifier {
                base: BaseNode {
                    location: loc.get(1, 1, 1, 2),
                    ..BaseNode::default()
                },
                name: "A".to_string(),
            },
            kinds: vec![Identifier {
                base: BaseNode {
                    location: loc.get(1, 5, 1, 9),
                    ..BaseNode::default()
                },
                name: "date".to_string(),
            }]
        }],
    )
}
#[test]
fn test_parse_record_type_blank() {
    // `{}` parses to a record type with no extension tvar and no properties.
    let mut p = Parser::new(r#"{}"#);
    let parsed = p.parse_record_type();
    let loc = Locator::new(&p.source[..]);
    assert_eq!(
        parsed,
        MonoType::Record(RecordType {
            base: BaseNode {
                location: loc.get(1, 1, 1, 3),
                ..BaseNode::default()
            },
            tvar: None,
            properties: vec![],
        },)
    )
}
#[test]
fn test_parse_type_expression_function_with_no_params() {
let mut p = Parser::new(r#"() => int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 10),
..BaseNode::default()
},
monotype: MonoType::Function(Box::new(FunctionType {
base: BaseNode {
location: loc.get(1, 1, 1, 10),
..BaseNode::default()
},
parameters: vec![],
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 7, 1, 10),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 7, 1, 10),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
},
)
}
#[test]
fn test_parse_function_type_trailing_comma() {
let mut p = Parser::new(r#"(a:int,) => int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 16),
..BaseNode::default()
},
monotype: MonoType::Function(Box::new(FunctionType {
base: BaseNode {
location: loc.get(1, 1, 1, 16),
..BaseNode::default()
},
parameters: vec![ParameterType::Required {
base: BaseNode {
location: loc.get(1, 2, 1, 7),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 2, 1, 3),
..BaseNode::default()
},
name: "a".to_string(),
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 4, 1, 7),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 4, 1, 7),
..BaseNode::default()
},
name: "int".to_string(),
},
}),
},],
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 13, 1, 16),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 13, 1, 16),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_function_with_params() {
let mut p = Parser::new(r#"(A: int, B: uint) => int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 25),
..BaseNode::default()
},
monotype: MonoType::Function(Box::new(FunctionType {
base: BaseNode {
location: loc.get(1, 1, 1, 25),
..BaseNode::default()
},
parameters: vec![
ParameterType::Required {
base: BaseNode {
location: loc.get(1, 2, 1, 8),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 2, 1, 3),
..BaseNode::default()
},
name: "A".to_string(),
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 5, 1, 8),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 5, 1, 8),
..BaseNode::default()
},
name: "int".to_string(),
},
}),
},
ParameterType::Required {
base: BaseNode {
location: loc.get(1, 10, 1, 17),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 10, 1, 11),
..BaseNode::default()
},
name: "B".to_string(),
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 13, 1, 17),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 13, 1, 17),
..BaseNode::default()
},
name: "uint".to_string(),
},
}),
}
],
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 22, 1, 25),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 22, 1, 25),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
},
)
}
// optional parameters like (.., ?n: ..) -> ..
#[test]
fn test_parse_type_expression_function_optional_params() {
let mut p = Parser::new(r#"(?A: int) => int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 17),
..BaseNode::default()
},
monotype: MonoType::Function(Box::new(FunctionType {
base: BaseNode {
location: loc.get(1, 1, 1, 17),
..BaseNode::default()
},
parameters: vec![ParameterType::Optional {
base: BaseNode {
location: loc.get(1, 2, 1, 9),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 3, 1, 4),
..BaseNode::default()
},
name: "A".to_string(),
},
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 6, 1, 9),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 6, 1, 9),
..BaseNode::default()
},
name: "int".to_string(),
},
}),
default: None,
}],
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 14, 1, 17),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 14, 1, 17),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_function_named_params() {
let mut p = Parser::new(r#"(<-A: int) => int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 18),
..BaseNode::default()
},
monotype: MonoType::Function(Box::new(FunctionType {
base: BaseNode {
location: loc.get(1, 1, 1, 18),
..BaseNode::default()
},
parameters: vec![ParameterType::Pipe {
base: BaseNode {
location: loc.get(1, 2, 1, 10),
..BaseNode::default()
},
name: Some(Identifier {
base: BaseNode {
location: loc.get(1, 4, 1, 5),
..BaseNode::default()
},
name: "A".to_string(),
}),
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 7, 1, 10),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 7, 1, 10),
..BaseNode::default()
},
name: "int".to_string(),
},
}),
}],
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 15, 1, 18),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 15, 1, 18),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
},
)
}
#[test]
fn test_parse_type_expression_function_unnamed_params() {
let mut p = Parser::new(r#"(<- : int) => int"#);
let parsed = p.parse_type_expression();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
TypeExpression {
base: BaseNode {
location: loc.get(1, 1, 1, 18),
..BaseNode::default()
},
monotype: MonoType::Function(Box::new(FunctionType {
base: BaseNode {
location: loc.get(1, 1, 1, 18),
..BaseNode::default()
},
parameters: vec![ParameterType::Pipe {
base: BaseNode {
location: loc.get(1, 2, 1, 10),
..BaseNode::default()
},
name: None,
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 7, 1, 10),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 7, 1, 10),
..BaseNode::default()
},
name: "int".to_string(),
},
}),
}],
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 15, 1, 18),
..BaseNode::default()
},
name: Identifier {
base: BaseNode {
location: loc.get(1, 15, 1, 18),
..BaseNode::default()
},
name: "int".to_string(),
}
}),
})),
constraints: vec![],
},
)
}
#[test]
fn test_parse_constraint_two_ident() {
let mut p = Parser::new(r#"A: Addable + Subtractable"#);
let parsed = p.parse_constraints();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
vec![TypeConstraint {
base: BaseNode {
location: loc.get(1, 1, 1, 26),
..BaseNode::default()
},
tvar: Identifier {
base: BaseNode {
location: loc.get(1, 1, 1, 2),
..BaseNode::default()
},
name: "A".to_string(),
},
kinds: vec![
Identifier {
base: BaseNode {
location: loc.get(1, 4, 1, 11),
..BaseNode::default()
},
name: "Addable".to_string(),
},
Identifier {
base: BaseNode {
location: loc.get(1, 14, 1, 26),
..BaseNode::default()
},
name: "Subtractable".to_string(),
}
]
}],
)
}
#[test]
fn test_parse_constraint_two_con() {
let mut p = Parser::new(r#"A: Addable, B: Subtractable"#);
let parsed = p.parse_constraints();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
vec![
TypeConstraint {
base: BaseNode {
location: loc.get(1, 1, 1, 11),
..BaseNode::default()
},
tvar: Identifier {
base: BaseNode {
location: loc.get(1, 1, 1, 2),
..BaseNode::default()
},
name: "A".to_string(),
},
kinds: vec![Identifier {
base: BaseNode {
location: loc.get(1, 4, 1, 11),
..BaseNode::default()
},
name: "Addable".to_string(),
}]
},
TypeConstraint {
base: BaseNode {
location: loc.get(1, 13, 1, 28),
..BaseNode::default()
},
tvar: Identifier {
base: BaseNode {
location: loc.get(1, 13, 1, 14),
..BaseNode::default()
},
name: "B".to_string(),
},
kinds: vec![Identifier {
base: BaseNode {
location: loc.get(1, 16, 1, 28),
..BaseNode::default()
},
name: "Subtractable".to_string(),
}]
}
],
)
}
#[test]
fn test_parse_record_type_tvar_properties() {
let mut p = Parser::new(r#"{A with a:int, b:uint}"#);
let parsed = p.parse_record_type();
let loc = Locator::new(&p.source[..]);
assert_eq!(
parsed,
MonoType::Record(RecordType {
base: BaseNode {
location: loc.get(1, 1, 1, 23),
..BaseNode::default()
},
tvar: Some(Identifier {
base: BaseNode {
location: loc.get(1, 2, 1, 3),
..BaseNode::default()
},
name: "A".to_string(),
}),
properties: vec![
PropertyType {
base: BaseNode {
location: loc.get(1, 9, 1, 14),
..BaseNode::default()
},
name: Identifier {
name: "a".to_string(),
base: BaseNode {
location: loc.get(1, 9, 1, 10),
..BaseNode::default()
}
.into(),
}
.into(),
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 11, 1, 14),
..BaseNode::default()
},
name: Identifier {
name: "int".to_string(),
base: BaseNode {
location: loc.get(1, 11, 1, 14),
..BaseNode::default()
},
}
})
},
PropertyType {
base: BaseNode {
location: loc.get(1, 16, 1, 22),
..BaseNode::default()
},
name: Identifier {
name: "b".to_string(),
base: BaseNode {
location: loc.get(1, 16, 1, 17),
..BaseNode::default()
},
}
.into(),
monotype: MonoType::Basic(NamedType {
base: BaseNode {
location: loc.get(1, 18, 1, 22),
..BaseNode::default()
},
name: Identifier {
name: "uint".to_string(),
base: BaseNode {
location: loc.get(1, 18, 1, 22),
..BaseNode::default()
},
}
})
}
]
},)
)
}
#[test]
fn test_parse_record_unclosed_error() {
    // The record in the parameter type is never closed; the AST checker
    // should surface exactly one "expected RBRACE" error with its span.
    let mut p = Parser::new(r#"(r:{A with a:int) => int"#);
    let parsed = p.parse_type_expression();
    expect_test::expect![["error @1:4-1:18: expected RBRACE, got RPAREN"]].assert_eq(
        &ast::check::check(ast::walk::Node::TypeExpression(&parsed))
            .unwrap_err()
            .to_string(),
    );
}
|
// Copyright 2019 Steven Bosnick
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE-2.0 or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms
// Implements #SPC-sel4platcrate.am335x
//! This crate provides the platform-specific parts of the sel4 library for the Am335x
//! platform.
//!
//! This crate will be empty if the target architecture isn't arm.
//! A side effect of building this project (on the arm architecture)
//! is that the seL4 microkernel for the Am335x platform will be built.
//!
//! Currently this crate will also be empty for a debug profile. This is tied
//! to [issue 116] in the seL4 project.
//!
//! [issue 116]: https://github.com/seL4/seL4/issues/116
#![no_std]
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
extern crate cty;
#[cfg(all(target_arch = "arm", not(debug_assertions)))]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
use std::io;
/// Read a validated integer from stdin and print its factorial.
fn main() {
    println!("Enter a positive number to calculate factorial: ");
    // 21! overflows u64, so inputs are capped at 20.
    let min_val = 1;
    let max_val = 20;
    let number: u64 = loop {
        let mut str_number = String::new();
        io::stdin()
            .read_line(&mut str_number)
            .expect("Failed to read the number");
        // Previously a non-numeric entry silently re-looped with no
        // feedback; now the user is told why they are being re-prompted.
        let parsed: u64 = match str_number.trim().parse() {
            Ok(num) => num,
            Err(_) => {
                println!("Please enter a valid whole number.");
                continue;
            }
        };
        if (min_val..=max_val).contains(&parsed) {
            break parsed;
        }
        println!("Please enter a number between {} and {}.", min_val, max_val);
    };
    let factorial_val = factorial(number);
    println!("Factorial of number {} is {}", number, factorial_val);
}
/// Compute `number!` iteratively.
///
/// `factorial(0)` is 1 (the product over an empty range), which covers
/// the base case without a branch. Values above 20 overflow `u64`
/// (panic in debug builds, wrap in release), so callers should validate
/// the input range first — `main` caps input at 20 for this reason.
fn factorial(number: u64) -> u64 {
    // Iterative product avoids the recursion depth of the former
    // recursive implementation while producing identical results.
    (1..=number).product()
}
use std::ffi::OsString;
use clap::{clap_app, crate_authors, crate_version, crate_description};
use enumset::EnumSet;
use crate::{
modules::{metasource::MetaSources, tracksource::TrackSources},
track::Track,
};
#[cfg(test)]
mod tests;
/// The action a CLI invocation resolved to.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Command {
    /// `--help` was requested; the payload is the rendered help text.
    Help(Box<str>),
    /// `--version` was requested; the payload is the rendered version text.
    Version(Box<str>),
    /// A normal invocation: download the track described by the arguments.
    Download(Args)
}
/// Options for a download run, as parsed from the command line.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Args {
    /// Verbosity from repeated `-v` flags: Info, Debug, or Trace.
    pub log_level: log::Level,
    /// The track to download, built from the required `id` argument
    /// (plus the optional `--duration` value).
    pub track: Track,
    /// Metadata sources to query; an empty CLI selection means "all".
    pub metasources: EnumSet<MetaSources>,
    /// Download sources to query; an empty CLI selection means "all".
    pub tracksources: EnumSet<TrackSources>,
}
/// Parse a raw argument list into a [`Command`].
///
/// clap reports `--help` and `--version` as "errors"; those are converted
/// here into `Command::Help` / `Command::Version` carrying the rendered
/// text. Any other clap error is propagated to the caller.
pub fn parse<A, T>(args: A) -> clap::Result<Command>
where
    A: IntoIterator<Item = T>,
    T: Into<OsString> + Clone
{
    // Declarative CLI definition; the flag groups below mirror the
    // MetaSources / TrackSources module enums.
    let app = clap_app!(
        slizzy =>
        (version: crate_version!())
        (author: crate_authors!())
        (about: crate_description!())
        (@arg verbose: -v ... "level of logging information")
        (@arg id: +required "the track id to download")
        (@arg duration: -d --duration +takes_value "specify the track duration")
        // metasources:
        (@arg beatport: --beatport "Use the beatport module")
        (@arg bandcamp: --bandcamp "Use the bandcamp module")
        // tracksources:
        (@arg slider: --slider "Use the slider module")
        (@arg music2k: --music2k "Use the music2k module")
        (@arg zippy: --zippy "Use the zippy module")
    );
    match app.get_matches_from_safe(args) {
        Ok(matches) => {
            let track = parse_track(&matches)?;
            Ok(
                Command::Download(
                    Args {
                        track,
                        metasources: parse_metasources(&matches),
                        tracksources: parse_tracksources(&matches),
                        log_level: parse_log_level(&matches),
                    }
                )
            )
        },
        // Help/version are not failures from the caller's point of view.
        Err(error) => match error.kind {
            clap::ErrorKind::HelpDisplayed => Ok(
                Command::Help(error.message.into_boxed_str())
            ),
            clap::ErrorKind::VersionDisplayed => Ok(
                Command::Version(error.message.into_boxed_str())
            ),
            _ => Err(error)
        }
    }
}
/// Map the number of `-v` occurrences to a log level:
/// none -> Info, one -> Debug, two or more -> Trace.
fn parse_log_level(matches: &clap::ArgMatches) -> log::Level {
    let verbosity = matches.occurrences_of("verbose");
    if verbosity == 0 {
        log::Level::Info
    } else if verbosity == 1 {
        log::Level::Debug
    } else {
        log::Level::Trace
    }
}
/// Build a [`Track`] from the required `id` argument, attaching the
/// optional `--duration` value when present.
///
/// Both the id and the duration are validated; failures are reported as
/// `clap::ErrorKind::ValueValidation` errors so they render like any
/// other CLI usage error.
fn parse_track(matches: &clap::ArgMatches) -> clap::Result<Track> {
    let mut track = Track
        ::new(
            matches
                .value_of("id")
                // Safe: clap enforces `+required` on `id`.
                .expect("id parameter is required")
        )
        .map_err(
            |error| clap::Error::with_description(
                &format!("invalid track id: {}", error),
                clap::ErrorKind::ValueValidation
            )
        )?;
    if let Some(duration) = matches.value_of("duration") {
        let duration = duration
            .parse()
            .map_err(
                |error: crate::track::ParseDurationError| clap::Error::with_description(
                    &format!("invalid track duration: {}", error),
                    clap::ErrorKind::ValueValidation
                )
            )?;
        track.duration = Some(duration);
    }
    Ok(track)
}
/// Build the set of metadata sources from the CLI flags.
/// Selecting no source at all is shorthand for "use every source".
fn parse_metasources(matches: &clap::ArgMatches) -> EnumSet<MetaSources> {
    let mut sources = EnumSet::new();
    if matches.is_present("beatport") {
        sources |= MetaSources::Beatport;
    }
    if matches.is_present("bandcamp") {
        sources |= MetaSources::Bandcamp;
    }
    if sources.is_empty() {
        return EnumSet::all();
    }
    sources
}
/// Build the set of download sources from the CLI flags.
/// Selecting no source at all is shorthand for "use every source".
fn parse_tracksources(matches: &clap::ArgMatches) -> EnumSet<TrackSources> {
    let mut sources = EnumSet::new();
    if matches.is_present("slider") {
        sources |= TrackSources::Slider;
    }
    if matches.is_present("music2k") {
        sources |= TrackSources::Music2k;
    }
    if matches.is_present("zippy") {
        sources |= TrackSources::Zippy;
    }
    if sources.is_empty() {
        return EnumSet::all();
    }
    sources
}
|
mod combined_range;
mod simulator;
pub use combined_range::CombinedRange;
pub use simulator::{approx_equity, exact_equity};
|
use std::fs;
/// Rosalind-style random-string probability: reads a DNA string and a
/// list of GC-content values from `filename`, then prints, for each GC
/// content, the base-10 log of the probability that a random string with
/// that GC content equals the DNA string exactly.
///
/// Expected file layout: line 1 is the DNA string, line 2 is a list of
/// whitespace-separated GC-content values in [0, 1].
pub fn run(filename: &str) {
    let file = fs::read_to_string(filename).expect("Something went wrong reading the file");
    // Bug fix: `split('\n')` left a trailing '\r' on the DNA line for
    // CRLF input, so every line's last character hit the "Invalid" arm.
    // `str::lines` strips the carriage return.
    let mut lines = file.lines();
    let dna = lines.next().expect("Missing dna");
    let numbers: Vec<f64> = lines
        .next()
        .expect("Missing pattern")
        .split_whitespace()
        .map(|s| s.parse().unwrap())
        .collect();
    for number in numbers.iter() {
        let mut chance = 1.0;
        // P(G) = P(C) = gc/2 and P(A) = P(T) = (1 - gc)/2.
        let gc_prob = number / 2.0;
        let at_prob = (1.0 - number) / 2.0;
        for c in dna.chars() {
            match c {
                'G' | 'C' => chance *= gc_prob,
                'A' | 'T' => chance *= at_prob,
                // Best-effort: report unexpected characters but keep going.
                _ => println!("Invalid"),
            }
        }
        print!("{} ", chance.log10())
    }
    println!();
}
|
use std::{error, fmt, io};
/// Errors produced while parsing an input file.
#[derive(Debug)]
pub enum ParseError {
    /// An underlying I/O failure while reading the input.
    Io(io::Error),
    /// A required type was not found; the payload names the missing type.
    MissingType(String),
    /// A required attribute was not present.
    MissingAttribute,
    /// The input file as a whole was malformed.
    InvalidFile,
    /// A type name could not be interpreted.
    InvalidTypeName,
    /// A binary blob could not be decoded.
    InvalidBlob,
}
impl fmt::Display for ParseError {
    /// Render a human-readable description of the parse failure.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Delegate to the wrapped I/O error's own message.
            Self::Io(err) => fmt::Display::fmt(err, f),
            Self::MissingType(ty) => write!(f, "Missing type: {}", ty),
            Self::MissingAttribute => f.write_str("Missing attribute"),
            Self::InvalidFile => f.write_str("Invalid file"),
            Self::InvalidTypeName => f.write_str("Invalid type name"),
            Self::InvalidBlob => f.write_str("Invalid blob"),
        }
    }
}
impl error::Error for ParseError {}
/// Top-level error type: either a raw I/O failure or a parse failure.
#[derive(Debug)]
pub enum Error {
    /// An underlying I/O failure.
    Io(io::Error),
    /// A failure raised by the parser.
    ParseError(ParseError),
}
impl fmt::Display for Error {
    /// Forward to the wrapped error's `Display` implementation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Both variants simply delegate; neither adds extra context.
        let inner: &dyn fmt::Display = match self {
            Self::Io(err) => err,
            Self::ParseError(err) => err,
        };
        inner.fmt(f)
    }
}
impl error::Error for Error {}
/// Convenience alias for results produced by the parser.
pub type ParseResult<T> = Result<T, ParseError>;
impl std::convert::From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::Io(error)
}
}
impl std::convert::From<ParseError> for Error {
fn from(error: ParseError) -> Self {
Error::ParseError(error)
}
}
impl std::convert::From<io::Error> for ParseError {
fn from(error: io::Error) -> Self {
ParseError::Io(error)
}
}
impl std::convert::From<ParseError> for std::fmt::Error {
fn from(_: ParseError) -> Self {
std::fmt::Error {}
}
}
|
use std::path::PathBuf;
/// A custom property
#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub enum Property {
    String(String),
    Int(i32),
    Float(f64),
    Bool(bool),
    /// A color in the format `[a, r, g, b]`
    Color([u8; 4]),
    File(String),
}
impl Property {
    /// Return &str value if this property is a string, `None` otherwise.
    pub fn as_str(&self) -> Option<&str> {
        if let Property::String(s) = self {
            Some(s.as_str())
        } else {
            None
        }
    }
    /// Return i32 value if this property is an int or float, `None` otherwise.
    pub fn as_int(&self) -> Option<i32> {
        match self {
            Property::Int(value) => Some(*value),
            // Floats are truncated toward zero by the cast.
            Property::Float(value) => Some(*value as i32),
            _ => None,
        }
    }
    /// Return f64 value if this property is a float or int, `None` otherwise.
    pub fn as_float(&self) -> Option<f64> {
        match self {
            Property::Float(value) => Some(*value),
            Property::Int(value) => Some(*value as f64),
            _ => None,
        }
    }
    /// Return bool value if this property is a bool, `None` otherwise.
    pub fn as_bool(&self) -> Option<bool> {
        if let Property::Bool(b) = *self {
            Some(b)
        } else {
            None
        }
    }
    /// Return `[u8; 4]` value if this property is a color, `None` otherwise.
    pub fn as_color(&self) -> Option<[u8; 4]> {
        if let Property::Color(c) = *self {
            Some(c)
        } else {
            None
        }
    }
    /// Return PathBuf value if this property is a file, `None` otherwise.
    pub fn as_file(&self) -> Option<PathBuf> {
        if let Property::File(path) = self {
            Some(PathBuf::from(path))
        } else {
            None
        }
    }
}
|
use std::convert::TryFrom;
/// One decoded instruction: an accumulator update, a relative jump, or a
/// no-op (the operand is kept even for `nop`).
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub(crate) enum Instruction {
    Accumulate(i32),
    Jump(i32),
    Noop(i32),
}
impl TryFrom<(&str, i32)> for Instruction {
    type Error = String;
    /// Decode an `(opcode, operand)` pair; unknown opcodes are an error.
    fn try_from(value: (&str, i32)) -> Result<Self, Self::Error> {
        let (op, arg) = value;
        match op {
            "acc" => Ok(Self::Accumulate(arg)),
            "jmp" => Ok(Self::Jump(arg)),
            "nop" => Ok(Self::Noop(arg)),
            _ => Err(format!("Can't parse {:?} as an instruction!", value)),
        }
    }
}
/// Split a source line into its opcode and numeric operand.
/// Panics if the line does not consist of exactly two whitespace-separated
/// tokens, or if the second token is not an integer.
fn split_line(line: &str) -> (&str, i32) {
    let mut parts = line.trim().split_whitespace();
    match (parts.next(), parts.next(), parts.next()) {
        // Exactly two tokens: opcode plus an integer operand (e.g. "+7"/"-3").
        (Some(op), Some(arg), None) => (op, arg.parse::<i32>().unwrap()),
        _ => panic!("Uh-oh! Line {} can't be split properly.", line),
    }
}
pub(crate) fn compile(input: &str) -> Vec<Instruction> {
input
.trim()
.lines()
.map(split_line)
.map(|i| Instruction::try_from(i).unwrap())
.collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Parses the bundled example program (example_1_input.txt, shipped next
    // to this source file) and checks the decoded instruction list.
    #[test]
    fn example_1_compiles() {
        use super::Instruction::*;
        let input = include_str!("example_1_input.txt");
        let expected = vec![
            Noop(0),
            Accumulate(1),
            Jump(4),
            Accumulate(3),
            Jump(-3),
            Accumulate(-99),
            Accumulate(1),
            Jump(-4),
            Accumulate(6),
        ];
        let actual = compile(input);
        assert_eq!(expected, actual);
    }
}
|
// Demonstration of basic Vec operations: push, indexing, checked access,
// iteration, and pop.
fn main() {
    let mut v = vec![1, 2, 3];
    println!("{:?}", v);
    v.push(44);
    println!("{:?}", v);
    println!("{:?}", v[0]);
    // It is impossible to use signed values as indices:
    // let idx:i32 = 0;
    // println!("{:?}", v[idx]);
    let idx: usize = 0;
    v[idx] = 0;
    println!("{:?}", v[idx]);
    // Thread panics when we read out of bounds:
    // println!("{:?}", v[100]);
    // `get` returns Option instead of panicking on out-of-range indices.
    match v.get(6) {
        Some(value) => println!("The value is {}", value),
        None => println!("Got nothing!"),
    }
    for item in v.iter() {
        println!("{}", item);
    }
    v.push(77);
    // Pop the element just pushed; 10 is only a fallback for an empty vector.
    let last = v.pop().unwrap_or(10);
    println!("Last element was {}", last);
    // Drain the vector from the back, printing each element.
    while let Some(item) = v.pop() {
        println!("{}", item);
    }
}
|
use rosu_v2::prelude::Username;
use crate::embeds::EmbedFields;
use std::{collections::HashSet, fmt::Write};
/// Embed payload summarizing the result of an "untrack" command.
pub struct UntrackEmbed {
    fields: EmbedFields,   // one field listing untracked names, one for a failure
    title: &'static str,   // fixed embed title
}
impl UntrackEmbed {
    /// Build the embed: a comma-separated, backtick-quoted list of the names
    /// that were untracked, plus (optionally) the one name that failed.
    pub fn new(success: HashSet<Username>, failed: Option<&Username>) -> Self {
        let title = "Top score tracking";
        let mut fields = EmbedFields::new();
        let mut names = success.iter();
        if let Some(first) = names.next() {
            // Rough capacity bound: each name plus backticks and separator.
            let capacity: usize = success.iter().map(|name| name.len() + 4).sum();
            let mut value = String::with_capacity(capacity);
            let _ = write!(value, "`{}`", first);
            for name in names {
                let _ = write!(value, ", `{}`", name);
            }
            fields.push(field!("No longer tracking:", value, false));
        }
        if let Some(name) = failed {
            fields.push(field!("Failed to untrack:", format!("`{}`", name), false));
        }
        Self { fields, title }
    }
}
// NOTE(review): `impl_builder!` presumably generates the embed-builder
// boilerplate over these two fields — confirm against the macro definition.
impl_builder!(UntrackEmbed { fields, title });
|
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 2.0.0-rc5
use frame_support::weights::{constants::RocksDbWeight as DbWeight, Weight};
impl crate::WeightInfo for () {
    // Auto-generated benchmark weights (see header). Each method returns a
    // base cost, plus per-item costs scaled by the number of announcements
    // (`a`) and/or proxies (`p`), plus database read/write weights. All
    // arithmetic is saturating to avoid overflow.
    fn proxy(p: u32) -> Weight {
        (26127000 as Weight)
            .saturating_add((214000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(1 as Weight))
    }
    fn proxy_announced(a: u32, p: u32) -> Weight {
        (55405000 as Weight)
            .saturating_add((774000 as Weight).saturating_mul(a as Weight))
            .saturating_add((209000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(3 as Weight))
            .saturating_add(DbWeight::get().writes(2 as Weight))
    }
    fn remove_announcement(a: u32, p: u32) -> Weight {
        (35879000 as Weight)
            .saturating_add((783000 as Weight).saturating_mul(a as Weight))
            .saturating_add((20000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(2 as Weight))
            .saturating_add(DbWeight::get().writes(2 as Weight))
    }
    fn reject_announcement(a: u32, p: u32) -> Weight {
        (36097000 as Weight)
            .saturating_add((780000 as Weight).saturating_mul(a as Weight))
            .saturating_add((12000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(2 as Weight))
            .saturating_add(DbWeight::get().writes(2 as Weight))
    }
    fn announce(a: u32, p: u32) -> Weight {
        (53769000 as Weight)
            .saturating_add((675000 as Weight).saturating_mul(a as Weight))
            .saturating_add((214000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(3 as Weight))
            .saturating_add(DbWeight::get().writes(2 as Weight))
    }
    fn add_proxy(p: u32) -> Weight {
        (36082000 as Weight)
            .saturating_add((234000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(1 as Weight))
            .saturating_add(DbWeight::get().writes(1 as Weight))
    }
    fn remove_proxy(p: u32) -> Weight {
        (32885000 as Weight)
            .saturating_add((267000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(1 as Weight))
            .saturating_add(DbWeight::get().writes(1 as Weight))
    }
    fn remove_proxies(p: u32) -> Weight {
        (31735000 as Weight)
            .saturating_add((215000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(1 as Weight))
            .saturating_add(DbWeight::get().writes(1 as Weight))
    }
    fn anonymous(p: u32) -> Weight {
        (50907000 as Weight)
            .saturating_add((61000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(2 as Weight))
            .saturating_add(DbWeight::get().writes(1 as Weight))
    }
    fn kill_anonymous(p: u32) -> Weight {
        (33926000 as Weight)
            .saturating_add((208000 as Weight).saturating_mul(p as Weight))
            .saturating_add(DbWeight::get().reads(1 as Weight))
            .saturating_add(DbWeight::get().writes(1 as Weight))
    }
}
|
// Generated by `scripts/generate.js`
use utils::vk_traits::*;
/// Wrapper for [VkIndirectCommandsLayoutUsageFlagsNV](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkIndirectCommandsLayoutUsageFlagsNV.html).
///
/// Use the macro `VkIndirectCommandsLayoutUsageFlags!` as an alternative method to create a structure. For example, these two snippets return the same value:
/// ```
/// VkIndirectCommandsLayoutUsageFlags!(explicit_preprocess, indexed_sequences)
/// ```
/// ```
/// VkIndirectCommandsLayoutUsageFlags {
/// explicit_preprocess: true,
/// indexed_sequences: true,
/// ..VkIndirectCommandsLayoutUsageFlags::none()
/// }
/// ```
#[derive(Debug, Clone)]
pub struct VkIndirectCommandsLayoutUsageFlags {
    pub explicit_preprocess: bool,  // bit 0 (0x00000001)
    pub indexed_sequences: bool,    // bit 1 (0x00000002)
    pub unordered_sequences: bool,  // bit 2 (0x00000004)
}
#[doc(hidden)]
// Raw FFI representation: the Vulkan bitmask value.
pub type RawVkIndirectCommandsLayoutUsageFlags = u32;
impl VkWrappedType<RawVkIndirectCommandsLayoutUsageFlags> for VkIndirectCommandsLayoutUsageFlags {
    /// Pack the boolean flags into their raw Vulkan bitmask positions.
    fn vk_to_raw(src: &VkIndirectCommandsLayoutUsageFlags, dst: &mut RawVkIndirectCommandsLayoutUsageFlags) {
        let mut bits = 0;
        if src.explicit_preprocess { bits |= 0x00000001; }
        if src.indexed_sequences { bits |= 0x00000002; }
        if src.unordered_sequences { bits |= 0x00000004; }
        *dst = bits;
    }
}
impl VkRawType<VkIndirectCommandsLayoutUsageFlags> for RawVkIndirectCommandsLayoutUsageFlags {
    /// Unpack a raw Vulkan bitmask into individual boolean flags.
    fn vk_to_wrapped(src: &RawVkIndirectCommandsLayoutUsageFlags) -> VkIndirectCommandsLayoutUsageFlags {
        let value = *src;
        VkIndirectCommandsLayoutUsageFlags {
            explicit_preprocess: value & 0x00000001 != 0,
            indexed_sequences: value & 0x00000002 != 0,
            unordered_sequences: value & 0x00000004 != 0,
        }
    }
}
impl Default for VkIndirectCommandsLayoutUsageFlags {
    // Default is the empty flag set (all bits clear), same as `none()`.
    fn default() -> VkIndirectCommandsLayoutUsageFlags {
        VkIndirectCommandsLayoutUsageFlags {
            explicit_preprocess: false,
            indexed_sequences: false,
            unordered_sequences: false,
        }
    }
}
impl VkIndirectCommandsLayoutUsageFlags {
/// Return a structure with all flags to `false`.
pub fn none() -> Self {
VkIndirectCommandsLayoutUsageFlags {
explicit_preprocess: false,
indexed_sequences: false,
unordered_sequences: false,
}
}
/// Return a structure with all flags to `true`.
pub fn all() -> Self {
VkIndirectCommandsLayoutUsageFlags {
explicit_preprocess: true,
indexed_sequences: true,
unordered_sequences: true,
}
}
/// Return the numerical bit flags corresponding to the structure (as described in the Vulkan specs).
pub fn to_u32(&self) -> u32 {
0
+ if self.explicit_preprocess { 0x00000001 } else { 0 }
+ if self.indexed_sequences { 0x00000002 } else { 0 }
+ if self.unordered_sequences { 0x00000004 } else { 0 }
}
/// Create a structure corresponding to the specified numerical bit flags.
pub fn from_u32(value: u32) -> Self {
VkIndirectCommandsLayoutUsageFlags {
explicit_preprocess: value & 0x00000001 > 0,
indexed_sequences: value & 0x00000002 > 0,
unordered_sequences: value & 0x00000004 > 0,
}
}
}
#[doc(hidden)]
#[macro_export]
// Builds the flags struct with the listed field names set to `true` and the
// remaining fields defaulted to `false` via `none()`.
macro_rules! VkIndirectCommandsLayoutUsageFlags {
    ( $( $x:ident ),* ) => {
        VkIndirectCommandsLayoutUsageFlags {
            $($x: true,)*
            ..VkIndirectCommandsLayoutUsageFlags::none()
        }
    }
}
use super::super::HasTable;
use super::{Component, FromWorld, TableId, World};
use std::ops::Deref;
/// Fetch read-only tables from a Storage
///
// Wraps a shared reference to the component's table; cheap to copy.
pub struct View<'a, Id: TableId, C: Component<Id>>(&'a C::Table);
impl<'a, Id: TableId, C: Component<Id>> Clone for View<'a, Id, C> {
    // A view is only a shared reference, so cloning just copies the pointer.
    fn clone(&self) -> Self {
        View(self.0)
    }
}
impl<'a, Id: TableId, C: Component<Id>> Copy for View<'a, Id, C> {}
// NOTE(review): these unconditional unsafe impls assert that `&C::Table` may
// be sent/shared across threads without requiring `C::Table: Sync`. That is
// only sound if every table type is in fact Sync — confirm, otherwise add a
// `C::Table: Sync` bound instead of the blanket unsafe impls.
unsafe impl<'a, Id: TableId, C: Component<Id>> Send for View<'a, Id, C> {}
unsafe impl<'a, Id: TableId, C: Component<Id>> Sync for View<'a, Id, C> {}
impl<'a, Id: TableId, C: Component<Id>> View<'a, Id, C> {
pub fn reborrow(self) -> &'a C::Table {
self.0
}
pub fn from_table(t: &'a C::Table) -> Self {
Self(t)
}
}
impl<'a, Id: TableId, C: Component<Id>> Deref for View<'a, Id, C> {
    type Target = C::Table;
    // Transparent access to the underlying table's methods.
    fn deref(&self) -> &Self::Target {
        self.0
    }
}
impl<'a, Id: TableId, C: Component<Id>> FromWorld<'a> for View<'a, Id, C>
where
    crate::world::World: HasTable<Id, C>,
{
    // A view can be constructed from any world that stores this table.
    fn from_world(w: &'a World) -> Self {
        <World as HasTable<Id, C>>::view(w)
    }
}
|
extern crate chrono;
extern crate hyper;
extern crate iron;
extern crate mount;
extern crate router;
extern crate rustc_serialize;
extern crate staticfile;
extern crate urlencoded;
use self::chrono::UTC;
use self::hyper::header::ContentType;
use self::hyper::mime::{Mime, TopLevel, SubLevel};
use self::iron::prelude::*;
use self::iron::status;
use self::mount::Mount;
use self::router::Router;
use self::rustc_serialize::json;
use self::rustc_serialize::json::{ToJson, Json};
use self::staticfile::Static;
use self::urlencoded::UrlEncodedBody;
use record_backend::{RecordRepository, RecordRepositoryError};
use config;
use std::any::Any;
use std::collections::BTreeMap;
use std::path::Path;
use worker::Record;
/// Server configuration, decoded from the application config.
#[derive(Debug, Clone, RustcDecodable, RustcEncodable)]
pub struct Config {
    address: String,      // host:port the HTTP server binds to
    webapp_path: String,  // filesystem path of the static webapp served at "/"
    websockets: bool,     // reported to clients by the /server endpoint
    password: String      // shared secret checked against the x-password header
}
/// Iron-based HTTP API server backed by a record repository `R`.
pub struct HttpServer<R> {
    config: Config,
    backend: R,                        // repository serving the /jobs endpoints
    started_at: chrono::DateTime<UTC>  // reported by the /server endpoint
}
// JSON payload returned by GET /api/server.
#[derive(Debug, Clone, RustcDecodable, RustcEncodable)]
struct ServerResponse {
    ip: String,
    hostname: String,
    version: String,
    started_at: String,  // RFC 3339 timestamp
    websockets: bool
}
// JSON payload returned by the /jobs endpoints.
struct JobsResponse {
    job: Vec<Record>
}
impl ToJson for JobsResponse {
    // Serializes as a JSON object with a single "job" array key.
    fn to_json(&self) -> Json {
        let mut map = BTreeMap::new();
        map.insert("job".to_string(), self.job.to_json());
        Json::Object(map)
    }
}
/// Compare a supplied password against the expected one.
/// Takes `&str` so both `&String` and string literals coerce at call sites.
// NOTE(review): plain `==` is not constant-time, so this leaks timing
// information about the password prefix — confirm this is acceptable here.
fn verify_password (expected: &str, actual: &str) -> bool {
    actual == expected
}
/// Check the `x-password` header of a request against the expected password.
/// A missing header is treated as an empty password.
fn verify_request (req: &Request, password: &String) -> bool {
    let requested_password = match req.headers.get_raw("x-password") {
        // Lossy decoding avoids panicking on non-UTF-8 header bytes
        // (attacker-controlled input); mangled bytes simply fail the check.
        Some(pwd_bytes) => String::from_utf8_lossy(&pwd_bytes.concat()).into_owned(),
        None => String::new()
    };
    verify_password(&requested_password, password)
}
impl <R: RecordRepository + Clone + Send + Sync + Any> HttpServer<R> {
    /// Create a server with the given configuration and record backend;
    /// the start time is captured now and reported by /server.
    pub fn new (config: Config, backend: R) -> HttpServer <R> {
        HttpServer { config: config, backend: backend, started_at: UTC::now() }
    }
    /// Build the router and block serving HTTP on `config.address`.
    /// Routes are mounted under /api; the static webapp is served at "/".
    /// Each route closure clones what it needs because iron handlers must be
    /// 'static + Send.
    pub fn listen (&mut self) {
        let mut router = Router::new();
        {
            // GET /api/server — server metadata; requires the x-password header.
            let password = self.config.password.clone();
            let hostname = self.config.address.clone();
            let version = String::from(config::VERSION);
            let started_at = self.started_at.to_rfc3339().clone();
            let websockets = self.config.websockets;
            router.get("/server", move |req: &mut Request| {
                if !verify_request(&req, &password) {
                    return Ok(Response::with((status::Unauthorized, "")))
                }
                let ip = format!("{}", req.local_addr).to_string();
                let server = ServerResponse {
                    ip: ip, hostname: hostname.clone(), version: version.clone(), started_at: started_at.clone(), websockets: websockets
                };
                let response_data : String = json::encode(&server).unwrap();
                let mut response = Response::with((status::Ok, response_data));
                response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                Ok(response)
            });
        }
        {
            // POST /api/auth — validates a form-encoded "password" field.
            let password = self.config.password.clone();
            router.post("/auth", move |req: &mut Request| {
                match req.get_ref::<UrlEncodedBody>() {
                    Ok(ref body) => {
                        if !body.contains_key("password") {
                            return Ok(Response::with((status::BadRequest, "")))
                        }
                        if !verify_password(&body.get("password").unwrap()[0], &password) {
                            return Ok(Response::with((status::Unauthorized, "")))
                        }
                        Ok(Response::with((status::Ok, "")))
                    },
                    Err(_) => {
                        Ok(Response::with((status::BadRequest, "")))
                    }
                }
            });
        }
        {
            // GET /api/jobs — first 30 records from the backend as JSON.
            let backend = self.backend.clone();
            let password = self.config.password.clone();
            router.get("/jobs", move |req: &mut Request| {
                if !verify_request(&req, &password) {
                    return Ok(Response::with((status::Unauthorized, "")))
                }
                match backend.fetch_limit(30, 0) {
                    Err(RecordRepositoryError::CannotDenormalizeRecord) => {
                        let json = "{\"message\": \"Cannot denormalize records from database\"}";
                        let mut response = Response::with((status::InternalServerError, json));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    },
                    Err(_) => {
                        let mut response = Response::with((status::InternalServerError, "{\"message\": \"Unknown error\" }"));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    },
                    Ok(records) => {
                        let job_response = JobsResponse { job: records };
                        let response_data = job_response.to_json();
                        let mut response = Response::with((status::Ok, response_data.to_string()));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    }
                }
            });
        }
        {
            // GET /api/jobs/:id — a single record by id (404 when missing).
            let backend = self.backend.clone();
            let password = self.config.password.clone();
            router.get("/jobs/:id", move |req: &mut Request| {
                if !verify_request(&req, &password) {
                    return Ok(Response::with((status::Unauthorized, "")))
                }
                let ref id = req.extensions.get::<Router>().unwrap().find("id").unwrap_or("/");
                match backend.fetch_record(id.to_string()) {
                    Err(RecordRepositoryError::CannotDenormalizeRecord) => {
                        let json = "{\"message\": \"Cannot denormalize records from database\"}";
                        let mut response = Response::with((status::InternalServerError, json));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    },
                    Err(RecordRepositoryError::RecordNotFound) => {
                        let json = "{\"message\": \"Record does not exists\"}";
                        let mut response = Response::with((status::NotFound, json));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    },
                    Err(_) => {
                        let mut response = Response::with((status::InternalServerError, "{\"message\": \"Unknown error\" }"));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    },
                    Ok(record) => {
                        // Single record is still wrapped in a one-element "job" array.
                        let job_response = JobsResponse { job: vec![record] };
                        let response_data = job_response.to_json();
                        let mut response = Response::with((status::Ok, response_data.to_string()));
                        response.headers.set(ContentType(Mime(TopLevel::Application, SubLevel::Json, vec![])));
                        Ok(response)
                    }
                }
            });
        }
        let mut mount = Mount::new();
        mount.mount("/api", router);
        mount.mount("/", Static::new(Path::new(&self.config.webapp_path)));
        // Blocks the current thread serving requests.
        Iron::new(mount).http(&*self.config.address).unwrap();
    }
}
// C-compatible FFI shims for the WLC thermodynamics (isometric ensemble)
// functions: each `extern` function takes its scalar arguments by value and
// forwards them as references to the function of the same (short) name in the
// parent module. `#[no_mangle]` keeps the long, fully-qualified symbol names.
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_force(number_of_links: u8, link_length: f64, persistance_length: f64, end_to_end_length: f64, temperature: f64) -> f64
{
    super::force(&number_of_links, &link_length, &persistance_length, &end_to_end_length, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_force(number_of_links: u8, nondimensional_persistance_length: f64, nondimensional_end_to_end_length_per_link: f64) -> f64
{
    super::nondimensional_force(&number_of_links, &nondimensional_persistance_length, &nondimensional_end_to_end_length_per_link)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_helmholtz_free_energy(number_of_links: u8, link_length: f64, hinge_mass: f64, persistance_length: f64, end_to_end_length: f64, temperature: f64) -> f64
{
    super::helmholtz_free_energy(&number_of_links, &link_length, &hinge_mass, &persistance_length, &end_to_end_length, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_helmholtz_free_energy_per_link(number_of_links: u8, link_length: f64, hinge_mass: f64, persistance_length: f64, end_to_end_length: f64, temperature: f64) -> f64
{
    super::helmholtz_free_energy_per_link(&number_of_links, &link_length, &hinge_mass, &persistance_length, &end_to_end_length, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_relative_helmholtz_free_energy(number_of_links: u8, link_length: f64, persistance_length: f64, end_to_end_length: f64, temperature: f64) -> f64
{
    super::relative_helmholtz_free_energy(&number_of_links, &link_length, &persistance_length, &end_to_end_length, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_relative_helmholtz_free_energy_per_link(number_of_links: u8, link_length: f64, persistance_length: f64, end_to_end_length: f64, temperature: f64) -> f64
{
    super::relative_helmholtz_free_energy_per_link(&number_of_links, &link_length, &persistance_length, &end_to_end_length, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_helmholtz_free_energy(number_of_links: u8, link_length: f64, hinge_mass: f64, nondimensional_persistance_length: f64, nondimensional_end_to_end_length_per_link: f64, temperature: f64) -> f64
{
    super::nondimensional_helmholtz_free_energy(&number_of_links, &link_length, &hinge_mass, &nondimensional_persistance_length, &nondimensional_end_to_end_length_per_link, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_helmholtz_free_energy_per_link(number_of_links: u8, link_length: f64, hinge_mass: f64, nondimensional_persistance_length: f64, nondimensional_end_to_end_length_per_link: f64, temperature: f64) -> f64
{
    super::nondimensional_helmholtz_free_energy_per_link(&number_of_links, &link_length, &hinge_mass, &nondimensional_persistance_length, &nondimensional_end_to_end_length_per_link, &temperature)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_relative_helmholtz_free_energy(nondimensional_persistance_length: f64, nondimensional_end_to_end_length_per_link: f64) -> f64
{
    super::nondimensional_relative_helmholtz_free_energy(&nondimensional_persistance_length, &nondimensional_end_to_end_length_per_link)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_relative_helmholtz_free_energy_per_link(number_of_links: u8, nondimensional_persistance_length: f64, nondimensional_end_to_end_length_per_link: f64) -> f64
{
    super::nondimensional_relative_helmholtz_free_energy_per_link(&number_of_links, &nondimensional_persistance_length, &nondimensional_end_to_end_length_per_link)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_equilibrium_distribution(number_of_links: u8, link_length: f64, persistance_length: f64, normalization_nondimensional_equilibrium_distribution: f64, end_to_end_length: f64) -> f64
{
    super::equilibrium_distribution(&number_of_links, &link_length, &persistance_length, &normalization_nondimensional_equilibrium_distribution, &end_to_end_length)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_equilibrium_distribution(nondimensional_persistance_length: f64, normalization_nondimensional_equilibrium_distribution: f64, nondimensional_end_to_end_length_per_link: f64) -> f64
{
    super::nondimensional_equilibrium_distribution(&nondimensional_persistance_length, &normalization_nondimensional_equilibrium_distribution, &nondimensional_end_to_end_length_per_link)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_equilibrium_radial_distribution(number_of_links: u8, link_length: f64, persistance_length: f64, normalization_nondimensional_equilibrium_distribution: f64, end_to_end_length: f64) -> f64
{
    super::equilibrium_radial_distribution(&number_of_links, &link_length, &persistance_length, &normalization_nondimensional_equilibrium_distribution, &end_to_end_length)
}
#[no_mangle]
pub extern fn physics_single_chain_wlc_thermodynamics_isometric_nondimensional_equilibrium_radial_distribution(nondimensional_persistance_length: f64, normalization_nondimensional_equilibrium_distribution: f64, nondimensional_end_to_end_length_per_link: f64) -> f64
{
    super::nondimensional_equilibrium_radial_distribution(&nondimensional_persistance_length, &normalization_nondimensional_equilibrium_distribution, &nondimensional_end_to_end_length_per_link)
}
use hyper::{Body, Response};
use nails::error::NailsError;
use nails::Preroute;
use serde::Serialize;
use crate::context::AppCtx;
// Request extracted from GET /api/posts/{id}; `id` is parsed from the path.
#[derive(Debug, Preroute)]
#[nails(path = "/api/posts/{id}")]
pub(crate) struct GetPostRequest {
    id: u64,
}
// JSON body of the response: { "post": { "body": ... } }.
#[derive(Debug, Serialize)]
pub(crate) struct GetPostBody {
    post: Post,
}
#[derive(Debug, Serialize)]
pub(crate) struct Post {
    body: String,
}
/// Handler for GET /api/posts/{id}.
/// Responds with a fixed payload; the context and request are currently unused.
pub(crate) async fn get_post(
    _ctx: AppCtx,
    _req: GetPostRequest,
) -> Result<Response<Body>, NailsError> {
    let post = Post {
        body: String::from("foo"),
    };
    Ok(super::json_response(&GetPostBody { post }))
}
|
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::{
fs,
path::{Path, PathBuf},
};
use crate::{game::Game, game_info::GameInfo};
/// On-disk (YAML) list of games plus the path it is persisted to.
#[derive(Serialize, Deserialize)]
pub struct Database {
    pub games: Vec<Game>,
    pub path: PathBuf,  // file the database is read from / written to
}
impl Database {
    /// Create a database with no games, to be persisted at `path`.
    pub fn empty(path: &Path) -> Self {
        Database {
            games: Vec::new(),
            path: path.to_owned(),
        }
    }
    /// Load the database from `path`. An unreadable file yields an empty
    /// database at that path; unparseable YAML is an error.
    pub fn read(path: &Path) -> Result<Self> {
        match fs::read_to_string(path) {
            Ok(data) => {
                let games = serde_yaml::from_str(&data).context("Reading database file contents")?;
                Ok(Database {
                    games,
                    path: path.to_owned(),
                })
            }
            // NOTE(review): any read error (not only "file missing") falls
            // back to an empty database — confirm this is intended.
            Err(_) => Ok(Database::empty(path)),
        }
    }
    /// Append a game to the in-memory list (not persisted until `write`).
    pub fn add_game(&mut self, game: Game) {
        self.games.push(game);
    }
    /// Serialize all games to YAML and write them to `self.path`.
    pub fn write(&self) -> Result<()> {
        let content = serde_yaml::to_string(&self)?;
        fs::write(&self.path, content).context("Writing database file contents")
    }
    /// True if a game with the same id as `game_info` is already stored.
    pub fn game_exists(&self, game_info: &GameInfo) -> bool {
        let wanted = game_info.get_id();
        self.games.iter().any(|game| game.id == wanted)
    }
}
|
//! MIPS CP0 Config register
/// Snapshot of the CP0 Config0-Config3 register values.
#[derive(Clone, Copy, Debug)]
pub struct Config {
    pub config0: u32,
    pub config1: u32,
    pub config2: u32,
    pub config3: u32,
}
#[derive(Clone, Copy, Debug)]
pub enum EndianMode {
    LittleEndian,
    BigEndian,
}
#[derive(Clone, Copy, Debug)]
pub enum MMUType {
    NoMMU,
    StandardTLB,
    StandardBAT,
    StandardFixed,
    Unknown,
}
impl Config {
    /// Endianness, from Config0 bit 15.
    pub fn endian(&self) -> EndianMode {
        if self.config0 & (1 << 15) != 0 {
            EndianMode::BigEndian
        } else {
            EndianMode::LittleEndian
        }
    }
    /// MMU type, from Config0 bits 9..7.
    pub fn mmu_type(&self) -> MMUType {
        match (self.config0 >> 7) & 0b111 {
            0 => MMUType::NoMMU,
            1 => MMUType::StandardTLB,
            2 => MMUType::StandardBAT,
            3 => MMUType::StandardFixed,
            _ => MMUType::Unknown,
        }
    }
    /// MMU size field, from Config1 bits 30..25.
    pub fn mmu_size(&self) -> u32 {
        (self.config1 >> 25) & 0b11_1111
    }
    /// I-cache sets per way (Config1 bits 24..22), decoded as 64 << field.
    pub fn icache_per_way(&self) -> u32 {
        64 << ((self.config1 >> 22) & 0b111)
    }
    /// I-cache line size in bytes (Config1 bits 21..19); 0 means no i-cache.
    pub fn icache_line_size(&self) -> u32 {
        match (self.config1 >> 19) & 0b111 {
            0 => 0,
            il => 2 << il,
        }
    }
    /// I-cache associativity (Config1 bits 18..16), decoded as field + 1.
    // NOTE(review): returns 0 when the field is 0, although a field of 0
    // usually encodes "direct mapped" (associativity 1) — confirm intended.
    pub fn icache_associativity(&self) -> u32 {
        match (self.config1 >> 16) & 0b111 {
            0 => 0,
            ia => ia + 1,
        }
    }
    /// D-cache sets per way (Config1 bits 15..13), decoded as 64 << field.
    pub fn dcache_per_way(&self) -> u32 {
        64 << ((self.config1 >> 13) & 0b111)
    }
    /// D-cache line size in bytes (Config1 bits 12..10); 0 means no d-cache.
    pub fn dcache_line_size(&self) -> u32 {
        match (self.config1 >> 10) & 0b111 {
            0 => 0,
            dl => 2 << dl,
        }
    }
    /// D-cache associativity (Config1 bits 9..7), decoded as field + 1.
    pub fn dcache_associativity(&self) -> u32 {
        match (self.config1 >> 7) & 0b111 {
            0 => 0,
            da => da + 1,
        }
    }
    /// Coprocessor 2 present (Config1 bit 6).
    pub fn has_cp2(&self) -> bool {
        self.config1 & (1 << 6) != 0
    }
    /// Performance counters present (Config1 bit 4).
    pub fn has_performance_counter(&self) -> bool {
        self.config1 & (1 << 4) != 0
    }
    /// Watch registers present (Config1 bit 3).
    pub fn has_watch_regs(&self) -> bool {
        self.config1 & (1 << 3) != 0
    }
    /// MIPS16 support (Config1 bit 2).
    pub fn has_mips16(&self) -> bool {
        self.config1 & (1 << 2) != 0
    }
    /// EJTAG support (Config1 bit 1).
    pub fn has_ejtag(&self) -> bool {
        self.config1 & (1 << 1) != 0
    }
    /// FPU present (Config1 bit 0).
    pub fn has_fpu(&self) -> bool {
        self.config1 & 1 != 0
    }
}
// CP0 register 16 (Config), selects 0-3. `register_r!` presumably generates
// the read-only accessor (`read_u32`) used by `read()` below — confirm
// against the macro definition.
pub mod __config0 {
    register_r!(16, 0);
}
pub mod __config1 {
    register_r!(16, 1);
}
pub mod __config2 {
    register_r!(16, 2);
}
pub mod __config3 {
    register_r!(16, 3);
}
/// Read CP0 Config0-Config3 into a `Config` snapshot.
pub fn read() -> Config {
    Config {
        config0: __config0::read_u32(),
        config1: __config1::read_u32(),
        config2: __config2::read_u32(),
        config3: __config3::read_u32(),
    }
}
/// MMU size field (Config1 bits 30..25) read directly from the register;
/// mirrors `Config::mmu_size` without taking a full snapshot.
pub fn mmu_size() -> u32 {
    (__config1::read_u32() >> 25) & 0b111111
}
|
use priv_prelude::*;
use util;
// A packet waiting out its artificial delay before delivery.
struct InTransit {
    packet: Option<Ipv4Packet>,  // taken (once) when the timeout fires
    timeout: Timeout,
}
impl Future for InTransit {
    type Item = Ipv4Packet;
    type Error = Void;
    // Resolves with the held packet once the delay elapses. Polling again
    // after completion would panic (the packet has already been taken).
    fn poll(&mut self) -> Result<Async<Ipv4Packet>, Void> {
        match self.timeout.poll().void_unwrap() {
            Async::Ready(()) => Ok(Async::Ready(unwrap!(self.packet.take()))),
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
/// Links two `Ipv4Plug`s and adds delay to packets travelling between them.
pub struct LatencyV4 {
    handle: Handle,
    plug_a: Ipv4Plug,
    plug_b: Ipv4Plug,
    outgoing_a: FuturesUnordered<InTransit>,  // delayed packets headed to plug_a
    outgoing_b: FuturesUnordered<InTransit>,  // delayed packets headed to plug_b
    min_latency: Duration,
    mean_additional_latency: Duration,
}
impl LatencyV4 {
    /// Connect the two given plugs with latency added to the connection.
    ///
    /// `min_latency` is the baseline for the amount of delay added to packets travelling along
    /// this connection. `mean_additional_latency` controls the amount of random, additional
    /// latency added to any given packet. A non-zero `mean_additional_latency` can cause packets
    /// to be re-ordered.
    pub fn spawn(
        handle: &Handle,
        min_latency: Duration,
        mean_additional_latency: Duration,
        plug_a: Ipv4Plug,
        plug_b: Ipv4Plug,
    ) {
        // Build the link state and hand it to the event loop to drive.
        let latency = LatencyV4 {
            handle: handle.clone(),
            plug_a,
            plug_b,
            outgoing_a: FuturesUnordered::new(),
            outgoing_b: FuturesUnordered::new(),
            min_latency,
            mean_additional_latency,
        };
        handle.spawn(latency.infallible());
    }
}
impl Future for LatencyV4 {
    type Item = ();
    type Error = Void;
    // Pump packets in both directions: drain each plug's receiver into the
    // opposite direction's delay queue, then flush expired delays out through
    // the corresponding sender. Completes only when both sides have hung up.
    fn poll(&mut self) -> Result<Async<()>, Void> {
        // Drain plug A; `true` means A's channel has closed.
        let a_unplugged = loop {
            match self.plug_a.rx.poll().void_unwrap() {
                Async::NotReady => break false,
                Async::Ready(None) => break true,
                Async::Ready(Some(packet)) => {
                    // Delay = fixed baseline + exponentially-distributed extra.
                    let delay
                        = self.min_latency
                        + self.mean_additional_latency.mul_f64(util::expovariate_rand());
                    let in_transit = InTransit {
                        packet: Some(packet),
                        timeout: Timeout::new(delay, &self.handle),
                    };
                    self.outgoing_b.push(in_transit);
                },
            }
        };
        // Same for plug B, queueing towards A.
        let b_unplugged = loop {
            match self.plug_b.rx.poll().void_unwrap() {
                Async::NotReady => break false,
                Async::Ready(None) => break true,
                Async::Ready(Some(packet)) => {
                    let delay
                        = self.min_latency
                        + self.mean_additional_latency.mul_f64(util::expovariate_rand());
                    let in_transit = InTransit {
                        packet: Some(packet),
                        timeout: Timeout::new(delay, &self.handle),
                    };
                    self.outgoing_a.push(in_transit);
                },
            }
        };
        // Deliver every packet whose delay has elapsed; send errors (receiver
        // dropped) are deliberately ignored.
        loop {
            match self.outgoing_a.poll().void_unwrap() {
                Async::NotReady => break,
                Async::Ready(None) => break,
                Async::Ready(Some(packet)) => {
                    let _ = self.plug_a.tx.unbounded_send(packet);
                },
            }
        }
        loop {
            match self.outgoing_b.poll().void_unwrap() {
                Async::NotReady => break,
                Async::Ready(None) => break,
                Async::Ready(Some(packet)) => {
                    let _ = self.plug_b.tx.unbounded_send(packet);
                },
            }
        }
        if a_unplugged && b_unplugged {
            return Ok(Async::Ready(()));
        }
        Ok(Async::NotReady)
    }
}
/*
TODO: fix the math in this test
#[cfg(test)]
#[test]
fn test() {
run_test(|| {
use rand;
const NUM_PACKETS: u64 = 1000;
let mut core = unwrap!(Core::new());
let handle = core.handle();
let source_addr = SocketAddrV4::new(
Ipv4Addr::random_global(),
rand::random::<u16>() / 2 + 1000,
);
let dest_addr = SocketAddrV4::new(
Ipv4Addr::random_global(),
rand::random::<u16>() / 2 + 1000,
);
let packet = Ipv4Packet::new_from_fields_recursive(
Ipv4Fields {
source_ip: *source_addr.ip(),
dest_ip: *dest_addr.ip(),
ttl: 16,
},
Ipv4PayloadFields::Udp {
fields: UdpFields::V4 {
source_addr: source_addr,
dest_addr: dest_addr,
},
payload: Bytes::from(&rand::random::<[u8; 8]>()[..]),
},
);
let min_latency = Duration::from_millis(100).mul_f64(util::expovariate_rand());
let mean_additional_latency = Duration::from_millis(100).mul_f64(util::expovariate_rand());
let (plug_a, plug_a_pass) = Ipv4Plug::new_wire();
let (plug_b, plug_b_pass) = Ipv4Plug::new_wire();
LatencyV4::spawn(&handle, min_latency, mean_additional_latency, plug_a_pass, plug_b_pass);
let res = core.run({
let start_time_0 = Instant::now();
for _ in 0..NUM_PACKETS {
let _ = plug_a.tx.unbounded_send(packet.clone());
}
let start_time_1 = Instant::now();
let start_time = start_time_0 + (start_time_1 - start_time_0) / 2;
plug_b.rx
.take(NUM_PACKETS)
.map(move |_packet| {
let delay = Instant::now() - start_time;
assert!(delay >= min_latency);
let additional_delay = delay - min_latency;
additional_delay.div_to_f64(mean_additional_latency)
})
.collect()
.map(move |samples| {
// let this test fail one in a million times due to randomness
const CHANCE_OF_FAILURE: f64 = 1e-6f64;
// inverse of the normal distribution cumulative probability function
fn quantile(mean: f64, variance: f64, p: f64) -> f64 {
use statrs::function::erf::erf_inv;
mean + f64::sqrt(2.0 * variance) * erf_inv(2.0 * p - 1.0)
}
// see: https://en.wikipedia.org/wiki/Exponential_distribution#Confidence_intervals
// a chi-squared(k) distribution can be approximated by normal distribution with mean k
// and variance 2 * k
let lower_chi_squared = quantile(
NUM_PACKETS as f64,
(2 * NUM_PACKETS) as f64,
CHANCE_OF_FAILURE / 2.0,
);
let upper_chi_squared = quantile(
NUM_PACKETS as f64,
(2 * NUM_PACKETS) as f64,
1.0 - CHANCE_OF_FAILURE / 2.0,
);
let mean = samples.into_iter().sum::<f64>() / (NUM_PACKETS as f64);
assert!(2.0 * NUM_PACKETS as f64 * mean / lower_chi_squared < 1.0);
assert!(2.0 * NUM_PACKETS as f64 * mean / upper_chi_squared > 1.0);
})
});
res.void_unwrap()
})
}
*/
|
// q0174_dungeon_game
struct Solution;
impl Solution {
    /// Minimum initial HP a knight needs to survive the whole dungeon,
    /// moving only right or down from the top-left to the bottom-right.
    ///
    /// Dynamic programming from the exit backwards: for each cell we store
    /// the least HP required on *entering* it.
    pub fn calculate_minimum_hp(dungeon: Vec<Vec<i32>>) -> i32 {
        let rows = dungeon.len();
        let cols = dungeon[0].len();
        // HP needed on entering a cell whose delta is `delta`, given that
        // `after` HP is required immediately after leaving it. Never below 1:
        // the knight must stay alive at every step.
        let need = |delta: i32, after: i32| -> i32 {
            if delta < after { after - delta } else { 1 }
        };
        let mut hp = vec![vec![0; cols]; rows];
        // Exit cell: exactly 1 HP must remain after its own delta is applied.
        hp[rows - 1][cols - 1] = need(dungeon[rows - 1][cols - 1], 1);
        // Last column and last row each have a single successor.
        for r in (0..rows - 1).rev() {
            hp[r][cols - 1] = need(dungeon[r][cols - 1], hp[r + 1][cols - 1]);
        }
        for c in (0..cols - 1).rev() {
            hp[rows - 1][c] = need(dungeon[rows - 1][c], hp[rows - 1][c + 1]);
        }
        // Interior cells pick the cheaper of stepping down or right.
        for r in (0..rows - 1).rev() {
            for c in (0..cols - 1).rev() {
                let down = need(dungeon[r][c], hp[r + 1][c]);
                let right = need(dungeon[r][c], hp[r][c + 1]);
                hp[r][c] = down.min(right);
            }
        }
        hp[0][0]
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn it_works() {
        // Example from the problem statement: the knight needs 7 HP
        // to survive the path -2 -> -3 -> 3 -> 1 -> -5.
        let dungeon = vec![vec![-2, -3, 3], vec![-5, -10, 1], vec![10, 30, -5]];
        assert_eq!(Solution::calculate_minimum_hp(dungeon), 7);
    }
}
|
use std::fs;
/// Executes an Intcode program in place.
///
/// Opcodes: `1` = add, `2` = multiply — both read from the two positions
/// named by the next two cells and write to the position named by the
/// third — and `99` = halt. Panics on any other opcode or on an
/// out-of-bounds position operand.
fn int_code(values: &mut Vec<i32>) {
    let len = values.len();
    let mut i = 0;
    loop {
        // `>=`, not `>`: when i == len, `values[i]` is already out of
        // bounds (the original `i > len` check panicked one step late).
        if i >= len {
            break;
        }
        match values[i] {
            1 => {
                let index_to_write = values[i + 3] as usize;
                values[index_to_write] =
                    values[values[i + 1] as usize] + values[values[i + 2] as usize];
                i += 4;
            },
            2 => {
                let index_to_write = values[i + 3] as usize;
                values[index_to_write] =
                    values[values[i + 1] as usize] * values[values[i + 2] as usize];
                i += 4;
            },
            99 => {
                break;
            },
            op => {
                // Report the opcode itself, not just the index, and fix
                // the "Uknow" typo in the original message.
                panic!("Unknown OP code {} at position {}", op, i);
            }
        }
    }
}
/// Advent-of-Code style driver: loads a comma-separated Intcode program
/// from `inputs.txt`, runs part 1 with the fixed (12, 2) inputs, then
/// searches part 2 for the noun/verb pair producing 19690720.
fn main() {
    let text = fs::read_to_string("inputs.txt").expect("got an error opening the file");
    let text = text.trim();
    println!("{}", text);
    let original: Vec<i32> = text.split(",")
        .map(|x| x.parse().unwrap())
        .collect();
    // part 1: restore the program state and run it.
    let mut values = original.clone();
    println!("{:?}", values);
    values[1] = 12;
    values[2] = 2;
    int_code(&mut values);
    println!("{:?}", values);
    assert_eq!(values[0], 5290681);
    // part 2: brute-force the noun/verb pair. Both range over 0..=99
    // inclusive — the original `0..99` silently never tried the value 99.
    let mut result: Option<i32> = None;
    'main: for i in 0..=99 {
        for j in 0..=99 {
            let mut values = original.clone();
            values[1] = i;
            values[2] = j;
            int_code(&mut values);
            if values[0] == 19690720 {
                result = Some(100 * i + j);
                println!("Noun={} Verb={} Result={}", i, j, result.unwrap());
                break 'main;
            }
        }
    }
    assert_eq!(result.unwrap(), 5741);
}
|
//! Implement a pgn_parser
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
use crate::state::*;
use crate::consts::*;
use crate::utils::*;
// Game Start Delineator: every game in a PGN file opens with this tag,
// so the file is split on it to obtain one chunk per game.
pub const PGN_DELINEATOR: &'static str = "[Event";
// Game Result Strings exactly as they appear (with quotes) in the
// [Result "..."] tag of a PGN header.
pub const WHITE_WON: &'static str = "\"1-0\"";
pub const BLACK_WON: &'static str = "\"0-1\"";
pub const GAME_DRAWN: &'static str = "\"1/2-1/2\"";
pub const GAME_ONGOING: &'static str = "\"*\"";
/// Outcome recorded in a game's [Result] tag.
#[derive(Copy,Clone,Debug,PartialEq)]
pub enum GameResult {
    WhiteWon,
    BlackWon,
    Drawn,
    Ongoing, // Ongoing, Unknown or Abandoned
}
/// One fully parsed game: its starting position, the move list, the
/// result recorded in the headers, and the status derived from replaying
/// the moves.
#[derive(Clone,Debug,PartialEq)]
pub struct Game {
    pub init_pos: String, // Start Position as FEN
    pub move_list: Vec<Move>,
    pub result: GameResult, // Recorded result
    pub end_status: Status, // Objective Status
}
/// Opens a skipped comment/variation region while rebuilding the move
/// text: panics if one is already open, flags it open, and appends the
/// ` " ` comment indicator to `mt`.
pub fn oc( mt: &mut String, oc: &mut bool ) {
    assert!( !( *oc ) );
    *mt += " \" ";
    *oc = true;
}
/// Parses a single SAN move string (e.g. "Nf3", "exd5", "O-O", "e8=Q+")
/// against `state` and returns the matching legal move.
///
/// Returns `Err` with a human-readable message when the string is
/// malformed, ambiguous, or does not correspond to a legal move in
/// `state`.
pub fn parse_move( mv_str: &str, state: &State ) -> Result< Move, String > {
    let ( legal_moves, _ ) = state.node_info();
    match mv_str {
        // Castling is matched directly against the legal move list.
        // Squares 4/60 appear to be the king start squares and 6/2/62/58
        // the castling destinations (a1 = 0 layout assumed — TODO confirm).
        "O-O" | "O-O-O" => {
            let mut mv = match state.to_move {
                WHITE => {
                    let mut mv_c = Move::null_move( WHITE_KING, 4 );
                    mv_c.to = if mv_str == "O-O" { 6 } else { 2 };
                    mv_c
                },
                BLACK => {
                    let mut mv_c = Move::null_move( BLACK_KING, 60 );
                    mv_c.to = if mv_str == "O-O" { 62 } else { 58 };
                    mv_c
                },
                _ => panic!( "Invalid side to move!" ),
            };
            state.evaluate_move( &mut mv );
            if legal_moves.contains( &mv ) {
                Ok( mv )
            } else {
                Err( format!( "Illegal move: {}", mv ) )
            }
        },
        _ => {
            let mut mv_str_mut: String = mv_str.to_string();
            // Check: at most one '+', and only as a suffix
            let check_str: String = mv_str_mut.chars().filter( |&x| x == '+' ).collect();
            let check_count = check_str.chars().count();
            if check_count > 1 { return Err( format!( "Too many '+'s in {}", mv_str_mut ) ); }
            let is_check = check_count > 0;
            if is_check && !mv_str_mut.ends_with( &check_str ) {
                return Err( format!( "{} doesn't end with {}", mv_str_mut, check_str ) );
            }
            // Checkmate: at most one '#', and only as a suffix
            let checkmate_str: String = mv_str_mut.chars().filter( |&x| x == '#' ).collect();
            let checkmate_count = checkmate_str.chars().count();
            if checkmate_count > 1 { return Err( format!( "Too many '#'s in {}", mv_str_mut ) ); }
            let is_checkmate = checkmate_count > 0;
            if is_checkmate && !mv_str_mut.ends_with( &checkmate_str ) {
                return Err( format!( "{} doesn't end with {}", mv_str_mut, checkmate_str ) );
            }
            if is_check && is_checkmate { return Err( format!( "Input move has both '+' and '#'" ) ); }
            // Remove check and checkmate markers; they are not needed to
            // identify the move itself
            mv_str_mut = mv_str_mut.chars().filter( |&x| x != '+' && x != '#' ).collect();
            let piece_char = match mv_str_mut.chars().nth( 0 ) {
                Some( piece_char_actual ) => piece_char_actual,
                None => return Err( format!( "piece_char: mv_str_mut[ 0 ] out of bounds!" ) ),
            };
            let from: usize;
            let to: usize;
            let mut capture = EMPTY;
            let mut promotion = EMPTY;
            // A leading file letter ('a'..'h') means a pawn move; anything
            // else must be a piece letter
            let piece = state.to_move | match piece_char {
                'K' => KING,
                'Q' => QUEEN,
                'R' => ROOK,
                'B' => BISHOP,
                'N' => KNIGHT,
                _ => {
                    if 'a' <= piece_char && piece_char <= 'h' {
                        PAWN
                    } else {
                        return Err( format!( "Invalid piece_char: {}", piece_char ) )
                    }
                },
            };
            // Handle promotion: strip a trailing "=X" and record the
            // promoted-to piece (pawn moves only)
            if piece == ( state.to_move | PAWN ) {
                let promotion_str: String = mv_str_mut.chars().filter( |&x| x == '=' ).collect();
                let promotion_count = promotion_str.chars().count();
                if promotion_count > 1 { return Err( format!( "Too many '='s in {}", mv_str_mut ) ); }
                let is_promotion = promotion_count > 0;
                if is_promotion {
                    let temp_str = mv_str_mut;
                    let promoted_to = match temp_str.split( '=' ).nth( 1 ) {
                        Some( promoted_to_actual ) => promoted_to_actual,
                        None => return Err( format!( "promoted_to: temp_str[ 1 ] out of bounds!" ) ),
                    };
                    mv_str_mut = ( match temp_str.split( '=' ).nth( 0 ) {
                        Some( mv_str_mut_actual ) => mv_str_mut_actual,
                        None => return Err( format!( "temp_str[ 0 ] out of bounds!" ) ),
                    } ).to_string();
                    promotion = state.to_move | match promoted_to {
                        "Q" => QUEEN,
                        "R" => ROOK,
                        "B" => BISHOP,
                        "N" => KNIGHT,
                        _ => return Err( format!( "Invalid promotion: {}", promoted_to ) ),
                    };
                }
            }
            // Handle capture and to location: with an 'x' the destination
            // is everything after it; otherwise it is the last two chars
            let capture_str: String = mv_str_mut.chars().filter( |&x| x == 'x' ).collect();
            let capture_count = capture_str.chars().count();
            if capture_count > 1 { return Err( format!( "Too many 'x's in the input move: {}", mv_str_mut ) ); }
            let is_capture = capture_count > 0;
            if is_capture {
                let temp_str = mv_str_mut;
                let capture_square = match temp_str.split( 'x' ).nth( 1 ) {
                    Some( capture_square_actual ) => capture_square_actual,
                    None => return Err( format!( "capture_square: temp_str[ 1 ] out of bounds!" ) ),
                };
                mv_str_mut = ( match temp_str.split( 'x' ).nth( 0 ) {
                    Some( mv_str_mut_actual ) => mv_str_mut_actual,
                    None => return Err( format!( "temp_str[ 0 ] out of bounds!" ) ),
                } ).to_string();
                to = algebraic_to_offset( capture_square );
                capture = state.simple_board[ to ];
            } else {
                let temp_str = mv_str_mut;
                let size = temp_str.chars().count();
                let destination: String = temp_str.chars().skip( size - 2 ).collect();
                to = algebraic_to_offset( &destination );
                mv_str_mut = temp_str.chars().take( size - 2 ).collect();
            }
            // Handle disambiguation: collect every legal move matching the
            // piece/destination/capture/promotion, then narrow down using
            // whatever file/rank hint remains in mv_str_mut
            let mut possibilities: Vec<Move> = Vec::new();
            for x in legal_moves.iter() {
                if x.piece == piece && x.to == to && x.capture == capture && x.promotion == promotion {
                    possibilities.push( *x );
                }
            }
            let num_poss = possibilities.iter().count();
            if num_poss > 1 {
                let mut poss_filtered: Vec<Move> = Vec::new();
                if piece == ( state.to_move | PAWN ) {
                    // For pawns only the source file can disambiguate
                    let size = mv_str_mut.chars().count();
                    if size != 1 {
                        return Err( format!( "Disambiguation is problematic 1: {}, {}", mv_str, mv_str_mut ) );
                    } else {
                        let file = match mv_str_mut.chars().nth( 0 ) {
                            Some( file_actual ) => file_actual,
                            None => return Err( format!( "file: mv_str_mut[ 0 ] out of bounds!" ) ),
                        };
                        if 'a' <= file && file <= 'h' {
                            let file_num = file as usize - 'a' as usize;
                            for x in possibilities.iter() {
                                if x.from % 8 == file_num {
                                    poss_filtered.push( *x );
                                }
                            }
                        } else {
                            return Err( format!( "Invalid file: {}", file ) );
                        }
                    }
                } else {
                    mv_str_mut = mv_str_mut.chars().skip( 1 ).collect(); // Remove the piece identifier
                    let size = mv_str_mut.chars().count();
                    if size > 2 {
                        return Err( format!( "Disambiguation is problematic 2: {}, {}", mv_str, mv_str_mut ) );
                    } else if size == 2 {
                        // Full origin square given, e.g. "Nb1d2"
                        from = algebraic_to_offset( &mv_str_mut );
                        for x in possibilities.iter() {
                            if x.from == from {
                                poss_filtered.push( *x );
                            }
                        }
                    } else if size == 1 {
                        // Single char: either a rank ('1'..'8') or a file ('a'..'h')
                        let disamb = match mv_str_mut.chars().nth( 0 ) {
                            Some( file_actual ) => file_actual,
                            None => return Err( format!( "disamb: mv_str_mut[ 0 ] out of bounds!" ) ),
                        };
                        if '1' <= disamb && disamb <= '8' {
                            let rank_num = disamb as usize - '1' as usize;
                            for x in possibilities.iter() {
                                if x.from / 8 == rank_num {
                                    poss_filtered.push( *x );
                                }
                            }
                        } else if 'a' <= disamb && disamb <= 'h' {
                            let file_num = disamb as usize - 'a' as usize;
                            for x in possibilities.iter() {
                                if x.from % 8 == file_num {
                                    poss_filtered.push( *x );
                                }
                            }
                        } else {
                            return Err( format!( "Disambiguation is problematic 3: {}, {}", mv_str, mv_str_mut ) )
                        }
                    } else {
                        return Err( format!( "Disambiguation is problematic 4: {}, {}", mv_str, mv_str_mut ) )
                    }
                }
                // Exactly one candidate must survive the filtering
                let final_size = poss_filtered.iter().count();
                if final_size == 1 {
                    match poss_filtered.iter_mut().nth( 0 ) {
                        Some( poss_filtered_actual ) => {
                            state.evaluate_move( poss_filtered_actual );
                            Ok( *poss_filtered_actual )
                        },
                        None => return Err( format!( "poss_filtered: poss_filtered[ 0 ] out of bounds!" ) ),
                    }
                } else {
                    return Err( format!( "Disambiguation is problematic 5: {}, {}", mv_str, mv_str_mut ) )
                }
            } else if num_poss == 1 {
                match possibilities.iter_mut().nth( 0 ) {
                    Some( possibilities_actual ) => {
                        state.evaluate_move( possibilities_actual );
                        Ok( *possibilities_actual )
                    },
                    None => return Err( format!( "possibilities: possibilities[ 0 ] out of bounds!" ) ),
                }
            } else {
                return Err( format!( "Illegal move: {}", mv_str ) )
            }
        }
    }
}
// Parse pgn, do some checks and return the GameList
pub fn parse_pgn( path: &str ) -> Vec<Game> {
let file = match File::open( path ) {
Ok( file ) => BufReader::new( file ),
Err( error ) => panic!( "Can't find {}: {:?}", path, error ),
};
// get file as String
let mut file_string = String::new();
for line in file.lines() {
file_string.push_str( "\n" );
file_string.push_str( &line.unwrap() );
}
let mut games: Vec<Game> = Vec::new();
// Get a game iterator
let mut game_iter = file_string.split( PGN_DELINEATOR ).skip( 1 );
while let Some( pgn ) = game_iter.next() {
let pgn = pgn.trim();
let move_text = pgn.split( "]" ).last().unwrap();
let mut curr_iter = pgn.split( "]" ).skip( 1 );
// Seven Tag Roster - we already ignored 'Event'
// NOTE: We are going to ignore the contents of most of the tags
let site = curr_iter.next().unwrap().trim();
assert!( site.starts_with( "[Site" ) );
let date = curr_iter.next().unwrap().trim();
assert!( date.starts_with( "[Date" ) );
let round = curr_iter.next().unwrap().trim();
assert!( round.starts_with( "[Round" ) );
let white = curr_iter.next().unwrap().trim();
assert!( white.starts_with( "[White" ) );
let black = curr_iter.next().unwrap().trim();
assert!( black.starts_with( "[Black" ) );
let result = curr_iter.next().unwrap().trim();
assert!( result.starts_with( "[Result" ) );
let result_val = result.split_whitespace().last().unwrap();
let result_val_nq = result_val.split( "\"" ).nth( 1 ).unwrap();
assert_eq!( move_text.split_whitespace().last().unwrap(), result_val_nq );
let result_enum = match result_val {
WHITE_WON => GameResult::WhiteWon,
BLACK_WON => GameResult::BlackWon,
GAME_DRAWN => GameResult::Drawn,
GAME_ONGOING => GameResult::Ongoing,
_ => panic!( "Invalid game result: {}", result_val ),
};
let mut fen: String = START_FEN.to_string();
let mut set_up: bool = false;
let mut termination: String = "".to_string();
let mut final_fen: String = "".to_string();
while let Some( item ) = curr_iter.next() {
let item = item.trim();
if item.starts_with( "[SetUp" ) {
if item.split( "\"" ).nth( 1 ).unwrap() == "1" {
set_up = true;
}
} else if item.starts_with( "[FEN" ) {
fen = item.split( "\"" ).nth( 1 ).unwrap().to_string();
} else if item.starts_with( "[FinalFEN" ) {
final_fen = item.split( "\"" ).nth( 1 ).unwrap().to_string();
} else if item.starts_with( "[Termination" ) {
termination = item.split( "\"" ).nth( 1 ).unwrap().to_string();
}
}
if !set_up && fen != START_FEN {
panic!( "If not Set Up - fen has to be START_FEN!" );
}
// Replace any comment/annotation/variation with the comment indicator, '"'
// We just ignore all the above mentioned stuff
// Nested stuff is unsupported!
// Too lazy to write a proper parser....
let all_special: String = "\"{}()%".to_string();
let move_text_nr = move_text.split( result_val_nq ).nth( 0 ).unwrap().trim();
let mut move_text_pure = String::new();
let mut open_comment: bool = false;
let mut comment_type: char = '"';
for elem in move_text_nr.chars() {
if open_comment {
if elem == comment_type {
open_comment = false;
} else if all_special.contains( elem ) {
panic!( "We don't support nested variations/comments!" );
}
} else {
match elem {
'"' => {
oc( &mut move_text_pure, &mut open_comment );
comment_type = '"';
},
'{' => {
oc( &mut move_text_pure, &mut open_comment );
comment_type = '}';
},
'(' => {
oc( &mut move_text_pure, &mut open_comment );
comment_type = ')';
},
'%' => {
oc( &mut move_text_pure, &mut open_comment );
comment_type = '\n';
},
_ => {
move_text_pure.push( elem );
},
}
}
}
let mut state = State::generate_state_from_fen( &fen );
let mut move_list: Vec<Move> = Vec::new();
// Parse move_text_pure
let mut mni: bool = true; // move number indication
for mv_str in move_text_pure.split_whitespace() {
if mv_str == "\"" {
mni = true;
} else if mni {
let number: String = mv_str.chars().filter( |&x| x != '.' ).collect();
let dots: String = mv_str.chars().filter( |&x| x == '.' ).collect();
assert_eq!( number.parse::<usize>().unwrap(), state.fullmove_count );
assert!( mv_str.ends_with( &dots ) );
match state.to_move {
WHITE => assert_eq!( dots, "." ),
BLACK => assert_eq!( dots, "..." ),
_ => panic!( "Invalid side!" ),
}
mni = false;
} else { // This one is a move... finally!
let the_move = match parse_move( mv_str, &state ) {
Ok( the_move_actual ) => the_move_actual,
Err( error ) => panic!( "{}", error ),
};
move_list.push( the_move );
state.make( &the_move );
mni = state.to_move == WHITE;
if mv_str.ends_with( '+' ) {
assert!( state.num_checks > 0 );
} else if mv_str.ends_with( '#' ) {
assert!( state.num_checks > 0 );
let ( legal_moves, _ ) = state.node_info();
assert_eq!( legal_moves.iter().count(), 0 );
}
}
}
if final_fen != "" {
assert_eq!( final_fen, state.fen( true ) );
}
let ( _, status ) = state.node_info();
if termination.contains( "checkmate" ) {
assert_eq!( status, Status::Checkmate );
} else if termination.contains( "stalemate" ) {
assert_eq!( status, Status::Stalemate );
} else if termination.contains( "fifty move rule" ) {
assert_eq!( status, Status::FiftyMoveDraw );
} else if termination.contains( "repetition" ) {
assert_eq!( status, Status::RepetitionDraw );
} else if termination.contains( "insufficient material" ) {
assert_eq!( status, Status::InsufficientMaterial );
}
games.push( Game { init_pos: fen,
move_list: move_list,
result: result_enum,
end_status: status } );
}
games
}
|
use std::io::{
Read,
Seek,
};
use blorb::BlorbCursor;
use glulx::Glulx;
/// Runs a Glulx story whose code was extracted from a Blorb archive.
///
/// NOTE(review): the Blorb cursor is currently unused — execution works
/// directly from `code`. Renamed the parameter to `_blorb` to make that
/// explicit (and silence the unused-variable warning) until Blorb
/// resources are actually consumed.
pub fn run_blorb<T: Read + Seek>(code: Vec<u8>, _blorb: BlorbCursor<T>) {
    let mut glulx = Glulx::from_rom(code).unwrap();
    glulx.run();
}
/// Loads a bare Glulx ROM image and runs it to completion.
/// Panics if the ROM fails to load.
pub fn run(code: Vec<u8>) {
    let mut vm = Glulx::from_rom(code).unwrap();
    vm.run();
}
|
use contracts::{Contract, STANDARD_THREE};
use player::{PlayerId, PlayerTurn};
/// Successful outcome of a bid/pass: either the id of the next player to
/// act, or `Last` when the final bid has been placed.
#[deriving(Eq, PartialEq, Show)]
pub enum Success {
    Next(PlayerId),
    Last,
}
/// Reasons a bid or pass can be rejected.
#[deriving(Eq, PartialEq, Show)]
pub enum BidError {
    NotPlayersTurn,
    ContractTooLow,
    InvalidContract,
    MustBid,     // passing when bidding is mandatory
    Done,        // bidding already finished
}
/// The `Bidding` trait is used to specify the process of bidding for different
/// game variants.
pub trait Bidding {
    /// Return the player's id that is currently bidding.
    fn current_player(&self) -> &PlayerId;
    /// Bid a contract for a player.
    fn bid(&mut self, player: &PlayerId, contract: Contract) -> Result<Success, BidError>;
    /// Pass the bid for a player.
    fn pass(&mut self, player: &PlayerId) -> Result<Success, BidError>;
    /// Return true if the bidding process is finished.
    fn is_done(&self) -> bool;
    /// Returns the winning bid after the bidding is done, returns `None` otherwise.
    fn winner(&self) -> Option<Bid>;
}
/// A bid of a player: who bid, with what seat priority (used to break
/// ties in `is_bid_valid`), and which contract.
#[deriving(Eq, PartialEq, Show)]
pub struct Bid {
    player: PlayerId,
    player_priority: uint,
    contract: Contract,
}
impl Bid {
    /// Constructs a new bid for a player with priority and the bid contract.
    fn new(player: PlayerId, priority: uint, contract: Contract) -> Bid {
        Bid {
            player: player,
            player_priority: priority,
            contract: contract,
        }
    }
    /// Return the contract that was bid by the player.
    pub fn contract(&self) -> Contract {
        self.contract
    }
    /// Returns the player id of the player that made the bid.
    pub fn player(&self) -> PlayerId {
        self.player
    }
}
/// A 4-player bidding helper implementing `Bidding`.
struct Bidder {
    forehand: PlayerId,   // player with the default (mandatory) bid
    done: bool,           // set once the last remaining player has bid
    highest: Bid,         // current highest bid
    turn: PlayerTurn,     // rotation of players still in the bidding
}
// Default contract for the forehand player.
const DEFAULT_CONTRACT: Contract = STANDARD_THREE;
// The number of players that Bidder is implemented for.
const NUM_PLAYERS: uint = 4;
impl Bidder {
    /// Create a new 4-player implementation of Bidding.
    ///
    /// The player after the dealer (the forehand) is given the default
    /// contract; bidding then starts with the player after that, and the
    /// dealer bids last.
    pub fn new(dealer: PlayerId) -> Bidder {
        let mut turn = PlayerTurn::start_with(NUM_PLAYERS, dealer);
        // Skip the dealer as he is the last one to bid.
        turn.next();
        let highest_bid = Bid::new(*turn.current(), player_priority(&turn, turn.current()), DEFAULT_CONTRACT);
        let forehand = *turn.current();
        // Skip the first player because he has a default bid assigned and bids
        // after everybody else.
        turn.next();
        Bidder {
            forehand: forehand,
            done: false,
            highest: highest_bid,
            turn: turn,
        }
    }
    /// Returns the current highest bid.
    pub fn current_bid(&self) -> &Bid {
        &self.highest
    }
    /// Returns true if forehand player is bidding and the only bid is the default.
    fn has_no_bets(&self, player: &PlayerId) -> bool {
        &self.forehand == player && self.highest.contract() == DEFAULT_CONTRACT
    }
    /// Advances to the next player via `f`, or finishes the bidding when
    /// only one player is left. (`|…| -> …` is pre-1.0 closure syntax.)
    fn next_player(&mut self, f: |&mut PlayerTurn| -> PlayerId) -> Success {
        if self.turn.current_players() == 1 {
            // Now that the last remaining player bidding has bid we are done.
            self.done = true;
            Last
        } else {
            Next(f(&mut self.turn))
        }
    }
}
impl Bidding for Bidder {
    fn current_player(&self) -> &PlayerId {
        self.turn.current()
    }
    fn bid(&mut self, player: &PlayerId, contract: Contract) -> Result<Success, BidError> {
        // Bid is built eagerly (before validity checks) from the current
        // turn holder and the caller's seat priority.
        let bid = Bid::new(*self.turn.current(), player_priority(&self.turn, player), contract);
        if self.is_done() {
            Err(Done)
        } else if self.turn.current() != player {
            Err(NotPlayersTurn)
        } else if contract.is_klop() && !self.has_no_bets(player) {
            // Klop cannot be played by everyone except the forehand player when
            // no other bids are made.
            Err(InvalidContract)
        } else if !is_bid_valid(&self.highest, &bid){
            Err(ContractTooLow)
        } else {
            self.highest = bid;
            Ok(self.next_player(|turn| *turn.next()))
        }
    }
    fn pass(&mut self, player: &PlayerId) -> Result<Success, BidError> {
        if self.is_done() {
            Err(Done)
        } else if self.turn.current() != player {
            Err(NotPlayersTurn)
        } else if self.has_no_bets(player) || self.turn.current_players() == 1 {
            // Bidding is mandatory if there were no bids made or the last
            // player bidding did not bid yet.
            Err(MustBid)
        } else {
            Ok(self.next_player(|turn| {
                // Player that passes the bid cannot rejoin the bidding again.
                *turn.remove()
            }))
        }
    }
    fn is_done(&self) -> bool {
        self.done
    }
    fn winner(&self) -> Option<Bid> {
        // Only meaningful once bidding has finished.
        if self.is_done() {
            Some(self.highest)
        } else {
            None
        }
    }
}
/// Seat priority of `player` relative to the player the turn order
/// started with (lower = bids earlier, keeps ties on equal contracts).
/// NOTE(review): `*player - *turn.started_with()` underflows `uint` when
/// player < started_with; presumably the modulo arithmetic is relied on
/// to wrap correctly — TODO confirm.
fn player_priority(turn: &PlayerTurn, player: &PlayerId) -> uint {
    let pos_diff = *player as uint - *turn.started_with() as uint;
    (pos_diff + turn.num_players() - 1) % turn.num_players()
}
/// Returns true when `wanted` may replace `highest`: a player with equal
/// or earlier seat priority may match the highest contract, while a
/// later-priority player must strictly exceed it.
fn is_bid_valid(highest: &Bid, wanted: &Bid) -> bool {
    let may_match = wanted.player_priority <= highest.player_priority;
    if may_match {
        wanted.contract >= highest.contract
    } else {
        wanted.contract > highest.contract
    }
}
#[cfg(test)]
mod test {
    // NOTE(review): several `assert!(...)` statements were missing their
    // trailing semicolons, which is a compile error; semicolons added.
    use super::{Bidder, Bidding, Next, Last, NotPlayersTurn,
                MustBid, Done, InvalidContract, ContractTooLow};
    use super::DEFAULT_CONTRACT;
    use contracts::{KLOP, STANDARD_THREE, STANDARD_TWO, STANDARD_ONE,
                    SOLO_THREE, SOLO_TWO, SOLO_ONE};
    #[test]
    fn forehand_player_has_default_bid() {
        let bidder = Bidder::new(0);
        assert_eq!(bidder.current_bid().contract(), DEFAULT_CONTRACT);
        assert_eq!(*bidder.current_player(), 2);
    }
    #[test]
    fn player_can_pass_a_bid() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.current_bid().contract(), DEFAULT_CONTRACT);
        assert!(bidder.pass(&2).is_ok());
        assert_eq!(bidder.current_bid().contract(), DEFAULT_CONTRACT);
        assert!(bidder.pass(&3).is_ok());
        assert_eq!(bidder.current_bid().contract(), DEFAULT_CONTRACT);
    }
    #[test]
    fn player_cant_pass_if_its_not_his_turn() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.pass(&1), Err(NotPlayersTurn));
        assert_eq!(bidder.pass(&3), Err(NotPlayersTurn));
    }
    #[test]
    fn forehand_player_is_not_allowed_to_pass_the_bid_if_no_bids_were_made() {
        let mut bidder = Bidder::new(0);
        assert!(bidder.pass(&2).is_ok());
        assert!(bidder.pass(&3).is_ok());
        assert!(bidder.pass(&0).is_ok());
        assert_eq!(bidder.pass(&1), Err(MustBid));
    }
    #[test]
    fn passing_is_not_allowed_when_bidding_is_finished() {
        let mut bidder = Bidder::new(0);
        assert!(bidder.pass(&2).is_ok());
        assert!(bidder.pass(&3).is_ok());
        assert!(bidder.pass(&0).is_ok());
        assert_eq!(bidder.bid(&1, DEFAULT_CONTRACT), Ok(Last));
        assert_eq!(bidder.pass(&2), Err(Done));
    }
    #[test]
    fn player_can_bid() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.bid(&2, STANDARD_TWO), Ok(Next(3)));
    }
    #[test]
    fn play_is_not_allowed_to_bid_three_of_klop() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.bid(&2, KLOP), Err(InvalidContract));
        assert_eq!(bidder.bid(&2, STANDARD_THREE), Err(ContractTooLow));
    }
    #[test]
    fn forehand_player_can_bid_klop_if_no_other_bids_are_made() {
        let mut bidder = Bidder::new(0);
        assert!(bidder.pass(&2).is_ok());
        assert!(bidder.pass(&3).is_ok());
        assert!(bidder.pass(&0).is_ok());
        assert_eq!(bidder.bid(&1, KLOP), Ok(Last));
    }
    #[test]
    fn player_must_bid_a_higher_bid_than_the_highest() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.bid(&2, STANDARD_TWO), Ok(Next(3)));
        assert_eq!(bidder.bid(&3, STANDARD_TWO), Err(ContractTooLow));
        assert_eq!(bidder.bid(&3, STANDARD_ONE), Ok(Next(0)));
    }
    #[test]
    fn forehand_player_can_bid_contracts_of_equal_or_higher_value() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.bid(&2, STANDARD_TWO), Ok(Next(3)));
        assert_eq!(bidder.pass(&3), Ok(Next(0)));
        assert_eq!(bidder.pass(&0), Ok(Next(1)));
        assert_eq!(bidder.bid(&1, STANDARD_TWO), Ok(Next(2)));
    }
    #[test]
    fn bidding_continues_until_all_players_but_one_pass_the_bid() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.bid(&2, STANDARD_TWO), Ok(Next(3)));
        assert_eq!(bidder.pass(&3), Ok(Next(0)));
        assert_eq!(bidder.bid(&0, STANDARD_ONE), Ok(Next(1)));
        assert_eq!(bidder.bid(&1, SOLO_THREE), Ok(Next(2)));
        assert_eq!(bidder.pass(&2), Ok(Next(0)));
        assert_eq!(bidder.bid(&0, SOLO_TWO), Ok(Next(1)));
        assert_eq!(bidder.pass(&1), Ok(Next(0)));
        assert_eq!(bidder.bid(&0, SOLO_ONE), Ok(Last));
    }
    #[test]
    fn bidding_starts_with_next_player_to_dealer() {
        let mut bidder = Bidder::new(3);
        assert_eq!(bidder.bid(&1, STANDARD_TWO), Ok(Next(2)));
    }
    #[test]
    fn winner_bids_last() {
        let mut bidder = Bidder::new(0);
        assert_eq!(bidder.bid(&2, STANDARD_TWO), Ok(Next(3)));
        assert_eq!(bidder.pass(&3), Ok(Next(0)));
        assert_eq!(bidder.pass(&0), Ok(Next(1)));
        assert_eq!(bidder.bid(&1, STANDARD_TWO), Ok(Next(2)));
        assert_eq!(bidder.bid(&2, STANDARD_ONE), Ok(Next(1)));
        assert_eq!(bidder.pass(&1), Ok(Next(2)));
        assert_eq!(bidder.pass(&2), Err(MustBid));
        assert_eq!(bidder.bid(&2, STANDARD_ONE), Ok(Last));
    }
}
|
extern crate asn1_der;
use ::asn1_der::{ Asn1DerError, IntoDerObject, FromDerObject };
// Test fixture; assumed to hold a DER-encoded OCTET STRING whose payload
// starts at byte offset 5 (see its use in `test_ok`) — TODO confirm.
const RANDOM: &[u8] = include_bytes!("rand.dat");
#[test]
fn test_ok() {
    // (serialized DER bytes, expected decoded payload) pairs.
    let cases: &[(&[u8], &[u8])] = &[
        (b"\x04\x00", b""),
        (RANDOM, &RANDOM[5..]),
    ];
    for (bytes, data) in cases {
        // Deserialization must recover the payload...
        let deserialized = Vec::<u8>::deserialize(bytes.iter()).unwrap();
        assert_eq!(*data, deserialized.as_slice());
        // ...predict the exact encoded length...
        assert_eq!(deserialized.serialized_len(), bytes.len());
        // ...and round-trip back to the original bytes.
        let mut target = vec![0u8; bytes.len()];
        deserialized.serialize(target.iter_mut()).unwrap();
        assert_eq!(*bytes, target.as_slice());
    }
}
#[test]
fn test_err() {
    // (malformed DER bytes, expected error) pairs.
    let cases: &[(&[u8], Asn1DerError)] = &[
        // Invalid tag
        (b"\x03\x01\x00", Asn1DerError::InvalidTag),
        // Length mismatch
        (b"\x04\x01", Asn1DerError::LengthMismatch),
    ];
    for (bytes, error) in cases {
        assert_eq!(u128::deserialize(bytes.iter()).unwrap_err(), *error);
    }
}
#![no_std]
#![feature(const_panic)]
#![feature(abi_x86_interrupt)]
extern crate spin;
extern crate x86_64;
extern crate ros_alloc;
use core::borrow::{BorrowMut, Borrow};
use ros_alloc::{Alloc, MutexAlloc};
#[global_allocator]
// Global heap allocator; starts empty, presumably initialised during boot
// before the first allocation — TODO confirm the init path.
static mut ALLOC: MutexAlloc = unsafe { MutexAlloc::empty() };
#[macro_use] pub mod macros;
pub mod io;
pub mod text_output;
pub mod interrupts;
pub mod alloc;
#[doc(hidden)]
/// Backend for the crate's print macros: formats `args` to the global
/// text output device. Interrupts are disabled for the duration so an
/// interrupt handler cannot re-enter while the lock is held.
// NOTE(review): `**` dereferences a raw writer pointer stored behind the
// TEXT_OUT lock; assumes it was initialised before first use — confirm.
pub fn _print(args: core::fmt::Arguments) {
    use core::fmt::Write;
    x86_64::instructions::interrupts::without_interrupts(|| unsafe { (**text_output::TEXT_OUT.lock()).write_fmt(args).unwrap() });
}
#[doc(hidden)]
/// Same as `_print` but targets the debug output device (TEXT_DEBUG).
// NOTE(review): as with `_print`, this dereferences a raw writer pointer;
// assumes TEXT_DEBUG was initialised before first use — confirm.
pub fn _debug(args: core::fmt::Arguments) {
    use core::fmt::Write;
    x86_64::instructions::interrupts::without_interrupts(|| unsafe { (**text_output::TEXT_DEBUG.lock()).write_fmt(args).unwrap() });
}
|
use crate::rerrs::{ErrorKind, SteelErr};
use crate::rvals::SteelVal::*;
use crate::rvals::{ConsCell, Result, SteelVal};
use crate::stop;
use crate::throw;
use im_rc::Vector;
use crate::gc::Gc;
/// Namespace struct for the primitive list operations exposed to Steel programs.
pub struct ListOperations {}
impl ListOperations {
    /// `(cons elem lst)`: prepends `elem`. An empty vector is treated as
    /// the empty list; a non-list second argument yields an improper pair.
    pub fn cons() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 2 {
                stop!(ArityMismatch => "cons takes only two arguments")
            }
            let mut args = args.iter().cloned();
            match (args.next(), args.next()) {
                (Some(elem), Some(lst)) => match lst {
                    SteelVal::VectorV(ref l) => {
                        if l.is_empty() {
                            // cons onto '() -> single-element list
                            Ok(SteelVal::Pair(Gc::new(ConsCell::new(elem, None))))
                        } else {
                            Ok(SteelVal::Pair(Gc::new(ConsCell::new(
                                elem,
                                Some(Gc::new(ConsCell::new(lst, None))),
                            ))))
                        }
                    }
                    SteelVal::Pair(cdr) => {
                        Ok(SteelVal::Pair(Gc::new(ConsCell::new(elem, Some(cdr)))))
                    }
                    _ => Ok(SteelVal::Pair(Gc::new(ConsCell::new(
                        elem,
                        Some(Gc::new(ConsCell::new(lst, None))),
                    )))),
                },
                _ => stop!(ArityMismatch => "cons takes two arguments"),
            }
        })
    }
    /// `(car lst)`: first element of a pair; type error on non-pairs.
    pub fn car() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "car takes one argument");
            }
            if let Some(first) = args.iter().next() {
                match first {
                    Pair(cell) => Ok(cell.car()),
                    e => {
                        stop!(TypeMismatch => "car takes a list, given: {}", e);
                    }
                }
            } else {
                stop!(ArityMismatch => "car takes one argument");
            }
        })
    }
    /// `(cdr lst)`: rest of a pair; the end of a list is represented as
    /// an empty vector (see TODO below).
    pub fn cdr() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() != 1 {
                stop!(ArityMismatch => "cdr takes one argument");
            }
            if let Some(first) = args.iter().next() {
                match first {
                    Pair(cell) => match cell.cdr() {
                        Some(rest) => Ok(SteelVal::Pair(Gc::clone(rest))),
                        None => Ok(SteelVal::VectorV(Gc::new(Vector::new()))), // TODO
                    },
                    e => {
                        stop!(TypeMismatch => "cdr takes a list, given: {}", e);
                    }
                }
            } else {
                stop!(ArityMismatch => "cdr takes one argument");
            }
        })
    }
    /// `(list ...)`: builds a proper list from all of its arguments.
    pub fn list() -> SteelVal {
        SteelVal::FuncV(Self::built_in_list_func())
    }
// TODO fix this
// This panics on non positive values
pub fn range() -> SteelVal {
SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
if args.len() != 2 {
stop!(ArityMismatch => "range takes two arguments")
}
let mut args = args.into_iter();
match (args.next(), args.next()) {
(Some(elem), Some(lst)) => {
if let (IntV(lower), IntV(upper)) = (elem, lst) {
// let size = (upper - lower) as usize;
// let mut res = Vec::with_capacity(size);
// println!("{} {}", lower, upper);
Ok(Self::built_in_list_normal_iter_non_result(
(*lower as usize..*upper as usize)
.into_iter()
.map(|x| SteelVal::IntV(x as isize)),
))
// for i in lower as usize..upper as usize {
// res.push(Gc::new(SteelVal::IntV(i as isize)));
// }
// Self::built_in_list_func()(&res)
} else {
stop!(TypeMismatch => "range expected number")
}
}
_ => stop!(ArityMismatch => "range takes two arguments"),
}
})
}
    // TODO fix the VectorV case
    /// `(reverse lst)`: reverses a pair-list or a vector; errors on
    /// anything else.
    pub fn reverse() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            if args.len() == 1 {
                match &args[0] {
                    SteelVal::Pair(_) => {
                        // Flatten to a Vec, reverse, rebuild the list
                        let mut lst = Self::collect_into_vec(&args[0])?;
                        lst.reverse();
                        Self::built_in_list_func_flat(&lst)
                    }
                    SteelVal::VectorV(v) => Ok(SteelVal::VectorV(Gc::new(
                        v.iter().rev().cloned().collect(),
                    ))),
                    _ => {
                        stop!(TypeMismatch => "reverse requires an iterable")
                    }
                }
            } else {
                stop!(ArityMismatch => "reverse takes one argument");
            }
        })
    }
pub fn list_to_string() -> SteelVal {
SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
if args.len() != 1 {
stop!(ArityMismatch => "list->string takes one argument");
}
if let Some(first) = args.into_iter().next() {
match first {
Pair(_) => {
let collected_string = SteelVal::iter(first.clone()).map(|x| {
x.char_or_else(throw!(TypeMismatch => "list->string expected a list of characters"))
})
.collect::<Result<String>>()?;
Ok(SteelVal::StringV(collected_string.into()))
}
VectorV(v) if v.len() == 0 => Ok(SteelVal::StringV("".into())),
e => {
stop!(TypeMismatch => "list->string takes a list, given: {}", e);
}
}
} else {
stop!(ArityMismatch => "list->string takes one argument");
}
})
}
    /// `(push-back lst elem)`: appends `elem` to the end of a list.
    /// An empty vector counts as the empty list.
    pub fn push_back() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            // let mut lst = Vec::new();
            if args.len() != 2 {
                stop!(ArityMismatch => "push-back expected 2 arguments");
            }
            match &args[0] {
                SteelVal::Pair(_) => {
                    // Flatten, push, rebuild — O(n) in the list length
                    let mut lst: Vec<_> = SteelVal::iter(args[0].clone()).collect();
                    lst.push(args[1].clone());
                    ListOperations::built_in_list_func_flat(&lst)
                }
                SteelVal::VectorV(v) => {
                    if v.is_empty() {
                        let lst = vec![args[1].clone()];
                        ListOperations::built_in_list_func_flat(&lst)
                    } else {
                        stop!(TypeMismatch => "push-back requires a list as the first argument")
                    }
                }
                _ => {
                    stop!(TypeMismatch => "push-back requires a list as the first argument");
                }
            }
        })
    }
    /// `(append lst ...)`: concatenates any number of lists; empty
    /// vectors (the empty list) are skipped, anything else is an error.
    pub fn append() -> SteelVal {
        SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
            let mut lst = Vec::new();
            for arg in args {
                match arg {
                    SteelVal::Pair(_) => {
                        for value in SteelVal::iter(arg.clone()) {
                            // println!("{:?}", value);
                            lst.push(value);
                        }
                    }
                    SteelVal::VectorV(v) => {
                        // Only the empty vector (i.e. '()) is accepted here
                        if v.is_empty() {
                            continue;
                        }
                    }
                    _ => {
                        let error_msg =
                            format!("append expected a list, found: {}", arg.to_string());
                        stop!(TypeMismatch => error_msg);
                    }
                }
            }
            // let lst = args
            //     .map(|x| {
            //         if let SteelVal::Pair(_, _) = x.as_ref() {
            //             Ok(SteelVal::iter(x))
            //         } else {
            //             stop!(TypeMismatch => "append expected a list");
            //         }
            //     })
            //     .flatten()
            //     .collect::<Result<Vec<Gc<SteelVal>>>>();
            Self::built_in_list_func_flat(&lst)
        })
    }
// pub fn take() -> SteelVal {
// SteelVal::FuncV(|args: Vec<Gc<SteelVal>>| -> Result<Gc<SteelVal>> {
// if args.len() == 1 {
// match &args[0].as_ref() {
// SteelVal::Pair(_, _) => {
// let mut lst = Self::collect_into_vec(&args[0])?;
// lst.reverse();
// Self::built_in_list_func()(lst)
// }
// SteelVal::VectorV(v) => Ok(Gc::new(SteelVal::BoolV(v.is_empty()))),
// _ => Ok(Gc::new(SteelVal::BoolV(false))),
// }
// } else {
// stop!(ArityMismatch => "reverse takes one argument");
// }
// })
// }
// pub fn flatten() -> SteelVal {
// SteelVal::FuncV(|args: Vec<Gc<SteelVal>>| -> Result<Gc<SteelVal>> {
// let flattened_vec = args
// .into_iter()
// .map(|x| Self::collect_into_vec(&x))
// .collect::<Result<Vec<Vec<Gc<SteelVal>>>>>()?
// .into_iter()
// .flatten()
// .collect::<Vec<Gc<SteelVal>>>();
// Self::built_in_list_func()(flattened_vec)
// // unimplemented!()
// })
// }
/// Builtin `list->vector`: copies the elements of a proper list into a
/// new vector. Takes exactly one argument, which must be a pair.
pub fn list_to_vec() -> SteelVal {
    SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
        if args.len() != 1 {
            stop!(ArityMismatch => "list->vector takes one argument");
        }
        if let SteelVal::Pair(..) = &args[0] {
            // Walk the cons chain and collect into an immutable vector.
            Ok(SteelVal::VectorV(Gc::new(
                SteelVal::iter(args[0].clone()).collect(),
            )))
        } else {
            stop!(TypeMismatch => "list->vector expected list")
        }
    })
}
/// Collects the values of a list into a `Vec` by delegating to
/// `SteelVal::iter`, which walks the cons chain.
///
/// The previous `.into_iter()` on an iterator was a no-op and has been
/// removed, along with two large blocks of dead commented-out code that
/// re-implemented the traversal by hand.
pub fn collect_into_vec(p: &SteelVal) -> Result<Vec<SteelVal>> {
    Ok(SteelVal::iter(p.clone()).collect())
}
/// Builds a cons list from a plain iterator of values (no `Result` items).
///
/// Strategy: materialize one unlinked `ConsCell` per value, then link them
/// back-to-front — repeatedly pop the last cell and install it as the `cdr`
/// of the cell before it. The surviving head cell becomes the list; an empty
/// input yields the empty vector (this runtime's empty-list value).
pub fn built_in_list_normal_iter_non_result<I>(args: I) -> SteelVal
where
I: Iterator<Item = SteelVal>,
{
let mut pairs: Vec<ConsCell> = args.map(|car| ConsCell::new(car, None)).collect();
// let mut rev_iter = pairs.iter_mut().rev().enumerate();
// Iterate indices from len-1 down to 0, skipping len-1 itself: the last
// cell has no successor to link.
let mut rev_iter = (0..pairs.len()).into_iter().rev();
rev_iter.next();
for i in rev_iter {
// pop() removes index i+1, which is linked as the tail of cell i.
let prev = pairs.pop().unwrap();
if let Some(ConsCell { cdr, .. }) = pairs.get_mut(i) {
*cdr = Some(Gc::new(prev))
} else {
// i is always a valid index because we pop exactly one cell per step.
unreachable!()
}
}
pairs
.pop()
.map(|x| SteelVal::Pair(Gc::new(x)))
.unwrap_or(VectorV(Gc::new(Vector::new())))
}
/// Builds a cons list from an iterator of `Result<SteelVal>`, short-circuiting
/// on the first `Err`. Same back-to-front linking strategy as
/// `built_in_list_normal_iter_non_result`, but the cells are allocated behind
/// `Gc` up front, so linking goes through `Gc::get_mut` (safe here because no
/// cell has been shared yet).
pub fn built_in_list_normal_iter<I>(args: I) -> Result<SteelVal>
where
I: Iterator<Item = Result<SteelVal>>,
{
// unimplemented!();
// let mut pairs = Vec::new();
let mut pairs: Vec<Gc<ConsCell>> = args
.map(|car| Ok(Gc::new(ConsCell::new(car?, None))))
.collect::<Result<_>>()?;
// use this as empty for now
if pairs.is_empty() {
return Ok(SteelVal::VectorV(Gc::new(Vector::new())));
}
// let mut rev_iter = pairs.iter_mut().rev().enumerate();
// Indices len-2 down to 0: link each cell to the one after it.
let mut rev_iter = (0..pairs.len()).into_iter().rev();
rev_iter.next();
for i in rev_iter {
let prev = pairs.pop().unwrap();
if let Some(ConsCell { car: _, cdr }) = pairs.get_mut(i).map(Gc::get_mut).flatten() {
*cdr = Some(prev)
} else {
// Gc::get_mut only fails if the cell were aliased, which it cannot be yet.
unreachable!()
}
}
pairs.pop().map(SteelVal::Pair).ok_or_else(|| {
SteelErr::new(
ErrorKind::Generic,
"list-pair broke inside build_in_list_normal_ier".to_string(),
)
})
// rev_iter.next();
// for (i, val) in rev_iter {
// }
// unimplemented!()
}
/// Builds a cons list from a double-ended iterator of values by walking it
/// in REVERSE: the last element becomes the innermost cell, and each earlier
/// element wraps the previously built cell as its `cdr`. The final push is
/// the head of the list; empty input yields the empty vector.
pub fn built_in_list_func_iter<I>(args: I) -> Result<SteelVal>
where
I: DoubleEndedIterator<Item = SteelVal>,
{
let mut args = args.rev();
let mut pairs = Vec::new();
match args.next() {
Some(car) => {
// Tail of the list: last element, no cdr.
pairs.push(Gc::new(ConsCell::new(car, None)));
}
_ => {
return Ok(SteelVal::VectorV(Gc::new(Vector::new())));
}
}
for (i, val) in args.enumerate() {
// pairs[i] is the cell built on the previous iteration.
pairs.push(Gc::new(ConsCell::new(val, Some(Gc::clone(&pairs[i])))));
}
pairs.pop().map(SteelVal::Pair).ok_or_else(|| {
SteelErr::new(
ErrorKind::Generic,
"list-pair broke inside built_in_list_func_iter".to_string(),
)
})
// unimplemented!()
}
/// Fallible variant of `built_in_list_func_iter`: the iterator yields
/// `Result<SteelVal>` and the first `Err` aborts construction via `?`.
pub fn built_in_list_func_iter_result<I>(args: I) -> Result<SteelVal>
where
I: DoubleEndedIterator<Item = Result<SteelVal>>,
{
let mut args = args.rev();
let mut pairs = Vec::new();
match args.next() {
Some(car) => {
// Tail cell first (reversed iteration).
pairs.push(Gc::new(ConsCell::new(car?, None)));
}
_ => {
// No elements at all: the empty vector is the empty list.
return Ok(SteelVal::VectorV(Gc::new(Vector::new())));
}
}
for (i, val) in args.enumerate() {
pairs.push(Gc::new(ConsCell::new(val?, Some(Gc::clone(&pairs[i])))));
}
pairs.pop().map(SteelVal::Pair).ok_or_else(|| {
SteelErr::new(
ErrorKind::Generic,
"list-pair broke in built_in_list_func_iter_result".to_string(),
)
})
// unimplemented!()
}
// pub fn built_is_list_func_normal_iter<I>(args: I) -> Result<Gc<SteelVal>>
// where
// I: Iterator<Item = Gc<SteelVal>>,
// {
// // unimplemented!()
// let mut args = args;
// let mut pairs = Vec::new();
// match (args.next(), args.next()) {
// (cdr, Some(car)) => {
// pairs.push(Gc::new(SteelVal::Pair(car, cdr)));
// }
// (Some(cdr), None) => {
// pairs.push(Gc::new(SteelVal::Pair(cdr, None)));
// }
// (_, _) => {
// return Ok(Gc::new(SteelVal::VectorV(Vector::new())));
// }
// }
// for (i, val) in args.enumerate() {
// pairs.push(Gc::new(SteelVal::Pair(val, Some(Gc::clone(&pairs[i])))));
// }
// pairs
// .pop()
// .ok_or_else(|| SteelErr::ContractViolation("list-pair broke".to_string(), None))
// // unimplemented!()
// }
// TODO add a function that does the same but takes an iterator
// This definitely needs to get looked at
// This could lead to nasty speed ups for map/filter
/// Returns the builtin `list` constructor as a plain fn pointer: builds a
/// cons list from its argument slice, cloning each value. Same reversed
/// construction scheme as `built_in_list_func_iter`.
pub fn built_in_list_func() -> fn(&[SteelVal]) -> Result<SteelVal> {
|args: &[SteelVal]| -> Result<SteelVal> {
let mut args = args.into_iter().rev();
let mut pairs = Vec::new();
match args.next() {
Some(car) => {
// Last argument becomes the tail cell.
pairs.push(Gc::new(ConsCell::new(car.clone(), None)));
}
_ => {
// (list) with no arguments -> empty vector (empty list).
return Ok(SteelVal::VectorV(Gc::new(Vector::new())));
}
}
for (i, val) in args.enumerate() {
pairs.push(Gc::new(ConsCell::new(
val.clone(),
Some(Gc::clone(&pairs[i])),
)));
}
pairs.pop().map(SteelVal::Pair).ok_or_else(|| {
SteelErr::new(
ErrorKind::Generic,
"list-pair broke inside built_in_list_func".to_string(),
)
})
}
}
/// Builds a cons list directly from a slice of values (the "flat" variant
/// of `built_in_list_func`, callable without going through a fn pointer).
/// Elements are cloned; an empty slice produces the empty vector.
pub fn built_in_list_func_flat(args: &[SteelVal]) -> Result<SteelVal> {
let mut args = args.into_iter().rev();
let mut pairs = Vec::new();
match args.next() {
Some(car) => {
pairs.push(Gc::new(ConsCell::new(car.clone(), None)));
}
_ => {
return Ok(SteelVal::VectorV(Gc::new(Vector::new())));
}
}
for (i, val) in args.enumerate() {
// Each new cell points at the cell built in the previous iteration.
pairs.push(Gc::new(ConsCell::new(
val.clone(),
Some(Gc::clone(&pairs[i])),
)));
}
pairs.pop().map(SteelVal::Pair).ok_or_else(|| {
SteelErr::new(
ErrorKind::Generic,
"list-pair broke inside built_in_list_func_flat".to_string(),
)
})
}
/// Builtin `length`: the number of elements in a list or vector.
/// Vectors report their stored length; lists are walked and counted.
pub fn list_length() -> SteelVal {
    SteelVal::FuncV(|args: &[SteelVal]| -> Result<SteelVal> {
        if args.len() != 1 {
            stop!(ArityMismatch => "length takes one argument");
        }
        match &args[0] {
            SteelVal::VectorV(v) => Ok(SteelVal::IntV(v.len() as isize)),
            SteelVal::Pair(_) => {
                // Traverse the cons chain once and count the cells.
                let count = SteelVal::iter(args[0].clone()).count();
                Ok(SteelVal::IntV(count as isize))
            }
            _ => stop!(TypeMismatch => "length expects a list"),
        }
    })
}
/// Builds a cons list from an OWNED `Vec` of values, moving each element
/// instead of cloning (unlike `built_in_list_func_flat`, which borrows).
pub fn built_in_list_func_flat_non_gc(args: Vec<SteelVal>) -> Result<SteelVal> {
let mut args = args.into_iter().rev();
let mut pairs = Vec::new();
match args.next() {
Some(car) => {
// Last value becomes the tail cell.
pairs.push(Gc::new(ConsCell::new(car, None)));
}
_ => {
return Ok(SteelVal::VectorV(Gc::new(Vector::new())));
}
}
for (i, val) in args.enumerate() {
pairs.push(Gc::new(ConsCell::new(val, Some(Gc::clone(&pairs[i])))));
}
pairs.pop().map(SteelVal::Pair).ok_or_else(|| {
SteelErr::new(
ErrorKind::Generic,
"list-pair broke inside built_in_list_func_flat_non_gc".to_string(),
)
})
}
}
// Unit tests for the ListOperations builtins. Each test applies a builtin
// through `apply_function` and checks either the produced value or the
// error kind.
#[cfg(test)]
mod list_operation_tests {
use super::*;
use crate::rerrs::ErrorKind;
use crate::throw;
use im_rc::vector;
// Unwraps a builtin FuncV and invokes it with the given argument vector.
fn apply_function(func: SteelVal, args: Vec<SteelVal>) -> Result<SteelVal> {
func.func_or_else(throw!(BadSyntax => "list tests"))
.unwrap()(&args)
}
#[test]
fn cons_test_normal_input() {
let args = vec![SteelVal::IntV(1), SteelVal::IntV(2)];
let res = apply_function(ListOperations::cons(), args);
let expected = SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(SteelVal::IntV(2), None))),
)));
assert_eq!(res.unwrap(), expected);
}
#[test]
fn cons_single_input() {
let args = vec![SteelVal::IntV(1)];
let res = apply_function(ListOperations::cons(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn cons_no_input() {
let args = vec![];
let res = apply_function(ListOperations::cons(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
// Consing onto the empty vector (the empty list) yields a one-cell list.
#[test]
fn cons_with_empty_list() {
let args = vec![SteelVal::IntV(1), SteelVal::VectorV(Gc::new(Vector::new()))];
let res = apply_function(ListOperations::cons(), args);
let expected = SteelVal::Pair(Gc::new(ConsCell::new(SteelVal::IntV(1), None)));
assert_eq!(res.unwrap(), expected);
}
// A non-empty vector is NOT flattened: it becomes the single cdr value.
#[test]
fn cons_with_non_empty_vector() {
let args = vec![
SteelVal::IntV(1),
SteelVal::VectorV(Gc::new(vector![SteelVal::IntV(2)])),
];
let res = apply_function(ListOperations::cons(), args);
let expected = SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(
SteelVal::VectorV(Gc::new(vector![SteelVal::IntV(2)])),
None,
))),
)));
assert_eq!(res.unwrap(), expected);
}
#[test]
fn car_normal_input() {
let args = vec![SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(SteelVal::IntV(2), None))),
)))];
let res = apply_function(ListOperations::car(), args);
let expected = SteelVal::IntV(1);
assert_eq!(res.unwrap(), expected);
}
#[test]
fn car_bad_input() {
let args = vec![SteelVal::IntV(1)];
let res = apply_function(ListOperations::car(), args);
let expected = ErrorKind::TypeMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn car_too_many_args() {
let args = vec![SteelVal::IntV(1), SteelVal::IntV(2)];
let res = apply_function(ListOperations::car(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn cdr_normal_input_2_elements() {
let args = vec![SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(SteelVal::IntV(2), None))),
)))];
let res = apply_function(ListOperations::cdr(), args);
let expected = SteelVal::Pair(Gc::new(ConsCell::new(SteelVal::IntV(2), None)));
assert_eq!(res.unwrap(), expected);
}
#[test]
fn cdr_normal_input_3_elements() {
let args = vec![SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(
SteelVal::IntV(2),
Some(Gc::new(ConsCell::new(SteelVal::IntV(3), None))),
))),
)))];
let res = apply_function(ListOperations::cdr(), args);
let expected = SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(2),
Some(Gc::new(ConsCell::new(SteelVal::IntV(3), None))),
)));
assert_eq!(res.unwrap(), expected);
}
#[test]
fn cdr_bad_input() {
let args = vec![SteelVal::IntV(1)];
let res = apply_function(ListOperations::cdr(), args);
let expected = ErrorKind::TypeMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn cdr_too_many_args() {
let args = vec![SteelVal::NumV(1.0), SteelVal::NumV(2.0)];
let res = apply_function(ListOperations::cdr(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
// cdr of a single-element list is the empty list (empty vector).
#[test]
fn cdr_single_element_list() {
let args = vec![SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::NumV(1.0),
None,
)))];
let res = apply_function(ListOperations::cdr(), args);
let expected = SteelVal::VectorV(Gc::new(Vector::new()));
assert_eq!(res.unwrap(), expected);
}
#[test]
fn range_tests_arity_too_few() {
let args = vec![SteelVal::NumV(1.0)];
let res = apply_function(ListOperations::range(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn range_test_arity_too_many() {
let args = vec![
SteelVal::NumV(1.0),
SteelVal::NumV(2.0),
SteelVal::NumV(3.0),
];
let res = apply_function(ListOperations::range(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
// (range 0 3) -> '(0 1 2): end bound is exclusive.
#[test]
fn range_test_normal_input() {
let args = vec![SteelVal::IntV(0), SteelVal::IntV(3)];
let res = apply_function(ListOperations::range(), args);
let expected = SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(0),
Some(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(SteelVal::IntV(2), None))),
))),
)));
assert_eq!(res.unwrap(), expected);
}
#[test]
fn list_to_vec_arity_too_few() {
let args = vec![];
let res = apply_function(ListOperations::list_to_vec(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn list_to_vec_arity_too_many() {
let args = vec![SteelVal::NumV(1.0), SteelVal::NumV(2.0)];
let res = apply_function(ListOperations::list_to_vec(), args);
let expected = ErrorKind::ArityMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn list_to_vec_bad_arg() {
let args = vec![SteelVal::NumV(1.0)];
let res = apply_function(ListOperations::list_to_vec(), args);
let expected = ErrorKind::TypeMismatch;
assert_eq!(res.unwrap_err().kind(), expected);
}
#[test]
fn list_to_vec_normal() {
let args = vec![SteelVal::Pair(Gc::new(ConsCell::new(
SteelVal::IntV(1),
Some(Gc::new(ConsCell::new(SteelVal::IntV(2), None))),
)))];
let res = apply_function(ListOperations::list_to_vec(), args);
let expected = SteelVal::VectorV(Gc::new(vector![SteelVal::IntV(1), SteelVal::IntV(2)]));
assert_eq!(res.unwrap(), expected);
}
}
|
//! Even Fibonacci numbers
//!
//! Problem 2
//!
//! Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the
//! first 10 terms will be:
//!
//! 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
//!
//! By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the
//! even-valued terms.
use utils;
/// Sum of the even Fibonacci numbers whose values do not exceed `num`.
fn run(num: u64) -> u64 {
    let mut total = 0;
    for value in utils::FibRange::with_max(num) {
        if value % 2 == 0 {
            total += value;
        }
    }
    total
}
#[cfg(test)]
mod tests {
use super::*;
// 4_613_732 is the known answer to Project Euler problem 2.
#[test]
fn test_p002() {
assert_eq!(run(100), 44);
assert_eq!(run(4_000_000), 4_613_732);
}
}
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use ess_store::{EssStore, SavedEss};
use failure;
use fidl::endpoints2::create_endpoints;
use fidl_sme;
use future_util::retry_until;
use futures::{prelude::*, channel::oneshot, channel::mpsc, future::Either, stream};
use state_machine::{self, IntoStateExt};
use std::sync::Arc;
use zx::prelude::*;
// Delay between auto-connect attempts while no network is reachable.
const AUTO_CONNECT_RETRY_SECONDS: u64 = 10;
// Timeout handed to the SME for each auto-connect scan.
const AUTO_CONNECT_SCAN_TIMEOUT_SECONDS: u8 = 20;
/// Cheaply cloneable handle for sending manual requests into the client
/// station state machine.
#[derive(Clone)]
pub struct Client {
req_sender: mpsc::UnboundedSender<ManualRequest>,
}
impl Client {
/// Queues a manual connect request; fails if the station's state machine
/// has already terminated (receiver dropped).
pub fn connect(&self, request: ConnectRequest) -> Result<(), failure::Error> {
self.req_sender.unbounded_send(ManualRequest::Connect(request))
.map_err(|_| format_err!("Station does not exist anymore"))
}
}
/// A user-initiated connect: target SSID, password, and a oneshot channel
/// on which the final result code is reported back.
pub struct ConnectRequest {
pub ssid: Vec<u8>,
pub password: Vec<u8>,
pub responder: oneshot::Sender<fidl_sme::ConnectResultCode>,
}
// Requests that can interrupt the automatic state machine.
enum ManualRequest {
Connect(ConnectRequest),
}
}
/// Creates a client station for `iface_id`: returns the request handle and a
/// future that drives the state machine until either the state machine errors
/// out or the SME channel closes (interface removed) — whichever happens first.
pub fn new_client(iface_id: u16,
sme: fidl_sme::ClientSmeProxy,
ess_store: Arc<EssStore>)
-> (Client, impl Future<Item = (), Error = Never>)
{
let (req_sender, req_receiver) = mpsc::unbounded();
let sme_event_stream = sme.take_event_stream();
let services = Services {
sme,
ess_store: Arc::clone(&ess_store)
};
// Start in the auto-connect state; errors are logged, never propagated.
let state_machine = auto_connect_state(services, req_receiver.next()).into_future()
.map(Never::never_into::<()>)
.recover::<Never, _>(move |e| eprintln!("wlancfg: Client station state machine \
for iface {} terminated with an error: {}", iface_id, e));
// The event stream ending (or erroring) means the iface went away.
let removal_watcher = sme_event_stream.for_each(|_| Ok(()))
.map(move |_| println!("wlancfg: Client station removed (iface {})", iface_id))
.recover::<Never, _>(move |e|
println!("wlancfg: Removing client station (iface {}) because of an error: {}", iface_id, e));
// Run until either side finishes.
let fut = state_machine.select(removal_watcher)
.map(|_| ())
.recover(|_| ());
let client = Client { req_sender };
(client, fut)
}
// One step of the station state machine; errors terminate the machine.
type State = state_machine::State<failure::Error>;
// Pending "next manual request" future, carried between states so no
// request is lost while a state is running.
type NextReqFut = stream::StreamFuture<mpsc::UnboundedReceiver<ManualRequest>>;
// Shared handles every state needs: the SME proxy and the saved-network store.
#[derive(Clone)]
struct Services {
sme: fidl_sme::ClientSmeProxy,
ess_store: Arc<EssStore>,
}
/// State: repeatedly try to auto-connect to a saved network, until either a
/// connection succeeds (-> connected state) or the user sends a manual
/// request (-> handle it), whichever resolves first.
fn auto_connect_state(services: Services, next_req: NextReqFut) -> State {
println!("wlancfg: Starting auto-connect loop");
auto_connect(services.clone()).select(next_req)
.map_err(|e| e.either(|(left, _)| left,
|((right, _), _)| right.never_into()))
.and_then(move |r| match r {
Either::Left((_ssid, next_req)) => Ok(connected_state(services, next_req)),
Either::Right(((req, req_stream), _))
=> handle_manual_request(services, req, req_stream)
})
.into_state()
}
/// Dispatches a manual request to its state. `None` means the request sender
/// (every `Client` handle) was dropped, which should not happen while the
/// station is alive — treat it as an error that terminates the machine.
fn handle_manual_request(services: Services,
req: Option<ManualRequest>,
req_stream: mpsc::UnboundedReceiver<ManualRequest>)
-> Result<State, failure::Error>
{
match req {
Some(ManualRequest::Connect(req)) => {
Ok(manual_connect_state(services, req_stream.next(), req))
},
None => bail!("The stream of user requests ended unexpectedly")
}
}
/// Retries `attempt_auto_connect` every AUTO_CONNECT_RETRY_SECONDS until an
/// attempt resolves with a connected SSID; that SSID is the future's value.
fn auto_connect(services: Services)
-> impl Future<Item = Vec<u8>, Error = failure::Error>
{
retry_until(AUTO_CONNECT_RETRY_SECONDS.seconds(),
move || attempt_auto_connect(services.clone()))
}
/// One auto-connect pass: scan, keep only networks with saved credentials,
/// then try them in scan order until one connects. Resolves to
/// `Some(ssid)` on success, `None` if every candidate failed.
fn attempt_auto_connect(services: Services)
-> impl Future<Item = Option<Vec<u8>>, Error = failure::Error>
{
start_scan_txn(&services.sme)
.into_future()
.and_then(fetch_scan_results)
.and_then(move |results| {
// Pair each scanned ESS with its saved credentials, if any.
let saved_networks = {
let services = services.clone();
results.into_iter()
.filter_map(move |ess| {
services.ess_store.lookup(&ess.best_bss.ssid)
.map(|saved_ess| (ess.best_bss.ssid, saved_ess))
})
};
// skip_while drops failed attempts; the first remaining item
// (if any) is the network we connected to.
stream::iter_ok(saved_networks)
.skip_while(move |(ssid, saved_ess)| {
connect_to_saved_network(&services.sme, ssid, saved_ess)
.map(|connected| !connected)
})
.next()
.map_err(|(e, _stream)| e)
})
.map(|(item, _)| item.map(|(ssid, _)| ssid))
}
/// Attempts to connect to one saved network; resolves to `true` on success,
/// `false` on any non-success result code (logged, not treated as an error).
fn connect_to_saved_network(sme: &fidl_sme::ClientSmeProxy, ssid: &[u8], saved_ess: &SavedEss)
-> impl Future<Item = bool, Error = failure::Error>
{
let ssid_str = String::from_utf8_lossy(ssid).into_owned();
println!("wlancfg: Auto-connecting to '{}'", ssid_str);
start_connect_txn(sme, &ssid, &saved_ess.password)
.into_future()
.and_then(wait_until_connected)
.map(move |r| match r {
fidl_sme::ConnectResultCode::Success => {
println!("wlancfg: Auto-connected to '{}'", ssid_str);
true
},
other => {
println!("wlancfg: Failed to auto-connect to '{}': {:?}", ssid_str, other);
false
},
})
}
/// State: connect to the network the user asked for. The password is saved
/// first (so it is available for future auto-connects), then the connect
/// attempt races against the next manual request: a newer request cancels
/// this one, a result transitions to connected or back to auto-connect.
fn manual_connect_state(services: Services, next_req: NextReqFut, req: ConnectRequest) -> State {
println!("wlancfg: Connecting to '{}' because of a manual request from the user",
String::from_utf8_lossy(&req.ssid));
// Persist credentials; a storage failure is logged but does not block the
// connect attempt.
services.ess_store.store(req.ssid.clone(), SavedEss {
password: req.password.clone()
}).unwrap_or_else(|e| eprintln!("wlancfg: Failed to store network password: {}", e));
let connect_fut = start_connect_txn(&services.sme, &req.ssid, &req.password)
.into_future()
.and_then(wait_until_connected);
connect_fut.select(next_req)
.map_err(|e| e.either(|(left, _)| left,
|((right, _), _)| right.never_into()))
.and_then(move |r| match r {
Either::Left((error_code, next_req)) => {
// The requester may have dropped its receiver; ignore send failure.
req.responder.send(error_code).unwrap_or_else(|_| ());
Ok(match error_code {
fidl_sme::ConnectResultCode::Success => {
println!("wlancfg: Successfully connected to '{}'", String::from_utf8_lossy(&req.ssid));
connected_state(services, next_req)
},
other => {
println!("wlancfg: Failed to connect to '{}': {:?}",
String::from_utf8_lossy(&req.ssid), other);
auto_connect_state(services, next_req)
}
})
},
// A newer manual request arrived first: report Canceled and handle it.
Either::Right(((new_req, req_stream), _coonect_fut)) => {
req.responder.send(fidl_sme::ConnectResultCode::Canceled).unwrap_or_else(|_| ());
handle_manual_request(services, new_req, req_stream)
}
})
.into_state()
}
/// State: connected. Simply waits for the next manual request; there is no
/// disconnect detection yet (see TODO).
fn connected_state(services: Services, next_req: NextReqFut) -> State {
// TODO(gbonik): monitor connection status and jump back to auto-connect state when disconnected
next_req
.map_err(|(e, _stream)| e.never_into())
.and_then(|(req, req_stream)| {
handle_manual_request(services, req, req_stream)
}).into_state()
}
/// Kicks off a scan on the SME and returns the transaction proxy whose event
/// stream will deliver the results.
fn start_scan_txn(sme: &fidl_sme::ClientSmeProxy)
-> Result<fidl_sme::ScanTransactionProxy, failure::Error>
{
let (scan_txn, remote) = create_endpoints()?;
let mut req = fidl_sme::ScanRequest {
timeout: AUTO_CONNECT_SCAN_TIMEOUT_SECONDS,
};
sme.scan(&mut req, remote)?;
Ok(scan_txn)
}
/// Kicks off a connect attempt on the SME and returns the transaction proxy
/// whose event stream will deliver the result code.
fn start_connect_txn(sme: &fidl_sme::ClientSmeProxy, ssid: &[u8], password: &[u8])
-> Result<fidl_sme::ConnectTransactionProxy, failure::Error>
{
let (connect_txn, remote) = create_endpoints()?;
let mut req = fidl_sme::ConnectRequest { ssid: ssid.to_vec(), password: password.to_vec() };
sme.connect(&mut req, Some(remote))?;
Ok(connect_txn)
}
/// Waits for the connect transaction's OnFinished event and yields its result
/// code; the server closing the channel without responding is an error.
fn wait_until_connected(txn: fidl_sme::ConnectTransactionProxy)
-> impl Future<Item = fidl_sme::ConnectResultCode, Error = failure::Error>
{
txn.take_event_stream()
.filter_map(|e| Ok(match e {
fidl_sme::ConnectTransactionEvent::OnFinished{ code } => Some(code),
}))
.next()
.map_err(|(e, _stream)| e)
.and_then(|(code, _stream)| code.ok_or_else(||
format_err!("Server closed the ConnectTransaction channel before sending a response"))
)
}
/// Accumulates all OnResult batches from a scan transaction into one Vec.
/// A scan error is logged and treated as "no more results", not a failure.
fn fetch_scan_results(txn: fidl_sme::ScanTransactionProxy)
-> impl Future<Item = Vec<fidl_sme::EssInfo>, Error = failure::Error>
{
txn.take_event_stream().fold(Vec::new(), |mut old_aps, event| {
match event {
fidl_sme::ScanTransactionEvent::OnResult { aps } => {
old_aps.extend(aps);
Ok(old_aps)
},
fidl_sme::ScanTransactionEvent::OnFinished { } => Ok(old_aps),
fidl_sme::ScanTransactionEvent::OnError { error } => {
eprintln!("wlancfg: Scanning failed with error: {:?}", error);
Ok(old_aps)
}
}
}).err_into()
}
|
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use super::VestingFunction;
use address::Address;
use clock::ChainEpoch;
use num_bigint::biguint_ser::{BigUintDe, BigUintSer};
use num_bigint::BigUint;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use vm::TokenAmount;
/// Number of token units in an abstract "FIL" token.
/// The network works purely in the indivisible token amounts. This constant converts to a fixed decimal with more
/// human-friendly scale.
pub const TOKEN_PRECISION: u64 = 1_000_000_000_000_000_000;
lazy_static! {
/// Target reward released to each block winner.
pub static ref BLOCK_REWARD_TARGET: BigUint = BigUint::from(100u8) * TOKEN_PRECISION;
}
// Reward vesting configuration: rewards currently vest immediately
// (no vesting function, zero-epoch period).
pub(super) const REWARD_VESTING_FUNCTION: VestingFunction = VestingFunction::None;
pub(super) const REWARD_VESTING_PERIOD: ChainEpoch = 0;
/// Parameters for the AwardBlockReward actor method.
#[derive(Clone, Debug, PartialEq)]
pub struct AwardBlockRewardParams {
// Address of the miner being rewarded.
pub miner: Address,
// Penalty deducted from the reward.
pub penalty: TokenAmount,
// Gas fees attributed to the block.
pub gas_reward: TokenAmount,
// Number of winning tickets for this block.
pub ticket_count: u64,
}
// Serialized as a 4-tuple (miner, penalty, gas_reward, ticket_count), with
// the big integers wrapped in BigUintSer. Field order is the wire format and
// must stay in sync with the Deserialize impl below.
impl Serialize for AwardBlockRewardParams {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
(
&self.miner,
BigUintSer(&self.penalty),
BigUintSer(&self.gas_reward),
&self.ticket_count,
)
.serialize(serializer)
}
}
// Mirror of the Serialize impl: decode the 4-tuple in the same field order,
// unwrapping the BigUintDe wrappers.
impl<'de> Deserialize<'de> for AwardBlockRewardParams {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let (miner, BigUintDe(penalty), BigUintDe(gas_reward), ticket_count) =
Deserialize::deserialize(deserializer)?;
Ok(Self {
miner,
penalty,
gas_reward,
ticket_count,
})
}
}
|
use std::path::PathBuf;
use crate::cfg::VarName;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum CfgError {
#[error("setup not found `{0}`")]
SetupNotFound(String),
#[error("project not found `{0:?}`")]
ProjectNotFound(PathBuf),
#[error("project `{0:?}` already added")]
ProjectAlreadyAdded(PathBuf),
#[error("private env dir not found for `{0}`")]
PrivateEnvDirNotFound(String),
#[error("private env dir must be an absolute path `{0:?}` for `{1}`")]
PrivateEnvDirRelativePath(PathBuf, String),
#[error("public env dir not found for `{0}`")]
PublicEnvDirNotFound(String),
#[error("public env dir already unset for `{0}`")]
PublicEnvAlreadyUnset(String),
#[error("private env dir already unset for `{0}`")]
PrivateEnvAlreadyUnset(String),
#[error("there is two `{0}` env `{1:?}` and `{2:?}`, please remove once")]
EnvExistTwice(String, PathBuf, PathBuf),
#[error("env {0} not found")]
EnvNotFound(String),
#[error("format not found for array_vars `{0}`")]
FormatNotFound(VarName),
#[error("delimiter not found for array_vars `{0}`")]
DelimiterNotFound(VarName),
#[error("unknown cfg error")]
Unknown,
}
|
use std::collections::{HashSet};
// Stand-in for a real camera device; detection data is simulated.
struct WebCamera;
// Emotions a face detector could report.
#[derive(Debug)]
enum VisibleEmotion {
Anger,
Contempt,
Disgust,
Fear,
Happiness,
Neutral,
Sadness,
Surprise
}
// Rectangle (in pixels) where a face was detected.
#[derive(Debug,Clone)]
struct BoundingBox {
top: u64,
left: u64,
height: u64,
width: u64
}
// Visual overlays that can be applied to a detected region.
#[derive(Debug)]
enum CameraFilters {
Sparkles,
Rain,
Fire,
Disco
}
impl WebCamera {
    /// Simulated detections standing in for a real camera feed: four face
    /// bounding boxes, each tagged with an emotion. Extracted so the fixture
    /// is defined once instead of being duplicated in both methods.
    fn simulated_emotions() -> Vec<(BoundingBox, VisibleEmotion)> {
        vec![
            (BoundingBox { top: 1, left: 1, height: 1, width: 1 }, VisibleEmotion::Anger),
            (BoundingBox { top: 1, left: 1, height: 1, width: 1 }, VisibleEmotion::Sadness),
            (BoundingBox { top: 4, left: 4, height: 1, width: 1 }, VisibleEmotion::Surprise),
            (BoundingBox { top: 8, left: 1, height: 1, width: 1 }, VisibleEmotion::Neutral),
        ]
    }

    /// Maps each detected emotion to a single value via `translate`,
    /// keeping the bounding box it was detected in.
    fn map_emotion<T, F>(&self, translate: F) -> Vec<(BoundingBox, T)>
    where
        F: Fn(VisibleEmotion) -> T,
    {
        Self::simulated_emotions()
            .into_iter()
            .map(|(bb, emt)| (bb, translate(emt)))
            .collect::<Vec<(BoundingBox, T)>>()
    }

    /// Maps each detected emotion to zero or more values via `translate`,
    /// pairing every produced value with a clone of its bounding box.
    fn flatmap_emotion<T, F, U: IntoIterator<Item = T>>(&self, mut translate: F) -> Vec<(BoundingBox, T)>
    where
        F: FnMut(VisibleEmotion) -> U,
    {
        Self::simulated_emotions()
            .into_iter()
            .flat_map(|(bb, emt)| translate(emt).into_iter().map(move |t| (bb.clone(), t)))
            .collect::<Vec<(BoundingBox, T)>>()
    }
}
// Demo of map / flat_map over various collections, ending with the
// WebCamera emotion-translation examples.
fn main()
{
// map over a Vec: square each element.
let m: Vec<u64> = vec![1, 2, 3];
let n: Vec<u64> = m.iter().map(|x| { x*x }).collect();
println!("{:?}", m);
println!("{:?}", n);
// map over a HashSet: halving collapses 2/3 and 4/5 pairs, so the
// resulting set is smaller than the input.
let mut a: HashSet<u64> = HashSet::new();
a.insert(1);
a.insert(2);
a.insert(3);
a.insert(4);
let b: HashSet<u64> = a.iter().cloned().map(|x| x/2).collect();
println!("{:?}", a);
println!("{:?}", b);
// flat_map: split each sentence and flatten all words into one Vec.
let sentences = vec!["this is a sentence","paragraphs have many sentences"];
let words:Vec<&str> = sentences.iter().flat_map(|&x| x.split(" ")).collect();
println!("{:?}", sentences);
println!("{:?}", words);
// collect a Vec into a HashSet (deduplicates the halved values).
let v: Vec<u64> = vec![1, 2, 3];
let s: HashSet<u64> = v.iter().cloned().map(|x| x/2).collect();
println!("{:?}", v);
println!("{:?}", s);
let camera = WebCamera;
// One-to-one translation: flip each emotion to its "opposite".
let emotes: Vec<(BoundingBox,VisibleEmotion)> = camera.map_emotion(|emt| {
match emt {
VisibleEmotion::Anger |
VisibleEmotion::Contempt |
VisibleEmotion::Disgust |
VisibleEmotion::Fear |
VisibleEmotion::Sadness => VisibleEmotion::Happiness,
VisibleEmotion::Neutral |
VisibleEmotion::Happiness |
VisibleEmotion::Surprise => VisibleEmotion::Sadness
}
});
// One-to-many translation: each emotion yields one or more filters.
let filters: Vec<(BoundingBox,CameraFilters)> = camera.flatmap_emotion(|emt| {
match emt {
VisibleEmotion::Anger |
VisibleEmotion::Contempt |
VisibleEmotion::Disgust |
VisibleEmotion::Fear |
VisibleEmotion::Sadness => vec![CameraFilters::Sparkles, CameraFilters::Rain],
VisibleEmotion::Neutral |
VisibleEmotion::Happiness |
VisibleEmotion::Surprise => vec![CameraFilters::Disco]
}
});
println!("{:?}",emotes);
println!("{:?}",filters);
}
|
extern crate num_cpus;
#[macro_use]
extern crate lazy_static;
pub mod op;
pub mod collect_block;
pub mod collect_stream;
pub mod collect_par;
// Cross-checks each collect implementation against a straightforwardly
// computed expected result for the same ops pipeline.
#[cfg(test)]
mod test {
use super::*;
use op::*;
use std::sync::Arc;
// Runs one implementation ($f) over $v with $ops and compares
// element-by-element against $expected. A macro (not a fn) so it can
// accept differently-typed collect functions.
macro_rules! correctness {
($name:expr, $f:expr, $v:expr, $ops:expr, $expected:expr) => {
let result = $f($v.iter().cloned(), $ops.clone());
assert_eq!(result.len(), $expected.len());
for i in 0..$expected.len() {
assert_eq!(result[i], $expected[i]);
}
}
}
// Applies the correctness check to every enabled implementation.
fn correctness_all(v: &[f64], ops: Vec<Op>, expected: &[f64]) {
correctness!(
"collect_block",
collect_block::collect_block,
v,
&ops,
expected
);
correctness!(
"collect_stream",
collect_stream::collect_stream,
v,
&ops,
expected
);
// correctness!(
// "collect_par",
// collect_par::collect_par,
// v,
// &ops,
// expected
// );
}
#[test]
fn map_test() {
let v: Vec<f64> = (0..1024).into_iter().map(|x| x as f64).collect();
let ops = vec![
Op::Map(Arc::new(|x| 2.0 * x)),
Op::Map(Arc::new(|x| x + 1.0)),
];
let mut expected: Vec<f64> = Vec::new();
for item in &v {
expected.push(*item * 2.0 + 1.0);
}
correctness_all(&v, ops, &expected);
}
#[test]
fn filter_test() {
let v: Vec<f64> = (0..1024).into_iter().map(|x| x as f64).collect();
let ops = vec![Op::Filter(Arc::new(|x| *x < 512.0))];
let expected: Vec<f64> =
(0..512).into_iter().map(|x| x as f64).collect();
correctness_all(&v, ops, &expected);
}
// All elements fall into a single group; aggregate is the group size.
#[test]
fn one_group_test() {
let v: Vec<f64> = (0..1024).into_iter().map(|x| x as f64).collect();
let ops = vec![
Op::GroupBy(
Arc::new(|_x| 1 as i64),
Arc::new(|v| v.len() as f64)
),
];
let expected = vec![1024.0];
correctness_all(&v, ops, &expected);
}
// Consecutive pairs share a group key, so every group has size 2.
#[test]
fn many_groups_test() {
let v: Vec<f64> = (0..1024).into_iter().map(|x| x as f64).collect();
let ops = vec![
Op::GroupBy(
Arc::new(|x| (x / 2.0) as i64),
Arc::new(|v| v.len() as f64)
),
];
let expected: Vec<f64> =
(0..512).into_iter().map(|_x| 2 as f64).collect();
correctness_all(&v, ops, &expected);
}
#[test]
fn map_filter_test() {
let v: Vec<f64> = (0..1024).into_iter().map(|x| x as f64).collect();
let ops = vec![
Op::Filter(Arc::new(|x| *x >= 512.0)),
Op::Map(Arc::new(|x| x - 512.0)),
];
let expected: Vec<f64> =
(0..512).into_iter().map(|x| x as f64).collect();
correctness_all(&v, ops, &expected);
}
// NOTE(review): iterates ~4 billion elements — slow by design; this checks
// that collect_stream processes a huge input without materializing it.
#[test]
fn streaming_test() {
let v = (0..(1024i64 * 1024 * 1024 * 4)).into_iter().map(
|x| x as f64,
);
let ops = vec![Op::Filter(Arc::new(|_x| false))];
collect_stream::collect_stream(v, ops);
}
}
|
pub mod embl;
pub mod fasta;
pub mod feature_table;
pub mod gff3;
pub mod parser; |
mod utils;
mod solutions;
use crate::utils::builder::create_list;
use crate::solutions::next_larger_nodes::*;
use crate::solutions::reverse_integer::*;
use crate::solutions::num_of_subarrays::*;
use crate::solutions::two_sum::*;
// Driver that exercises each implemented solution with sample inputs and
// prints the results for manual inspection.
fn main() {
println!("Reverse: {:?} to {}", 123, reverse_integer(123));
let v = two_sum([3, 2, 3].to_vec(), 6);
println!(r#"Two sum: {:?}"#, v);
test_next_larger_nodes(vec![2, 7, 4, 3, 5], vec![7, 0, 5, 5, 0]);
test_next_larger_nodes(vec![1, 7, 5, 1, 9, 2, 5, 1], vec![7, 9, 9, 9, 0, 5, 0, 0]);
test_next_larger_nodes(vec![2, 2, 5], vec![5, 5, 0]);
test_next_larger_nodes(vec![9, 7, 6, 7, 6, 9], vec![0, 9, 7, 9, 9, 0]);
test_next_larger_nodes(vec![4, 3, 2, 5, 1, 8, 10], vec![5, 5, 5, 8, 8, 10, 0]);
println!("{}", num_of_subarrays(vec![1,3,5]));
println!("{}", num_of_subarrays(vec![1,2,3,4,5,6,7]));
}
fn test_next_larger_nodes(in_v: Vec<i32>, out_v: Vec<i32>) {
println!("{:?}", in_v);
let res = next_larger_nodes(create_list(in_v));
//assert!(out_v == res);
println!("{:?}", res);
println!("{:?}", out_v);
println!("---------");
} |
use std::sync::{Arc, RwLock};
use tinyroute::{Agent, ToAddress, Message};
// * Be notified of a change, but not what changed
// * Have a reference, not to mirror the data (that would be silly)
#[derive(Debug, Copy, Clone)]
/// Kind of change observed on a stored value (notification only — the
/// payload is not carried; see the module comments above).
pub enum Op {
    /// The value was modified in place.
    Updated,
    /// The value was removed.
    Removed,
    // Created,
}
/// A single shared value paired with a tinyroute agent.
struct StoreOne<T, A: ToAddress> {
    // The stored value behind a shared read/write lock.
    inner: Arc<RwLock<T>>,
    // Agent whose message loop `run` drives; presumably the channel for
    // change notifications — TODO confirm intended protocol.
    agent: Agent<(), A>,
}
impl<T, A: ToAddress> StoreOne<T, A> {
    /// Wrap `inner` in shared, lockable storage and pair it with `agent`.
    pub fn new(inner: T, agent: Agent<(), A>) -> Self {
        Self {
            inner: Arc::new(RwLock::new(inner)),
            agent,
        }
    }

    /// Drive the agent's message loop until its channel closes.
    ///
    /// Messages are currently drained and discarded; the binding is named
    /// `_msg` to silence the unused-variable warning without changing behavior.
    pub async fn run(mut self) {
        while let Ok(_msg) = self.agent.recv().await {
            // TODO: react to incoming messages.
        }
    }
}
|
/// Smoke-test helper: prints a fixed marker line to stdout.
pub fn test() {
    println!("utils");
}
|
//! rust-tdlib is a wrapper for [TDlib (Telegram Database library)](https://core.telegram.org/tdlib).
//! It allows you to make all the things that you can do with real telegram. So, yes, you can build your own telegram client using Rust language.
//!
//! First of all you have to initialize client. Your steps:
//! 1. initialize [TdlibParameters](crate::types::TdlibParameters) with two required parameters: api_id and api_hash.
//! 2. use [ConsoleAuthStateHandler](crate::client::ConsoleAuthStateHandler) with default builder or write your own [AuthStateHandler](crate::client::AuthStateHandler).
//! 3. use them as parameters of a [ClientBuilder](crate::client::ClientBuilder).
//! 4. [start](crate::client::Client::start) the client.
//! 5. write your own code to interact with Telegram.
//! ```
//! use rust_tdlib::{types::{TdlibParameters, GetMe}, client::Client};
//! #[tokio::main]
//! async fn main() {
//! let tdlib_params = TdlibParameters::builder().api_id(env!("API_ID").parse::<i64>().unwrap()).api_hash(env!("API_HASH")).build();
//! let client = rust_tdlib::client::Client::builder().with_tdlib_parameters(tdlib_params).build();
//! client.start().await.unwrap();
//! let me = client.api().get_me(GetMe::builder().build()).await.unwrap();
//! eprintln!("{:?}", me);
//! }
//! ```
//!
//! You can read all updates received from the Telegram server, such as: new messages, chat updates, new chats, user updates and so on. All update variants are declared within [Update](crate::types::Update).
//! ```
//! use rust_tdlib::{types::{TdlibParameters, GetMe, TdType}, client::Client};
//! #[tokio::main]
//! async fn main() {
//! let tdlib_params = TdlibParameters::builder().api_id(env!("API_ID").parse::<i64>().unwrap()).api_hash(env!("API_HASH")).build();
//! let (sender, mut receiver) = tokio::sync::mpsc::channel::<TdType>(10);
//! let client = rust_tdlib::client::Client::builder().with_updates_sender(sender).with_tdlib_parameters(tdlib_params).build();
//! client.start().await.unwrap();
//! if let Some(message) = receiver.recv().await {
//! eprintln!("updates handler received {:?}", message);
//! }
//! }
//! ```
#![allow(clippy::large_enum_variant, clippy::borrowed_box)]
#[macro_use]
extern crate serde_derive;
#[cfg(feature = "client")]
#[macro_use]
extern crate log;
#[cfg(feature = "client")]
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "client")]
pub mod client;
pub mod errors;
pub mod types;
|
use crate::*;
use std::{collections::HashMap, sync::Mutex, time::Instant};
/// The Riddle audio system core state.
///
/// Manages underlying audio device and riddle audio objects' states. The recommended
/// way to use this type is to let the `riddle` crate initialize and manage it for you.
///
/// It is possible to manage the audio system state independently - the most important
/// thing to note is that [`ext::AudioSystemExt::process_frame`] must be called periodically
/// for [`ClipPlayer`] to work properly. This is **not** something that needs doing if
/// using the `riddle` crate to manage the [`AudioSystem`] automatically.
#[derive(Clone)]
pub struct AudioSystem {
    // Shared handle: every clone of `AudioSystem` points at the same
    // internal state, so cloning is cheap (an `Arc` bump).
    pub(crate) internal: std::sync::Arc<AudioSystemInternal>,
}
pub(crate) struct AudioSystemInternal {
    // Handle used to start playback on the rodio output stream.
    pub stream_handle: rodio::OutputStreamHandle,
    // In-flight fades, keyed by `Fade::key()`; ticked from `process_frame`
    // via `tick_fades`, which drops fades whose `update` returns false.
    pub fades: Mutex<HashMap<FadeKey, Fade>>,
}
impl AudioSystem {
    /// Register a fade, merging it with any in-flight fade for the same key
    /// so concurrent fades on one target compose instead of clobbering.
    pub(crate) fn register_fade(&self, fade: Fade) {
        let mut fades = self.internal.fades.lock().unwrap();
        let key = fade.key();
        let merged = match fades.remove(&key) {
            Some(existing) => Fade::merge_pair(existing, fade),
            None => fade,
        };
        fades.insert(key, merged);
    }

    /// Advance every fade to `now`, discarding the ones that report
    /// completion from `update`.
    fn tick_fades(&self, now: Instant) {
        self.internal
            .fades
            .lock()
            .unwrap()
            .retain(|_, fade| fade.update(now));
    }
}
impl ext::AudioSystemExt for AudioSystem {
    /// Open the default rodio output device and build the cloneable audio
    /// system handle plus the state that must stay on the creating thread.
    fn new_system_pair() -> Result<(AudioSystem, AudioMainThreadState)> {
        let (stream, stream_handle) = rodio::OutputStream::try_default()
            .map_err(|_| AudioError::InitFailed("Failed to get rodio output device"))?;
        // The stream object itself is parked in the main-thread state so it
        // stays alive for as long as playback is needed (field is never read).
        let main_thread_state = AudioMainThreadState { _stream: stream };
        let internal = AudioSystemInternal {
            stream_handle,
            fades: Mutex::new(HashMap::new()),
        };
        Ok((
            AudioSystem {
                internal: std::sync::Arc::new(internal),
            },
            main_thread_state,
        ))
    }

    /// Advance time-based state (fades) by one frame. Must be called
    /// periodically for `ClipPlayer` fades to progress (see type docs).
    fn process_frame(&self) {
        let now = Instant::now();
        self.tick_fades(now);
    }
}
/// State owned by the thread that created the audio output.
pub struct AudioMainThreadState {
    // Held only to keep the rodio output stream alive; never read.
    _stream: rodio::OutputStream,
}
|
use chrono::DateTime;
pub struct Enablement {
enabled: bool,
start_date: DateTimeime,
end_date: DateTimeime,
} |
use super::schema::gossipers;
use super::schema::gossips;
use chrono::prelude::*;
#[derive(Identifiable, Queryable)]
/// A stored gossip message loaded from the `gossips` table.
/// Diesel's `Queryable` maps columns by position, so field order matters.
pub struct Gossip {
    id: i32,
    msg: String,
    added: NaiveDateTime,
    // Guild ids are persisted as raw bytes; decoded via `vecu8_to_u64`.
    guild_id: Vec<u8>,
}
impl Gossip {
    /// Borrow the message text.
    #[inline]
    pub fn message(&self) -> &str {
        &self.msg
    }

    /// When this gossip was recorded.
    #[inline]
    pub fn added(&self) -> &NaiveDateTime {
        &self.added
    }

    /// Decode the guild id from its stored byte form.
    #[inline]
    pub fn guild_id(&self) -> u64 {
        super::vecu8_to_u64(&self.guild_id)
    }
}
#[derive(Insertable)]
#[table_name = "gossips"]
/// A new row to insert into the `gossips` table; borrows the message text.
pub struct NewGossip<'a> {
    msg: &'a str,
    // Guild ids are persisted as raw bytes; encoded via `u64_to_vecu8`.
    guild_id: Vec<u8>,
}
impl<'a> NewGossip<'a> {
    /// Create a new gossip row carrying `message` for `guild_id`.
    ///
    /// The generic parameters mirror the original API surface; `U` is kept
    /// for call-site compatibility even though `guild_id` is already `u64`.
    #[inline]
    pub fn new<S, U>(message: S, guild_id: u64) -> Self
    where
        S: Into<&'a str>,
        U: Into<u64>,
    {
        NewGossip::from_parts(message.into(), guild_id.into())
    }

    // Non-generic core of `new`, kept separate so the generic shim stays thin.
    fn from_parts(message: &'a str, guild_id: u64) -> Self {
        NewGossip {
            msg: message,
            guild_id: super::u64_to_vecu8(guild_id),
        }
    }

    /// Borrow the message text.
    #[inline]
    pub fn message(&self) -> &'a str {
        self.msg
    }

    /// Decode the guild id from its stored byte form.
    #[inline]
    pub fn guild_id(&self) -> u64 {
        super::vecu8_to_u64(&self.guild_id)
    }
}
#[derive(Identifiable, Queryable)]
/// A known gossiper loaded from the `gossipers` table.
pub struct Gossiper {
    id: i32,
    // Discord ids are persisted as raw bytes; decoded via `vecu8_to_u64`.
    discord_id: Vec<u8>,
    /// The preferred guild is only used by a select group of commands and only when talking to the
    /// bot via a private message.
    preferred_guild: Option<Vec<u8>>,
}
impl Gossiper {
    /// Decode the discord id from its stored byte form.
    #[inline]
    pub fn discord_id(&self) -> u64 {
        super::vecu8_to_u64(&self.discord_id)
    }

    /// Decode the preferred guild id, if one is set.
    #[inline]
    pub fn preferred_guild(&self) -> Option<u64> {
        // `Option::map` replaces the hand-rolled match (clippy: manual_map).
        self.preferred_guild.as_ref().map(|raw| super::vecu8_to_u64(raw))
    }
}
#[derive(Insertable)]
#[table_name = "gossipers"]
/// A new row to insert into the `gossipers` table.
pub struct NewGossiper {
    discord_id: Vec<u8>,
    // Optional preferred guild, stored as raw bytes like the other ids.
    preferred_guild: Option<Vec<u8>>,
}
impl NewGossiper {
    /// Create a new gossiper.
    #[inline]
    pub fn new<V, O>(discord_id: V, preferred_guild: O) -> Self
    where
        V: Into<u64>,
        O: Into<Option<u64>>,
    {
        Self::new_impl(discord_id.into(), preferred_guild.into())
    }

    // Non-generic core of `new`.
    fn new_impl(discord_id: u64, preferred_guild: Option<u64>) -> Self {
        Self {
            discord_id: super::u64_to_vecu8(discord_id),
            // `Option::map` replaces the hand-rolled match (clippy: manual_map).
            preferred_guild: preferred_guild.map(super::u64_to_vecu8),
        }
    }

    /// Decode the discord id from its stored byte form.
    #[inline]
    pub fn discord_id(&self) -> u64 {
        super::vecu8_to_u64(&self.discord_id)
    }

    /// Decode the preferred guild id, if one is set.
    ///
    /// Correctly-spelled accessor, added for consistency with the
    /// `preferred_guild` field; matches `Gossiper`'s API.
    #[inline]
    pub fn preferred_guild(&self) -> Option<u64> {
        self.preferred_guild.as_ref().map(|raw| super::vecu8_to_u64(raw))
    }

    /// Decode the preferred guild id, if one is set.
    ///
    /// Misspelled name kept for backward compatibility with existing
    /// callers; prefer [`NewGossiper::preferred_guild`].
    #[inline]
    pub fn prefered_guild(&self) -> Option<u64> {
        self.preferred_guild()
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Some helper functions to work with 128bit numbers. Note that the functionality provided here is
//! only sensible to use with 128bit numbers because for smaller sizes, you can always rely on
//! assumptions of a bigger type (u128) being available, or simply create a per-thing and use the
//! multiplication implementation provided there.
use crate::biguint;
use num_traits::Zero;
use sp_std::{
cmp::{max, min},
convert::TryInto,
mem,
};
/// Helper gcd function used in Rational128 implementation.
///
/// Computes the greatest common divisor of `a` and `b`; by convention
/// `gcd(x, 0) == gcd(0, x) == x` and `gcd(0, 0) == 0`.
pub fn gcd(a: u128, b: u128) -> u128 {
    // Iterative Euclidean algorithm. Produces exactly the same values as a
    // recursive binary-GCD formulation for every input pair.
    let (mut x, mut y) = (a, b);
    while y != 0 {
        let r = x % y;
        x = y;
        y = r;
    }
    x
}
/// split a u128 into two u64 limbs
///
/// Returns `(high, low)` — note the high limb comes first.
pub fn split(a: u128) -> (u64, u64) {
    ((a >> 64) as u64, a as u64)
}
/// Convert a u128 to a u32 based biguint.
pub fn to_big_uint(x: u128) -> biguint::BigUint {
    // Split 128 -> 2x64 -> 4x32-bit limbs, most-significant limb first.
    let (xh, xl) = split(x);
    let (xhh, xhl) = biguint::split(xh);
    let (xlh, xll) = biguint::split(xl);
    let mut n = biguint::BigUint::from_limbs(&[xhh, xhl, xlh, xll]);
    // Drop leading zero limbs so the representation is canonical.
    n.lstrip();
    n
}
/// Safely and accurately compute `a * b / c`. The approach is:
/// - Simply try `a * b / c`.
/// - Else, convert them both into big numbers and re-try. `Err` is returned if the result cannot
///   be safely cast back to u128.
///
/// Invariant: c must be greater than or equal to 1.
pub fn multiply_by_rational(mut a: u128, mut b: u128, mut c: u128) -> Result<u128, &'static str> {
    if a.is_zero() || b.is_zero() {
        return Ok(Zero::zero())
    }
    // Enforce the `c >= 1` invariant rather than risking a division by zero.
    c = c.max(1);
    // a and b are interchangeable by definition in this function. It always helps to assume the
    // bigger of which is being multiplied by a `0 < b/c < 1`. Hence, a should be the bigger and
    // b the smaller one.
    if b > a {
        mem::swap(&mut a, &mut b);
    }
    // Attempt to perform the division first: if either operand divides evenly
    // by c, the remaining multiplication is exact and cheap to overflow-check.
    if a % c == 0 {
        a /= c;
        c = 1;
    } else if b % c == 0 {
        b /= c;
        c = 1;
    }
    if let Some(x) = a.checked_mul(b) {
        // This is the safest way to go. Try it.
        Ok(x / c)
    } else {
        // The 128-bit product overflowed: fall back to the biguint path.
        let a_num = to_big_uint(a);
        let b_num = to_big_uint(b);
        let c_num = to_big_uint(c);
        let mut ab = a_num * b_num;
        ab.lstrip();
        let mut q = if c_num.len() == 1 {
            // PROOF: if `c_num.len() == 1` then `c` fits in one limb.
            ab.div_unit(c as biguint::Single)
        } else {
            // PROOF: both `ab` and `c` cannot have leading zero limbs; if length of `c` is 1,
            // the previous branch would handle. Also, if ab for sure has a bigger size than
            // c, because `a.checked_mul(b)` has failed, hence ab must be at least one limb
            // bigger than c. In this case, returning zero is defensive-only and div should
            // always return Some.
            let (mut q, r) = ab.div(&c_num, true).unwrap_or((Zero::zero(), Zero::zero()));
            let r: u128 = r.try_into().expect("reminder of div by c is always less than c; qed");
            // Round to nearest: bump the quotient only when the remainder is
            // strictly greater than c/2.
            if r > (c / 2) {
                q = q.add(&to_big_uint(1));
            }
            q
        };
        q.lstrip();
        q.try_into().map_err(|_| "result cannot fit in u128")
    }
}
|
// svd2rust-generated reader types for the FMAC RDATA register.
#[doc = "Register `RDATA` reader"]
pub type R = crate::R<RDATA_SPEC>;
#[doc = "Field `RDATA` reader - Read data When a read access to this register occurs, the read data are the contents of the Y output buffer at the address offset indicated by the READ pointer. The pointer address is automatically incremented after each read access."]
pub type RDATA_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:15 - Read data When a read access to this register occurs, the read data are the contents of the Y output buffer at the address offset indicated by the READ pointer. The pointer address is automatically incremented after each read access."]
    #[inline(always)]
    pub fn rdata(&self) -> RDATA_R {
        // The data occupies the low 16 bits of the 32-bit register value.
        RDATA_R::new((self.bits & 0xffff) as u16)
    }
}
#[doc = "FMAC read data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdata::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RDATA_SPEC;
impl crate::RegisterSpec for RDATA_SPEC {
    // Accessed as a full 32-bit word even though only bits 0:15 carry data.
    type Ux = u32;
}
// Read-only register: `Readable` is implemented, `Writable` is not.
#[doc = "`read()` method returns [`rdata::R`](R) reader structure"]
impl crate::Readable for RDATA_SPEC {}
#[doc = "`reset()` method sets RDATA to value 0"]
impl crate::Resettable for RDATA_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use anyhow::{Context, Result};
use rusoto_ec2::{
AuthorizeSecurityGroupIngressRequest, Ec2, Ec2Client, IpPermission, IpRange,
RevokeSecurityGroupIngressRequest,
};
/// Thin wrapper around an EC2 client for managing an SSH allowlist rule.
pub struct Handler<'a> {
    client: &'a Ec2Client,
}
impl<'a> Handler<'a> {
pub fn new(client: &'a Ec2Client) -> Self {
Self { client }
}
pub async fn grant_access(
&self,
security_group_id: String,
description: Option<String>,
) -> Result<()> {
let mut request = AuthorizeSecurityGroupIngressRequest::default();
request.group_id = Some(security_group_id);
request.ip_permissions = Some(vec![self.get_ip_permission(description).await?]);
self.client
.authorize_security_group_ingress(request)
.await
.context("Failed to add public ip to allowlist")?;
Ok(())
}
pub async fn revoke_access(&self, security_group_id: String) -> Result<()> {
let mut request = RevokeSecurityGroupIngressRequest::default();
request.group_id = Some(security_group_id);
request.ip_permissions = Some(vec![self.get_ip_permission(None).await?]);
self.client
.revoke_security_group_ingress(request)
.await
.context("Failed to remove public ip from allowlist")?;
Ok(())
}
async fn get_ip_permission(&self, description: Option<String>) -> Result<IpPermission> {
let ip = public_ip::addr()
.await
.context("Unable to determine public ip")?;
Ok(IpPermission {
from_port: Some(22),
to_port: Some(22),
ip_protocol: Some("tcp".into()),
ip_ranges: Some(vec![IpRange {
cidr_ip: Some(format!("{}/32", ip)),
description,
}]),
ipv_6_ranges: None,
prefix_list_ids: None,
user_id_group_pairs: None,
})
}
}
|
use core::ptr;
// MMIO base addresses of the three I2C controllers, indexed by bus id.
pub const I2C_BASES: [u32; 3] = [0x10161000, 0x10144000, 0x10148000];
#[derive(Clone, Copy)]
#[allow(non_camel_case_types)]
/// Known I2C peripherals; the discriminant indexes `DEV_DATA` in
/// `DevData::new` (MCU = 0x03 -> bus 1, address 0x4A).
pub enum Device {
    MCU = 0x03,
}
// Bitfield layout of the I2C CNT register (`Reg::CNT`): bit0 flags the final
// byte of a transfer (stop), bit1 the first (start), bit2 pause, bit4 the
// ACK status, bit5 the read direction, bit6 interrupt enable, and bit7
// busy/transfer-in-progress (polled by `wait_busy`).
bf!(RegCnt[u8] {
    end: 0:0,
    beginning: 1:1,
    pause: 2:2,
    ack: 4:4,
    should_read: 5:5,
    enable_irq: 6:6,
    running: 7:7
});
#[derive(Clone, Copy)]
/// Register offsets within an I2C controller block.
enum Reg {
    DATA = 0x00,
    CNT = 0x01,
}
#[derive(Clone, Copy)]
/// Bus number plus 8-bit device address for one peripheral.
struct DevData {
    pub bus_id: u8,
    pub dev_addr: u8,
}
impl DevData {
    /// Look up the bus and address for a known `Device`; the enum
    /// discriminant indexes directly into the table.
    fn new(device: Device) -> DevData {
        const DEV_DATA: [DevData; 15] = [
            DevData { bus_id: 0, dev_addr: 0x4A },
            DevData { bus_id: 0, dev_addr: 0x7A },
            DevData { bus_id: 0, dev_addr: 0x78 },
            DevData { bus_id: 1, dev_addr: 0x4A },
            DevData { bus_id: 1, dev_addr: 0x78 },
            DevData { bus_id: 1, dev_addr: 0x2C },
            DevData { bus_id: 1, dev_addr: 0x2E },
            DevData { bus_id: 1, dev_addr: 0x40 },
            DevData { bus_id: 1, dev_addr: 0x44 },
            DevData { bus_id: 2, dev_addr: 0xD6 },
            DevData { bus_id: 2, dev_addr: 0xD0 },
            DevData { bus_id: 2, dev_addr: 0xD2 },
            DevData { bus_id: 2, dev_addr: 0xA4 },
            DevData { bus_id: 2, dev_addr: 0x9A },
            DevData { bus_id: 2, dev_addr: 0xA0 }
        ];
        DEV_DATA[device as usize]
    }

    /// Read a controller register via volatile MMIO.
    #[inline(never)]
    fn read_reg(&self, reg: Reg) -> u8 {
        let base = I2C_BASES[self.bus_id as usize];
        unsafe { ptr::read_volatile((base + reg as u32) as *const u8) }
    }

    /// Write a controller register via volatile MMIO.
    #[inline(never)]
    fn write_reg(&self, reg: Reg, val: u8) {
        let base = I2C_BASES[self.bus_id as usize];
        unsafe { ptr::write_volatile((base + reg as u32) as *mut u8, val); }
    }

    /// Busy-wait until the controller clears the CNT `running` bit.
    fn wait_busy(&self) {
        let is_busy = || {
            let cnt = self.read_reg(Reg::CNT);
            let cnt = RegCnt::new(cnt);
            cnt.running.get() == 1
        };
        while is_busy() { }
    }

    /// Wait for the current operation to finish and report whether the
    /// device acknowledged it (CNT `ack` bit).
    fn op_result(&self) -> Result<(), ()> {
        self.wait_busy();
        let cnt = self.read_reg(Reg::CNT);
        let cnt = RegCnt::new(cnt);
        if cnt.ack.get() == 1 {
            Ok(())
        } else {
            Err(())
        }
    }

    /// Stop/abort the transfer. 0xc5 = running | enable_irq | pause | end.
    fn halt_xfer(&self) {
        self.write_reg(Reg::CNT, 0xc5);
    }

    /// Transfer the final byte. 0xc1 = running | enable_irq | end; bit5 is
    /// the read-direction flag supplied by the caller.
    fn xfer_last_byte(&self, is_reading: u8) {
        self.write_reg(Reg::CNT, is_reading << 5 | 0xc1);
    }

    /// Transfer one mid-burst byte. 0xc0 = running | enable_irq.
    fn xfer_byte(&self, is_reading: u8) {
        self.write_reg(Reg::CNT, is_reading << 5 | 0xc0);
    }

    /// Address the device and select register `reg`; when `is_reading`,
    /// re-address with the read bit (LSB of the address) set.
    /// 0xc2 = running | enable_irq | beginning (start condition).
    fn select_target(&self, reg: u8, is_reading: bool) -> Result<(), ()> {
        self.wait_busy();
        self.write_reg(Reg::DATA, self.dev_addr);
        self.write_reg(Reg::CNT, 0xc2);
        self.op_result()?;
        self.wait_busy();
        self.write_reg(Reg::DATA, reg);
        self.write_reg(Reg::CNT, 0xc0);
        self.op_result()?;
        if is_reading {
            self.wait_busy();
            self.write_reg(Reg::DATA, self.dev_addr | 1);
            self.write_reg(Reg::CNT, 0xc2);
            self.op_result()?;
        }
        Ok(())
    }
}
/// Read a single byte from register `reg` of `dev`, retrying the whole
/// transaction up to 8 times before giving up with `Err(())`.
pub fn read_byte(dev: Device, reg: u8) -> Result<u8, ()> {
    let dev_data = DevData::new(dev);
    for _ in 0..8 {
        if dev_data.select_target(reg, true).is_ok() {
            dev_data.wait_busy();
            dev_data.xfer_byte(1);
            dev_data.wait_busy();
            dev_data.halt_xfer();
            dev_data.wait_busy();
            return Ok(dev_data.read_reg(Reg::DATA))
        }
        // No ACK: stop the transfer and retry from the start condition.
        dev_data.halt_xfer();
        dev_data.wait_busy();
    }
    Err(())
}
/// Read `dest.len()` consecutive bytes starting at register `reg` of `dev`.
///
/// Retries the whole transaction up to 8 times; returns `Err(())` if the
/// device never acknowledges. An empty `dest` is a successful no-op.
pub fn read_bytes(dev: Device, reg: u8, dest: &mut [u8]) -> Result<(), ()> {
    // Idiomatic emptiness check (clippy: len_zero); behavior unchanged.
    if dest.is_empty() {
        return Ok(())
    }
    let dev_data = DevData::new(dev);
    for _ in 0..8 {
        if dev_data.select_target(reg, true).is_ok() {
            // All but the last byte: 0xF0 = running | irq | read | ack
            // keeps the burst going with an acknowledge after each byte.
            for n in 0..(dest.len() - 1) {
                dev_data.wait_busy();
                dev_data.write_reg(Reg::CNT, 0xF0);
                dev_data.wait_busy();
                dest[n] = dev_data.read_reg(Reg::DATA);
            }
            // Final byte: signal end-of-transfer, then collect the data.
            dev_data.wait_busy();
            dev_data.xfer_last_byte(1);
            dev_data.wait_busy();
            let dest_end = dest.len() - 1;
            dest[dest_end] = dev_data.read_reg(Reg::DATA);
            return Ok(())
        }
        // No ACK: stop the transfer and retry from the start condition.
        dev_data.wait_busy();
        dev_data.halt_xfer();
        dev_data.wait_busy();
    }
    Err(())
}
/// Write `data` to register `reg` of `dev`, retrying the whole transaction
/// up to 8 times before giving up with `Err(())`.
pub fn write_byte(dev: Device, reg: u8, data: u8) -> Result<(), ()> {
    let dev_data = DevData::new(dev);
    for _ in 0..8 {
        if dev_data.select_target(reg, false).is_ok() {
            dev_data.wait_busy();
            dev_data.write_reg(Reg::DATA, data);
            // NOTE(review): CNT is written twice back-to-back here
            // (xfer_last_byte then xfer_byte) with no wait in between, so
            // the second write (0xc0, no end bit) likely overrides the
            // first (0xc1). Verify against the hardware reference before
            // changing — left as-is to preserve behavior.
            dev_data.xfer_last_byte(0);
            dev_data.xfer_byte(0);
            dev_data.wait_busy();
            dev_data.halt_xfer();
            if dev_data.op_result().is_ok() {
                return Ok(())
            }
        }
        dev_data.halt_xfer();
        dev_data.wait_busy();
    }
    Err(())
}
|
/// Phone-keypad letter combinations ("2" -> abc, ..., "9" -> wxyz).
///
/// '0' maps to a single space, '1' contributes no letters (it is passed
/// through), and an empty input yields an empty vector. Results are ordered
/// prefix-major, following the keypad table order for each digit.
pub fn letter_combinations(digits: String) -> Vec<String> {
    if digits.is_empty() {
        return Vec::new();
    }

    // Recursive helper: combinations for the remaining digit string.
    fn expand(rest: &str) -> Vec<String> {
        const KEYS: [&[&str]; 10] = [
            &[" "],
            &[""],
            &["a", "b", "c"],
            &["d", "e", "f"],
            &["g", "h", "i"],
            &["j", "k", "l"],
            &["m", "n", "o"],
            &["p", "q", "r", "s"],
            &["t", "u", "v"],
            &["w", "x", "y", "z"],
        ];
        let digit = rest.chars().next().unwrap();
        let letters = KEYS[digit as usize - '0' as usize];
        if rest.len() == 1 {
            return letters.iter().map(|l| l.to_string()).collect();
        }
        let tails = expand(&rest[1..]);
        if digit == '1' {
            // '1' carries no letters: pass the tail combinations through.
            tails
        } else {
            letters
                .iter()
                .flat_map(|prefix| tails.iter().map(move |tail| format!("{}{}", prefix, tail)))
                .collect()
        }
    }

    expand(digits.as_str())
}
#[test]
fn test_letter_combinations() {
    // Previously this test only printed the results; it now asserts the
    // exact expected combinations as well.
    let combos = letter_combinations("23".to_string());
    assert_eq!(
        combos,
        vec!["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"]
    );
    println!("{:?}", combos);
    // '1' carries no letters, so "123" yields the same result as "23".
    let combos = letter_combinations("123".to_string());
    assert_eq!(
        combos,
        vec!["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"]
    );
    println!("{:?}", combos);
}
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use web_sys::CanvasRenderingContext2d as Canvas2d;
use web_sys::HtmlCanvasElement;
use yew::services::{RenderService, Task};
use yew::{html, Component, ComponentLink, Html, NodeRef, Properties, ShouldRender};
use shared::game::{GameState, Visibility};
use shared::message::{ClientMessage, GameAction};
use crate::game_view::GameView;
use crate::networking;
/// Canvas-based board renderer component.
pub struct Board {
    props: Props,
    // Cached DOM handles, populated on the first `rendered` call.
    canvas: Option<HtmlCanvasElement>,
    canvas2d: Option<Canvas2d>,
    link: ComponentLink<Self>,
    node_ref: NodeRef,
    // Keeps the requestAnimationFrame handle alive; dropping it cancels the
    // scheduled render.
    render_loop: Option<Box<dyn Task>>,
    // Last mouse position in canvas pixels, while the cursor is over the board.
    mouse_pos: Option<(f64, f64)>,
    // Board coordinate currently under the cursor.
    selection_pos: Option<(u32, u32)>,
}
#[derive(Properties, Clone, PartialEq)]
pub struct Props {
    // View of the game to draw; `PartialEq` lets `change` skip no-op updates.
    pub game: GameView,
}
/// Messages fed to `Board` by DOM event listeners and the render loop.
pub enum Msg {
    /// Animation-frame tick carrying its timestamp.
    Render(f64),
    /// Cursor moved; payload is (x, y) in canvas pixels.
    MouseMove((f64, f64)),
    /// Mouse pressed; payload is (x, y) in canvas pixels.
    Click((f64, f64)),
    /// Cursor left the canvas.
    MouseLeave,
}
impl Component for Board {
    type Message = Msg;
    type Properties = Props;

    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        Board {
            props,
            canvas: None,
            canvas2d: None,
            link,
            node_ref: NodeRef::default(),
            render_loop: None,
            mouse_pos: None,
            selection_pos: None,
        }
    }

    fn rendered(&mut self, first_render: bool) {
        // Once rendered, store references for the canvas and GL context. These can be used for
        // resizing the rendering area when the window or canvas element are resized, as well as
        // for making GL calls.
        let canvas = self.node_ref.cast::<HtmlCanvasElement>().unwrap();
        let canvas2d: Canvas2d = canvas
            .get_context("2d")
            .unwrap()
            .unwrap()
            .dyn_into()
            .unwrap();
        // Each DOM event is bridged to a component message through a JS
        // closure. `forget()` leaks the closure on purpose: the listener is
        // never unregistered, so the callback must live forever.
        // NOTE(review): `rendered` runs after every render; listeners appear
        // to be re-registered on each call — confirm this is intended.
        {
            let mouse_move = self.link.callback(Msg::MouseMove);
            let closure = Closure::wrap(Box::new(move |event: web_sys::MouseEvent| {
                mouse_move.emit((event.offset_x() as f64, event.offset_y() as f64));
            }) as Box<dyn FnMut(_)>);
            canvas
                .add_event_listener_with_callback("mousemove", closure.as_ref().unchecked_ref())
                .unwrap();
            closure.forget();
        }
        {
            let mouse_click = self.link.callback(Msg::Click);
            let closure = Closure::wrap(Box::new(move |event: web_sys::MouseEvent| {
                mouse_click.emit((event.offset_x() as f64, event.offset_y() as f64));
            }) as Box<dyn FnMut(_)>);
            canvas
                .add_event_listener_with_callback("mousedown", closure.as_ref().unchecked_ref())
                .unwrap();
            closure.forget();
        }
        {
            let mouse_leave = self.link.callback(|_| Msg::MouseLeave);
            let closure = Closure::wrap(Box::new(move |_event: web_sys::MouseEvent| {
                mouse_leave.emit(());
            }) as Box<dyn FnMut(_)>);
            canvas
                .add_event_listener_with_callback("mouseleave", closure.as_ref().unchecked_ref())
                .unwrap();
            closure.forget();
        }
        self.canvas = Some(canvas);
        self.canvas2d = Some(canvas2d);
        // In a more complex use-case, there will be additional WebGL initialization that should be
        // done here, such as enabling or disabling depth testing, depth functions, face
        // culling etc.
        if first_render {
            self.render_gl(0.0).unwrap();
            // The callback to request animation frame is passed a time value which can be used for
            // rendering motion independent of the framerate which may vary.
            let render_frame = self.link.callback(Msg::Render);
            let handle = RenderService::request_animation_frame(render_frame);
            // A reference to the handle must be stored, otherwise it is dropped and the render won't
            // occur.
            self.render_loop = Some(Box::new(handle));
        }
    }

    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        // The board is drawn imperatively onto the canvas, so a yew re-render
        // is never requested (always `false`); `render_gl` is called directly
        // when the props actually changed.
        if self.props != props {
            self.props = props;
            self.render_gl(0.0).unwrap();
            false
        } else {
            false
        }
    }

    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        let game = &self.props.game;
        let canvas = &self.canvas;
        // Translate a pixel position into a board coordinate. In "pixel"
        // mode a stone straddles four intersections, hence the +0.5 rounding.
        // NOTE(review): both axes divide by canvas.width() — presumably the
        // canvas is always square; confirm before supporting other shapes.
        let mouse_to_coord = |p: (f64, f64)| {
            let canvas = canvas.as_ref().expect("Canvas not initialized!");
            match game.mods.pixel {
                true => (
                    (p.0 / (canvas.width() as f64 / game.size.0 as f64) + 0.5) as u32,
                    (p.1 / (canvas.width() as f64 / game.size.1 as f64) + 0.5) as u32,
                ),
                false => (
                    (p.0 / (canvas.width() as f64 / game.size.0 as f64)) as u32,
                    (p.1 / (canvas.width() as f64 / game.size.1 as f64)) as u32,
                ),
            }
        };
        match msg {
            Msg::Render(_timestamp) => {
                //self.render_gl(timestamp).unwrap();
            }
            Msg::MouseMove(p) => {
                self.mouse_pos = Some(p);
                self.selection_pos = Some(mouse_to_coord(p));
                self.render_gl(0.0).unwrap();
            }
            Msg::Click(p) => {
                // Ignore clicks while viewing history
                if self.props.game.history.is_some() {
                    return false;
                }
                self.mouse_pos = Some(p);
                self.selection_pos = Some(mouse_to_coord(p));
                // Send the placement to the server; the board state itself is
                // updated when the server broadcasts the new game view.
                networking::send(ClientMessage::GameAction(GameAction::Place(
                    self.selection_pos.unwrap().0,
                    self.selection_pos.unwrap().1,
                )));
            }
            Msg::MouseLeave => {
                self.mouse_pos = None;
                self.selection_pos = None;
                self.render_gl(0.0).unwrap();
            }
        }
        false
    }

    fn view(&self) -> Html {
        html! {
            <canvas ref={self.node_ref.clone()} width=800 height=800 />
        }
    }
}
impl Board {
    /// Redraw the entire board onto the 2D canvas and schedule the next
    /// animation frame. `timestamp` is currently unused — the render call in
    /// the `Msg::Render` handler is commented out.
    fn render_gl(&mut self, timestamp: f64) -> Result<(), JsValue> {
        // TODO: remove hardcoding for 19x19
        // Per-team palettes, indexed by `team - 1` (teams are 1-based).
        let shadow_stone_colors = ["#555555", "#bbbbbb", "#7b91bd"];
        let shadow_border_colors = ["#bbbbbb", "#555555", "#555555"];
        let stone_colors = ["#000000", "#eeeeee", "#5074bc"];
        let stone_colors_hidden = ["#00000080", "#eeeeee80", "#5074bc80"];
        let border_colors = ["#555555", "#000000", "#000000"];
        let dead_mark_color = ["#eeeeee", "#000000", "#000000"];
        let context = self
            .canvas2d
            .as_ref()
            .expect("Canvas Context not initialized!");
        let canvas = self.canvas.as_ref().expect("Canvas not initialized!");
        // Background wash.
        context.clear_rect(0.0, 0.0, canvas.width().into(), canvas.height().into());
        context.set_fill_style(&JsValue::from_str("#e0bb6c"));
        context.fill_rect(0.0, 0.0, canvas.width().into(), canvas.height().into());
        context.set_line_width(1.0);
        context.set_stroke_style(&JsValue::from_str("#000000"));
        context.set_fill_style(&JsValue::from_str("#000000"));
        let game = &self.props.game;
        // When browsing history, draw the historical snapshot instead of the
        // live board.
        let board = match &game.history {
            Some(h) => &h.board,
            None => &game.board,
        };
        let board_visibility = match &game.history {
            Some(h) => &h.board_visibility,
            None => &game.board_visibility,
        };
        // TODO: actually handle non-square boards
        let board_size = game.size.0 as usize;
        // Side length of one grid cell in pixels.
        let size = canvas.width() as f64 / board_size as f64;
        let turn = game.seats[game.turn as usize].1;
        // Lines
        for y in 0..game.size.1 {
            context.begin_path();
            context.move_to(size * 0.5, (y as f64 + 0.5) * size);
            context.line_to(size * (board_size as f64 - 0.5), (y as f64 + 0.5) * size);
            context.stroke();
        }
        for x in 0..game.size.0 {
            context.begin_path();
            context.move_to((x as f64 + 0.5) * size, size * 0.5);
            context.line_to((x as f64 + 0.5) * size, size * (board_size as f64 - 0.5));
            context.stroke();
        }
        // Starpoints - by popular demand
        let points: &[(u8, u8)] = match game.size.0 {
            19 => &[
                (3, 3),
                (9, 3),
                (15, 3),
                (3, 9),
                (9, 9),
                (15, 9),
                (3, 15),
                (9, 15),
                (15, 15),
            ],
            13 => &[(3, 3), (9, 3), (6, 6), (3, 9), (9, 9)],
            _ => &[],
        };
        for &(x, y) in points {
            context.begin_path();
            context.arc(
                (x as f64 + 0.5) * size,
                (y as f64 + 0.5) * size,
                size / 8.,
                0.0,
                2.0 * std::f64::consts::PI,
            )?;
            context.fill();
        }
        // Ghost stone(s) under the cursor, in the current turn's colors.
        if let Some(selection_pos) = self.selection_pos {
            let mut p = (selection_pos.0 as i32, selection_pos.1 as i32);
            if game.mods.pixel {
                p.0 -= 1;
                p.1 -= 1;
            }
            // TODO: This allocation is horrible, figure out how to avoid it
            // In pixel mode a placement covers a 2x2 square of intersections.
            let points = match game.mods.pixel {
                true => vec![
                    (p.0, p.1),
                    (p.0 + 1, p.1),
                    (p.0, p.1 + 1),
                    (p.0 + 1, p.1 + 1),
                ],
                false => vec![p],
            };
            let color = turn;
            // Teams start from 1
            context.set_fill_style(&JsValue::from_str(shadow_stone_colors[color as usize - 1]));
            context.set_stroke_style(&JsValue::from_str(shadow_border_colors[color as usize - 1]));
            for p in points {
                context.begin_path();
                context.arc(
                    (p.0 as f64 + 0.5) * size,
                    (p.1 as f64 + 0.5) * size,
                    size / 2.,
                    0.0,
                    2.0 * std::f64::consts::PI,
                )?;
                context.fill();
                context.stroke();
            }
        }
        // Placed stones; the board is a flat row-major array where 0 = empty.
        for (idx, &color) in board.iter().enumerate() {
            let x = idx % board_size;
            let y = idx / board_size;
            let visible = board_visibility
                .as_ref()
                .map(|v| v[idx] == 0)
                .unwrap_or(true);
            if color == 0 || !visible {
                continue;
            }
            context.set_fill_style(&JsValue::from_str(stone_colors[color as usize - 1]));
            context.set_stroke_style(&JsValue::from_str(border_colors[color as usize - 1]));
            let size = canvas.width() as f64 / board_size as f64;
            // create shape of radius 'size' around center point (size, size)
            context.begin_path();
            context.arc(
                (x as f64 + 0.5) * size,
                (y as f64 + 0.5) * size,
                size / 2.,
                0.0,
                2.0 * std::f64::consts::PI,
            )?;
            context.fill();
            context.stroke();
        }
        // Semi-transparent stones for positions only partially visible.
        for (idx, &colors) in board_visibility.iter().flatten().enumerate() {
            let x = idx % board_size;
            let y = idx / board_size;
            let colors = Visibility::from_value(colors);
            if colors.is_empty() {
                continue;
            }
            for color in &colors {
                context.set_fill_style(&JsValue::from_str(stone_colors_hidden[color as usize - 1]));
                context.set_stroke_style(&JsValue::from_str(border_colors[color as usize - 1]));
                let size = canvas.width() as f64 / board_size as f64;
                // create shape of radius 'size' around center point (size, size)
                context.begin_path();
                context.arc(
                    (x as f64 + 0.5) * size,
                    (y as f64 + 0.5) * size,
                    size / 2.,
                    0.0,
                    2.0 * std::f64::consts::PI,
                )?;
                context.fill();
                context.stroke();
            }
        }
        // Ring-mark the most recently placed stone(s).
        let last_stone = match (&game.state, &game.history) {
            (_, Some(h)) => h.last_stone.as_ref(),
            (GameState::Play(state), _) => state.last_stone.as_ref(),
            _ => None,
        };
        if let Some(points) = last_stone {
            for &(x, y) in points {
                let mut color = board[y as usize * game.size.0 as usize + x as usize];
                if color == 0 {
                    // White stones have the most fitting (read: black) marker for empty board
                    color = 2;
                }
                context.set_stroke_style(&JsValue::from_str(dead_mark_color[color as usize - 1]));
                context.set_line_width(2.0);
                let size = canvas.width() as f64 / board_size as f64;
                // create shape of radius 'size' around center point (size, size)
                context.begin_path();
                context.arc(
                    (x as f64 + 0.5) * size,
                    (y as f64 + 0.5) * size,
                    size / 4.,
                    0.0,
                    2.0 * std::f64::consts::PI,
                )?;
                context.stroke();
            }
        } else if game.history.is_none() {
            // Scoring overlay: cross out dead groups and shade territory.
            match &game.state {
                GameState::Scoring(scoring) | GameState::Done(scoring) => {
                    for group in &scoring.groups {
                        if group.alive {
                            continue;
                        }
                        for &(x, y) in &group.points {
                            context.set_line_width(2.0);
                            context.set_stroke_style(&JsValue::from_str(
                                dead_mark_color[group.team.0 as usize - 1],
                            ));
                            context.set_stroke_style(&JsValue::from_str(
                                dead_mark_color[group.team.0 as usize - 1],
                            ));
                            context.begin_path();
                            context.move_to((x as f64 + 0.2) * size, (y as f64 + 0.2) * size);
                            context.line_to((x as f64 + 0.8) * size, (y as f64 + 0.8) * size);
                            context.stroke();
                            context.begin_path();
                            context.move_to((x as f64 + 0.8) * size, (y as f64 + 0.2) * size);
                            context.line_to((x as f64 + 0.2) * size, (y as f64 + 0.8) * size);
                            context.stroke();
                        }
                    }
                    for (idx, &color) in scoring.points.points.iter().enumerate() {
                        let x = (idx % board_size) as f64;
                        let y = (idx / board_size) as f64;
                        if color.is_empty() {
                            continue;
                        }
                        context
                            .set_fill_style(&JsValue::from_str(stone_colors[color.0 as usize - 1]));
                        context.set_stroke_style(&JsValue::from_str(
                            border_colors[color.0 as usize - 1],
                        ));
                        // Territory marker: a small centered square.
                        context.fill_rect(
                            (x + 1. / 3.) * size,
                            (y + 1. / 3.) * size,
                            (1. / 3.) * size,
                            (1. / 3.) * size,
                        );
                    }
                }
                _ => {}
            }
        }
        let render_frame = self.link.callback(Msg::Render);
        let handle = RenderService::request_animation_frame(render_frame);
        // A reference to the new handle must be retained for the next render to run.
        self.render_loop = Some(Box::new(handle));
        Ok(())
    }
}
|
use super::{GreyType,Token};
use std::rc::Rc;
#[derive(Clone,Debug)]
/// Expression tree node for the interpreter.
pub enum Expr {
    /// Unparsed token sequence, prior to list construction.
    RawList(Vec<Token>),
    /// A parsed list of sub-expressions.
    List(Vec<Rc<Expr>>),
    /// A literal value.
    Literal(GreyType),
    /// A variable reference, by its token.
    Variable(Token),
    // Presumably (callee, argument) — TODO confirm against the evaluator.
    Call(Rc<Expr>,Rc<Expr>)
}
|
/// command line interface for intiface/buttplug.
///
#[macro_use]
extern crate log;
mod frontend;
mod options;
use buttplug::{
connector::{
ButtplugRemoteServerConnector, ButtplugWebsocketServerTransport,
ButtplugWebsocketServerTransportBuilder,
},
core::{
errors::ButtplugError,
messages::{serializer::ButtplugServerJSONSerializer, ButtplugServerMessage},
},
server::{remote_server::ButtplugRemoteServerEvent, ButtplugRemoteServer, ButtplugServerBuilder},
util::logging::ChannelWriter,
};
use frontend::intiface_gui::server_process_message::{
ClientConnected, ClientDisconnected, ClientRejected, DeviceConnected, DeviceDisconnected, Msg,
ProcessEnded, ProcessError, ProcessLog, ProcessStarted,
};
use frontend::FrontendPBufChannel;
use futures::{pin_mut, select, FutureExt, Stream, StreamExt};
use log_panics;
use std::{error::Error, fmt, time::Duration};
use tokio::{
self,
net::TcpListener,
signal::ctrl_c,
sync::mpsc::{channel, Receiver},
time::sleep,
};
use tokio_util::sync::CancellationToken;
use tracing_subscriber::filter::EnvFilter;
#[derive(Default, Clone)]
/// Options describing how the server accepts client connections.
pub struct ConnectorOptions {
    server_builder: ButtplugServerBuilder,
    // Presumably keeps the process alive across client disconnects — TODO confirm.
    stay_open: bool,
    use_frontend_pipe: bool,
    // Bind the insecure websocket on all interfaces instead of localhost only.
    ws_listen_on_all_interfaces: bool,
    ws_insecure_port: Option<u16>,
    ipc_pipe_name: Option<String>,
}
#[derive(Debug)]
/// Catch-all error for CLI-level failures, carrying a human-readable reason.
pub struct IntifaceError {
    reason: String,
}
impl IntifaceError {
    /// Build an error wrapping a human-readable `reason`.
    pub fn new(error_msg: &str) -> Self {
        let reason = error_msg.to_string();
        Self { reason }
    }
}
impl fmt::Display for IntifaceError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Bug fix: previously this emitted the literal text "self.reason"
        // instead of the field's contents.
        write!(f, "{}", self.reason)
    }
}
impl Error for IntifaceError {
    /// No underlying cause is tracked, so there is never a source error.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        None
    }
}
/// Top-level error type for the CLI: aggregates every failure kind that
/// `main` can return.
#[derive(Debug)]
pub enum IntifaceCLIErrorEnum {
    /// Underlying std IO failure.
    IoError(std::io::Error),
    /// Error bubbled up from the buttplug server library.
    ButtplugError(ButtplugError),
    /// Process-level error raised by intiface itself.
    IntifaceError(IntifaceError),
}
impl From<std::io::Error> for IntifaceCLIErrorEnum {
    /// Wraps a std IO error so `?` converts it in functions returning
    /// `IntifaceCLIErrorEnum`.
    fn from(err: std::io::Error) -> Self {
        IntifaceCLIErrorEnum::IoError(err)
    }
}
impl From<ButtplugError> for IntifaceCLIErrorEnum {
    /// Wraps a buttplug library error so `?` converts it in functions
    /// returning `IntifaceCLIErrorEnum`.
    fn from(err: ButtplugError) -> Self {
        IntifaceCLIErrorEnum::ButtplugError(err)
    }
}
impl From<IntifaceError> for IntifaceCLIErrorEnum {
    /// Wraps a process-level intiface error so `?` converts it in functions
    /// returning `IntifaceCLIErrorEnum`.
    fn from(err: IntifaceError) -> Self {
        IntifaceCLIErrorEnum::IntifaceError(err)
    }
}
/// Interposes on a server-message channel: every message is forwarded
/// unchanged to the returned receiver, and `ServerInfo` messages
/// additionally trigger a `ClientConnected` notification to the frontend.
#[allow(dead_code)]
fn setup_frontend_filter_channel<T>(
    mut receiver: Receiver<ButtplugServerMessage>,
    frontend_channel: FrontendPBufChannel,
) -> Receiver<ButtplugServerMessage> {
    let (sender_filtered, recv_filtered) = channel(256);
    tokio::spawn(async move {
        // Pump messages until the upstream sender is dropped.
        while let Some(msg) = receiver.recv().await {
            // Surface new-client handshakes to the GUI frontend.
            if let ButtplugServerMessage::ServerInfo(_) = msg {
                let notification = ClientConnected {
                    client_name: "Unknown Name".to_string(),
                };
                frontend_channel.send(Msg::ClientConnected(notification)).await;
            }
            // Forward every message, including the ServerInfo itself.
            sender_filtered.send(msg).await.unwrap();
        }
    });
    recv_filtered
}
/// Consumes events from a running remote server, logging them and relaying
/// them to the GUI frontend (when present), until the client disconnects,
/// the event stream ends, or the cancellation token fires.
async fn server_event_receiver(
    receiver: impl Stream<Item = ButtplugRemoteServerEvent>,
    frontend_sender: Option<FrontendPBufChannel>,
    connection_cancellation_token: CancellationToken,
) {
    pin_mut!(receiver);
    loop {
        select! {
            maybe_event = receiver.next().fuse() => {
                match maybe_event {
                    Some(event) => match event {
                        ButtplugRemoteServerEvent::Connected(client_name) => {
                            info!("Client connected: {}", client_name);
                            // While a client session is active, hold the port and turn
                            // away any additional connection attempts.
                            // NOTE(review): address/port are hardcoded here — presumably
                            // these should come from ConnectorOptions; confirm.
                            let sender = frontend_sender.clone();
                            let token = connection_cancellation_token.child_token();
                            tokio::spawn(async move {
                                reject_all_incoming(sender, "localhost", 12345, token).await;
                            });
                            if let Some(frontend_sender) = &frontend_sender {
                                frontend_sender
                                    .send(Msg::ClientConnected(ClientConnected {
                                        client_name: client_name,
                                    }))
                                    .await;
                            }
                        }
                        ButtplugRemoteServerEvent::Disconnected => {
                            info!("Client disconnected.");
                            // If we disconnect, go ahead and break out of our loop.
                            break;
                        }
                        ButtplugRemoteServerEvent::DeviceAdded(device_id, device_name) => {
                            info!("Device Added: {} - {}", device_id, device_name);
                            if let Some(frontend_sender) = &frontend_sender {
                                frontend_sender
                                    .send(Msg::DeviceConnected(DeviceConnected {
                                        device_name,
                                        device_id,
                                    }))
                                    .await;
                            }
                        }
                        ButtplugRemoteServerEvent::DeviceRemoved(device_id) => {
                            info!("Device Removed: {}", device_id);
                            if let Some(frontend_sender) = &frontend_sender {
                                frontend_sender
                                    .send(Msg::DeviceDisconnected(DeviceDisconnected { device_id }))
                                    .await;
                            }
                        }
                    },
                    None => {
                        warn!("Lost connection with main thread, breaking.");
                        break;
                    },
                }
            },
            _ = connection_cancellation_token.cancelled().fuse() => {
                info!("Connection cancellation token activated, breaking");
                break;
            }
        }
    }
    info!("Exiting server event receiver loop");
    // Regardless of why we exited, report a client disconnect to the frontend.
    if let Some(frontend_sender) = &frontend_sender {
        frontend_sender
            .send(Msg::ClientDisconnected(ClientDisconnected {}))
            .await;
    }
}
/// Binds `address:port` and rejects every connection attempt made while an
/// existing client session is active; exits when `token` is cancelled or
/// the listener errors.
async fn reject_all_incoming(
    frontend_sender: Option<FrontendPBufChannel>,
    address: &str,
    port: u16,
    token: CancellationToken,
) {
    info!("Rejecting all incoming clients while connected");
    let addr = format!("{}:{}", address, port);
    let try_socket = TcpListener::bind(&addr).await;
    // Holding this bind keeps the port occupied for the session's lifetime.
    let listener = try_socket.expect("Cannot hold port while connected?!");
    loop {
        select! {
            _ = token.cancelled().fuse() => {
                break;
            }
            ret = listener.accept().fuse() => {
                match ret {
                    Ok(_) => {
                        // Dropping the accepted socket immediately closes it;
                        // we only notify the frontend that someone was turned away.
                        error!("Someone tried to connect while we're already connected!!!!");
                        if let Some(frontend_sender) = &frontend_sender {
                            frontend_sender
                                .send(Msg::ClientRejected(ClientRejected {
                                    client_name: "Unknown".to_owned(),
                                }))
                                .await;
                        }
                    }
                    Err(_) => {
                        break;
                    }
                }
            }
        }
    }
    info!("Leaving client rejection loop.");
}
/// Process entry point: sets up logging (stdout or GUI pbuf pipe), parses
/// command-line options, then runs the websocket server — either once, or
/// in a reconnecting "stay open" loop.
#[tokio::main]
async fn main() -> Result<(), IntifaceCLIErrorEnum> {
    let parent_token = CancellationToken::new();
    let process_token = parent_token.child_token();
    // Intiface GUI communicates with its child process via protobufs through
    // stdin/stdout. Checking for this is the first thing we should do, as any
    // output after this either needs to be printed strings or pbuf messages.
    //
    // Only set up the env logger if we're not outputting pbufs to a frontend
    // pipe.
    let frontend_sender = options::check_frontend_pipe(parent_token);
    let log_level = options::check_log_level();
    log_panics::init();
    #[allow(unused_variables)]
    if let Some(sender) = &frontend_sender {
        // Add panic hook for emitting backtraces through the logging system.
        sender
            .send(Msg::ProcessStarted(ProcessStarted::default()))
            .await;
        // Bridge raw log bytes from the tracing writer into pbuf ProcessLog
        // messages for the frontend.
        let (bp_log_sender, mut receiver) = channel::<Vec<u8>>(256);
        let log_sender = sender.clone();
        tokio::spawn(async move {
            while let Some(log) = receiver.recv().await {
                log_sender
                    .send(Msg::ProcessLog(ProcessLog {
                        message: std::str::from_utf8(&log).unwrap().to_owned(),
                    }))
                    .await;
            }
        });
        tracing_subscriber::fmt()
            .json()
            .with_max_level(log_level)
            .with_ansi(false)
            .with_writer(move || ChannelWriter::new(bp_log_sender.clone()))
            .init();
    } else {
        if log_level.is_some() {
            tracing_subscriber::fmt()
                .with_max_level(log_level.unwrap())
                .init();
        } else {
            // No explicit level: fall back to RUST_LOG, defaulting to "info".
            let filter = EnvFilter::try_from_default_env()
                .or_else(|_| EnvFilter::try_new("info"))
                .unwrap();
            tracing_subscriber::fmt().with_env_filter(filter).init();
        }
        println!("Intiface Server, starting up with stdout output.");
    }
    // Parse options, get back our connection information and a curried server
    // factory closure.
    let connector_opts = match options::parse_options() {
        Ok(opts) => match opts {
            Some(o) => o,
            None => return Ok(()),
        },
        Err(e) => return Err(e),
    };
    // Hang out until those listeners get sick of listening.
    info!("Intiface CLI Setup finished, running server tasks until all joined.");
    let frontend_sender_clone = frontend_sender.clone();
    if connector_opts.stay_open {
        // "Stay open" mode: rebuild the connector and keep serving after
        // each client session ends, until exit is requested.
        let core_server = match connector_opts.server_builder.finish() {
            Ok(server) => server,
            Err(e) => {
                process_token.cancel();
                error!("Error starting server: {:?}", e);
                if let Some(sender) = &frontend_sender_clone {
                    sender
                        .send(Msg::ProcessError(ProcessError {
                            message: format!("Process Error: {:?}", e).to_owned(),
                        }))
                        .await;
                }
                return Err(IntifaceCLIErrorEnum::ButtplugError(e));
            }
        };
        let server = ButtplugRemoteServer::new(core_server);
        options::setup_server_device_comm_managers(&server);
        info!("Starting new stay open loop");
        loop {
            // Fresh cancellation token per client session; cancelled below
            // once the session ends so the event receiver task shuts down.
            let token = CancellationToken::new();
            let mut exit_requested = false;
            let child_token = token.child_token();
            let event_receiver = server.event_stream();
            let fscc = frontend_sender_clone.clone();
            tokio::spawn(async move {
                info!("Spawning server event receiver");
                server_event_receiver(event_receiver, fscc, child_token).await;
                info!("Shutting down server event receiver");
            });
            info!("Creating new stay open connector");
            let transport = ButtplugWebsocketServerTransportBuilder::default()
                .port(connector_opts.ws_insecure_port.unwrap())
                .listen_on_all_interfaces(connector_opts.ws_listen_on_all_interfaces)
                .finish();
            let connector = ButtplugRemoteServerConnector::<
                ButtplugWebsocketServerTransport,
                ButtplugServerJSONSerializer,
            >::new(transport);
            info!("Starting server");
            // Race the server session against ctrl-c and owner-requested exit.
            select! {
                _ = ctrl_c().fuse() => {
                    info!("Control-c hit, exiting.");
                    exit_requested = true;
                }
                _ = process_token.cancelled().fuse() => {
                    info!("Owner requested process exit, exiting.");
                    exit_requested = true;
                }
                result = server.start(connector).fuse() => {
                    match result {
                        Ok(_) => info!("Connection dropped, restarting stay open loop."),
                        Err(e) => {
                            error!("{}", format!("Process Error: {:?}", e));
                            if let Some(sender) = &frontend_sender_clone {
                                sender
                                    .send(Msg::ProcessError(ProcessError { message: format!("Process Error: {:?}", e).to_owned() }))
                                    .await;
                            }
                            exit_requested = true;
                        }
                    }
                }
            };
            token.cancel();
            if let Some(sender) = &frontend_sender_clone {
                sender
                    .send(Msg::ClientDisconnected(ClientDisconnected::default()))
                    .await;
            }
            if exit_requested {
                info!("Breaking out of event loop in order to exit");
                if let Some(sender) = &frontend_sender {
                    // If the ProcessEnded message is sent too soon after client disconnected, electron has a
                    // tendency to miss it completely. This sucks.
                    sleep(Duration::from_millis(100)).await;
                    sender
                        .send(Msg::ProcessEnded(ProcessEnded::default()))
                        .await;
                }
                break;
            }
            info!("Server connection dropped, restarting");
        }
    } else {
        // Single-session mode: serve one client connection, then exit.
        let core_server = match connector_opts.server_builder.finish() {
            Ok(server) => server,
            Err(e) => {
                process_token.cancel();
                error!("Error starting server: {:?}", e);
                if let Some(sender) = &frontend_sender_clone {
                    sender
                        .send(Msg::ProcessError(ProcessError {
                            message: format!("Process Error: {:?}", e).to_owned(),
                        }))
                        .await;
                }
                return Err(IntifaceCLIErrorEnum::ButtplugError(e));
            }
        };
        let server = ButtplugRemoteServer::new(core_server);
        let event_receiver = server.event_stream();
        let fscc = frontend_sender_clone.clone();
        let token = CancellationToken::new();
        let child_token = token.child_token();
        tokio::spawn(async move {
            server_event_receiver(event_receiver, fscc, child_token).await;
        });
        options::setup_server_device_comm_managers(&server);
        let transport = ButtplugWebsocketServerTransportBuilder::default()
            .port(connector_opts.ws_insecure_port.unwrap())
            .listen_on_all_interfaces(connector_opts.ws_listen_on_all_interfaces)
            .finish();
        let connector = ButtplugRemoteServerConnector::<
            ButtplugWebsocketServerTransport,
            ButtplugServerJSONSerializer,
        >::new(transport);
        select! {
            _ = ctrl_c().fuse() => {
                info!("Control-c hit, exiting.");
            }
            _ = process_token.cancelled().fuse() => {
                info!("Owner requested process exit, exiting.");
            }
            result = server.start(connector).fuse() => {
                match result {
                    Ok(_) => info!("Connection dropped, restarting stay open loop."),
                    Err(e) => {
                        error!("{}", format!("Process Error: {:?}", e));
                        if let Some(sender) = &frontend_sender_clone {
                            sender
                                .send(Msg::ProcessError(ProcessError { message: format!("Process Error: {:?}", e).to_owned() }))
                                .await;
                        }
                    }
                }
            }
        };
        token.cancel();
        if let Some(sender) = &frontend_sender_clone {
            sender
                .send(Msg::ClientDisconnected(ClientDisconnected::default()))
                .await;
        }
    }
    info!("Exiting");
    Ok(())
}
|
use super::ParsingError;
use super::syntax_definition::*;
use super::scope::*;
#[cfg(feature = "metadata")]
use super::metadata::{LoadMetadata, Metadata, RawMetadataEntry};
#[cfg(feature = "yaml-load")]
use super::super::LoadingError;
use std::collections::{HashMap, HashSet, BTreeSet};
use std::path::Path;
use std::io::{self, BufRead, BufReader};
use std::fs::File;
use std::mem;
use super::regex::Regex;
use crate::parsing::syntax_definition::ContextId;
use once_cell::sync::OnceCell;
use serde::{Deserialize, Serialize};
/// A syntax set holds multiple syntaxes that have been linked together.
///
/// Use a [`SyntaxSetBuilder`] to load syntax definitions and build a syntax set.
///
/// After building, the syntax set is immutable and can no longer be modified, but you can convert
/// it back into a builder by using the [`into_builder`] method.
///
/// [`SyntaxSetBuilder`]: struct.SyntaxSetBuilder.html
/// [`into_builder`]: #method.into_builder
#[derive(Debug, Serialize, Deserialize)]
pub struct SyntaxSet {
    syntaxes: Vec<SyntaxReference>,
    /// Stores the syntax index for every path that was loaded
    path_syntaxes: Vec<(String, usize)>,
    /// Lazily-built cache for `find_syntax_by_first_line`; rebuilt on demand,
    /// so it is skipped during (de)serialization.
    #[serde(skip_serializing, skip_deserializing, default = "OnceCell::new")]
    first_line_cache: OnceCell<FirstLineCache>,
    /// Metadata, e.g. indent and commenting information.
    ///
    /// NOTE: if serializing, you should handle metadata manually; that is, you should serialize and
    /// deserialize it separately. See `examples/gendata.rs` for an example.
    #[cfg(feature = "metadata")]
    #[serde(skip, default)]
    pub(crate) metadata: Metadata,
}
/// A linked version of a [`SyntaxDefinition`] that is only useful as part of the
/// [`SyntaxSet`] that contains it. See docs for [`SyntaxSetBuilder::build`] for
/// more info.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SyntaxReference {
    /// Name of the syntax.
    pub name: String,
    /// File extensions matched (case-insensitively) by `find_syntax_by_extension`.
    pub file_extensions: Vec<String>,
    /// The syntax's default scope, matched by `find_syntax_by_scope`.
    pub scope: Scope,
    /// Optional regex source for matching a file's first line (shebangs, mode lines).
    pub first_line_match: Option<String>,
    /// Whether the syntax should be hidden from listings.
    /// NOTE(review): not consumed in this file — confirm semantics at use sites.
    pub hidden: bool,
    #[serde(serialize_with = "ordered_map")]
    pub variables: HashMap<String, String>,
    /// Contexts, deserialized on first use from `serialized_lazy_contexts`.
    #[serde(skip)]
    pub(crate) lazy_contexts: OnceCell<LazyContexts>,
    /// Binary dump that `lazy_contexts` is decoded from.
    pub(crate) serialized_lazy_contexts: Vec<u8>,
}
/// The lazy-loaded parts of a [`SyntaxReference`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub(crate) struct LazyContexts {
    /// Maps context name to its id within the owning syntax set.
    #[serde(serialize_with = "ordered_map")]
    pub(crate) context_ids: HashMap<String, ContextId>,
    /// The contexts themselves, indexed by `ContextId::context_index`.
    pub(crate) contexts: Vec<Context>,
}
/// A syntax set builder is used for loading syntax definitions from the file
/// system or by adding [`SyntaxDefinition`] objects.
///
/// Once all the syntaxes have been added, call [`build`] to turn the builder into
/// a [`SyntaxSet`] that can be used for parsing or highlighting.
///
/// [`SyntaxDefinition`]: syntax_definition/struct.SyntaxDefinition.html
/// [`build`]: #method.build
/// [`SyntaxSet`]: struct.SyntaxSet.html
#[derive(Clone, Default)]
pub struct SyntaxSetBuilder {
    /// Unlinked definitions added so far; linked during `build`.
    syntaxes: Vec<SyntaxDefinition>,
    /// (path, syntax index) pairs recorded as files are loaded.
    path_syntaxes: Vec<(String, usize)>,
    #[cfg(feature = "metadata")]
    raw_metadata: LoadMetadata,
    /// If this `SyntaxSetBuilder` is created with `SyntaxSet::into_builder`
    /// from a `SyntaxSet` that already had metadata, we keep that metadata,
    /// merging it with newly loaded metadata.
    #[cfg(feature = "metadata")]
    existing_metadata: Option<Metadata>,
}
/// Reads the file at `p` and parses it as a syntax definition; parse errors
/// are wrapped together with the file's display path for diagnostics.
#[cfg(feature = "yaml-load")]
fn load_syntax_file(p: &Path,
                    lines_include_newline: bool)
                    -> Result<SyntaxDefinition, LoadingError> {
    let s = std::fs::read_to_string(p)?;
    SyntaxDefinition::load_from_str(
        &s,
        lines_include_newline,
        // Use the file stem (name without extension) as the syntax's fallback name.
        p.file_stem().and_then(|x| x.to_str()),
    )
    .map_err(|e| LoadingError::ParseSyntax(e, format!("{}", p.display())))
}
impl Clone for SyntaxSet {
    /// Manual impl so the clone starts with an empty first-line cache
    /// instead of copying the cached state.
    fn clone(&self) -> SyntaxSet {
        SyntaxSet {
            syntaxes: self.syntaxes.clone(),
            path_syntaxes: self.path_syntaxes.clone(),
            // Will need to be re-initialized
            first_line_cache: OnceCell::new(),
            #[cfg(feature = "metadata")]
            metadata: self.metadata.clone(),
        }
    }
}
impl Default for SyntaxSet {
    /// An empty syntax set with no syntaxes, paths, or cached state.
    fn default() -> Self {
        SyntaxSet {
            syntaxes: Vec::new(),
            path_syntaxes: Vec::new(),
            first_line_cache: OnceCell::new(),
            #[cfg(feature = "metadata")]
            metadata: Metadata::default(),
        }
    }
}
impl SyntaxSet {
    /// Creates an empty syntax set; equivalent to `SyntaxSet::default()`.
    pub fn new() -> SyntaxSet {
        SyntaxSet::default()
    }
    /// Convenience constructor for creating a builder, then loading syntax
    /// definitions from a folder and then building the syntax set.
    ///
    /// Note that this uses `lines_include_newline` set to `false`, see the
    /// [`add_from_folder`] method docs on [`SyntaxSetBuilder`] for an explanation
    /// as to why this might not be the best.
    ///
    /// [`add_from_folder`]: struct.SyntaxSetBuilder.html#method.add_from_folder
    /// [`SyntaxSetBuilder`]: struct.SyntaxSetBuilder.html
    #[cfg(feature = "yaml-load")]
    pub fn load_from_folder<P: AsRef<Path>>(folder: P) -> Result<SyntaxSet, LoadingError> {
        let mut builder = SyntaxSetBuilder::new();
        builder.add_from_folder(folder, false)?;
        Ok(builder.build())
    }
    /// The list of syntaxes in the set
    pub fn syntaxes(&self) -> &[SyntaxReference] {
        &self.syntaxes[..]
    }
    /// Replaces the set's metadata wholesale.
    #[cfg(feature = "metadata")]
    pub fn set_metadata(&mut self, metadata: Metadata) {
        self.metadata = metadata;
    }
    /// The loaded metadata for this set.
    #[cfg(feature = "metadata")]
    pub fn metadata(&self) -> &Metadata {
        &self.metadata
    }
    /// Finds a syntax by its default scope, for example `source.regexp` finds the regex syntax.
    ///
    /// This and all similar methods below do a linear search of syntaxes, this should be fast
    /// because there aren't many syntaxes, but don't think you can call it a bajillion times per
    /// second.
    pub fn find_syntax_by_scope(&self, scope: Scope) -> Option<&SyntaxReference> {
        self.syntaxes.iter().rev().find(|&s| s.scope == scope)
    }
    /// Finds a syntax by exact name match, preferring later-added syntaxes.
    pub fn find_syntax_by_name<'a>(&'a self, name: &str) -> Option<&'a SyntaxReference> {
        self.syntaxes.iter().rev().find(|&s| name == s.name)
    }
    /// Finds a syntax whose extension list contains `extension` (case-insensitive).
    pub fn find_syntax_by_extension<'a>(&'a self, extension: &str) -> Option<&'a SyntaxReference> {
        self.syntaxes.iter().rev().find(|&s| s.file_extensions.iter().any(|e| e.eq_ignore_ascii_case(extension)))
    }
    /// Searches for a syntax first by extension and then by case-insensitive name
    ///
    /// This is useful for things like Github-flavoured-markdown code block highlighting where all
    /// you have to go on is a short token given by the user
    pub fn find_syntax_by_token<'a>(&'a self, s: &str) -> Option<&'a SyntaxReference> {
        {
            let ext_res = self.find_syntax_by_extension(s);
            if ext_res.is_some() {
                return ext_res;
            }
        }
        self.syntaxes.iter().rev().find(|&syntax| syntax.name.eq_ignore_ascii_case(s))
    }
    /// Try to find the syntax for a file based on its first line
    ///
    /// This uses regexes that come with some sublime syntax grammars for matching things like
    /// shebangs and mode lines like `-*- Mode: C -*-`
    pub fn find_syntax_by_first_line<'a>(&'a self, s: &str) -> Option<&'a SyntaxReference> {
        let cache = self.first_line_cache();
        for &(ref reg, i) in cache.regexes.iter().rev() {
            if reg.search(s, 0, s.len(), None) {
                return Some(&self.syntaxes[i]);
            }
        }
        None
    }
    /// Searches for a syntax by its original file path when it was first loaded from disk
    ///
    /// This is primarily useful for syntax tests. Some may specify a
    /// `Packages/PackageName/SyntaxName.sublime-syntax` path, and others may just have
    /// `SyntaxName.sublime-syntax`. This caters for these by matching the end of the path of the
    /// loaded syntax definition files
    // however, if a syntax name is provided without a folder, make sure we don't accidentally match the end of a different syntax definition's name - by checking a / comes before it or it is the full path
    pub fn find_syntax_by_path<'a>(&'a self, path: &str) -> Option<&'a SyntaxReference> {
        let mut slash_path = "/".to_string();
        slash_path.push_str(path);
        self.path_syntaxes.iter().rev().find(|t| t.0.ends_with(&slash_path) || t.0 == path).map(|&(_,i)| &self.syntaxes[i])
    }
    /// Convenience method that tries to find the syntax for a file path, first by extension/name
    /// and then by first line of the file if that doesn't work.
    ///
    /// May IO Error because it sometimes tries to read the first line of the file.
    ///
    /// # Examples
    ///
    /// When determining how to highlight a file, use this in combination with a fallback to plain
    /// text:
    ///
    /// ```
    /// use syntect::parsing::SyntaxSet;
    /// let ss = SyntaxSet::load_defaults_newlines();
    /// let syntax = ss.find_syntax_for_file("testdata/highlight_test.erb")
    ///     .unwrap() // for IO errors, you may want to use try!() or another plain text fallback
    ///     .unwrap_or_else(|| ss.find_syntax_plain_text());
    /// assert_eq!(syntax.name, "HTML (Rails)");
    /// ```
    pub fn find_syntax_for_file<P: AsRef<Path>>(&self,
                                                path_obj: P)
                                                -> io::Result<Option<&SyntaxReference>> {
        let path: &Path = path_obj.as_ref();
        let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        let extension = path.extension().and_then(|x| x.to_str()).unwrap_or("");
        // Try the full file name first so dotfiles like `.bashrc` can match.
        let ext_syntax = self.find_syntax_by_extension(file_name).or_else(
            || self.find_syntax_by_extension(extension));
        let line_syntax = if ext_syntax.is_none() {
            let mut line = String::new();
            let f = File::open(path)?;
            let mut line_reader = BufReader::new(&f);
            line_reader.read_line(&mut line)?;
            self.find_syntax_by_first_line(&line)
        } else {
            None
        };
        let syntax = ext_syntax.or(line_syntax);
        Ok(syntax)
    }
    /// Finds a syntax for plain text, which usually has no highlighting rules.
    ///
    /// This is good as a fallback when you can't find another syntax but you still want to use the
    /// same highlighting pipeline code.
    ///
    /// This syntax should always be present, if not this method will panic. If the way you load
    /// syntaxes doesn't create one, use [`add_plain_text_syntax`].
    ///
    /// # Examples
    /// ```
    /// use syntect::parsing::SyntaxSetBuilder;
    /// let mut builder = SyntaxSetBuilder::new();
    /// builder.add_plain_text_syntax();
    /// let ss = builder.build();
    /// let syntax = ss.find_syntax_by_token("rs").unwrap_or_else(|| ss.find_syntax_plain_text());
    /// assert_eq!(syntax.name, "Plain Text");
    /// ```
    ///
    /// [`add_plain_text_syntax`]: struct.SyntaxSetBuilder.html#method.add_plain_text_syntax
    pub fn find_syntax_plain_text(&self) -> &SyntaxReference {
        self.find_syntax_by_name("Plain Text")
            .expect("All syntax sets ought to have a plain text syntax")
    }
    /// Converts this syntax set into a builder so that more syntaxes can be
    /// added to it.
    ///
    /// Note that newly added syntaxes can have references to existing syntaxes
    /// in the set, but not the other way around.
    pub fn into_builder(self) -> SyntaxSetBuilder {
        #[cfg(feature = "metadata")]
        let SyntaxSet { syntaxes, path_syntaxes, metadata, .. } = self;
        #[cfg(not(feature = "metadata"))]
        let SyntaxSet { syntaxes, path_syntaxes, .. } = self;
        let mut context_map = HashMap::new();
        for (syntax_index, syntax) in syntaxes.iter().enumerate() {
            for (context_index, context) in syntax.contexts().iter().enumerate() {
                context_map.insert(ContextId { syntax_index, context_index }, context.clone());
            }
        }
        let mut builder_syntaxes = Vec::with_capacity(syntaxes.len());
        for syntax in syntaxes {
            let SyntaxReference {
                name,
                file_extensions,
                scope,
                first_line_match,
                hidden,
                variables,
                serialized_lazy_contexts,
                ..
            } = syntax;
            let lazy_contexts = LazyContexts::deserialize(&serialized_lazy_contexts[..]);
            let mut builder_contexts = HashMap::with_capacity(lazy_contexts.context_ids.len());
            for (name, context_id) in lazy_contexts.context_ids {
                if let Some(context) = context_map.remove(&context_id) {
                    builder_contexts.insert(name, context);
                }
            }
            let syntax_definition = SyntaxDefinition {
                name,
                file_extensions,
                scope,
                first_line_match,
                hidden,
                variables,
                contexts: builder_contexts,
            };
            builder_syntaxes.push(syntax_definition);
        }
        SyntaxSetBuilder {
            syntaxes: builder_syntaxes,
            path_syntaxes,
            #[cfg(feature = "metadata")]
            existing_metadata: Some(metadata),
            #[cfg(feature = "metadata")]
            raw_metadata: LoadMetadata::default(),
        }
    }
    /// Looks up a context by id; errors if either index is out of range.
    #[inline(always)]
    pub(crate) fn get_context(&self, context_id: &ContextId) -> Result<&Context, ParsingError> {
        // Fix: the original bound `&self.syntaxes.get(...)?`, yielding a
        // needless `&&SyntaxReference` (clippy::needless_borrow); `get`
        // already returns a reference.
        let syntax = self
            .syntaxes
            .get(context_id.syntax_index)
            .ok_or_else(|| ParsingError::MissingContext(*context_id))?;
        syntax
            .contexts()
            .get(context_id.context_index)
            .ok_or_else(|| ParsingError::MissingContext(*context_id))
    }
    /// Builds (once) and returns the cache of first-line regexes.
    fn first_line_cache(&self) -> &FirstLineCache {
        self.first_line_cache
            .get_or_init(|| FirstLineCache::new(self.syntaxes()))
    }
    /// Reports every context reference in the set that was never linked to a
    /// concrete context, as human-readable strings.
    pub fn find_unlinked_contexts(&self) -> BTreeSet<String> {
        let SyntaxSet { syntaxes, .. } = self;
        let mut unlinked_contexts = BTreeSet::new();
        for syntax in syntaxes {
            let SyntaxReference {
                name,
                scope,
                ..
            } = syntax;
            for context in syntax.contexts() {
                Self::find_unlinked_contexts_in_context(name, scope, context, &mut unlinked_contexts);
            }
        }
        unlinked_contexts
    }
    /// Records a diagnostic for every push/set reference in `context` that is
    /// not a resolved (`Direct`) reference.
    fn find_unlinked_contexts_in_context(
        name: &str,
        scope: &Scope,
        context: &Context,
        unlinked_contexts: &mut BTreeSet<String>,
    ) {
        for pattern in context.patterns.iter() {
            let maybe_refs_to_check = match pattern {
                Pattern::Match(match_pat) => match &match_pat.operation {
                    MatchOperation::Push(context_refs) => Some(context_refs),
                    MatchOperation::Set(context_refs) => Some(context_refs),
                    _ => None,
                },
                _ => None,
            };
            for context_ref in maybe_refs_to_check.into_iter().flatten() {
                match context_ref {
                    ContextReference::Direct(_) => {}
                    _ => {
                        unlinked_contexts.insert(format!(
                            "Syntax '{}' with scope '{}' has unresolved context reference {:?}",
                            name, scope, &context_ref
                        ));
                    }
                }
            }
        }
    }
}
impl SyntaxReference {
    /// Map from context name to id, deserializing the contexts on first use.
    pub(crate) fn context_ids(&self) -> &HashMap<String, ContextId> {
        &self.lazy_contexts().context_ids
    }
    /// All contexts of this syntax, deserializing them on first use.
    fn contexts(&self) -> &[Context] {
        &self.lazy_contexts().contexts
    }
    /// Decodes and caches the contexts from their serialized binary form;
    /// subsequent calls return the cached value.
    fn lazy_contexts(&self) -> &LazyContexts {
        self.lazy_contexts
            .get_or_init(|| LazyContexts::deserialize(&self.serialized_lazy_contexts[..]))
    }
}
impl LazyContexts {
    /// Decodes a `LazyContexts` from its binary dump; panics if the data is
    /// corrupt or was produced by an incompatible version of the code.
    fn deserialize(data: &[u8]) -> LazyContexts {
        crate::dumps::from_reader(data).expect("data is not corrupt or out of sync with the code")
    }
}
impl SyntaxSetBuilder {
/// Creates a new, empty builder; equivalent to `SyntaxSetBuilder::default()`.
pub fn new() -> SyntaxSetBuilder {
    SyntaxSetBuilder::default()
}
/// Add a syntax to the set.
///
/// The definition is not linked until [`build`](#method.build) is called.
pub fn add(&mut self, syntax: SyntaxDefinition) {
    self.syntaxes.push(syntax);
}
/// The list of syntaxes added so far.
pub fn syntaxes(&self) -> &[SyntaxDefinition] {
    &self.syntaxes[..]
}
/// A rarely useful method that loads in a syntax with no highlighting rules for plain text
///
/// Exists mainly for adding the plain text syntax to syntax set dumps, because for some reason
/// the default Sublime plain text syntax is still in `.tmLanguage` format.
#[cfg(feature = "yaml-load")]
pub fn add_plain_text_syntax(&mut self) {
    // Minimal inline YAML definition: one empty `main` context, no rules.
    let s = "---\nname: Plain Text\nfile_extensions: [txt]\nscope: text.plain\ncontexts: \
             {main: []}";
    let syn = SyntaxDefinition::load_from_str(s, false, None).unwrap();
    self.syntaxes.push(syn);
}
/// Loads all the `.sublime-syntax` files in a folder into this builder.
///
/// The `lines_include_newline` parameter is used to work around the fact that Sublime Text
/// normally passes line strings including newline characters (`\n`) to its regex engine. This
/// results in many syntaxes having regexes matching `\n`, which doesn't work if you don't pass
/// in newlines. It is recommended that if you can you pass in lines with newlines if you can
/// and pass `true` for this parameter. If that is inconvenient pass `false` and the loader
/// will do some hacky find and replaces on the match regexes that seem to work for the default
/// syntax set, but may not work for any other syntaxes.
///
/// In the future I might include a "slow mode" that copies the lines passed in and appends a
/// newline if there isn't one, but in the interest of performance currently this hacky fix will
/// have to do.
#[cfg(feature = "yaml-load")]
pub fn add_from_folder<P: AsRef<Path>>(
    &mut self,
    folder: P,
    lines_include_newline: bool
) -> Result<(), LoadingError> {
    // Sort entries by file name so loading order (and thus precedence) is deterministic.
    for entry in crate::utils::walk_dir(folder).sort_by(|a, b| a.file_name().cmp(b.file_name())) {
        let entry = entry.map_err(LoadingError::WalkDir)?;
        if entry.path().extension().map_or(false, |e| e == "sublime-syntax") {
            let syntax = load_syntax_file(entry.path(), lines_include_newline)?;
            if let Some(path_str) = entry.path().to_str() {
                // Split the path up and rejoin with slashes so that syntaxes loaded on Windows
                // can still be loaded the same way.
                let path = Path::new(path_str);
                let path_parts: Vec<_> = path.iter().map(|c| c.to_str().unwrap()).collect();
                self.path_syntaxes.push((path_parts.join("/").to_string(), self.syntaxes.len()));
            }
            self.syntaxes.push(syntax);
        }
        #[cfg(feature = "metadata")]
        {
            // Metadata (indentation/commenting info) lives in tmPreferences files;
            // load failures are deliberately ignored (best-effort).
            if entry.path().extension() == Some("tmPreferences".as_ref()) {
                match RawMetadataEntry::load(entry.path()) {
                    Ok(meta) => self.raw_metadata.add_raw(meta),
                    Err(_err) => (),
                }
            }
        }
    }
    Ok(())
}
/// Build a [`SyntaxSet`] from the syntaxes that have been added to this
/// builder.
///
/// ### Linking
///
/// The contexts in syntaxes can reference other contexts in the same syntax
/// or even other syntaxes. For example, a HTML syntax can reference a CSS
/// syntax so that CSS blocks in HTML work as expected.
///
/// Those references work in various ways and involve one or two lookups.
/// To avoid having to do these lookups during parsing/highlighting, the
/// references are changed to directly reference contexts via index. That's
/// called linking.
///
/// Linking is done in this build step. So in order to get the best
/// performance, you should try to avoid calling this too much. Ideally,
/// create a [`SyntaxSet`] once and then use it many times. If you can,
/// serialize a [`SyntaxSet`] for your program and when you run the program,
/// directly load the [`SyntaxSet`].
///
/// [`SyntaxSet`]: struct.SyntaxSet.html
pub fn build(self) -> SyntaxSet {
    #[cfg(not(feature = "metadata"))]
    let SyntaxSetBuilder { syntaxes: syntax_definitions, path_syntaxes } = self;
    #[cfg(feature = "metadata")]
    let SyntaxSetBuilder {
        syntaxes: syntax_definitions,
        path_syntaxes,
        raw_metadata,
        existing_metadata,
    } = self;
    let mut syntaxes = Vec::with_capacity(syntax_definitions.len());
    let mut all_context_ids = Vec::new();
    let mut all_contexts = vec![Vec::new(); syntax_definitions.len()];
    // Phase 1: flatten each definition's contexts into per-syntax vectors and
    // assign every context a (syntax_index, context_index) id.
    for (syntax_index, syntax_definition) in syntax_definitions.into_iter().enumerate() {
        let SyntaxDefinition {
            name,
            file_extensions,
            scope,
            first_line_match,
            hidden,
            variables,
            contexts,
        } = syntax_definition;
        let mut context_ids = HashMap::new();
        let mut contexts: Vec<(String, Context)> = contexts.into_iter().collect();
        // Sort the values of the HashMap so that the contexts in the
        // resulting SyntaxSet have a deterministic order for serializing.
        // Because we're sorting by the keys which are unique, we can use
        // an unstable sort.
        contexts.sort_unstable_by(|(name_a, _), (name_b, _)| name_a.cmp(name_b));
        for (name, context) in contexts {
            let context_index = all_contexts[syntax_index].len();
            context_ids.insert(name, ContextId { syntax_index, context_index });
            all_contexts[syntax_index].push(context);
        }
        let syntax = SyntaxReference {
            name,
            file_extensions,
            scope,
            first_line_match,
            hidden,
            variables,
            lazy_contexts: OnceCell::new(),
            serialized_lazy_contexts: Vec::new(), // initialized in the last step
        };
        syntaxes.push(syntax);
        all_context_ids.push(context_ids);
    }
    // Phase 2: resolve references to direct ids and apply prototype contexts.
    let mut found_more_backref_includes = true;
    for (syntax_index, syntax) in syntaxes.iter().enumerate() {
        let mut no_prototype = HashSet::new();
        let prototype = all_context_ids[syntax_index].get("prototype");
        if let Some(prototype_id) = prototype {
            // TODO: We could do this after parsing YAML, instead of here?
            Self::recursively_mark_no_prototype(syntax, prototype_id, &all_context_ids[syntax_index], &all_contexts, &mut no_prototype);
        }
        for context_id in all_context_ids[syntax_index].values() {
            let context = &mut all_contexts[context_id.syntax_index][context_id.context_index];
            if let Some(prototype_id) = prototype {
                if context.meta_include_prototype && !no_prototype.contains(context_id) {
                    context.prototype = Some(*prototype_id);
                }
            }
            Self::link_context(context, syntax_index, &all_context_ids, &syntaxes);
            if context.uses_backrefs {
                found_more_backref_includes = true;
            }
        }
    }
    // We need to recursively mark contexts that include contexts which
    // use backreferences as using backreferences. In theory we could use
    // a more efficient method here like doing a toposort or constructing
    // a representation with reversed edges and then tracing in the
    // opposite direction, but I benchmarked this and it adds <2% to link
    // time on the default syntax set, and linking doesn't even happen
    // when loading from a binary dump.
    while found_more_backref_includes {
        found_more_backref_includes = false;
        // find any contexts which include a context which uses backrefs
        // and mark those as using backrefs - to support nested includes
        for syntax_index in 0..syntaxes.len() {
            for context_index in 0..all_contexts[syntax_index].len() {
                let context = &all_contexts[syntax_index][context_index];
                if !context.uses_backrefs && context.patterns.iter().any(|pattern| {
                    matches!(pattern, Pattern::Include(ContextReference::Direct(id)) if all_contexts[id.syntax_index][id.context_index].uses_backrefs)
                }) {
                    let context = &mut all_contexts[syntax_index][context_index];
                    context.uses_backrefs = true;
                    // look for contexts including this context
                    found_more_backref_includes = true;
                }
            }
        }
    }
    #[cfg(feature = "metadata")]
    let metadata = match existing_metadata {
        Some(existing) => existing.merged_with_raw(raw_metadata),
        None => raw_metadata.into(),
    };
    // The combination of
    // * the algorithms above
    // * the borrow checker
    // makes it necessary to set these up as the last step.
    for syntax in &mut syntaxes {
        // remove(0) keeps the id/context vectors aligned with the syntax order.
        let lazy_contexts = LazyContexts {
            context_ids: all_context_ids.remove(0),
            contexts: all_contexts.remove(0),
        };
        syntax.serialized_lazy_contexts = crate::dumps::dump_binary(&lazy_contexts);
    };
    SyntaxSet {
        syntaxes,
        path_syntaxes,
        first_line_cache: OnceCell::new(),
        #[cfg(feature = "metadata")]
        metadata,
    }
}
/// Anything recursively included by the prototype shouldn't include the prototype.
/// This marks them as such.
fn recursively_mark_no_prototype(
    syntax: &SyntaxReference,
    context_id: &ContextId,
    syntax_context_ids: &HashMap<String, ContextId>,
    all_contexts: &[Vec<Context>],
    no_prototype: &mut HashSet<ContextId>,
) {
    // `insert` returning false means we already visited this context;
    // stop here so reference cycles cannot cause infinite recursion.
    let first_time = no_prototype.insert(*context_id);
    if !first_time {
        return;
    }
    for pattern in &all_contexts[context_id.syntax_index][context_id.context_index].patterns {
        match *pattern {
            // Apparently inline blocks also don't include the prototype when within the prototype.
            // This is really weird, but necessary to run the YAML syntax.
            Pattern::Match(ref match_pat) => {
                let maybe_context_refs = match match_pat.operation {
                    MatchOperation::Push(ref context_refs) |
                    MatchOperation::Set(ref context_refs) => Some(context_refs),
                    MatchOperation::Pop | MatchOperation::None => None,
                };
                if let Some(context_refs) = maybe_context_refs {
                    for context_ref in context_refs.iter() {
                        match context_ref {
                            ContextReference::Inline(ref s) | ContextReference::Named(ref s) => {
                                if let Some(i) = syntax_context_ids.get(s) {
                                    Self::recursively_mark_no_prototype(syntax, i, syntax_context_ids, all_contexts, no_prototype);
                                }
                            },
                            ContextReference::Direct(ref id) => {
                                Self::recursively_mark_no_prototype(syntax, id, syntax_context_ids, all_contexts, no_prototype);
                            },
                            // Other reference kinds (e.g. cross-syntax) are not followed here.
                            _ => (),
                        }
                    }
                }
            }
            Pattern::Include(ref reference) => {
                match reference {
                    ContextReference::Named(ref s) => {
                        if let Some(id) = syntax_context_ids.get(s) {
                            Self::recursively_mark_no_prototype(syntax, id, syntax_context_ids, all_contexts, no_prototype);
                        }
                    },
                    ContextReference::Direct(ref id) => {
                        Self::recursively_mark_no_prototype(syntax, id, syntax_context_ids, all_contexts, no_prototype);
                    },
                    _ => (),
                }
            }
        }
    }
}
/// Resolves every context reference contained in `context`'s patterns
/// (both `Include` patterns and the references carried by `Match` patterns)
/// against the id tables built during linking.
fn link_context(
    context: &mut Context,
    syntax_index: usize,
    all_context_ids: &[HashMap<String, ContextId>],
    syntaxes: &[SyntaxReference],
) {
    for pattern in context.patterns.iter_mut() {
        match pattern {
            Pattern::Match(match_pat) => {
                Self::link_match_pat(match_pat, syntax_index, all_context_ids, syntaxes)
            }
            Pattern::Include(context_ref) => {
                Self::link_ref(context_ref, syntax_index, all_context_ids, syntaxes)
            }
        }
    }
}
/// Resolves a single `ContextReference` in place: when a target context can
/// be found, the reference is replaced with a `Direct` reference to it.
/// Unresolvable references are left untouched.
fn link_ref(
    context_ref: &mut ContextReference,
    syntax_index: usize,
    all_context_ids: &[HashMap<String, ContextId>],
    syntaxes: &[SyntaxReference],
) {
    // println!("{:?}", context_ref);
    use super::syntax_definition::ContextReference::*;
    let linked_context_id = match *context_ref {
        Named(ref s) | Inline(ref s) => {
            // This isn't actually correct, but it is better than nothing/crashing.
            // This is being phased out anyhow, see https://github.com/sublimehq/Packages/issues/73
            // Fixes issue #30
            if s == "$top_level_main" {
                all_context_ids[syntax_index].get("main")
            } else {
                all_context_ids[syntax_index].get(s)
            }
        }
        ByScope { scope, ref sub_context, with_escape } => {
            // Match against the most-recently-added syntax with that scope;
            // fall back to "Plain Text" for `embed`s whose target is missing.
            Self::with_plain_text_fallback(all_context_ids, syntaxes, with_escape, Self::find_id(sub_context, all_context_ids, syntaxes, |index_and_syntax| {
                index_and_syntax.1.scope == scope
            }))
        }
        File { ref name, ref sub_context, with_escape } => {
            Self::with_plain_text_fallback(all_context_ids, syntaxes, with_escape, Self::find_id(sub_context, all_context_ids, syntaxes, |index_and_syntax| {
                &index_and_syntax.1.name == name
            }))
        }
        // Already resolved; nothing to do.
        Direct(_) => None,
    };
    if let Some(context_id) = linked_context_id {
        // Swap the resolved Direct reference into place.
        let mut new_ref = Direct(*context_id);
        mem::swap(context_ref, &mut new_ref);
    }
}
/// Returns `context_id` when it resolved; otherwise, for `embed`s
/// (`with_escape == true`), falls back to the "Plain Text" syntax.
fn with_plain_text_fallback<'a>(
    all_context_ids: &'a [HashMap<String, ContextId>],
    syntaxes: &'a [SyntaxReference],
    with_escape: bool,
    context_id: Option<&'a ContextId>,
) -> Option<&'a ContextId> {
    if context_id.is_some() || !with_escape {
        return context_id;
    }
    // If we keep this reference unresolved, syntect will crash when it
    // encounters the reference. Rather than crashing, we instead fall back
    // to "Plain Text". This seems to be how Sublime Text behaves. It should
    // be a safe thing to do since `embed`s always includes an `escape` to
    // get out of the `embed`.
    Self::find_id(
        &None,
        all_context_ids,
        syntaxes,
        |index_and_syntax| index_and_syntax.1.name == "Plain Text",
    )
}
/// Finds the id of the context named `sub_context` (default: "main") in the
/// last-added syntax that satisfies `predicate`, mirroring Sublime Text's
/// rule that later-added syntaxes override earlier ones.
fn find_id<'a>(
    sub_context: &Option<String>,
    all_context_ids: &'a [HashMap<String, ContextId>],
    syntaxes: &'a [SyntaxReference],
    predicate: impl FnMut(&(usize, &SyntaxReference)) -> bool,
) -> Option<&'a ContextId> {
    let context_name = sub_context.as_deref().unwrap_or("main");
    let mut predicate = predicate;
    // Scan syntaxes back-to-front so the most recently added match wins.
    for index_and_syntax in syntaxes.iter().enumerate().rev() {
        if predicate(&index_and_syntax) {
            return all_context_ids[index_and_syntax.0].get(context_name);
        }
    }
    None
}
/// Resolves every context reference a `MatchPattern` can carry: the targets
/// of a Push/Set operation and an optional `with_prototype`.
fn link_match_pat(
    match_pat: &mut MatchPattern,
    syntax_index: usize,
    all_context_ids: &[HashMap<String, ContextId>],
    syntaxes: &[SyntaxReference],
) {
    // Only Push and Set carry target references; Pop and None have nothing to link.
    match match_pat.operation {
        MatchOperation::Push(ref mut context_refs)
        | MatchOperation::Set(ref mut context_refs) => {
            for context_ref in context_refs.iter_mut() {
                Self::link_ref(context_ref, syntax_index, all_context_ids, syntaxes);
            }
        }
        MatchOperation::Pop | MatchOperation::None => {}
    }
    // A `with_prototype` is one more reference that must be resolved as well.
    if let Some(context_ref) = match_pat.with_prototype.as_mut() {
        Self::link_ref(context_ref, syntax_index, all_context_ids, syntaxes);
    }
}
}
/// Caches the compiled `first_line_match` regexes of a syntax set so
/// first-line syntax detection doesn't recompile them on every lookup.
#[derive(Debug)]
struct FirstLineCache {
    /// (first line regex, syntax index) pairs for all syntaxes with a first line regex
    regexes: Vec<(Regex, usize)>,
}
impl FirstLineCache {
fn new(syntaxes: &[SyntaxReference]) -> FirstLineCache {
let mut regexes = Vec::new();
for (i, syntax) in syntaxes.iter().enumerate() {
if let Some(ref reg_str) = syntax.first_line_match {
let reg = Regex::new(reg_str.into());
regexes.push((reg, i));
}
}
FirstLineCache {
regexes,
}
}
}
#[cfg(feature = "yaml-load")]
#[cfg(test)]
mod tests {
use super::*;
use crate::parsing::{ParseState, Scope, syntax_definition};
use std::collections::HashMap;
#[test]
fn can_load() {
let mut builder = SyntaxSetBuilder::new();
builder.add_from_folder("testdata/Packages", false).unwrap();
let cmake_dummy_syntax = SyntaxDefinition {
name: "CMake".to_string(),
file_extensions: vec!["CMakeLists.txt".to_string(), "cmake".to_string()],
scope: Scope::new("source.cmake").unwrap(),
first_line_match: None,
hidden: false,
variables: HashMap::new(),
contexts: HashMap::new(),
};
builder.add(cmake_dummy_syntax);
builder.add_plain_text_syntax();
let ps = builder.build();
assert_eq!(&ps.find_syntax_by_first_line("#!/usr/bin/env node").unwrap().name,
"JavaScript");
let rails_scope = Scope::new("source.ruby.rails").unwrap();
let syntax = ps.find_syntax_by_name("Ruby on Rails").unwrap();
ps.find_syntax_plain_text();
assert_eq!(&ps.find_syntax_by_extension("rake").unwrap().name, "Ruby");
assert_eq!(&ps.find_syntax_by_extension("RAKE").unwrap().name, "Ruby");
assert_eq!(&ps.find_syntax_by_token("ruby").unwrap().name, "Ruby");
assert_eq!(&ps.find_syntax_by_first_line("lol -*- Mode: C -*- such line").unwrap().name,
"C");
assert_eq!(&ps.find_syntax_for_file("testdata/parser.rs").unwrap().unwrap().name,
"Rust");
assert_eq!(&ps.find_syntax_for_file("testdata/test_first_line.test")
.expect("Error finding syntax for file")
.expect("No syntax found for file")
.name,
"Ruby");
assert_eq!(&ps.find_syntax_for_file(".bashrc").unwrap().unwrap().name,
"Bourne Again Shell (bash)");
assert_eq!(&ps.find_syntax_for_file("CMakeLists.txt").unwrap().unwrap().name,
"CMake");
assert_eq!(&ps.find_syntax_for_file("test.cmake").unwrap().unwrap().name,
"CMake");
assert_eq!(&ps.find_syntax_for_file("Rakefile").unwrap().unwrap().name, "Ruby");
assert!(&ps.find_syntax_by_first_line("derp derp hi lol").is_none());
assert_eq!(&ps.find_syntax_by_path("Packages/Rust/Rust.sublime-syntax").unwrap().name,
"Rust");
// println!("{:#?}", syntax);
assert_eq!(syntax.scope, rails_scope);
// unreachable!();
let main_context = ps.get_context(&syntax.context_ids()["main"]).expect("#[cfg(test)]");
let count = syntax_definition::context_iter(&ps, main_context).count();
assert_eq!(count, 109);
}
#[test]
fn can_clone() {
let cloned_syntax_set = {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.add(syntax_b());
let syntax_set_original = builder.build();
#[allow(clippy::redundant_clone)] // We want to test .clone()
syntax_set_original.clone()
// Note: The original syntax set is dropped
};
let syntax = cloned_syntax_set.find_syntax_by_extension("a").unwrap();
let mut parse_state = ParseState::new(syntax);
let ops = parse_state.parse_line("a go_b b", &cloned_syntax_set).expect("#[cfg(test)]");
let expected = (7, ScopeStackOp::Push(Scope::new("b").unwrap()));
assert_ops_contain(&ops, &expected);
}
#[test]
fn can_list_added_syntaxes() {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.add(syntax_b());
let syntaxes = builder.syntaxes();
assert_eq!(syntaxes.len(), 2);
assert_eq!(syntaxes[0].name, "A");
assert_eq!(syntaxes[1].name, "B");
}
#[test]
fn can_add_more_syntaxes_with_builder() {
let syntax_set_original = {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.add(syntax_b());
builder.build()
};
let mut builder = syntax_set_original.into_builder();
let syntax_c = SyntaxDefinition::load_from_str(r#"
name: C
scope: source.c
file_extensions: [c]
contexts:
main:
- match: 'c'
scope: c
- match: 'go_a'
push: scope:source.a#main
"#, true, None).unwrap();
builder.add(syntax_c);
let syntax_set = builder.build();
let syntax = syntax_set.find_syntax_by_extension("c").unwrap();
let mut parse_state = ParseState::new(syntax);
let ops = parse_state.parse_line("c go_a a go_b b", &syntax_set).expect("#[cfg(test)]");
let expected = (14, ScopeStackOp::Push(Scope::new("b").unwrap()));
assert_ops_contain(&ops, &expected);
}
#[test]
fn falls_back_to_plain_text_when_embedded_scope_is_missing() {
test_plain_text_fallback(r#"
name: Z
scope: source.z
file_extensions: [z]
contexts:
main:
- match: 'z'
scope: z
- match: 'go_x'
embed: scope:does.not.exist
escape: 'leave_x'
"#);
}
#[test]
fn falls_back_to_plain_text_when_embedded_file_is_missing() {
test_plain_text_fallback(r#"
name: Z
scope: source.z
file_extensions: [z]
contexts:
main:
- match: 'z'
scope: z
- match: 'go_x'
embed: DoesNotExist.sublime-syntax
escape: 'leave_x'
"#);
}
fn test_plain_text_fallback(syntax_definition: &str) {
let syntax =
SyntaxDefinition::load_from_str(syntax_definition, true, None).unwrap();
let mut builder = SyntaxSetBuilder::new();
builder.add_plain_text_syntax();
builder.add(syntax);
let syntax_set = builder.build();
let syntax = syntax_set.find_syntax_by_extension("z").unwrap();
let mut parse_state = ParseState::new(syntax);
let ops = parse_state.parse_line("z go_x x leave_x z", &syntax_set).unwrap();
let expected_ops = vec![
(0, ScopeStackOp::Push(Scope::new("source.z").unwrap())),
(0, ScopeStackOp::Push(Scope::new("z").unwrap())),
(1, ScopeStackOp::Pop(1)),
(6, ScopeStackOp::Push(Scope::new("text.plain").unwrap())),
(9, ScopeStackOp::Pop(1)),
(17, ScopeStackOp::Push(Scope::new("z").unwrap())),
(18, ScopeStackOp::Pop(1)),
];
assert_eq!(ops, expected_ops);
}
#[test]
fn can_find_unlinked_contexts() {
let syntax_set = {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.add(syntax_b());
builder.build()
};
let unlinked_contexts = syntax_set.find_unlinked_contexts();
assert_eq!(unlinked_contexts.len(), 0);
let syntax_set = {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.build()
};
let unlinked_contexts : Vec<String> = syntax_set.find_unlinked_contexts().into_iter().collect();
assert_eq!(unlinked_contexts.len(), 1);
assert_eq!(unlinked_contexts[0], "Syntax 'A' with scope 'source.a' has unresolved context reference ByScope { scope: <source.b>, sub_context: Some(\"main\"), with_escape: false }");
}
#[test]
fn can_use_in_multiple_threads() {
use rayon::prelude::*;
let syntax_set = {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.add(syntax_b());
builder.build()
};
let lines = vec![
"a a a",
"a go_b b",
"go_b b",
"go_b b b",
];
let results: Vec<Vec<(usize, ScopeStackOp)>> = lines
.par_iter()
.map(|line| {
let syntax = syntax_set.find_syntax_by_extension("a").unwrap();
let mut parse_state = ParseState::new(syntax);
parse_state.parse_line(line, &syntax_set).expect("#[cfg(test)]")
})
.collect();
assert_ops_contain(&results[0], &(4, ScopeStackOp::Push(Scope::new("a").unwrap())));
assert_ops_contain(&results[1], &(7, ScopeStackOp::Push(Scope::new("b").unwrap())));
assert_ops_contain(&results[2], &(5, ScopeStackOp::Push(Scope::new("b").unwrap())));
assert_ops_contain(&results[3], &(8, ScopeStackOp::Push(Scope::new("b").unwrap())));
}
#[test]
fn is_sync() {
check_sync::<SyntaxSet>();
}
#[test]
fn is_send() {
check_send::<SyntaxSet>();
}
#[test]
fn can_override_syntaxes() {
let syntax_set = {
let mut builder = SyntaxSetBuilder::new();
builder.add(syntax_a());
builder.add(syntax_b());
let syntax_a2 = SyntaxDefinition::load_from_str(r#"
name: A improved
scope: source.a
file_extensions: [a]
first_line_match: syntax\s+a
contexts:
main:
- match: a
scope: a2
- match: go_b
push: scope:source.b#main
"#, true, None).unwrap();
builder.add(syntax_a2);
let syntax_c = SyntaxDefinition::load_from_str(r#"
name: C
scope: source.c
file_extensions: [c]
first_line_match: syntax\s+.*
contexts:
main:
- match: c
scope: c
- match: go_a
push: scope:source.a#main
"#, true, None).unwrap();
builder.add(syntax_c);
builder.build()
};
let mut syntax = syntax_set.find_syntax_by_extension("a").unwrap();
assert_eq!(syntax.name, "A improved");
syntax = syntax_set.find_syntax_by_scope(Scope::new("source.a").unwrap()).unwrap();
assert_eq!(syntax.name, "A improved");
syntax = syntax_set.find_syntax_by_first_line("syntax a").unwrap();
assert_eq!(syntax.name, "C");
let mut parse_state = ParseState::new(syntax);
let ops = parse_state.parse_line("c go_a a", &syntax_set).expect("msg");
let expected = (7, ScopeStackOp::Push(Scope::new("a2").unwrap()));
assert_ops_contain(&ops, &expected);
}
#[test]
fn can_parse_issue219() {
// Go to builder and back after loading so that build() gets Direct references instead of
// Named ones. The bug was that Direct references were not handled when marking as
// "no prototype", so prototype contexts accidentally had the prototype set, which made
// the parser loop forever.
let syntax_set = SyntaxSet::load_defaults_newlines().into_builder().build();
let syntax = syntax_set.find_syntax_by_extension("yaml").unwrap();
let mut parse_state = ParseState::new(syntax);
let ops = parse_state.parse_line("# test\n", &syntax_set).expect("#[cfg(test)]");
let expected = (0, ScopeStackOp::Push(Scope::new("comment.line.number-sign.yaml").unwrap()));
assert_ops_contain(&ops, &expected);
}
#[test]
fn no_prototype_for_contexts_included_from_prototype() {
let mut builder = SyntaxSetBuilder::new();
let syntax = SyntaxDefinition::load_from_str(r#"
name: Test Prototype
scope: source.test
file_extensions: [test]
contexts:
prototype:
- include: included_from_prototype
main:
- match: main
- match: other
push: other
other:
- match: o
included_from_prototype:
- match: p
scope: p
"#, true, None).unwrap();
builder.add(syntax);
let ss = builder.build();
// "main" and "other" should have context set, "prototype" and "included_from_prototype"
// must not have a prototype set.
assert_prototype_only_on(&["main", "other"], &ss, &ss.syntaxes()[0]);
// Building again should have the same result. The difference is that after the first
// build(), the references have been replaced with Direct references, so the code needs to
// handle that correctly.
let rebuilt = ss.into_builder().build();
assert_prototype_only_on(&["main", "other"], &rebuilt, &rebuilt.syntaxes()[0]);
}
#[test]
fn no_prototype_for_contexts_inline_in_prototype() {
let mut builder = SyntaxSetBuilder::new();
let syntax = SyntaxDefinition::load_from_str(r#"
name: Test Prototype
scope: source.test
file_extensions: [test]
contexts:
prototype:
- match: p
push:
- match: p2
main:
- match: main
"#, true, None).unwrap();
builder.add(syntax);
let ss = builder.build();
assert_prototype_only_on(&["main"], &ss, &ss.syntaxes()[0]);
let rebuilt = ss.into_builder().build();
assert_prototype_only_on(&["main"], &rebuilt, &rebuilt.syntaxes()[0]);
}
fn assert_ops_contain(
ops: &[(usize, ScopeStackOp)],
expected: &(usize, ScopeStackOp)
) {
assert!(ops.contains(expected),
"expected operations to contain {:?}: {:?}", expected, ops);
}
fn assert_prototype_only_on(expected: &[&str], syntax_set: &SyntaxSet, syntax: &SyntaxReference) {
for (name, id) in syntax.context_ids() {
if name == "__main" || name == "__start" {
// Skip special contexts
continue;
}
let context = syntax_set.get_context(id).expect("#[cfg(test)]");
if expected.contains(&name.as_str()) {
assert!(context.prototype.is_some(), "Expected context {} to have prototype", name);
} else {
assert!(context.prototype.is_none(), "Expected context {} to not have prototype", name);
}
}
}
fn check_send<T: Send>() {}
fn check_sync<T: Sync>() {}
fn syntax_a() -> SyntaxDefinition {
SyntaxDefinition::load_from_str(
r#"
name: A
scope: source.a
file_extensions: [a]
contexts:
main:
- match: 'a'
scope: a
- match: 'go_b'
push: scope:source.b#main
"#,
true,
None,
).unwrap()
}
fn syntax_b() -> SyntaxDefinition {
SyntaxDefinition::load_from_str(
r#"
name: B
scope: source.b
file_extensions: [b]
contexts:
main:
- match: 'b'
scope: b
"#,
true,
None,
).unwrap()
}
}
|
use wasm_bindgen::prelude::*;
mod dependencies;
mod provider;
/// Adds two unsigned 32-bit integers and returns the sum; exported to
/// JavaScript via wasm-bindgen.
///
/// NOTE(review): `a + b` panics on overflow in debug builds and wraps in
/// release builds — confirm whether wrapping semantics are intended for the
/// JS-facing API.
#[wasm_bindgen]
pub fn add_two_ints(a: u32, b: u32) -> u32 {
    a + b
}
/// Fetches account information for `address_str` from the configured algod
/// provider and returns it to JavaScript as a JSON-compatible `JsValue`.
///
/// Errors (bad address, provider failure, serialization failure) are returned
/// as `Err(JsValue)` so they surface as catchable JS exceptions. The previous
/// version called `unwrap()` on the serialization steps, which would abort the
/// wasm instance with an opaque trap instead of rejecting the promise.
#[wasm_bindgen]
pub async fn get_account_data(address_str: String) -> Result<JsValue, JsValue> {
    let provider = dependencies::provider(dependencies::algod());
    // Parse error converts into JsValue via `?` (requires Into<JsValue>).
    let address = address_str.parse()?;
    match provider.get_infos(&address).await {
        Ok(view_data) => {
            let json = serde_json::to_value(&view_data)
                .map_err(|e| JsValue::from(format!("{}", e)))?;
            JsValue::from_serde(&json).map_err(|e| JsValue::from(format!("{}", e)))
        }
        Err(e) => Err(JsValue::from(format!("{}", e))),
    }
}
|
extern crate clap;
extern crate libipt;
use std::path::PathBuf;
use std::str::FromStr;
use clap::{App, Arg};
use libipt::packet::{Packet, PacketDecoder};
use libipt::ConfigBuilder as IptConfigBuilder;
/// Prints a formatted message to stderr and terminates the process with exit
/// code 1. The `std::process::exit` call diverges, so a `report_error!`
/// invocation can be used in any expression position (e.g. a match arm).
macro_rules! report_error {
    ($($arg:tt)*) => ({
        eprintln!($($arg)*);
        std::process::exit(1);
    })
}
/// CLI entry point: reads a file of raw Intel PT packet data, decodes packets
/// with libipt's `PacketDecoder`, and prints one line per packet giving its
/// byte offset and variant name. All failures exit via `report_error!`.
fn main() {
    // Parse command-line arguments: one required positional INPUT path.
    let matches = App::new("ptdecoder")
        .version("0.1.0")
        .author("Sirui Mu <msrlancern@126.com>")
        .about("Command line utility to decode Intel PT packets from binary data stream")
        .arg(
            Arg::with_name("INPUT")
                .help("Path to the file holding encoded Intel PT packets data")
                .required(true),
        )
        .get_matches();
    // INPUT is required, so value_of cannot be None here.
    let input_file_path = matches.value_of("INPUT").unwrap();
    let input_file_path = PathBuf::from_str(input_file_path).expect("invalid path");
    println!("Loading packet data");
    let mut file_data = match std::fs::read(&input_file_path) {
        Ok(data) => data,
        Err(err) => report_error!("Cannot read input file: {}", err),
    };
    // Remember the buffer length before the config borrows it mutably.
    let file_data_len = file_data.len();
    println!("Initializing packet decoder");
    let ipt_config_builder = match IptConfigBuilder::new(&mut file_data) {
        Ok(builder) => builder,
        Err(err) => report_error!("Cannot create Intel PT config: {}", err),
    };
    let ipt_config = ipt_config_builder.finish();
    let mut ipt_decoder = match PacketDecoder::new(&ipt_config) {
        Ok(decoder) => decoder,
        Err(err) => report_error!("Cannot create packet decoder: {}", err),
    };
    println!("Synchronizing packet decoder");
    // Advance the decoder to the first synchronization point in the stream.
    match ipt_decoder.sync_forward() {
        Ok(()) => (),
        Err(err) => report_error!("Cannot synchronize packet decoder: {}", err),
    };
    println!("Start decoding packets");
    // Decode packets one at a time until the decoder's offset reaches the end
    // of the input buffer.
    loop {
        let offset = match ipt_decoder.offset() {
            Ok(offset) => offset,
            Err(err) => report_error!("cannot tell offset: {}", err),
        };
        if offset as usize >= file_data_len {
            break;
        }
        let packet = match ipt_decoder.next() {
            Ok(packet) => packet,
            Err(err) => report_error!("cannot decode packet at offset {}: {}", offset, err),
        };
        print!("Offset {}: ", offset);
        // Map each packet variant to a human-readable name for display.
        let packet_msg = match packet {
            Packet::Invalid(_) => "Invalid",
            Packet::Psbend(_) => "Psbend",
            Packet::Stop(_) => "Stop",
            Packet::Pad(_) => "Pad",
            Packet::Psb(_) => "Psb",
            Packet::Ovf(_) => "Ovf",
            Packet::Unknown(_) => "Unknown",
            Packet::Fup(_) => "Fup",
            Packet::Tip(_) => "Tip",
            Packet::TipPge(_) => "TipPge",
            Packet::TipPgd(_) => "TipPgd",
            Packet::Tnt8(_) => "Tnt8",
            Packet::Tnt64(_) => "Tnt64",
            Packet::Mode(_) => "Mode",
            Packet::Pip(_) => "Pip",
            Packet::Vmcs(_) => "Vmcs",
            Packet::Cbr(_) => "Cbr",
            Packet::Tsc(_) => "Tsc",
            Packet::Tma(_) => "Tma",
            Packet::Mtc(_) => "Mtc",
            Packet::Cyc(_) => "Cyc",
            Packet::Mnt(_) => "Mnt",
            Packet::Exstop(_) => "Exstop",
            Packet::Mwait(_) => "Mwait",
            Packet::Pwre(_) => "Pwre",
            Packet::Pwrx(_) => "Pwrx",
            Packet::Ptw(_) => "Ptw",
        };
        println!("{}", packet_msg);
    }
}
|
use std::fs;
use aoc20::days::day9;
#[test]
fn day9_part1() {
    // The example input holds one number per line.
    let contents = fs::read_to_string("data/day9example.txt")
        .expect("Something went wrong reading the file");
    let numbers = contents
        .lines()
        .map(|line| line.parse::<usize>().unwrap())
        .collect::<Vec<usize>>();
    // With a preamble window of 5, the first invalid number is 127.
    assert_eq!(day9::part1(&numbers, 5), Some(127));
}
#[test]
fn day9_part2() {
    // The example input holds one number per line.
    let contents = fs::read_to_string("data/day9example.txt")
        .expect("Something went wrong reading the file");
    let mut numbers: Vec<usize> = Vec::new();
    for line in contents.lines() {
        numbers.push(line.parse().unwrap());
    }
    // The contiguous range summing to 127 yields the (min, max) pair (15, 47).
    assert_eq!(day9::part2(&numbers, 127), Some((15, 47)));
}
use pyo3::prelude::*;
use numpy::
{
IntoPyArray,
PyArrayDyn,
PyReadonlyArrayDyn
};
/// Creates the `isometric` Python submodule, registers the nested `legendre`
/// submodule inside it, attaches it to `parent_module`, and exposes the `FJC`
/// class on it.
pub fn register_module(py: Python<'_>, parent_module: &PyModule) -> PyResult<()>
{
    let isometric = PyModule::new(py, "isometric")?;
    super::legendre::py::register_module(py, isometric)?;
    parent_module.add_submodule(isometric)?;
    isometric.add_class::<FJC>()?;
    Ok(())
}
/// The freely-jointed chain (FJC) model thermodynamics in the isometric ensemble.
// Copy/Clone so instances can be passed by value between Rust and Python.
#[pyclass]
#[derive(Copy, Clone)]
pub struct FJC
{
    /// The mass of each hinge in the chain in units of kg/mol.
    #[pyo3(get)]
    pub hinge_mass: f64,
    /// The length of each link in the chain in units of nm.
    #[pyo3(get)]
    pub link_length: f64,
    /// The number of links in the chain.
    #[pyo3(get)]
    pub number_of_links: u8,
    /// The thermodynamic functions of the model in the isometric ensemble approximated using a Legendre transformation.
    #[pyo3(get)]
    pub legendre: super::legendre::py::FJC
}
#[pymethods]
impl FJC
{
/// Python constructor: builds an FJC model together with its nested
/// Legendre-transform approximation using the same chain parameters.
#[new]
pub fn init(number_of_links: u8, link_length: f64, hinge_mass: f64) -> Self
{
    let legendre = super::legendre::py::FJC::init(number_of_links, link_length, hinge_mass);
    FJC
    {
        hinge_mass,
        link_length,
        number_of_links,
        legendre
    }
}
/// The expected force as a function of the applied end-to-end length and temperature,
///
/// .. math::
///     f(\xi, T) = \frac{\partial \psi}{\partial\xi} = \frac{kT}{\xi} + \frac{kT}{\ell_b}\left(\frac{1}{2} - \frac{1}{N_b}\right)\frac{\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 3}}{\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 2}},
///
/// where :math:`m\equiv(1 - \xi/N_b\ell_b)/2` and :math:`s_\mathrm{max}/N_b\leq m\leq (s_\mathrm{max}+1)/N_b`.
///
/// Args:
///     end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The force :math:`f`.
///
pub fn force<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let lengths = end_to_end_length.as_array();
    lengths
        .mapv(|xi: f64| super::force(&self.number_of_links, &self.link_length, &xi, &temperature))
        .into_pyarray(py)
}
/// The expected nondimensional force as a function of the applied nondimensional end-to-end length per link,
///
/// .. math::
///     \eta(\gamma) = \frac{\partial\vartheta}{\partial\gamma} = \frac{1}{N_b\gamma} + \left(\frac{1}{2} - \frac{1}{N_b}\right)\frac{\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 3}}{\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 2}},
///
/// where :math:`m\equiv(1 - \gamma)/2` and :math:`s_\mathrm{max}/N_b\leq m\leq (s_\mathrm{max}+1)/N_b`.
///
/// Args:
///     nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///
/// Returns:
///     numpy.ndarray: The nondimensional force :math:`\eta\equiv\beta f\ell_b`.
///
pub fn nondimensional_force<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let gammas = nondimensional_end_to_end_length_per_link.as_array();
    gammas
        .mapv(|gamma: f64| super::nondimensional_force(&self.number_of_links, &gamma))
        .into_pyarray(py)
}
/// The Helmholtz free energy as a function of the applied end-to-end length and temperature,
///
/// .. math::
///     \psi(\xi, T) = -kT\ln Q(\xi, T).
///
/// Args:
///     end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The Helmholtz free energy :math:`\psi`.
///
pub fn helmholtz_free_energy<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let lengths = end_to_end_length.as_array();
    lengths
        .mapv(|xi: f64| super::helmholtz_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, &xi, &temperature))
        .into_pyarray(py)
}
/// The Helmholtz free energy per link as a function of the applied end-to-end length and temperature.
///
/// Args:
///     end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The Helmholtz free energy per link :math:`\psi/N_b`.
///
pub fn helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let lengths = end_to_end_length.as_array();
    lengths
        .mapv(|xi: f64| super::helmholtz_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, &xi, &temperature))
        .into_pyarray(py)
}
/// The relative Helmholtz free energy as a function of the applied end-to-end length and temperature,
///
/// .. math::
///     \Delta\psi(\xi, T) = kT\ln\left[\frac{P_\mathrm{eq}(0)}{P_\mathrm{eq}(\xi)}\right].
///
/// Args:
///     end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The relative Helmholtz free energy :math:`\Delta\psi\equiv\psi(\xi,T)-\psi(0,T)`.
///
pub fn relative_helmholtz_free_energy<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let lengths = end_to_end_length.as_array();
    lengths
        .mapv(|xi: f64| super::relative_helmholtz_free_energy(&self.number_of_links, &self.link_length, &xi, &temperature))
        .into_pyarray(py)
}
/// The relative Helmholtz free energy per link as a function of the applied end-to-end length and temperature.
///
/// Args:
///     end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The relative Helmholtz free energy per link :math:`\Delta\psi/N_b`.
///
pub fn relative_helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let lengths = end_to_end_length.as_array();
    lengths
        .mapv(|xi: f64| super::relative_helmholtz_free_energy_per_link(&self.number_of_links, &self.link_length, &xi, &temperature))
        .into_pyarray(py)
}
/// The nondimensional Helmholtz free energy as a function of the applied nondimensional end-to-end length per link and temperature.
///
/// Args:
///     nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The nondimensional Helmholtz free energy :math:`N_b\vartheta=\beta\psi`.
///
pub fn nondimensional_helmholtz_free_energy<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let gammas = nondimensional_end_to_end_length_per_link.as_array();
    gammas
        .mapv(|gamma: f64| super::nondimensional_helmholtz_free_energy(&self.number_of_links, &self.link_length, &self.hinge_mass, &gamma, &temperature))
        .into_pyarray(py)
}
/// The nondimensional Helmholtz free energy per link as a function of the applied nondimensional end-to-end length per link and temperature.
///
/// Args:
///     nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///     temperature (float): The temperature :math:`T`.
///
/// Returns:
///     numpy.ndarray: The nondimensional Helmholtz free energy per link :math:`\vartheta\equiv\beta\psi/N_b`.
///
pub fn nondimensional_helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>, temperature: f64) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let gammas = nondimensional_end_to_end_length_per_link.as_array();
    gammas
        .mapv(|gamma: f64| super::nondimensional_helmholtz_free_energy_per_link(&self.number_of_links, &self.link_length, &self.hinge_mass, &gamma, &temperature))
        .into_pyarray(py)
}
/// The nondimensional relative Helmholtz free energy as a function of the applied nondimensional end-to-end length per link,
///
/// .. math::
///     \beta\Delta\psi(\gamma) = \ln\left[\frac{\mathscr{P}_\mathrm{eq}(0)}{\mathscr{P}_\mathrm{eq}(\gamma)}\right].
///
/// Args:
///     nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///
/// Returns:
///     numpy.ndarray: The nondimensional relative Helmholtz free energy :math:`N_b\Delta\vartheta=\beta\Delta\psi`.
///
pub fn nondimensional_relative_helmholtz_free_energy<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let gammas = nondimensional_end_to_end_length_per_link.as_array();
    gammas
        .mapv(|gamma: f64| super::nondimensional_relative_helmholtz_free_energy(&self.number_of_links, &gamma))
        .into_pyarray(py)
}
/// The nondimensional relative Helmholtz free energy per link as a function of the applied nondimensional end-to-end length per link,
///
/// .. math::
///     \Delta\vartheta(\gamma) = \ln\left[\frac{\mathscr{P}_\mathrm{eq}(0)}{\mathscr{P}_\mathrm{eq}(\gamma)}\right]^{1/N_b}.
///
/// Args:
///     nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///
/// Returns:
///     numpy.ndarray: The nondimensional relative Helmholtz free energy per link :math:`\Delta\vartheta\equiv\beta\Delta\psi/N_b`.
///
pub fn nondimensional_relative_helmholtz_free_energy_per_link<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Evaluate the scalar model element-wise over the input array.
    let gammas = nondimensional_end_to_end_length_per_link.as_array();
    gammas
        .mapv(|gamma: f64| super::nondimensional_relative_helmholtz_free_energy_per_link(&self.number_of_links, &gamma))
        .into_pyarray(py)
}
/// The equilibrium probability density of end-to-end vectors as a function of the end-to-end length,
///
/// .. math::
/// P_\mathrm{eq}(\xi) = \frac{e^{-\beta\psi(\xi, T)}}{4\pi\int e^{-\beta\psi(\xi', T)} \,{\xi'}{}^2 d\xi'} = \frac{1}{8\pi\ell_b^2\xi}\frac{N_b^{N_b - 2}}{(N_b - 2)!}\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 2},
///
/// where :math:`m\equiv(1 - \xi/N_b\ell_b)/2` and :math:`s_\mathrm{max}/N_b\leq m\leq (s_\mathrm{max}+1)/N_b`.
///
/// Args:
/// end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///
/// Returns:
/// numpy.ndarray: The equilibrium probability density :math:`P_\mathrm{eq}`.
///
pub fn equilibrium_distribution<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Apply the scalar model function elementwise over the input array.
    let input = end_to_end_length.as_array();
    input.mapv(|xi: f64| super::equilibrium_distribution(&self.number_of_links, &self.link_length, &xi)).into_pyarray(py)
}
/// The nondimensional equilibrium probability density of nondimensional end-to-end vectors per link as a function of the nondimensional end-to-end length per link,
///
/// .. math::
/// \mathscr{P}_\mathrm{eq}(\gamma) = \frac{e^{-\Delta\vartheta(\gamma)}}{4\pi\int e^{-\Delta\vartheta(\gamma')} \,{\gamma'}{}^2 d\gamma'} = \frac{1}{8\pi\gamma}\frac{N_b^{N_b}}{(N_b - 2)!}\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 2},
///
/// where :math:`m\equiv(1 - \gamma)/2` and :math:`s_\mathrm{max}/N_b\leq m\leq (s_\mathrm{max}+1)/N_b`.
///
/// Args:
/// nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///
/// Returns:
/// numpy.ndarray: The nondimensional equilibrium probability density :math:`\mathscr{P}_\mathrm{eq}\equiv (N_b\ell_b)^3 P_\mathrm{eq}`.
///
pub fn nondimensional_equilibrium_distribution<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Apply the scalar model function elementwise over the input array.
    let input = nondimensional_end_to_end_length_per_link.as_array();
    input.mapv(|gamma: f64| super::nondimensional_equilibrium_distribution(&self.number_of_links, &gamma)).into_pyarray(py)
}
/// The equilibrium probability density of end-to-end lengths as a function of the end-to-end length, given by :footcite:t:`treloar1949physics` as
///
/// .. math::
/// g_\mathrm{eq}(\xi) = 4\pi\xi^2 P_\mathrm{eq}(\xi) = \frac{\xi}{2\ell_b^2}\frac{N_b^{N_b-2}}{(N_b - 2)!}\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 2},
///
/// where :math:`m\equiv(1 - \xi/N_b\ell_b)/2` and :math:`s_\mathrm{max}/N_b\leq m\leq (s_\mathrm{max}+1)/N_b`.
///
/// Args:
/// end_to_end_length (numpy.ndarray): The end-to-end length :math:`\xi`.
///
/// Returns:
/// numpy.ndarray: The equilibrium probability density :math:`g_\mathrm{eq}`.
///
pub fn equilibrium_radial_distribution<'py>(&self, py: Python<'py>, end_to_end_length: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Apply the scalar model function elementwise over the input array.
    let input = end_to_end_length.as_array();
    input.mapv(|xi: f64| super::equilibrium_radial_distribution(&self.number_of_links, &self.link_length, &xi)).into_pyarray(py)
}
/// The nondimensional equilibrium probability density of nondimensional end-to-end lengths per link as a function of the nondimensional end-to-end length per link,
///
/// .. math::
/// \mathscr{g}_\mathrm{eq}(\gamma) = 4\pi\gamma^2 \mathscr{P}_\mathrm{eq}(\gamma) = \frac{\gamma}{2}\frac{N_b^{N_b}}{(N_b - 2)!}\sum_{s=0}^{s_\mathrm{max}}(-1)^s\binom{N_b}{s}\left(m - \frac{s}{N_b}\right)^{N_b - 2},
///
/// where :math:`m\equiv(1 - \gamma)/2` and :math:`s_\mathrm{max}/N_b\leq m\leq (s_\mathrm{max}+1)/N_b`.
///
/// Args:
/// nondimensional_end_to_end_length_per_link (numpy.ndarray): The nondimensional end-to-end length per link :math:`\gamma\equiv \xi/N_b\ell_b`.
///
/// Returns:
/// numpy.ndarray: The nondimensional equilibrium probability density :math:`\mathscr{g}_\mathrm{eq}\equiv N_b\ell_b g_\mathrm{eq}`.
///
pub fn nondimensional_equilibrium_radial_distribution<'py>(&self, py: Python<'py>, nondimensional_end_to_end_length_per_link: PyReadonlyArrayDyn<f64>) -> &'py PyArrayDyn<f64>
{
    // Apply the scalar model function elementwise over the input array.
    let input = nondimensional_end_to_end_length_per_link.as_array();
    input.mapv(|gamma: f64| super::nondimensional_equilibrium_radial_distribution(&self.number_of_links, &gamma)).into_pyarray(py)
}
}
|
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
/// Invokes an `unsafe` C-style function that returns 0 on success and -1 on
/// failure, panicking (with the current `errno`) on failure.
///
/// Usage: `panic_on_error!(some_ffi_function, arg1, arg2);`
///
/// In debug builds, any return value other than 0 or -1 trips a
/// `debug_assert!`, catching functions that do not follow this convention.
macro_rules! panic_on_error
{
    ($function: path$(,$argument: expr)*) =>
    {
        {
            // SAFETY is the caller's responsibility: the macro blindly wraps
            // the call in `unsafe`.
            let result = unsafe { $function($($argument),*) };
            debug_assert!(result == 0 || result == -1, "{} returned a result '{}' which was not 0 or -1", stringify!($function), result);
            if $crate::rust_extra::unlikely(result == -1)
            {
                // Capture errno immediately, before any other call can clobber it.
                let errno = $crate::errno::errno();
                panic!("{} failed with error number '{}' ('{}')", stringify!($function), errno.0, errno);
            }
        }
    }
}
|
// Copyright 2020 The MWC Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use grin_wallet_util::grin_util::secp::key::{PublicKey, SecretKey};
use grin_wallet_util::grin_util::secp::pedersen::Commitment;
use grin_wallet_util::grin_util::secp::{Message, Secp256k1, Signature};
use super::base58;
use crate::error::{Error, ErrorKind};
use crate::grin_util as util;
use sha2::{Digest, Sha256};
/// Build a public key for the given private key
pub fn public_key_from_secret_key(secret_key: &SecretKey) -> Result<PublicKey, Error> {
let secp = Secp256k1::new();
PublicKey::from_secret_key(&secp, secret_key).map_err(|e| Error::from(e))
}
/// Verify signature, usual way
pub fn verify_signature(
challenge: &str,
signature: &Signature,
public_key: &PublicKey,
) -> Result<(), Error> {
let mut hasher = Sha256::new();
hasher.update(challenge.as_bytes());
let message = Message::from_slice(hasher.finalize().as_slice())?;
let secp = Secp256k1::new();
secp.verify(&message, signature, public_key)
.map_err(|e| Error::from(e))?;
Ok(())
}
/// Sign the challenge with a private key.
pub fn sign_challenge(challenge: &str, secret_key: &SecretKey) -> Result<Signature, Error> {
    // The message to sign is the SHA-256 digest of the challenge bytes,
    // the same construction `verify_signature` checks against.
    let mut hasher = Sha256::new();
    hasher.update(challenge.as_bytes());
    let message = Message::from_slice(hasher.finalize().as_slice())?;
    let secp = Secp256k1::new();
    secp.sign(&message, secret_key).map_err(|e| Error::from(e))
}
/// Parse a DER-encoded signature from its hex string representation.
pub fn signature_from_string(sig_str: &str) -> Result<Signature, Error> {
    let raw = util::from_hex(sig_str).map_err(|e| {
        ErrorKind::TxProofGenericError(format!(
            "Unable to build signature from HEX {}, {}",
            sig_str, e
        ))
    })?;
    Ok(Signature::from_der(&raw)
        .map_err(|e| ErrorKind::TxProofGenericError(format!("Unable to build signature, {}", e)))?)
}
///////////////////////////////////////////////////////////////////////////////////////////////////
/// To- and from-hex conversion for key, signature, and commitment types.
pub trait Hex<T> {
    /// Parse an instance of `T` from its hex string representation.
    fn from_hex(str: &str) -> Result<T, Error>;
    /// Render this object as a hex string.
    fn to_hex(&self) -> String;
}
impl Hex<PublicKey> for PublicKey {
    /// Parse a public key from hex-encoded raw key bytes.
    fn from_hex(str: &str) -> Result<PublicKey, Error> {
        let hex = util::from_hex(str).map_err(|e| {
            // Fixed typo in the error message ("Unable convert Publi Key").
            ErrorKind::HexError(format!("Unable to convert Public Key HEX {}, {}", str, e))
        })?;
        PublicKey::from_slice(&hex).map_err(|e| {
            ErrorKind::HexError(format!(
                "Unable to build public key from HEX {}, {}",
                str, e
            ))
            .into()
        })
    }
    /// Hex of the base58-serialized public key.
    // NOTE(review): to_hex emits the base58-serialized form while from_hex
    // parses raw key bytes, so the two are not symmetric inverses — confirm
    // this asymmetry is intentional against the callers.
    fn to_hex(&self) -> String {
        util::to_hex(&base58::serialize_public_key(self))
    }
}
impl Hex<Signature> for Signature {
    /// Decode a signature from its hex-encoded DER representation.
    fn from_hex(str: &str) -> Result<Signature, Error> {
        let raw = util::from_hex(str).map_err(|e| {
            ErrorKind::HexError(format!("Unable convert Signature HEX {}, {}", str, e))
        })?;
        Signature::from_der(&raw).map_err(|e| {
            ErrorKind::HexError(format!("Unable to build Signature from HEX {}, {}", str, e)).into()
        })
    }
    /// DER-serialize, then render as hex.
    fn to_hex(&self) -> String {
        let der = self.serialize_der();
        util::to_hex(&der)
    }
}
impl Hex<SecretKey> for SecretKey {
    /// Decode a secret key from hex-encoded raw key bytes.
    fn from_hex(str: &str) -> Result<SecretKey, Error> {
        let raw = util::from_hex(str)
            .map_err(|e| ErrorKind::HexError(format!("Unable convert key HEX, {}", e)))?;
        SecretKey::from_slice(&raw)
            .map_err(|e| ErrorKind::HexError(format!("Unable to build Key from HEX, {}", e)).into())
    }
    /// Hex-encode the raw key bytes.
    fn to_hex(&self) -> String {
        util::to_hex(&self.0)
    }
}
impl Hex<Commitment> for Commitment {
    /// Decode a Pedersen commitment from hex-encoded bytes.
    fn from_hex(str: &str) -> Result<Commitment, Error> {
        match util::from_hex(str) {
            Ok(raw) => Ok(Commitment::from_vec(raw)),
            Err(e) => Err(ErrorKind::HexError(format!(
                "Unable convert Commitment HEX {}, {}",
                str, e
            ))
            .into()),
        }
    }
    /// Hex-encode the raw commitment bytes.
    fn to_hex(&self) -> String {
        util::to_hex(&self.0)
    }
}
|
#![feature(decl_macro, proc_macro_hygiene)]
#[macro_use]
extern crate mongodb;
extern crate juniper;
// Crate modules; each is defined in its own file of the same name.
pub mod db;
pub mod graphql;
pub mod models;
pub mod routes;
pub mod stripe;
|
use clap::{App, AppSettings, SubCommand};
/// Subcommands accepted on the command line.
pub enum NetmonSubcommand {
    /// The `watch` subcommand.
    Watch,
    /// The `check` subcommand.
    Check,
}
/// Parsed command-line arguments.
pub struct Args {
    /// Which subcommand was requested.
    pub subcommand: NetmonSubcommand,
}
/// Parse the process's command-line arguments into an `Args` value.
///
/// A subcommand is mandatory: with none given, clap prints help and exits
/// (`SubcommandRequiredElseHelp`).
pub fn get_cli_args() -> Args {
    let app = App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .subcommand(SubCommand::with_name("watch").about("Watch about help string"))
        .subcommand(SubCommand::with_name("check").about("Check about help string"));
    let matches = app.get_matches();
    let subcommand = if matches.subcommand_matches("watch").is_some() {
        NetmonSubcommand::Watch
    } else if matches.subcommand_matches("check").is_some() {
        NetmonSubcommand::Check
    } else {
        // clap guarantees one of the declared subcommands was supplied.
        unreachable!()
    };
    Args { subcommand }
}
|
use std::env;
/// Returns `true` if `word` reads the same forwards and backwards.
///
/// Compares Unicode scalar values (`char`s); the empty string and
/// single-character strings count as palindromes.
fn is_palindrome(word: &str) -> bool {
    // The original indexed the char vector with the *byte* length
    // (`word.len() - 1`), which underflows on "" and panics (out of
    // bounds) on any multi-byte UTF-8 input. Use the char count instead
    // and compare mirrored positions over the first half only.
    let chars: Vec<char> = word.chars().collect();
    let n = chars.len();
    (0..n / 2).all(|i| chars[i] == chars[n - 1 - i])
}
/// Report, for each command-line argument, whether it is a palindrome.
fn main() {
    println!("Palindromes?");
    for word in env::args().skip(1) {
        println!(" {:>5?}: {}", is_palindrome(&word), &word);
    }
}
|
#[macro_use]
extern crate log;
use azure_core::prelude::*;
use azure_identity::token_credentials::DefaultAzureCredential;
use azure_identity::token_credentials::TokenCredential;
use azure_storage::blob::prelude::*;
use azure_storage::core::prelude::*;
use std::error::Error;
/// Fetch a single blob from Azure Blob Storage (authenticating via the
/// default Azure credential chain) and print its UTF-8 contents.
///
/// Expects three positional CLI arguments: account name, container name,
/// and blob name.
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    env_logger::init();
    // First we retrieve the account name, container and blob name from command line args
    let account = std::env::args()
        .nth(1)
        .expect("please specify the account name as first command line parameter");
    let container = std::env::args()
        .nth(2)
        .expect("please specify the container name as second command line parameter");
    let blob = std::env::args()
        .nth(3)
        .expect("please specify the blob name as third command line parameter");
    // Acquire an AAD bearer token for the storage resource.
    let bearer_token = DefaultAzureCredential::default()
        .get_token("https://storage.azure.com/")
        .await?;
    let http_client = new_http_client();
    let storage_account_client = StorageAccountClient::new_bearer_token(
        http_client.clone(),
        &account,
        bearer_token.token.secret(),
    );
    let storage_client = storage_account_client.as_storage_client();
    // Note: `blob` is shadowed here — the String becomes a BlobClient.
    let blob = storage_client
        .as_container_client(&container)
        .as_blob_client(&blob);
    trace!("Requesting blob");
    let response = blob.get().execute().await?;
    // Fails if the blob is not valid UTF-8.
    let s_content = String::from_utf8(response.data.to_vec())?;
    println!("blob == {:?}", blob);
    println!("s_content == {}", s_content);
    Ok(())
}
|
//! PASCAL-VOC features and helpers.
// Internal modules; only the `prepare` entry points below are re-exported.
mod label_map;
mod parser;
mod tfrecord;
mod features;
pub use features::prepare::{prepare, PrepareOpts, Report as PrepareReport};
|
mod math;
/// Compute the sum 1..=n via the `math` helper module and print it.
fn main() {
    let n = 10;
    let result = math::sum_one_to_n(n);
    println!("{}", result);
    // Removed: a local `Vec` that was built and pushed to but never read
    // (dead code that only produced unused-variable warnings).
}
|
/// `CR1` register bit-field accessors (MMIO word at 0x40015000).
///
/// Each inner module exposes `get`/`set` for one bit-field; `set` does a
/// volatile read-modify-write that preserves all other bits.
// NOTE(review): the 0x40015xxx address block looks like an STM32 timer
// peripheral — confirm field names against the device reference manual.
// SAFETY throughout this file: raw volatile accesses to a fixed peripheral
// register address; sound only on a target where that address is mapped.
pub mod cr1 {
    /// CKD: 2-bit field at bits [9:8].
    pub mod ckd {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015000u32 as *const u32) >> 8) & 0x3
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015000u32 as *const u32);
                reg &= 0xFFFFFCFFu32;
                reg |= (val & 0x3) << 8;
                core::ptr::write_volatile(0x40015000u32 as *mut u32, reg);
            }
        }
    }
    /// ARPE: 1-bit field at bit 7.
    pub mod arpe {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015000u32 as *const u32) >> 7) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015000u32 as *const u32);
                reg &= 0xFFFFFF7Fu32;
                reg |= (val & 0x1) << 7;
                core::ptr::write_volatile(0x40015000u32 as *mut u32, reg);
            }
        }
    }
    /// URS: 1-bit field at bit 2.
    pub mod urs {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015000u32 as *const u32) >> 2) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015000u32 as *const u32);
                reg &= 0xFFFFFFFBu32;
                reg |= (val & 0x1) << 2;
                core::ptr::write_volatile(0x40015000u32 as *mut u32, reg);
            }
        }
    }
    /// UDIS: 1-bit field at bit 1.
    pub mod udis {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015000u32 as *const u32) >> 1) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015000u32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x40015000u32 as *mut u32, reg);
            }
        }
    }
    /// CEN: 1-bit field at bit 0.
    pub mod cen {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015000u32 as *const u32) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015000u32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x40015000u32 as *mut u32, reg);
            }
        }
    }
}
/// `CR2` register bit-field accessors (MMIO word at 0x40015004).
pub mod cr2 {
    /// MMS: 3-bit field at bits [6:4]; `set` is a volatile read-modify-write.
    pub mod mms {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015004u32 as *const u32) >> 4) & 0x7
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015004u32 as *const u32);
                reg &= 0xFFFFFF8Fu32;
                reg |= (val & 0x7) << 4;
                core::ptr::write_volatile(0x40015004u32 as *mut u32, reg);
            }
        }
    }
}
/// `DIER` interrupt-enable register accessors (MMIO word at 0x4001500C).
pub mod dier {
    /// CC1IE: 1-bit field at bit 1; `set` is a volatile read-modify-write.
    pub mod cc1ie {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x4001500Cu32 as *const u32) >> 1) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4001500Cu32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x4001500Cu32 as *mut u32, reg);
            }
        }
    }
    /// UIE: 1-bit field at bit 0.
    pub mod uie {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x4001500Cu32 as *const u32) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4001500Cu32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x4001500Cu32 as *mut u32, reg);
            }
        }
    }
}
/// `SR` status register accessors (MMIO word at 0x40015010).
// NOTE(review): status flags on many peripherals are "write 0 to clear";
// the read-modify-write in `set` can unintentionally clear *other* flags
// that were set between the read and the write — confirm the intended
// semantics against the device reference manual.
pub mod sr {
    /// CC1OF: 1-bit flag at bit 9.
    pub mod cc1of {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015010u32 as *const u32) >> 9) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015010u32 as *const u32);
                reg &= 0xFFFFFDFFu32;
                reg |= (val & 0x1) << 9;
                core::ptr::write_volatile(0x40015010u32 as *mut u32, reg);
            }
        }
    }
    /// CC1IF: 1-bit flag at bit 1.
    pub mod cc1if {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015010u32 as *const u32) >> 1) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015010u32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x40015010u32 as *mut u32, reg);
            }
        }
    }
    /// UIF: 1-bit flag at bit 0.
    pub mod uif {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015010u32 as *const u32) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015010u32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x40015010u32 as *mut u32, reg);
            }
        }
    }
}
/// `EGR` event-generation register accessors (MMIO word at 0x40015014).
///
/// Unlike the other registers here, `set` writes the whole register with
/// only the requested bit (no read-modify-write) — all other bits are
/// written as zero.
pub mod egr {
    /// CC1G: write-only trigger bit at bit 1.
    pub mod cc1g {
        pub fn set(val: u32) {
            unsafe {
                let reg = (val & 0x1) << 1;
                core::ptr::write_volatile(0x40015014u32 as *mut u32, reg);
            }
        }
    }
    /// UG: write-only trigger bit at bit 0.
    pub mod ug {
        pub fn set(val: u32) {
            unsafe {
                let reg = val & 0x1;
                core::ptr::write_volatile(0x40015014u32 as *mut u32, reg);
            }
        }
    }
}
/// `CCMR1` accessors, output-compare view (MMIO word at 0x40015018).
///
/// Shares the same register word as `ccmr1_input` below; which layout
/// applies depends on the CC1S channel-direction selection.
pub mod ccmr1_output {
    /// OC1M: 3-bit field at bits [6:4].
    pub mod oc1m {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015018u32 as *const u32) >> 4) & 0x7
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015018u32 as *const u32);
                reg &= 0xFFFFFF8Fu32;
                reg |= (val & 0x7) << 4;
                core::ptr::write_volatile(0x40015018u32 as *mut u32, reg);
            }
        }
    }
    /// OC1PE: 1-bit field at bit 3.
    pub mod oc1pe {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015018u32 as *const u32) >> 3) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015018u32 as *const u32);
                reg &= 0xFFFFFFF7u32;
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(0x40015018u32 as *mut u32, reg);
            }
        }
    }
    /// CC1S: 2-bit field at bits [1:0].
    pub mod cc1s {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015018u32 as *const u32) & 0x3
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015018u32 as *const u32);
                reg &= 0xFFFFFFFCu32;
                reg |= val & 0x3;
                core::ptr::write_volatile(0x40015018u32 as *mut u32, reg);
            }
        }
    }
}
/// `CCMR1` accessors, input-capture view (same MMIO word 0x40015018 as
/// `ccmr1_output` above; the active layout depends on CC1S).
pub mod ccmr1_input {
    /// IC1F: 4-bit field at bits [7:4].
    pub mod ic1f {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015018u32 as *const u32) >> 4) & 0xF
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015018u32 as *const u32);
                reg &= 0xFFFFFF0Fu32;
                reg |= (val & 0xF) << 4;
                core::ptr::write_volatile(0x40015018u32 as *mut u32, reg);
            }
        }
    }
    /// IC1PSC: 2-bit field at bits [3:2].
    pub mod ic1psc {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015018u32 as *const u32) >> 2) & 0x3
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015018u32 as *const u32);
                reg &= 0xFFFFFFF3u32;
                reg |= (val & 0x3) << 2;
                core::ptr::write_volatile(0x40015018u32 as *mut u32, reg);
            }
        }
    }
    /// CC1S: 2-bit field at bits [1:0] (same bits as in the output view).
    pub mod cc1s {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015018u32 as *const u32) & 0x3
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015018u32 as *const u32);
                reg &= 0xFFFFFFFCu32;
                reg |= val & 0x3;
                core::ptr::write_volatile(0x40015018u32 as *mut u32, reg);
            }
        }
    }
}
/// `CCER` capture/compare enable register accessors (MMIO word at 0x40015020).
pub mod ccer {
    /// CC1NP: 1-bit field at bit 3.
    pub mod cc1np {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015020u32 as *const u32) >> 3) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015020u32 as *const u32);
                reg &= 0xFFFFFFF7u32;
                reg |= (val & 0x1) << 3;
                core::ptr::write_volatile(0x40015020u32 as *mut u32, reg);
            }
        }
    }
    /// CC1P: 1-bit field at bit 1.
    pub mod cc1p {
        pub fn get() -> u32 {
            unsafe {
                (core::ptr::read_volatile(0x40015020u32 as *const u32) >> 1) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015020u32 as *const u32);
                reg &= 0xFFFFFFFDu32;
                reg |= (val & 0x1) << 1;
                core::ptr::write_volatile(0x40015020u32 as *mut u32, reg);
            }
        }
    }
    /// CC1E: 1-bit field at bit 0.
    pub mod cc1e {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015020u32 as *const u32) & 0x1
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015020u32 as *const u32);
                reg &= 0xFFFFFFFEu32;
                reg |= val & 0x1;
                core::ptr::write_volatile(0x40015020u32 as *mut u32, reg);
            }
        }
    }
}
/// `CNT` counter register accessors (MMIO word at 0x40015024).
pub mod cnt {
    /// CNT: 16-bit counter value at bits [15:0]; upper half is preserved on write.
    pub mod cnt {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015024u32 as *const u32) & 0xFFFF
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015024u32 as *const u32);
                reg &= 0xFFFF0000u32;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(0x40015024u32 as *mut u32, reg);
            }
        }
    }
}
/// `PSC` prescaler register accessors (MMIO word at 0x40015028).
pub mod psc {
    /// PSC: 16-bit prescaler value at bits [15:0]; upper half is preserved on write.
    pub mod psc {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015028u32 as *const u32) & 0xFFFF
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015028u32 as *const u32);
                reg &= 0xFFFF0000u32;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(0x40015028u32 as *mut u32, reg);
            }
        }
    }
}
/// `ARR` auto-reload register accessors (MMIO word at 0x4001502C).
pub mod arr {
    /// ARR: 16-bit reload value at bits [15:0]; upper half is preserved on write.
    pub mod arr {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x4001502Cu32 as *const u32) & 0xFFFF
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x4001502Cu32 as *const u32);
                reg &= 0xFFFF0000u32;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(0x4001502Cu32 as *mut u32, reg);
            }
        }
    }
}
/// `CCR1` capture/compare register accessors (MMIO word at 0x40015034).
pub mod ccr1 {
    /// CCR1: 16-bit compare value at bits [15:0]; upper half is preserved on write.
    pub mod ccr1 {
        pub fn get() -> u32 {
            unsafe {
                core::ptr::read_volatile(0x40015034u32 as *const u32) & 0xFFFF
            }
        }
        pub fn set(val: u32) {
            unsafe {
                let mut reg = core::ptr::read_volatile(0x40015034u32 as *const u32);
                reg &= 0xFFFF0000u32;
                reg |= val & 0xFFFF;
                core::ptr::write_volatile(0x40015034u32 as *mut u32, reg);
            }
        }
    }
}
|
use ast::abstract_syntax_tree::Ast;
use std::boxed::Box;
/// Operators that store their operands.
/// The Ast's evaluate() method will perform different logic on the operands depending on the operator.
#[derive(PartialEq, PartialOrd, Debug, Clone)]
pub enum SExpression {
    // Binary operators: each holds its left and right operand subtrees.
    Add(Box<Ast>, Box<Ast>),
    Subtract(Box<Ast>, Box<Ast>),
    Multiply(Box<Ast>, Box<Ast>),
    Divide(Box<Ast>, Box<Ast>),
    Modulo(Box<Ast>, Box<Ast>),
    Equals(Box<Ast>, Box<Ast>),
    NotEquals(Box<Ast>, Box<Ast>),
    GreaterThan(Box<Ast>, Box<Ast>),
    LessThan(Box<Ast>, Box<Ast>),
    GreaterThanOrEqual(Box<Ast>, Box<Ast>),
    LessThanOrEqual(Box<Ast>, Box<Ast>),
    LogicalAnd(Box<Ast>, Box<Ast>),
    LogicalOr(Box<Ast>, Box<Ast>),
    // Unary operators: each holds a single operand subtree.
    Print(Box<Ast>),
    Include(Box<Ast>),
    Invert(Box<Ast>),
    Negate(Box<Ast>),
    Increment(Box<Ast>),
    Decrement(Box<Ast>),
    // Language features: declarations, assignment, control flow, and access.
    VariableDeclaration { identifier: Box<Ast>, ast: Box<Ast>},
    ConstDeclaration { identifier: Box<Ast>, ast: Box<Ast>},
    Assignment { identifier: Box<Ast>, ast: Box<Ast> },
    TypeAssignment {
        identifier: Box<Ast>,
        type_info: Box<Ast>,
    },
    FieldAssignment { identifier: Box<Ast>, ast: Box<Ast> },
    DeclareFunction {
        identifier: Box<Ast>,
        function_datatype: Box<Ast>,
    },
    CreateStruct {
        identifier: Box<Ast>,
        struct_datatype: Box<Ast>,
    },
    Loop {
        conditional: Box<Ast>,
        body: Box<Ast>,
    },
    AccessArray {
        identifier: Box<Ast>,
        index: Box<Ast>,
    },
    GetArrayLength ( Box<Ast> ),
    Range{
        start: Box<Ast>,
        end: Box<Ast>
    },
    StructDeclaration {
        identifier: Box<Ast>,
        struct_type_info: Box<Ast>,
    },
    AccessStructField {
        identifier: Box<Ast>,
        field_identifier: Box<Ast>,
    },
    ExecuteFn {
        identifier: Box<Ast>,
        parameters: Box<Ast>,
    },
}
extern crate ansi_term;
extern crate difference;
extern crate itertools;
use ansi_term::{ANSIGenericString, Colour};
use difference::{Changeset, Difference};
use itertools::Itertools;
use std::fmt;
/// Plain red text — used for whole removed ("left") lines.
fn red(s: &str) -> ANSIGenericString<str> {
    Colour::Red.paint(s)
}
/// Bold white-on-red — used to highlight removed spans inside a changed line.
fn on_red(s: &str) -> ANSIGenericString<str> {
    Colour::White.on(Colour::Red).bold().paint(s)
}
/// Plain green text — used for whole added ("right") lines.
fn green(s: &str) -> ANSIGenericString<str> {
    Colour::Green.paint(s)
}
/// Bold white-on-green — used to highlight added spans inside a changed line.
fn on_green(s: &str) -> ANSIGenericString<str> {
    Colour::White.on(Colour::Green).bold().paint(s)
}
// Gutter markers for removed/added lines, with newline-prefixed variants
// used when re-joining intra-line diff fragments.
static LEFT: &str = "<";
static NL_LEFT: &str = "\n<";
static RIGHT: &str = ">";
static NL_RIGHT: &str = "\n>";
/// Enable ANSI escape handling in the Windows console, once per process.
/// Failure is ignored — output simply stays uncoloured.
#[cfg(windows)]
#[inline(always)]
fn enable_ansi() {
    use std::sync::Once;
    static ONCE: Once = Once::new();
    ONCE.call_once(|| {ansi_term::enable_ansi_support().ok();});
}
/// No-op on non-Windows platforms, where ANSI is available by default.
#[cfg(not(windows))]
#[inline(always)]
fn enable_ansi() {
}
/// A pair of strings whose `Display` impl renders a coloured line diff
/// between them (expected vs. actual).
#[derive(Copy, Clone, Debug)]
pub struct PrettyDifference<'a> {
    /// The expected ("left") text.
    pub expected: &'a str,
    /// The actual ("right") text.
    pub actual: &'a str,
}
impl<'a> fmt::Display for PrettyDifference<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        diff(f, self.expected, self.actual)
    }
}
/// Format the difference between strings using GitHub-like formatting with ANSI coloring.
///
/// The changeset is computed line-by-line (split on `"\n"`).
pub fn diff(f: &mut fmt::Formatter, expected: &str, actual: &str) -> fmt::Result {
    let changeset = Changeset::new(expected, actual, "\n");
    fmt_changeset(f, &changeset)
}
/// Render a line-level changeset: unchanged lines indented, removed lines
/// prefixed `<` (red), added lines prefixed `>` (green). An adjacent
/// Rem-then-Add pair is rendered as one intra-line diff instead.
fn fmt_changeset(f: &mut fmt::Formatter, changeset: &Changeset) -> fmt::Result {
    enable_ansi();
    // Legend line explaining the two gutter markers.
    writeln!(f, "{} {} / {} {}",
        red(LEFT), red("left"),
        green(RIGHT), green("right"),
    )?;
    let diffs = &changeset.diffs;
    for (i, diff) in diffs.iter().enumerate() {
        match diff {
            Difference::Same(text) => {
                format_same(f, text)?;
            }
            Difference::Add(added) => {
                // If the previous hunk was a removal, render both together
                // as a character-level diff (checked_sub guards index 0).
                if let Some(Difference::Rem(removed)) = i.checked_sub(1).map(|i| &diffs[i]) {
                    format_add_rem(f, added, removed)?;
                } else {
                    format_add(f, added)?;
                }
            }
            Difference::Rem(removed) => {
                // A removal directly followed by an addition is deferred:
                // the Add arm above will consume this pair.
                if let Some(Difference::Add(_)) = diffs.get(i + 1) {
                    continue;
                } else {
                    format_rem(f, removed)?;
                }
            }
        }
    }
    Ok(())
}
/// Render a paired removal/addition as two lines with character-level
/// highlighting: the removed text (with its deleted spans emphasised)
/// followed by the added text (with its inserted spans emphasised).
fn format_add_rem(f: &mut fmt::Formatter, added: &str, removed: &str) -> fmt::Result {
    // Character-level diff ("" separator) between the two versions.
    let Changeset { diffs, .. } = Changeset::new(removed, added, "");
    // LEFT (removed)
    write!(f, "{}", red(LEFT))?;
    for diff in &diffs {
        match diff {
            Difference::Same(text) => {
                // Intersperse NL_LEFT so embedded newlines keep the gutter marker.
                for blob in Itertools::intersperse(text.split('\n'), NL_LEFT) {
                    write!(f, "{}", red(blob))?;
                }
            }
            Difference::Rem(text) => {
                for blob in Itertools::intersperse(text.split('\n'), NL_LEFT) {
                    write!(f, "{}", on_red(blob))?;
                }
            }
            // Insertions belong to the RIGHT rendering below.
            Difference::Add(_) => continue,
        }
    }
    writeln!(f)?;
    // RIGHT (added)
    write!(f, "{}", green(RIGHT))?;
    for diff in &diffs {
        match diff {
            Difference::Same(text) => {
                for blob in Itertools::intersperse(text.split('\n'), NL_RIGHT) {
                    write!(f, "{}", green(blob))?;
                }
            }
            Difference::Add(text) => {
                for blob in Itertools::intersperse(text.split('\n'), NL_RIGHT) {
                    write!(f, "{}", on_green(blob))?;
                }
            }
            // Deletions were already rendered in the LEFT pass.
            Difference::Rem(_) => continue,
        }
    }
    writeln!(f)?;
    Ok(())
}
/// Write unchanged lines, each indented by a single space.
fn format_same(f: &mut fmt::Formatter, text: &str) -> fmt::Result {
    text.split('\n')
        .try_for_each(|line| writeln!(f, " {}", line))
}
/// Write added lines, each prefixed with a green `>` gutter marker.
fn format_add(f: &mut fmt::Formatter, text: &str) -> fmt::Result {
    text.split('\n')
        .try_for_each(|line| writeln!(f, "{}{}", green(RIGHT), green(line)))
}
/// Write removed lines, each prefixed with a red `<` gutter marker.
fn format_rem(f: &mut fmt::Formatter, text: &str) -> fmt::Result {
    text.split('\n')
        .try_for_each(|line| writeln!(f, "{}{}", red(LEFT), red(line)))
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Smoke test: rendering a pure-addition diff must not panic.
    /// (The output itself is not asserted on.)
    #[test]
    fn single_add() {
        PrettyDifference {
            expected: "",
            actual: "foo",
        }
        .to_string();
    }
}
|
use super::VersionDao;
use apllodb_immutable_schema_engine_domain::version::active_version::ActiveVersion;
use serde::{Deserialize, Serialize};
/// A `CREATE TABLE` SQL statement generated for a single version table.
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default, Serialize, Deserialize)]
pub(in crate::sqlite::transaction::sqlite_tx) struct CreateTableSqlForVersion(String);
impl CreateTableSqlForVersion {
    /// Borrow the underlying SQL text.
    pub(super) fn as_str(&self) -> &str {
        self.0.as_str()
    }
}
impl From<&ActiveVersion> for CreateTableSqlForVersion {
    /// Build the `CREATE TABLE` DDL for one version's table: a NOT NULL
    /// navi-rowid column plus the version's non-PK columns (if any).
    fn from(version: &ActiveVersion) -> Self {
        use crate::sqlite::to_sql_string::ToSqlString;
        use apllodb_immutable_schema_engine_domain::entity::Entity;
        let version_table_name = VersionDao::table_name(version.id());
        // TODO Make CNAME_NAVI_ROWID primary key for performance.
        let sql = format!(
            "
        CREATE TABLE {table_name} (
          {navi_rowid} INTEGER NOT NULL{comma_if_non_pk_columns}
          {non_pk_columns}
        )
        ",
            table_name = version_table_name.as_str(),
            navi_rowid = super::CNAME_NAVI_ROWID,
            // Omit the separating comma when the version has no non-PK
            // columns, so the DDL stays syntactically valid.
            comma_if_non_pk_columns = if version.column_data_types().is_empty() {
                ""
            } else {
                ","
            },
            non_pk_columns = version.column_data_types().to_sql_string(),
        );
        // TODO materialize Version::constraints
        Self(sql)
    }
}
|
#[macro_use]
extern crate assert_json_diff;
#[macro_use]
extern crate serde_json;
// Exercises assert_json_include: actual may be a superset of expected,
// argument order/trailing commas must all be accepted.
#[test]
fn can_pass() {
    assert_json_include!(
        actual: json!({ "a": { "b": true }, "c": [true, null, 1] }),
        expected: json!({ "a": { "b": true }, "c": [true, null, 1] })
    );
    assert_json_include!(
        actual: json!({ "a": { "b": true } }),
        expected: json!({ "a": {} })
    );
    assert_json_include!(
        actual: json!({ "a": { "b": true } }),
        expected: json!({ "a": {} }),
    );
    assert_json_include!(
        expected: json!({ "a": {} }),
        actual: json!({ "a": { "b": true } }),
    );
}
// Mismatched leaf values must make assert_json_include panic.
#[test]
#[should_panic]
fn can_fail() {
    assert_json_include!(
        actual: json!({ "a": { "b": true }, "c": [true, null, 1] }),
        expected: json!({ "a": { "b": false }, "c": [false, null, {}] })
    );
}
// assert_json_eq requires exact equality (trailing comma accepted).
#[test]
fn can_pass_with_exact_match() {
    assert_json_eq!(json!({ "a": { "b": true } }), json!({ "a": { "b": true } }));
    assert_json_eq!(json!({ "a": { "b": true } }), json!({ "a": { "b": true } }),);
}
// A missing key is not an exact match.
#[test]
#[should_panic]
fn can_fail_with_exact_match() {
    assert_json_eq!(json!({ "a": { "b": true } }), json!({ "a": {} }));
}
// `ordered: false` compares arrays as multisets, ignoring element order.
#[test]
fn can_pass_with_partial_match() {
    assert_json_eq!(json!({ "a": ["a", "b"] }), json!({ "a": ["b", "a"] }), ordered: false);
}
// Empty arrays compare equal in unordered mode too.
#[test]
fn can_pass_with_empty_vec() {
    assert_json_eq!(json!({ "a": [] }), json!({ "a": [] }), ordered: false);
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use examples::{rescue, Example};
use std::time::Duration;
use winterfell::{FieldExtension, HashFunction, ProofOptions};
// Chain lengths to benchmark proof generation for.
const SIZES: [usize; 2] = [256, 512];
/// Criterion benchmark: time STARK proof generation for the Rescue example
/// at each size in `SIZES`.
fn rescue(c: &mut Criterion) {
    let mut group = c.benchmark_group("rescue");
    // Proving is slow, so keep the sample count low and allow a long
    // measurement window.
    group.sample_size(10);
    group.measurement_time(Duration::from_secs(25));
    let options = ProofOptions::new(
        32,
        32,
        0,
        HashFunction::Blake3_256,
        FieldExtension::None,
        4,
        256,
    );
    for &size in SIZES.iter() {
        let resc = rescue::RescueExample::new(size, options.clone());
        group.bench_function(BenchmarkId::from_parameter(size), |bench| {
            bench.iter(|| resc.prove());
        });
    }
    group.finish();
}
criterion_group!(rescue_group, rescue);
criterion_main!(rescue_group);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.