text stringlengths 8 4.13M |
|---|
use cocoa::base::id;
/// Newtype wrapper around a raw Objective-C object (`id`) that conforms to
/// the Metal `MTLBlitCommandEncoder` protocol.
pub struct BlitCommandEncoder(id);
// Generates the raw-pointer From/Into conversions for this wrapper, tied to
// the named Objective-C protocol (macro defined elsewhere in this crate).
impl_from_into_raw!(BlitCommandEncoder, of protocol "MTLBlitCommandEncoder");
|
use std::collections::{HashMap, HashSet};
use std::net::{SocketAddr, UdpSocket};
use std::str;
use wa_fsp::*;
/// UDP file-sharing tracker: maps each registered file name to the set of
/// peer addresses that claim to hold it.
struct FspServer {
    // Bound UDP socket all requests and responses go through.
    socket: UdpSocket,
    // file name -> addresses of peers that registered that file.
    files: HashMap<String, HashSet<SocketAddr>>,
}
impl FspServer {
    /// Creates a server bound to 0.0.0.0:8080.
    ///
    /// # Panics
    /// Panics if the socket cannot be bound (e.g. the port is in use).
    fn new() -> FspServer {
        let socket =
            UdpSocket::bind("0.0.0.0:8080").expect("Cannot bind socket");
        FspServer {
            socket,
            files: HashMap::new(),
        }
    }

    /// Main receive loop: blocks on the socket, parses each datagram as a
    /// JSON `Message`, and dispatches on its type. Never returns.
    fn run(&mut self) {
        // One fixed-size buffer reused across iterations. `recv_from`
        // overwrites from the start and only `..bytes_read` is ever read,
        // so the previous per-iteration clear()+resize() was wasted work.
        let mut buffer = vec![0u8; BUF_SIZE];
        loop {
            match self.socket.recv_from(&mut buffer) {
                Ok((bytes_read, src)) => {
                    let msg: Message = serde_json::from_str(
                        str::from_utf8(&buffer[..bytes_read]).unwrap(),
                    )
                    .expect("Error parsing message");
                    match msg.msg_type {
                        MsgType::Register => self.register(src, &msg),
                        MsgType::List => self.list(src),
                        MsgType::FileReq => self.resp_file(&msg.content, src),
                        _ => {}
                    }
                }
                Err(e) => {
                    eprintln!("Couldn't receive a datagram: {}", e);
                }
            }
            println!("{:?}", self.files);
        }
    }

    /// Records `socket_addr` as a holder of every file named in
    /// `msg.content` (a JSON-encoded list of file names).
    fn register(&mut self, socket_addr: SocketAddr, msg: &Message) {
        println!("Registering files from UDP");
        let filenames: Vec<String> = serde_json::from_str(&msg.content)
            .expect("Unable to parse file name list");
        for filename in filenames {
            // Single-lookup upsert via the entry API; replaces the previous
            // contains_key / insert / entry().and_modify() triple lookup.
            self.files.entry(filename).or_default().insert(socket_addr);
        }
    }

    /// Sends the requesting client the JSON-encoded list of all known
    /// file names.
    fn list(&self, socket_addr: SocketAddr) {
        println!("Sending file list to UDP {}", socket_addr);
        let filenames: Vec<String> = self.files.keys().cloned().collect();
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::List,
            content: serde_json::to_string(&filenames).unwrap(),
        })
        .unwrap();
        println!("{}", msg);
        let bytes_written = self
            .socket
            .send_to(msg.as_bytes(), socket_addr)
            .expect("Cannot send to client");
        println!("{} bytes written", bytes_written);
    }

    /// Answers a file request with `(filename, holders)`, where `holders`
    /// is the set of peers that registered the file (empty if unknown).
    fn resp_file(&self, filename: &str, socket_addr: SocketAddr) {
        println!("Response to file request for {}", filename);
        // Borrow the holder set instead of cloning it; serde serializes
        // through references, producing identical JSON.
        let empty = HashSet::new();
        let clients = self.files.get(filename).unwrap_or(&empty);
        let msg = serde_json::to_string(&Message {
            msg_type: MsgType::FileResp,
            content: serde_json::to_string(&(filename, clients)).unwrap(),
        })
        .unwrap();
        self.socket
            .send_to(msg.as_bytes(), socket_addr)
            .expect("Cannot send to client");
    }
}
/// Entry point: binds the tracker socket and enters the (infinite)
/// receive loop.
fn main() {
    FspServer::new().run();
}
|
#![cfg_attr(feature = "unstable", feature(test))]
// Launch program : cargo run --release < input/input.txt
// Launch benchmark : cargo +nightly bench --features "unstable"
/*
Benchmark results:
* When not sorting expenses during parsing
running 3 tests
test bench::bench_parse_input ... bench: 8,950 ns/iter (+/- 727)
test bench::bench_part_1 ... bench: 12,541 ns/iter (+/- 1,166)
test bench::bench_part_2 ... bench: 1,598,317 ns/iter (+/- 93,409)
* When sorting expenses during parsing
running 3 tests
test bench::bench_parse_input ... bench: 11,818 ns/iter (+/- 573)
test bench::bench_part_1 ... bench: 535 ns/iter (+/- 52)
test bench::bench_part_2 ... bench: 25,408 ns/iter (+/- 1,038)
*/
use std::error::Error;
use std::io::{self, Read, Write};
type Result<T> = ::std::result::Result<T, Box<dyn Error>>;
/// Builds a boxed error from `format!`-style arguments and early-returns
/// it as `Err` from the enclosing function.
macro_rules! err {
    ($($tt:tt)*) => { return Err(Box::<dyn Error>::from(format!($($tt)*))) }
}
/// Reads the expense report from stdin and prints the answers to both
/// puzzle parts.
fn main() -> Result<()> {
    let mut raw = String::new();
    io::stdin().read_to_string(&mut raw)?;
    let report = parse_input(&raw)?;
    // Lock stdout once instead of re-locking per writeln!.
    let stdout = io::stdout();
    let mut out = stdout.lock();
    writeln!(out, "Part 1 : {}", part_1(&report)?)?;
    writeln!(out, "Part 2 : {}", part_2(&report)?)?;
    Ok(())
}
/// Parses one `usize` per line and returns the values sorted ascending
/// (the sorted order is what makes the part solvers fast — see the
/// benchmark notes at the top of the file).
fn parse_input(input: &str) -> Result<Vec<usize>> {
    // Fallible collect: short-circuits on the first unparsable line,
    // exactly like the previous push-per-line loop.
    let mut expense_report = input
        .lines()
        .map(|line| line.parse::<usize>())
        .collect::<std::result::Result<Vec<_>, _>>()?;
    expense_report.sort_unstable();
    Ok(expense_report)
}
/// Finds two entries that sum to 2020 and returns their product.
///
/// # Errors
/// Returns an error when no such pair exists — including for inputs with
/// fewer than two entries.
fn part_1(expense_report: &[usize]) -> Result<usize> {
    // Pair each element with the tail slice after it. Unlike the previous
    // `0..(expense_report.len() - 1)` form, this cannot underflow (and
    // panic) on an empty slice; the search order is unchanged.
    for (i, &a) in expense_report.iter().enumerate() {
        for &b in &expense_report[i + 1..] {
            if a + b == 2020 {
                return Ok(a * b);
            }
        }
    }
    err!("Part 1 : No combination found!")
}
/// Finds three entries that sum to 2020 and returns their product.
///
/// # Errors
/// Returns an error when no such triple exists — including for inputs
/// with fewer than three entries.
fn part_2(expense_report: &[usize]) -> Result<usize> {
    // Slice-based iteration avoids the `len() - 2` / `len() - 1` usize
    // underflow (panic) the previous index ranges hit on short inputs;
    // the search order is unchanged.
    for (i, &a) in expense_report.iter().enumerate() {
        for (j, &b) in expense_report.iter().enumerate().skip(i + 1) {
            for &c in &expense_report[j + 1..] {
                if a + b + c == 2020 {
                    return Ok(a * b * c);
                }
            }
        }
    }
    err!("Part 2 : No combination found!")
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs::File;

    /// Loads the shared example puzzle input from `input/test.txt`.
    fn read_test_file() -> Result<String> {
        let mut contents = String::new();
        File::open("input/test.txt")?.read_to_string(&mut contents)?;
        Ok(contents)
    }

    /// Part 1 against the example from the puzzle statement.
    #[test]
    fn test_part_1() -> Result<()> {
        let report = parse_input(&read_test_file()?)?;
        assert_eq!(514579, part_1(&report)?);
        Ok(())
    }

    /// Part 2 against the example from the puzzle statement.
    #[test]
    fn test_part_2() -> Result<()> {
        let report = parse_input(&read_test_file()?)?;
        assert_eq!(241861950, part_2(&report)?);
        Ok(())
    }
}
#[cfg(all(feature = "unstable", test))]
mod bench {
    // Nightly-only benchmarks; run with
    // `cargo +nightly bench --features "unstable"` (see the file header).
    extern crate test;
    use super::*;
    use std::fs::File;
    use test::Bencher;

    /// Reads the real puzzle input from `input/input.txt`.
    fn read_input_file() -> Result<String> {
        let mut input = String::new();
        File::open("input/input.txt")?.read_to_string(&mut input)?;
        Ok(input)
    }

    /// Benchmarks parsing (including the sort done by `parse_input`).
    #[bench]
    fn bench_parse_input(b: &mut Bencher) -> Result<()> {
        let input = read_input_file()?;
        // black_box keeps the optimizer from discarding the result.
        b.iter(|| test::black_box(parse_input(&input)));
        Ok(())
    }

    /// Benchmarks part 1 on pre-parsed (sorted) input.
    #[bench]
    fn bench_part_1(b: &mut Bencher) -> Result<()> {
        let expense_report = parse_input(&read_input_file()?)?;
        b.iter(|| test::black_box(part_1(&expense_report)));
        Ok(())
    }

    /// Benchmarks part 2 on pre-parsed (sorted) input.
    #[bench]
    fn bench_part_2(b: &mut Bencher) -> Result<()> {
        let expense_report = parse_input(&read_input_file()?)?;
        b.iter(|| test::black_box(part_2(&expense_report)));
        Ok(())
    }
}
|
use std::collections::HashMap;
use std::hash::{Hash};
use std::io::{self};
// Fun experiment with abstract rewriting machine. Not actually used.
// @personal
// test
/// One instruction of the abstract rewriting machine.
///
/// The authoritative semantics of each variant are in `Machine::step`.
#[derive(Debug, Clone)]
enum Op<T> {
    /// If the top of the argument stack equals the first symbol: pop it and
    /// replace the executable stack with the code stored under the second
    /// symbol. Otherwise a no-op (falls through to the next instruction).
    Match(T, T),
    /// Push a clone of argument-stack element `k` (0 = bottom) onto the
    /// argument stack.
    CopyArgument(usize),
    /// Push a clone of traversal-stack element `k` (0 = bottom) onto the
    /// argument stack.
    CopyTraversal(usize),
    /// Push a symbol onto the control stack.
    Push(T),
    /// Pop `k` elements from the argument stack.
    DropArgument(usize),
    /// Pop `k` elements from the traversal stack.
    DropTraversal(usize),
    /// Move the top `k` argument-stack elements to the traversal stack.
    Skip(usize),
    /// Move the top `k` traversal-stack elements back to the argument stack.
    Retract(usize),
    /// Push the constructor symbol onto the argument stack. The arity
    /// argument is currently unused by `step` (arguments stay implicit).
    Build(T, usize),
    /// Replace the executable stack with the code stored under the symbol.
    Goto(T),
    /// Pop the control stack and load that symbol's code as the new
    /// executable stack (no-op when the control stack is empty).
    Recycle
}
/// Symbols of the demo program: Peano naturals (`Zero`, `Succ`) and
/// addition (`Plus` with its auxiliary states).
// NOTE(review): the auxiliary-state meanings below are inferred from the
// code table built in `main_arm` — confirm against that function.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
enum Program {
    Zero,
    Succ,
    Plus,
    /// Continuation selected when `Plus` matches `Zero` on the stack.
    PlusZero,
    /// Continuation selected when `Plus` matches `Succ` on the stack.
    PlusSucc,
    /// Constructor used to rebuild a suspended `Plus` application.
    PlusC,
}
/// Complete state of the abstract rewriting machine: four stacks plus the
/// symbol -> code lookup table driving execution.
#[derive(Debug, Clone)]
struct Machine<T> {
    // Operand/term stack the rewrite rules match against and build on.
    argument_stack: Vec<T>,
    // Pending symbols; `Op::Recycle` pops these to refill the executable stack.
    control_stack: Vec<T>,
    // Temporary parking area used by `Op::Skip` / `Op::Retract`.
    traversal_stack: Vec<T>,
    // Instructions currently being executed (popped from the end).
    executable_stack: Vec<Op<T>>,
    // symbol -> instruction sequence; loaded by Match/Goto/Recycle.
    code_table: HashMap<T, Vec<Op<T>>>
}
impl<T> Machine<T> where T: PartialEq + Eq + Hash + Clone {
    /// Executes one instruction from the executable stack.
    ///
    /// Returns `false` when the machine has halted (no instruction left to
    /// run), `true` otherwise.
    fn step(&mut self) -> bool {
        // Halt instead of panicking when there is nothing to execute.
        // (Previously an empty executable stack combined with a non-empty
        // control stack fell past the both-empty guard into
        // `pop().unwrap()` and panicked.)
        let op = match self.executable_stack.pop() {
            Some(op) => op,
            None => return false,
        };
        match op {
            Op::Match(g, h) => {
                // On a match: consume the argument and jump to handler `h`;
                // otherwise fall through to the next instruction.
                if self.argument_stack.last() == Some(&g) {
                    // `pop` replaces the previous `remove(len - 1)`.
                    self.argument_stack.pop();
                    // Figure out better way to do this (wholesale clone of
                    // the handler's code vector).
                    self.executable_stack = self.code_table.get(&h).unwrap().clone();
                }
            }
            Op::CopyArgument(k) => {
                self.argument_stack.push(self.argument_stack.get(k).unwrap().clone());
            }
            Op::CopyTraversal(k) => {
                self.argument_stack.push(self.traversal_stack.get(k).unwrap().clone());
            }
            Op::Push(h) => {
                self.control_stack.push(h);
            }
            Op::DropArgument(k) => {
                // Drop up to k elements from the top; `truncate` is the
                // idiomatic equivalent of k pops (pop on empty was a no-op).
                let new_len = self.argument_stack.len().saturating_sub(k);
                self.argument_stack.truncate(new_len);
            }
            Op::DropTraversal(k) => {
                let new_len = self.traversal_stack.len().saturating_sub(k);
                self.traversal_stack.truncate(new_len);
            }
            Op::Skip(k) => {
                // Move the top k argument elements (topmost first) to the
                // traversal stack. Pop-based transfer replaces the previous
                // `remove`-by-index loop, whose `len() - k` underflowed
                // when k exceeded the stack size.
                for _ in 0..k {
                    match self.argument_stack.pop() {
                        Some(elem) => self.traversal_stack.push(elem),
                        None => break,
                    }
                }
            }
            Op::Retract(k) => {
                // Inverse of Skip: move up to k elements back.
                for _ in 0..k {
                    match self.traversal_stack.pop() {
                        Some(elem) => self.argument_stack.push(elem),
                        None => break,
                    }
                }
            }
            Op::Build(f, _k) => {
                // This says to replace t1...tk with f(t1...tk); the arity
                // `_k` is implicit — only the constructor symbol is pushed.
                self.argument_stack.push(f);
            }
            Op::Goto(h) => {
                self.executable_stack = self.code_table.get(&h).unwrap().clone();
            }
            Op::Recycle => {
                // Instead of clone can I change a pointer?
                if let Some(elem) = self.control_stack.pop() {
                    self.executable_stack = self.code_table.get(&elem).unwrap().clone();
                }
            }
        };
        true
    }
}
/// Demo driver: runs the rewriting machine on an addition of Peano
/// naturals, printing all four stacks after every step.
// NOTE(review): the control stack below reads in prefix order as
// Plus(Succ(Zero), Succ(Zero)), i.e. 1 + 1 — confirm against `step`.
pub fn main_arm() -> io::Result<()> {
    let mut code_table = HashMap::new();
    // Code vectors execute from the END (the executable stack is popped),
    // so e.g. for Zero the Build runs before the Recycle.
    code_table.insert(Program::Zero, vec![Op::Recycle, Op::Build(Program::Zero, 0)]);
    code_table.insert(Program::Succ, vec![Op::Recycle, Op::Build(Program::Succ, 1)]);
    // Plus dispatches on the top of the argument stack: Zero and Succ get
    // their continuations, anything else falls through to PlusC.
    code_table.insert(Program::Plus, vec![Op::Goto(Program::PlusC), Op::Match(Program::Succ, Program::PlusSucc), Op::Match(Program::Zero, Program::PlusZero) ]);
    code_table.insert(Program::PlusZero, vec![Op::Recycle]);
    code_table.insert(Program::PlusSucc, vec![Op::Goto(Program::Plus), Op::Push(Program::Succ)]);
    code_table.insert(Program::PlusC, vec![Op::Recycle, Op::Build(Program::PlusC, 2), ]);
    let mut machine = Machine {
        control_stack: vec![ Program::Plus, Program::Succ, Program::Zero, Program::Succ, Program::Zero],
        // Bootstrap: the first Recycle pops Plus off the control stack.
        executable_stack: vec![Op::Recycle],
        argument_stack: vec![],
        traversal_stack: vec![],
        code_table: code_table
    };
    loop {
        // Trace format: control | executable | argument | traversal.
        println!("{:?} | {:?} | {:?} | {:?}", machine.control_stack, machine.executable_stack, machine.argument_stack, machine.traversal_stack);
        let result = machine.step();
        if !result {
            break;
        }
    }
    Ok(())
}
// P, C, E, A T |
extern crate toml;
use std::io;
use std::fs;
use std::io::Read;
/// Application configuration decoded from a TOML file (see `Config::parse`).
// NOTE(review): `RustcDecodable` is the pre-serde `rustc-serialize` derive,
// required by the old `toml::decode` API used below.
#[derive(Debug, RustcDecodable)]
pub struct Config {
    // Service API key, taken from the `api_key` TOML field.
    api_key: String,
}
/// Errors produced while loading and parsing the configuration file.
#[derive(Debug)]
pub enum ConfigParseError {
    /// The file could not be opened or read.
    Io(io::Error),
    /// The TOML was invalid or did not decode; carries a joined,
    /// human-readable list of parser errors with line:col spans.
    ParseError(String),
}
// Lets `try!` in `Config::parse` convert I/O errors automatically.
impl From<io::Error> for ConfigParseError {
    fn from(err: io::Error) -> ConfigParseError {
        ConfigParseError::Io(err)
    }
}
impl ConfigParseError {
    /// Collapses every error recorded by the TOML parser into a single
    /// `ParseError` whose message lists each error as
    /// `[lo_line:lo_col-hi_line:hi_col]: description`, comma-joined.
    fn from_toml_errors(parser: &toml::Parser) -> ConfigParseError {
        let mut rendered = Vec::new();
        for err in &parser.errors {
            let (lo_line, lo_col) = parser.to_linecol(err.lo);
            let (hi_line, hi_col) = parser.to_linecol(err.hi);
            rendered.push(format!(
                "[{}:{}-{}:{}]: {}",
                lo_line, lo_col, hi_line, hi_col, err.desc
            ));
        }
        ConfigParseError::ParseError(rendered.join(","))
    }
}
impl Config {
    /// Reads and decodes the TOML configuration file at `path`.
    ///
    /// Returns `ConfigParseError::Io` when the file cannot be opened or
    /// read, and `ConfigParseError::ParseError` when parsing or decoding
    /// fails.
    // NOTE(review): uses the pre-1.0 `toml::Parser`/`toml::decode` API and
    // the deprecated `try!` macro, consistent with the rest of this file.
    pub fn parse(path: &str) -> Result<Config, ConfigParseError> {
        let mut file: fs::File = try!(fs::File::open(path));
        let mut contents = String::new();
        try!(file.read_to_string(&mut contents));
        let mut parser = toml::Parser::new(&contents);
        // `parse` returns None on failure; details live in `parser.errors`.
        let val = parser.parse();
        if val.is_none() {
            return Err(ConfigParseError::from_toml_errors(&parser));
        }
        // NOTE(review): if decoding (not parsing) fails, `parser.errors` is
        // likely empty, so the resulting ParseError message may be blank —
        // confirm and consider a dedicated decode-error message.
        return match toml::decode::<Config>(toml::Value::Table(val.unwrap())) {
            Some(conf) => Ok(conf),
            _ => Err(ConfigParseError::from_toml_errors(&parser)),
        };
    }
}
|
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - MDIOS configuration register"]
    pub mdios_cr: MDIOS_CR,
    #[doc = "0x04 - MDIOS write flag register"]
    pub mdios_wrfr: MDIOS_WRFR,
    #[doc = "0x08 - MDIOS clear write flag register"]
    pub mdios_cwrfr: MDIOS_CWRFR,
    #[doc = "0x0c - MDIOS read flag register"]
    pub mdios_rdfr: MDIOS_RDFR,
    #[doc = "0x10 - MDIOS clear read flag register"]
    pub mdios_crdfr: MDIOS_CRDFR,
    #[doc = "0x14 - MDIOS status register"]
    pub mdios_sr: MDIOS_SR,
    #[doc = "0x18 - MDIOS clear flag register"]
    pub mdios_clrfr: MDIOS_CLRFR,
    // Padding from 0x1c up to the DINR bank at 0x100.
    _reserved7: [u8; 0xe4],
    #[doc = "0x100 - MDIOS input data register"]
    pub mdios_dinr0: MDIOS_DINR0,
    #[doc = "0x104 - MDIOS input data register"]
    pub mdios_dinr1: MDIOS_DINR1,
    #[doc = "0x108 - MDIOS input data register"]
    pub mdios_dinr2: MDIOS_DINR2,
    #[doc = "0x10c - MDIOS input data register"]
    pub mdios_dinr3: MDIOS_DINR3,
    #[doc = "0x110 - MDIOS input data register"]
    pub mdios_dinr4: MDIOS_DINR4,
    #[doc = "0x114 - MDIOS input data register"]
    pub mdios_dinr5: MDIOS_DINR5,
    #[doc = "0x118 - MDIOS input data register"]
    pub mdios_dinr6: MDIOS_DINR6,
    #[doc = "0x11c - MDIOS input data register"]
    pub mdios_dinr7: MDIOS_DINR7,
    #[doc = "0x120 - MDIOS input data register"]
    pub mdios_dinr8: MDIOS_DINR8,
    #[doc = "0x124 - MDIOS input data register"]
    pub mdios_dinr9: MDIOS_DINR9,
    #[doc = "0x128 - MDIOS input data register"]
    pub mdios_dinr10: MDIOS_DINR10,
    #[doc = "0x12c - MDIOS input data register"]
    pub mdios_dinr11: MDIOS_DINR11,
    #[doc = "0x130 - MDIOS input data register"]
    pub mdios_dinr12: MDIOS_DINR12,
    #[doc = "0x134 - MDIOS input data register"]
    pub mdios_dinr13: MDIOS_DINR13,
    #[doc = "0x138 - MDIOS input data register"]
    pub mdios_dinr14: MDIOS_DINR14,
    #[doc = "0x13c - MDIOS input data register"]
    pub mdios_dinr15: MDIOS_DINR15,
    #[doc = "0x140 - MDIOS input data register"]
    pub mdios_dinr16: MDIOS_DINR16,
    #[doc = "0x144 - MDIOS input data register"]
    pub mdios_dinr17: MDIOS_DINR17,
    #[doc = "0x148 - MDIOS input data register"]
    pub mdios_dinr18: MDIOS_DINR18,
    #[doc = "0x14c - MDIOS input data register"]
    pub mdios_dinr19: MDIOS_DINR19,
    #[doc = "0x150 - MDIOS input data register"]
    pub mdios_dinr20: MDIOS_DINR20,
    #[doc = "0x154 - MDIOS input data register"]
    pub mdios_dinr21: MDIOS_DINR21,
    #[doc = "0x158 - MDIOS input data register"]
    pub mdios_dinr22: MDIOS_DINR22,
    #[doc = "0x15c - MDIOS input data register"]
    pub mdios_dinr23: MDIOS_DINR23,
    #[doc = "0x160 - MDIOS input data register"]
    pub mdios_dinr24: MDIOS_DINR24,
    #[doc = "0x164 - MDIOS input data register"]
    pub mdios_dinr25: MDIOS_DINR25,
    #[doc = "0x168 - MDIOS input data register"]
    pub mdios_dinr26: MDIOS_DINR26,
    #[doc = "0x16c - MDIOS input data register"]
    pub mdios_dinr27: MDIOS_DINR27,
    #[doc = "0x170 - MDIOS input data register"]
    pub mdios_dinr28: MDIOS_DINR28,
    #[doc = "0x174 - MDIOS input data register"]
    pub mdios_dinr29: MDIOS_DINR29,
    #[doc = "0x178 - MDIOS input data register"]
    pub mdios_dinr30: MDIOS_DINR30,
    #[doc = "0x17c - MDIOS input data register"]
    pub mdios_dinr31: MDIOS_DINR31,
    // NOTE(review): doutr0/doutr1 docs corrected from "input" to "output"
    // to match their DOUTR siblings below — confirm against the SVD.
    #[doc = "0x180 - MDIOS output data register"]
    pub mdios_doutr0: MDIOS_DOUTR0,
    #[doc = "0x184 - MDIOS output data register"]
    pub mdios_doutr1: MDIOS_DOUTR1,
    #[doc = "0x188 - MDIOS output data register"]
    pub mdios_doutr2: MDIOS_DOUTR2,
    #[doc = "0x18c - MDIOS output data register"]
    pub mdios_doutr3: MDIOS_DOUTR3,
    #[doc = "0x190 - MDIOS output data register"]
    pub mdios_doutr4: MDIOS_DOUTR4,
    #[doc = "0x194 - MDIOS output data register"]
    pub mdios_doutr5: MDIOS_DOUTR5,
    #[doc = "0x198 - MDIOS output data register"]
    pub mdios_doutr6: MDIOS_DOUTR6,
    #[doc = "0x19c - MDIOS output data register"]
    pub mdios_doutr7: MDIOS_DOUTR7,
    #[doc = "0x1a0 - MDIOS output data register"]
    pub mdios_doutr8: MDIOS_DOUTR8,
    #[doc = "0x1a4 - MDIOS output data register"]
    pub mdios_doutr9: MDIOS_DOUTR9,
    #[doc = "0x1a8 - MDIOS output data register"]
    pub mdios_doutr10: MDIOS_DOUTR10,
    #[doc = "0x1ac - MDIOS output data register"]
    pub mdios_doutr11: MDIOS_DOUTR11,
    #[doc = "0x1b0 - MDIOS output data register"]
    pub mdios_doutr12: MDIOS_DOUTR12,
    #[doc = "0x1b4 - MDIOS output data register"]
    pub mdios_doutr13: MDIOS_DOUTR13,
    #[doc = "0x1b8 - MDIOS output data register"]
    pub mdios_doutr14: MDIOS_DOUTR14,
    #[doc = "0x1bc - MDIOS output data register"]
    pub mdios_doutr15: MDIOS_DOUTR15,
    #[doc = "0x1c0 - MDIOS output data register"]
    pub mdios_doutr16: MDIOS_DOUTR16,
    #[doc = "0x1c4 - MDIOS output data register"]
    pub mdios_doutr17: MDIOS_DOUTR17,
    #[doc = "0x1c8 - MDIOS output data register"]
    pub mdios_doutr18: MDIOS_DOUTR18,
    #[doc = "0x1cc - MDIOS output data register"]
    pub mdios_doutr19: MDIOS_DOUTR19,
    #[doc = "0x1d0 - MDIOS output data register"]
    pub mdios_doutr20: MDIOS_DOUTR20,
    #[doc = "0x1d4 - MDIOS output data register"]
    pub mdios_doutr21: MDIOS_DOUTR21,
    #[doc = "0x1d8 - MDIOS output data register"]
    pub mdios_doutr22: MDIOS_DOUTR22,
    #[doc = "0x1dc - MDIOS output data register"]
    pub mdios_doutr23: MDIOS_DOUTR23,
    #[doc = "0x1e0 - MDIOS output data register"]
    pub mdios_doutr24: MDIOS_DOUTR24,
    #[doc = "0x1e4 - MDIOS output data register"]
    pub mdios_doutr25: MDIOS_DOUTR25,
    #[doc = "0x1e8 - MDIOS output data register"]
    pub mdios_doutr26: MDIOS_DOUTR26,
    #[doc = "0x1ec - MDIOS output data register"]
    pub mdios_doutr27: MDIOS_DOUTR27,
    #[doc = "0x1f0 - MDIOS output data register"]
    pub mdios_doutr28: MDIOS_DOUTR28,
    #[doc = "0x1f4 - MDIOS output data register"]
    pub mdios_doutr29: MDIOS_DOUTR29,
    #[doc = "0x1f8 - MDIOS output data register"]
    pub mdios_doutr30: MDIOS_DOUTR30,
    #[doc = "0x1fc - MDIOS output data register"]
    pub mdios_doutr31: MDIOS_DOUTR31,
    // Padding from 0x200 up to the ID/configuration registers at 0x3f0.
    _reserved71: [u8; 0x01f0],
    #[doc = "0x3f0 - MDIOS HW configuration register"]
    pub mdios_hwcfgr: MDIOS_HWCFGR,
    #[doc = "0x3f4 - MDIOS version register"]
    pub mdios_verr: MDIOS_VERR,
    #[doc = "0x3f8 - MDIOS identification register"]
    pub mdios_ipidr: MDIOS_IPIDR,
    #[doc = "0x3fc - MDIOS size identification register"]
    pub mdios_sidr: MDIOS_SIDR,
}
#[doc = "MDIOS_CR (rw) register accessor: MDIOS configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mdios_cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_cr`]
module"]
pub type MDIOS_CR = crate::Reg<mdios_cr::MDIOS_CR_SPEC>;
#[doc = "MDIOS configuration register"]
pub mod mdios_cr;
#[doc = "MDIOS_WRFR (r) register accessor: MDIOS write flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_wrfr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_wrfr`]
module"]
pub type MDIOS_WRFR = crate::Reg<mdios_wrfr::MDIOS_WRFR_SPEC>;
#[doc = "MDIOS write flag register"]
pub mod mdios_wrfr;
#[doc = "MDIOS_CWRFR (rw) register accessor: MDIOS clear write flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_cwrfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mdios_cwrfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_cwrfr`]
module"]
pub type MDIOS_CWRFR = crate::Reg<mdios_cwrfr::MDIOS_CWRFR_SPEC>;
#[doc = "MDIOS clear write flag register"]
pub mod mdios_cwrfr;
#[doc = "MDIOS_RDFR (r) register accessor: MDIOS read flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_rdfr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_rdfr`]
module"]
pub type MDIOS_RDFR = crate::Reg<mdios_rdfr::MDIOS_RDFR_SPEC>;
#[doc = "MDIOS read flag register"]
pub mod mdios_rdfr;
#[doc = "MDIOS_CRDFR (rw) register accessor: MDIOS clear read flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_crdfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mdios_crdfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_crdfr`]
module"]
pub type MDIOS_CRDFR = crate::Reg<mdios_crdfr::MDIOS_CRDFR_SPEC>;
#[doc = "MDIOS clear read flag register"]
pub mod mdios_crdfr;
#[doc = "MDIOS_SR (r) register accessor: MDIOS status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_sr`]
module"]
pub type MDIOS_SR = crate::Reg<mdios_sr::MDIOS_SR_SPEC>;
#[doc = "MDIOS status register"]
pub mod mdios_sr;
#[doc = "MDIOS_CLRFR (rw) register accessor: MDIOS clear flag register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_clrfr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mdios_clrfr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_clrfr`]
module"]
pub type MDIOS_CLRFR = crate::Reg<mdios_clrfr::MDIOS_CLRFR_SPEC>;
#[doc = "MDIOS clear flag register"]
pub mod mdios_clrfr;
#[doc = "MDIOS_DINR0 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr0::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr0`]
module"]
pub type MDIOS_DINR0 = crate::Reg<mdios_dinr0::MDIOS_DINR0_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr0;
#[doc = "MDIOS_DINR1 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr1::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr1`]
module"]
pub type MDIOS_DINR1 = crate::Reg<mdios_dinr1::MDIOS_DINR1_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr1;
#[doc = "MDIOS_DINR2 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr2::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr2`]
module"]
pub type MDIOS_DINR2 = crate::Reg<mdios_dinr2::MDIOS_DINR2_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr2;
#[doc = "MDIOS_DINR3 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr3::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr3`]
module"]
pub type MDIOS_DINR3 = crate::Reg<mdios_dinr3::MDIOS_DINR3_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr3;
#[doc = "MDIOS_DINR4 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr4::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr4`]
module"]
pub type MDIOS_DINR4 = crate::Reg<mdios_dinr4::MDIOS_DINR4_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr4;
#[doc = "MDIOS_DINR5 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr5::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr5`]
module"]
pub type MDIOS_DINR5 = crate::Reg<mdios_dinr5::MDIOS_DINR5_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr5;
#[doc = "MDIOS_DINR6 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr6::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr6`]
module"]
pub type MDIOS_DINR6 = crate::Reg<mdios_dinr6::MDIOS_DINR6_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr6;
#[doc = "MDIOS_DINR7 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr7::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr7`]
module"]
pub type MDIOS_DINR7 = crate::Reg<mdios_dinr7::MDIOS_DINR7_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr7;
#[doc = "MDIOS_DINR8 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr8::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr8`]
module"]
pub type MDIOS_DINR8 = crate::Reg<mdios_dinr8::MDIOS_DINR8_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr8;
#[doc = "MDIOS_DINR9 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr9::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr9`]
module"]
pub type MDIOS_DINR9 = crate::Reg<mdios_dinr9::MDIOS_DINR9_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr9;
#[doc = "MDIOS_DINR10 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr10::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr10`]
module"]
pub type MDIOS_DINR10 = crate::Reg<mdios_dinr10::MDIOS_DINR10_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr10;
#[doc = "MDIOS_DINR11 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr11::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr11`]
module"]
pub type MDIOS_DINR11 = crate::Reg<mdios_dinr11::MDIOS_DINR11_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr11;
#[doc = "MDIOS_DINR12 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr12::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr12`]
module"]
pub type MDIOS_DINR12 = crate::Reg<mdios_dinr12::MDIOS_DINR12_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr12;
#[doc = "MDIOS_DINR13 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr13::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr13`]
module"]
pub type MDIOS_DINR13 = crate::Reg<mdios_dinr13::MDIOS_DINR13_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr13;
#[doc = "MDIOS_DINR14 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr14::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr14`]
module"]
pub type MDIOS_DINR14 = crate::Reg<mdios_dinr14::MDIOS_DINR14_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr14;
#[doc = "MDIOS_DINR15 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr15::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr15`]
module"]
pub type MDIOS_DINR15 = crate::Reg<mdios_dinr15::MDIOS_DINR15_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr15;
#[doc = "MDIOS_DINR16 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr16::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr16`]
module"]
pub type MDIOS_DINR16 = crate::Reg<mdios_dinr16::MDIOS_DINR16_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr16;
#[doc = "MDIOS_DINR17 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr17::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr17`]
module"]
pub type MDIOS_DINR17 = crate::Reg<mdios_dinr17::MDIOS_DINR17_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr17;
#[doc = "MDIOS_DINR18 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr18::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr18`]
module"]
pub type MDIOS_DINR18 = crate::Reg<mdios_dinr18::MDIOS_DINR18_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr18;
#[doc = "MDIOS_DINR19 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr19::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr19`]
module"]
pub type MDIOS_DINR19 = crate::Reg<mdios_dinr19::MDIOS_DINR19_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr19;
#[doc = "MDIOS_DINR20 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr20::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr20`]
module"]
pub type MDIOS_DINR20 = crate::Reg<mdios_dinr20::MDIOS_DINR20_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr20;
#[doc = "MDIOS_DINR21 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr21::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr21`]
module"]
pub type MDIOS_DINR21 = crate::Reg<mdios_dinr21::MDIOS_DINR21_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr21;
#[doc = "MDIOS_DINR22 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr22::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr22`]
module"]
pub type MDIOS_DINR22 = crate::Reg<mdios_dinr22::MDIOS_DINR22_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr22;
#[doc = "MDIOS_DINR23 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr23::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr23`]
module"]
pub type MDIOS_DINR23 = crate::Reg<mdios_dinr23::MDIOS_DINR23_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr23;
#[doc = "MDIOS_DINR24 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr24::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr24`]
module"]
pub type MDIOS_DINR24 = crate::Reg<mdios_dinr24::MDIOS_DINR24_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr24;
#[doc = "MDIOS_DINR25 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr25::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr25`]
module"]
pub type MDIOS_DINR25 = crate::Reg<mdios_dinr25::MDIOS_DINR25_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr25;
#[doc = "MDIOS_DINR26 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr26::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr26`]
module"]
pub type MDIOS_DINR26 = crate::Reg<mdios_dinr26::MDIOS_DINR26_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr26;
#[doc = "MDIOS_DINR27 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr27::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr27`]
module"]
pub type MDIOS_DINR27 = crate::Reg<mdios_dinr27::MDIOS_DINR27_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr27;
#[doc = "MDIOS_DINR28 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr28::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr28`]
module"]
pub type MDIOS_DINR28 = crate::Reg<mdios_dinr28::MDIOS_DINR28_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr28;
#[doc = "MDIOS_DINR29 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr29::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr29`]
module"]
pub type MDIOS_DINR29 = crate::Reg<mdios_dinr29::MDIOS_DINR29_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr29;
#[doc = "MDIOS_DINR30 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr30::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr30`]
module"]
pub type MDIOS_DINR30 = crate::Reg<mdios_dinr30::MDIOS_DINR30_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr30;
#[doc = "MDIOS_DINR31 (r) register accessor: MDIOS input data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_dinr31::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_dinr31`]
module"]
pub type MDIOS_DINR31 = crate::Reg<mdios_dinr31::MDIOS_DINR31_SPEC>;
#[doc = "MDIOS input data register"]
pub mod mdios_dinr31;
#[doc = "MDIOS_DOUTR0 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr0::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr0`]
module"]
pub type MDIOS_DOUTR0 = crate::Reg<mdios_doutr0::MDIOS_DOUTR0_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr0;
#[doc = "MDIOS_DOUTR1 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr1::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr1`]
module"]
pub type MDIOS_DOUTR1 = crate::Reg<mdios_doutr1::MDIOS_DOUTR1_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr1;
#[doc = "MDIOS_DOUTR2 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr2::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr2`]
module"]
pub type MDIOS_DOUTR2 = crate::Reg<mdios_doutr2::MDIOS_DOUTR2_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr2;
#[doc = "MDIOS_DOUTR3 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr3::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr3`]
module"]
pub type MDIOS_DOUTR3 = crate::Reg<mdios_doutr3::MDIOS_DOUTR3_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr3;
#[doc = "MDIOS_DOUTR4 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr4::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr4`]
module"]
pub type MDIOS_DOUTR4 = crate::Reg<mdios_doutr4::MDIOS_DOUTR4_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr4;
#[doc = "MDIOS_DOUTR5 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr5::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr5`]
module"]
pub type MDIOS_DOUTR5 = crate::Reg<mdios_doutr5::MDIOS_DOUTR5_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr5;
#[doc = "MDIOS_DOUTR6 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr6::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr6`]
module"]
pub type MDIOS_DOUTR6 = crate::Reg<mdios_doutr6::MDIOS_DOUTR6_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr6;
#[doc = "MDIOS_DOUTR7 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr7::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr7`]
module"]
pub type MDIOS_DOUTR7 = crate::Reg<mdios_doutr7::MDIOS_DOUTR7_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr7;
#[doc = "MDIOS_DOUTR8 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr8::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr8`]
module"]
pub type MDIOS_DOUTR8 = crate::Reg<mdios_doutr8::MDIOS_DOUTR8_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr8;
#[doc = "MDIOS_DOUTR9 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr9::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr9`]
module"]
pub type MDIOS_DOUTR9 = crate::Reg<mdios_doutr9::MDIOS_DOUTR9_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr9;
#[doc = "MDIOS_DOUTR10 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr10::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr10`]
module"]
pub type MDIOS_DOUTR10 = crate::Reg<mdios_doutr10::MDIOS_DOUTR10_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr10;
#[doc = "MDIOS_DOUTR11 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr11::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr11`]
module"]
pub type MDIOS_DOUTR11 = crate::Reg<mdios_doutr11::MDIOS_DOUTR11_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr11;
#[doc = "MDIOS_DOUTR12 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr12::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr12`]
module"]
pub type MDIOS_DOUTR12 = crate::Reg<mdios_doutr12::MDIOS_DOUTR12_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr12;
#[doc = "MDIOS_DOUTR13 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr13::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr13`]
module"]
pub type MDIOS_DOUTR13 = crate::Reg<mdios_doutr13::MDIOS_DOUTR13_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr13;
#[doc = "MDIOS_DOUTR14 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr14::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr14`]
module"]
pub type MDIOS_DOUTR14 = crate::Reg<mdios_doutr14::MDIOS_DOUTR14_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr14;
#[doc = "MDIOS_DOUTR15 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr15::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr15`]
module"]
pub type MDIOS_DOUTR15 = crate::Reg<mdios_doutr15::MDIOS_DOUTR15_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr15;
#[doc = "MDIOS_DOUTR16 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr16::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr16`]
module"]
pub type MDIOS_DOUTR16 = crate::Reg<mdios_doutr16::MDIOS_DOUTR16_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr16;
#[doc = "MDIOS_DOUTR17 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr17::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr17`]
module"]
pub type MDIOS_DOUTR17 = crate::Reg<mdios_doutr17::MDIOS_DOUTR17_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr17;
#[doc = "MDIOS_DOUTR18 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr18::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr18`]
module"]
pub type MDIOS_DOUTR18 = crate::Reg<mdios_doutr18::MDIOS_DOUTR18_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr18;
#[doc = "MDIOS_DOUTR19 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr19::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr19`]
module"]
pub type MDIOS_DOUTR19 = crate::Reg<mdios_doutr19::MDIOS_DOUTR19_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr19;
#[doc = "MDIOS_DOUTR20 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr20::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr20`]
module"]
pub type MDIOS_DOUTR20 = crate::Reg<mdios_doutr20::MDIOS_DOUTR20_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr20;
#[doc = "MDIOS_DOUTR21 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr21::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr21`]
module"]
pub type MDIOS_DOUTR21 = crate::Reg<mdios_doutr21::MDIOS_DOUTR21_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr21;
#[doc = "MDIOS_DOUTR22 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr22::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr22`]
module"]
pub type MDIOS_DOUTR22 = crate::Reg<mdios_doutr22::MDIOS_DOUTR22_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr22;
#[doc = "MDIOS_DOUTR23 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr23::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr23`]
module"]
pub type MDIOS_DOUTR23 = crate::Reg<mdios_doutr23::MDIOS_DOUTR23_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr23;
#[doc = "MDIOS_DOUTR24 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr24::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr24`]
module"]
pub type MDIOS_DOUTR24 = crate::Reg<mdios_doutr24::MDIOS_DOUTR24_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr24;
#[doc = "MDIOS_DOUTR25 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr25::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr25`]
module"]
pub type MDIOS_DOUTR25 = crate::Reg<mdios_doutr25::MDIOS_DOUTR25_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr25;
#[doc = "MDIOS_DOUTR26 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr26::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr26`]
module"]
pub type MDIOS_DOUTR26 = crate::Reg<mdios_doutr26::MDIOS_DOUTR26_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr26;
#[doc = "MDIOS_DOUTR27 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr27::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr27`]
module"]
pub type MDIOS_DOUTR27 = crate::Reg<mdios_doutr27::MDIOS_DOUTR27_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr27;
#[doc = "MDIOS_DOUTR28 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr28::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr28`]
module"]
pub type MDIOS_DOUTR28 = crate::Reg<mdios_doutr28::MDIOS_DOUTR28_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr28;
#[doc = "MDIOS_DOUTR29 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr29::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr29`]
module"]
pub type MDIOS_DOUTR29 = crate::Reg<mdios_doutr29::MDIOS_DOUTR29_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr29;
#[doc = "MDIOS_DOUTR30 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr30::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr30`]
module"]
pub type MDIOS_DOUTR30 = crate::Reg<mdios_doutr30::MDIOS_DOUTR30_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr30;
#[doc = "MDIOS_DOUTR31 (r) register accessor: MDIOS output data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_doutr31::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_doutr31`]
module"]
pub type MDIOS_DOUTR31 = crate::Reg<mdios_doutr31::MDIOS_DOUTR31_SPEC>;
#[doc = "MDIOS output data register"]
pub mod mdios_doutr31;
#[doc = "MDIOS_HWCFGR (r) register accessor: MDIOS HW configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_hwcfgr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_hwcfgr`]
module"]
pub type MDIOS_HWCFGR = crate::Reg<mdios_hwcfgr::MDIOS_HWCFGR_SPEC>;
#[doc = "MDIOS HW configuration register"]
pub mod mdios_hwcfgr;
#[doc = "MDIOS_VERR (r) register accessor: MDIOS version register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_verr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_verr`]
module"]
pub type MDIOS_VERR = crate::Reg<mdios_verr::MDIOS_VERR_SPEC>;
#[doc = "MDIOS version register"]
pub mod mdios_verr;
#[doc = "MDIOS_IPIDR (r) register accessor: MDIOS identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_ipidr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_ipidr`]
module"]
pub type MDIOS_IPIDR = crate::Reg<mdios_ipidr::MDIOS_IPIDR_SPEC>;
#[doc = "MDIOS identification register"]
pub mod mdios_ipidr;
#[doc = "MDIOS_SIDR (r) register accessor: MDIOS size identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mdios_sidr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mdios_sidr`]
module"]
pub type MDIOS_SIDR = crate::Reg<mdios_sidr::MDIOS_SIDR_SPEC>;
#[doc = "MDIOS size identification register"]
pub mod mdios_sidr;
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use cras_sys::gen::CRAS_STREAM_DIRECTION;
/// An enum of the valid directions of an audio stream.
/// Convertible into CRAS_STREAM_DIRECTION via direction.into()
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum StreamDirection {
    /// Audio rendered by the system; maps to `CRAS_STREAM_OUTPUT`.
    Playback,
    /// Audio captured by the system; maps to `CRAS_STREAM_INPUT`.
    Capture,
}
/// Conversion to the C enum used by libcras.
///
/// Implemented as `From` (rather than `Into`) per Rust convention
/// (clippy `from_over_into`); the blanket `impl Into` keeps existing
/// `direction.into()` call sites working unchanged.
impl From<StreamDirection> for CRAS_STREAM_DIRECTION {
    fn from(direction: StreamDirection) -> CRAS_STREAM_DIRECTION {
        match direction {
            StreamDirection::Playback => CRAS_STREAM_DIRECTION::CRAS_STREAM_OUTPUT,
            StreamDirection::Capture => CRAS_STREAM_DIRECTION::CRAS_STREAM_INPUT,
        }
    }
}
|
///
/// Blitz Explorer
///
/// Index/catalog of files content
///
/// Copyright 2019 Luis Fernando Batels <luisfbatels@gmail.com>
///
use std::fs::File;
use std::path::{Path, PathBuf};
use std::io::{BufReader, BufWriter, copy};
use std::sync::Arc;
use std::str;
use std::collections::{HashMap, LinkedList};
use flate2::read::GzDecoder;
use tar::Archive;
use sled::{Db, Tree};
use super::file::File as IndexedFile;
use super::file::FileTar;
/// Index/catalog of the content of compressed (`.tar.gz`) files,
/// persisted in a sled database.
pub struct Catalog {
    // sled database holding one tree per indexed tar ("tar::{path}")
    // plus per-directory inode trees ("inotree::{ino}").
    pub db: Db,
    // Directory where extracted entries are cached on disk.
    pub cache_extract: String
}
impl Catalog {
// Index the content of compressed file
/// Indexes every entry of a `.tar.gz` archive into the sled database.
///
/// Returns `Some(FileTar)` describing the indexed archive, or `None`
/// when the path is not a regular `.tar.gz` file, cannot be opened, or
/// was already indexed.
pub fn catalog_file(&mut self, path: &Path) -> Option<FileTar> {
    info!("Indexing {}...", path.display());
    // Only regular files can be indexed.
    if !path.is_file() {
        warn!("Is not a file {}. Skiping...", path.display());
        return None;
    }
    // Only gzip-compressed tar archives are supported.
    if !FileTar::path_to_string(path, false).ends_with(".tar.gz") {
        warn!("Is not a tar.gz file {}. Skiping...", path.display());
        return None;
    }
    let ftar = FileTar::from_path(path);
    // Skip the work if this archive was indexed on a previous run.
    if self.is_indexed(&ftar) {
        warn!("Already indexed {}. Skiping...", path.display());
        return None;
    }
    let archive = File::open(path);
    if let Err(e) = archive {
        error!("Can't open the file {}: {}. Skiping...", path.display(), e);
        return None;
    }
    // Stream the archive: file -> buffered reader -> gzip decoder -> tar.
    let buffer_archive = BufReader::new(archive.unwrap());
    let decoder = GzDecoder::new(buffer_archive);
    let buffer_decoder = BufReader::new(decoder);
    let mut tar = Archive::new(buffer_decoder);
    let entries = tar.entries()
        .expect("Error on get the entries of tar file");
    let tree = self.get_tree(&ftar);
    // Maps a directory path inside the tar to its inode number and the
    // inodes of its direct children.
    let mut parents_inos: HashMap<String, (u64, LinkedList<u64>)> = HashMap::new();
    { // Root dir of tar file
        let ino = self.get_last_ino() + 1;
        parents_inos.insert("".to_string(), (ino, LinkedList::new()));
        self.set_last_ino(ino);
    }
    for file in entries {
        let header = file
            .expect("Erro on get the entrie file header")
            .header()
            .clone();
        let full_path = &header.path()
            .expect("Can't get the full path");
        // Each entry gets a fresh, monotonically increasing inode.
        let ino = self.get_last_ino() + 1;
        let full_path_str = FileTar::path_to_string(full_path, true);
        let file_name_str = FileTar::path_to_string(full_path, false);
        // Depth inside the archive, derived from the '/' separators.
        let mut level = full_path_str.clone().matches("/").count();
        if header.entry_type().is_file() {
            level = level + 1;
        }
        // The parent path is everything before the entry's own name.
        // NOTE(review): rfind of the file name could match an earlier
        // identical substring in pathological paths — confirm.
        let parent_n = full_path_str.rfind(file_name_str.as_str())
            .expect("Error on get the pos of file name");
        let parent = &full_path_str.clone()[..parent_n];
        if header.entry_type().is_dir() && !parents_inos.contains_key(&full_path_str) {
            parents_inos.insert(full_path_str.clone(), (ino, LinkedList::new()));
        }
        // Register this entry as a child of its parent directory.
        if let Some(parent_list) = parents_inos.get_mut(parent) {
            parent_list.1.push_back(ino);
        }
        let indexed_file = IndexedFile {
            full_path: full_path_str,
            file_name: file_name_str,
            mtime: header.mtime()
                .expect("Can't determine de mtime"),
            size: header.size()
                .expect("Can't determine de size"),
            is_file: header.entry_type().is_file(),
            level_path: level,
            ino: ino
        };
        // Persist the entry metadata (JSON), keyed by its full path.
        let data = serde_json::to_string(&indexed_file)
            .expect("Error on Serialize the file")
            .to_string();
        tree.set(FileTar::path_to_string(full_path, true).as_bytes(), data.as_bytes().to_vec())
            .expect("Error on create index for a file");
        self.set_last_ino(ino);
    }
    // Persist the directory -> children inode mapping.
    for (_parent, inos) in parents_inos {
        let files = self.get_tree_inos(inos.0);
        for file in inos.1 {
            files.set(file.to_string().as_bytes(), file.to_string().as_bytes().to_vec())
                .expect("Error on set the index tree ino");
        }
    }
    self.db.flush()
        .expect("Error on flush db");
    info!("Indexing {}...OK", path.display());
    return Some(ftar);
}
// Returns the sled Tree that stores the indexed entries of `tar`.
// Tree names are namespaced under the "tar::" prefix.
fn get_tree(&mut self, tar: &FileTar) -> Arc<Tree> {
    let tree_name = format!("tar::{}", tar.full_path.clone());
    self.db
        .open_tree(tree_name)
        .expect("Can't open the file tree")
}
// Returns the sled Tree that caches the children inodes of `ino`.
// Tree names are namespaced under the "inotree::" prefix.
fn get_tree_inos(&mut self, ino: u64) -> Arc<Tree> {
    let tree_name = format!("inotree::{}", ino);
    self.db
        .open_tree(tree_name)
        .expect("Can't open the ino tree")
}
// Persists `ino` as the last inode number handed out, stored as a
// decimal string under the "last_ino" key.
fn set_last_ino(&mut self, ino: u64) {
    let encoded = ino.to_string().as_bytes().to_vec();
    self.db
        .set("last_ino".to_string(), encoded)
        .expect("Error on update the last ino");
}
// Returns the last inode number handed out, or 20000 when the key is
// missing, unreadable, or not a valid u64.
fn get_last_ino(&mut self) -> u64 {
    if let Ok(Some(val)) = self.db.get("last_ino".to_string()) {
        let inostr = str::from_utf8(&val)
            .expect("Error on get last ino from db");
        if let Ok(ino) = inostr.parse::<u64>() {
            return ino;
        }
    }
    // Default starting inode when nothing is stored yet.
    20000
}
// Returns the child inodes of `ino`, read from its "inotree" cache.
pub fn get_files_inos(&mut self, ino: u64) -> Vec<u64> {
    let sub_inos = self.get_tree_inos(ino);
    sub_inos
        .iter()
        .values()
        .map(|val| {
            let uval = val.expect("Error on get the val of indexed ino");
            let sub_ino_str = str::from_utf8(&uval)
                .expect("Error on get string ut8 from indexed ino");
            sub_ino_str
                .parse::<u64>()
                .expect("Error on parse the ino str")
        })
        .collect()
}
// Returns every indexed file of the tar, deserialized from the JSON
// values stored in its tree.
pub fn get_catalog(&mut self, tar: &FileTar) -> Vec<IndexedFile> {
    let tree = self.get_tree(tar);
    tree.iter()
        .values()
        .map(|val| {
            let uval = val.expect("Error on get the val of indexed file");
            let file = str::from_utf8(&uval)
                .expect("Error on get string ut8 from indexed file");
            serde_json::from_str(&file)
                .expect("Error on Deserialize the file")
        })
        .collect()
}
// Returns the list of indexed archives, reconstructed from the tree
// names that carry the "tar::" namespace prefix.
pub fn get_catalogs(&mut self) -> Vec<FileTar> {
    self.db
        .tree_names()
        .into_iter()
        .filter_map(|ucat| {
            let cat = str::from_utf8(&ucat)
                .expect("Error on get string ut8 from catalog key");
            if !cat.starts_with("tar::") {
                return None;
            }
            // Strip the namespace prefix to recover the original path.
            let catn = cat.replacen("tar::", "", 1);
            let path_buf = PathBuf::from(catn);
            Some(FileTar::from_path(path_buf.as_path()))
        })
        .collect()
}
// Returns whether the tar already has at least one indexed entry.
pub fn is_indexed(&mut self, tar: &FileTar) -> bool {
    let tree = self.get_tree(tar);
    // Any stored value at all means the archive was indexed before.
    tree.iter().values().next().is_some()
}
// Burn/remove the indexed content, if exists, of the file tar
/// Drops the index tree of `tar` from the database. No-op (with a
/// warning) when the archive was never indexed.
pub fn burn_catalog(&mut self, tar: &FileTar) {
    info!("Burning {}...", tar.full_path);
    if !self.is_indexed(tar) {
        warn!("Not indexed {}. Skiping...", tar.full_path);
        return;
    }
    // BUG FIX: the index tree is created as "tar::{path}" (see get_tree
    // and get_catalogs), so the same namespaced name must be used when
    // dropping it; dropping the bare path never matched any tree.
    self.db.drop_tree(format!("tar::{}", tar.full_path).as_bytes())
        .expect("Can't drop the file tree");
    // NOTE(review): the per-directory "inotree::{ino}" trees created by
    // catalog_file are not removed here — confirm whether they should be.
    info!("Burning {}...OK", tar.full_path);
}
// Extract a file from .tar file
/// Extracts `ffile` from archive `ftar`, returning an open handle to its
/// content. Serves from the on-disk cache when possible; otherwise scans
/// the archive, writes the entry into the cache, and recurses once to
/// return the cached copy. Returns `None` when the archive can't be
/// opened or the entry isn't found.
pub fn extract_file(&self, ftar: &FileTar, ffile: &IndexedFile) -> Option<File> {
    // Fast path: a previous call already unpacked this entry.
    let (is_cached, cache) = self.cached_file(ftar, ffile);
    if is_cached {
        return Some(cache);
    }
    let path = Path::new(&ftar.full_path);
    let archive = File::open(path);
    if let Err(e) = archive {
        error!("Can't open the file {}: {}. Skiping...", path.display(), e);
        return None;
    }
    // Stream the archive: file -> buffered reader -> gzip decoder -> tar.
    let buffer_archive = BufReader::new(archive.unwrap());
    let decoder = GzDecoder::new(buffer_archive);
    let buffer_decoder = BufReader::new(decoder);
    let mut tar = Archive::new(buffer_decoder);
    let entries = tar.entries()
        .expect("Error on get the entries of tar file");
    // Linear scan for the requested entry.
    for entrie in entries {
        let file = entrie
            .expect("Erro on get the entrie file");
        let header = file.header().clone();
        let full_path = &header.path()
            .expect("Can't get the full path");
        if FileTar::path_to_string(full_path, true) == ffile.full_path {
            // Make the cache for use in the next requests
            copy(&mut BufReader::new(file), &mut BufWriter::new(cache))
                .expect("Error on make the cache");
            // We get the content from cache
            // (single level of recursion: the cache now exists, so the
            // recursive call takes the fast path above).
            return self.extract_file(ftar, ffile);
        }
    }
    // NOTE(review): when the entry is missing, cached_file() has already
    // created an empty cache file on disk; a later call would treat it
    // as a valid cache hit — confirm this is intended.
    return None;
}
// Returns (true, file) with the existing cache of the indexed file, or
// (false, file) with a freshly created, empty cache file for the caller
// to fill.
fn cached_file(&self, ftar: &FileTar, ffile: &IndexedFile) -> (bool, File) {
    let cached_name = format!("{}/{}_{}", self.cache_extract, ftar.file_name, ffile.full_path.replace("/", "_"));
    let path = Path::new(&cached_name);
    if !path.exists() {
        // No cache yet: create an empty file the caller will fill.
        let file = File::create(path)
            .expect("Cant create the cache file");
        return (false, file);
    }
    let file = File::open(path)
        .expect("Cant open the cache file");
    (true, file)
}
}
|
//! This module handles managing all the data needed for the application to run. All read/write
//! operations should go through here.
//mod config;
//mod index;
pub mod reader;
//mod repository;
//mod updater;
//use config::Config;
//use index::Index;
//use repository::Repository;
//use updater::Updater;
/// Facade over the application's stored data. Most members are still
/// stubbed out pending the config/index/repository/updater modules.
pub struct Data {
    // config: Config,
    // index: Index,
    // repository: Repository,
    // updater: Updater
}
impl Data {
    /// Builds an empty `Data` instance (wired-up fields are still
    /// commented out pending their modules).
    pub fn new() -> Data {
        // let config = Config::new();
        Data {
            // config: config,
            // index: Repository::new(config),
            // repository: Repository::new(config),
            // updater: Updater::new(config)
        }
    }
    /// Looks up an integer configuration value. Stub: always `None`.
    pub fn config_u32(path: &'static str) -> Option<u32> {
        None
    }
    /// Looks up a string configuration value. Stub: always `None`.
    pub fn config_str(path: &'static str) -> Option<&'static str> {
        None
    }
}
|
use std::any::{Any, TypeId};
use crate::core::{
world::World,
entity::Entity,
};
/// Stateless namespace of associated functions for attaching, removing
/// and querying components on entities in a `World`.
pub struct ComponentManager;
impl ComponentManager {
/// Add a component to an entity.
///
/// Silently does nothing when `entity` is not alive. Lazily creates the
/// component array for `T` on first use, stores the boxed component at
/// the entity's slot and flips the matching bit in its signature.
pub fn add_component<T: Any>(component: T, entity: Entity, world: &mut World) {
    if !world.living_entities.contains(&entity) {
        return
    }
    let type_id = TypeId::of::<T>();
    // Ensure that the component array of the current component type exist.
    if world.components_index.get(&type_id).is_none() {
        world.components_index.insert(type_id, world.components.len());
        world.components.push(Vec::with_capacity(world.living_entities.capacity()));
    }
    // Retrieve the component array index...
    let index = *world.components_index.get(&type_id).unwrap();
    // Lazily fill the array with None so it can be indexed by entity id.
    // NOTE(review): this assumes `living_entities.capacity()` is an upper
    // bound for every entity id; the indexing below panics otherwise —
    // confirm that invariant holds in World.
    if world.components[index].is_empty() {
        for _ in 0..world.components[index].capacity() {
            world.components[index].push(None);
        }
    }
    world.components[index][entity] = Some(Box::new(component));
    // Update the entity signature.
    world.entities_signature[entity].set(index, true);
}
/// Remove a component from an entity.
///
/// Returns `true` when the entity is alive, owns a `T` component, and
/// its signature bit was cleared; `false` otherwise.
pub fn del_component<T: Any>(entity: Entity, world: &mut World) -> bool {
    if !world.living_entities.contains(&entity) {
        return false;
    }
    // Resolve the component array index for this component type.
    let index = match world.components_index.get(&TypeId::of::<T>()) {
        Some(index) => *index,
        None => return false,
    };
    // The entity must actually own this component.
    if !world.entities_signature[entity].test(index) {
        return false;
    }
    // Update the entity signature.
    world.entities_signature[entity].set(index, false);
    // NOTE: We don't drop() the component!
    true
}
/// Returns a shared reference to the entity's `T` component, or `None`
/// when the entity is dead, the type is unknown, or the slot is empty.
pub fn get_component<T: Any>(entity: Entity, world: &World) -> Option<&T> {
    if !world.living_entities.contains(&entity) {
        return None;
    }
    // `?` short-circuits on unknown component type or empty slot.
    let index = *world.components_index.get(&TypeId::of::<T>())?;
    world.components[index][entity]
        .as_ref()?
        .downcast_ref::<T>()
}
/// Returns an exclusive reference to the entity's `T` component, or
/// `None` when the entity is dead, the type is unknown, or the slot is
/// empty.
pub fn get_component_mut<T: Any>(entity: Entity, world: &mut World) -> Option<&mut T> {
    if !world.living_entities.contains(&entity) {
        return None;
    }
    // `?` short-circuits on unknown component type or empty slot.
    let index = *world.components_index.get(&TypeId::of::<T>())?;
    world.components[index][entity]
        .as_mut()?
        .downcast_mut::<T>()
}
} |
use std::{path::PathBuf, sync::{Arc, Mutex, atomic::AtomicBool}, thread::JoinHandle, time::Duration};
use steamworks::PublishedFileId;
use tauri::Webview;
use crate::{transaction_data, transactions::{Transaction, TransactionChannel, Transactions}};
/// One in-flight Workshop download tracked by the listener thread.
struct ActiveDownload {
    // Transaction used to read the currently reported progress.
    transaction: Arc<Transaction>,
    // Channel used to push data/progress/finish events to the frontend.
    channel: TransactionChannel,
    // Steam Workshop item being downloaded.
    id: PublishedFileId,
    // True once the total size has been sent over the channel.
    sent_data: bool
}
/// Tracks active Steam Workshop downloads and owns the background thread
/// that polls their progress.
pub(crate) struct WorkshopDownloader {
    // Downloads currently being polled; shared with the listener thread.
    downloads: Arc<Mutex<Vec<ActiveDownload>>>,
    // Handle of the polling thread, when one is running.
    thread: Option<JoinHandle<()>>,
    // Flag used to ask the polling thread to exit.
    kill: Arc<AtomicBool>,
}
impl WorkshopDownloader {
/// Creates a downloader with no active downloads and no listener thread.
pub(crate) fn init() -> Self {
    Self {
        downloads: Arc::new(Mutex::new(Vec::new())),
        thread: None,
        kill: Arc::new(AtomicBool::new(false)),
    }
}
/// Signals the listener thread to stop and blocks until it has exited.
/// No-op when no listener thread is running.
pub(crate) fn kill(&mut self) {
    if let Some(handle) = self.thread.take() {
        self.kill.store(true, std::sync::atomic::Ordering::Release);
        handle.join().ok();
    }
}
/// Spawns the background thread that polls Steam for download progress
/// and forwards data/progress/finish events to each download's
/// transaction channel. No-op when a listener is already running.
pub(crate) fn listen(&mut self) {
    if self.thread.is_some() { return; }
    // Fresh kill flag so a previous kill() can't stop the new thread.
    self.kill = Arc::new(AtomicBool::new(false));
    let active_downloads = self.downloads.clone();
    let kill = self.kill.clone();
    self.thread = Some(std::thread::spawn(move || {
        // TODO calculate download speed
        // FIXME deadlock when switching to addon size analyzer whilst this thread is active
        // (the lock is now released before sleeping — see below — but the
        // WORKSHOP read-lock path should still be verified)
        let mut killed = false;
        loop {
            if kill.load(std::sync::atomic::Ordering::Acquire) { killed = true; break; }
            let mut active_downloads = active_downloads.lock().unwrap();
            let mut finished = active_downloads.is_empty();
            if !finished {
                finished = true;
                let ugc = crate::WORKSHOP.read().unwrap().client.ugc();
                let mut i = 0;
                while i < active_downloads.len() {
                    let download = active_downloads.get_mut(i).unwrap();
                    match ugc.item_download_info(download.id) {
                        Some((downloaded, total)) => {
                            // Steam reports a 0 total until the download
                            // is dispatched; keep polling this item.
                            if total == 0 { finished = false; i += 1; continue; }
                            if downloaded != total {
                                // Send the total size exactly once.
                                if !download.sent_data {
                                    download.sent_data = true;
                                    download.channel.data(transaction_data!(total));
                                }
                                finished = false;
                                let progress = (downloaded as f64) / (total as f64);
                                if download.transaction.progress() != progress {
                                    download.channel.progress(progress);
                                }
                                i += 1;
                                continue;
                            }
                        },
                        None => {}
                    }
                    // Download complete (or unknown to Steam): finish the
                    // transaction and remove it. BUG FIX: do not advance
                    // `i` here — removal shifts the next element into slot
                    // `i`; the old `remove(i - 1)` after an unconditional
                    // increment skipped that element for a poll cycle.
                    download.channel.finish(transaction_data!(()));
                    active_downloads.remove(i);
                }
            }
            if finished { break; }
            // BUG FIX: release the downloads mutex before sleeping so
            // other threads (e.g. download()) aren't blocked for the
            // whole 50 ms poll interval.
            drop(active_downloads);
            std::thread::sleep(Duration::from_millis(50));
        }
        if !killed {
            // Thread exits on its own: clear the stored JoinHandle so a
            // future listen() can start a new listener.
            match crate::WORKSHOP_DOWNLOADER.try_write() {
                Ok(mut write) => write.thread = None,
                Err(_) => {
                    #[cfg(debug_assertions)]
                    println!("[WorkshopDownloader] Failed to delete JoinHandle for listener thread.")
                }
            }
        }
    }));
}
}
/// Queues Steam Workshop downloads for `ids` and resolves the webview
/// promise with `(transaction_ids, failed_ids)`.
///
/// Returns `Err` immediately when any id fails to parse as a `u64`
/// `PublishedFileId`.
/// NOTE(review): `path`, `named_dir`, `tmp`, `downloads` and `addons`
/// are unused in this body — confirm whether they are still needed.
pub(crate) fn download(callback: String, reject: String, webview: &mut Webview<'_>, mut ids: Vec<String>, path: Option<PathBuf>, named_dir: bool, tmp: bool, downloads: bool, addons: bool) -> Result<(), String> {
    // Parse every id up front; reject the whole request on any bad id.
    let ids = {
        let input_ids = ids.len();
        let ids: Vec<PublishedFileId> = ids.into_iter().filter_map(|x| x.parse::<u64>().ok().map(|x| PublishedFileId(x))).collect();
        if ids.len() != input_ids { return Err("Failed to parse PublishedFileId".to_string()); }
        ids
    };
    // One webview handle per transaction, consumed inside the closure.
    let mut webview_muts = Vec::with_capacity(ids.len());
    for _ in 0..ids.len() { webview_muts.push(webview.as_mut()); }
    let mut webview_muts = webview_muts.into_iter();
    tauri::execute_promise(webview, move || {
        let mut transaction_ids: Vec<(usize, PublishedFileId)> = Vec::with_capacity(ids.len());
        let mut failed: Vec<PublishedFileId> = Vec::with_capacity(ids.len());
        let mut downloader = crate::WORKSHOP_DOWNLOADER.write().unwrap();
        let workshop = crate::WORKSHOP.read().unwrap();
        let ugc = workshop.client.ugc();
        // Stop the current listener before mutating the downloads list,
        // then take the lock.
        let mut downloads = {
            downloader.kill();
            downloader.downloads.lock().unwrap()
        };
        for id in ids {
            // Ask Steam to start the download; collect failures.
            if !ugc.download_item(id, true) {
                failed.push(id);
                continue
            }
            let transaction = Transactions::new(webview_muts.next().unwrap()).build();
            transaction_ids.push((transaction.id, id));
            downloads.push(ActiveDownload {
                channel: transaction.channel(),
                transaction,
                id,
                sent_data: false
            });
        }
        // Release the lock before (re)starting the listener thread.
        if !downloads.is_empty() {
            drop(downloads);
            downloader.listen();
        }
        Ok((transaction_ids, failed))
    }, callback, reject);
    Ok(())
}
use libc;
use std::os::unix::io::RawFd;
use {Errno, Result};
use std::ffi::CStr;
// Flags accepted by memfd_create(2).
bitflags!(
    flags MemFdCreateFlag: libc::c_uint {
        // Set the close-on-exec (FD_CLOEXEC) flag on the new descriptor.
        const MFD_CLOEXEC = 0x0001,
        // Allow sealing operations (fcntl F_ADD_SEALS) on the file.
        const MFD_ALLOW_SEALING = 0x0002,
    }
);
/// Create an anonymous, memory-backed file and return its file descriptor.
///
/// Thin wrapper over the raw `memfd_create(2)` syscall. `name` is only used
/// for debugging purposes (it appears in `/proc/self/fd`).
pub fn memfd_create(name: &CStr, flags: MemFdCreateFlag) -> Result<RawFd> {
    use sys::syscall::{syscall, MEMFD_CREATE};
    // Raw syscall: NUL-terminated name pointer plus the flag bits.
    let res = unsafe { syscall(MEMFD_CREATE, name.as_ptr(), flags.bits()) };
    // Negative return values are translated into Errno errors.
    Errno::result(res).map(|r| r as RawFd)
}
|
use crate::{input_validators::*, ArgConstant};
use clap::{App, Arg};
/// `--blockhash`: pin the transaction to a specific recent blockhash.
pub const BLOCKHASH_ARG: ArgConstant<'static> = ArgConstant {
    name: "blockhash",
    long: "blockhash",
    help: "Use the supplied blockhash",
};
/// `--sign-only`: sign offline without submitting; requires `--blockhash`.
pub const SIGN_ONLY_ARG: ArgConstant<'static> = ArgConstant {
    name: "sign_only",
    long: "sign-only",
    help: "Sign the transaction offline",
};
/// `--signer`: supply a presigned PUBKEY=SIGNATURE pair; may be repeated.
pub const SIGNER_ARG: ArgConstant<'static> = ArgConstant {
    name: "signer",
    long: "signer",
    help: "Provide a public-key/signature pair for the transaction",
};
/// Build the `--blockhash` argument: takes a hash value, validated as such.
pub fn blockhash_arg<'a, 'b>() -> Arg<'a, 'b> {
    // Assemble the argument step by step instead of one long chain.
    let arg = Arg::with_name(BLOCKHASH_ARG.name);
    let arg = arg.long(BLOCKHASH_ARG.long);
    let arg = arg.takes_value(true);
    let arg = arg.value_name("BLOCKHASH");
    let arg = arg.validator(is_hash);
    arg.help(BLOCKHASH_ARG.help)
}
/// Build the `--sign-only` flag; it only makes sense with `--blockhash`.
pub fn sign_only_arg<'a, 'b>() -> Arg<'a, 'b> {
    // Flag (no value); clap enforces the dependency on --blockhash.
    let arg = Arg::with_name(SIGN_ONLY_ARG.name);
    let arg = arg.long(SIGN_ONLY_ARG.long);
    let arg = arg.takes_value(false);
    let arg = arg.requires(BLOCKHASH_ARG.name);
    arg.help(SIGN_ONLY_ARG.help)
}
/// Build the repeatable `--signer PUBKEY=SIGNATURE` argument; requires
/// `--blockhash` so the signatures can be tied to a known message.
fn signer_arg<'a, 'b>() -> Arg<'a, 'b> {
    let arg = Arg::with_name(SIGNER_ARG.name);
    let arg = arg.long(SIGNER_ARG.long);
    let arg = arg.takes_value(true);
    let arg = arg.value_name("PUBKEY=SIGNATURE");
    let arg = arg.validator(is_pubkey_sig);
    let arg = arg.requires(BLOCKHASH_ARG.name);
    let arg = arg.multiple(true);
    arg.help(SIGNER_ARG.help)
}
/// Extension trait that attaches the offline-signing arguments
/// (`--blockhash`, `--sign-only`, `--signer`) to a clap `App`.
pub trait OfflineArgs {
    /// Add all offline args; `global` propagates them to subcommands.
    fn offline_args(self, global: bool) -> Self;
}
impl OfflineArgs for App<'_, '_> {
    /// Attach each offline-signing argument in turn.
    fn offline_args(self, global: bool) -> Self {
        let app = self.arg(blockhash_arg().global(global));
        let app = app.arg(sign_only_arg().global(global));
        app.arg(signer_arg().global(global))
    }
}
|
extern crate pest;
use error::RecutError;
use field::{split_line_quotes, split_line_regex_quotes};
use fs::File;
use io::{stdin, BufRead, BufReader};
use match_field::{parse_match_indices, parse_match_indices_regex};
use pest::Parser;
use range::{parse_indices, BeginRange, EndRange, UnExpandedIndices};
use regex::Regex;
use std::{
collections::{BTreeMap, HashMap},
fs, io,
};
#[macro_use]
extern crate pest_derive;
mod error;
mod field;
mod match_field;
mod range;
/// Where input comes from: stdin or a named file.
#[derive(Debug)]
pub enum IoType {
    FromStdIn,
    FromFile(String),
}
/// An index/range specification paired with the delimiter used to split
/// each line.
#[derive(Clone, Debug)]
pub struct RangeDelimiter<'a> {
    // Unparsed index/range list, e.g. "1,3-5".
    locations: &'a str,
    // Field delimiter: literal string or regex, depending on the mode.
    delimiter: &'a str,
}
/// The cutting mode selected on the command line.
#[derive(Clone, Debug)]
pub enum CutType<'a> {
    // Byte ranges; the bool controls whether split multi-byte sequences
    // (replacement characters) are kept in the output.
    Bytes(&'a str, bool),
    Characters(&'a str),
    FieldsInferDelimiter(&'a str),
    FieldsRegexDelimiter(RangeDelimiter<'a>),
    FieldsStringDelimiter(RangeDelimiter<'a>),
    MatchesInferDelimiter(&'a str),
    MatchesRegexDelimiter(RangeDelimiter<'a>),
    MatchesStringDelimiter(RangeDelimiter<'a>),
}
impl RangeDelimiter<'_> {
    /// Pair a raw range specification with its delimiter.
    pub fn new<'a>(locations: &'a str, delimiter: &'a str) -> RangeDelimiter<'a> {
        RangeDelimiter {
            locations,
            delimiter,
        }
    }
}
/// Pest parser for the delimiter-inference grammar (`input.pest`).
#[derive(Parser)]
#[grammar = "input.pest"]
pub struct InputParser;
/// Entry point: stream `input` through the printer selected by `cut_type`.
///
/// Index specifications are parsed before any input is consumed, so a
/// malformed range fails fast.
pub fn cut(input: IoType, cut_type: CutType) -> Result<(), RecutError> {
    // General handling of input for either the console or a file.
    let input: Box<dyn BufRead> = match input {
        IoType::FromStdIn => Box::new(BufReader::new(stdin())),
        IoType::FromFile(file_name) => {
            let file = File::open(file_name)?;
            // Fix: the previous version wrapped this BufReader in a second
            // BufReader, buffering every byte twice for no benefit.
            Box::new(BufReader::new(file))
        }
    };
    match cut_type {
        CutType::Bytes(range, split) => {
            let parsed_indices = parse_indices(range)?;
            print_by_bytes(input, split, &parsed_indices)?;
        }
        CutType::Characters(range) => {
            let parsed_indices = parse_indices(range)?;
            print_by_character(input, &parsed_indices)?;
        }
        CutType::FieldsInferDelimiter(range) => {
            let parsed_indices = parse_indices(range)?;
            print_infer_regex(input, &parsed_indices)?;
        }
        CutType::FieldsRegexDelimiter(range) => {
            let parsed_indices = parse_indices(range.locations)?;
            print_by_regex(input, &range.delimiter, &parsed_indices)?;
        }
        CutType::FieldsStringDelimiter(range) => {
            let parsed_indices = parse_indices(range.locations)?;
            print_by_string_delimiter(input, &range.delimiter, &parsed_indices)?;
        }
        CutType::MatchesInferDelimiter(range) => {
            print_match_infer_regex(input, range)?;
        }
        CutType::MatchesRegexDelimiter(range) => {
            print_match_regex_delimiter(input, range.delimiter, range.locations)?
        }
        CutType::MatchesStringDelimiter(range) => {
            print_match_string_delimiter(input, range.delimiter, range.locations)?
        }
    }
    Ok(())
}
/// Stream the input line by line, printing the selected characters of each.
fn print_by_character(
    input_buffer: Box<dyn BufRead>,
    input_indices: &[UnExpandedIndices],
) -> Result<(), RecutError> {
    input_buffer.lines().try_for_each(|line| {
        print_line_by_character(&line?, input_indices);
        Ok(())
    })
}
/// Print the characters of `input_line` selected by `input_indices`, in the
/// order the user specified them.
fn print_line_by_character(input_line: &str, input_indices: &[UnExpandedIndices]) {
    let length = input_line.chars().count();
    let (sorted_indices, expanded_indices) = expand_indices(input_indices, length);
    let first_index = *sorted_indices.first().unwrap();
    let last_index = *sorted_indices.last().unwrap();
    let take_length = last_index + 1;
    // Key characters by their *character* position. The previous version
    // used `char_indices()`, whose keys are byte offsets; for non-ASCII
    // lines those diverge from the character positions produced by
    // `expand_indices`, causing wrong output or an unwrap panic.
    let char_map = input_line
        .chars()
        .enumerate()
        .skip(first_index)
        .take(take_length)
        .collect::<HashMap<_, _>>();
    let mut print_string = String::new();
    for print_index in &expanded_indices {
        print_string.push(*char_map.get(print_index).unwrap());
    }
    println!("{}", print_string);
}
/// Stream the input line by line, printing the selected bytes of each.
/// Byte selection never spans lines.
fn print_by_bytes(
    input_buffer: Box<dyn BufRead>,
    splits_allowed: bool,
    input_indices: &[UnExpandedIndices],
) -> Result<(), RecutError> {
    input_buffer.lines().try_for_each(|line| {
        print_line_by_bytes(&line?, splits_allowed, input_indices);
        Ok(())
    })
}
/// Print the bytes of `input_line` selected by `input_indices` in the
/// user-specified order. When `allow_splits` is false, replacement
/// characters produced by slicing through a multi-byte sequence at the end
/// of the selection are trimmed off.
fn print_line_by_bytes(input_line: &str, allow_splits: bool, input_indices: &[UnExpandedIndices]) {
    // `len()` is the byte count of a &str.
    let length = input_line.len();
    let (sorted_indices, expanded_indices) = expand_indices(input_indices, length);
    let first = *sorted_indices.first().unwrap();
    let last = *sorted_indices.last().unwrap();
    // Index every byte we might need by its position in the line.
    let byte_map: HashMap<usize, u8> = input_line
        .bytes()
        .enumerate()
        .skip(first)
        .take(last + 1)
        .collect();
    // Gather the requested bytes in the order the user specified them.
    let mut selected = Vec::with_capacity(length);
    for idx in &expanded_indices {
        selected.push(*byte_map.get(idx).unwrap());
    }
    let rendered = String::from_utf8_lossy(selected.as_slice());
    if allow_splits {
        println!("{}", rendered);
    } else {
        println!("{}", rendered.trim_end_matches("�"))
    }
}
/// Split each line on the literal delimiter (quote-aware) and print the
/// selected fields.
fn print_by_string_delimiter(
    input_buffer: Box<dyn BufRead>,
    delimiter: &str,
    input_indices: &[UnExpandedIndices],
) -> Result<(), RecutError> {
    input_buffer.lines().try_for_each(|line| {
        print_line_delimited(&split_line_quotes(&line?, delimiter), input_indices);
        Ok(())
    })
}
/// Split each line on a regex delimiter (quote-aware) and print the
/// selected fields.
fn print_by_regex(
    input_buffer: Box<dyn BufRead>,
    delimiter: &str,
    input_indices: &[UnExpandedIndices],
) -> Result<(), RecutError> {
    // Compile the delimiter pattern once, then reuse it for every line.
    let pattern = Regex::new(delimiter)?;
    input_buffer.lines().try_for_each(|line| {
        print_line_delimited(&split_line_regex_quotes(&line?, &pattern), input_indices);
        Ok(())
    })
}
/// Print the fields of an already-split line that are selected by
/// `input_indices`, concatenated with no separator, in user order.
fn print_line_delimited(split_line: &[String], input_indices: &[UnExpandedIndices]) {
    let length = split_line.len();
    let (sorted_indices, expanded_indices) = expand_indices(input_indices, length);
    let first = *sorted_indices.first().unwrap();
    let last = *sorted_indices.last().unwrap();
    // Index each needed field by its position in the split line.
    let field_map: HashMap<usize, &String> = split_line
        .iter()
        .enumerate()
        .skip(first)
        .take(last + 1)
        .collect();
    // Collect the selected fields in the user-specified order.
    let mut selected = Vec::with_capacity(length);
    for idx in &expanded_indices {
        selected.push(field_map.get(idx).unwrap().to_string());
    }
    println!("{}", selected.join(""));
}
/// Like `print_by_string_delimiter`, but the delimiter is inferred from the
/// first line of input.
fn print_infer_regex(
    mut input_buffer: Box<dyn BufRead>,
    input_indices: &[UnExpandedIndices],
) -> Result<(), RecutError> {
    // Read the first line eagerly so the delimiter can be guessed from it.
    let mut first_line = String::new();
    input_buffer.read_line(&mut first_line)?;
    let delimiter = infer_delimiter(&first_line);
    // The first line is also data: print it, then stream the rest.
    print_line_delimited(&split_line_quotes(&first_line, &delimiter), input_indices);
    for line in input_buffer.lines() {
        print_line_delimited(&split_line_quotes(&line?, &delimiter), input_indices);
    }
    Ok(())
}
/// Match-mode printing with an inferred delimiter: the first line supplies
/// both the delimiter guess and the set of matching column indices.
fn print_match_infer_regex(
    mut input_buffer: Box<dyn BufRead>,
    match_str: &str,
) -> Result<(), RecutError> {
    let mut header = String::new();
    input_buffer.read_line(&mut header)?;
    let delimiter = infer_delimiter(&header);
    let (header_fields, match_indices) = parse_match_indices(match_str, &header, &delimiter)?;
    println!("{}", header_fields.join(","));
    for line in input_buffer.lines() {
        print_line_match_delimited(&split_line_quotes(&line?, &delimiter), &match_indices);
    }
    Ok(())
}
/// Emit the fields at `input_indices` joined by commas, preserving the
/// order of the indices.
fn print_line_match_delimited(split_line: &[String], input_indices: &[usize]) {
    // Index each field by its position in the split line.
    let by_position: HashMap<usize, &String> =
        split_line.iter().enumerate().collect();
    let selected: Vec<String> = input_indices
        .iter()
        .map(|idx| by_position.get(idx).unwrap().to_string())
        .collect();
    println!("{}", selected.join(","));
}
/// Match-mode printing with a literal delimiter: the first line names the
/// columns and determines which ones match `match_str`.
fn print_match_string_delimiter(
    mut input_buffer: Box<dyn BufRead>,
    delimiter: &str,
    match_str: &str,
) -> Result<(), RecutError> {
    let mut header = String::new();
    input_buffer.read_line(&mut header)?;
    let (header_fields, match_indices) = parse_match_indices(match_str, &header, &delimiter)?;
    println!("{}", header_fields.join(","));
    for line in input_buffer.lines() {
        print_line_match_delimited(&split_line_quotes(&line?, &delimiter), &match_indices);
    }
    Ok(())
}
/// Match-mode printing with a regex delimiter; the pattern is compiled once
/// and reused for the header and every following line.
fn print_match_regex_delimiter(
    mut input_buffer: Box<dyn BufRead>,
    delimiter: &str,
    match_str: &str,
) -> Result<(), RecutError> {
    let pattern = Regex::new(delimiter)?;
    let mut header = String::new();
    input_buffer.read_line(&mut header)?;
    let (header_fields, match_indices) = parse_match_indices_regex(match_str, &header, &pattern)?;
    println!("{}", header_fields.join(","));
    for line in input_buffer.lines() {
        print_line_match_delimited(&split_line_regex_quotes(&line?, &pattern), &match_indices);
    }
    Ok(())
}
/// Guess the field delimiter from one line of input by counting candidate
/// delimiter tokens produced by the pest grammar and choosing the most
/// frequent one.
fn infer_delimiter(input_line: &str) -> String {
    // The grammar is hard-coded, so parsing should always succeed.
    let parse_result = InputParser::parse(Rule::input, input_line).unwrap();
    // Count how many times each candidate delimiter occurs on the line.
    let mut potential_delimiters = BTreeMap::new();
    for parse_pair in parse_result {
        for inner in parse_pair.into_inner() {
            match inner.as_rule() {
                Rule::data => {}
                Rule::potential_delim => {
                    let next_delim = potential_delimiters.entry(inner.as_str()).or_insert(0);
                    *next_delim += 1;
                }
                _ => unreachable!(),
            };
        }
    }
    // Fix: pick the candidate with the highest occurrence count. The
    // previous version took `iter().next_back()` — the lexicographically
    // greatest key — ignoring the counts it had just computed. Ties keep
    // the greatest key (max_by_key returns the last maximum of the ordered
    // BTreeMap iteration), so the choice stays deterministic.
    potential_delimiters
        .iter()
        .max_by_key(|&(_, count)| *count)
        .unwrap()
        .0
        .to_owned()
        .to_owned()
}
/// Expand the user's index/range specifications into concrete 0-based
/// positions. Returns `(sorted, as_specified)`: consumers use the sorted
/// copy for skip/take bounds and the original order for output.
fn expand_indices(input_indices: &[UnExpandedIndices], length: usize) -> (Vec<usize>, Vec<usize>) {
    // Map a possibly-negative index onto 0..length: non-negative indices
    // are used as-is, negative ones wrap around from the end. Fix: the
    // previous version computed `length - num as usize` for negative
    // `num`, but casting a negative i32 to usize yields a huge value and
    // the subtraction underflows.
    let tn = |num: i32| {
        if num >= 0 {
            num as usize
        } else {
            length - (-num) as usize
        }
    };
    let expanded_indices: Vec<_> = input_indices
        .into_iter()
        .flat_map(|range| match range {
            // Run single indices through `tn` as well so a negative index
            // wraps instead of underflowing.
            UnExpandedIndices::Index(num) => vec![tn(*num)],
            // Open range on both ends: every valid position. `length` is
            // one past the last valid index, so the range is exclusive;
            // the previous `0..=length` produced an out-of-range position
            // that panics in the map lookups downstream.
            UnExpandedIndices::Range(BeginRange::FromStart, EndRange::ToEnd) => {
                (0..length).collect()
            }
            UnExpandedIndices::Range(BeginRange::FromStart, EndRange::Index(num)) => {
                (0..=tn(*num)).collect()
            }
            UnExpandedIndices::Range(BeginRange::Index(num), EndRange::ToEnd) => {
                (tn(*num)..length).collect()
            }
            UnExpandedIndices::Range(BeginRange::Index(begin_num), EndRange::Index(end_num)) => {
                (tn(*begin_num)..=tn(*end_num)).collect()
            }
        })
        .collect();
    let mut sorted_indices = expanded_indices.clone();
    sorted_indices.sort();
    (sorted_indices, expanded_indices)
}
|
extern crate libsyntax2;
extern crate superslice;
extern crate itertools;
extern crate smol_str;
mod extend_selection;
mod symbols;
mod line_index;
mod edit;
mod code_actions;
use libsyntax2::{
ast::{self, NameOwner},
AstNode,
algo::{walk, find_leaf_at_offset},
SyntaxKind::{self, *},
};
pub use libsyntax2::{File, TextRange, TextUnit};
pub use self::{
line_index::{LineIndex, LineCol},
extend_selection::extend_selection,
symbols::{StructureNode, file_structure, FileSymbol, file_symbols},
edit::{EditBuilder, Edit, AtomEdit},
code_actions::{
ActionResult, CursorPosition, find_node,
flip_comma, add_derive,
},
};
/// A source range tagged with a syntax-highlighting class.
#[derive(Debug)]
pub struct HighlightedRange {
    pub range: TextRange,
    // Highlight class name, e.g. "comment", "string", "keyword".
    pub tag: &'static str,
}
/// A diagnostic message attached to a source range.
#[derive(Debug)]
pub struct Diagnostic {
    pub range: TextRange,
    pub msg: String,
}
/// Something the editor can offer to run (a binary or a test), with the
/// source range it covers.
#[derive(Debug)]
pub struct Runnable {
    pub range: TextRange,
    pub kind: RunnableKind,
}
/// What kind of runnable a function is: a named test or the `main` binary.
#[derive(Debug)]
pub enum RunnableKind {
    Test { name: String },
    Bin,
}
/// Parse `text` into a syntax tree.
pub fn parse(text: &str) -> ast::File {
    ast::File::parse(text)
}
/// Find the offset of the brace matching the one at `offset`, if any.
pub fn matching_brace(file: &ast::File, offset: TextUnit) -> Option<TextUnit> {
    // Pairs are laid out so that XOR-ing an index with 1 yields the index
    // of the matching counterpart.
    const BRACES: &[SyntaxKind] = &[
        L_CURLY, R_CURLY,
        L_BRACK, R_BRACK,
        L_PAREN, R_PAREN,
        L_ANGLE, R_ANGLE,
    ];
    // Locate the first leaf at the offset whose kind is some brace.
    let mut found = None;
    for node in find_leaf_at_offset(file.syntax_ref(), offset) {
        if let Some(idx) = BRACES.iter().position(|&brace| brace == node.kind()) {
            found = Some((node, idx));
            break;
        }
    }
    let (brace_node, brace_idx) = found?;
    let parent = brace_node.parent()?;
    // Flip the low bit to get the complementary brace kind, then find the
    // first sibling of that kind.
    let matching_kind = BRACES[brace_idx ^ 1];
    let matching_node = parent
        .children()
        .find(|node| node.kind() == matching_kind)?;
    Some(matching_node.range().start())
}
/// Walk the whole tree and attach a highlight tag to every node whose kind
/// we recognise; unrecognised nodes are skipped.
pub fn highlight(file: &ast::File) -> Vec<HighlightedRange> {
    walk::preorder(file.syntax_ref())
        .filter_map(|node| {
            let tag = match node.kind() {
                ERROR => "error",
                COMMENT | DOC_COMMENT => "comment",
                STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string",
                ATTR => "attribute",
                NAME_REF => "text",
                NAME => "function",
                INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal",
                LIFETIME => "parameter",
                k if k.is_keyword() => "keyword",
                _ => return None,
            };
            Some(HighlightedRange {
                range: node.range(),
                tag,
            })
        })
        .collect()
}
pub fn diagnostics(file: &ast::File) -> Vec<Diagnostic> {
let mut res = Vec::new();
for node in walk::preorder(file.syntax_ref()) {
if node.kind() == ERROR {
res.push(Diagnostic {
range: node.range(),
msg: "Syntax Error".to_string(),
});
}
}
res.extend(file.errors().into_iter().map(|err| Diagnostic {
range: TextRange::offset_len(err.offset, 1.into()),
msg: err.msg,
}));
res
}
/// Render the file's syntax tree as a debug string dump.
pub fn syntax_tree(file: &ast::File) -> String {
    ::libsyntax2::utils::dump_tree(&file.syntax())
}
/// List the runnable functions in the file: `main` (as a binary) and
/// `#[test]`-annotated functions (as tests). Unnamed functions are skipped.
pub fn runnables(file: &ast::File) -> Vec<Runnable> {
    let mut res = Vec::new();
    for f in file.functions() {
        // A function without a name cannot be run.
        let name = match f.name() {
            Some(name) => name.text(),
            None => continue,
        };
        let kind = if name == "main" {
            RunnableKind::Bin
        } else if f.has_atom_attr("test") {
            RunnableKind::Test {
                name: name.to_string()
            }
        } else {
            continue;
        };
        res.push(Runnable {
            range: f.syntax().range(),
            kind,
        });
    }
    res
}
|
//! File extension trait
use std::fs::File;
use std::io::Result;
/// Extension convenience trait that allows pre-allocating files, suggesting random access pattern
/// and doing cross-platform exact reads/writes
/// Extension convenience trait that allows pre-allocating files, suggesting random access pattern
/// and doing cross-platform exact reads/writes
pub trait FileExt {
    /// Make sure file has specified number of bytes allocated for it
    fn preallocate(&self, len: u64) -> Result<()>;
    /// Advise OS/file system that file will use random access and read-ahead behavior is
    /// undesirable
    ///
    /// On platforms without such an advisory call this is a successful no-op.
    fn advise_random_access(&self) -> Result<()>;
    /// Advise OS/file system that file will use sequential access and read-ahead behavior is
    /// desirable
    ///
    /// On platforms without such an advisory call this is a successful no-op.
    fn advise_sequential_access(&self) -> Result<()>;
    /// Read exact number of bytes at a specific offset
    ///
    /// Fails with `UnexpectedEof` if the file ends before `buf` is filled.
    fn read_exact_at(&self, buf: &mut [u8], offset: u64) -> Result<()>;
    /// Write all provided bytes at a specific offset
    fn write_all_at(&self, buf: &[u8], offset: u64) -> Result<()>;
}
impl FileExt for File {
    // Cross-platform allocation via the fs2 crate.
    fn preallocate(&self, len: u64) -> Result<()> {
        fs2::FileExt::allocate(self, len)
    }
    #[cfg(target_os = "linux")]
    fn advise_random_access(&self) -> Result<()> {
        use std::os::unix::io::AsRawFd;
        // posix_fadvise returns the error number directly (not via errno).
        let err = unsafe { libc::posix_fadvise(self.as_raw_fd(), 0, 0, libc::POSIX_FADV_RANDOM) };
        if err != 0 {
            Err(std::io::Error::from_raw_os_error(err))
        } else {
            Ok(())
        }
    }
    #[cfg(target_os = "macos")]
    fn advise_random_access(&self) -> Result<()> {
        use std::os::unix::io::AsRawFd;
        // F_RDAHEAD 0 disables read-ahead on macOS.
        if unsafe { libc::fcntl(self.as_raw_fd(), libc::F_RDAHEAD, 0) } != 0 {
            Err(std::io::Error::last_os_error())
        } else {
            Ok(())
        }
    }
    #[cfg(not(any(target_os = "macos", target_os = "linux")))]
    fn advise_random_access(&self) -> Result<()> {
        // Not supported
        Ok(())
    }
    #[cfg(target_os = "linux")]
    fn advise_sequential_access(&self) -> Result<()> {
        use std::os::unix::io::AsRawFd;
        let err =
            unsafe { libc::posix_fadvise(self.as_raw_fd(), 0, 0, libc::POSIX_FADV_SEQUENTIAL) };
        if err != 0 {
            Err(std::io::Error::from_raw_os_error(err))
        } else {
            Ok(())
        }
    }
    #[cfg(target_os = "macos")]
    fn advise_sequential_access(&self) -> Result<()> {
        use std::os::unix::io::AsRawFd;
        // F_RDAHEAD 1 re-enables read-ahead on macOS.
        if unsafe { libc::fcntl(self.as_raw_fd(), libc::F_RDAHEAD, 1) } != 0 {
            Err(std::io::Error::last_os_error())
        } else {
            Ok(())
        }
    }
    #[cfg(not(any(target_os = "macos", target_os = "linux")))]
    fn advise_sequential_access(&self) -> Result<()> {
        // Not supported
        Ok(())
    }
    // Unix has positioned-I/O primitives in std; delegate directly.
    #[cfg(unix)]
    fn read_exact_at(&self, buf: &mut [u8], offset: u64) -> Result<()> {
        std::os::unix::fs::FileExt::read_exact_at(self, buf, offset)
    }
    #[cfg(unix)]
    fn write_all_at(&self, buf: &[u8], offset: u64) -> Result<()> {
        std::os::unix::fs::FileExt::write_all_at(self, buf, offset)
    }
    // Windows only exposes seek_read/seek_write, which may be partial:
    // loop until the buffer is filled, retrying on Interrupted.
    #[cfg(windows)]
    fn read_exact_at(&self, mut buf: &mut [u8], mut offset: u64) -> Result<()> {
        while !buf.is_empty() {
            match std::os::windows::fs::FileExt::seek_read(self, buf, offset) {
                Ok(0) => {
                    // EOF before the buffer was filled.
                    break;
                }
                Ok(n) => {
                    buf = &mut buf[n..];
                    offset += n as u64;
                }
                Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => {
                    // Try again
                }
                Err(e) => {
                    return Err(e);
                }
            }
        }
        if !buf.is_empty() {
            Err(std::io::Error::new(
                std::io::ErrorKind::UnexpectedEof,
                "failed to fill whole buffer",
            ))
        } else {
            Ok(())
        }
    }
    #[cfg(windows)]
    fn write_all_at(&self, mut buf: &[u8], mut offset: u64) -> Result<()> {
        while !buf.is_empty() {
            match std::os::windows::fs::FileExt::seek_write(self, buf, offset) {
                Ok(0) => {
                    // A zero-length write means no progress can be made.
                    return Err(std::io::Error::new(
                        std::io::ErrorKind::WriteZero,
                        "failed to write whole buffer",
                    ));
                }
                Ok(n) => {
                    buf = &buf[n..];
                    offset += n as u64;
                }
                Err(ref e) if e.kind() == std::io::ErrorKind::Interrupted => {
                    // Try again
                }
                Err(e) => {
                    return Err(e);
                }
            }
        }
        Ok(())
    }
}
|
// Generated register API: `R`/`W` wrap the 32-bit BUS_RST_CNT register.
#[doc = "Reader of register BUS_RST_CNT"]
pub type R = crate::R<u32, super::BUS_RST_CNT>;
#[doc = "Writer for register BUS_RST_CNT"]
pub type W = crate::W<u32, super::BUS_RST_CNT>;
#[doc = "Register BUS_RST_CNT `reset()`'s with value 0x0a"]
impl crate::ResetValue for super::BUS_RST_CNT {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of the register.
        0x0a
    }
}
#[doc = "Reader of field `BUS_RST_CNT`"]
pub type BUS_RST_CNT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `BUS_RST_CNT`"]
pub struct BUS_RST_CNT_W<'a> {
    // Borrow of the register writer this proxy mutates.
    w: &'a mut W,
}
impl<'a> BUS_RST_CNT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the low nibble, then OR in the new 4-bit value.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - Bus Reset Count Length"]
    #[inline(always)]
    pub fn bus_rst_cnt(&self) -> BUS_RST_CNT_R {
        // Field occupies the low nibble of the register.
        BUS_RST_CNT_R::new((self.bits & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - Bus Reset Count Length"]
    #[inline(always)]
    pub fn bus_rst_cnt(&mut self) -> BUS_RST_CNT_W {
        // Hand out a write proxy scoped to this writer.
        BUS_RST_CNT_W { w: self }
    }
}
|
mod text;
mod texture_rect;
use std::sync::Arc;
use vulkano::command_buffer::{AutoCommandBufferBuilder, SubpassContents};
use vulkano::sampler::Filter;
use rusttype::{PositionedGlyph, Scale, point};
use gristmill::asset::{Asset, image::{Image, NineSliceImage}};
use gristmill::color::{Color, encode_color};
use gristmill::renderer::{RenderAsset, LoadContext, RenderContext, scene};
use gristmill::renderer::loader::RenderAssetLoader;
use gristmill::geometry2d::{Rect, Size};
use super::{Gui, font::{Font, fonts}};
use text::{TextHandle, TextPipeline};
use texture_rect::{Texture, NineSliceTexture, TextureRectPipeline};
// Push-constant layouts re-exported from the pipeline modules.
type TextureRectConstants = texture_rect::PushConstants;
type TextConstants = text::PushConstants;
/// A texture usable by GUI widgets: nothing, a plain texture, or a
/// nine-slice texture (stretchable borders).
#[derive(Clone)]
pub enum GuiTexture {
    None,
    Simple(Texture),
    NineSlice(NineSliceTexture),
}
impl GuiTexture {
    /// The pixel size of the underlying texture, when it has one
    /// well-defined size — only the `Simple` variant does.
    pub fn size(&self) -> Option<Size> {
        if let GuiTexture::Simple(texture) = self {
            Some(texture.size())
        } else {
            // `None` has no texture; a nine-slice has no single fixed size.
            None
        }
    }
}
impl RenderAsset for GuiTexture {
    fn type_name() -> &'static str { "GuiTexture" }
    // The "absent" asset value used by the loader machinery.
    fn none() -> Self { GuiTexture::None }
}
/// A queued draw operation: what to draw, where, and with what color.
pub struct DrawCommand {
    drawable: Drawable,
    rect: Rect,
    // Encoded RGBA color, ready for the shader.
    color: [f32; 4],
}
impl DrawCommand {
    /// Build push constants for the texture-rect pipeline from this command.
    fn texture_rect_constants(&self, screen_dimensions: [f32; 2]) -> TextureRectConstants {
        TextureRectConstants {
            screen_size: screen_dimensions,
            position: self.rect.position.into(),
            size: self.rect.size.into(),
            color: self.color,
            // NOTE(review): presumably shader-layout padding generated by
            // vulkano — confirm against the pipeline definition.
            _dummy0: [0; 8],
        }
    }
    /// Build push constants for the text pipeline from this command.
    fn text_constants(&self, screen_dimensions: [f32; 2]) -> TextConstants {
        TextConstants {
            screen_size: screen_dimensions,
            position: self.rect.position.into(),
            color: self.color,
        }
    }
}
/// A renderable GUI primitive produced by `DrawContext` and consumed by the
/// pipelines during rendering.
#[derive(Clone)]
pub enum Drawable {
    None,
    TextureRect(Texture),
    TextureNineSlice(NineSliceTexture),
    Text(Arc<TextHandle>),
}
/// Measured dimensions of a laid-out run of text.
#[derive(Copy, Clone, Debug)]
pub struct TextMetrics {
    // Total advance width of the run.
    width: f32,
    // Font vertical metrics at the run's scale (ascent/descent/line gap).
    v_metrics: rusttype::VMetrics,
}
impl TextMetrics {
fn new(text_glyphs: &Vec<PositionedGlyph<'static>>) -> TextMetrics {
let last_glyph = text_glyphs.last().unwrap();
let width = last_glyph.position().x + last_glyph.unpositioned().h_metrics().advance_width;
let v_metrics = last_glyph.font().v_metrics(last_glyph.scale());
TextMetrics { width, v_metrics }
}
fn empty() -> TextMetrics {
TextMetrics { width: 0.0, v_metrics: rusttype::VMetrics { ascent: 0.0, descent: 0.0, line_gap: 0.0 } }
}
pub fn width(&self) -> f32 { self.width }
pub fn ascent(&self) -> f32 { self.v_metrics.ascent }
pub fn height(&self) -> f32 { self.v_metrics.ascent - self.v_metrics.descent }
}
/// Per-frame drawing interface handed to the GUI scene; records draw
/// commands into the renderer and tracks whether text glyphs changed.
pub struct DrawContext<'a> {
    render: &'a mut GuiRenderer,
    // Set when a new text drawable was created this frame, so the glyph
    // cache is re-uploaded in update_cache().
    text_changed: bool,
}
impl<'a> DrawContext<'a> {
    /// A drawable that renders as a solid color rectangle (the shared
    /// 1x1 white texture tinted by the draw color).
    pub fn new_color_rect_drawable(&mut self) -> Drawable {
        self.new_texture_rect_drawable(GuiTexture::None)
    }
    /// Turn a GUI texture into the matching drawable variant.
    pub fn new_texture_rect_drawable(&mut self, texture: GuiTexture) -> Drawable {
        match texture {
            GuiTexture::None => Drawable::TextureRect(self.render.white_1x1.clone()),
            GuiTexture::Simple(tex) => Drawable::TextureRect(tex),
            GuiTexture::NineSlice(tex) => Drawable::TextureNineSlice(tex),
        }
    }
    /// Lay out `text` in the given font/size and register it with the text
    /// pipeline. Returns `Drawable::None` with empty metrics when the text
    /// is empty or the font is missing.
    pub fn new_text_drawable(&mut self, font: Font, size: f32, text: &str) -> (Drawable, TextMetrics) {
        if text.is_empty() {
            log::warn!("Can't create text Drawable with empty string");
            return (Drawable::None, TextMetrics::empty());
        }
        let font_asset = match fonts().get(font) {
            Some(font) => font,
            None => return (Drawable::None, TextMetrics::empty()),
        };
        let glyphs: Vec<PositionedGlyph> = font_asset.layout(text, Scale::uniform(size), point(0., 0.)).collect();
        let metrics = TextMetrics::new(&glyphs);
        let handle = self.render.text_pipeline.add_section(glyphs);
        // New glyphs may need uploading to the cache texture this frame.
        self.text_changed = true;
        (Drawable::Text(handle), metrics)
    }
    /// Queue `drawable` to be rendered at `rect` tinted with `color`.
    pub fn draw(&mut self, drawable: &Drawable, mut rect: Rect, color: Color) {
        if let Drawable::TextureNineSlice(tex) = drawable {
            // Nine-slice rects are specified for the content area; inset by
            // the slice borders before drawing.
            rect = rect.inset(tex.slices());
        }
        self.render.pending_draw_commands.push(DrawCommand {
            drawable: drawable.clone(),
            rect,
            color: encode_color(color),
        });
    }
    /// Upload the glyph cache if any text drawables were created this frame.
    fn update_cache(&mut self, builder: &mut AutoCommandBufferBuilder) {
        if self.text_changed {
            self.render.text_pipeline.update_cache(builder);
            self.text_changed = false;
        }
    }
}
/// Renderer for the GUI scene: owns the two pipelines (textured rects and
/// text), the queued draw commands for the current frame, and a shared
/// 1x1 white texture used for solid-color rects.
pub struct GuiRenderer {
    texture_rect_pipeline: TextureRectPipeline,
    text_pipeline: TextPipeline,
    screen_dimensions: Size,
    pending_draw_commands: Vec<DrawCommand>,
    white_1x1: Texture,
}
impl GuiRenderer {
    /// Start a fresh frame: clear last frame's commands and hand out a
    /// draw context that records into this renderer.
    fn make_context<'a>(&'a mut self) -> DrawContext<'a> {
        self.pending_draw_commands.clear();
        DrawContext { render: self, text_changed: false }
    }
    /// Upload a plain image as a GUI texture (linear filtering).
    pub fn load_image(&mut self, context: &mut LoadContext, image: &Image) -> GuiTexture {
        GuiTexture::Simple(self.texture_rect_pipeline.load_image(context, image, Filter::Linear))
    }
    /// Upload a nine-slice image as a GUI texture.
    pub fn load_nine_slice_image(&mut self, context: &mut LoadContext, image: &NineSliceImage) -> GuiTexture {
        GuiTexture::NineSlice(self.texture_rect_pipeline.load_nine_slice_image(context, image))
    }
}
impl scene::SceneRenderer for GuiRenderer {
    type RenderType = scene::Geometry2D;
    type Scene = Gui;
    fn contents() -> SubpassContents { SubpassContents::Inline }
    /// Create both pipelines and the shared solid-white 1x1 texture
    /// (nearest filtering — it is a single flat color).
    fn new(context: &mut LoadContext) -> Self {
        let mut texture_rect_pipeline = TextureRectPipeline::new(context);
        let text_pipeline = TextPipeline::new(context);
        let white_1x1 = texture_rect_pipeline.load_image(context, &Image::new_1x1_white(), Filter::Nearest);
        GuiRenderer {
            texture_rect_pipeline,
            text_pipeline,
            screen_dimensions: Size::zero(),
            pending_draw_commands: Vec::new(),
            white_1x1,
        }
    }
    fn set_dimensions(&mut self, dimensions: Size) {
        self.screen_dimensions = dimensions;
    }
    /// Re-layout the GUI if needed, let the scene record draw commands,
    /// then upload any new glyphs before the render pass begins.
    fn pre_render(&mut self, context: &mut RenderContext, scene: &mut Gui) {
        scene.layout_if_needed(self.screen_dimensions);
        let mut draw_context = self.make_context();
        scene.draw(&mut draw_context);
        draw_context.update_cache(context.command_buffer_builder());
    }
    /// Drain this frame's draw commands, dispatching each to the pipeline
    /// matching its drawable variant.
    fn render(&mut self, context: &mut RenderContext, _scene: &mut Gui) {
        let screen_dimensions = self.screen_dimensions.into();
        for draw_command in self.pending_draw_commands.drain(..) {
            match &draw_command.drawable {
                Drawable::None => (),
                Drawable::TextureRect(texture) =>
                    self.texture_rect_pipeline.draw_rect(context, texture, draw_command.texture_rect_constants(screen_dimensions)),
                Drawable::TextureNineSlice(texture) =>
                    self.texture_rect_pipeline.draw_nine_slice(context, texture, draw_command.texture_rect_constants(screen_dimensions)),
                Drawable::Text(handle) =>
                    self.text_pipeline.draw(context, handle, draw_command.text_constants(screen_dimensions)),
            }
        }
    }
}
impl RenderAssetLoader for GuiRenderer {
    type RenderAsset = GuiTexture;
    fn name() -> &'static str { "gui" }
    /// Load a texture asset by type: "" loads a plain image, "nine_slice"
    /// loads a nine-slice image; any other type is rejected with a warning.
    fn load(&mut self, context: &mut LoadContext, asset_type: &str, asset_path: &str) -> Option<Self::RenderAsset> {
        match asset_type {
            "" => {
                Image::try_read(asset_path).map(|image| self.load_image(context, &image))
            }
            "nine_slice" => {
                NineSliceImage::try_read(asset_path).map(|image| self.load_nine_slice_image(context, &image))
            }
            _ => {
                log::warn!("Invalid asset type \"{}\" for loader {}", asset_type, Self::name());
                None
            }
        }
    }
}
|
/* Copyright (c) 2020-2021 Alibaba Cloud and Intel Corporation
*
* SPDX-License-Identifier: Apache-2.0
*/
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
extern crate sgx_types;
extern crate sgx_urts;
use std::thread;
use std::os::unix::io::{RawFd, AsRawFd};
use std::os::unix::net::{UnixStream, UnixListener};
use std::net::{SocketAddr, TcpStream, TcpListener};
use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;
use std::sync::Arc;
use libc::{c_void};
use foreign_types::{ForeignType, ForeignTypeRef, Opaque};
use clap::{Arg, App};
use shadow_rs::shadow;
use serde_json::{json, Value};
use sgx_types::{
SgxResult, sgx_attributes_t, sgx_launch_token_t, sgx_misc_attribute_t
};
use sgx_urts::SgxEnclave;
#[macro_use]
extern crate log;
shadow!(build);
include!("ffi.rs");
include!("enclave-tls.rs");
/// Create an SGX enclave from the signed enclave file at `file`.
///
/// NOTE(review): debug mode is hard-coded on (`debug = 1`) — confirm this
/// is intended for production builds.
fn sgx_enclave_create(file: &str) -> SgxResult<SgxEnclave> {
    let debug = 1;
    // Launch token and attributes are zero-initialized; the SDK fills them.
    let mut token: sgx_launch_token_t = [0; 1024];
    let mut updated: i32 = 0;
    let mut attr = sgx_misc_attribute_t {
        secs_attr: sgx_attributes_t { flags: 0, xfrm: 0 },
        misc_select: 0,
    };
    SgxEnclave::create(file, debug, &mut token, &mut updated, &mut attr)
}
/// Open a client-side enclave TLS session over `sockfd`, send a greeting,
/// and read the peer's enclave information into `buf`. Returns the number
/// of bytes received.
fn enclave_info_fetch(sockfd: RawFd, buf: &mut [u8],
                tls_type: &Option<String>, crypto: &Option<String>,
                attester: &Option<String>, verifier: &Option<String>,
                mutual: bool, enclave_id: u64) -> usize {
    let tls = EnclaveTls::new(false, enclave_id,
        tls_type, crypto, attester, verifier, mutual).unwrap();
    /* connect */
    tls.negotiate(sockfd).unwrap();
    let sent = tls.transmit(b"hello from inclavared").unwrap();
    assert!(sent > 0);
    tls.receive(buf).unwrap()
}
/// Connect to `sockaddr` and fetch enclave information into `buf`.
/// A string that parses as an IP socket address selects TCP; anything else
/// is treated as a unix-domain socket path.
fn client_fetch(sockaddr: &str, buf: &mut [u8],
                tls_type: &Option<String>, crypto: &Option<String>,
                attester: &Option<String>, verifier: &Option<String>,
                mutual: bool, enclave_id: u64) -> usize {
    match sockaddr.parse::<SocketAddr>() {
        Ok(_) => {
            let stream = TcpStream::connect(sockaddr).unwrap();
            enclave_info_fetch(stream.as_raw_fd(), buf,
                       tls_type, crypto, attester, verifier, mutual, enclave_id)
        }
        Err(_) => {
            /* unix socket */
            let stream = UnixStream::connect(sockaddr).unwrap();
            enclave_info_fetch(stream.as_raw_fd(), buf,
                       tls_type, crypto, attester, verifier, mutual, enclave_id)
        }
    }
}
/// Serve one client connection: negotiate a server-side enclave TLS
/// session, read a JSON request, and reply. A GETENCLAVEINFO request is
/// forwarded to `upstream` (when configured) and the upstream's
/// mrenclave/mrsigner/message are repackaged as an ENCLAVEINFO JSON reply.
fn handle_client(sockfd: RawFd, upstream: &Option<String>,
                tls_type: &Option<String>, crypto: &Option<String>,
                attester: &Option<String>, verifier: &Option<String>,
                mutual: bool, enclave_id: u64) {
    /* XXX: mutual is always false */
    let tls = match EnclaveTls::new(true, enclave_id, tls_type,
        crypto, attester, verifier, false) {
        Ok(r) => r,
        Err(_e) => {
            return;
        }
    };
    /* accept */
    if tls.negotiate(sockfd).is_err() {
        warn!("tls_negotiate() failed, sockfd = {}", sockfd);
        return;
    }
    /* get client request */
    let mut buffer = [0u8; 512];
    let n = tls.receive(&mut buffer).unwrap();
    info!("req: {}", String::from_utf8((&buffer[..n]).to_vec()).unwrap());
    let req: Value = match serde_json::from_slice(&buffer[..n]) {
        Ok(r) => r,
        Err(e) => {
            error!("json::from_slice() failed, {}", e);
            return;
        }
    };
    if req["type"] == "GETENCLAVEINFO" {
        if let Some(upstream) = upstream {
            /* fetch enclave information from upstream */
            let n = client_fetch(&upstream, &mut buffer, tls_type, crypto,
                        attester, verifier, mutual, enclave_id);
            info!("message length from upstream: {}", n);
            /* TODO */
            // Upstream payload layout (when long enough): 32 bytes
            // mrenclave, 32 bytes mrsigner, then a UTF-8 message.
            let resp = if n > 64 {
                let mrenclave = &buffer[0..32];
                let mrsigner = &buffer[32..64];
                let message = String::from_utf8((&buffer[64..n]).to_vec()).unwrap();
                info!("message from upstream: {}", message);
                let resp = json!({
                    "id": "123456",
                    "msgtype": "ENCLAVEINFO",
                    "version": 1,
                    "mrenclave": hex::encode(mrenclave),
                    "mrsigner": hex::encode(mrsigner),
                    "message": message
                });
                resp.to_string()
            } else if n > 0 {
                // Short payload: pass it through verbatim.
                String::from_utf8((&buffer[..n]).to_vec()).unwrap()
            } else {
                String::from("reply from inclavared!\n")
            };
            info!("resp: {}", resp);
            /* response reply */
            tls.transmit(resp.as_bytes()).unwrap();
        } else {
            // No upstream configured: canned reply.
            let n = tls.transmit(b"reply from inclavared!\n").unwrap();
            assert!(n > 0);
        }
    } else {
        // Unknown request type: greet and move on.
        let n = tls.transmit(b"hello from inclavared!\n").unwrap();
        assert!(n > 0);
    }
}
/// Create the SGX enclave, then accept connections on `sockaddr` forever,
/// spawning one thread per client. IP socket addresses select TCP;
/// anything else is treated as a unix-domain socket path.
fn run_server(sockaddr: &str, upstream: Option<String>,
              tls_type: Option<String>, crypto: Option<String>,
              attester: Option<String>, verifier: Option<String>,
              mutual: bool, enclavefile: String) {
    let enclave = match sgx_enclave_create(&enclavefile) {
        Ok(r) => r,
        Err(e) => {
            error!("sgx_enclave_create() failed, {}", e.as_str());
            return;
        }
    };
    let enclave_id = enclave.geteid();
    // Arc-wrap the shared configuration so each per-client thread can hold
    // a cheap clone.
    let upstream = Arc::new(upstream);
    let tls_type = Arc::new(tls_type);
    let crypto = Arc::new(crypto);
    let attester = Arc::new(attester);
    let verifier = Arc::new(verifier);
    let addr = sockaddr.parse::<SocketAddr>();
    /* TODO: Abstract together */
    if addr.is_err() {
        /* unix socket */
        // Remove a stale socket file from a previous run; ignore errors.
        let _ = std::fs::remove_file(sockaddr);
        let listener = UnixListener::bind(sockaddr).unwrap();
        loop {
            let (socket, addr) = listener.accept().unwrap();
            info!("thread for {} {:?}", socket.as_raw_fd(), addr);
            let upstream = upstream.clone();
            let tls_type = tls_type.clone();
            let crypto = crypto.clone();
            let attester = attester.clone();
            let verifier = verifier.clone();
            thread::spawn(move || {
                handle_client(socket.as_raw_fd(), &upstream, &tls_type,
                    &crypto, &attester, &verifier, mutual, enclave_id);
            });
        }
    } else {
        /* tcp */
        let listener = TcpListener::bind(sockaddr).unwrap();
        loop {
            let (socket, addr) = listener.accept().unwrap();
            info!("thread for {} {:?}", socket.as_raw_fd(), addr);
            let upstream = upstream.clone();
            let tls_type = tls_type.clone();
            let crypto = crypto.clone();
            let attester = attester.clone();
            let verifier = verifier.clone();
            thread::spawn(move || {
                handle_client(socket.as_raw_fd(), &upstream, &tls_type,
                    &crypto, &attester, &verifier, mutual, enclave_id);
            });
        }
    }
    /* unreachable
    enclave.destroy();
    */
}
/// Entry point: parse CLI flags and run either as a server (`--listen`)
/// or as a one-shot client (`--connect`).
fn main() {
    env_logger::builder().filter(None, log::LevelFilter::Trace).init();
    let version = format!("v{}\ncommit: {}\nbuildtime: {}",
                          build::PKG_VERSION, build::COMMIT_HASH, build::BUILD_TIME);
    let matches = App::new("inclavared")
        .version(version.as_str())
        .long_version(version.as_str())
        .author("Inclavare-Containers Team")
        .arg(Arg::with_name("listen")
             .short("l")
             .long("listen")
             .value_name("sockaddr")
             .help("Work in listen mode")
             .takes_value(true)
        )
        .arg(Arg::with_name("xfer")
             .short("x")
             .long("xfer")
             .value_name("sockaddr")
             .help("Xfer data from client to server")
             .takes_value(true)
        )
        .arg(Arg::with_name("connect")
             .short("c")
             .long("connect")
             .value_name("sockaddr")
             .help("Work in client mode")
             .takes_value(true)
        )
        .arg(Arg::with_name("tls")
             .long("tls")
             .value_name("tls_type")
             .help("Specify the TLS type")
             .takes_value(true)
        )
        .arg(Arg::with_name("crypto")
             .long("crypto")
             .value_name("crypto_type")
             .help("Specify the crypto type")
             .takes_value(true)
        )
        .arg(Arg::with_name("attester")
             .long("attester")
             .value_name("attester_type")
             .help("Specify the attester type")
             .takes_value(true)
        )
        .arg(Arg::with_name("verifier")
             .long("verifier")
             .value_name("verifier_type")
             .help("Specify the verifier type")
             .takes_value(true)
        )
        .arg(Arg::with_name("mutual")
             .short("m")
             .long("mutual")
             .help("Work in mutual mode")
        )
        .arg(Arg::with_name("enclave")
             .short("e")
             .long("enclave")
             .value_name("file")
             .help("Specify the enclave file")
             .takes_value(true)
        )
        .get_matches();
    info!("enter v{}, {} - {}", build::PKG_VERSION,
          build::COMMIT_HASH, build::BUILD_TIME);
    let tls_type = matches.value_of("tls").map(|s| s.to_string());
    let crypto = matches.value_of("crypto").map(|s| s.to_string());
    let attester = matches.value_of("attester").map(|s| s.to_string());
    let verifier = matches.value_of("verifier").map(|s| s.to_string());
    let mutual = matches.is_present("mutual");
    let enclavefile = matches.value_of("enclave")
        .unwrap_or("/usr/share/enclave-tls/samples/sgx_stub_enclave.signed.so");
    let enclavefile = enclavefile.to_string();
    if matches.is_present("listen") {
        let sockaddr = matches.value_of("listen").unwrap();
        // In listen mode, "xfer" acts as the upstream address to forward to.
        let xfer = matches.value_of("xfer").map(|s| s.to_string());
        run_server(sockaddr, xfer,
                   tls_type, crypto, attester, verifier, mutual, enclavefile);
    } else {
        // NOTE(review): this unwrap panics when neither --listen nor
        // --connect is given — consider making the two flags a required
        // argument group.
        let sockaddr = matches.value_of("connect").unwrap();
        let enclave = match sgx_enclave_create(&enclavefile) {
            Ok(r) => r,
            Err(e) => {
                error!("sgx_enclave_create() failed, {}", e.as_str());
                return;
            }
        };
        let mut buffer = [0u8; 512];
        let n = client_fetch(sockaddr, &mut buffer, &tls_type, &crypto,
                             &attester, &verifier, mutual, enclave.geteid());
        assert!(n > 0);
        info!("length from upstream: {}", n);
        // NOTE(review): slicing at 64 assumes the reply carries a 64-byte
        // (mrenclave + mrsigner) prefix and panics for shorter replies —
        // confirm against the server's reply format.
        let message = String::from_utf8((&buffer[64..]).to_vec()).unwrap();
        info!("message from upstream: {}", message);
        enclave.destroy();
    }
    info!("leave");
}
|
// This file is part of Bit.Country.
// Copyright (C) 2020-2021 Bit.Country.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![cfg(test)]
use super::*;
use frame_support::{assert_noop, assert_ok};
use mock::{Event, *};
use sp_core::blake2_256;
use sp_runtime::traits::BadOrigin;
#[test]
fn create_bitcountry_should_work() {
    ExtBuilder::default().build().execute_with(|| {
        // A signed origin may create a bitcountry.
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Storage must record the creator, metadata and the native currency.
        assert_eq!(
            BitCountryModule::get_bitcountry(&BITCOUNTRY_ID),
            Some(BitCountryStruct {
                owner: ALICE,
                metadata: vec![1],
                currency_id: FungibleTokenId::NativeToken(0),
            })
        );
        // Creation must be announced through the expected event.
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::NewBitCountryCreated(BITCOUNTRY_ID))
        );
    });
}

#[test]
fn create_bitcountry_should_fail() {
    ExtBuilder::default().build().execute_with(|| {
        // An unsigned origin must be rejected without any storage changes.
        assert_noop!(
            BitCountryModule::create_bitcountry(Origin::none(), vec![1]),
            BadOrigin
        );
    });
}
#[test]
fn transfer_bitcountry_should_work() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // The owner may hand the bitcountry over to BOB...
        assert_ok!(BitCountryModule::transfer_bitcountry(Origin::signed(ALICE), BOB, BITCOUNTRY_ID));
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::TransferredBitCountry(BITCOUNTRY_ID, ALICE, BOB))
        );
        // ...and the new owner may transfer it straight back (2-way transfer).
        assert_ok!(BitCountryModule::transfer_bitcountry(Origin::signed(BOB), ALICE, BITCOUNTRY_ID));
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::TransferredBitCountry(BITCOUNTRY_ID, BOB, ALICE))
        );
    })
}

#[test]
fn transfer_bitcountry_should_fail() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // A non-owner must not be able to transfer someone else's bitcountry.
        assert_noop!(
            BitCountryModule::transfer_bitcountry(Origin::signed(BOB), ALICE, BITCOUNTRY_ID),
            Error::<Runtime>::NoPermission
        );
    })
}
#[test]
fn freeze_bitcountry_should_work() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Root may freeze any bitcountry.
        assert_ok!(BitCountryModule::freeze_bitcountry(Origin::root(), BITCOUNTRY_ID));
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::BitCountryFreezed(BITCOUNTRY_ID))
        );
    })
}

#[test]
fn freeze_bitcountry_should_fail() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Even the owner may not freeze their own bitcountry — root only.
        assert_noop!(
            BitCountryModule::freeze_bitcountry(Origin::signed(ALICE), BITCOUNTRY_ID),
            BadOrigin
        );
    })
}
#[test]
fn unfreeze_bitcountry_should_work() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Freeze first, confirming the freeze event...
        assert_ok!(BitCountryModule::freeze_bitcountry(Origin::root(), BITCOUNTRY_ID));
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::BitCountryFreezed(BITCOUNTRY_ID))
        );
        // ...then root may unfreeze it again.
        assert_ok!(BitCountryModule::unfreeze_bitcountry(Origin::root(), BITCOUNTRY_ID));
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::BitCountryUnfreezed(BITCOUNTRY_ID))
        );
    })
}
#[test]
fn destroy_bitcountry_should_work() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Root may destroy an existing bitcountry.
        assert_ok!(BitCountryModule::destroy_bitcountry(Origin::root(), BITCOUNTRY_ID));
        assert_eq!(
            last_event(),
            Event::bitcountry(crate::Event::BitCountryDestroyed(BITCOUNTRY_ID))
        );
    })
}

#[test]
fn destroy_bitcountry_without_root_should_fail() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Destruction is a root-only operation, even for the owner.
        assert_noop!(
            BitCountryModule::destroy_bitcountry(Origin::signed(ALICE), BITCOUNTRY_ID),
            BadOrigin
        );
    })
}

#[test]
fn destroy_bitcountry_with_no_id_should_fail() {
    ExtBuilder::default().build().execute_with(|| {
        assert_ok!(BitCountryModule::create_bitcountry(Origin::signed(ALICE), vec![1]));
        // Destroying an id that was never created must fail cleanly.
        assert_noop!(
            BitCountryModule::destroy_bitcountry(Origin::root(), COUNTRY_ID_NOT_EXIST),
            Error::<Runtime>::BitCountryInfoNotFound
        );
    })
}
|
//! Provide helper functions/trait impls to pack/unpack
//! [`SyscallArgs`].
//!
//! `io::Error` is not implemented for better `no_std` support.
/// The 6 arguments of a syscall, raw untyped version.
#[derive(PartialEq, Debug, Eq, Clone, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SyscallArgs {
    pub arg0: usize,
    pub arg1: usize,
    pub arg2: usize,
    pub arg3: usize,
    pub arg4: usize,
    pub arg5: usize,
}

impl SyscallArgs {
    /// Assemble a `SyscallArgs` from six raw argument words.
    pub fn new(
        a0: usize,
        a1: usize,
        a2: usize,
        a3: usize,
        a4: usize,
        a5: usize,
    ) -> Self {
        Self {
            arg0: a0,
            arg1: a1,
            arg2: a2,
            arg3: a3,
            arg4: a4,
            arg5: a5,
        }
    }
}

// Conversions from fixed-size argument arrays. Arrays shorter than six
// elements zero-fill the trailing positions.

impl From<&[usize; 6]> for SyscallArgs {
    fn from(args: &[usize; 6]) -> Self {
        Self::new(args[0], args[1], args[2], args[3], args[4], args[5])
    }
}

impl From<&[usize; 5]> for SyscallArgs {
    fn from(args: &[usize; 5]) -> Self {
        Self::new(args[0], args[1], args[2], args[3], args[4], 0)
    }
}

impl From<&[usize; 4]> for SyscallArgs {
    fn from(args: &[usize; 4]) -> Self {
        Self::new(args[0], args[1], args[2], args[3], 0, 0)
    }
}

impl From<&[usize; 3]> for SyscallArgs {
    fn from(args: &[usize; 3]) -> Self {
        Self::new(args[0], args[1], args[2], 0, 0, 0)
    }
}

impl From<&[usize; 2]> for SyscallArgs {
    fn from(args: &[usize; 2]) -> Self {
        Self::new(args[0], args[1], 0, 0, 0, 0)
    }
}

impl From<&[usize; 1]> for SyscallArgs {
    fn from(args: &[usize; 1]) -> Self {
        Self::new(args[0], 0, 0, 0, 0, 0)
    }
}

impl From<&[usize; 0]> for SyscallArgs {
    fn from(_args: &[usize; 0]) -> Self {
        Self::new(0, 0, 0, 0, 0, 0)
    }
}
/// Build a [`SyscallArgs`] from zero to six expressions; omitted trailing
/// arguments default to `0`.
#[macro_export]
macro_rules! syscall_args {
    // One arm per arity, each padding the remaining positions with zeros.
    ($a:expr, $b:expr, $c:expr, $d:expr, $e:expr, $f:expr) => {
        $crate::SyscallArgs::new($a, $b, $c, $d, $e, $f)
    };
    ($a:expr, $b:expr, $c:expr, $d:expr, $e:expr) => {
        $crate::SyscallArgs::new($a, $b, $c, $d, $e, 0)
    };
    ($a:expr, $b:expr, $c:expr, $d:expr) => {
        $crate::SyscallArgs::new($a, $b, $c, $d, 0, 0)
    };
    ($a:expr, $b:expr, $c:expr) => {
        $crate::SyscallArgs::new($a, $b, $c, 0, 0, 0)
    };
    ($a:expr, $b:expr) => {
        $crate::SyscallArgs::new($a, $b, 0, 0, 0, 0)
    };
    ($a:expr) => {
        $crate::SyscallArgs::new($a, 0, 0, 0, 0, 0)
    };
    () => {
        $crate::SyscallArgs::new(0, 0, 0, 0, 0, 0)
    };
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The macro must zero-fill every argument position it is not given.
    #[test]
    fn syscall_args_macro_test() {
        assert_eq!(
            syscall_args!(1, 2, 3, 4, 5, 6),
            SyscallArgs::new(1, 2, 3, 4, 5, 6)
        );
        assert_eq!(
            syscall_args!(1, 2, 3, 4, 5),
            SyscallArgs::new(1, 2, 3, 4, 5, 0)
        );
        assert_eq!(
            syscall_args!(1, 2, 3, 4),
            SyscallArgs::new(1, 2, 3, 4, 0, 0)
        );
        assert_eq!(syscall_args!(1, 2, 3), SyscallArgs::new(1, 2, 3, 0, 0, 0));
        assert_eq!(syscall_args!(1, 2), SyscallArgs::new(1, 2, 0, 0, 0, 0));
        assert_eq!(syscall_args!(1), SyscallArgs::new(1, 0, 0, 0, 0, 0));
        assert_eq!(syscall_args!(), SyscallArgs::new(0, 0, 0, 0, 0, 0));
    }

    /// `From<&[usize; N]>` must agree with the macro for every arity.
    #[test]
    fn syscall_args_from_u64_slice() {
        assert_eq!(
            SyscallArgs::from(&[1, 2, 3, 4, 5, 6]),
            syscall_args!(1, 2, 3, 4, 5, 6)
        );
        assert_eq!(
            SyscallArgs::from(&[1, 2, 3, 4, 5]),
            syscall_args!(1, 2, 3, 4, 5)
        );
        assert_eq!(SyscallArgs::from(&[1, 2, 3, 4]), syscall_args!(1, 2, 3, 4));
        assert_eq!(SyscallArgs::from(&[1, 2, 3]), syscall_args!(1, 2, 3));
        assert_eq!(SyscallArgs::from(&[1, 2]), syscall_args!(1, 2));
        assert_eq!(SyscallArgs::from(&[1]), syscall_args!(1));
        // A single zero argument is indistinguishable from no arguments.
        assert_eq!(SyscallArgs::from(&[0]), syscall_args!());
    }
}
|
use std::io::{Read, Seek};
use iced_wgpu::wgpu;
use serde::Deserialize;
use crate::assets::{AssetError, AssetPack, Texture, Vertex, Mesh};
// Minimal serde mappings for the glTF 2.0 JSON schema. Field names mirror
// the spec's camelCase names (hence `allow(non_snake_case)`); commented-out
// fields exist in the format but are unused by this loader.

/// A typed view over part of a buffer view (glTF `accessor`).
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfAccessor {
    bufferView: usize,
    byteOffset: Option<usize>,
    // glTF component-type enum, e.g. 5126 = FLOAT, 5123 = UNSIGNED_SHORT.
    componentType: usize,
    count: usize,
    max: Option<Vec<serde_json::Value>>,
    min: Option<Vec<serde_json::Value>>,
    // "SCALAR" | "VEC2" | "VEC3" | ...
    #[serde(rename="type")]
    ty: String,
}

/// 12-byte header at the start of a binary `.glb` container.
#[repr(C)]
struct GltfHeader {
    magic: u32,
    version: u32,
    length: u32,
}

/// 8-byte header preceding each chunk inside a `.glb` container.
#[repr(C)]
struct GltfChunkHeader {
    length: u32,
    kind: u32,
}

/// A raw data buffer; `uri` is absent for the embedded GLB binary chunk.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfBuffer {
    // byteLength: usize,
    uri: Option<String>,
}

/// A slice of a buffer referenced by accessors.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfBufferView {
    buffer: usize,
    // byteLength: usize,
    byteOffset: Option<usize>,
    // target: usize,
}

/// An external image referenced by URI.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfImage {
    uri: String,
}

/// Wrapper for the spec's `{ "index": n }` texture references.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfTextureIndex {
    index: usize,
}

/// PBR metallic-roughness parameters; only the base color texture is used.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfMaterialMetallicRoughness {
    baseColorTexture: GltfTextureIndex,
    // metallicFactor: f32,
    // roughnessFactor: f32,
}

/// A material; only the metallic-roughness block is read.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfMaterial {
    // name: String,
    pbrMetallicRoughness: GltfMaterialMetallicRoughness,
    // normalTexture: GltfTextureIndex,
}

/// One draw primitive: attribute accessors, index accessor, material.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfMeshPrimitive {
    attributes: serde_json::Map<String, serde_json::Value>,
    indices: usize,
    material: Option<usize>,
    mode: Option<usize>,
}

/// A named mesh composed of primitives.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfMesh {
    name: String,
    primitives: Vec<GltfMeshPrimitive>,
}

/// A texture binding an image source (samplers unused here).
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct GltfTexture {
    // sampler: usize,
    source: usize,
}

/// Top-level glTF document, restricted to the sections this loader reads.
#[allow(non_snake_case)]
#[derive(Deserialize)]
struct Gltf {
    accessors: Vec<GltfAccessor>,
    buffers: Vec<GltfBuffer>,
    bufferViews: Vec<GltfBufferView>,
    images: Option<Vec<GltfImage>>,
    materials: Option<Vec<GltfMaterial>>,
    meshes: Option<Vec<GltfMesh>>,
    textures: Option<Vec<GltfTexture>>,
}
/// Resolve the raw bytes an accessor points at inside one of the loaded
/// buffers, after asserting that the accessor's declared element type
/// matches `scalars_per_element` (1 => SCALAR, 2 => VEC2, 3 => VEC3).
///
/// Returns the buffer suffix starting at the accessor's effective byte
/// offset; the caller is responsible for reading only `accessor.count`
/// elements from it.
// Takes `&[Vec<u8>]` instead of `&Vec<Vec<u8>>` (idiomatic slice parameter;
// existing `&buffers` call sites coerce unchanged).
fn access_gltf_buffer<'a>(buffers: &'a [Vec<u8>], gltf: &Gltf, accessor: &GltfAccessor, scalars_per_element: usize) -> &'a [u8] {
    assert_eq!(accessor.ty, match scalars_per_element {
        1 => "SCALAR",
        2 => "VEC2",
        3 => "VEC3",
        _ => panic!()
    });
    let buffer_view = &gltf.bufferViews[accessor.bufferView];
    // The accessor offset is relative to its buffer view's own offset.
    let offset = buffer_view.byteOffset.unwrap_or(0) + accessor.byteOffset.unwrap_or(0);
    let buffer = &buffers[buffer_view.buffer];
    &buffer[offset..]
}
pub fn load_gltf(device: &wgpu::Device, bind_group_layout: &wgpu::BindGroupLayout, sampler: &wgpu::Sampler, asset_pack: &AssetPack, path: &std::path::PathBuf) -> Result<Vec<(std::path::PathBuf, Mesh)>, AssetError> {
let directory = path.parent().unwrap();
let file = std::fs::File::open(path)?;
// let file_len = file.metadata()?.len();
let mut reader = std::io::BufReader::new(file);
let mut buffers: Vec<Vec<u8>> = Vec::new();
let gltf: Gltf = if path.extension().unwrap() == ".glb" {
let mut slice = [0; std::mem::size_of::<GltfHeader>()];
reader.read(&mut slice)?;
let header: GltfHeader = unsafe { std::mem::transmute(slice) };
assert_eq!(header.magic, 0x46546C67);
assert_eq!(header.version, 2);
let mut slice = [0; std::mem::size_of::<GltfChunkHeader>()];
reader.read(&mut slice)?;
let json_chunk_header: GltfChunkHeader = unsafe { std::mem::transmute(slice) };
assert_eq!(json_chunk_header.kind, 0x4E4F534A);
reader.seek(std::io::SeekFrom::Current(json_chunk_header.length as i64))?;
let mut slice = [0; std::mem::size_of::<GltfChunkHeader>()];
reader.read(&mut slice)?;
let bin_chunk_header: GltfChunkHeader = unsafe { std::mem::transmute(slice) };
assert_eq!(bin_chunk_header.kind, 0x004E4942);
let mut buffer: Vec<u8> = Vec::new();
reader.read_to_end(&mut buffer)?;
buffers.push(buffer);
reader.seek(std::io::SeekFrom::Start(std::mem::size_of::<GltfHeader>() as u64+std::mem::size_of::<GltfChunkHeader>() as u64))?;
serde_json::from_reader(reader.take(json_chunk_header.length as u64))?
} else {
serde_json::from_reader(reader)?
};
let path = path.parent().unwrap().join(path.file_stem().unwrap());
let images = match &gltf.images {
Some(gltf_images) => {
let mut images: Vec<&Texture> = Vec::with_capacity(gltf_images.len());
for gltf_image in gltf_images {
let image_path = std::path::PathBuf::from(&gltf_image.uri);
images.push(&asset_pack.textures[&directory.join(image_path.parent().unwrap()).join(image_path.file_stem().unwrap())]);
}
images
},
None => Vec::new(),
};
for gltf_buffer in &gltf.buffers {
if let Some(uri) = &gltf_buffer.uri {
buffers.push(std::fs::read(&directory.join(uri))?);
}
}
let mut result: Vec<(std::path::PathBuf, Mesh)> = Vec::new();
if let Some(gltf_meshes) = &gltf.meshes {
for gltf_mesh in gltf_meshes {
let gltf_primitive = &gltf_mesh.primitives[0];
let mode = gltf_primitive.mode.unwrap_or(4);
assert_eq!(mode, 4);
let element_count = gltf.accessors[gltf_primitive.attributes["POSITION"].as_u64().unwrap() as usize].count;
let mut vertices: Vec<Vertex> = Vec::with_capacity(std::mem::size_of::<Vertex>()*element_count);
vertices.resize_with(element_count, || { Vertex::default() });
let mut offset_in_vertex = 0;
for (attribute, scalars_per_element) in &[("POSITION", 3), ("NORMAL", 3), ("TEXCOORD_0", 2)] {
let element_size = *scalars_per_element*std::mem::size_of::<f32>();
let accessor = &gltf.accessors[gltf_primitive.attributes[*attribute].as_u64().unwrap() as usize];
assert_eq!(accessor.componentType, 5126); // FLOAT
let slice = access_gltf_buffer(&buffers, &gltf, &accessor, *scalars_per_element);
unsafe {
let mut dst_ptr = (vertices.as_mut_ptr() as *mut u8).offset(offset_in_vertex as isize);
let mut src_ptr = slice.as_ptr() as *const u8;
for _ in 0..accessor.count {
std::slice::from_raw_parts_mut(dst_ptr, element_size).clone_from_slice(std::slice::from_raw_parts(src_ptr, element_size));
dst_ptr = dst_ptr.offset(std::mem::size_of::<Vertex>() as isize);
src_ptr = src_ptr.offset(element_size as isize);
}
}
offset_in_vertex += element_size;
}
let accessor = &gltf.accessors[gltf_primitive.attributes["POSITION"].as_u64().unwrap() as usize];
let (min, max) = if accessor.min.is_some() && accessor.max.is_some() {
let min = accessor.min.as_ref().unwrap();
let max = accessor.max.as_ref().unwrap();
(glam::Vec3A::new(min[0].as_f64().unwrap() as f32, min[1].as_f64().unwrap() as f32, min[2].as_f64().unwrap() as f32),
glam::Vec3A::new(max[0].as_f64().unwrap() as f32, max[1].as_f64().unwrap() as f32, max[2].as_f64().unwrap() as f32))
} else {
let mut min = glam::Vec3A::splat(f32::INFINITY);
let mut max = glam::Vec3A::splat(-f32::INFINITY);
for vertex in &vertices {
min = min.min(vertex.position.into());
max = max.max(vertex.position.into());
}
(min, max)
};
let bounding_volume = crate::bounding_volume::BoundingVolume::Box(crate::bounding_volume::BoundingBox { min, max });
let accessor = &gltf.accessors[gltf_primitive.indices];
assert_eq!(accessor.componentType, 5123); // UNSIGNED_SHORT
let indices = access_gltf_buffer(&buffers, &gltf, &accessor, 1);
let bind_group = gltf_primitive.material.and_then(|gltf_material_index| {
let gltf_material = &gltf.materials.as_ref().unwrap()[gltf_material_index];
let albedo_texture = &images[gltf.textures.as_ref().unwrap()[gltf_material.pbrMetallicRoughness.baseColorTexture.index].source];
// let normal_texture = images[gltf.textures.as_ref().unwrap()[gltf_material.normalTexture.index].source];
Some(device.create_bind_group(&bind_group_descriptor!(
&bind_group_layout,
0 => Sampler(&sampler),
1 => TextureView(&albedo_texture.view),
)))
});
let mesh = Mesh::new(device, unsafe { crate::transmute_slice::<_, u8>(&vertices[..]) }, indices, accessor.count, bounding_volume, bind_group);
result.push((path.join(&gltf_mesh.name), mesh));
}
}
Ok(result)
}
|
use ggez::audio::SoundSource;
use ggez::input::keyboard::KeyCode;
use ggez::input::keyboard::KeyMods;
use ggez::*;
use rand;
use rand::Rng;
/// Window dimensions in pixels: (width, height).
const SCREEN_SIZE: (f32, f32) = (800.0, 600.0);
/// Height of the player's collision rectangle, in pixels.
const PLAYER_HEIGHT: f32 = 100.0;
/// A position in game coordinates, where `y` grows upward from the
/// bottom of the screen (converted for rendering via `From<Point>`).
#[derive(Clone, Copy, Debug)]
struct Point {
    x: f32,
    y: f32,
}
impl From<Point> for mint::Point2<f32> {
    /// Convert game coordinates (y up from the bottom edge) into screen
    /// coordinates (y down from the top edge).
    fn from(p: Point) -> Self {
        let flipped_y = SCREEN_SIZE.1 - p.y;
        mint::Point2::<f32> { x: p.x, y: flipped_y }
    }
}
/// Full game state: entities, score, and all loaded assets.
struct State {
    input: InputState,          // current axis/fire input
    player: Player,
    bullets: Vec<Bullet>,
    invaders: Vec<Invader>,
    points: u64,                // score shown in the HUD
    invader_image: graphics::Image,
    bg_image: graphics::Image,
    font: graphics::Font,
    shot_sound: audio::Source,
    hit_sound: audio::Source,
    bullet_image: graphics::Image,
}
impl State {
fn new(ctx: &mut Context) -> Self {
let shot_sound = audio::Source::new(ctx, "/pew.ogg").unwrap();
let hit_sound = audio::Source::new(ctx, "/boom.ogg").unwrap();
State {
input: Default::default(),
player: Player::new(ctx),
bullets: Vec::new(),
invaders: Vec::new(),
points: 0,
invader_image: graphics::Image::new(ctx, "/invader.png").unwrap(),
bullet_image: graphics::Image::new(ctx, "/bullet.png").unwrap(),
bg_image: graphics::Image::new(ctx, "/bg.jpg").unwrap(),
font: graphics::Font::new(ctx, "/DejaVuSerif.ttf").unwrap(),
shot_sound,
hit_sound,
}
}
fn restart(&mut self) {
self.input = Default::default();
self.bullets = Vec::new();
self.invaders = Vec::new();
self.points = 0;
self.player.reset();
}
}
/// Current player input: signed axis values (positive = right/up) and
/// whether the fire button is held.
///
/// The hand-written `Default` impl duplicated exactly what `derive`
/// produces (`0.0`, `0.0`, `false`), so it is replaced by the derive.
#[derive(Debug, Default)]
struct InputState {
    xaxis: f32,
    yaxis: f32,
    fire: bool,
}
/// A player bullet travelling up the screen.
#[derive(Debug)]
struct Bullet {
    position: Point,     // game coordinates (y grows upward)
    size: (f32, f32),    // collision box (width, height)
}
impl Bullet {
    /// Spawn a bullet at the given point, shifted 40 px right so it
    /// leaves from the middle of the player sprite.
    fn new(point: Point) -> Self {
        let position = Point { x: point.x + 40.0, ..point };
        Bullet { position, size: (20.0, 20.0) }
    }

    /// Render the bullet sprite at its current position.
    fn draw(&self, ctx: &mut Context, img: &graphics::Image) -> GameResult {
        let dest: mint::Point2<f32> = self.position.into();
        let params = graphics::DrawParam::new()
            .dest(dest)
            .rotation(1.570796) // ~PI/2, so the sprite points up-screen
            .scale([0.06, 0.06])
            .offset([0.0, 0.9]);
        graphics::draw(ctx, img, params)
    }
}
impl Entity for Bullet {
    /// Axis-aligned collision box in screen coordinates.
    fn get_rect(&self) -> graphics::Rect {
        let p: mint::Point2<f32> = self.position.into();
        let (w, h) = self.size;
        graphics::Rect::new(p.x, p.y, w, h)
    }
}
/// Anything with a screen-space collision rectangle.
trait Entity {
    fn get_rect(&self) -> graphics::Rect;
}

/// Horizontal patrol direction of an invader.
enum MoveState {
    Forward,
    Backwards,
}

/// Patrol bookkeeping for one invader.
struct Moves {
    allowed: (f32, f32),   // distance budget per direction (x, y)
    current: (f32, f32),   // distance travelled so far (x, y)
    state: MoveState,      // current patrol direction
}
impl Moves {
fn new() -> Self {
let mut rng = rand::thread_rng();
Self {
allowed: (rng.gen_range(50.0, 300.0), 0.0),
current: (0.0, 0.0),
state: MoveState::Forward,
}
}
}
/// An enemy descending toward the bottom of the screen.
struct Invader {
    position: Point,     // game coordinates (y grows upward)
    size: (f32, f32),    // collision box (width, height)
    health: u8,          // 0 marks the invader dead / to be removed
    movement: Moves,     // horizontal patrol state
}
impl Invader {
    /// Spawn an invader at a random spot in the upper half of the
    /// screen, away from the side edges.
    fn new() -> Self {
        let mut rng = rand::thread_rng();
        let spawn_x = rng.gen_range(100.0, SCREEN_SIZE.0 - 100.0);
        let spawn_y = rng.gen_range(SCREEN_SIZE.1 / 2.0, SCREEN_SIZE.1 - 100.0);
        Invader {
            position: Point { x: spawn_x, y: spawn_y },
            size: (80.0, 60.0),
            health: 1,
            movement: Moves::new(),
        }
    }

    /// Render the invader sprite at its current position.
    fn draw(&self, ctx: &mut Context, img: &graphics::Image) -> GameResult {
        let dest: mint::Point2<f32> = self.position.into();
        let params = graphics::DrawParam::new()
            .dest(dest)
            .scale([0.1, 0.1])
            .offset([0.13, 0.23]);
        graphics::draw(ctx, img, params)
    }
}
impl Entity for Invader {
    /// Axis-aligned collision box in screen coordinates.
    fn get_rect(&self) -> graphics::Rect {
        let p: mint::Point2<f32> = self.position.into();
        let (w, h) = self.size;
        graphics::Rect::new(p.x, p.y, w, h)
    }
}
/// The player's ship.
struct Player {
    position: Point,        // game coordinates (y grows upward)
    image: graphics::Image,
    health: u8,             // 0 triggers the game-over screen
    shot_timeout: f32,      // seconds until the next shot is allowed
}
impl Entity for Player {
    /// Axis-aligned collision box (95 x PLAYER_HEIGHT) in screen coordinates.
    fn get_rect(&self) -> graphics::Rect {
        let p: mint::Point2<f32> = self.position.into();
        graphics::Rect::new(p.x, p.y, 95.0, PLAYER_HEIGHT)
    }
}
impl Player {
    /// Create the player at the bottom-centre of the screen with full health.
    fn new(ctx: &mut Context) -> Self {
        let start = Point { x: SCREEN_SIZE.0 / 2.0, y: PLAYER_HEIGHT };
        Player {
            position: start,
            image: graphics::Image::new(ctx, "/player.png").unwrap(),
            health: 3,
            shot_timeout: 0.0,
        }
    }

    /// Return the player to its starting position and stats.
    fn reset(&mut self) {
        self.position = Point { x: SCREEN_SIZE.0 / 2.0, y: PLAYER_HEIGHT };
        self.health = 3;
        self.shot_timeout = 0.0;
    }

    /// Render the player sprite at its current position.
    fn draw(&self, ctx: &mut Context) -> GameResult {
        let dest: mint::Point2<f32> = self.position.into();
        let params = graphics::DrawParam::new()
            .dest(dest)
            .scale([0.2, 0.2])
            .offset([0.05, 0.0]);
        graphics::draw(ctx, &self.image, params)
    }
}
/// Cooldown between player shots, in seconds.
const SHOT_TIMEOUT: f32 = 0.3;
/// Horizontal patrol speed of invaders, in pixels per tick.
const INVADER_SPEED: f32 = 3.0;
impl event::EventHandler for State {
fn update(&mut self, ctx: &mut Context) -> GameResult {
if self.player.health == 0 {
return Ok(());
}
while timer::check_update_time(ctx, 60) {
let seconds = 1.0 / 60f32;
if self.input.xaxis > 0.0
&& self.player.position.x < (SCREEN_SIZE.0 - self.player.get_rect().w)
{
self.player.position.x += self.input.xaxis;
}
if self.input.xaxis < 0.0 && self.player.position.x > 0.0 {
self.player.position.x += self.input.xaxis;
}
if self.input.yaxis > 0.0 && self.player.position.y < SCREEN_SIZE.1 {
self.player.position.y += self.input.yaxis;
}
if self.input.yaxis < 0.0 && self.player.position.y > self.player.get_rect().h {
self.player.position.y += self.input.yaxis;
}
self.player.shot_timeout -= seconds;
if self.input.fire && self.player.shot_timeout < 0.0 {
self.bullets.push(Bullet::new(self.player.position));
self.shot_sound.play()?;
self.player.shot_timeout = SHOT_TIMEOUT;
}
while self.invaders.len() < 5 {
self.invaders.push(Invader::new())
}
for bullet in &mut self.bullets {
bullet.position.y += 20.0;
let br = bullet.get_rect();
for invader in &mut self.invaders {
if invader.health > 0 {
if invader.get_rect().overlaps(&br) {
self.hit_sound.play()?;
invader.health -= 1;
if invader.health == 0 {
self.points += 1;
}
bullet.position.y = SCREEN_SIZE.1;
break;
}
}
}
}
self.bullets.retain(|s| s.position.y < SCREEN_SIZE.1);
self.invaders.retain(|i| i.health > 0 && i.position.y > 0.0);
let pl_rect = self.player.get_rect();
for invader in &mut self.invaders {
invader.position.y -= 1.0;
if invader.position.y <= 0.0 {
self.points -= 1;
}
match invader.movement.state {
MoveState::Forward => {
invader.position.x += INVADER_SPEED;
invader.movement.current.0 += INVADER_SPEED;
if invader.movement.allowed.0 < invader.movement.current.0
|| invader.position.x > (SCREEN_SIZE.0 - invader.get_rect().w)
{
invader.movement.state = MoveState::Backwards;
}
}
MoveState::Backwards => {
invader.position.x -= INVADER_SPEED;
invader.movement.current.0 -= INVADER_SPEED;
if 0.0 > invader.movement.current.0 || invader.position.x < 0.0 {
invader.movement.state = MoveState::Forward;
}
}
}
if invader.health > 0 && invader.get_rect().overlaps(&pl_rect) {
invader.health = 0;
if self.player.health > 0 {
self.player.health -= 1;
self.hit_sound.play()?;
}
}
}
}
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
graphics::clear(ctx, graphics::BLACK);
if self.player.health > 0 {
graphics::draw(
ctx,
&self.bg_image,
graphics::DrawParam::new()
.dest([0.0, 0.0])
.scale([0.5, 0.5]), // .offset([0.13, 0.23]),
)?;
}
for bullet in &self.bullets {
bullet.draw(ctx, &self.bullet_image)?;
}
for invader in &self.invaders {
invader.draw(ctx, &self.invader_image)?;
}
self.player.draw(ctx)?;
// And draw the GUI elements in the right places.
let hp_dest = [10.0, 10.0];
let score_dest = [100.0, 10.0];
let hp_str = format!("HP: {}", self.player.health);
let score_str = format!("Score: {}", self.points);
let level_display = graphics::Text::new((hp_str, self.font, 32.0));
let score_display = graphics::Text::new((score_str, self.font, 32.0));
graphics::draw(ctx, &level_display, (hp_dest, 0.0, graphics::WHITE))?;
graphics::draw(ctx, &score_display, (score_dest, 0.0, graphics::WHITE))?;
if self.player.health == 0 {
let go_text = graphics::Text::new(("GAME OVER", self.font, 100.0));
graphics::draw(
ctx,
&go_text,
(
[100.0, (SCREEN_SIZE.1 / 2.0) - 50.0],
0.0,
graphics::Color::from_rgb(255, 0, 0),
),
)?;
} else {
}
graphics::present(ctx)?;
timer::yield_now();
Ok(())
}
fn key_down_event(
&mut self,
ctx: &mut Context,
key_code: KeyCode,
_key_mods: KeyMods,
_: bool,
) {
match key_code {
KeyCode::Right => self.input.xaxis = 10.0,
KeyCode::Left => self.input.xaxis = -10.0,
KeyCode::Up => self.input.yaxis = 10.0,
KeyCode::Down => self.input.yaxis = -10.0,
KeyCode::Space => self.input.fire = true,
KeyCode::Escape => ggez::event::quit(ctx),
_ => (),
}
}
fn key_up_event(&mut self, _ctx: &mut Context, key_code: KeyCode, _key_mods: KeyMods) {
match key_code {
KeyCode::Space => self.input.fire = false,
KeyCode::Right => {
if self.input.xaxis > 0.0 {
self.input.xaxis = 0.0
}
}
KeyCode::Left => {
if self.input.xaxis < 0.0 {
self.input.xaxis = 0.0
}
}
KeyCode::Up => {
if self.input.yaxis > 0.0 {
self.input.yaxis = 0.0
}
}
KeyCode::Down => {
if self.input.yaxis < 0.0 {
self.input.yaxis = 0.0
}
}
KeyCode::Return => {
self.restart();
}
_ => (),
}
}
}
/// Set up the ggez context (window, resource path) and hand control to
/// the event loop with a fresh game state.
fn main() -> GameResult {
    use std::path;
    let (ctx, event_loop) = &mut ggez::ContextBuilder::new("plat", "dan")
        .add_resource_path(path::PathBuf::from("./resources"))
        .window_setup(conf::WindowSetup::default().title("Plat!"))
        .window_mode(conf::WindowMode::default().dimensions(SCREEN_SIZE.0, SCREEN_SIZE.1))
        .build()?;
    let mut state: State = State::new(ctx);
    event::run(ctx, event_loop, &mut state)
}
|
use proc_macro2::Span;
use quote::quote;
use syn::MetaList;
use syn::{
punctuated::Punctuated, token::Comma, Attribute, Data, DataStruct, DeriveInput, Field, Fields,
FieldsNamed, Ident, Lit, Meta, MetaNameValue, NestedMeta,
};
// Set `$i` (an `Option`) to `$v`, or return a "duplicate attribute" error
// spanned on `$t` if it was already set.
macro_rules! try_set {
    ($i:ident, $v:expr, $t:expr) => {
        match $i {
            None => $i = Some($v),
            Some(_) => return Err(syn::Error::new_spanned($t, "duplicate attribute")),
        }
    };
}

// Bail out of the enclosing function with error `$m` spanned on `$t`.
macro_rules! fail {
    ($t:expr, $m:expr) => {
        return Err(syn::Error::new_spanned($t, $m));
    };
}
/// Container-level attributes parsed from `#[event_sauce(...)]`.
struct EntityAttributes {
    // Value of `entity_name = "..."`; emitted as `Entity::ENTITY_TYPE`.
    entity_name: String,
}
/// Extract `entity_name` from the struct's `#[event_sauce(entity_name = "...")]`
/// attribute, erroring on duplicates, unknown keys, or a missing attribute.
fn parse_entity_attributes(input: &[Attribute]) -> syn::Result<EntityAttributes> {
    let mut entity_name = None;
    for attr in input {
        let meta = attr
            .parse_meta()
            .map_err(|e| syn::Error::new_spanned(attr, e))?;
        match meta {
            // Only `#[event_sauce(...)]` list attributes are inspected;
            // anything else on the struct is ignored.
            Meta::List(list) if list.path.is_ident("event_sauce") => {
                for value in list.nested.iter() {
                    match value {
                        NestedMeta::Meta(meta) => match meta {
                            // `entity_name = "..."`: record it, at most once.
                            Meta::NameValue(MetaNameValue {
                                path,
                                lit: Lit::Str(val),
                                ..
                            }) if path.is_ident("entity_name") => {
                                try_set!(entity_name, val.value(), value)
                            }
                            // Any other `key = "..."` pair is reported by name.
                            Meta::NameValue(MetaNameValue {
                                path,
                                lit: Lit::Str(_val),
                                ..
                            }) if !path.is_ident("entity_name") => fail!(
                                meta,
                                format!(
                                    "unrecognised attribute {:?}",
                                    path.get_ident()
                                        .map(|i| i.to_string())
                                        .expect("expected an attribute")
                                )
                            ),
                            u => fail!(u, "unexpected attribute"),
                        },
                        u => fail!(u, "unexpected attribute format"),
                    }
                }
            }
            _ => {}
        }
    }
    // `entity_name` is mandatory.
    let entity_name = entity_name.ok_or_else(|| {
        syn::Error::new(
            Span::call_site(),
            "Attribute entity_name is required, e.g. #[event_sauce(entity_name = \"users\")]",
        )
    })?;
    Ok(EntityAttributes { entity_name })
}
/// Return the name of the field which is to become the entity ID field.
///
/// The field is identified by a `#[event_sauce(id)]` marker attribute;
/// exactly the first matching field is used.
fn find_entity_id_field(fields: &Punctuated<Field, Comma>) -> syn::Result<Ident> {
    // Find field with an attribute matching `#[event_sauce(id)]`
    let field = fields.iter().find(|field| {
        field
            .attrs
            .iter()
            .map(|attr| attr.parse_meta().expect("Invalid field attribute provided"))
            .any(|meta| match meta {
                // A list attribute with exactly one nested item that is the
                // bare path `id`, i.e. `(id)`.
                Meta::List(MetaList { nested, .. }) if nested.len() == 1 => nested
                    .first()
                    .map(|nested_meta| matches!(nested_meta, NestedMeta::Meta(Meta::Path(path)) if path.is_ident("id")))
                    .unwrap_or(false),
                _ => false,
            })
    });
    // NOTE(review): the attribute path itself is not checked here, so
    // `#[other_macro(id)]` on a field would also match — confirm intended.
    if let Some(field_ident) = field.and_then(|f| f.ident.as_ref()) {
        Ok(field_ident.clone())
    } else {
        fail!(
            fields,
            "the #[event_sauce(id)] attribute is required on the ID field of the entity"
        )
    }
}
/// Generates the `impl event_sauce::Entity` block for a named-field struct.
fn expand_derive_entity_struct(
    input: &DeriveInput,
    fields: &Punctuated<Field, Comma>,
) -> syn::Result<proc_macro2::TokenStream> {
    let ident = &input.ident;
    // Struct-level `#[event_sauce(entity_name = "...")]` supplies ENTITY_TYPE.
    let EntityAttributes { entity_name } = parse_entity_attributes(&input.attrs)?;
    // The field marked `#[event_sauce(id)]` backs the `entity_id` accessor.
    let entity_id_field = find_entity_id_field(&fields)?;
    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
    Ok(quote!(
        impl #impl_generics event_sauce::Entity for #ident #ty_generics #where_clause {
            const ENTITY_TYPE: &'static str = #entity_name;
            fn entity_id(&self) -> Uuid {
                self.#entity_id_field
            }
        }
    ))
}
pub fn expand_derive_entity(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
match &input.data {
Data::Struct(DataStruct {
fields: Fields::Named(FieldsNamed { named, .. }),
..
}) => expand_derive_entity_struct(input, named),
Data::Struct(DataStruct {
fields: Fields::Unnamed(_),
..
}) => Err(syn::Error::new_spanned(
input,
"tuple structs are not supported",
)),
Data::Struct(DataStruct {
fields: Fields::Unit,
..
}) => Err(syn::Error::new_spanned(
input,
"unit structs are not supported",
)),
Data::Enum(_) => Err(syn::Error::new_spanned(input, "enums are not supported")),
Data::Union(_) => Err(syn::Error::new_spanned(input, "unions are not supported")),
}
}
|
// svd2rust-style register-level reader/writer wrappers for INTR.
#[doc = "Reader of register INTR"]
pub type R = crate::R<u32, super::INTR>;
#[doc = "Writer for register INTR"]
pub type W = crate::W<u32, super::INTR>;
#[doc = "Register INTR `reset()`'s with value 0"]
impl crate::ResetValue for super::INTR {
    type Type = u32;
    // All interrupt flags start cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TR_TX_REQ`"]
pub type TR_TX_REQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TR_TX_REQ`"]
pub struct TR_TX_REQ_W<'a> {
    // Exclusive borrow of the register writer; updates land in `w.bits`.
    w: &'a mut W,
}
impl<'a> TR_TX_REQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `TR_RX_REQ`"]
pub type TR_RX_REQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TR_RX_REQ`"]
pub struct TR_RX_REQ_W<'a> {
    // Exclusive borrow of the register writer; updates land in `w.bits`.
    w: &'a mut W,
}
impl<'a> TR_RX_REQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 1: clear it, then OR in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `XIP_ALIGNMENT_ERROR`"]
pub type XIP_ALIGNMENT_ERROR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `XIP_ALIGNMENT_ERROR`"]
pub struct XIP_ALIGNMENT_ERROR_W<'a> {
    // Exclusive borrow of the register writer; updates land in `w.bits`.
    w: &'a mut W,
}
impl<'a> XIP_ALIGNMENT_ERROR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 2: clear it, then OR in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `TX_CMD_FIFO_OVERFLOW`"]
pub type TX_CMD_FIFO_OVERFLOW_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TX_CMD_FIFO_OVERFLOW`"]
pub struct TX_CMD_FIFO_OVERFLOW_W<'a> {
    // Exclusive borrow of the register writer; updates land in `w.bits`.
    w: &'a mut W,
}
impl<'a> TX_CMD_FIFO_OVERFLOW_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 3: clear it, then OR in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `TX_DATA_FIFO_OVERFLOW`"]
pub type TX_DATA_FIFO_OVERFLOW_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TX_DATA_FIFO_OVERFLOW`"]
pub struct TX_DATA_FIFO_OVERFLOW_W<'a> {
    // Exclusive borrow of the register writer; updates land in `w.bits`.
    w: &'a mut W,
}
impl<'a> TX_DATA_FIFO_OVERFLOW_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 4: clear it, then OR in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `RX_DATA_FIFO_UNDERFLOW`"]
pub type RX_DATA_FIFO_UNDERFLOW_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RX_DATA_FIFO_UNDERFLOW`"]
pub struct RX_DATA_FIFO_UNDERFLOW_W<'a> {
    // Exclusive borrow of the register writer; updates land in `w.bits`.
    w: &'a mut W,
}
impl<'a> RX_DATA_FIFO_UNDERFLOW_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 5: clear it, then OR in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
impl R {
    /// Bit 0 - Activated in MMIO mode, when a TX data FIFO trigger 'tr_tx_req' is activated.
    #[inline(always)]
    pub fn tr_tx_req(&self) -> TR_TX_REQ_R {
        // Test the bit with a mask instead of shift-then-mask; same result.
        TR_TX_REQ_R::new((self.bits & (0x01 << 0)) != 0)
    }
    /// Bit 1 - Activated in MMIO mode, when a RX data FIFO trigger 'tr_rx_req' is activated.
    #[inline(always)]
    pub fn tr_rx_req(&self) -> TR_RX_REQ_R {
        TR_RX_REQ_R::new((self.bits & (0x01 << 1)) != 0)
    }
    /// Bit 2 - Activated in XIP mode, if: - The selected device's ADDR_CTL.DIV2 is '1' and the AHB-Lite bus transfer address is not a multiple of 2. - The selected device's ADDR_CTL.DIV2 is '1' and the XIP transfer request is NOT for a multiple of 2 Bytes. Note: In dual-quad SPI mode (ADDR_CTL.DIV is '1'), each memory device contributes a 4-bit nibble for read data or write data. This is only possible if the request address is a multiple of 2 and the number of requested Bytes is a multiple of 2.
    #[inline(always)]
    pub fn xip_alignment_error(&self) -> XIP_ALIGNMENT_ERROR_R {
        XIP_ALIGNMENT_ERROR_R::new((self.bits & (0x01 << 2)) != 0)
    }
    /// Bit 3 - Activated in MMIO mode, on an AHB-Lite write transfer to the TX command FIFO (TX_CMD_FIFO_WR) with not enough free entries available.
    #[inline(always)]
    pub fn tx_cmd_fifo_overflow(&self) -> TX_CMD_FIFO_OVERFLOW_R {
        TX_CMD_FIFO_OVERFLOW_R::new((self.bits & (0x01 << 3)) != 0)
    }
    /// Bit 4 - Activated in MMIO mode, on an AHB-Lite write transfer to the TX data FIFO (TX_DATA_FIFO_WR1, TX_DATA_FIFO_WR2, TX_DATA_FIFO_WR4) with not enough free entries available.
    #[inline(always)]
    pub fn tx_data_fifo_overflow(&self) -> TX_DATA_FIFO_OVERFLOW_R {
        TX_DATA_FIFO_OVERFLOW_R::new((self.bits & (0x01 << 4)) != 0)
    }
    /// Bit 5 - Activated in MMIO mode, on an AHB-Lite read transfer from the RX data FIFO (RX_DATA_FIFO_RD1, RX_DATA_FIFO_RD2, RX_DATA_FIFO_RD4) with not enough entries available. Only activated for NON test bus controller transfers.
    #[inline(always)]
    pub fn rx_data_fifo_underflow(&self) -> RX_DATA_FIFO_UNDERFLOW_R {
        RX_DATA_FIFO_UNDERFLOW_R::new((self.bits & (0x01 << 5)) != 0)
    }
}
impl W {
    // Each accessor returns a one-bit write proxy borrowing this writer; the
    // proxy's `bit`/`set_bit`/`clear_bit` mutate `self.bits` in place.
    #[doc = "Bit 0 - Activated in MMIO mode, when a TX data FIFO trigger 'tr_tx_req' is activated."]
    #[inline(always)]
    pub fn tr_tx_req(&mut self) -> TR_TX_REQ_W {
        TR_TX_REQ_W { w: self }
    }
    #[doc = "Bit 1 - Activated in MMIO mode, when a RX data FIFO trigger 'tr_rx_req' is activated."]
    #[inline(always)]
    pub fn tr_rx_req(&mut self) -> TR_RX_REQ_W {
        TR_RX_REQ_W { w: self }
    }
    #[doc = "Bit 2 - Activated in XIP mode, if: - The selected device's ADDR_CTL.DIV2 is '1' and the AHB-Lite bus transfer address is not a multiple of 2. - The selected device's ADDR_CTL.DIV2 is '1' and the XIP transfer request is NOT for a multiple of 2 Bytes. Note: In dual-quad SPI mode (ADDR_CTL.DIV is '1'), each memory device contributes a 4-bit nibble for read data or write data. This is only possible if the request address is a multiple of 2 and the number of requested Bytes is a multiple of 2."]
    #[inline(always)]
    pub fn xip_alignment_error(&mut self) -> XIP_ALIGNMENT_ERROR_W {
        XIP_ALIGNMENT_ERROR_W { w: self }
    }
    #[doc = "Bit 3 - Activated in MMIO mode, on an AHB-Lite write transfer to the TX command FIFO (TX_CMD_FIFO_WR) with not enough free entries available."]
    #[inline(always)]
    pub fn tx_cmd_fifo_overflow(&mut self) -> TX_CMD_FIFO_OVERFLOW_W {
        TX_CMD_FIFO_OVERFLOW_W { w: self }
    }
    #[doc = "Bit 4 - Activated in MMIO mode, on an AHB-Lite write transfer to the TX data FIFO (TX_DATA_FIFO_WR1, TX_DATA_FIFO_WR2, TX_DATA_FIFO_WR4) with not enough free entries available."]
    #[inline(always)]
    pub fn tx_data_fifo_overflow(&mut self) -> TX_DATA_FIFO_OVERFLOW_W {
        TX_DATA_FIFO_OVERFLOW_W { w: self }
    }
    #[doc = "Bit 5 - Activated in MMIO mode, on an AHB-Lite read transfer from the RX data FIFO (RX_DATA_FIFO_RD1, RX_DATA_FIFO_RD2, RX_DATA_FIFO_RD4) with not enough entries available. Only activated for NON test bus controller transfers."]
    #[inline(always)]
    pub fn rx_data_fifo_underflow(&mut self) -> RX_DATA_FIFO_UNDERFLOW_W {
        RX_DATA_FIFO_UNDERFLOW_W { w: self }
    }
}
|
use std::collections::HashMap;
/// HTTP request methods recognised on the request line (see `METHOD_MAP`).
///
/// Deriving `PartialOrd` without `Ord`/`Eq`/`Hash` left the type unusable as
/// a map/set key and without a total order; a fieldless `Copy` enum can derive
/// the full set with no behavior change. Variant order (and thus the derived
/// ordering) follows the declaration order.
#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub enum Method {
    OPTIONS,
    GET,
    HEAD,
    POST,
    PUT,
    DELETE,
    TRACE,
    CONNECT,
}
/// Parsed request target: the path component plus its query parameters.
#[derive(Debug)]
pub struct URI {
    // Path portion of the request target, e.g. "/index.html".
    pub path: String,
    // Query key/value pairs; duplicate keys collapse to a single entry (HashMap).
    pub query: HashMap<String, String>,
}
/// HTTP protocol versions recognised on the request line (see `HTTP_VERSION_MAP`).
#[derive(Debug, Copy, Clone)]
pub(crate) enum HTTPVersion {
    // "1.0"
    Http10,
    // "1.1"
    Http11,
    // "2.0"
    Http20,
}
/// A parsed HTTP request.
#[derive(Debug)]
pub struct Request {
    pub method: Method,
    pub uri: URI,
    // Header name -> value. NOTE(review): case-normalization and duplicate
    // header handling depend on the parser — confirm against the caller.
    pub headers: HashMap<String, String>,
    // Request body as text.
    pub body: String,
}
lazy_static! {
    /// Maps the method token on the request line to its [`Method`] variant.
    pub(crate) static ref METHOD_MAP: HashMap<&'static str, Method> = [
        ("OPTIONS", Method::OPTIONS),
        ("GET", Method::GET),
        ("HEAD", Method::HEAD),
        ("POST", Method::POST),
        ("PUT", Method::PUT),
        ("DELETE", Method::DELETE),
        ("TRACE", Method::TRACE),
        ("CONNECT", Method::CONNECT),
    ]
    .iter()
    .cloned()
    .collect();
    /// Maps the version part of the request line to its [`HTTPVersion`] variant.
    pub(crate) static ref HTTP_VERSION_MAP: HashMap<&'static str, HTTPVersion> = [
        ("1.0", HTTPVersion::Http10),
        ("1.1", HTTPVersion::Http11),
        ("2.0", HTTPVersion::Http20),
    ]
    .iter()
    .cloned()
    .collect();
}
|
// Field-level reader/writer aliases for DIEPINT0. Writers carry their bit
// offset in the const generic `O` (fixed at the `impl W` accessors below).
// `TXFE` has a reader only — no writer alias is generated for it.
#[doc = "Register `DIEPINT0` reader"]
pub type R = crate::R<DIEPINT0_SPEC>;
#[doc = "Register `DIEPINT0` writer"]
pub type W = crate::W<DIEPINT0_SPEC>;
#[doc = "Field `XFRC` reader - XFRC"]
pub type XFRC_R = crate::BitReader;
#[doc = "Field `XFRC` writer - XFRC"]
pub type XFRC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `EPDISD` reader - EPDISD"]
pub type EPDISD_R = crate::BitReader;
#[doc = "Field `EPDISD` writer - EPDISD"]
pub type EPDISD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TOC` reader - TOC"]
pub type TOC_R = crate::BitReader;
#[doc = "Field `TOC` writer - TOC"]
pub type TOC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ITTXFE` reader - ITTXFE"]
pub type ITTXFE_R = crate::BitReader;
#[doc = "Field `ITTXFE` writer - ITTXFE"]
pub type ITTXFE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `INEPNM` reader - IN token received with EP mismatch"]
pub type INEPNM_R = crate::BitReader;
#[doc = "Field `INEPNM` writer - IN token received with EP mismatch"]
pub type INEPNM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `INEPNE` reader - INEPNE"]
pub type INEPNE_R = crate::BitReader;
#[doc = "Field `INEPNE` writer - INEPNE"]
pub type INEPNE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TXFE` reader - TXFE"]
pub type TXFE_R = crate::BitReader;
#[doc = "Field `PKTDRPSTS` reader - Packet dropped status"]
pub type PKTDRPSTS_R = crate::BitReader;
#[doc = "Field `PKTDRPSTS` writer - Packet dropped status"]
pub type PKTDRPSTS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `NAK` reader - NAK input"]
pub type NAK_R = crate::BitReader;
#[doc = "Field `NAK` writer - NAK input"]
pub type NAK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    /// Bit 0 - XFRC
    #[inline(always)]
    pub fn xfrc(&self) -> XFRC_R {
        // Test each flag with a mask instead of shift-then-mask; same result.
        XFRC_R::new((self.bits & (1 << 0)) != 0)
    }
    /// Bit 1 - EPDISD
    #[inline(always)]
    pub fn epdisd(&self) -> EPDISD_R {
        EPDISD_R::new((self.bits & (1 << 1)) != 0)
    }
    /// Bit 3 - TOC
    #[inline(always)]
    pub fn toc(&self) -> TOC_R {
        TOC_R::new((self.bits & (1 << 3)) != 0)
    }
    /// Bit 4 - ITTXFE
    #[inline(always)]
    pub fn ittxfe(&self) -> ITTXFE_R {
        ITTXFE_R::new((self.bits & (1 << 4)) != 0)
    }
    /// Bit 5 - IN token received with EP mismatch
    #[inline(always)]
    pub fn inepnm(&self) -> INEPNM_R {
        INEPNM_R::new((self.bits & (1 << 5)) != 0)
    }
    /// Bit 6 - INEPNE
    #[inline(always)]
    pub fn inepne(&self) -> INEPNE_R {
        INEPNE_R::new((self.bits & (1 << 6)) != 0)
    }
    /// Bit 7 - TXFE
    #[inline(always)]
    pub fn txfe(&self) -> TXFE_R {
        TXFE_R::new((self.bits & (1 << 7)) != 0)
    }
    /// Bit 11 - Packet dropped status
    #[inline(always)]
    pub fn pktdrpsts(&self) -> PKTDRPSTS_R {
        PKTDRPSTS_R::new((self.bits & (1 << 11)) != 0)
    }
    /// Bit 13 - NAK input
    #[inline(always)]
    pub fn nak(&self) -> NAK_R {
        NAK_R::new((self.bits & (1 << 13)) != 0)
    }
}
impl W {
    // Typed accessors return single-bit write proxies; the bit offset is the
    // second const generic argument of each proxy type.
    #[doc = "Bit 0 - XFRC"]
    #[inline(always)]
    #[must_use]
    pub fn xfrc(&mut self) -> XFRC_W<DIEPINT0_SPEC, 0> {
        XFRC_W::new(self)
    }
    #[doc = "Bit 1 - EPDISD"]
    #[inline(always)]
    #[must_use]
    pub fn epdisd(&mut self) -> EPDISD_W<DIEPINT0_SPEC, 1> {
        EPDISD_W::new(self)
    }
    #[doc = "Bit 3 - TOC"]
    #[inline(always)]
    #[must_use]
    pub fn toc(&mut self) -> TOC_W<DIEPINT0_SPEC, 3> {
        TOC_W::new(self)
    }
    #[doc = "Bit 4 - ITTXFE"]
    #[inline(always)]
    #[must_use]
    pub fn ittxfe(&mut self) -> ITTXFE_W<DIEPINT0_SPEC, 4> {
        ITTXFE_W::new(self)
    }
    #[doc = "Bit 5 - IN token received with EP mismatch"]
    #[inline(always)]
    #[must_use]
    pub fn inepnm(&mut self) -> INEPNM_W<DIEPINT0_SPEC, 5> {
        INEPNM_W::new(self)
    }
    #[doc = "Bit 6 - INEPNE"]
    #[inline(always)]
    #[must_use]
    pub fn inepne(&mut self) -> INEPNE_W<DIEPINT0_SPEC, 6> {
        INEPNE_W::new(self)
    }
    #[doc = "Bit 11 - Packet dropped status"]
    #[inline(always)]
    #[must_use]
    pub fn pktdrpsts(&mut self) -> PKTDRPSTS_W<DIEPINT0_SPEC, 11> {
        PKTDRPSTS_W::new(self)
    }
    #[doc = "Bit 13 - NAK input"]
    #[inline(always)]
    #[must_use]
    pub fn nak(&mut self) -> NAK_W<DIEPINT0_SPEC, 13> {
        NAK_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // unsafe: arbitrary bit patterns may not be valid for this register
    // (svd2rust convention for whole-register writes).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "device endpoint-x interrupt register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`diepint0::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`diepint0::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DIEPINT0_SPEC;
impl crate::RegisterSpec for DIEPINT0_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`diepint0::R`](R) reader structure"]
impl crate::Readable for DIEPINT0_SPEC {}
#[doc = "`write(|w| ..)` method takes [`diepint0::W`](W) writer structure"]
impl crate::Writable for DIEPINT0_SPEC {
    // Both modify-bitmaps are zero — presumably no w0c/w1c-style fields were
    // declared in the SVD; verify against the device description.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DIEPINT0 to value 0x80"]
impl crate::Resettable for DIEPINT0_SPEC {
    // 0x80 = bit 7 (TXFE) set after reset.
    const RESET_VALUE: Self::Ux = 0x80;
}
|
use super::Props;
/// Pull-based (iterator) side of the spec: exposes the `Props` carried through
/// from the underlying source.
pub trait PullSpec {
    type Props: Props;
}
/// Push-based (pusherator) side of the spec. The associated type is a generic
/// associated type parameterized by the previous stage's props.
pub trait PushSpec {
    type Props<PrevProps: Props>;
}
// Implements `PullSpec`/`PushSpec` for single-input adaptor types by
// forwarding `Props` from the wrapped input `$iname`; the `$gname`s are the
// adaptor's remaining (unconstrained) generic parameters.
macro_rules! spec_impl {
    (
        PullSpec for $($struct:ident::)* <$iname:ident $(,$gname:ident)*>
    ) => {
        impl<$iname $(,$gname)*> PullSpec for $($struct::)* <$iname $(,$gname)*>
        where
            $iname: PullSpec,
        {
            type Props = $iname::Props;
        }
    };
    (
        PushSpec for $($struct:ident::)* <$iname:ident $(,$gname:ident)*>
    ) => {
        impl<$iname $(,$gname)*> PushSpec for $($struct::)* <$iname $(,$gname)*>
        where
            $iname: PushSpec,
        {
            type Props<PrevProps: Props> = $iname::Props<PrevProps>;
        }
    };
}
// std iterator adaptors forward their input's props unchanged.
spec_impl!(PullSpec for std::iter::Cloned::<I>);
spec_impl!(PullSpec for std::iter::Copied::<I>);
spec_impl!(PullSpec for std::iter::Filter::<I, P>);
spec_impl!(PullSpec for std::iter::FilterMap::<I, F>);
spec_impl!(PullSpec for std::iter::Inspect::<I, F>);
spec_impl!(PullSpec for std::iter::Map::<I, F>);
// `FlatMap` needs a hand-written impl: the macro cannot express the extra
// `U: IntoIterator` and closure bounds required to even name the type.
impl<I, U, F> PullSpec for std::iter::FlatMap<I, U, F>
where
    I: Iterator + PullSpec,
    U: IntoIterator,
    F: FnMut(I::Item) -> U,
{
    type Props = I::Props;
}
// `Flatten` likewise requires the `Item: IntoIterator` bound, so it gets a
// hand-written impl forwarding the input's props.
impl<I> PullSpec for std::iter::Flatten<I>
where
    I: Iterator + PullSpec,
    <I as Iterator>::Item: IntoIterator,
{
    type Props = I::Props;
}
// pusherator adaptors forward props from the wrapped downstream/input type.
spec_impl!(PushSpec for pusherator::filter_map::FilterMap::<Next, Func, In>);
spec_impl!(PushSpec for pusherator::filter::Filter::<Next, Func>);
spec_impl!(PushSpec for pusherator::flatten::Flatten::<Next, In>);
spec_impl!(PushSpec for pusherator::for_each::ForEach::<Func, In>);
spec_impl!(PushSpec for pusherator::map::Map::<Next, Func, In>);
// A partition has two downstream branches, so its props are the pair of both
// branches' props.
impl<Next1, Next2, Func> PushSpec for pusherator::partition::Partition<Next1, Next2, Func>
where
    Next1: PushSpec,
    Next2: PushSpec,
{
    type Props<PrevProps: Props> = (Next1::Props<PrevProps>, Next2::Props<PrevProps>);
}
// Tee duplicates the stream to two downstreams: pair of both props, as above.
impl<Next1, Next2> PushSpec for pusherator::tee::Tee<Next1, Next2>
where
    Next1: PushSpec,
    Next2: PushSpec,
{
    type Props<PrevProps: Props> = (Next1::Props<PrevProps>, Next2::Props<PrevProps>);
}
|
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate log;
extern crate pwhash;
extern crate rand;
extern crate regex;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate simplelog;
extern crate time;
extern crate typographic_linter;
pub mod api;
pub mod authentication;
pub mod database;
pub mod errors;
pub mod logger;
pub mod models;
pub mod schema;
|
use super::super::messages::WriteGTP;
use super::*;
use std::io;
// Column letters legal in a GTP coordinate: the upper-case alphabet with "I"
// skipped (25 letters).
const LEGAL_LETTERS: &str =
    "ABCDEFGHJKLMNOPQRSTUVWXYZ";
/// A GTP vertex: either a pass or a board coordinate.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Value {
    /// The pass move.
    Pass,
    // TODO: Introduce types LetterCoord and NumberCoord?
    /// The `char` is always an upper case letter except `'I'`.
    /// (cf. `LEGAL_LETTERS`)
    Coord(char, u8),
}
impl Value {
    /// The pass move.
    pub fn pass() -> Value {
        Value::Pass
    }
    /// Creates a coordinate vertex from a column letter and a row number.
    ///
    /// # Panics
    /// Panics when the (upper-cased) letter is not in `LEGAL_LETTERS` or when
    /// `n` is outside `1..=25`.
    pub fn new(c: char, n: u8) -> Value {
        // Normalize to upper case — `LEGAL_LETTERS` holds upper-case letters.
        // The previous `(c as u8 & 0x20) as char` produced '\0' or ' ' and
        // could never satisfy the assertion below.
        let c = c.to_ascii_uppercase();
        assert!(LEGAL_LETTERS.contains(c));
        assert!((0 < n) && (n <= 25));
        Value::Coord(c, n)
    }
}
impl WriteGTP for Value {
    /// Writes the vertex in GTP notation: the word `pass`, or a column letter
    /// followed by a row number.
    fn write_gtp(&self, f: &mut impl io::Write) -> io::Result<()> {
        // `Value` is `Copy`, so matching by value is free.
        match *self {
            Value::Pass => write!(f, "pass"),
            Value::Coord(letter, number) => {
                write!(f, "{}", letter)?;
                write!(f, "{}", number)
            }
        }
    }
}
// Defines the GTP `Vertex` type marker via the `singleton_type!` macro.
singleton_type!(Vertex);
// Every `Value` inhabits the single `Vertex` type, so the check is trivially true.
impl HasType<Type> for Value {
    fn has_type(&self, _t: &Type) -> bool {
        true
    }
}
impl Data for Value {
    type Type = Type;
    /// Parses a vertex: either the literal `pass` (case-insensitive) or a
    /// legal column letter followed by decimal digits.
    fn parse<'a, I: Input<'a>>(i: I, _t: &Self::Type) -> IResult<I, Self> {
        // NOTE(review): `Value::new` asserts on out-of-range numbers and
        // `parse_to().unwrap()` panics on u8 overflow, so oversized digit
        // strings panic rather than failing the parse — confirm intended.
        #[rustfmt::skip]
        alt!(i,
            value!(Value::pass(), tag_no_case!("pass")) |
            do_parse!(
                letter: one_of!(LEGAL_LETTERS) >>
                digits: call!(nom::digit) >>
                (Value::new(
                    letter,
                    digits.parse_to().unwrap(),
                ))
            )
        )
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! State database maintenance. Handles canonicalization and pruning in the database. The input to
//! this module is a `ChangeSet` which is basically a list of key-value pairs (trie nodes) that
//! were added or deleted during block execution.
//!
//! # Canonicalization.
//! Canonicalization window tracks a tree of blocks identified by header hash. The in-memory
//! overlay allows to get any node that was inserted in any of the blocks within the window.
//! The tree is journaled to the backing database and rebuilt on startup.
//! Canonicalization function selects one root from the top of the tree and discards all other roots
//! and their subtrees.
//!
//! # Pruning.
//! See `RefWindow` for pruning algorithm details. `StateDb` prunes on each canonicalization until
//! pruning constraints are satisfied.
mod noncanonical;
mod pruning;
#[cfg(test)]
mod test;
use codec::Codec;
use log::trace;
use noncanonical::NonCanonicalOverlay;
use parity_util_mem::{malloc_size, MallocSizeOf};
use parking_lot::RwLock;
use pruning::RefWindow;
use sc_client_api::{MemorySize, StateDbMemoryInfo};
use std::collections::{hash_map::Entry, HashMap};
use std::fmt;
// Metadata key/values used to persist the pruning mode (see `check_meta`
// and `insert_block`, which write/read them via `to_meta_key`).
const PRUNING_MODE: &[u8] = b"mode";
const PRUNING_MODE_ARCHIVE: &[u8] = b"archive";
const PRUNING_MODE_ARCHIVE_CANON: &[u8] = b"archive_canonical";
const PRUNING_MODE_CONSTRAINED: &[u8] = b"constrained";
/// Database value type.
pub type DBValue = Vec<u8>;
/// Basic set of requirements for the Block hash and node key types.
///
/// This is a marker trait: the blanket impl below covers every type that
/// satisfies the bounds, so it never needs to be implemented manually.
pub trait Hash:
    Send
    + Sync
    + Sized
    + Eq
    + PartialEq
    + Clone
    + Default
    + fmt::Debug
    + Codec
    + std::hash::Hash
    + 'static
{
}
// Blanket impl: any type meeting the bounds automatically implements `Hash`.
impl<
        T: Send
            + Sync
            + Sized
            + Eq
            + PartialEq
            + Clone
            + Default
            + fmt::Debug
            + Codec
            + std::hash::Hash
            + 'static,
    > Hash for T
{
}
/// Backend database trait. Read-only.
///
/// Supplies metadata values (such as the journal) keyed by raw bytes.
pub trait MetaDb {
    type Error: fmt::Debug;
    /// Get meta value, such as the journal.
    fn get_meta(&self, key: &[u8]) -> Result<Option<DBValue>, Self::Error>;
}
/// Backend database trait. Read-only.
///
/// Supplies state trie nodes; the key type is backend-defined.
pub trait NodeDb {
    type Key: ?Sized;
    type Error: fmt::Debug;
    /// Get state trie node.
    fn get(&self, key: &Self::Key) -> Result<Option<DBValue>, Self::Error>;
}
/// Error type.
// `Debug` is implemented manually below so each variant prints a readable
// message instead of the derived representation.
pub enum Error<E: fmt::Debug> {
    /// Database backend error.
    Db(E),
    /// `Codec` decoding error.
    Decoding(codec::Error),
    /// Trying to canonicalize invalid block.
    InvalidBlock,
    /// Trying to insert block with invalid number.
    InvalidBlockNumber,
    /// Trying to insert block with unknown parent.
    InvalidParent,
    /// Invalid pruning mode specified. Contains expected mode.
    InvalidPruningMode(String),
}
/// Pinning error type.
// Returned by `StateDbSync::pin` / `StateDb::pin`.
pub enum PinError {
    /// Trying to pin invalid block.
    InvalidBlock,
}
impl<E: fmt::Debug> From<codec::Error> for Error<E> {
fn from(x: codec::Error) -> Self {
Error::Decoding(x)
}
}
// Manual `Debug` so errors print as human-readable messages; the wrapped
// backend error `E` is rendered with its own `Debug`.
impl<E: fmt::Debug> fmt::Debug for Error<E> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            // Delegate entirely to the backend error's formatting.
            Error::Db(e) => e.fmt(f),
            // `e.what()` supplies the printable codec error description.
            Error::Decoding(e) => write!(f, "Error decoding sliceable value: {}", e.what()),
            Error::InvalidBlock => write!(f, "Trying to canonicalize invalid block"),
            Error::InvalidBlockNumber => write!(f, "Trying to insert block with invalid number"),
            Error::InvalidParent => write!(f, "Trying to insert block with unknown parent"),
            Error::InvalidPruningMode(e) => write!(f, "Expected pruning mode: {}", e),
        }
    }
}
/// A set of state node changes.
///
/// Generic over the key type so the same structure serves both state nodes
/// and raw metadata entries (`ChangeSet<Vec<u8>>` in [`CommitSet`]).
#[derive(Default, Debug, Clone)]
pub struct ChangeSet<H: Hash> {
    /// Inserted nodes.
    pub inserted: Vec<(H, DBValue)>,
    /// Deleted nodes.
    pub deleted: Vec<H>,
}
/// A set of changes to the backing database.
///
/// Metadata keys are raw byte vectors built with `to_meta_key`.
#[derive(Default, Debug, Clone)]
pub struct CommitSet<H: Hash> {
    /// State node changes.
    pub data: ChangeSet<H>,
    /// Metadata changes.
    pub meta: ChangeSet<Vec<u8>>,
}
/// Pruning constraints. If none are specified, only non-canonical states are
/// kept (the window size effectively defaults to 0 — see `max_blocks`).
#[derive(Default, Debug, Clone, Eq, PartialEq)]
pub struct Constraints {
    /// Maximum blocks. Defaults to 0 when unspecified, effectively keeping only non-canonical
    /// states.
    pub max_blocks: Option<u32>,
    /// Maximum memory in the pruning overlay.
    pub max_mem: Option<usize>,
}
/// Pruning mode.
///
/// Persisted in the database metadata via [`PruningMode::id`] and validated
/// on startup (`check_meta`).
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum PruningMode {
    /// Maintain a pruning window.
    Constrained(Constraints),
    /// No pruning. Canonicalization is a no-op.
    ArchiveAll,
    /// Canonicalization discards non-canonical nodes. All the canonical nodes are kept in the DB.
    ArchiveCanonical,
}
impl PruningMode {
    /// Create a mode that keeps given number of blocks.
    pub fn keep_blocks(n: u32) -> PruningMode {
        PruningMode::Constrained(Constraints { max_blocks: Some(n), max_mem: None })
    }
    /// Is this an archive (either ArchiveAll or ArchiveCanonical) pruning mode?
    pub fn is_archive(&self) -> bool {
        match *self {
            PruningMode::ArchiveAll | PruningMode::ArchiveCanonical => true,
            PruningMode::Constrained(_) => false,
        }
    }
    /// Returns the identifier under which this mode is persisted in the
    /// database metadata (one of the `PRUNING_MODE_*` constants).
    pub fn id(&self) -> &[u8] {
        match self {
            PruningMode::ArchiveAll => PRUNING_MODE_ARCHIVE,
            PruningMode::ArchiveCanonical => PRUNING_MODE_ARCHIVE_CANON,
            PruningMode::Constrained(_) => PRUNING_MODE_CONSTRAINED,
        }
    }
}
impl Default for PruningMode {
fn default() -> Self {
PruningMode::keep_blocks(256)
}
}
/// Builds a metadata key: the encoding of `data` followed by `suffix`.
fn to_meta_key<S: Codec>(suffix: &[u8], data: &S) -> Vec<u8> {
    let mut key = data.encode();
    key.extend_from_slice(suffix);
    key
}
/// Non-thread-safe core of [`StateDb`]; all methods assume exclusive access.
struct StateDbSync<BlockHash: Hash, Key: Hash> {
    // Active pruning strategy; validated against the persisted mode on startup.
    mode: PruningMode,
    // Tree of not-yet-canonicalized blocks and their overlay change sets.
    non_canonical: NonCanonicalOverlay<BlockHash, Key>,
    // Pruning window; `None` for the two archive modes.
    pruning: Option<RefWindow<BlockHash, Key>>,
    // Pin reference counts per block hash; pinned blocks are skipped by `prune`.
    pinned: HashMap<BlockHash, u32>,
}
impl<BlockHash: Hash + MallocSizeOf, Key: Hash + MallocSizeOf> StateDbSync<BlockHash, Key> {
    /// Builds the sync state: verifies the persisted pruning mode matches
    /// `mode`, then restores the non-canonical overlay (and, for constrained
    /// modes, the pruning window) from the backing database.
    fn new<D: MetaDb>(
        mode: PruningMode,
        ref_counting: bool,
        db: &D,
    ) -> Result<StateDbSync<BlockHash, Key>, Error<D::Error>> {
        trace!(target: "state-db", "StateDb settings: {:?}. Ref-counting: {}", mode, ref_counting);
        // Check that settings match
        Self::check_meta(&mode, db)?;
        let non_canonical: NonCanonicalOverlay<BlockHash, Key> = NonCanonicalOverlay::new(db)?;
        let pruning: Option<RefWindow<BlockHash, Key>> = match mode {
            // Memory-bounded pruning is not implemented yet.
            PruningMode::Constrained(Constraints { max_mem: Some(_), .. }) => unimplemented!(),
            PruningMode::Constrained(_) => Some(RefWindow::new(db, ref_counting)?),
            // Archive modes never prune, so no window is needed.
            PruningMode::ArchiveAll | PruningMode::ArchiveCanonical => None,
        };
        Ok(StateDbSync { mode, non_canonical, pruning, pinned: Default::default() })
    }
    /// Rejects opening a database created with a different pruning mode.
    /// A missing meta entry (fresh database) is accepted.
    fn check_meta<D: MetaDb>(mode: &PruningMode, db: &D) -> Result<(), Error<D::Error>> {
        let db_mode = db.get_meta(&to_meta_key(PRUNING_MODE, &())).map_err(Error::Db)?;
        trace!(target: "state-db",
            "DB pruning mode: {:?}",
            db_mode.as_ref().map(|v| std::str::from_utf8(&v))
        );
        match &db_mode {
            Some(v) if v.as_slice() == mode.id() => Ok(()),
            Some(v) => Err(Error::InvalidPruningMode(String::from_utf8_lossy(v).into())),
            None => Ok(()),
        }
    }
    /// Registers a new block's change set and returns the commit to write.
    /// Archive-all passes changes straight through (with deletions dropped);
    /// other modes insert the block into the non-canonical overlay.
    /// Block 0 additionally persists the pruning mode in the metadata.
    fn insert_block<E: fmt::Debug>(
        &mut self,
        hash: &BlockHash,
        number: u64,
        parent_hash: &BlockHash,
        mut changeset: ChangeSet<Key>,
    ) -> Result<CommitSet<Key>, Error<E>> {
        let mut meta = ChangeSet::default();
        if number == 0 {
            // Save pruning mode when writing first block.
            meta.inserted.push((to_meta_key(PRUNING_MODE, &()), self.mode.id().into()));
        }
        match self.mode {
            PruningMode::ArchiveAll => {
                // Archive keeps everything: never delete nodes.
                changeset.deleted.clear();
                // write changes immediately
                Ok(CommitSet { data: changeset, meta })
            },
            PruningMode::Constrained(_) | PruningMode::ArchiveCanonical => {
                let commit = self.non_canonical.insert(hash, number, parent_hash, changeset);
                commit.map(|mut c| {
                    // Carry the pruning-mode meta entry (if any) into the commit.
                    c.meta.inserted.extend(meta.inserted);
                    c
                })
            },
        }
    }
    /// Selects `hash` as canonical: discards competing forks from the overlay,
    /// hands the block to the pruning window and prunes. No-op for archive-all.
    fn canonicalize_block<E: fmt::Debug>(
        &mut self,
        hash: &BlockHash,
    ) -> Result<CommitSet<Key>, Error<E>> {
        let mut commit = CommitSet::default();
        if self.mode == PruningMode::ArchiveAll {
            return Ok(commit)
        }
        match self.non_canonical.canonicalize(&hash, &mut commit) {
            Ok(()) =>
                if self.mode == PruningMode::ArchiveCanonical {
                    // Canonical archive keeps all canonical nodes: drop deletions.
                    commit.data.deleted.clear();
                },
            Err(e) => return Err(e),
        };
        if let Some(ref mut pruning) = self.pruning {
            pruning.note_canonical(&hash, &mut commit);
        }
        self.prune(&mut commit);
        Ok(commit)
    }
    /// Last canonicalized block number, if any block was canonicalized yet.
    fn best_canonical(&self) -> Option<u64> {
        return self.non_canonical.last_canonicalized_block_number()
    }
    /// Whether the state of `(hash, number)` is no longer retrievable.
    fn is_pruned(&self, hash: &BlockHash, number: u64) -> bool {
        match self.mode {
            PruningMode::ArchiveAll => false,
            PruningMode::ArchiveCanonical | PruningMode::Constrained(_) => {
                if self.best_canonical().map(|c| number > c).unwrap_or(true) {
                    // Above the canonical frontier: present only if still in the overlay.
                    !self.non_canonical.have_block(hash)
                } else {
                    // At or below the frontier: consult the pruning window.
                    self.pruning.as_ref().map_or(false, |pruning| {
                        number < pruning.pending() || !pruning.have_block(hash)
                    })
                }
            },
        }
    }
    /// Appends prune operations to `commit` until the window satisfies the
    /// constraints, stopping early when the next candidate block is pinned.
    fn prune(&mut self, commit: &mut CommitSet<Key>) {
        if let (&mut Some(ref mut pruning), &PruningMode::Constrained(ref constraints)) =
            (&mut self.pruning, &self.mode)
        {
            loop {
                if pruning.window_size() <= constraints.max_blocks.unwrap_or(0) as u64 {
                    break
                }
                if constraints.max_mem.map_or(false, |m| pruning.mem_used() > m) {
                    break
                }
                // Never prune a pinned block.
                let pinned = &self.pinned;
                if pruning.next_hash().map_or(false, |h| pinned.contains_key(&h)) {
                    break
                }
                pruning.prune_one(commit);
            }
        }
    }
    /// Revert all non-canonical blocks with the best block number.
    /// Returns a database commit or `None` if not possible.
    /// For archive an empty commit set is returned.
    fn revert_one(&mut self) -> Option<CommitSet<Key>> {
        match self.mode {
            PruningMode::ArchiveAll => Some(CommitSet::default()),
            PruningMode::ArchiveCanonical | PruningMode::Constrained(_) =>
                self.non_canonical.revert_one(),
        }
    }
    /// Reference-counted pin preventing `hash` from being pruned. Fails when
    /// the block is not present in either the overlay or the pruning window.
    fn pin(&mut self, hash: &BlockHash) -> Result<(), PinError> {
        match self.mode {
            PruningMode::ArchiveAll => Ok(()),
            PruningMode::ArchiveCanonical | PruningMode::Constrained(_) => {
                if self.non_canonical.have_block(hash) ||
                    self.pruning.as_ref().map_or(false, |pruning| pruning.have_block(hash))
                {
                    let refs = self.pinned.entry(hash.clone()).or_default();
                    if *refs == 0 {
                        // First pin: also notify the overlay.
                        trace!(target: "state-db-pin", "Pinned block: {:?}", hash);
                        self.non_canonical.pin(hash);
                    }
                    *refs += 1;
                    Ok(())
                } else {
                    Err(PinError::InvalidBlock)
                }
            },
        }
    }
    /// Drops one pin reference; at zero the pin is removed entirely.
    fn unpin(&mut self, hash: &BlockHash) {
        match self.pinned.entry(hash.clone()) {
            Entry::Occupied(mut entry) => {
                *entry.get_mut() -= 1;
                if *entry.get() == 0 {
                    trace!(target: "state-db-pin", "Unpinned block: {:?}", hash);
                    entry.remove();
                    self.non_canonical.unpin(hash);
                } else {
                    trace!(target: "state-db-pin", "Releasing reference for {:?}", hash);
                }
            },
            // Unpinning a block that was never pinned is a silent no-op.
            Entry::Vacant(_) => {},
        }
    }
    /// Reads `key`, preferring the non-canonical overlay over the backing DB.
    pub fn get<D: NodeDb, Q: ?Sized>(
        &self,
        key: &Q,
        db: &D,
    ) -> Result<Option<DBValue>, Error<D::Error>>
    where
        Q: AsRef<D::Key>,
        Key: std::borrow::Borrow<Q>,
        Q: std::hash::Hash + Eq,
    {
        if let Some(value) = self.non_canonical.get(key) {
            return Ok(Some(value))
        }
        db.get(key.as_ref()).map_err(|e| Error::Db(e))
    }
    /// Applies pending changes in the overlay and the pruning window.
    fn apply_pending(&mut self) {
        self.non_canonical.apply_pending();
        if let Some(pruning) = &mut self.pruning {
            pruning.apply_pending();
        }
        trace!(
            target: "forks",
            "First available: {:?} ({}), Last canon: {:?} ({}), Best forks: {:?}",
            self.pruning.as_ref().and_then(|p| p.next_hash()),
            self.pruning.as_ref().map(|p| p.pending()).unwrap_or(0),
            self.non_canonical.last_canonicalized_hash(),
            self.non_canonical.last_canonicalized_block_number().unwrap_or(0),
            self.non_canonical.top_level(),
        );
    }
    /// Discards pending changes in the overlay and the pruning window.
    fn revert_pending(&mut self) {
        if let Some(pruning) = &mut self.pruning {
            pruning.revert_pending();
        }
        self.non_canonical.revert_pending();
    }
    /// Approximate memory usage of the in-memory structures.
    fn memory_info(&self) -> StateDbMemoryInfo {
        StateDbMemoryInfo {
            non_canonical: MemorySize::from_bytes(malloc_size(&self.non_canonical)),
            pruning: self.pruning.as_ref().map(|p| MemorySize::from_bytes(malloc_size(p))),
            pinned: MemorySize::from_bytes(malloc_size(&self.pinned)),
        }
    }
}
/// State DB maintenance. See module description.
/// Can be shared across threads.
pub struct StateDb<BlockHash: Hash, Key: Hash> {
    // All mutable state sits behind one `RwLock`: reads share, mutations serialize.
    db: RwLock<StateDbSync<BlockHash, Key>>,
}
impl<BlockHash: Hash + MallocSizeOf, Key: Hash + MallocSizeOf> StateDb<BlockHash, Key> {
    /// Creates a new instance. Does not expect any metadata in the database.
    pub fn new<D: MetaDb>(
        mode: PruningMode,
        ref_counting: bool,
        db: &D,
    ) -> Result<StateDb<BlockHash, Key>, Error<D::Error>> {
        Ok(StateDb { db: RwLock::new(StateDbSync::new(mode, ref_counting, db)?) })
    }
    /// Add a new non-canonical block.
    pub fn insert_block<E: fmt::Debug>(
        &self,
        hash: &BlockHash,
        number: u64,
        parent_hash: &BlockHash,
        changeset: ChangeSet<Key>,
    ) -> Result<CommitSet<Key>, Error<E>> {
        self.db.write().insert_block(hash, number, parent_hash, changeset)
    }
    /// Finalize a previously inserted block.
    pub fn canonicalize_block<E: fmt::Debug>(
        &self,
        hash: &BlockHash,
    ) -> Result<CommitSet<Key>, Error<E>> {
        self.db.write().canonicalize_block(hash)
    }
    /// Prevents pruning of specified block and its descendants.
    pub fn pin(&self, hash: &BlockHash) -> Result<(), PinError> {
        self.db.write().pin(hash)
    }
    /// Allows pruning of specified block.
    pub fn unpin(&self, hash: &BlockHash) {
        self.db.write().unpin(hash)
    }
    /// Get a value from non-canonical/pruning overlay or the backing DB.
    /// This is the only operation that takes the read lock.
    pub fn get<D: NodeDb, Q: ?Sized>(
        &self,
        key: &Q,
        db: &D,
    ) -> Result<Option<DBValue>, Error<D::Error>>
    where
        Q: AsRef<D::Key>,
        Key: std::borrow::Borrow<Q>,
        Q: std::hash::Hash + Eq,
    {
        self.db.read().get(key, db)
    }
    /// Revert all non-canonical blocks with the best block number.
    /// Returns a database commit or `None` if not possible.
    /// For archive an empty commit set is returned.
    pub fn revert_one(&self) -> Option<CommitSet<Key>> {
        self.db.write().revert_one()
    }
    /// Returns last finalized block number.
    pub fn best_canonical(&self) -> Option<u64> {
        // Idiom fix: tail expression instead of a trailing `return`.
        self.db.read().best_canonical()
    }
    /// Check if block is pruned away.
    pub fn is_pruned(&self, hash: &BlockHash, number: u64) -> bool {
        // Idiom fix: tail expression instead of a trailing `return`.
        self.db.read().is_pruned(hash, number)
    }
    /// Apply all pending changes.
    pub fn apply_pending(&self) {
        self.db.write().apply_pending();
    }
    /// Revert all pending changes.
    pub fn revert_pending(&self) {
        self.db.write().revert_pending();
    }
    /// Returns the current memory statistics of this instance.
    pub fn memory_info(&self) -> StateDbMemoryInfo {
        self.db.read().memory_info()
    }
}
#[cfg(test)]
mod tests {
use crate::test::{make_changeset, make_db, TestDb};
use crate::{Constraints, PruningMode, StateDb};
use sp_core::H256;
use std::io;
// Builds a fixture chain with a fork at height 2:
//   0 -> 1 -> 21 -> 3   (canonicalized in the order 1, 21, 3)
//             \-> 22    (stale fork head, never canonicalized)
//   3 -> 4               (inserted but not canonicalized)
// Keys inserted equal the block number; the pre-seeded 9x keys are deleted
// along the way (91 by block 1, 921/1 by block 21, 922 by block 22, ...).
fn make_test_db(settings: PruningMode) -> (TestDb, StateDb<H256, H256>) {
let mut db = make_db(&[91, 921, 922, 93, 94]);
let state_db = StateDb::new(settings, false, &db).unwrap();
// Block 1 (child of genesis): insert key 1, delete key 91.
db.commit(
&state_db
.insert_block::<io::Error>(
&H256::from_low_u64_be(1),
1,
&H256::from_low_u64_be(0),
make_changeset(&[1], &[91]),
)
.unwrap(),
);
// Block 21 (fork head A at height 2): insert 21, delete 921 and 1.
db.commit(
&state_db
.insert_block::<io::Error>(
&H256::from_low_u64_be(21),
2,
&H256::from_low_u64_be(1),
make_changeset(&[21], &[921, 1]),
)
.unwrap(),
);
// Block 22 (fork head B at height 2): insert 22, delete 922.
db.commit(
&state_db
.insert_block::<io::Error>(
&H256::from_low_u64_be(22),
2,
&H256::from_low_u64_be(1),
make_changeset(&[22], &[922]),
)
.unwrap(),
);
// Block 3 extends fork A: insert 3, delete 93.
db.commit(
&state_db
.insert_block::<io::Error>(
&H256::from_low_u64_be(3),
3,
&H256::from_low_u64_be(21),
make_changeset(&[3], &[93]),
)
.unwrap(),
);
state_db.apply_pending();
// Canonicalize block 1, then insert block 4 on top of 3, then
// canonicalize 21 and 3 — exercising pruning along the canonical chain.
db.commit(&state_db.canonicalize_block::<io::Error>(&H256::from_low_u64_be(1)).unwrap());
state_db.apply_pending();
db.commit(
&state_db
.insert_block::<io::Error>(
&H256::from_low_u64_be(4),
4,
&H256::from_low_u64_be(3),
make_changeset(&[4], &[94]),
)
.unwrap(),
);
state_db.apply_pending();
db.commit(&state_db.canonicalize_block::<io::Error>(&H256::from_low_u64_be(21)).unwrap());
state_db.apply_pending();
db.commit(&state_db.canonicalize_block::<io::Error>(&H256::from_low_u64_be(3)).unwrap());
state_db.apply_pending();
(db, state_db)
}
// ArchiveAll must retain every value ever written, including stale forks.
#[test]
fn full_archive_keeps_everything() {
let (db, sdb) = make_test_db(PruningMode::ArchiveAll);
assert!(db.data_eq(&make_db(&[1, 21, 22, 3, 4, 91, 921, 922, 93, 94])));
assert!(!sdb.is_pruned(&H256::from_low_u64_be(0), 0));
}
// ArchiveCanonical drops values only reachable from the stale fork (22).
#[test]
fn canonical_archive_keeps_canonical() {
let (db, _) = make_test_db(PruningMode::ArchiveCanonical);
assert!(db.data_eq(&make_db(&[1, 21, 3, 91, 921, 922, 93, 94])));
}
// With a zero-block window everything behind the last canonical block is gone.
#[test]
fn prune_window_0() {
let (db, _) = make_test_db(PruningMode::Constrained(Constraints {
max_blocks: Some(0),
max_mem: None,
}));
assert!(db.data_eq(&make_db(&[21, 3, 922, 94])));
}
// One-block window: only the most recently canonicalized block's data survives.
#[test]
fn prune_window_1() {
let (db, sdb) = make_test_db(PruningMode::Constrained(Constraints {
max_blocks: Some(1),
max_mem: None,
}));
assert!(sdb.is_pruned(&H256::from_low_u64_be(0), 0));
assert!(sdb.is_pruned(&H256::from_low_u64_be(1), 1));
assert!(sdb.is_pruned(&H256::from_low_u64_be(21), 2));
assert!(sdb.is_pruned(&H256::from_low_u64_be(22), 2));
assert!(db.data_eq(&make_db(&[21, 3, 922, 93, 94])));
}
// Two-block window: block 21 is still inside the window, block 1 is not.
#[test]
fn prune_window_2() {
let (db, sdb) = make_test_db(PruningMode::Constrained(Constraints {
max_blocks: Some(2),
max_mem: None,
}));
assert!(sdb.is_pruned(&H256::from_low_u64_be(0), 0));
assert!(sdb.is_pruned(&H256::from_low_u64_be(1), 1));
assert!(!sdb.is_pruned(&H256::from_low_u64_be(21), 2));
assert!(sdb.is_pruned(&H256::from_low_u64_be(22), 2));
assert!(db.data_eq(&make_db(&[1, 21, 3, 921, 922, 93, 94])));
}
// Re-opening a database written in ArchiveAll mode with a pruning
// configuration must be rejected.
#[test]
fn detects_incompatible_mode() {
let mut db = make_db(&[]);
let state_db = StateDb::new(PruningMode::ArchiveAll, false, &db).unwrap();
db.commit(
&state_db
.insert_block::<io::Error>(
&H256::from_low_u64_be(0),
0,
&H256::from_low_u64_be(0),
make_changeset(&[], &[]),
)
.unwrap(),
);
let new_mode = PruningMode::Constrained(Constraints { max_blocks: Some(2), max_mem: None });
let state_db: Result<StateDb<H256, H256>, _> = StateDb::new(new_mode, false, &db);
assert!(state_db.is_err());
}
}
|
use std::collections::HashMap;
use crate::agent::{AgentCheck, AgentService};
use crate::errors::Result;
use crate::request::{get, put};
use crate::{Client, QueryMeta, QueryOptions, WriteMeta, WriteOptions};
// Service weights for health-weighted balancing. Field names are PascalCase
// to mirror the Consul JSON wire format (no serde rename attributes in use).
#[serde(default)]
#[derive(Eq, Default, PartialEq, Serialize, Deserialize, Debug)]
pub struct Weights {
pub Passing: u32,
pub Warning: u32,
}
// A node entry as returned by the Consul catalog API; `#[serde(default)]`
// lets missing JSON fields fall back to their Default values.
#[serde(default)]
#[derive(Eq, Default, PartialEq, Serialize, Deserialize, Debug)]
pub struct Node {
pub ID: String,
pub Node: String,
pub Address: String,
pub Datacenter: String,
pub TaggedAddresses: HashMap<String, String>,
pub Meta: HashMap<String, String>,
pub CreateIndex: u64,
pub ModifyIndex: u64,
}
impl Node {
// Accessor for the node's unique identifier.
pub fn id(&self) -> &String {
&self.ID
}
}
// A flattened node+service row from `/v1/catalog/service/<name>`.
#[serde(default)]
#[derive(Eq, Default, PartialEq, Serialize, Deserialize, Debug)]
pub struct CatalogService {
pub ID: String,
pub Node: String,
pub Address: String,
pub Datacenter: String,
pub TaggedAddresses: HashMap<String, String>,
pub NodeMeta: HashMap<String, String>,
pub ServiceID: String,
pub ServiceName: String,
pub ServiceAddress: String,
pub ServiceTags: Vec<String>,
pub ServiceMeta: HashMap<String, String>,
pub ServicePort: u32,
pub ServiceWeights: Weights,
pub ServiceEnableTagOverride: bool,
pub CreateIndex: u64,
pub ModifyIndex: u64,
}
// A node plus every service registered on it (`/v1/catalog/node/<id>`).
// `Node` is an Option because Consul returns null for an unknown node.
#[serde(default)]
#[derive(Eq, Default, PartialEq, Serialize, Deserialize, Debug)]
pub struct CatalogNode {
pub Node: Option<Node>,
pub Services: HashMap<String, AgentService>,
}
// Request payload for catalog registration; `Service` and `Check` are
// optional so a bare node can be registered on its own.
#[serde(default)]
#[derive(Eq, Default, PartialEq, Serialize, Deserialize, Debug)]
pub struct CatalogRegistration {
pub ID: String,
pub Node: String,
pub Address: String,
pub TaggedAddresses: HashMap<String, String>,
pub NodeMeta: HashMap<String, String>,
pub Datacenter: String,
pub Service: Option<AgentService>,
pub Check: Option<AgentCheck>,
pub SkipNodeUpdate: bool,
}
// Request payload for catalog deregistration; leaving ServiceID/CheckID
// empty deregisters the whole node.
#[serde(default)]
#[derive(Eq, Default, PartialEq, Serialize, Deserialize, Debug)]
pub struct CatalogDeregistration {
pub Node: String,
pub Address: String,
pub Datacenter: String,
pub ServiceID: String,
pub CheckID: String,
}
// Client-side interface to Consul's catalog HTTP API. Write operations
// return WriteMeta, reads return QueryMeta (timing/index metadata).
pub trait Catalog {
fn register(
&self,
reg: &CatalogRegistration,
q: Option<&WriteOptions>,
) -> Result<((), WriteMeta)>;
fn deregister(
&self,
dereg: &CatalogDeregistration,
q: Option<&WriteOptions>,
) -> Result<((), WriteMeta)>;
fn datacenters(&self) -> Result<(Vec<String>, QueryMeta)>;
fn nodes(&self, q: Option<&QueryOptions>) -> Result<(Vec<Node>, QueryMeta)>;
fn node(&self, node_id:&str, q: Option<&QueryOptions>) -> Result<(CatalogNode, QueryMeta)>;
fn services(&self, q: Option<&QueryOptions>) -> Result<(HashMap<String, Vec<String>>, QueryMeta)>;
fn service(&self, service_id:&str, q: Option<&QueryOptions>) -> Result<(Vec<CatalogService>, QueryMeta)>;
}
impl Catalog for Client {
    /// Registers a node/service/check with the catalog.
    /// https://www.consul.io/api/catalog.html#register-entity
    fn register(
        &self,
        reg: &CatalogRegistration,
        q: Option<&WriteOptions>,
    ) -> Result<((), WriteMeta)> {
        // BUG FIX: this previously PUT to "/v1/session/create" (the session
        // endpoint), so catalog registrations never reached the catalog.
        put(
            "/v1/catalog/register",
            Some(reg),
            &self.config,
            HashMap::new(),
            q,
        )
    }
    /// https://www.consul.io/api/catalog.html#deregister-entity
    fn deregister(
        &self,
        dereg: &CatalogDeregistration,
        q: Option<&WriteOptions>,
    ) -> Result<((), WriteMeta)> {
        put(
            "/v1/catalog/deregister",
            Some(dereg),
            &self.config,
            HashMap::new(),
            q,
        )
    }
    /// https://www.consul.io/api/catalog.html#list-datacenters
    fn datacenters(&self) -> Result<(Vec<String>, QueryMeta)> {
        get(
            "/v1/catalog/datacenters",
            &self.config,
            HashMap::new(),
            None,
        )
    }
    /// https://www.consul.io/api/catalog.html#list-nodes
    fn nodes(&self, q: Option<&QueryOptions>) -> Result<(Vec<Node>, QueryMeta)> {
        get("/v1/catalog/nodes", &self.config, HashMap::new(), q)
    }
    /// Fetches a single node and its services.
    fn node(&self, node_id: &str, q: Option<&QueryOptions>) -> Result<(CatalogNode, QueryMeta)> {
        get(format!("/v1/catalog/node/{}", node_id).as_str(), &self.config, HashMap::new(), q)
    }
    /// Lists all known services and their tags.
    fn services(&self, q: Option<&QueryOptions>) -> Result<(HashMap<String, Vec<String>>, QueryMeta)> {
        get("/v1/catalog/services", &self.config, HashMap::new(), q)
    }
    /// Lists the nodes providing a given service.
    fn service(&self, service_id: &str, q: Option<&QueryOptions>) -> Result<(Vec<CatalogService>, QueryMeta)> {
        get(format!("/v1/catalog/service/{}", service_id).as_str(), &self.config, HashMap::new(), q)
    }
}
|
use std::io::{Error, ErrorKind};
use bytes::{Buf, BytesMut};
use rsocket_rust::frame::Frame;
use rsocket_rust::utils::{u24, Writeable};
use tokio_util::codec::{Decoder, Encoder};
// Tokio codec for RSocket frames carried with a 3-byte length prefix.
pub struct LengthBasedFrameCodec;
// Size of the length prefix (a u24) in bytes.
const LEN_BYTES: usize = 3;
impl Decoder for LengthBasedFrameCodec {
type Item = Frame;
type Error = Error;
// Decodes one length-prefixed frame. Returns Ok(None) until a complete
// frame (3-byte header + body) has been buffered.
fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
let actual = buf.len();
// Not even the length header is available yet.
if actual < LEN_BYTES {
return Ok(None);
}
// Peek the body length. u24::read does not consume the header — it is
// only advanced past explicitly below, once the full body is present.
let l = u24::read(buf).into();
if actual < LEN_BYTES + l {
return Ok(None);
}
buf.advance(LEN_BYTES);
// Split off exactly the body bytes and parse them as a frame.
let mut bb = buf.split_to(l);
match Frame::decode(&mut bb) {
Ok(v) => Ok(Some(v)),
// Map any frame parse failure onto a generic InvalidInput I/O error.
Err(_e) => Err(Error::from(ErrorKind::InvalidInput)),
}
}
}
impl Encoder<Frame> for LengthBasedFrameCodec {
    type Error = Error;

    /// Serializes `item` as a 3-byte length prefix followed by the frame body.
    fn encode(&mut self, item: Frame, buf: &mut BytesMut) -> Result<(), Self::Error> {
        let frame_len = item.len();
        // Reserve room for prefix and payload in a single allocation step.
        buf.reserve(LEN_BYTES + frame_len);
        u24::from(frame_len).write_to(buf);
        item.write_to(buf);
        Ok(())
    }
}
|
use std::{
collections::HashMap,
sync::{atomic::AtomicI64, Arc, Mutex},
};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use crate::{
model::Items,
repo::{IRepository, DEFAULT_LIMIT},
DoneItem, ItemId, OpenItem, Result,
};
// In-memory IRepository implementation: two mutex-guarded maps (open/done
// items keyed by id) plus an atomic counter for generating fresh ids.
pub struct InMemoryRepo {
open_items: Arc<Mutex<HashMap<ItemId, OpenItem>>>,
done_items: Arc<Mutex<HashMap<ItemId, DoneItem>>>,
// Next id to hand out; incremented atomically in add_open_item.
id: AtomicI64,
}
impl Default for InMemoryRepo {
    /// Builds an empty repository; the first generated item id will be 0.
    fn default() -> Self {
        Self {
            open_items: Arc::default(),
            done_items: Arc::default(),
            id: AtomicI64::new(0),
        }
    }
}
#[async_trait]
impl IRepository for InMemoryRepo {
// Returns paged open items, and paged done items only when requested.
async fn get_items(
&self,
offset: Option<usize>,
limit: Option<usize>,
show_done_items: bool,
) -> Result<Items> {
let open = self.open_items(offset, limit);
let done = if show_done_items {
Some(self.done_items(offset, limit))
} else {
None
};
Ok(Items { open, done })
}
// Inserts an open item, assigning a fresh id when the caller left it zero.
// An item with an existing id silently replaces any previous entry.
async fn add_open_item(&self, mut item: OpenItem) -> Result<()> {
if item.id.is_zero() {
item.id = ItemId(self.id.fetch_add(1, std::sync::atomic::Ordering::Relaxed));
}
self.open_items.lock().unwrap().insert(item.id, item);
Ok(())
}
// Moves an item from open to done, stamping the completion time.
// Unknown ids are a silent no-op.
async fn complete_item(&self, id: ItemId, now: DateTime<Utc>) -> Result<()> {
let item = self.open_items.lock().unwrap().remove(&id);
if let Some(item) = item {
let item = item.complete(now);
self.done_items.lock().unwrap().insert(item.id, item);
}
Ok(())
}
// Moves an item from done back to open. Unknown ids are a silent no-op.
async fn undo_item(&self, id: ItemId) -> Result<()> {
let item = self.done_items.lock().unwrap().remove(&id);
if let Some(item) = item {
let item = item.undo();
self.open_items.lock().unwrap().insert(item.id, item);
}
Ok(())
}
// NOTE(review): only `name` is copied from `item`; any other editable
// fields are ignored — confirm this is the intended edit semantics.
async fn edit_item(&self, id: ItemId, item: OpenItem) -> Result<()> {
let mut mutex_guard = self.open_items.lock().unwrap();
let existing = mutex_guard.get_mut(&id);
if let Some(existing) = existing {
existing.name = item.name;
}
Ok(())
}
}
impl InMemoryRepo {
    /// Snapshot of open items, newest `created_at` first, paged by offset/limit.
    fn open_items(&self, offset: Option<usize>, limit: Option<usize>) -> Vec<OpenItem> {
        let guard = self.open_items.lock().unwrap();
        let mut sorted: Vec<_> = guard.values().collect();
        sorted.sort_unstable_by_key(|item| item.created_at);
        sorted.reverse();
        sorted
            .into_iter()
            .skip(offset.unwrap_or(0))
            .take(limit.unwrap_or(DEFAULT_LIMIT))
            .map(Clone::clone)
            .collect()
    }

    /// Snapshot of done items, newest `created_at` first, paged by offset/limit.
    fn done_items(&self, offset: Option<usize>, limit: Option<usize>) -> Vec<DoneItem> {
        let guard = self.done_items.lock().unwrap();
        let mut sorted: Vec<_> = guard.values().collect();
        sorted.sort_unstable_by_key(|item| item.created_at);
        sorted.reverse();
        sorted
            .into_iter()
            .skip(offset.unwrap_or(0))
            .take(limit.unwrap_or(DEFAULT_LIMIT))
            .map(Clone::clone)
            .collect()
    }
}
|
#![no_std]
#![cfg_attr(test,no_main)]//enable no_main in test-mode, so lib.rs need to own a _start entry and a panic handler
#![feature(custom_test_frameworks)]
#![test_runner(crate::test_runner)]
#![reexport_test_harness_main = "test_main"]
#![feature(abi_x86_interrupt)]
#![feature(alloc_error_handler)]//feature gate for handler function when allocation error occur
use core::panic::PanicInfo;
extern crate alloc;
//mod which added here can be used for test
pub mod serial; //'pub mod' make module usable out of lib.rs.
pub mod vga_buffer;
pub mod interrupts;
pub mod gdt;
pub mod memory;
pub mod allocator;
// Idle forever: halt the CPU until the next interrupt instead of busy-looping.
pub fn hlt_loop()->! {
loop {
x86_64::instructions::hlt();
}
}
// One-time kernel initialization: IDT, GDT, then the PIC, and only then
// enable hardware interrupts — handlers must be installed before unmasking.
pub fn init() {
interrupts::init_idt();
gdt::init();
unsafe{ interrupts::PICS.lock().initialize()};
x86_64::instructions::interrupts::enable();
}
// Invoked when a heap allocation fails; there is no way to recover, so
// panic with the layout that could not be satisfied.
#[alloc_error_handler]
fn alloc_error_handler(layout: alloc::alloc::Layout)->!{
panic!("allocation error:{:?}",layout)
}
// Exit codes reported to QEMU via the isa-debug-exit device.
// NOTE(review): values appear chosen to be distinguishable from QEMU's own
// exit statuses — confirm against the device configuration.
#[derive(Debug,Clone,Copy,PartialEq,Eq)]
#[repr(u32)]
pub enum QemuExitCode {
Success = 0x10,
Failed = 0x11,
}
// Terminates the VM by writing the exit code to I/O port 0xf4 — presumably
// the iobase configured for isa-debug-exit in the QEMU invocation; verify.
pub fn exit_qemu(exit_code:QemuExitCode){
use x86_64::instructions::port::Port;
unsafe {
let mut port = Port::new(0xf4);
port.write(exit_code as u32);
}
}
///User-specified runner function
// Runs every collected test sequentially, reporting over the serial port,
// then exits QEMU with a success code.
pub fn test_runner(tests: &[&dyn Fn()]) {//'tests' are functions which annotated with #[test_case]
serial_println!("Running {} tests", tests.len());
for test in tests {
test();
}
exit_qemu(QemuExitCode::Success);
}
// Shared panic handler for test builds: log the failure over serial and exit
// QEMU with a failure code. hlt_loop() satisfies the `!` return type in case
// the exit write does not immediately terminate the VM.
pub fn test_panic_handler(info:&PanicInfo)->! {
serial_println!("[failed]\n");
serial_println!("Error: {}\n", info);
exit_qemu(QemuExitCode::Failed);
hlt_loop();
}
/// Entry point for `cargo xtest`
#[cfg(test)]
use bootloader::{entry_point,BootInfo};
#[cfg(test)]
entry_point!(test_kernel_main);
// Kernel entry when this library is built in test mode: initialize hardware,
// run the generated test harness, then idle.
#[cfg(test)]
fn test_kernel_main(_boot_info: &'static BootInfo) -> ! {
init();
test_main();
hlt_loop();
}
// Panic handler for the library's own test build; delegates to
// test_panic_handler.
#[cfg(test)]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
test_panic_handler(info)
}
|
// A classic C-like struct with named fields.
#[derive(Debug)]
struct Person {
name: String,
age: u8,
}
// A unit struct: no fields, zero size.
struct Unit;
// A tuple struct: fields are accessed by position (.0, .1).
struct Pair(i32, f32);
#[derive(Debug)]
struct Point {
x: f32,
y: f32,
}
// structs can be reused as fields of another struct
#[derive(Debug)]
struct Rectangle {
top_left: Point,
bottom_right: Point,
}
fn rect_area(rect: Rectangle) -> f32 {
let Rectangle {
bottom_right: Point { x: x1, y: y1 },
top_left: Point { x: x2, y: y2 },
} = rect;
(x1 - x2) * (y2 - y1)
}
/// Builds a `len` x `len` square whose bottom-left corner is `point`:
/// it extends `len` to the right and `len` upward.
fn square(point: Point, len: f32) -> Rectangle {
    let Point { x, y } = point;
    Rectangle {
        bottom_right: Point { x: x + len, y },
        top_left: Point { x, y: y + len },
    }
}
fn main() {
// Field-init shorthand: local names match the struct's field names.
let name = String::from("Maxim");
let age = 27;
let maxim = Person { name, age };
println!("{:?}", maxim);
let point: Point = Point { x: 10.3, y: 0.4 };
println!("point coordinates: ({}, {})", point.x, point.y);
// make a new point by using struct update syntax to use the fields of
// our other one
let bottom_right = Point { x: 5.2, ..point };
println!("second point: ({}, {})", bottom_right.x, bottom_right.y);
// destructure the point using a `let` binding
// (all fields are Copy, so `point` stays usable afterwards)
let Point { x: left_edge, y: top_edge } = point;
let _rectangle = Rectangle {
top_left: Point { x: left_edge, y: top_edge },
bottom_right: bottom_right,
};
let new_rect = Rectangle {
top_left: Point { x: 1.5, y: 3.5 },
bottom_right: Point { x: 4.5, y: 2.0 },
};
println!("new rect = {:?}", new_rect);
// rect_area takes the rectangle by value, so new_rect is consumed here.
println!("new_rect area = {}", rect_area(new_rect));
// Unit and tuple structs in action.
let _unit = Unit;
let pair = Pair(1, 0.1);
println!("pair contains {:?} {:?}", pair.0, pair.1);
// Tuple structs destructure positionally.
let Pair(integer, decimal) = pair;
println!("pair contains {:?} {:?}", integer, decimal);
println!("square {:?}", square(point, 1.0));
}
|
pub mod bio_types;
|
use anyhow::Result;
use futures::stream::StreamExt;
use rusoto_core::{ByteStream, Region};
use rusoto_s3::{GetObjectRequest, S3Client, S3};
use structopt::StructOpt;
use tokio;
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use std::path::PathBuf;
// CLI arguments: bucket + key identify the S3 object to fetch; path is the
// local destination file (positional argument).
#[derive(StructOpt, Debug)]
#[structopt(name = "s3_get_object")]
struct Options {
#[structopt(short, long)]
bucket: String,
#[structopt(short, long)]
key: String,
#[structopt(parse(from_os_str))]
path: PathBuf,
}
impl Options {
    /// Builds the S3 `GetObject` request for the configured bucket and key.
    fn create_request(&self) -> GetObjectRequest {
        GetObjectRequest {
            bucket: self.bucket.to_owned(),
            key: self.key.to_owned(),
            // GetObjectRequest implements Default (all optional fields None),
            // so spelling out every None by hand is unnecessary and brittle
            // against new fields added by the SDK.
            ..Default::default()
        }
    }
}
/// Streams `body` into a newly created file at `path`.
///
/// # Errors
/// Fails if the file cannot be created, a chunk cannot be read from the
/// stream, or a write/flush fails.
async fn download(path: &PathBuf, body: &mut ByteStream) -> Result<()> {
    let mut file = File::create(path).await?;
    while let Some(chunk) = body.next().await {
        // BUG FIX: the old `while let Some(Ok(bytes))` silently stopped on a
        // stream error, reporting success with a truncated file. Propagate
        // the error instead.
        file.write_all(chunk?.as_ref()).await?;
    }
    // Make sure buffered bytes hit the OS before we report success.
    file.flush().await?;
    Ok(())
}
#[tokio::main]
async fn main() -> Result<()> {
let options = Options::from_args();
// Region resolution follows the usual AWS environment/profile chain.
let region = Region::default();
let client = S3Client::new(region);
let request = options.create_request();
let response = client.get_object(request).await?;
// Only write the destination file when the response actually carries a body.
if let Some(mut body) = response.body {
download(&options.path, &mut body).await?;
}
Ok(())
}
|
use flux::semantic::{
nodes::{Expression, Symbol},
walk::{self, Node, Visitor},
};
use lspower::lsp;
mod completion;
mod functions;
mod lint;
mod symbols;
pub use completion::{
FunctionFinderVisitor, ObjectFunctionFinderVisitor,
};
pub use lint::{
ContribDiagnosticVisitor, ExperimentalDiagnosticVisitor,
InfluxDBIdentifierDiagnosticVisitor,
};
pub use symbols::SymbolsVisitor;
/// True when `pos` lies within `node`'s source range (inclusive at both ends).
fn contains_position(node: Node<'_>, pos: lsp::Position) -> bool {
    // flux::semantic::nodes::Package is walkable, but when multiple ast files
    // are joined, Package appears to have a start/end location of 0:0, so it
    // can never meaningfully contain a position.
    if matches!(node, Node::Package(_)) {
        return false;
    }
    let range: lsp::Range = node.loc().clone().into();
    let at_or_after_start = pos.line > range.start.line
        || (pos.line == range.start.line && pos.character >= range.start.character);
    let at_or_before_end = pos.line < range.end.line
        || (pos.line == range.end.line && pos.character <= range.end.character);
    at_or_after_start && at_or_before_end
}
/// Locates the node at a given position, remembering the chain of enclosing
/// nodes along the way.
#[derive(Debug)]
pub struct NodeFinderVisitor<'a> {
    pub node: Option<Node<'a>>,
    pub position: lsp::Position,
    pub path: Vec<Node<'a>>,
}

impl<'a> Visitor<'a> for NodeFinderVisitor<'a> {
    /// Records every node whose range covers `position`. Since matches are
    /// overwritten as the walk descends, `node` ends up holding the last
    /// (innermost) match and `path` the sequence of enclosing matches.
    fn visit(&mut self, node: Node<'a>) -> bool {
        if contains_position(node, self.position) {
            self.path.push(node);
            self.node = Some(node);
        }
        true
    }
}

impl<'a> NodeFinderVisitor<'a> {
    /// Creates a finder for the node located at `pos`.
    pub fn new(pos: lsp::Position) -> NodeFinderVisitor<'a> {
        NodeFinderVisitor { node: None, position: pos, path: Vec::new() }
    }
}
// Collects every identifier node whose name matches `name`.
pub struct IdentFinderVisitor<'a> {
pub name: Symbol,
pub identifiers: Vec<walk::Node<'a>>,
}
impl<'a> Visitor<'a> for IdentFinderVisitor<'a> {
fn visit(&mut self, node: walk::Node<'a>) -> bool {
match node {
// For `obj.member`, only descend when the object itself is the
// identifier we are after; returning false prunes the subtree so
// the member name is never (mis)counted as a match.
walk::Node::MemberExpr(m) => {
if let Expression::Identifier(i) = &m.object {
if i.name == self.name {
return true;
}
}
return false;
}
walk::Node::Identifier(n) => {
if n.name == self.name {
self.identifiers.push(node);
}
}
walk::Node::IdentifierExpr(n) => {
if n.name == self.name {
self.identifiers.push(node);
}
}
_ => {}
}
true
}
}
impl<'a> IdentFinderVisitor<'a> {
// Creates a visitor that collects identifiers named `name`.
pub fn new(name: Symbol) -> IdentFinderVisitor<'a> {
IdentFinderVisitor {
name,
identifiers: vec![],
}
}
}
// Finds the node where the symbol `name` is defined (variable assignment,
// builtin statement, or function parameter).
pub struct DefinitionFinderVisitor<'a> {
pub name: Symbol,
pub node: Option<Node<'a>>,
}
impl<'a> Visitor<'a> for DefinitionFinderVisitor<'a> {
// Returning false once a definition is found stops descending further.
fn visit(&mut self, node: Node<'a>) -> bool {
match node {
walk::Node::VariableAssgn(v) => {
if v.id.name == self.name {
self.node = Some(node);
return false;
}
true
}
// For builtins, record the identifier itself rather than the statement.
walk::Node::BuiltinStmt(v) => {
if v.id.name == self.name {
self.node = Some(walk::Node::Identifier(&v.id));
return false;
}
true
}
walk::Node::FunctionParameter(param) => {
if param.key.name == self.name {
self.node = Some(node);
return false;
}
true
}
_ => true,
}
}
}
impl<'a> DefinitionFinderVisitor<'a> {
// Creates a visitor that searches for the definition of `name`.
pub fn new(name: Symbol) -> DefinitionFinderVisitor<'a> {
DefinitionFinderVisitor { name, node: None }
}
}
/// Collects every `Block` node encountered during the walk.
#[derive(Default)]
pub struct FoldFinderVisitor<'a> {
    pub nodes: Vec<Node<'a>>,
}

impl<'a> Visitor<'a> for FoldFinderVisitor<'a> {
    fn visit(&mut self, node: Node<'a>) -> bool {
        match node {
            Node::Block(_) => self.nodes.push(node),
            _ => {}
        }
        true
    }
}
/// An import discovered in the source: its package path and the local name
/// it is bound to.
#[derive(Clone, Debug)]
pub struct Import {
    pub path: String,
    pub name: String,
}

/// Walks the semantic graph and records every import declaration.
#[derive(Default)]
pub struct ImportFinderVisitor {
    pub imports: Vec<Import>,
}

impl<'a> Visitor<'a> for ImportFinderVisitor {
    fn visit(&mut self, node: Node<'a>) -> bool {
        if let Node::ImportDeclaration(import) = node {
            // The local name is the alias when one is given, otherwise the
            // last segment of the import path.
            // XXX: rockstar (15 Jul 2022) - This duplicates effort found in `lang`.
            let name = match &import.alias {
                Some(alias) => alias.name.to_string(),
                None => import
                    .path
                    .value
                    .as_str()
                    .rsplit('/')
                    .next()
                    .expect("Invalid package path/name supplied")
                    .to_string(),
            };
            self.imports.push(Import { path: import.path.value.clone(), name });
        }
        true
    }
}
/// Finds the source range of the file's package clause, if any.
#[derive(Default)]
pub struct PackageNodeFinderVisitor {
    pub location: Option<lsp::Range>,
}

impl<'a> Visitor<'a> for PackageNodeFinderVisitor {
    fn visit(&mut self, node: Node<'a>) -> bool {
        match node {
            Node::PackageClause(n) => {
                self.location = Some(n.loc.clone().into());
                // Found it — no need to walk any further.
                false
            }
            _ => true,
        }
    }
}
|
use core::fmt;
// Scratch buffer for handing strings to the C FFI as NUL-terminated ASCII.
// Not reentrant: every to_cstring call overwrites it.
static mut STR_BUF: [u8; 512] = [0;512];

/// Copies `s` into `STR_BUF`, NUL-terminates it, and returns a pointer to it.
///
/// # Safety
/// Single static buffer: not thread-safe, and the returned pointer is only
/// valid until the next call.
///
/// # Panics
/// Panics if `s` contains non-ASCII bytes or does not fit in the buffer
/// (previously an over-long string produced a confusing out-of-bounds index
/// panic mid-copy; now the precondition is checked up front).
unsafe fn to_cstring(s: &str) -> *const u8 {
    let bytes = s.as_bytes();
    // Strictly less-than: one byte is reserved for the trailing NUL.
    assert!(bytes.len() < STR_BUF.len(), "string too long for C-string buffer");
    // Same restriction the old per-byte `assert!(c < 128)` enforced.
    assert!(s.is_ascii(), "only ASCII strings may cross the FFI boundary");
    STR_BUF[..bytes.len()].copy_from_slice(bytes);
    STR_BUF[bytes.len()] = 0;
    STR_BUF.as_ptr()
}
// C FFI surface of the underlying filesystem driver. The handle-less
// signatures suggest the driver tracks a single implicit "current file";
// the Rust wrappers below enforce that by borrowing `Fs` mutably.
extern {
fn InitFS() -> bool;
fn DeinitFS();
// Size of the currently open file.
fn FileGetSize() -> usize;
// Open/create return false on failure; `path` must be NUL-terminated.
fn FileOpen(path: *const u8) -> bool;
fn FileCreate(path: *const u8, truncate: bool) -> bool;
// Transfer `size` bytes at absolute offset `foffset`; return bytes moved.
fn FileRead(buf: *mut u8, size: usize, foffset: usize) -> usize;
fn FileWrite(buf: *const u8, size: usize, foffset: usize) -> usize;
fn FileClose();
}
// Guard type for the FFI filesystem: constructing it (via `Fs::init`)
// initializes the driver; dropping it deinitializes. The private unit field
// prevents construction outside this module.
pub struct Fs {
_private: ()
}
impl Fs {
    /// Initializes the filesystem driver; panics if initialization fails.
    pub fn init() -> Self {
        assert!(unsafe { InitFS() });
        Self {_private: ()}
    }
    /// Creates (truncating) `file` and returns a handle to it.
    /// Panics if the driver reports failure.
    pub fn _create(&mut self, file: &str) -> File {
        let filename = unsafe { to_cstring(file) };
        assert!(unsafe { FileCreate(filename, true) });
        self.handle()
    }
    /// Opens the existing `file`. Panics if the driver reports failure.
    pub fn open(&mut self, file: &str) -> File {
        let filename = unsafe { to_cstring(file) };
        assert!(unsafe { FileOpen(filename) });
        self.handle()
    }
    // Shared constructor for a fresh handle at offset 0 with an empty write
    // buffer (previously duplicated in `_create` and `open`).
    fn handle(&mut self) -> File {
        File {
            _fs: self,
            offset: 0,
            buf: [0; 2048],
            buf_offset: 0
        }
    }
}
impl Drop for Fs {
fn drop(&mut self) {
// Tear the driver down when the guard goes out of scope.
unsafe {
DeinitFS();
}
}
}
// Handle to the driver's currently open file. Writes are staged in `buf`
// (valid up to `buf_offset`); `offset` is the absolute file position for the
// next read/flush. The mutable borrow of `Fs` ensures at most one open file.
pub struct File<'a> {
_fs: &'a mut Fs,
offset: usize,
buf: [u8; 2048],
buf_offset: usize,
}
impl<'a> File<'a> {
pub fn _seek(&mut self, offset: usize) {
self.flush();
self.offset = offset;
}
pub fn write(&mut self, mut bytes: &[u8]) {
while !bytes.is_empty() {
let space = self.buf.len() - self.buf_offset;
let write_len = space.min(bytes.len());
self.buf[self.buf_offset .. self.buf_offset + write_len]
.copy_from_slice(&bytes[..write_len]);
self.buf_offset += write_len;
bytes = &bytes[write_len..];
if self.buf_offset == self.buf.len() {
self.flush();
}
}
}
pub fn read(&mut self, dst: &mut [u8]) -> usize {
let amount = unsafe { FileRead(dst.as_mut_ptr(), dst.len(), self.offset) };
unsafe {
let ptr = dst.as_mut_ptr();
log!("Reading {} of {} bytes from {:#X}: {:02X} {:02X} {:02X} {:02X}",
amount, dst.len(), self.offset, *ptr, *ptr.add(1), *ptr.add(2), *ptr.add(3));
}
self.offset += amount;
amount
}
pub fn flush(&mut self) {
if self.buf_offset == 0 {
return
}
unsafe {
self.offset += FileWrite(self.buf.as_ptr(), self.buf.len(), self.offset);
}
self.buf_offset = 0;
}
pub fn size(&self) -> usize {
unsafe { FileGetSize() }
}
}
impl<'a> fmt::Write for File<'a> {
    /// Routes formatted text through the buffered `write` path; always Ok.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        let bytes = s.as_bytes();
        self.write(bytes);
        Ok(())
    }
}
impl<'a> Drop for File<'a> {
fn drop(&mut self) {
// Push out any buffered bytes before releasing the driver's file slot.
unsafe {
self.flush();
FileClose();
}
}
}
|
use rust_oauth2_study::{
config::Config, db_conn::DbConn, handlers::shopify_handler, routes::shopify_route,
};
use std::net::SocketAddr;
use std::sync::Arc;
use warp::Filter;
pub mod api;
#[tokio::main]
async fn main() {
    let config = Arc::new(Config::new(false));
    let db_conn = Arc::new(DbConn::new(&config.db_path));
    let client = Arc::new(reqwest::Client::new());
    // The macro takes owned Arcs, so these clones are required.
    let shopify =
        shopify!(config.clone(), db_conn.clone(), client.clone()).with(warp::log("shopify"));
    let end = shopify;
    // Idiom fix: `Arc<Config>` derefs to `Config`, so the previous
    // `config.clone().field` calls cloned the Arc on every access for no
    // benefit — plain field access suffices.
    let socket_address = config
        .app_addr
        .parse::<SocketAddr>()
        .expect("Could not parse Addr");
    println!("Listening at {}", &config.app_addr);
    if config.tls {
        println!("TLS Enabled!");
        // cert/key paths are required when TLS is enabled; unwrap documents
        // that invariant loudly at startup.
        warp::serve(end)
            .tls()
            .cert_path(config.cert_path.as_ref().unwrap())
            .key_path(config.key_path.as_ref().unwrap())
            .run(socket_address)
            .await;
    } else {
        warp::serve(end).run(socket_address).await;
    }
}
|
use std::borrow::Cow;
use std::rc::Rc;
use std::rc::Weak;
use std::sync::Arc;
fn main() {
fn print_int(i: i32) {
println!("{}", i);
}
{
// ----- Box (unique_ptr) -----
let my_one = 1;
print_int(my_one);
let my_two = Box::new(2);
// Explicit deref to pass the boxed value by value.
print_int(*my_two);
}
fn print_int_ref(i: &i32) {
println!("{}", i);
}
{
// ----- Deref Coercion -----
let my_one = 1;
print_int_ref(&my_one);
let my_two = Box::new(2);
// &Box<i32> coerces to &i32 automatically via Deref.
print_int_ref(&my_two);
}
{
// ----- Drop (~) -----
struct Beret {};
impl Drop for Beret {
fn drop(&mut self) {
println!("Dropped");
}
};
// Bound to `_`, so the temporary is dropped immediately —
// "Dropped" prints before "Still alive".
let _ = Beret {};
println!("Still alive");
}
{
// ----- Early drop -----
struct Beret {};
//impl Copy for Beret {};
impl Drop for Beret {
fn drop(&mut self) {
println!("Dropped");
}
};
let b = Beret {};
// std::mem::drop moves the value, forcing destruction before scope end.
drop(b);
println!("Alive?");
}
{
// ----- Unsized type - won't compile -----
/*
fn foo(seq: [i32]) {
seq.iter().for_each(|v| println!("{}", v));
}
let sequence = [32, 12, 56];
foo(sequence);
*/
}
{
// ----- Unsized type in-a-box -----
// Box<[i32]> is a fat pointer (data + length), so the unsized slice works.
fn foo(seq: Box<[i32]>) {
seq.iter().for_each(|v| println!("{}", v));
}
let sequence = [32, 12, 56];
foo(Box::new(sequence));
}
{
// ----- Reference Counted (shared_ptr) -----
struct Beret {};
impl Drop for Beret {
fn drop(&mut self) {
println!("Dropped");
}
};
let b1 = Rc::new(Beret {});
// Rc::clone and .clone() are equivalent: both just bump the refcount.
let b2 = Rc::clone(&b1);
let _b3 = b2.clone();
// Prints 3: b1, b2, _b3 all share ownership.
println!("{}", Rc::strong_count(&b1));
}
{
// ----- Asynchronous Reference Counted (?) -----
// Arc = atomically reference counted, safe to share across threads.
struct Beret {};
impl Drop for Beret {
fn drop(&mut self) {
println!("A Dropped");
}
};
let b1 = Arc::new(Beret {});
let b2 = Arc::clone(&b1);
let _b3 = b2.clone();
println!("{}", Arc::strong_count(&b1));
}
{
// ----- Weak (weak_ptr) -----
struct Beret {};
impl Drop for Beret {
fn drop(&mut self) {
println!("Dropped");
}
};
let b3: Weak<Beret>;
{
let b1 = Rc::new(Beret {});
let _b2 = Rc::clone(&b1);
// A weak handle does not keep the value alive.
b3 = Rc::downgrade(&b1);
println!("{}", Rc::strong_count(&b1));
// Inside the scope the strong refs still exist: upgrade succeeds.
println!("{}", b3.upgrade().is_some());
}
// All strong refs are gone: upgrade now yields None.
println!("{}", b3.upgrade().is_some());
}
{
// ----- Cow (?) -----
struct Beret {
pub i: i32,
};
impl Copy for Beret {}
impl Clone for Beret {
fn clone(&self) -> Beret {
println!("Cloning...");
*self
}
}
let b1 = Beret { i: 1 };
let mut b2 = Cow::Borrowed(&b1);
// First mutable access clones the borrowed value (clone-on-write).
b2.to_mut().i = 3;
}
}
|
use std::fs;
use std::path::Path;
use std::process::Command;
/// Recursively emits `cargo:rerun-if-changed` for every `.ts` file under
/// `path`. Best-effort: unreadable directories and entries are skipped.
fn visit_dir(path: &Path) {
    let entries = match fs::read_dir(path) {
        Ok(entries) => entries,
        Err(_) => return,
    };
    for entry in entries.flatten() {
        let entry_path = entry.path();
        match entry.file_type() {
            Ok(ty) if ty.is_dir() => visit_dir(&entry_path),
            Ok(ty) if ty.is_file() => {
                if entry_path.extension().map_or(false, |ext| ext == "ts") {
                    println!("cargo:rerun-if-changed={}", entry_path.to_string_lossy());
                }
            }
            _ => {}
        }
    }
}
// On Windows npm installs `tsc` as a .cmd shim, which must be named
// explicitly when spawned via CreateProcess.
#[cfg(target_os = "windows")]
fn get_typescript_command_name() -> &'static str {
"tsc.cmd"
}
// Everywhere else the plain binary name resolves through PATH.
#[cfg(not(target_os = "windows"))]
fn get_typescript_command_name() -> &'static str {
"tsc"
}
/// Build script: compiles the TypeScript sources and registers rebuild triggers.
fn main() {
    let status = Command::new(get_typescript_command_name())
        .status()
        .expect("failed to spawn the TypeScript compiler (is `tsc` on PATH?)");
    // BUG FIX: the exit status was previously ignored, so a TypeScript
    // compile error still produced a "successful" cargo build with stale JS.
    assert!(status.success(), "TypeScript compilation failed: {}", status);
    visit_dir(Path::new("./public-ts"));
    println!("cargo:rerun-if-changed=./tsconfig.json");
}
|
// Opaque handle identifying a single transport connection.
pub type ConnectionId = String;
use crate::connection_error::{ConnectionError, ConnectionResult};
// Flag returned by `poll` indicating whether any work was performed.
pub type DidWork = bool;
// Events surfaced by `poll`, each tagged with the connection it concerns.
#[derive(Debug, PartialEq, Clone)]
pub enum ConnectionEvent {
ConnectionError(ConnectionId, ConnectionError),
Connect(ConnectionId),
Message(ConnectionId, Vec<u8>),
Close(ConnectionId),
}
// Abstract transport: connect/close manage connections, `poll` drives I/O
// and yields events, `send` delivers one payload to many connections.
pub trait Connection {
fn connect(&mut self, uri: &str) -> ConnectionResult<ConnectionId>;
fn close(&mut self, id: ConnectionId) -> ConnectionResult<()>;
fn poll(&mut self) -> ConnectionResult<(DidWork, Vec<ConnectionEvent>)>;
fn send(&mut self, id_list: Vec<ConnectionId>, payload: Vec<u8>) -> ConnectionResult<()>;
}
|
#[cfg(test)]
#[path = "../../../tests/unit/construction/heuristics/selectors_test.rs"]
mod selectors_test;
use crate::construction::heuristics::*;
use crate::models::problem::Job;
use crate::utils::{map_reduce, parallel_collect, Either, Noise};
use rand::prelude::*;
/// On each insertion step, selects a list of routes where jobs can be inserted.
/// It is up to implementation to decide whether list consists of all possible routes or just some subset.
pub trait RouteSelector {
/// Returns routes for job insertion.
// NOTE: takes `ctx` mutably so implementations may reorder/mutate solution
// state (e.g. shuffle routes) while selecting.
fn select<'a>(&'a self, ctx: &'a mut InsertionContext, jobs: &[Job])
-> Box<dyn Iterator<Item = RouteContext> + 'a>;
}
/// Returns a list of all possible routes for insertion.
// Idiom fix: a field-less struct gets its Default for free via derive,
// replacing the hand-written `impl Default`.
#[derive(Default)]
pub struct AllRouteSelector {}
impl RouteSelector for AllRouteSelector {
fn select<'a>(
&'a self,
ctx: &'a mut InsertionContext,
_jobs: &[Job],
) -> Box<dyn Iterator<Item = RouteContext> + 'a> {
// Randomize existing route order, then also offer a fresh route from the
// registry (presumably for a not-yet-used actor — confirm `next()` semantics).
ctx.solution.routes.shuffle(&mut ctx.environment.random.get_rng());
Box::new(ctx.solution.routes.iter().cloned().chain(ctx.solution.registry.next()))
}
}
/// On each insertion step, selects a list of jobs to be inserted.
/// It is up to implementation to decide whether list consists of all jobs or just some subset.
pub trait JobSelector {
/// Returns a portion of all jobs.
// `ctx` is mutable for the same reason as in RouteSelector: selection may
// reorder the job collection in place.
fn select<'a>(&'a self, ctx: &'a mut InsertionContext) -> Box<dyn Iterator<Item = Job> + 'a>;
}
/// Returns a list of all jobs to be inserted.
// Idiom fix: derive Default instead of the hand-written empty impl.
#[derive(Default)]
pub struct AllJobSelector {}
impl JobSelector for AllJobSelector {
fn select<'a>(&'a self, ctx: &'a mut InsertionContext) -> Box<dyn Iterator<Item = Job> + 'a> {
// Shuffle all required (still uninserted) jobs and yield them all.
ctx.solution.required.shuffle(&mut ctx.environment.random.get_rng());
Box::new(ctx.solution.required.iter().cloned())
}
}
/// Evaluates insertion.
// `result_selector` decides, between two candidate insertion results, which
// one to keep while folding over routes/jobs.
pub trait InsertionEvaluator {
/// Evaluates insertion of a single job into given collection of routes.
fn evaluate_job(
&self,
ctx: &InsertionContext,
job: &Job,
routes: &[RouteContext],
result_selector: &(dyn ResultSelector + Send + Sync),
) -> InsertionResult;
/// Evaluates insertion of multiple jobs into given route.
fn evaluate_route(
&self,
ctx: &InsertionContext,
route: &RouteContext,
jobs: &[Job],
result_selector: &(dyn ResultSelector + Send + Sync),
) -> InsertionResult;
/// Evaluates insertion of a job collection into given collection of routes.
fn evaluate_all(
&self,
ctx: &InsertionContext,
jobs: &[Job],
routes: &[RouteContext],
result_selector: &(dyn ResultSelector + Send + Sync),
) -> InsertionResult;
}
/// Evaluates job insertion in routes at given position.
pub struct PositionInsertionEvaluator {
    // Constrains where in a route a job may be inserted (e.g. anywhere, or only at ends).
    insertion_position: InsertionPosition,
}
impl Default for PositionInsertionEvaluator {
    // Default allows insertion at any position.
    fn default() -> Self {
        Self::new(InsertionPosition::Any)
    }
}
impl PositionInsertionEvaluator {
    /// Creates a new instance of `PositionInsertionEvaluator`.
    pub fn new(insertion_position: InsertionPosition) -> Self {
        Self { insertion_position }
    }
    /// Evaluates all jobs and routes, collecting one result per job (or per route,
    /// depending on the randomly chosen folding strategy).
    pub(crate) fn evaluate_and_collect_all(
        &self,
        ctx: &InsertionContext,
        jobs: &[Job],
        routes: &[RouteContext],
        result_selector: &(dyn ResultSelector + Send + Sync),
    ) -> Vec<InsertionResult> {
        if Self::is_fold_jobs(ctx) {
            // `jobs`/`routes` are already slice references — pass them directly
            // instead of taking an extra `&` (clippy::needless_borrow).
            parallel_collect(jobs, |job| self.evaluate_job(ctx, job, routes, result_selector))
        } else {
            parallel_collect(routes, |route| self.evaluate_route(ctx, route, jobs, result_selector))
        }
    }
    /// Coin flip deciding whether to fold over jobs (true) or routes (false).
    fn is_fold_jobs(ctx: &InsertionContext) -> bool {
        // NOTE can be performance beneficial to use concrete strategy depending on jobs/routes ratio,
        // but this approach brings better exploration results
        ctx.environment.random.is_head_not_tails()
    }
}
impl InsertionEvaluator for PositionInsertionEvaluator {
    fn evaluate_job(
        &self,
        ctx: &InsertionContext,
        job: &Job,
        routes: &[RouteContext],
        result_selector: &(dyn ResultSelector + Send + Sync),
    ) -> InsertionResult {
        // Fold over routes, threading the best-so-far result (`acc`) into each
        // evaluation so it can prune/compare against the current candidate.
        routes.iter().fold(InsertionResult::make_failure(), |acc, route_ctx| {
            evaluate_job_insertion_in_route(&ctx, &route_ctx, job, self.insertion_position, acc, result_selector)
        })
    }
    fn evaluate_route(
        &self,
        ctx: &InsertionContext,
        route: &RouteContext,
        jobs: &[Job],
        result_selector: &(dyn ResultSelector + Send + Sync),
    ) -> InsertionResult {
        // Symmetric to evaluate_job: fold over jobs for a single fixed route.
        jobs.iter().fold(InsertionResult::make_failure(), |acc, job| {
            evaluate_job_insertion_in_route(&ctx, &route, job, self.insertion_position, acc, result_selector)
        })
    }
    fn evaluate_all(
        &self,
        ctx: &InsertionContext,
        jobs: &[Job],
        routes: &[RouteContext],
        result_selector: &(dyn ResultSelector + Send + Sync),
    ) -> InsertionResult {
        // Strategy is chosen at random per call (see is_fold_jobs); both branches
        // map in parallel and reduce to a single winner via the result selector.
        if Self::is_fold_jobs(ctx) {
            map_reduce(
                jobs,
                |job| self.evaluate_job(ctx, job, routes, result_selector),
                InsertionResult::make_failure,
                |a, b| result_selector.select_insertion(&ctx, a, b),
            )
        } else {
            map_reduce(
                routes,
                |route| self.evaluate_route(ctx, route, jobs, result_selector),
                InsertionResult::make_failure,
                |a, b| result_selector.select_insertion(&ctx, a, b),
            )
        }
    }
}
/// Insertion result selector.
pub trait ResultSelector {
    /// Selects one insertion result from two to promote as best.
    fn select_insertion(
        &self,
        ctx: &InsertionContext,
        left: InsertionResult,
        right: InsertionResult,
    ) -> InsertionResult;
    /// Selects the cheaper of two raw insertion costs for the given route.
    /// Default implementation simply prefers the strictly smaller cost (ties go right).
    fn select_cost(&self, _route_ctx: &RouteContext, left: f64, right: f64) -> Either {
        if left < right {
            Either::Left
        } else {
            Either::Right
        }
    }
}
/// Selects best result.
///
/// Stateless; `Default` is derived instead of hand-written since the struct has no fields.
#[derive(Default)]
pub struct BestResultSelector {}
impl ResultSelector for BestResultSelector {
    // Pure cost comparison: delegate to the canonical "best of two" helper.
    fn select_insertion(&self, _: &InsertionContext, left: InsertionResult, right: InsertionResult) -> InsertionResult {
        InsertionResult::choose_best_result(left, right)
    }
}
/// Selects results with noise.
///
/// Adds random noise to costs before comparing, trading strict greediness
/// for extra exploration.
pub struct NoiseResultSelector {
    noise: Noise,
}
impl NoiseResultSelector {
    /// Creates a new instance of `NoiseResultSelector`.
    pub fn new(noise: Noise) -> Self {
        Self { noise }
    }
}
impl ResultSelector for NoiseResultSelector {
    fn select_insertion(&self, _: &InsertionContext, left: InsertionResult, right: InsertionResult) -> InsertionResult {
        match (&left, &right) {
            // A success always beats a failure, no noise involved.
            (InsertionResult::Success(_), InsertionResult::Failure(_)) => left,
            (InsertionResult::Failure(_), InsertionResult::Success(_)) => right,
            // Two successes: perturb both costs with noise before comparing,
            // so a slightly worse candidate can occasionally win.
            (InsertionResult::Success(left_success), InsertionResult::Success(right_success)) => {
                let left_cost = self.noise.add(left_success.cost);
                let right_cost = self.noise.add(right_success.cost);
                if left_cost < right_cost {
                    left
                } else {
                    right
                }
            }
            // Both failures: arbitrarily keep the right one.
            _ => right,
        }
    }
    fn select_cost(&self, _route_ctx: &RouteContext, left: f64, right: f64) -> Either {
        // Same noise perturbation applied to raw cost comparison (ties go right).
        let left = self.noise.add(left);
        let right = self.noise.add(right);
        if left < right {
            Either::Left
        } else {
            Either::Right
        }
    }
}
|
//This module only handles the PortAudio bindings.
//NOTE: the audio-stream plumbing should be made generic:
//essentially we just need to hand it an object that holds
//the data reference plus an implementation the callback
//can invoke to perform some DSP.
use std::sync::mpsc;
extern crate portaudio;
use portaudio::{
PortAudio,
Stream,
NonBlocking,
Input,
StreamParameters,
InputStreamSettings,
InputStreamCallbackArgs,
Continue,
};
pub type PortAudioStream = Stream<NonBlocking, Input<f32>>;
use crate::data_stream::audio_stream as local_mod;
use local_mod::common::{
SAMPLE_RATE,
INTERLEAVED,
BUFF_SIZE,
CHANNELS,
AudioStream
};
use local_mod::common::Package;
use local_mod::common::InputHandler;
use local_mod::common::ADCResolution;
use local_mod::common::AudioSampleStream;
use local_mod::common::Process;
type StreamType = AudioStream<AudioSampleStream>;
/// Bundles the running PortAudio input stream with the channel endpoint
/// on which processed audio packets are delivered.
pub struct StreamInterface<'a> {
    // Receives references to processed stream data from the audio callback.
    pub receiver : mpsc::Receiver<&'a StreamType>,
    // The underlying non-blocking PortAudio input stream.
    pub stream : PortAudioStream
}
/// Initializes PortAudio on the default input device and registers a DSP
/// callback; returns the stream plus a receiver for processed packets.
///
/// NOTE(review): the callback builds a fresh `AudioStream` per invocation and
/// sends a *reference* to that local through the channel — the `'a` lifetime
/// looks unsound/unconstrained here; confirm how this compiles and whether the
/// handler should live outside the closure instead.
pub fn startup<'a>()->Result<StreamInterface<'a>, portaudio::Error> {
    let pa = PortAudio::new().expect("Unable to init portaudio");
    let def_input = pa.default_input_device().expect("Unable to get default device");
    let input_info = pa.device_info(def_input).expect("Unable to get device info");
    let latency = input_info.default_low_input_latency;
    // Set parameters for the stream settings.
    // We pass which mic should be used, how many channels are used,
    // whether all the values of all the channels should be passed in a
    // single audiobuffer and the latency that should be considered
    let input_params = StreamParameters::<ADCResolution>::new(def_input, CHANNELS as i32, INTERLEAVED, latency);
    pa.is_input_format_supported(input_params, SAMPLE_RATE)?;
    // Settings for an inputstream.
    // Here we pass the stream parameters we set before,
    // the sample rate of the mic and the amount values we want
    let settings = InputStreamSettings::new(input_params, SAMPLE_RATE, BUFF_SIZE as u32);
    // This is a lambda which I want called with the samples
    let (receiver, callback) = {
        let (sender, receiver) = mpsc::channel::<&'a StreamType>();
        (receiver, move |InputStreamCallbackArgs { buffer : mut data, .. }| {
            // Per-callback pipeline: ingest raw samples, run DSP, then package;
            // only fully packaged buffers are forwarded to the receiver.
            let mut stream_handler : AudioStream<AudioSampleStream> = AudioStream::default();
            let handle_res = stream_handler.handle_input(&mut data).unwrap();
            let processed_buffer = stream_handler.process(handle_res.0, handle_res.1).unwrap();
            if stream_handler.package(processed_buffer).unwrap() {
                // Send errors are ignored: the receiver may have been dropped.
                sender.send(&stream_handler).ok();
            }
            Continue
        })
    };
    let startup_res = StreamInterface {
        receiver : receiver,
        // Registers the callback with PortAudio
        stream : pa.open_non_blocking_stream(settings, callback)?
    };
    Ok(startup_res)
}
|
use libusb_sys as ffi;
use std::str;
use std::ffi::CStr;
/// Prints the linked libusb library's version string to stdout.
fn main() {
    // SAFETY: libusb_get_version returns a pointer to a static struct that
    // lives for the duration of the program; reading its fields is sound.
    let version = unsafe { ffi::libusb_get_version() };
    let (major, minor, micro, nano) = unsafe {
        ((*version).major, (*version).minor, (*version).micro, (*version).nano)
    };
    // Both strings are NUL-terminated C strings; fall back to "" on bad UTF-8.
    let rc = unsafe { CStr::from_ptr((*version).rc) }.to_str().unwrap_or("");
    let describe = unsafe { CStr::from_ptr((*version).describe) }.to_str().unwrap_or("");
    println!("libusb v{}.{}.{}.{}{} {}", major, minor, micro, nano, rc, describe);
}
|
#[cfg(test)]
mod tests {
    use std::fs;
    use std::env;
    use std::fs::File;
    use std::path::PathBuf;
    use std::error::Error;
    use std::io::Read;
    use regex::Regex;
    use lazy_static::lazy_static;
    use scraper::Html;
    use hacker_news::parser::HtmlParse;
    use hacker_news::parser::ListingsParser;
    use hacker_news::parser::CommentsParser;
    use hacker_news::util::setup;
    // Note: There is an identical setup function in src/lib.rs; however, since integration tests
    // effectively use the crate as if it were an external dependency, I don't think I can
    // import that test function.
    lazy_static! {
        // Match saved fixture files like "listings*.html" / "comments*.html" in ./data.
        static ref RE_LISTINGS_FILES: Regex = Regex::new("listings(.*).html")
            .expect("Failed to parse Regex instance: RE_LISTINGS_FILES");
        static ref RE_COMMENTS_FILES: Regex = Regex::new("comments(.*).html")
            .expect("Failed to parse Regex instance: RE_COMMENTS_FILES");
    }
    // Absolute path of the test-fixture directory (<cwd>/data).
    fn data_dir() -> Result<PathBuf, Box<dyn Error>> {
        let mut data_dir = env::current_dir()?;
        data_dir.push("data");
        Ok(data_dir)
    }
    // All fixture file names under data_dir() whose name matches `regex`.
    fn list_test_files(regex: &Regex) -> Result<Vec<String>, Box<dyn Error>>{
        let filenames: Vec<String> = fs::read_dir(data_dir()?)?
            .filter_map(|path| path.ok())
            .filter_map(|dir_entry| dir_entry.file_name().into_string().ok())
            .filter(|path| regex.is_match(&path))
            .collect();
        Ok(filenames)
    }
    // Smoke test: every saved listings page must parse without error.
    #[test]
    fn test_listings() -> Result<(), Box<dyn Error>> {
        setup();
        for filename in list_test_files(&RE_LISTINGS_FILES)? {
            log::info!("Starting integration test from listings file '{}'", filename);
            let mut path = data_dir()?;
            path.push(filename.clone());
            let mut f = File::open(path)?;
            let mut text = String::new();
            f.read_to_string(&mut text)?;
            let html = Html::parse_document(&text);
            let listings = ListingsParser::parse(&html)?;
            log::info!("Successfully parsed listings from file '{}'", filename);
            log::trace!("Listings parsed from '{}' = {:?}", filename, listings);
        }
        Ok(())
    }
    // Smoke test: every saved comments page must parse without error.
    #[test]
    fn test_comments() -> Result<(), Box<dyn Error>> {
        setup();
        for filename in list_test_files(&RE_COMMENTS_FILES)? {
            log::info!("Starting integration test from comments file '{}'", filename);
            let mut path = data_dir()?;
            path.push(filename.clone());
            let mut f = File::open(path)?;
            let mut text = String::new();
            f.read_to_string(&mut text)?;
            let html = Html::parse_document(&text);
            let comments = CommentsParser::parse(&html)?;
            log::info!("Successfully parsed comments from file '{}'", filename);
            log::trace!("Comments parsed from '{}' = {:?}", filename, comments);
        }
        Ok(())
    }
    // #[test]
    // fn test_comment_tree() -> Result<(), Box<dyn Error>> {
    //     let text = get_test_text()?;
    //     let html = Html::parse_document(&text);
    //     let comments = extract_comments(&html)?;
    //     let forest = create_comment_tree(comments);
    //     println!("forest = {:#?}", forest);
    //     Ok(())
    // }
}
|
extern crate hyper;
use hyper::Client;
use hyper::header::{ContentType, UserAgent};
use hyper::mime::{Mime, TopLevel, SubLevel};
use hyper::Ok;
use std::env;
use std::io::Read;
/// Subscribes the e-mail address given as the first CLI argument to the
/// nickelback.com mailing list, replicating the curl request below.
fn main(){
    // curl -XPOST 'http://www.nickelback.com/service/emaillist'
    // -H 'Host: www.nickelback.com'
    // -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:51.0) Gecko/20100101 Firefox/51.0'
    // -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
    // -H 'Accept-Language: en-US,en;q=0.5'
    // --compressed
    // -H 'Referer: http://www.nickelback.com/service/emaillist'
    // -H 'Connection: keep-alive'
    // -H 'Upgrade-Insecure-Requests: 1'
    // --data 'formName=emailListForm&email=<email>&country=US&tos=0&tos=1&proceed=Submit'
    let url = "http://www.nickelback.com/service/emaillist";
    let mut post_form: String = "formName=emailListForm&country=US&tos=0&tos=1&proceed=Submit&email=".to_owned();
    // First CLI argument is the address to sign up.
    let email: String = match env::args().nth(1) {
        // `email` is already an owned String — the previous `.to_owned()` cloned it needlessly.
        Some(email) => email,
        None => {
            println!("Usage: <email_address>");
            return;
        }
    };
    post_form.push_str(&email);
    let client = Client::new();
    let mut res = client.post(url)
        // `post_form` is already a String; no intermediate `.to_string()` clone needed.
        .body(&post_form)
        .header(ContentType("application/x-www-form-urlencoded".parse().unwrap()))
        // Header value must NOT repeat the "User-Agent: " header name —
        // the previous literal embedded it in the value itself.
        .header(UserAgent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:51.0) Gecko/20100101 Firefox/51.0".to_string()))
        .send()
        .unwrap();
    assert_eq!(res.status, hyper::Ok);
    let mut s = String::new();
    res.read_to_string(&mut s).unwrap();
    //println!("{}", s); //DEBUG
    println!("{} has been added the world's best email list ;)", email);
}
|
use crate::cube::{Cube, Move};
use std::collections::{HashMap, VecDeque};
// 11 moves using half-turn metric,
// or 14 using the quarter-turn metric
const GODS_NUMBER: usize = 11;
/// Which frontier of the bidirectional search a state belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum Direction {
    Forward,
    Backward,
}
impl Direction {
    /// Flip the search direction.
    fn opposite(self) -> Self {
        if self == Direction::Forward { Direction::Backward } else { Direction::Forward }
    }
}
/// Stitch the full solution together at the meeting state: the forward path
/// from the scrambled cube, followed by the backward path (from the solved
/// cube) inverted and reversed so it runs toward the solved state.
fn build_move_seq(history: HashMap<(Cube, Direction), Vec<Move>>, state: &Cube) -> Vec<Move> {
    let forward = history.get(&(*state, Direction::Forward)).unwrap().clone();
    // Reversing first and inverting each move is equivalent to inverting then reversing.
    let backward_inverted = history
        .get(&(*state, Direction::Backward))
        .unwrap()
        .iter()
        .rev()
        .map(|m| m.inverse());
    forward.into_iter().chain(backward_inverted).collect()
}
/// Solves the cube with a bidirectional BFS: one frontier expands from the
/// scrambled cube, the other from the solved cube; when they meet, the two
/// half-paths are stitched together. Returns `None` if no solution is found
/// within `GODS_NUMBER / 2` moves per side.
pub fn solve(cube: &Cube) -> Option<Vec<Move>> {
    if *cube == Cube::solved() {
        return Some(vec![]);
    }
    // Maps (state, frontier) -> move sequence that reached the state from that frontier's root.
    let mut history: HashMap<(Cube, Direction), Vec<Move>> = HashMap::new();
    let mut queue: VecDeque<(Cube, Direction)> = VecDeque::new();
    history.insert((*cube, Direction::Forward), vec![]);
    history.insert((Cube::solved(), Direction::Backward), vec![]);
    queue.push_back((*cube, Direction::Forward));
    queue.push_back((Cube::solved(), Direction::Backward));
    while let Some((state, direction)) = queue.pop_front() {
        for (action, next_state) in &state.next_states() {
            if history.contains_key(&(*next_state, direction)) {
                continue;
            }
            let mut state_moves: Vec<Move> = history.get(&(state, direction)).unwrap().clone();
            state_moves.push(*action);
            // Each side only needs to cover half of God's number for the frontiers to meet.
            if state_moves.len() > GODS_NUMBER / 2 {
                continue;
            }
            history.insert((*next_state, direction), state_moves);
            // Frontiers met at next_state: combine both half-paths.
            if history.contains_key(&(*next_state, direction.opposite())) {
                return Some(build_move_seq(history, next_state));
            }
            queue.push_back((*next_state, direction));
        }
    }
    None
}
#[cfg(test)]
mod tests {
    use super::*;
    // End-to-end check: a 15-move scramble must be solvable back to the solved state.
    #[test]
    fn solve_random_cube() {
        let cube = Cube::random(15);
        let solution = solve(&cube);
        assert_eq!(Cube::solved(), cube.apply_moves(solution.unwrap()));
    }
}
|
use crate::indices::{EntityId, RoomPosition, ScriptId, WorldPosition};
use arrayvec::{ArrayString, ArrayVec};
use serde::{Deserialize, Serialize};
/// Melee combat capability of an entity.
#[derive(Debug, Clone, Serialize, Deserialize, Copy, Default)]
#[serde(rename_all = "camelCase")]
pub struct MeleeAttackComponent {
    // Damage dealt per attack.
    pub strength: u16,
}
/// Has a body so it's not `null` when serializing
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct Bot;
/// Represent time to decay of bots
/// On decay the bot will loose hp
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct DecayComponent {
    // HP lost per decay tick.
    pub hp_amount: u16,
    // Ticks between decay events.
    pub interval: u8,
    // Ticks left until the next decay event.
    pub time_remaining: u8,
}
/// Current and maximum carrying capacity of an entity.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CarryComponent {
    pub carry: u16,
    pub carry_max: u16,
}
/// Entity - Script join table
#[derive(Debug, Clone, Serialize, Deserialize, Default, Copy)]
#[serde(rename_all = "camelCase")]
pub struct EntityScript(pub ScriptId);
// SAFETY(review): assumes ScriptId is plain data with no thread affinity —
// confirm; if ScriptId is already Send this impl is redundant.
unsafe impl Send for EntityScript {}
pub const PATH_CACHE_LEN: usize = 64;
/// Cached pathfinding result toward `target` (bounded to PATH_CACHE_LEN steps).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct PathCacheComponent {
    pub target: WorldPosition,
    pub path: ArrayVec<RoomPosition, PATH_CACHE_LEN>,
}
pub const SAY_MAX_LEN: usize = 64;
// Fixed-capacity, stack-allocated message payload.
pub type SayPayload = ArrayString<SAY_MAX_LEN>;
/// Short chat/"say" message attached to an entity.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct SayComponent(pub SayPayload);
/// Marks that the wrapped entity was mined this tick.
#[derive(Debug, Clone, Serialize, Deserialize, Default, Copy)]
#[serde(rename_all = "camelCase")]
pub struct MineEventComponent(pub EntityId);
/// Marks that the wrapped entity received a resource drop-off this tick.
#[derive(Debug, Clone, Serialize, Deserialize, Default, Copy)]
#[serde(rename_all = "camelCase")]
pub struct DropoffEventComponent(pub EntityId);
|
/*
* Copyright (c) 2013, David Renshaw (dwrenshaw@gmail.com)
*
* See the LICENSE file in the capnproto-rust root directory.
*/
use std;
use std::rand::*;
use common::*;
use eval_capnp::*;
// Benchmark-harness aliases: a request is a Cap'n Proto Expression builder,
// a response is an EvaluationResult builder, and the expected value is the
// plain i32 the evaluator should produce.
pub type RequestBuilder = Expression::Builder;
pub type ResponseBuilder = EvaluationResult::Builder;
pub type Expectation = i32;
|
use managed::Managed;
use Error;
use wire::{IpProtocol, IpEndpoint};
use wire::{UdpPacket, UdpRepr};
use socket::{Socket, IpRepr, IpPayload};
/// A buffered UDP packet.
#[derive(Debug)]
pub struct PacketBuffer<'a> {
    // Remote endpoint the packet came from / is destined to.
    endpoint: IpEndpoint,
    // Number of valid bytes at the start of `payload`.
    size: usize,
    // Backing storage; may be owned or borrowed (see Managed).
    payload: Managed<'a, [u8]>
}
impl<'a> PacketBuffer<'a> {
    /// Create a buffered packet with an unspecified endpoint and zero length.
    pub fn new<T>(payload: T) -> PacketBuffer<'a>
            where T: Into<Managed<'a, [u8]>> {
        let storage = payload.into();
        PacketBuffer {
            endpoint: IpEndpoint::default(),
            size: 0,
            payload: storage
        }
    }
    /// Borrow only the valid prefix of the payload (lifetimes elided).
    fn as_ref(&self) -> &[u8] {
        &self.payload[..self.size]
    }
    /// Mutably borrow only the valid prefix of the payload.
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.payload[..self.size]
    }
}
/// An UDP packet ring buffer.
#[derive(Debug)]
pub struct SocketBuffer<'a, 'b: 'a> {
    // Fixed pool of packet slots used as a circular queue.
    storage: Managed<'a, [PacketBuffer<'b>]>,
    // Index of the oldest enqueued packet.
    read_at: usize,
    // Number of packets currently enqueued.
    length: usize
}
impl<'a, 'b> SocketBuffer<'a, 'b> {
    /// Create a packet buffer with the given storage.
    pub fn new<T>(storage: T) -> SocketBuffer<'a, 'b>
            where T: Into<Managed<'a, [PacketBuffer<'b>]>> {
        let mut storage = storage.into();
        // Reset every slot so stale endpoint/size data can't leak through.
        for elem in storage.iter_mut() {
            elem.endpoint = Default::default();
            elem.size = 0;
        }
        SocketBuffer {
            storage: storage,
            read_at: 0,
            length: 0
        }
    }
    // Wrap an index into the circular storage.
    fn mask(&self, index: usize) -> usize {
        index % self.storage.len()
    }
    // Next index after `index`, wrapping around.
    fn incr(&self, index: usize) -> usize {
        self.mask(index + 1)
    }
    /// Query whether the buffer is empty.
    pub fn empty(&self) -> bool {
        self.length == 0
    }
    /// Query whether the buffer is full.
    pub fn full(&self) -> bool {
        self.length == self.storage.len()
    }
    /// Enqueue an element into the buffer, and return a pointer to it, or return
    /// `Err(())` if the buffer is full.
    pub fn enqueue(&mut self) -> Result<&mut PacketBuffer<'b>, ()> {
        if self.full() {
            Err(())
        } else {
            // Write position is read_at + length, wrapped.
            let index = self.mask(self.read_at + self.length);
            let result = &mut self.storage[index];
            self.length += 1;
            Ok(result)
        }
    }
    /// Dequeue an element from the buffer, and return a pointer to it, or return
    /// `Err(())` if the buffer is empty.
    pub fn dequeue(&mut self) -> Result<&PacketBuffer<'b>, ()> {
        if self.empty() {
            Err(())
        } else {
            // The returned slot stays valid until the next enqueue reuses it.
            self.length -= 1;
            let result = &self.storage[self.read_at];
            self.read_at = self.incr(self.read_at);
            Ok(result)
        }
    }
}
/// An User Datagram Protocol socket.
///
/// An UDP socket is bound to a specific endpoint, and owns transmit and receive
/// packet buffers.
#[derive(Debug)]
pub struct UdpSocket<'a, 'b: 'a> {
    // Local endpoint the socket is bound to (port 0 / unspecified until bind()).
    endpoint: IpEndpoint,
    rx_buffer: SocketBuffer<'a, 'b>,
    tx_buffer: SocketBuffer<'a, 'b>,
    // User-settable identifier echoed in trace messages.
    debug_id: usize
}
impl<'a, 'b> UdpSocket<'a, 'b> {
    /// Create an UDP socket with the given buffers.
    /// Returns the socket already wrapped in the `Socket::Udp` enum variant.
    pub fn new(rx_buffer: SocketBuffer<'a, 'b>,
               tx_buffer: SocketBuffer<'a, 'b>) -> Socket<'a, 'b> {
        Socket::Udp(UdpSocket {
            endpoint: IpEndpoint::default(),
            rx_buffer: rx_buffer,
            tx_buffer: tx_buffer,
            debug_id: 0
        })
    }
    /// Return the debug identifier.
    pub fn debug_id(&self) -> usize {
        self.debug_id
    }
    /// Set the debug identifier.
    ///
    /// The debug identifier is a number printed in socket trace messages.
    /// It could as well be used by the user code.
    pub fn set_debug_id(&mut self, id: usize) {
        self.debug_id = id
    }
    /// Return the bound endpoint.
    #[inline]
    pub fn endpoint(&self) -> IpEndpoint {
        self.endpoint
    }
    /// Bind the socket to the given endpoint.
    pub fn bind<T: Into<IpEndpoint>>(&mut self, endpoint: T) {
        self.endpoint = endpoint.into()
    }
    /// Check whether the transmit buffer is full.
    pub fn can_send(&self) -> bool {
        !self.tx_buffer.full()
    }
    /// Check whether the receive buffer is not empty.
    pub fn can_recv(&self) -> bool {
        !self.rx_buffer.empty()
    }
    /// Enqueue a packet to be sent to a given remote endpoint, and return a pointer
    /// to its payload.
    ///
    /// This function returns `Err(())` if the size is greater than what
    /// the transmit buffer can accomodate.
    pub fn send(&mut self, size: usize, endpoint: IpEndpoint) -> Result<&mut [u8], ()> {
        let packet_buf = try!(self.tx_buffer.enqueue());
        packet_buf.endpoint = endpoint;
        packet_buf.size = size;
        // net_trace!("[{}]{}:{}: buffer to send {} octets",
        //            self.debug_id, self.endpoint,
        //            packet_buf.endpoint, packet_buf.size);
        // as_mut() already limits the slice to `size`; the extra [..size] is a no-op.
        Ok(&mut packet_buf.as_mut()[..size])
    }
    /// Enqueue a packet to be sent to a given remote endpoint, and fill it from a slice.
    ///
    /// See also [send](#method.send).
    pub fn send_slice(&mut self, data: &[u8], endpoint: IpEndpoint) -> Result<usize, ()> {
        let buffer = try!(self.send(data.len(), endpoint));
        let data = &data[..buffer.len()];
        buffer.copy_from_slice(data);
        Ok(data.len())
    }
    /// Dequeue a packet received from a remote endpoint, and return the endpoint as well
    /// as a pointer to the payload.
    ///
    /// This function returns `Err(())` if the receive buffer is empty.
    pub fn recv(&mut self) -> Result<(&[u8], IpEndpoint), ()> {
        //trace!("inside socket recv");
        let packet_buf = try!(self.rx_buffer.dequeue());
        // net_trace!("[{}]{}:{}: receive {} buffered octets",
        //            self.debug_id, self.endpoint,
        //            packet_buf.endpoint, packet_buf.size);
        // NOTE(review): this uses `trace!` while every sibling uses `net_trace!` —
        // looks inconsistent; confirm which macro is intended here.
        trace!("[{}]{}:{}: receive {} buffered octets",
                   self.debug_id, self.endpoint,
                   packet_buf.endpoint, packet_buf.size);
        Ok((&packet_buf.as_ref()[..packet_buf.size], packet_buf.endpoint))
    }
    /// Dequeue a packet received from a remote endpoint, and return the endpoint as well
    /// as copy the payload into the given slice.
    ///
    /// See also [recv](#method.recv).
    pub fn recv_slice(&mut self, data: &mut [u8]) -> Result<(usize, IpEndpoint), ()> {
        // NOTE(review): panics if `data` is shorter than the received packet — confirm callers.
        let (buffer, endpoint) = try!(self.recv());
        data[..buffer.len()].copy_from_slice(buffer);
        Ok((buffer.len(), endpoint))
    }
    /// See [Socket::process](enum.Socket.html#method.process).
    ///
    /// Accepts an incoming datagram if it is UDP, addressed to our port (and, when
    /// bound to a specific address, to that address), then copies it into rx_buffer.
    pub fn process(&mut self, _timestamp: u64, ip_repr: &IpRepr,
                   payload: &[u8]) -> Result<(), Error> {
        if ip_repr.protocol() != IpProtocol::Udp { return Err(Error::Rejected) }
        let packet = try!(UdpPacket::new(&payload[..ip_repr.payload_len()]));
        let repr = try!(UdpRepr::parse(&packet, &ip_repr.src_addr(), &ip_repr.dst_addr()));
        if repr.dst_port != self.endpoint.port { return Err(Error::Rejected) }
        if !self.endpoint.addr.is_unspecified() {
            if self.endpoint.addr != ip_repr.dst_addr() { return Err(Error::Rejected) }
        }
        let packet_buf = try!(self.rx_buffer.enqueue().map_err(|()| Error::Exhausted));
        packet_buf.endpoint = IpEndpoint { addr: ip_repr.src_addr(), port: repr.src_port };
        packet_buf.size = repr.payload.len();
        packet_buf.as_mut()[..repr.payload.len()].copy_from_slice(repr.payload);
        net_trace!("[{}]{}:{}: receiving {} octets",
                   self.debug_id, self.endpoint,
                   packet_buf.endpoint, packet_buf.size);
        Ok(())
    }
    /// See [Socket::dispatch](enum.Socket.html#method.dispatch).
    ///
    /// Dequeues one buffered outgoing packet and hands it to `emit` together
    /// with an unspecified-source IP representation for the lower layer to fill in.
    pub fn dispatch<F, R>(&mut self, _timestamp: u64, _mtu: usize,
                          emit: &mut F) -> Result<R, Error>
            where F: FnMut(&IpRepr, &IpPayload) -> Result<R, Error> {
        let packet_buf = try!(self.tx_buffer.dequeue().map_err(|()| Error::Exhausted));
        net_trace!("[{}]{}:{}: sending {} octets",
                   self.debug_id, self.endpoint,
                   packet_buf.endpoint, packet_buf.size);
        let repr = UdpRepr {
            src_port: self.endpoint.port,
            dst_port: packet_buf.endpoint.port,
            payload:  &packet_buf.as_ref()[..]
        };
        let ip_repr = IpRepr::Unspecified {
            src_addr:    self.endpoint.addr,
            dst_addr:    packet_buf.endpoint.addr,
            protocol:    IpProtocol::Udp,
            payload_len: repr.buffer_len()
        };
        emit(&ip_repr, &repr)
    }
}
impl<'a> IpPayload for UdpRepr<'a> {
    fn buffer_len(&self) -> usize {
        // Not recursive: inherent methods take precedence over trait methods,
        // so this dispatches to UdpRepr's own buffer_len.
        self.buffer_len()
    }
    fn emit(&self, repr: &IpRepr, payload: &mut [u8]) {
        // Likewise delegates to UdpRepr's inherent emit with the chosen addresses.
        let mut packet = UdpPacket::new(payload).expect("undersized payload");
        self.emit(&mut packet, &repr.src_addr(), &repr.dst_addr())
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Exercises the ring buffer through wrap-around: enqueue/dequeue interleaved
    // so indices pass the end of the 5-slot storage.
    #[test]
    pub fn test_buffer() {
        let mut storage = vec![];
        for _ in 0..5 {
            storage.push(PacketBuffer::new(vec![0]))
        }
        let mut buffer = SocketBuffer::new(&mut storage[..]);
        assert_eq!(buffer.empty(), true);
        assert_eq!(buffer.full(),  false);
        buffer.enqueue().unwrap().size = 1;
        assert_eq!(buffer.empty(), false);
        assert_eq!(buffer.full(),  false);
        buffer.enqueue().unwrap().size = 2;
        buffer.enqueue().unwrap().size = 3;
        assert_eq!(buffer.dequeue().unwrap().size, 1);
        assert_eq!(buffer.dequeue().unwrap().size, 2);
        buffer.enqueue().unwrap().size = 4;
        buffer.enqueue().unwrap().size = 5;
        buffer.enqueue().unwrap().size = 6;
        buffer.enqueue().unwrap().size = 7;
        // Buffer now holds 5 items (its capacity) — further enqueue must fail.
        assert_eq!(buffer.enqueue().unwrap_err(), ());
        assert_eq!(buffer.empty(), false);
        assert_eq!(buffer.full(),  true);
        assert_eq!(buffer.dequeue().unwrap().size, 3);
        assert_eq!(buffer.dequeue().unwrap().size, 4);
        assert_eq!(buffer.dequeue().unwrap().size, 5);
        assert_eq!(buffer.dequeue().unwrap().size, 6);
        assert_eq!(buffer.dequeue().unwrap().size, 7);
        // Drained: dequeue must fail and flags return to the initial state.
        assert_eq!(buffer.dequeue().unwrap_err(), ());
        assert_eq!(buffer.empty(), true);
        assert_eq!(buffer.full(),  false);
    }
}
|
use std::cmp;
use std::net::{SocketAddr, ToSocketAddrs};
use std::time::{Instant, Duration};
use std::collections::HashMap;
use rand;
use libc::c_int;
use nix::poll::{poll, EventFlags, PollFd, POLLIN};
use socket::RawSocket;
use packet::EchoRequest;
pub const POLL_TIMEOUT_MS: c_int = 1000;
/// An object for sending and receiving ICMP messages.
pub struct Icmp {
    // Raw ICMP socket (non-blocking).
    socket: RawSocket,
    // Target host resolved from the constructor's `ip` string.
    host: SocketAddr,
    // Reusable echo request; its sequence number is bumped per send.
    echo: EchoRequest,
    // Send timestamp per sequence number, used to compute round-trip time.
    echoes: HashMap<u16, Instant>,
}
impl Icmp {
    /// Creates a new object for sending and receiving ICMP messages between
    /// the local machine and a host machine identified by `ip`.
    pub fn new(ip: &str) -> Icmp {
        let host = (ip, 0).to_socket_addrs().unwrap().next().unwrap();
        let socket = RawSocket::new().unwrap();
        socket.set_nonblocking(true).unwrap();
        Icmp {
            socket: socket,
            host: host,
            echo: EchoRequest::new(rand::random()),
            echoes: HashMap::new(),
        }
    }
    /// Polls for new messages. Returns true if data becomes available before
    /// the timeout expires.
    pub fn poll(&self) -> Option<bool> {
        let icmpfd = PollFd::new(self.socket.fd(), POLLIN, EventFlags::empty());
        // poll(2) returns the number of ready descriptors; 0 means the timeout
        // expired with nothing ready. The previous `rdy >= 0` therefore reported
        // a timeout as readiness, contradicting this method's contract.
        let rdy = poll(& mut[icmpfd], POLL_TIMEOUT_MS).unwrap();
        Some(rdy > 0)
    }
    /// Send an echo request with an incrementing sequence number.
    pub fn echo_request(&mut self) {
        self.echo.inc_seq();
        self.echo.do_checksum();
        self.socket.send_to(&self.echo.as_bytes(), &self.host).unwrap();
        // Remember when this sequence number left, for RTT measurement.
        self.echoes.insert(self.echo.seq(), Instant::now());
    }
    /// Issue a non-blocking read for any available echo replies from the host.
    /// Returns an object representing the reply and the time elapsed between
    /// sending the request and receiving its reply.
    pub fn get_echo_reply(&self) -> Option<(EchoRequest, Duration)> {
        let mut buf = [0; 65536];
        match self.socket.recv_from(&mut buf) {
            Err(_) => None,
            Ok(recv_len) => {
                let len = cmp::min(recv_len, buf.len());
                // Skip the 20-byte IPv4 header to reach the ICMP payload.
                // NOTE(review): assumes a fixed-length IP header (no options) — confirm.
                let echo = EchoRequest::from(&buf[20..len]);
                // verify the echo reply is one of ours
                if echo.id() != self.echo.id() {
                    return None;
                }
                // NOTE(review): panics if a reply arrives with an unknown seq — confirm acceptable.
                let time = Instant::now().duration_since(*self.echoes.get(&echo.seq()).unwrap());
                Some((echo, time))
            },
        }
    }
}
|
use std::process::*;
/// Refreshes the notmuch index, then counts unread inbox threads.
/// Any spawn/decoding failure is reported as its error message.
fn unseen_mail_count() -> Result<usize, String> {
    // Small helper so both notmuch invocations share the bash plumbing.
    let run = |cmd: &str| {
        Command::new("bash")
            .arg("-c")
            .arg(cmd)
            .output()
            .map_err(|e| e.to_string())
    };
    // Index new mail first; its output is irrelevant.
    run("notmuch new")?;
    let search = run("notmuch search 'tag:unread and folder:/.*INBOX/'")?;
    // One line of search output per matching thread.
    let text = std::str::from_utf8(&search.stdout).map_err(|e| e.to_string())?;
    Ok(text.lines().count())
}
/// Tray applet: writes the two bundled icons to /tmp, then polls notmuch once
/// per second and swaps the tray icon depending on unread-mail count.
fn main() {
    use std::fs::File;
    use std::io::prelude::*;
    // Materialize the compiled-in SVGs so systray can load them from disk.
    File::create("/tmp/buzz-default.svg")
        .unwrap()
        .write_all(include_bytes!("../default.svg"))
        .unwrap();
    File::create("/tmp/buzz-unread.svg")
        .unwrap()
        .write_all(include_bytes!("../unread.svg"))
        .unwrap();
    let app = match systray::Application::new() {
        Ok(app) => app,
        Err(e) => {
            println!("Could not create gtk application: {}", e);
            return;
        }
    };
    loop {
        match unseen_mail_count() {
            Ok(count) => {
                // Unread icon when anything is pending, default icon otherwise.
                if count > 0 {
                    app.set_icon_from_file(&"/tmp/buzz-unread.svg".to_string())
                        .unwrap();
                } else {
                    app.set_icon_from_file(&"/tmp/buzz-default.svg".to_string())
                        .unwrap();
                }
            }
            Err(e) => {
                println!("Something wrong!, err: {}", e);
                return;
            }
        }
        // Re-check once per second.
        std::thread::sleep(std::time::Duration::from_secs(1));
    }
}
|
use std::default::Default;
/// Identifies a value by its memory address.
trait ID {
    /// Returns the address of `self` as an integer.
    fn id(&self) -> usize;
}
impl<T:Sized> ID for T {
    fn id(&self) -> usize {
        // Pointer-to-integer conversion is a plain safe cast; the previous
        // `transmute` needed `unsafe` for no benefit.
        self as *const T as usize
    }
}
// Zero-sized type used to demonstrate that even ZSTs have an address.
#[derive(Default)]
struct Empty ;
// Simple named struct for the same demonstration.
struct S {
    pub name : &'static str
}
fn main() {
let i = 32_i32;
let e : Empty = Default::default();
let s = S{name : "hello"};
println!("{:?} {:?} {:?}", i.id(), e.id(), s.id());
} |
use std::collections::HashMap;
use std::io::BufRead;
/// Applies one frequency-change line (e.g. "+1" or "-2") to `freq`.
///
/// `i32::from_str` already understands leading '+' and '-', so the previous
/// manual sign-stripping was redundant — and it panicked on unsigned input
/// like "5", which now parses fine. Panics only if the line is not a number.
fn prob1_adder(freq: i32, line: String) -> i32 {
    let delta = line.trim().parse::<i32>().expect("problem parsing number");
    freq + delta
}
pub fn one_a<I>(buf: I) -> i32
where
I: BufRead,
{
let mut freq: i32 = 0;
for line in buf.lines() {
let line = line.unwrap();
freq = prob1_adder(freq, line);
}
freq
}
/// Day 1 part B: replay the frequency changes (cycling forever) and return
/// the first cumulative frequency reached twice.
pub fn one_b<I>(buf: I) -> i32
where
    I: BufRead,
{
    let mut seen_freqs = HashMap::new();
    let mut freq: i32 = 0;
    seen_freqs.insert(freq, true);
    // save lines for rerun
    let mut replay_lines = Vec::new();
    for line in buf.lines() {
        let line = line.unwrap();
        replay_lines.push(line.clone());
        freq = prob1_adder(freq, line);
        // insert() returns the previous value, so one map operation replaces
        // the former contains_key + insert double lookup.
        if seen_freqs.insert(freq, true).is_some() {
            return freq;
        }
    }
    // Input exhausted without a repeat: cycle over the saved lines until one appears.
    loop {
        for line in replay_lines.iter() {
            freq = prob1_adder(freq, line.to_string());
            if seen_freqs.insert(freq, true).is_some() {
                return freq;
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Example from the puzzle statement: +1, -2, +3, +1 sums to 3.
    #[test]
    fn test_one_a() {
        let input = b"+1\n-2\n+3\n+1\n";
        assert_eq!(3, one_a(&input[..]));
    }
    // All part-B examples from the puzzle statement, including ones that
    // require replaying the input more than once.
    #[test]
    fn test_one_b() {
        let input = b"+1\n-2\n+3\n+1\n";
        assert_eq!(2, one_b(&input[..]));
        let input = b"+1\n-1\n";
        assert_eq!(0, one_b(&input[..]));
        let input = b"+3\n+3\n+4\n-2\n-4\n";
        assert_eq!(10, one_b(&input[..]));
        let input = b"-6\n+3\n+8\n+5\n-6\n";
        assert_eq!(5, one_b(&input[..]));
        let input = b"+7\n+7\n-2\n-7\n-4\n";
        assert_eq!(14, one_b(&input[..]));
    }
}
|
use crate::{
jack_tokenizer::{Keyword, TokenData, Tokenizer},
symbol_table::{SymbolTable, VarKind},
vm_writer::VMWriter,
};
/// Jack-to-VM compiler front end: walks the token stream, maintains the
/// symbol table, and emits VM code through the writer.
pub struct CompilationEngine {
    tokenizer: Tokenizer,
    symbol_table: SymbolTable,
    vm_writer: VMWriter,
    // Name of the class currently being compiled (set by compile_class).
    class_name: String,
    // Counter used to generate unique branch labels.
    label_index: usize,
    // Whether the current subroutine's return type is void.
    is_void: bool,
}
impl CompilationEngine {
pub fn new(tokenizer: Tokenizer) -> Self {
CompilationEngine {
tokenizer: tokenizer,
symbol_table: SymbolTable::new(),
vm_writer: VMWriter::new(),
class_name: String::new(),
label_index: 0,
is_void: false,
}
}
    /// Entry point: primes the tokenizer with the first token, then compiles
    /// the (single) class in the input.
    pub fn start_compile(&mut self) {
        self.advance();
        self.compile_class();
    }
    /// Compiles: 'class' className '{' classVarDec* subroutineDec* '}'.
    fn compile_class(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_CLASS");
        // class className '{'
        self.advance();
        self.set_class_name();
        self.advance();
        self.advance();
        // classVarDec*
        while self.tokenizer.is_class_var_dec() {
            self.compile_class_var_dec();
        }
        // subroutineDec*
        while self.tokenizer.is_subroutine_dec() {
            self.compile_subroutine();
        }
        // '}'
        self.advance();
    }
    /// Compiles: ('static'|'field') type varName (',' varName)* ';' —
    /// registering each declared variable in the symbol table.
    fn compile_class_var_dec(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_CLASS_VAR_DEC");
        // attribute type varName
        let v_attribute = self.get_attribute();
        self.advance();
        let v_type = self.get_var_type();
        self.advance();
        let v_name = self.get_identifier();
        self.symbol_table.define(&v_name, &v_type, &v_attribute);
        self.advance();
        // (',' varName)* — additional names share the same type/attribute
        while !self.tokenizer.is_semicolon() {
            self.skip_comma();
            let v_name = self.get_identifier();
            self.symbol_table.define(&v_name, &v_type, &v_attribute);
            self.advance();
        }
        // ';'
        self.advance();
    }
    /// Compiles one subroutineDec: ('constructor'|'function'|'method')
    /// type name '(' parameterList ')' '{' varDec* statements '}'.
    /// Emits the VM function header plus constructor/method prologue.
    fn compile_subroutine(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE SUBROUTINE");
        // NOTE(review): startSubroutine is called again per-kind in the match
        // below, so this first call looks redundant — confirm before removing.
        self.symbol_table.startSubroutine();
        // attribute type subroutineName
        let subroutine_attribute = self.get_token().clone();
        self.advance();
        self.set_type();
        self.advance();
        let subroutine_name = self.get_identifier();
        self.advance();
        // For methods:
        // arg 0 of the symbol table is reserved for `this`. :TODO
        // *** vm_writer
        match subroutine_attribute {
            Some(TokenData::TKeyword(Keyword::Constructor)) => {
                self.symbol_table.startSubroutine();
            }
            Some(TokenData::TKeyword(Keyword::Function)) => {
                self.symbol_table.startSubroutine();
            }
            Some(TokenData::TKeyword(Keyword::Method)) => {
                // Methods receive the receiver object as implicit argument 0.
                self.symbol_table.startSubroutine();
                self.symbol_table.define("this", &self.class_name, &VarKind::Argument);
            }
            _ => {}
        }
        // ( parameterList )
        self.advance();
        self.compile_parameter_list();
        self.advance();
        // '{'
        self.advance();
        // varDec*
        // NOTE(review): despite the name, n_args counts declared locals, not arguments.
        let mut n_args = 0;
        while self.tokenizer.is_var_dec() {
            n_args += self.compile_var_dec();
        }
        // *** vm_writer
        match subroutine_attribute {
            Some(TokenData::TKeyword(Keyword::Constructor)) => {
                // Constructors allocate space for all fields/statics and point
                // `this` (pointer 0) at the fresh object.
                self.vm_writer.write_function(&self.class_name, &subroutine_name, n_args);
                let n_size = self.symbol_table.var_count(&VarKind::Field)
                    + self.symbol_table.var_count(&VarKind::Static);
                self.vm_writer.write_push("constant", n_size as u16);
                self.vm_writer.write_call("Memory.alloc", 1);
                self.vm_writer.write_pop("pointer", 0);
            }
            Some(TokenData::TKeyword(Keyword::Function)) => {
                self.vm_writer.write_function(&self.class_name, &subroutine_name, n_args);
            }
            Some(TokenData::TKeyword(Keyword::Method)) => {
                // For methods, bind `this` from implicit argument 0:
                // push argument 0
                // then write pop pointer 0
                self.vm_writer.write_function(&self.class_name, &subroutine_name, n_args);
                self.vm_writer.write_push("argument", 0);
                self.vm_writer.write_pop("pointer", 0);
            }
            _ => {}
        }
        // statements
        self.compile_statements();
        // '}'
        self.advance();
        self.debug_priint_symbol_table();
    }
    /// Compiles `((type varName) (',' type varName)*)?`, defining each
    /// parameter as an `Argument`, and returns how many were declared.
    fn compile_parameter_list(&mut self) -> usize {
        // ((type varName) (',' type varName)* )?
        let mut n_args = 0;
        while !self.is_close_paren() {
            if self.is_comma() {
                self.advance();
            }
            // type varName
            let v_attribute = VarKind::Argument;
            let v_type = self.get_var_type();
            self.advance();
            let v_name = self.get_identifier();
            self.advance();
            self.symbol_table.define(&v_name, &v_type, &v_attribute);
            n_args += 1;
        }
        n_args
    }
    /// Compiles `var type varName (',' varName)* ';'` and returns the number
    /// of locals declared (summed by the caller into the nLocals count).
    fn compile_var_dec(&mut self) -> usize {
        self.debug_print_this_token("FUNCTION: COMPILE_VAR_DEC");
        // Starts at 1 for the first declared name; each comma adds one more.
        let mut n_arg = 1;
        // var type varName
        let v_attribute = VarKind::Var;
        self.advance();
        let v_type = self.get_var_type();
        self.advance();
        let v_name = self.get_identifier();
        self.advance();
        self.symbol_table.define(&v_name, &v_type, &v_attribute);
        while !self.is_semicolon() {
            if self.is_comma() {
                n_arg += 1;
                self.advance();
            }
            let v_name = self.get_identifier();
            self.advance();
            self.symbol_table.define(&v_name, &v_type, &v_attribute);
        }
        // ';'
        self.advance();
        n_arg
    }
    /// Compiles a run of statements, dispatching on the leading keyword,
    /// until a non-statement token (e.g. the closing '}') is reached.
    fn compile_statements(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_STATEMENTS");
        // statements*
        while self.is_statement() {
            if let &Some(TokenData::TKeyword(keyword)) = &self.get_token() {
                match keyword {
                    Keyword::Let => {
                        self.compile_let();
                    }
                    Keyword::Do => {
                        self.compile_do();
                    }
                    Keyword::If => {
                        self.compile_if();
                    }
                    Keyword::While => {
                        self.compile_while();
                    }
                    Keyword::Return => {
                        self.compile_return();
                    }
                    _ => {
                        // Unreachable while is_statement() covers these five.
                        panic!("ERROR: This token is not statement");
                    }
                }
            }
        }
    }
    /// Compiles `let varName ('[' expression ']')? '=' expression ';'`:
    /// plain variables get a direct pop; array elements use the
    /// temp 0 / pointer 1 dance so the RHS can itself index arrays.
    fn compile_let(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_LET");
        let mut is_array = false;
        // let varName
        self.advance();
        let v_name = self.get_identifier();
        self.advance();
        // ('[' expression ']' )?
        if self.is_open_sq() {
            is_array = true;
            self.advance();
            self.compile_expression();
            self.advance();
            // index + base, then anchor `that` at the element address and
            // keep the address on the stack for after the RHS.
            self.write_push_to_vm(&v_name);
            self.vm_writer.push("add");
            self.vm_writer.push("pop pointer 1");
            self.vm_writer.write_push("pointer", 1);
        }
        // =
        self.advance();
        // expression
        self.compile_expression();
        // ;
        self.advance();
        if is_array {
            // Save RHS, re-anchor `that` at the element, store into it.
            self.vm_writer.write_pop("temp", 0);
            self.vm_writer.write_pop("pointer", 1);
            self.vm_writer.write_push("temp", 0);
            self.vm_writer.write_pop("that", 0);
        } else {
            self.write_pop_to_vm(&v_name);
        }
        // NOTE(review): this pointer 1 reset runs for EVERY let, array or
        // not -- confirm it is intentional for the non-array case.
        self.vm_writer.push("push constant 0");
        self.vm_writer.push("pop pointer 1");
    }
    /// Compiles `do subroutineCall ';'` in its three shapes -- `Class.f(...)`,
    /// `var.f(...)`, and bare `f(...)` -- and discards the returned value.
    fn compile_do(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_DO");
        // do
        self.advance();
        // className '.' subroutineName '(' expressionList ')' ';'
        if self.is_class_name() {
            let class_name = self.get_identifier();
            self.advance();
            self.advance();
            let subroutine_name = self.get_identifier();
            self.advance();
            self.advance();
            // self.vm_writer.write_push("pointer", 0);
            let n_args = self.compile_expression_list();
            self.advance();
            self.advance();
            // Static call: no receiver, n_args as counted.
            let command = format!("{}.{}", class_name, subroutine_name);
            self.vm_writer.write_call(&command, n_args);
        }
        // varName '.' subroutineName '(' expressionList ')' ';'
        else if self.is_var_name() {
            let var_name = self.get_identifier();
            let v_idx = self.symbol_table.index_of(&var_name).unwrap();
            let v_kind = self.symbol_table.kind_of(&var_name).unwrap().to_string();
            let v_type = self.symbol_table.type_of(&var_name).unwrap();
            self.advance();
            self.advance();
            let subroutine_name = self.get_identifier();
            self.advance();
            self.advance();
            // Push the receiver first, hence n_args + 1 below.
            self.vm_writer.write_push(&v_kind, v_idx as u16);
            let n_args = self.compile_expression_list();
            self.advance();
            self.advance();
            let command = format!("{}.{}", &v_type, subroutine_name);
            self.vm_writer.write_call(&command, n_args + 1);
        }
        // subroutineName '(' expressionList ')' ';'
        else {
            let subroutine_name = self.get_identifier();
            self.advance();
            self.advance();
            // Implicit method call on the current object: push `this`.
            self.vm_writer.write_push("pointer", 0);
            let n_args = self.compile_expression_list();
            self.advance();
            self.advance();
            let command = format!("{}.{}", self.class_name, subroutine_name);
            self.vm_writer.write_call(&command, n_args + 1);
        }
        // dispose return
        self.vm_writer.write_pop("temp", 0);
    }
    /// Compiles `if '(' expression ')' '{' statements '}' (else ...)?` using
    /// two fresh labels: label_1 = else/skip target, label_2 = end.
    fn compile_if(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_IF");
        let label_1 = self.get_label();
        let label_2 = self.get_label();
        // if '(' expression ')'
        self.advance();
        self.advance();
        self.compile_expression();
        self.advance();
        // Branch to label_1 when the condition fails -- assumes
        // VMWriter::write_if emits the negated-condition jump; confirm.
        self.vm_writer.write_if(&label_1);
        // '{' statements '}'
        self.advance();
        self.compile_statements();
        self.advance();
        self.vm_writer.write_goto(&label_2);
        self.vm_writer.write_label(&label_1);
        // ( else '{' statements '}' )?
        if self.is_else() {
            self.advance();
            self.advance();
            self.compile_statements();
            self.advance();
        }
        self.vm_writer.write_label(&label_2);
    }
    /// Compiles `while '(' expression ')' '{' statements '}'` as:
    /// label_1; cond; if-goto label_2; body; goto label_1; label_2.
    fn compile_while(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_WHILE");
        let label_1 = self.get_label();
        let label_2 = self.get_label();
        self.vm_writer.write_label(&label_1);
        // while '(' expression ')'
        self.advance();
        self.advance();
        self.compile_expression();
        self.advance();
        // Exit the loop when the condition fails -- assumes write_if emits
        // the negated-condition branch; confirm against VMWriter.
        self.vm_writer.write_if(&label_2);
        // '{' statements '}'
        self.advance();
        self.compile_statements();
        self.advance();
        self.vm_writer.write_goto(&label_1);
        self.vm_writer.write_label(&label_2);
    }
    /// Compiles `return expression? ';'`; void subroutines push constant 0 so
    /// every caller can uniformly pop a return value.
    fn compile_return(&mut self) {
        self.debug_print_this_token("FUNCTION: COMPILE_RETURN");
        // return
        self.advance();
        // expression?
        while !self.is_semicolon() {
            self.compile_expression();
        }
        // ';'
        self.advance();
        if self.is_void {
            self.vm_writer.push("push constant 0");
        }
        self.vm_writer.write_return();
    }
    /// Compiles a comma-separated (possibly empty) expression list, stopping
    /// at the closing ')', and returns how many expressions were pushed.
    fn compile_expression_list(&mut self) -> usize {
        self.debug_print_this_token("FUNCTION: COMPILE_EXPRESSION_LIST");
        let mut n_args = 0;
        while !self.is_close_paren() {
            if self.is_comma() {
                self.advance();
            }
            self.compile_expression();
            n_args += 1;
        }
        n_args
    }
fn compile_expression(&mut self) {
self.debug_print_this_token("FUNCTION: COMPILE_EXPRESSION");
let mut arithmetic = None;
// term
self.compile_term();
// (op term)*
while self.is_op() {
arithmetic = self.get_token().clone();
self.advance();
self.compile_term();
}
if let Some(TokenData::TSymbol(symbol)) = arithmetic {
self.vm_writer.write_arithmetic(&symbol);
}
}
fn compile_term(&mut self) {
self.debug_print_this_token("FUNCTION: COMPILE_TERM");
// integerConst (FIX)
if self.is_integer_const() {
let n = self.get_integer_const();
self.vm_writer.write_push("constant", n);
self.advance();
}
// stringConst (FIX)
else if self.is_string_constant() {
let string_val = self.tokenizer.get_string_val().unwrap();
let string_len = string_val.len();
self.vm_writer.write_push("constant", string_len as u16);
self.vm_writer.write_call("String.new", 1);
for char in string_val.chars() {
let b = char as u8;
self.vm_writer.write_push("constant", b as u16);
self.vm_writer.write_call("String.appendChar", 2);
}
self.advance();
}
// keywordConstant (FIX)
else if self.is_keyword_constant() {
if let &Some(TokenData::TKeyword(keyword)) = &self.get_token() {
match keyword {
Keyword::True => {
self.vm_writer.write_push("constant", 0);
self.vm_writer.push("not");
}
Keyword::False => {
self.vm_writer.write_push("constant", 0);
}
Keyword::Null => {
self.vm_writer.write_push("constant", 0);
}
Keyword::This => {
self.vm_writer.write_push("pointer", 0);
}
_ => {}
}
}
self.advance();
}
// unaryOp term
else if self.is_unary_op() {
let unary_op = self.get_token().clone();
self.advance();
self.compile_term();
if let Some(TokenData::TSymbol(op)) = unary_op {
self.vm_writer.write_unary_op(&op);
}
}
// '(' expression ')'
else if self.is_open_paren() {
self.advance();
self.compile_expression();
self.advance();
} else {
let next_token = self.peek_token().unwrap();
// varName '[' expression ']'
if next_token == TokenData::TSymbol("[".to_string()) {
let var_name = self.get_identifier();
self.write_push_to_vm(&var_name);
self.advance();
self.advance();
self.compile_expression();
self.vm_writer.push("add");
self.vm_writer.write_pop("pointer", 1);
self.vm_writer.write_push("that", 0);
self.vm_writer.write_push("constant", 0);
self.vm_writer.write_pop("pointer", 1);
self.advance();
}
// name '.' subroutineName '(' expressionList ')'
else if next_token == TokenData::TSymbol(".".to_string()) {
// className '.' subroutineName '(' expresionList ')'
if self.is_class_name() {
let class_name = self.get_identifier();
self.advance();
self.advance();
let subroutine_name = self.get_identifier();
self.advance();
self.advance();
let n_args = self.compile_expression_list();
self.advance();
let command = format!("{}.{}", class_name, subroutine_name);
self.vm_writer.write_call(&command, n_args);
}
// varName '.' subroutineName '(' expressionLIst ')'
else if self.is_var_name() {
let var_name = self.get_identifier();
self.advance();
self.advance();
let subroutine_name = self.get_identifier();
self.advance();
self.advance();
let v_idx = self.symbol_table.index_of(&var_name).unwrap();
let v_type = self.symbol_table.type_of(&var_name).unwrap();
let v_kind = self.symbol_table.kind_of(&var_name).unwrap().to_string();
self.vm_writer.write_push(&v_kind, v_idx as u16);
let n_args = self.compile_expression_list();
self.advance();
let command = format!("{}.{}", v_type, subroutine_name);
self.vm_writer.write_call(&command, n_args + 1);
}
// subroutineName '(' expressionLIst ')'
else {
let subroutine_name = self.get_identifier();
self.advance();
self.advance();
let n_args = self.compile_expression_list();
self.advance();
let command = format!("{}.{}", self.class_name, subroutine_name);
self.vm_writer.write_push("argument", 0);
self.vm_writer.write_call(&command, n_args + 1);
}
}
// varName
else {
let v_name = self.get_identifier();
self.advance();
self.write_push_to_vm(&v_name);
}
}
}
    // helper functions
    /// Advances the tokenizer by one token.
    fn advance(&mut self) {
        self.tokenizer.advance();
    }
    /// Consumes the current token when it is a comma; no-op otherwise.
    fn skip_comma(&mut self) {
        if self.tokenizer.is_comma() {
            self.advance();
        }
    }
    /// Drains every remaining token and appends a terminator marker line.
    fn terminate(&mut self) {
        while self.tokenizer.has_more_tokens() {
            self.advance();
        }
        println!("*** TERMINATE ***"); // DEBUG
        self.push_vm("*** TERMINATE ***");
    }
    /// Returns the generated VM code, one command per line.
    pub fn output_vm(&self) -> Vec<String> {
        self.vm_writer.output()
    }
    // push vm
    /// Appends a raw line to the VM output.
    fn push_vm(&mut self, str: &str) {
        self.vm_writer.push(str);
    }
// write vm helper
fn write_push_to_vm(&mut self, name: &str) {
let v_attribute = self.symbol_table.kind_of(name).unwrap();
let v_idx = self.symbol_table.index_of(name).unwrap() as u16;
match v_attribute {
VarKind::Static => {
self.vm_writer.write_push("static", v_idx);
}
VarKind::Field => {
self.vm_writer.write_push("this", v_idx);
}
VarKind::Argument => {
self.vm_writer.write_push("argument", v_idx);
}
VarKind::Var => {
self.vm_writer.write_push("local", v_idx);
}
}
}
fn write_pop_to_vm(&mut self, name: &str) {
let v_attribute = self.symbol_table.kind_of(name).unwrap();
let v_idx = self.symbol_table.index_of(name).unwrap();
match v_attribute {
VarKind::Static => {
self.vm_writer.write_pop("static", v_idx);
}
VarKind::Field => {
self.vm_writer.write_pop("this", v_idx);
}
VarKind::Argument => {
self.vm_writer.write_pop("argument", v_idx);
}
VarKind::Var => {
self.vm_writer.write_pop("local", v_idx);
}
}
}
    // flag check
    /// True when the identifier before a '.' is NOT in the symbol table --
    /// i.e. the call target is a class name (static call).
    fn is_class_name(&self) -> bool {
        if self.tokenizer.next_is_dot() {
            let name = self.get_identifier();
            return !self.symbol_table.contains(&name);
        }
        false
    }
    /// True when the identifier before a '.' IS a known variable --
    /// i.e. the call target is an object (method call on it).
    fn is_var_name(&self) -> bool {
        if self.tokenizer.next_is_dot() {
            let name = self.get_identifier();
            return self.symbol_table.contains(&name);
        }
        false
    }
fn is_statement(&self) -> bool {
self.get_token() == &Some(TokenData::TKeyword(Keyword::Let))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::Do))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::If))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::While))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::Return))
}
fn is_keyword_constant(&self) -> bool {
self.get_token() == &Some(TokenData::TKeyword(Keyword::True))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::False))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::Null))
|| self.get_token() == &Some(TokenData::TKeyword(Keyword::This))
}
fn is_integer_const(&self) -> bool {
if let &Some(TokenData::TIntVal(_)) = self.get_token() {
true
} else {
false
}
}
fn is_string_constant(&self) -> bool {
if let &Some(TokenData::TStringVal(_)) = self.get_token() {
true
} else {
false
}
}
fn is_close_paren(&self) -> bool {
self.get_token() == &Some(TokenData::TSymbol(")".to_string()))
}
fn is_open_paren(&self) -> bool {
self.get_token() == &Some(TokenData::TSymbol("(".to_string()))
}
fn is_open_sq(&self) -> bool {
self.get_token() == &Some(TokenData::TSymbol("[".to_string()))
}
fn is_comma(&self) -> bool {
self.get_token() == &Some(TokenData::TSymbol(",".to_string()))
}
fn is_semicolon(&self) -> bool {
self.get_token() == &Some(TokenData::TSymbol(";".to_string()))
}
fn is_else(&self) -> bool {
self.get_token() == &Some(TokenData::TKeyword(Keyword::Else))
}
fn is_op(&self) -> bool {
if let &Some(TokenData::TSymbol(symbol)) = &self.get_token() {
symbol == "+"
|| symbol == "-"
|| symbol == "*"
|| symbol == "/"
|| symbol == "&"
|| symbol == "|"
|| symbol == "<"
|| symbol == ">"
|| symbol == "="
} else {
false
}
}
fn is_unary_op(&self) -> bool {
self.get_token() == &Some(TokenData::TSymbol("-".to_string()))
|| self.get_token() == &Some(TokenData::TSymbol("~".to_string()))
}
    // get functions
    /// Maps the current class-var keyword to its symbol kind; anything that
    /// is neither `static` nor `field` falls back to a local (`Var`).
    fn get_attribute(&self) -> VarKind {
        if self.tokenizer.is_class_static() {
            VarKind::Static
        } else if self.tokenizer.is_class_field() {
            VarKind::Field
        } else {
            VarKind::Var
        }
    }
    /// Borrow of the current token, if any.
    fn get_token(&self) -> &Option<TokenData> {
        self.tokenizer.get_token()
    }
    /// Returns the current identifier token's text.
    ///
    /// # Panics
    /// Panics when the current token is not an identifier.
    fn get_identifier(&self) -> String {
        if let Some(TokenData::TIdentifier(id)) = self.get_token() {
            id.to_string()
        } else {
            panic!("ERROR: CE.get_identifier()");
        }
    }
    /// Returns the current integer-constant token's value.
    ///
    /// # Panics
    /// Panics when the current token is not an integer constant.
    fn get_integer_const(&self) -> u16 {
        if let Some(TokenData::TIntVal(n)) = self.get_token() {
            *n
        } else {
            panic!("ERROR: CE.get_integer_const()");
        }
    }
fn get_var_type(&self) -> String {
if let Some(token) = self.get_token() {
match token {
TokenData::TKeyword(keyword) => match keyword {
Keyword::Int => "int".to_string(),
Keyword::Boolean => "boolean".to_string(),
Keyword::Char => "char".to_string(),
_ => {
panic!("ERROR: get var name");
}
},
TokenData::TIdentifier(id) => id.to_string(),
_ => {
panic!("ERROR: get var name");
}
}
} else {
panic!("ERROR: get var name");
}
}
    /// Returns a fresh, unique control-flow label (`L0`, `L1`, ...).
    fn get_label(&mut self) -> String {
        let label = format!("L{}", self.label_index);
        self.label_index += 1;
        label
    }
    /// Look ahead one token without consuming it.
    fn peek_token(&self) -> Option<TokenData> {
        self.tokenizer.peek_token()
    }
    // set functions
    /// Remembers the class name for qualifying subroutine calls.
    fn set_class_name(&mut self) {
        self.class_name = self.get_identifier();
    }
    /// Records whether the current subroutine's return type is `void`
    /// (class-typed returns are identifiers, hence the `else` branch).
    fn set_type(&mut self) {
        if let &Some(TokenData::TKeyword(keyword)) = &self.get_token() {
            self.is_void = keyword == &Keyword::Void;
        } else {
            self.is_void = false;
        }
    }
    // debugger
    /// Prints the current token together with a caller-supplied tag.
    fn debug_print_this_token(&self, message: &str) {
        println!("DEBUG TOKEN >> /* {} */ {:?}", message, self.get_token());
    }
    /// Dumps both symbol tables. (The "priint" typo is kept: the name is
    /// referenced elsewhere in this impl.)
    fn debug_priint_symbol_table(&self) {
        self.symbol_table.debug_print_class_table();
        self.symbol_table.debug_print_subroutine_table();
    }
}
|
use kube_derive::CustomResource;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
// NOTE(review): deriving `CustomResource` on a `union` is expected to be
// rejected by the derive macro (it supports structs); this file looks like a
// compile-fail (trybuild UI) test case -- confirm against the accompanying
// .stderr file before "fixing" it.
#[derive(CustomResource, Serialize, Deserialize, JsonSchema)]
union FooSpec {
    int: u32,
}
fn main() {}
|
use std::error::Error;
/// Logs into the listen.moe API with placeholder credentials and reports
/// success; any logger-initialization or login error aborts via `?`.
#[async_std::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // try_init fails if a global logger was already installed.
    env_logger::try_init()?;
    // Placeholder credentials; the third argument is presumably an optional
    // token/session -- TODO confirm against listen_moe::auth::login.
    listen_moe::auth::login("[Username/EMail address]".into(), "[Password]".into(), None).await?;
    println!("Login successful");
    Ok(())
}
|
// This file is part of Substrate.
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Running
//! Running this test can be done with
//! ```text
//! wasm-pack test --firefox --release --headless bin/node/browser-testing
//! ```
//! or (without `wasm-pack`)
//! ```text
//! CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUNNER=wasm-bindgen-test-runner WASM_BINDGEN_TEST_TIMEOUT=60 cargo test --target wasm32-unknown-unknown
//! ```
//! For debug information, such as the informant, run without the `--headless`
//! flag and open a browser to the url that `wasm-pack test` outputs.
//! For more information see https://rustwasm.github.io/docs/wasm-pack/.
use jsonrpc_core::types::{Id, MethodCall, Params, Success, Version};
use serde::de::DeserializeOwned;
use wasm_bindgen::JsValue;
use wasm_bindgen_futures::JsFuture;
use wasm_bindgen_test::{wasm_bindgen_test, wasm_bindgen_test_configure};
wasm_bindgen_test_configure!(run_in_browser);
/// Serializes a parameterless JSON-RPC 2.0 method call (id = 1) to a string.
fn rpc_call(method: &str) -> String {
    let call = MethodCall {
        jsonrpc: Some(Version::V2),
        method: method.into(),
        params: Params::None,
        id: Id::Num(1),
    };
    serde_json::to_string(&call).unwrap()
}
/// Extracts the `result` field of a JSON-RPC success response held in a JS
/// string and deserializes it into `T` (via a serialize/deserialize
/// round-trip, since `result` is an untyped `Value`).
fn deserialize_rpc_result<T: DeserializeOwned>(js_value: JsValue) -> T {
    let json = js_value.as_string().unwrap();
    let success = serde_json::from_str::<Success>(&json).unwrap();
    // We need to convert a `Value::Object` into a proper type.
    let reserialized = serde_json::to_string(&success.result).unwrap();
    serde_json::from_str(&reserialized).unwrap()
}
/// Smoke test: starts the in-browser light client and asserts that a
/// `system_chain` RPC round-trip answers "Development".
#[wasm_bindgen_test]
async fn runs() {
    let mut client = node_cli::start_client(None, "info".into()).await.unwrap();
    // Check that the node handles rpc calls.
    // TODO: Re-add the code that checks if the node is syncing.
    let chain_name: String = deserialize_rpc_result(
        JsFuture::from(client.rpc_send(&rpc_call("system_chain"))).await.unwrap(),
    );
    assert_eq!(chain_name, "Development");
}
|
//! The CXX code generator for constructing and compiling C++ code.
//!
//! This is intended to be used from Cargo build scripts to execute CXX's
//! C++ code generator, set up any additional compiler flags depending on
//! the use case, and make the C++ compiler invocation.
//!
//! <br>
//!
//! # Example
//!
//! Example of a canonical Cargo build script that builds a CXX bridge:
//!
//! ```no_run
//! // build.rs
//!
//! fn main() {
//! cxx_build::bridge("src/main.rs")
//! .file("src/demo.cc")
//! .flag_if_supported("-std=c++11")
//! .compile("cxxbridge-demo");
//!
//! println!("cargo:rerun-if-changed=src/main.rs");
//! println!("cargo:rerun-if-changed=src/demo.cc");
//! println!("cargo:rerun-if-changed=include/demo.h");
//! }
//! ```
//!
//! A runnable working setup with this build script is shown in the *demo*
//! directory of [https://github.com/dtolnay/cxx].
//!
//! [https://github.com/dtolnay/cxx]: https://github.com/dtolnay/cxx
//!
//! <br>
//!
//! # Alternatives
//!
//! For use in non-Cargo builds like Bazel or Buck, CXX provides an
//! alternate way of invoking the C++ code generator as a standalone command
//! line tool. The tool is packaged as the `cxxbridge-cmd` crate.
//!
//! ```bash
//! $ cargo install cxxbridge-cmd # or build it from the repo
//!
//! $ cxxbridge src/main.rs --header > path/to/mybridge.h
//! $ cxxbridge src/main.rs > path/to/mybridge.cc
//! ```
#![doc(html_root_url = "https://docs.rs/cxx-build/1.0.107")]
#![allow(
clippy::cast_sign_loss,
clippy::default_trait_access,
clippy::derive_partial_eq_without_eq,
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::explicit_auto_deref,
clippy::if_same_then_else,
clippy::inherent_to_string,
clippy::items_after_statements,
clippy::match_bool,
clippy::match_on_vec_items,
clippy::match_same_arms,
clippy::module_name_repetitions,
clippy::needless_doctest_main,
clippy::needless_pass_by_value,
clippy::new_without_default,
clippy::nonminimal_bool,
clippy::option_if_let_else,
clippy::or_fun_call,
clippy::redundant_else,
clippy::shadow_unrelated,
clippy::significant_drop_in_scrutinee,
clippy::similar_names,
clippy::single_match_else,
clippy::struct_excessive_bools,
clippy::too_many_arguments,
clippy::too_many_lines,
clippy::toplevel_ref_arg,
clippy::uninlined_format_args,
clippy::upper_case_acronyms,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/6983
clippy::wrong_self_convention
)]
mod cargo;
mod cfg;
mod deps;
mod error;
mod gen;
mod intern;
mod out;
mod paths;
mod syntax;
mod target;
mod vec;
use crate::cargo::CargoEnvCfgEvaluator;
use crate::deps::{Crate, HeaderDir};
use crate::error::{Error, Result};
use crate::gen::error::report;
use crate::gen::Opt;
use crate::paths::PathExt;
use crate::syntax::map::{Entry, UnorderedMap};
use crate::target::TargetDir;
use cc::Build;
use std::collections::BTreeSet;
use std::env;
use std::ffi::{OsStr, OsString};
use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
use std::process;
pub use crate::cfg::{Cfg, CFG};
/// This returns a [`cc::Build`] on which you should continue to set up any
/// additional source files or compiler flags, and lastly call its [`compile`]
/// method to execute the C++ build.
///
/// [`compile`]: https://docs.rs/cc/1.0.49/cc/struct.Build.html#method.compile
#[must_use]
pub fn bridge(rust_source_file: impl AsRef<Path>) -> Build {
    // Single-source convenience wrapper over `bridges`.
    bridges(iter::once(rust_source_file))
}
/// `cxx_build::bridge` but for when more than one file contains a
/// #\[cxx::bridge\] module.
///
/// ```no_run
/// let source_files = vec!["src/main.rs", "src/path/to/other.rs"];
/// cxx_build::bridges(source_files)
/// .file("src/demo.cc")
/// .flag_if_supported("-std=c++11")
/// .compile("cxxbridge-demo");
/// ```
#[must_use]
pub fn bridges(rust_source_files: impl IntoIterator<Item = impl AsRef<Path>>) -> Build {
    // Run the generator over every source file; on failure, print the
    // rendered error to stderr and abort the build script with status 1.
    let mut sources = rust_source_files.into_iter();
    match build(&mut sources) {
        Ok(build) => build,
        Err(err) => {
            let _ = writeln!(io::stderr(), "\n\ncxxbridge error: {}\n\n", report(err));
            process::exit(1);
        }
    }
}
// Cargo-provided build context shared by all generation steps.
struct Project {
    // Relative prefix (from CFG) under which this crate's headers are exposed.
    include_prefix: PathBuf,
    // CARGO_MANIFEST_DIR of the crate whose build script is running.
    manifest_dir: PathBuf,
    // The `links = "..."` value from Cargo.toml.
    links_attribute: Option<OsString>,
    // Output directory as received from Cargo.
    out_dir: PathBuf,
    // Directory into which to symlink all generated code.
    //
    // This is *not* used for an #include path, only as a debugging convenience.
    // Normally available at target/cxxbridge/ if we are able to know where the
    // target dir is, otherwise under a common scratch dir.
    //
    // The reason this isn't the #include dir is that we do not want builds to
    // have access to headers from arbitrary other parts of the dependency
    // graph. Using a global directory for all builds would be both a race
    // condition depending on what order Cargo randomly executes the build
    // scripts, as well as semantically undesirable for builds not to have to
    // declare their real dependencies.
    shared_dir: PathBuf,
}
impl Project {
    // Gathers all Cargo-provided environment state needed for a build.
    fn init() -> Result<Self> {
        let include_prefix = Path::new(CFG.include_prefix);
        // An absolute include prefix would escape OUT_DIR.
        assert!(include_prefix.is_relative());
        let include_prefix = include_prefix.components().collect();
        let links_attribute = env::var_os("CARGO_MANIFEST_LINKS");
        let manifest_dir = paths::manifest_dir()?;
        let out_dir = paths::out_dir()?;
        // Prefer target/cxxbridge/ when the target dir is discoverable,
        // otherwise fall back to a common scratch directory.
        let shared_dir = match target::find_target_dir(&out_dir) {
            TargetDir::Path(target_dir) => target_dir.join("cxxbridge"),
            TargetDir::Unknown => scratch::path("cxxbridge"),
        };
        Ok(Project {
            include_prefix,
            manifest_dir,
            links_attribute,
            out_dir,
            shared_dir,
        })
    }
}
// We lay out the OUT_DIR as follows. Everything is namespaced under a cxxbridge
// subdirectory to avoid stomping on other things that the caller's build script
// might be doing inside OUT_DIR.
//
// $OUT_DIR/
// cxxbridge/
// crate/
// $CARGO_PKG_NAME -> $CARGO_MANIFEST_DIR
// include/
// rust/
// cxx.h
// $CARGO_PKG_NAME/
// .../
// lib.rs.h
// sources/
// $CARGO_PKG_NAME/
// .../
// lib.rs.cc
//
// The crate/ and include/ directories are placed on the #include path for the
// current build as well as for downstream builds that have a direct dependency
// on the current crate.
// Core driver: validates configuration, generates C++ for each bridge module,
// and returns a cc::Build preloaded with the sources and include paths.
fn build(rust_source_files: &mut dyn Iterator<Item = impl AsRef<Path>>) -> Result<Build> {
    let ref prj = Project::init()?;
    validate_cfg(prj)?;
    let this_crate = make_this_crate(prj)?;
    let mut build = Build::new();
    build.cpp(true);
    build.cpp_link_stdlib(None); // linked via link-cplusplus crate
    for path in rust_source_files {
        generate_bridge(prj, &mut build, path.as_ref())?;
    }
    // Emit DEP_* metadata so downstream build scripts see our header dirs.
    this_crate.print_to_cargo();
    eprintln!("\nCXX include path:");
    for header_dir in this_crate.header_dirs {
        build.include(&header_dir.path);
        if header_dir.exported {
            eprintln!("  {}", header_dir.path.display());
        } else {
            eprintln!("  {} (private)", header_dir.path.display());
        }
    }
    Ok(build)
}
// Sanity-checks the user-provided CFG: exported dirs must be absolute,
// exported prefixes non-empty, and any export requires a Cargo `links`
// attribute (the mechanism by which metadata reaches downstream crates).
fn validate_cfg(prj: &Project) -> Result<()> {
    for exported_dir in &CFG.exported_header_dirs {
        if !exported_dir.is_absolute() {
            return Err(Error::ExportedDirNotAbsolute(exported_dir));
        }
    }
    for prefix in &CFG.exported_header_prefixes {
        if prefix.is_empty() {
            return Err(Error::ExportedEmptyPrefix);
        }
    }
    if prj.links_attribute.is_none() {
        if !CFG.exported_header_dirs.is_empty() {
            return Err(Error::ExportedDirsWithoutLinks);
        }
        if !CFG.exported_header_prefixes.is_empty() {
            return Err(Error::ExportedPrefixesWithoutLinks);
        }
        if !CFG.exported_header_links.is_empty() {
            return Err(Error::ExportedLinksWithoutLinks);
        }
    }
    Ok(())
}
// Builds the Crate record for the current crate: its generated-code dir and
// manifest dir first, then explicitly exported dirs, then the header dirs of
// direct dependencies (deduplicated), finally erroring on any
// exported_header_links / exported_header_prefixes entry that matched nothing.
fn make_this_crate(prj: &Project) -> Result<Crate> {
    let crate_dir = make_crate_dir(prj);
    let include_dir = make_include_dir(prj)?;
    let mut this_crate = Crate {
        include_prefix: Some(prj.include_prefix.clone()),
        links: prj.links_attribute.clone(),
        header_dirs: Vec::new(),
    };
    // The generated code directory (include_dir) is placed in front of
    // crate_dir on the include line so that `#include "path/to/file.rs"` from
    // C++ "magically" works and refers to the API generated from that Rust
    // source file.
    this_crate.header_dirs.push(HeaderDir {
        exported: true,
        path: include_dir,
    });
    this_crate.header_dirs.push(HeaderDir {
        exported: true,
        path: crate_dir,
    });
    for exported_dir in &CFG.exported_header_dirs {
        this_crate.header_dirs.push(HeaderDir {
            exported: true,
            path: PathBuf::from(exported_dir),
        });
    }
    // Track which export entries matched a dependency so unused (likely
    // typo'd) entries can be reported as errors below.
    let mut header_dirs_index = UnorderedMap::new();
    let mut used_header_links = BTreeSet::new();
    let mut used_header_prefixes = BTreeSet::new();
    for krate in deps::direct_dependencies() {
        let mut is_link_exported = || match &krate.links {
            None => false,
            Some(links_attribute) => CFG.exported_header_links.iter().any(|&exported| {
                let matches = links_attribute == exported;
                if matches {
                    used_header_links.insert(exported);
                }
                matches
            }),
        };
        let mut is_prefix_exported = || match &krate.include_prefix {
            None => false,
            Some(include_prefix) => CFG.exported_header_prefixes.iter().any(|&exported| {
                let matches = include_prefix.starts_with(exported);
                if matches {
                    used_header_prefixes.insert(exported);
                }
                matches
            }),
        };
        let exported = is_link_exported() || is_prefix_exported();
        for dir in krate.header_dirs {
            // Deduplicate dirs reachable via multiple transitive dependencies.
            match header_dirs_index.entry(dir.path.clone()) {
                Entry::Vacant(entry) => {
                    entry.insert(this_crate.header_dirs.len());
                    this_crate.header_dirs.push(HeaderDir {
                        exported,
                        path: dir.path,
                    });
                }
                Entry::Occupied(entry) => {
                    // A dir counts as exported if ANY route exports it.
                    let index = *entry.get();
                    this_crate.header_dirs[index].exported |= exported;
                }
            }
        }
    }
    if let Some(unused) = CFG
        .exported_header_links
        .iter()
        .find(|&exported| !used_header_links.contains(exported))
    {
        return Err(Error::UnusedExportedLinks(unused));
    }
    if let Some(unused) = CFG
        .exported_header_prefixes
        .iter()
        .find(|&exported| !used_header_prefixes.contains(exported))
    {
        return Err(Error::UnusedExportedPrefix(unused));
    }
    Ok(this_crate)
}
// Symlinks $OUT_DIR/cxxbridge/crate/$include_prefix to the crate's manifest
// dir so its own headers resolve; on platforms where symlinking fails
// (non-unix), falls back to a bounded-depth best-effort copy of headers.
fn make_crate_dir(prj: &Project) -> PathBuf {
    if prj.include_prefix.as_os_str().is_empty() {
        return prj.manifest_dir.clone();
    }
    let crate_dir = prj.out_dir.join("cxxbridge").join("crate");
    let ref link = crate_dir.join(&prj.include_prefix);
    let ref manifest_dir = prj.manifest_dir;
    if out::relative_symlink_dir(manifest_dir, link).is_err() && cfg!(not(unix)) {
        // Mark the copied tree as a cache dir so backup/indexing tools skip
        // it. The `\` continuations strip the leading whitespace.
        let cachedir_tag = "\
        Signature: 8a477f597d28d172789f06886806bc55\n\
        # This file is a cache directory tag created by cxx.\n\
        # For information about cache directory tags see https://bford.info/cachedir/\n";
        let _ = out::write(crate_dir.join("CACHEDIR.TAG"), cachedir_tag.as_bytes());
        let max_depth = 6;
        best_effort_copy_headers(manifest_dir, link, max_depth);
    }
    crate_dir
}
// Materializes rust/cxx.h under the include dir: either symlinking the header
// exported by the cxx crate (DEP_CXXBRIDGE1_HEADER) or writing the vendored
// copy into the shared dir and linking to that.
fn make_include_dir(prj: &Project) -> Result<PathBuf> {
    let include_dir = prj.out_dir.join("cxxbridge").join("include");
    let cxx_h = include_dir.join("rust").join("cxx.h");
    let ref shared_cxx_h = prj.shared_dir.join("rust").join("cxx.h");
    if let Some(ref original) = env::var_os("DEP_CXXBRIDGE1_HEADER") {
        out::absolute_symlink_file(original, cxx_h)?;
        out::absolute_symlink_file(original, shared_cxx_h)?;
    } else {
        out::write(shared_cxx_h, gen::include::HEADER.as_bytes())?;
        out::relative_symlink_file(shared_cxx_h, cxx_h)?;
    }
    Ok(include_dir)
}
// Runs the cxx code generator for one source file, writes the generated
// header and implementation under OUT_DIR, registers the .cc with the build,
// and mirrors both into the shared (debugging-convenience) directory.
fn generate_bridge(prj: &Project, build: &mut Build, rust_source_file: &Path) -> Result<()> {
    let opt = Opt {
        allow_dot_includes: false,
        cfg_evaluator: Box::new(CargoEnvCfgEvaluator),
        doxygen: CFG.doxygen,
        ..Opt::default()
    };
    let generated = gen::generate_from_path(rust_source_file, &opt);
    let ref rel_path = paths::local_relative_path(rust_source_file);
    let cxxbridge = prj.out_dir.join("cxxbridge");
    let include_dir = cxxbridge.join("include").join(&prj.include_prefix);
    let sources_dir = cxxbridge.join("sources").join(&prj.include_prefix);
    let ref rel_path_h = rel_path.with_appended_extension(".h");
    let ref header_path = include_dir.join(rel_path_h);
    out::write(header_path, &generated.header)?;
    // Also let `#include "file.rs"` (without the .h) resolve to the header.
    let ref link_path = include_dir.join(rel_path);
    let _ = out::relative_symlink_file(header_path, link_path);
    let ref rel_path_cc = rel_path.with_appended_extension(".cc");
    let ref implementation_path = sources_dir.join(rel_path_cc);
    out::write(implementation_path, &generated.implementation)?;
    build.file(implementation_path);
    // Best-effort mirrors; failures are deliberately ignored.
    let shared_h = prj.shared_dir.join(&prj.include_prefix).join(rel_path_h);
    let shared_cc = prj.shared_dir.join(&prj.include_prefix).join(rel_path_cc);
    let _ = out::relative_symlink_file(header_path, shared_h);
    let _ = out::relative_symlink_file(implementation_path, shared_cc);
    Ok(())
}
// Recursively copies header files (.h/.hh/.hpp) from src to dst, at most
// max_depth directory levels deep, skipping hidden entries and directories
// that belong to other crates or previous cache copies. All I/O errors are
// deliberately swallowed -- this is a fallback for when symlinking failed.
fn best_effort_copy_headers(src: &Path, dst: &Path, max_depth: usize) {
    // Not using crate::gen::fs because we aren't reporting the errors.
    use std::fs;
    let mut dst_created = false;
    let mut entries = match fs::read_dir(src) {
        Ok(entries) => entries,
        Err(_) => return,
    };
    // Also stops at the first Err item yielded by the directory iterator.
    while let Some(Ok(entry)) = entries.next() {
        let file_name = entry.file_name();
        if file_name.to_string_lossy().starts_with('.') {
            continue;
        }
        match entry.file_type() {
            Ok(file_type) if file_type.is_dir() && max_depth > 0 => {
                let src = entry.path();
                // A nested Cargo.toml means a different crate; CACHEDIR.TAG
                // marks one of our own earlier fallback copies.
                if src.join("Cargo.toml").exists() || src.join("CACHEDIR.TAG").exists() {
                    continue;
                }
                let dst = dst.join(file_name);
                best_effort_copy_headers(&src, &dst, max_depth - 1);
            }
            Ok(file_type) if file_type.is_file() => {
                let src = entry.path();
                match src.extension().and_then(OsStr::to_str) {
                    Some("h" | "hh" | "hpp") => {}
                    _ => continue,
                }
                // Create the destination dir lazily, only once a header is
                // actually found in this directory.
                if !dst_created && fs::create_dir_all(dst).is_err() {
                    return;
                }
                dst_created = true;
                let dst = dst.join(file_name);
                let _ = fs::remove_file(&dst);
                let _ = fs::copy(src, dst);
            }
            _ => {}
        }
    }
}
fn env_os(key: impl AsRef<OsStr>) -> Result<OsString> {
let key = key.as_ref();
env::var_os(key).ok_or_else(|| Error::NoEnv(key.to_owned()))
}
|
use super::webgl::{WebGlAttributeLocation, WebGlRenderingContext};
use web_sys::WebGlUniformLocation;
/// Vertex shader source: forwards the texture coordinate unchanged and
/// transforms each vertex position by the `u_translate` matrix.
const VERTEX_SHADER: &str = r#"
attribute vec4 a_vertex;
attribute vec2 a_textureCoord;
uniform mat4 u_translate;
varying vec2 v_textureCoord;

void main() {
    v_textureCoord = a_textureCoord;
    gl_Position = u_translate * a_vertex;
}
"#;
/// Fragment shader source: composites texture1 (when `u_flagTexture1` is
/// nonzero) over the background color, then texture0 over that result,
/// using the `blend` helper for alpha compositing.
///
/// NOTE(review): `blend` divides by `out_a`, which is zero when both inputs
/// are fully transparent — confirm the resulting inf/NaN is acceptable for
/// the target drivers.
const FRAGMENT_SHADER: &str = r#"
precision mediump float;

uniform vec4 u_bgColor;
varying vec2 v_textureCoord;
uniform sampler2D u_texture0;
uniform sampler2D u_texture1;
uniform int u_flagTexture1;

vec4 blend(vec4 bg, vec4 fr) {
    float dist_a = bg.w;
    float src_a = fr.w;
    float out_a = src_a + dist_a * (1.0 - src_a);
    vec3 out_rgb = (fr.xyz * src_a + bg.xyz * dist_a * (1.0 - src_a)) / out_a;
    return vec4(out_rgb, out_a);
}

void main() {
    vec4 smpColor0 = texture2D(u_texture0, v_textureCoord);
    vec4 smpColor1 = u_flagTexture1 != 0 ? texture2D(u_texture1, v_textureCoord) : vec4(0.0,0.0,0.0,0.0);
    vec4 color_a = u_bgColor;
    vec4 color_b = blend(color_a, smpColor1);
    vec4 color_c = blend(color_b, smpColor0);
    gl_FragColor = color_c;
}
"#;
/// A compiled and linked WebGL program for textured drawing, together with
/// every attribute/uniform location looked up once at construction time.
pub struct TableTextureProgram {
    program: web_sys::WebGlProgram,
    /// Vertex position attribute (`a_vertex`).
    pub a_vertex_location: WebGlAttributeLocation,
    /// Texture coordinate attribute (`a_textureCoord`).
    pub a_texture_coord_location: WebGlAttributeLocation,
    /// Transform matrix uniform (`u_translate`).
    pub u_translate_location: WebGlUniformLocation,
    /// Background color uniform (`u_bgColor`).
    pub u_bg_color_location: WebGlUniformLocation,
    /// First texture sampler uniform (`u_texture0`).
    pub u_texture_0_location: WebGlUniformLocation,
    /// Second texture sampler uniform (`u_texture1`).
    pub u_texture_1_location: WebGlUniformLocation,
    /// Flag uniform selecting whether texture1 is sampled (`u_flagTexture1`).
    pub u_flag_texture_1_location: WebGlUniformLocation,
}
impl TableTextureProgram {
    /// Compiles both shaders, links them into a program, and resolves every
    /// attribute/uniform location up front.
    ///
    /// Panics (via `unwrap`) if compilation, linking, or any location
    /// lookup fails.
    pub fn new(gl: &WebGlRenderingContext) -> Self {
        let vertex_shader = super::compile_shader(
            gl,
            VERTEX_SHADER,
            web_sys::WebGlRenderingContext::VERTEX_SHADER,
        )
        .unwrap();
        let fragment_shader = super::compile_shader(
            gl,
            FRAGMENT_SHADER,
            web_sys::WebGlRenderingContext::FRAGMENT_SHADER,
        )
        .unwrap();
        let program = super::link_program(gl, &vertex_shader, &fragment_shader).unwrap();

        // Small lookup helpers so each resolution below stays on one line.
        let attrib =
            |name: &str| WebGlAttributeLocation(gl.get_attrib_location(&program, name) as u32);
        let uniform = |name: &str| gl.get_uniform_location(&program, name).unwrap();

        let a_vertex_location = attrib("a_vertex");
        let a_texture_coord_location = attrib("a_textureCoord");
        let u_translate_location = uniform("u_translate");
        let u_bg_color_location = uniform("u_bgColor");
        let u_texture_0_location = uniform("u_texture0");
        let u_texture_1_location = uniform("u_texture1");
        let u_flag_texture_1_location = uniform("u_flagTexture1");

        Self {
            program,
            a_vertex_location,
            a_texture_coord_location,
            u_translate_location,
            u_bg_color_location,
            u_texture_0_location,
            u_texture_1_location,
            u_flag_texture_1_location,
        }
    }

    /// Makes this program the active GL program.
    pub fn use_program(&self, gl: &WebGlRenderingContext) {
        gl.use_program(Some(&self.program));
    }
}
|
//! Library for decoding and encoding JSON streams.
//!
//! Example:
//! ```
//! # use futures::executor::block_on;
//! let expected = ("one".to_string(), 2.0, vec![3, 4]);
//! let stream = destream_json::encode(&expected).unwrap();
//! let actual = block_on(destream_json::try_decode((), stream)).unwrap();
//! assert_eq!(expected, actual);
//! ```
//!
//! Deviations from the [JSON spec](https://www.json.org/):
//! - `destream_json` will not error out if asked to decode or encode a non-string key in a JSON
//! object (i.e., it supports a superset of the official JSON spec). This may cause issues
//! when using another JSON library to decode a stream encoded by `destream_json`. This can be
//! mitigated by simply not using non-string object keys, or adding an explicit check at encoding
//! time.
mod constants;
pub mod de;
pub mod en;

pub use de::{decode, try_decode};
pub use en::{encode, encode_map, encode_seq};

// Gate on the Cargo feature, matching the `#[cfg(feature = "tokio-io")]`
// guard used by the test module below. The previous bare `#[cfg(tokio_io)]`
// only fires for a custom `--cfg tokio_io` compiler flag, which would leave
// `read_from` unexported when the feature is enabled.
#[cfg(feature = "tokio-io")]
pub use de::read_from;
#[cfg(test)]
mod tests {
    use std::collections::{BTreeMap, HashMap, HashSet};
    use std::fmt;
    use std::iter::FromIterator;
    use std::marker::PhantomData;

    use async_trait::async_trait;
    use bytes::Bytes;
    use destream::de::{self, FromStream, Visitor};
    use destream::en::IntoStream;
    use futures::future;
    use futures::stream::{self, Stream, StreamExt, TryStreamExt};

    use super::de::*;
    use super::en::*;

    /// Decodes `encoded` at every chunk size from 1 to its length and
    /// asserts each result equals `expected`, exercising every partial-input
    /// path through the decoder.
    async fn test_decode<T: FromStream<Context = ()> + PartialEq + fmt::Debug>(
        encoded: &str,
        expected: T,
    ) {
        for i in 1..encoded.len() {
            let source = stream::iter(encoded.as_bytes().into_iter().cloned())
                .chunks(i)
                .map(Bytes::from);

            let actual: T = decode((), source).await.unwrap();
            assert_eq!(expected, actual)
        }
    }

    /// Collects an encoded byte stream into one buffer and asserts it equals
    /// the expected JSON text.
    async fn test_encode<'en, S: Stream<Item = Result<Bytes, super::en::Error>> + 'en>(
        encoded_stream: S,
        expected: &str,
    ) {
        let encoded = encoded_stream
            .try_fold(vec![], |mut buffer, chunk| {
                buffer.extend(chunk);
                future::ready(Ok(buffer))
            })
            .await
            .unwrap();

        assert_eq!(expected, String::from_utf8(encoded).unwrap());
    }

    /// Encodes a single value and checks the resulting JSON text.
    async fn test_encode_value<'en, T: IntoStream<'en> + PartialEq + fmt::Debug + 'en>(
        value: T,
        expected: &str,
    ) {
        test_encode(encode(value).unwrap(), expected).await;
    }

    /// Encodes a stream of items as a JSON list and checks the text.
    async fn test_encode_list<
        'en,
        T: IntoStream<'en> + 'en,
        S: Stream<Item = T> + Send + Unpin + 'en,
    >(
        seq: S,
        expected: &str,
    ) {
        test_encode(encode_seq(seq), expected).await;
    }

    /// Encodes a stream of key/value pairs as a JSON object and checks the text.
    async fn test_encode_map<
        'en,
        K: IntoStream<'en> + 'en,
        V: IntoStream<'en> + 'en,
        S: Stream<Item = (K, V)> + Send + Unpin + 'en,
    >(
        map: S,
        expected: &str,
    ) {
        test_encode(encode_map(map), expected).await;
    }

    /// Round-trips scalars: unit/bool, unsigned and signed integers at their
    /// extremes, floats in scientific notation, and strings with escapes.
    #[tokio::test]
    async fn test_json_primitives() {
        // unit and booleans
        test_decode("null", ()).await;

        test_decode("true", true).await;
        test_decode("false", false).await;

        test_encode_value(true, "true").await;
        test_encode_value(false, "false").await;

        // unsigned integers, including surrounding whitespace and 2^63
        test_decode("1", 1u8).await;
        test_decode(" 2 ", 2u16).await;
        test_decode("4658 ", 4658_u32).await;
        test_decode(&2u64.pow(63).to_string(), 2u64.pow(63)).await;

        test_encode_value(1u8, "1").await;
        test_encode_value(2u16, "2").await;
        test_encode_value(4658_u32, "4658").await;
        test_encode_value(2u64.pow(63), &2u64.pow(63).to_string()).await;

        // signed integers down to i64::MIN
        test_decode("-1", -1i8).await;
        test_decode("\t\n-32", -32i16).await;
        test_decode("53\t", 53i32).await;
        test_decode(&(-2i64).pow(63).to_string(), (-2i64).pow(63)).await;

        test_encode_value(-1i8, "-1").await;
        test_encode_value(-32i16, "-32").await;
        test_encode_value(53i32, "53").await;
        test_encode_value((-2i64).pow(63), &(-2i64).pow(63).to_string()).await;

        // floats, including exponent notation
        test_decode("2e2", 2e2_f32).await;
        test_decode("-2e-3", -2e-3_f64).await;
        test_decode("3.14", 3.14_f32).await;
        test_decode("-1.414e4", -1.414e4_f64).await;

        test_encode_value(2e2_f32, "200").await;
        test_encode_value(-2e3, "-2000").await;
        test_encode_value(3.14_f32, "3.14").await;
        test_encode_value(-1.414e4_f64, "-14140").await;

        // strings, including embedded quotes and a trailing backslash
        test_decode("\t\r\n\" hello world \"", " hello world ".to_string()).await;
        test_encode_value("hello world", "\"hello world\"").await;

        let nested = "string \"within\" string".to_string();
        let expected = "\"string \\\"within\\\" string\"";
        test_encode_value(nested.clone(), expected).await;
        test_decode(expected, nested).await;

        let terminal = "ends in a \\".to_string();
        let expected = "\"ends in a \\\\\"";
        test_encode_value(terminal.clone(), expected).await;
        test_decode(expected, terminal).await;
    }

    /// Round-trips a UTF-8 string through `Bytes` encoding and decoding.
    #[tokio::test]
    async fn test_bytes() {
        // NOTE(review): BytesVisitor is defined here but never used below —
        // decoding goes through the `FromStream` impl for `Bytes` directly.
        struct BytesVisitor;

        impl Visitor for BytesVisitor {
            type Value = Vec<u8>;

            fn expecting() -> &'static str {
                "a byte buffer"
            }

            fn visit_byte_buf<E: de::Error>(self, v: Vec<u8>) -> Result<Self::Value, E> {
                Ok(v)
            }
        }

        let utf8_str = "मकर संक्रान्ति";
        let encoded = encode(Bytes::from(utf8_str.as_bytes())).unwrap();
        let decoded: Bytes = try_decode((), encoded).await.unwrap();
        assert_eq!(utf8_str, std::str::from_utf8(&decoded).unwrap());
    }

    /// Covers sequences: empty lists, homogeneous vectors, heterogeneous
    /// tuples, nesting, whitespace tolerance, and `HashSet` collection.
    #[tokio::test]
    async fn test_seq() {
        test_encode_list(stream::empty::<u8>(), "[]").await;

        test_decode("[1, 2, 3]", vec![1, 2, 3]).await;
        test_encode_value(&[1u8, 2u8, 3u8], "[1,2,3]").await;

        test_decode("[1, 2, null]", (1, 2, ())).await;
        test_encode_value((1u8, 2u8, ()), "[1,2,null]").await;

        test_encode_list(stream::iter(&[1u8, 2u8, 3u8]), "[1,2,3]").await;
        test_encode_list(
            stream::iter(vec![vec![1, 2, 3], vec![], vec![4]]),
            "[[1,2,3],[],[4]]",
        )
        .await;

        test_decode(
            "\t[\r\n\rtrue,\r\n\t-1,\r\n\t\"hello world. \"\r\n]",
            (true, -1i16, "hello world. ".to_string()),
        )
        .await;

        test_encode_value(
            (true, -1i16, "hello world. "),
            "[true,-1,\"hello world. \"]",
        )
        .await;

        test_encode_list(
            stream::iter(vec!["hello ", "\tworld"]),
            "[\"hello \",\"\tworld\"]",
        )
        .await;

        test_decode(" [ 1.23, 4e3, -3.45]\n", [1.23, 4e3, -3.45]).await;
        test_encode_value(&[1.23, 4e3, -3.45], "[1.23,4000,-3.45]").await;

        test_decode(
            "[\"one\", \"two\", \"three\"]",
            HashSet::<String>::from_iter(vec!["one", "two", "three"].into_iter().map(String::from)),
        )
        .await;

        test_encode_value(&["one", "two", "three"], "[\"one\",\"two\",\"three\"]").await;
    }

    /// Covers maps with string and (non-spec) numeric keys, plus `Option`
    /// values encoded as `null`.
    #[tokio::test]
    async fn test_map() {
        let mut map = HashMap::<String, bool>::from_iter(vec![
            ("k1".to_string(), true),
            ("k2".to_string(), false),
        ]);

        test_decode("\r\n\t{ \"k1\":\ttrue , \"k2\": false\r\n}", map.clone()).await;

        map.remove("k2");

        test_encode_value(map.clone(), "{\"k1\":true}").await;
        test_encode_map(stream::iter(map), "{\"k1\":true}").await;

        // numeric keys: a deliberate superset of the JSON spec
        let map = BTreeMap::<i8, Option<bool>>::from_iter(vec![(-1, Some(true)), (2, None)]);

        test_decode("\r\n\t{ -1:\ttrue, 2:null}", map.clone()).await;
        test_encode_value(map.clone(), "{-1:true,2:null}").await;
        test_encode_map(stream::iter(map), "{-1:true,2:null}").await;
    }

    /// Verifies that a failed `next_value`/`next_element` attempt with the
    /// wrong type is recoverable: the access can be retried with the correct
    /// type afterwards.
    #[tokio::test]
    async fn test_err() {
        #[derive(Debug, Default, Eq, PartialEq)]
        struct TestMap;

        #[async_trait]
        impl FromStream for TestMap {
            type Context = ();

            async fn from_stream<D: de::Decoder>(_: (), decoder: &mut D) -> Result<Self, D::Error> {
                decoder.decode_map(TestVisitor::<Self>::default()).await
            }
        }

        #[derive(Debug, Default, Eq, PartialEq)]
        struct TestSeq;

        #[async_trait]
        impl FromStream for TestSeq {
            type Context = ();

            async fn from_stream<D: de::Decoder>(_: (), decoder: &mut D) -> Result<Self, D::Error> {
                decoder.decode_seq(TestVisitor::<Self>::default()).await
            }
        }

        #[derive(Default)]
        struct TestVisitor<T> {
            phantom: PhantomData<T>,
        }

        #[async_trait]
        impl<T: Default + Send> de::Visitor for TestVisitor<T> {
            type Value = T;

            fn expecting() -> &'static str {
                "a Test struct"
            }

            async fn visit_map<A: de::MapAccess>(self, mut access: A) -> Result<T, A::Error> {
                let _key = access.next_key::<String>(()).await?;
                // value is a list, so asking for a String must fail...
                assert!(access.next_value::<String>(()).await.is_err());
                // ...but the correct type must still succeed afterwards.
                assert!(access.next_value::<Vec<i64>>(()).await.is_ok());
                Ok(T::default())
            }

            async fn visit_seq<A: de::SeqAccess>(self, mut access: A) -> Result<T, A::Error> {
                // elements are numbers: two wrong-typed reads fail, then the
                // right type succeeds.
                assert!(access.next_element::<String>(()).await.is_err());
                assert!(access.next_element::<Vec<i64>>(()).await.is_err());
                assert!(access.next_element::<i64>(()).await.is_ok());
                Ok(T::default())
            }
        }

        let encoded = "{\"k1\": [1, 2, 3]}";
        let source = stream::iter(encoded.as_bytes().into_iter().cloned())
            .chunks(5)
            .map(Bytes::from);

        let actual: TestMap = decode((), source).await.unwrap();
        assert_eq!(actual, TestMap);

        let encoded = "\t[ 1,2, 3]";
        let source = stream::iter(encoded.as_bytes().into_iter().cloned())
            .chunks(2)
            .map(Bytes::from);

        let actual: TestSeq = decode((), source).await.unwrap();
        assert_eq!(actual, TestSeq);
    }

    /// Decodes from a `tokio` `AsyncRead` source (feature-gated).
    #[cfg(feature = "tokio-io")]
    #[tokio::test]
    async fn test_async_read() {
        use std::io::Cursor;

        let encoded = "[\"hello\", 1, {}]";
        let cursor = Cursor::new(encoded.as_bytes());
        let decoded: (String, i64, HashMap<String, bool>) = read_from((), cursor).await.unwrap();
        assert_eq!(
            decoded,
            ("hello".to_string(), 1i64, HashMap::<String, bool>::new())
        );
    }
}
|
use crate::delay::Delay;
/// Delay-based all-pass-style filter: a delay line with a single gain
/// coefficient applied to both the feedback path and the output.
pub struct DelayAPF {
    // The delay line providing the feedback path.
    delay: Delay,
    // Gain coefficient `g` (see `next` for how it is applied).
    g: f64,
}
impl DelayAPF {
    /// Creates a filter with a delay line of `delay_length` samples and
    /// gain coefficient `g`.
    pub fn new(delay_length: usize, g: f64) -> DelayAPF {
        DelayAPF {
            delay: Delay::new(delay_length),
            // field-init shorthand (was the redundant `g: g`)
            g,
        }
    }

    /// Processes one input sample `s`: the delayed sample scaled by `g` is
    /// summed with the input and written back into the delay line; the
    /// written value, scaled by `-g`, is returned.
    pub fn next(&mut self, s: f64) -> f64 {
        let delayed_sample = self.delay.read();
        let next_sample = (delayed_sample * self.g) + s;
        self.delay.write_and_advance(next_sample);
        next_sample * -self.g
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_util::*;

    /// Smoke test: runs one second of noise (44100 samples) through the
    /// filter and writes input and output wav files for manual listening.
    /// There are no assertions — this only checks that processing runs.
    #[test]
    fn test_delay_apf() {
        let noise = generate_noise(44100);
        save(&noise, "test_delay_apf_original.wav");

        let mut lpfcf = DelayAPF::new(140, 0.5);
        let filtered: Vec<f64> = noise.into_iter().map(|s| lpfcf.next(s)).collect();
        save(&filtered, "test_delay_apf_filtered.wav");
    }
}
|
/// Snapshot fixtures (`.sna` format) embedded into the binary at compile
/// time for tape-loading tests.
pub mod tape {
    /// 48K machine snapshot.
    pub const SNAPSHOT_SNA_48K: &[u8] = include_bytes!("tape_48k.sna");
    /// 128K machine snapshot.
    pub const SNAPSHOT_SNA_128K: &[u8] = include_bytes!("tape_128k.sna");
}
|
extern crate skim;
use skim::prelude::*;
/// This example illustrates downcasting custom structs that implement
/// `SkimItem` after calling `Skim::run_with`.
#[derive(Debug, Clone)]
struct Item {
    /// The text this item exposes for matching and previewing.
    text: String,
}
impl SkimItem for Item {
    /// The text skim matches against — borrowed, no allocation.
    fn text(&self) -> Cow<str> {
        Cow::from(self.text.as_str())
    }

    /// Preview pane content: simply echo the item's own text.
    fn preview(&self, _context: PreviewContext) -> ItemPreview {
        ItemPreview::Text(self.text.clone())
    }
}
/// Runs skim over three toy items, then downcasts each selection back to
/// the concrete `Item` type and prints it.
pub fn main() {
    // Half-screen, multi-select, with the preview pane enabled.
    let options = SkimOptionsBuilder::default()
        .height(Some("50%"))
        .multi(true)
        .preview(Some(""))
        .build()
        .unwrap();

    // Feed the items through a channel, then close it so skim knows the
    // input is complete.
    let (tx, rx): (SkimItemSender, SkimItemReceiver) = unbounded();
    for &text in ["a", "b", "c"].iter() {
        tx.send(Arc::new(Item { text: text.to_string() })).unwrap();
    }
    drop(tx);

    // Downcast each selected trait object back to our concrete Item.
    let selected: Vec<Item> = Skim::run_with(&options, Some(rx))
        .map(|out| out.selected_items)
        .unwrap_or_else(Vec::new)
        .iter()
        .map(|selected_item| (**selected_item).as_any().downcast_ref::<Item>().unwrap().clone())
        .collect();

    for item in selected {
        println!("{:?}", item);
    }
}
|
/// Loads the file and if some expected output is given checks that it matches
fn test_file(file: &str, expected: Option<&[u8]>) {
    use scryer_prolog::*;

    // Fresh machine with empty stdin; keep a clone of the output stream so
    // we can read back everything the program wrote.
    let input = machine::Stream::from("");
    let output = machine::Stream::from(String::new());
    let mut wam = machine::Machine::new(input, output.clone());

    // Feed the whole Prolog source file to the machine.
    wam.load_file(
        file.into(),
        machine::Stream::from(
            std::fs::read_to_string(AsRef::<std::path::Path>::as_ref(file)).unwrap(),
        ),
    );

    // Compare captured output bytes against the expectation, if any.
    if let Some(expected) = expected {
        let output = output.bytes().unwrap();
        assert_eq!(output.as_slice(), expected);
    }
}
// Each test loads one Prolog source file and checks the machine's output;
// an expected value of "" means the program must print nothing at all
// (its asserts are expected to hold silently).
#[test]
fn builtins() {
    test_file("src/tests/builtins.pl", Some("".as_bytes()));
}

#[test]
fn call_with_inference_limit() {
    test_file("src/tests/call_with_inference_limit.pl", Some("".as_bytes()));
}

#[test]
fn facts() {
    test_file("src/tests/facts.pl", Some("".as_bytes()));
}

#[test]
fn hello_world() {
    test_file("src/tests/hello_world.pl", Some(b"Hello World!\n"));
}

#[test]
fn predicates() {
    test_file("src/tests/predicates.pl", Some("".as_bytes()));
}

#[test]
fn rules() {
    test_file("src/tests/rules.pl", Some("".as_bytes()));
}

#[test]
#[ignore] // ignored as this does not appear to terminate
fn setup_call_cleanup() {
    test_file("src/tests/setup_call_cleanup.pl", Some("".as_bytes()));
}

#[test]
fn clpz() {
    test_file("src/tests/clpz/test_clpz.pl", Some("".as_bytes()));
}
|
//! Generates OpenTelemetry OTLP trace payloads
//!
//! [Specification](https://opentelemetry.io/docs/reference/specification/protocol/otlp/)
//!
//! This format is valid for OTLP/gRPC and binary OTLP/HTTP messages. The
//! experimental JSON OTLP/HTTP format can also be supported but is not
//! currently implemented.
use crate::payload::{Error, Serialize};
use opentelemetry_proto::tonic::trace::v1;
use prost::Message;
use rand::{distributions::Standard, prelude::Distribution, Rng};
use std::io::Write;
use super::{common::AsciiString, Generator};
/// Wrapper to generate arbitrary OpenTelemetry [`ExportTraceServiceRequest`](opentelemetry_proto::tonic::collector::trace::v1::ExportTraceServiceRequest)
///
/// Accumulates the spans that will be nested inside the request.
struct ExportTraceServiceRequest(Vec<Span>);
impl ExportTraceServiceRequest {
    /// Converts the accumulated spans into the prost-generated request type,
    /// nesting them under a single resource / instrumentation-library entry
    /// with empty schema URLs.
    fn into_prost_type(
        self,
    ) -> opentelemetry_proto::tonic::collector::trace::v1::ExportTraceServiceRequest {
        let spans: Vec<v1::Span> = self.0.into_iter().map(|span| span.0).collect();
        opentelemetry_proto::tonic::collector::trace::v1::ExportTraceServiceRequest {
            resource_spans: vec![v1::ResourceSpans {
                resource: None,
                instrumentation_library_spans: vec![v1::InstrumentationLibrarySpans {
                    instrumentation_library: None,
                    spans,
                    schema_url: String::new(),
                }],
                schema_url: String::new(),
            }],
        }
    }
}
/// Newtype over the prost-generated OTLP span, so we can implement
/// `Distribution<Span>` for random generation.
struct Span(v1::Span);
impl Distribution<Span> for Standard {
    /// Samples a random root span: 16-byte trace id, 8-byte span id, random
    /// name and kind, and an end time drawn to be >= the start time.
    fn sample<R>(&self, mut rng: &mut R) -> Span
    where
        R: Rng + ?Sized,
    {
        // 16 random bytes for the trace id, 8 for the span id.
        let trace_id = Standard.sample_iter(&mut rng).take(16).collect();
        let span_id = Standard.sample_iter(&mut rng).take(8).collect();

        // Some collectors may immediately drop old/future spans. Consider
        // constraining these to recent timestamps.
        let start_time_unix_nano: u64 = rng.gen();
        // end time is expected to be greater than or equal to start time
        let end_time_unix_nano: u64 = rng.gen_range(start_time_unix_nano..=u64::MAX);

        Span(v1::Span {
            trace_id,
            span_id,
            // https://www.w3.org/TR/trace-context/#tracestate-header
            trace_state: String::new(),
            // zeros: root span
            parent_span_id: vec![0; 8],
            name: AsciiString::default().generate(rng),
            // 0..=5 covers the full OTLP SpanKind value range
            kind: rng.gen_range(0..=5),
            start_time_unix_nano,
            end_time_unix_nano,
            attributes: Vec::new(),
            dropped_attributes_count: 0,
            events: Vec::new(),
            dropped_events_count: 0,
            links: Vec::new(),
            dropped_links_count: 0,
            status: None,
        })
    }
}
/// Payload generator producing protobuf-encoded OpenTelemetry trace
/// requests filled with random spans.
#[derive(Debug, Default, Clone, Copy)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub(crate) struct OpentelemetryTraces;
impl Serialize for OpentelemetryTraces {
    /// Writes a protobuf-encoded `ExportTraceServiceRequest` to `writer`,
    /// packing in as many random spans as fit within `max_bytes`.
    /// Emits nothing (still `Ok`) when the budget cannot cover the envelope.
    fn to_bytes<W, R>(&self, mut rng: R, max_bytes: usize, writer: &mut W) -> Result<(), Error>
    where
        R: Rng + Sized,
        W: Write,
    {
        // An Export*ServiceRequest message has 5 bytes of fixed values plus
        // a varint-encoded message length field. The worst case for the message
        // length field is the max message size divided by 0x7F.
        let bytes_remaining = max_bytes.checked_sub(5 + super::div_ceil(max_bytes, 0x7F));
        let Some(mut bytes_remaining) = bytes_remaining else {
            return Ok(());
        };

        // Accumulate random spans until the next one would bust the budget.
        let mut acc = ExportTraceServiceRequest(Vec::new());
        loop {
            let member: Span = rng.gen();
            // Note: this 2 is a guessed value for an unknown size factor.
            let len = member.0.encoded_len() + 2;
            match bytes_remaining.checked_sub(len) {
                Some(remainder) => {
                    acc.0.push(member);
                    bytes_remaining = remainder;
                }
                None => break,
            }
        }

        let buf = acc.into_prost_type().encode_to_vec();
        writer.write_all(&buf)?;
        Ok(())
    }
}
#[cfg(test)]
mod test {
    use super::OpentelemetryTraces;
    use crate::payload::Serialize;
    use proptest::prelude::*;
    use prost::Message;
    use rand::{rngs::SmallRng, SeedableRng};

    // We want to be sure that the serialized size of the payload does not
    // exceed `max_bytes`.
    proptest! {
        #[test]
        fn payload_not_exceed_max_bytes(seed: u64, max_bytes: u16) {
            let max_bytes = max_bytes as usize;

            // Seeded RNG keeps each proptest case reproducible.
            let rng = SmallRng::seed_from_u64(seed);
            let traces = OpentelemetryTraces::default();
            let mut bytes = Vec::with_capacity(max_bytes);
            traces.to_bytes(rng, max_bytes, &mut bytes).unwrap();

            assert!(bytes.len() <= max_bytes, "max len: {max_bytes}, actual: {}", bytes.len());
        }
    }

    // We want to be sure that the payloads are not being left empty.
    proptest! {
        #[test]
        fn payload_is_at_least_half_of_max_bytes(seed: u64, max_bytes in 16u16..u16::MAX) {
            let max_bytes = max_bytes as usize;

            let rng = SmallRng::seed_from_u64(seed);
            let logs = OpentelemetryTraces::default();
            let mut bytes = Vec::with_capacity(max_bytes);
            logs.to_bytes(rng, max_bytes, &mut bytes).unwrap();

            assert!(!bytes.is_empty());
        }
    }

    // We want to know that every payload produced by this type actually
    // deserializes as a collection of OTEL Spans.
    proptest! {
        #[test]
        fn payload_deserializes(seed: u64, max_bytes: u16) {
            let max_bytes = max_bytes as usize;

            let rng = SmallRng::seed_from_u64(seed);
            let traces = OpentelemetryTraces::default();
            let mut bytes: Vec<u8> = Vec::with_capacity(max_bytes);
            traces.to_bytes(rng, max_bytes, &mut bytes).unwrap();

            // Round-trip through the prost decoder to prove well-formedness.
            opentelemetry_proto::tonic::collector::trace::v1::ExportTraceServiceRequest::decode(bytes.as_slice()).unwrap();
        }
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// DistributionWidgetXAxis : X Axis controls for the distribution widget.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DistributionWidgetXAxis {
    /// True includes zero.
    #[serde(rename = "include_zero", skip_serializing_if = "Option::is_none")]
    pub include_zero: Option<bool>,
    /// Specifies maximum value to show on the x-axis. It takes a number, a
    /// percentile (e.g. `p90` for the 90th percentile), or `auto` for default behavior.
    #[serde(rename = "max", skip_serializing_if = "Option::is_none")]
    pub max: Option<String>,
    /// Specifies minimum value to show on the x-axis. It takes a number, a
    /// percentile (e.g. `p90` for the 90th percentile), or `auto` for default behavior.
    #[serde(rename = "min", skip_serializing_if = "Option::is_none")]
    pub min: Option<String>,
    /// Specifies the scale type. Possible values are `linear`.
    #[serde(rename = "scale", skip_serializing_if = "Option::is_none")]
    pub scale: Option<String>,
}
impl DistributionWidgetXAxis {
    /// X Axis controls for the distribution widget.
    ///
    /// All fields start as `None`, meaning "use the API default".
    pub fn new() -> DistributionWidgetXAxis {
        DistributionWidgetXAxis {
            include_zero: None,
            max: None,
            min: None,
            scale: None,
        }
    }
}

/// Same all-`None` settings as [`DistributionWidgetXAxis::new`]; provided so
/// the type works with `Default`-expecting APIs (and satisfies clippy's
/// `new_without_default` lint).
impl Default for DistributionWidgetXAxis {
    fn default() -> Self {
        Self::new()
    }
}
|
use std::thread;
use std::time::Duration;
use std::sync::{Arc , Mutex, mpsc::*};
/// Demo entry point: one shared "spoon" mutex, two producer threads feeding
/// a consumer thread, plus a fourth thread that periodically grabs the
/// spoon. The main thread just keeps the process alive for 100 seconds.
fn main() {
    let spoon = Arc::new(Mutex::new(String::from("spoon")));
    let spoon_for_player = Arc::clone(&spoon);
    let spoon_for_eater = Arc::clone(&spoon);

    let (tx_a, rx_a) = channel::<String>();
    let (tx_b, rx_b) = channel::<String>();

    // Two producers and the consumer that needs the spoon to "eat".
    thread::spawn(move || a_fn(tx_a));
    thread::spawn(move || b_fn(tx_b));
    thread::spawn(move || c_fn(rx_a, rx_b, spoon_for_eater));

    // "Player" thread: holds the spoon briefly once per second.
    thread::spawn(move || loop {
        {
            let _guard = spoon_for_player.lock();
            println!("玩耍中,,..");
        }
        thread::sleep(Duration::from_secs(1));
    });

    thread::sleep(Duration::from_secs(100));
}
/// Producer A: every 200 ms, sends the string "a--" down the channel.
///
/// Stops once the receiving end has been dropped. (The original ignored the
/// `send` Result — a `#[must_use]` value — and would spin forever after the
/// receiver disconnected.)
fn a_fn(sends: Sender<String>) {
    loop {
        let a_str = String::from("a--");
        thread::sleep(Duration::from_millis(200));
        // println!("a-ok");
        if sends.send(a_str).is_err() {
            // Receiver disconnected; nothing left to do.
            break;
        }
    }
}
/// Producer B: every 200 ms, sends the string "b--" down the channel.
///
/// Stops once the receiving end has been dropped. (The original ignored the
/// `send` Result and would spin forever after the receiver disconnected.)
fn b_fn(sends: Sender<String>) {
    loop {
        let b_str = String::from("b--");
        thread::sleep(Duration::from_millis(200));
        // println!("b-ok");
        if sends.send(b_str).is_err() {
            // Receiver disconnected; nothing left to do.
            break;
        }
    }
}
/// Consumer: while holding the spoon lock, receives one message from each
/// producer, then releases the spoon and rests for 400 ms.
///
/// Returns cleanly once either producer has disconnected (the original
/// called `recv().unwrap()`, which panics on disconnect, and silently
/// ignored lock poisoning).
fn c_fn(rec_a: Receiver<String>, rec_b: Receiver<String>, spoon_lock: Arc<Mutex<String>>) {
    loop {
        {
            // Hold the spoon for the whole receive cycle.
            let _mtx = spoon_lock.lock().expect("spoon mutex poisoned");
            let a_s = match rec_a.recv() {
                Ok(msg) => msg,
                Err(_) => return, // producer A is gone; stop consuming
            };
            println!("got--- a {}", a_s);
            thread::sleep(Duration::from_millis(50));
            let b_s = match rec_b.recv() {
                Ok(msg) => msg,
                Err(_) => return, // producer B is gone; stop consuming
            };
            println!("got--- b {}", b_s);
            thread::sleep(Duration::from_millis(50));
            println!("----完成了====");
        }
        thread::sleep(Duration::from_millis(400));
    }
}
|
#![recursion_limit = "128"]
// extern crate fst_levenshtein;
#[macro_use]
extern crate measure_time;
#[allow(unused_imports)]
#[macro_use]
extern crate serde_json;
#[allow(unused_imports)]
use fst::{IntoStreamer, MapBuilder, Set};
// use fst_levenshtein::Levenshtein;
// use serde_json::{Deserializer, Value};
use std::str;
use veloci::*;
#[allow(unused_imports)]
use rayon::prelude::*;
static TEST_FOLDER: &str = "bench_taschenbuch";
/// Loads the benchmark persistence from `TEST_FOLDER` if one exists;
/// otherwise builds it from a corpus of 6 million identical documents.
fn load_persistence_disk() -> persistence::Persistence {
    use std::path::Path;

    // Reuse an index built by a previous run.
    if Path::new(TEST_FOLDER).exists() {
        return persistence::Persistence::load(TEST_FOLDER).expect("Could not load persistence");
    }

    // One JSON document per line, repeated 6 million times.
    let object = r#"{"type":"taschenbuch","title":"mein buch"}"#.to_owned() + "\n";
    let data = object.repeat(6_000_000);

    let mut pers = persistence::Persistence::create_mmap(TEST_FOLDER.to_string()).unwrap();
    println!("{:?}", create::create_indices_from_str(&mut pers, &data, "{}", true));
    pers
}
/// Benchmark entry point: loads (or builds) the on-disk index, then times a
/// single freestyle search via the `info_time!` scope macro.
fn main() {
    veloci::trace::enable_log();
    let pers = load_persistence_disk();
    // Times everything from here to the end of main.
    info_time!("wo");
    let _results = search_freestyle("taschenbuch", &pers);
}
/// Runs a freestyle search for `term` against the given persistence and
/// resolves the hits to full documents.
fn search_freestyle(term: &str, pers: &persistence::Persistence) -> Vec<search::DocWithHit> {
    // Default generator parameters with only the search term set.
    let params = query_generator::SearchQueryGeneratorParameters {
        search_term: term.to_string(),
        ..Default::default()
    };
    let request = query_generator::search_query(pers, params).unwrap();
    let hits = search::search(request, pers).unwrap();
    search::to_documents(pers, &hits.data, &None, &hits)
}
|
use super::*;
/// A footnote reference.
///
/// # Semantics
///
/// This is a reference (or link) to a [`greater_elements::FootnoteDefinition`].
///
/// # Syntax
///
/// Follows one of these patterns:
///
/// - normal footnote: `[fn:LABEL]`
/// - inline footnote: `[fn:LABEL:DEFINITION]`
///   can be referenced by other footnotes
/// - anonymous footnote: `[fn::DEFINITION]`
///
/// `LABEL` can contain any word-constituent character, hyphens and underscores.
///
/// `DEFINITION` can contain any character. Opening and closing square brackets must be
/// balanced in it. It can contain the standard set of objects, even other footnote references.
/// Will be parsed as a secondary string and can contain the standard set of objects.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FootnoteReference {
    /// Which of the three syntactic forms this reference uses.
    pub kind: FootnoteReferenceKind,
}
/// The kind of a [`FootnoteReference`].
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum FootnoteReferenceKind {
    /// `[fn:LABEL]` — refers to a separately defined footnote.
    Normal {
        label: String,
    },
    /// `[fn:LABEL:DEFINITION]` — defines the footnote inline under a label.
    Inline {
        label: String,
        definition: SecondaryString<StandardSet>,
    },
    /// `[fn::DEFINITION]` — defines an unlabeled footnote inline.
    Anonymous {
        definition: SecondaryString<StandardSet>,
    },
}
|
#![cfg(test)]
use super::*;
use frame_support::{construct_runtime, pallet_prelude::Hooks, parameter_types};
use orml_traits::parameter_type_with_key;
use pallet_nft::AssetHandler;
use primitives::{continuum::Continuum, Amount, AuctionId, CurrencyId, FungibleTokenId};
use sp_core::H256;
use sp_runtime::traits::AccountIdConversion;
use sp_runtime::{testing::Header, traits::IdentityLookup, ModuleId};
use crate as auction;
use auction_manager::ListingLevel;
use bc_primitives::{BitCountryStruct, BitCountryTrait, Country};
parameter_types! {
    pub const BlockHashCount: u32 = 256;
}

// Concrete primitive types used throughout this mock runtime.
pub type AccountId = u128;
pub type Balance = u128;
pub type BlockNumber = u64;
pub type BitCountryId = u64;

// Well-known test accounts and ids used by the auction tests.
pub const ALICE: AccountId = 1;
pub const BOB: AccountId = 2;
pub const CLASS_ID: u32 = 0;
pub const COLLECTION_ID: u64 = 0;
pub const ALICE_COUNTRY_ID: BitCountryId = 1;
pub const BOB_COUNTRY_ID: BitCountryId = 2;
// Minimal frame_system configuration: identity account lookup, BlakeTwo256
// hashing, and unit defaults for all weights, limits, and filters.
impl frame_system::Config for Runtime {
    type Origin = Origin;
    type Index = u64;
    type BlockNumber = BlockNumber;
    type Call = Call;
    type Hash = H256;
    type Hashing = ::sp_runtime::traits::BlakeTwo256;
    type AccountId = AccountId;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type Event = Event;
    type BlockHashCount = BlockHashCount;
    type BlockWeights = ();
    type BlockLength = ();
    type Version = ();
    type PalletInfo = PalletInfo;
    type AccountData = pallet_balances::AccountData<Balance>;
    type OnNewAccount = ();
    type OnKilledAccount = ();
    type DbWeight = ();
    type BaseCallFilter = ();
    type SystemWeightInfo = ();
    type SS58Prefix = ();
}
parameter_types! {
    pub const BalanceExistentialDeposit: u64 = 1;
    pub const SpotId: u64 = 1;
}

// Native balances pallet backed by frame_system's account store.
impl pallet_balances::Config for Runtime {
    type Balance = Balance;
    type Event = Event;
    type DustRemoval = ();
    type ExistentialDeposit = BalanceExistentialDeposit;
    type AccountStore = System;
    type MaxLocks = ();
    type WeightInfo = ();
}
// Stub continuum handler: ignores its arguments and always reports spot 1
// as successfully transferred.
pub struct Continuumm;

impl Continuum<u128> for Continuumm {
    fn transfer_spot(
        spot_id: u64,
        from: &AccountId,
        to: &(AccountId, u64),
    ) -> Result<u64, DispatchError> {
        Ok(1)
    }
}
/// Mock auction handler: accepts bids from ALICE, rejects everyone else,
/// and never shifts the auction end block.
pub struct Handler;

impl AuctionHandler<AccountId, Balance, BlockNumber, AuctionId> for Handler {
    fn on_new_bid(
        now: BlockNumber,
        id: AuctionId,
        new_bid: (AccountId, Balance),
        last_bid: Option<(AccountId, Balance)>,
    ) -> OnNewBidResult<BlockNumber> {
        //Test with Alice bid
        let accept_bid = new_bid.0 == ALICE;
        OnNewBidResult {
            accept_bid,
            auction_end_change: Change::NoChange,
        }
    }

    fn on_auction_ended(_id: AuctionId, _winner: Option<(AccountId, Balance)>) {}
}
/// NFT asset handler that delegates the in-auction check straight to the
/// mock auction manager.
pub struct NftAssetHandler;

impl AssetHandler for NftAssetHandler {
    fn check_item_in_auction(asset_id: AssetId) -> bool {
        MockAuctionManager::check_item_in_auction(asset_id)
    }
}
parameter_type_with_key! {
    // No existential deposit for any fungible token in tests.
    pub ExistentialDeposits: |_currency_id: FungibleTokenId| -> Balance {
        Default::default()
    };
}

parameter_types! {
    pub const BitCountryTreasuryModuleId: ModuleId = ModuleId(*b"bit/trsy");
    pub TreasuryModuleAccount: AccountId = BitCountryTreasuryModuleId::get().into_account();
    pub const CountryFundModuleId: ModuleId = ModuleId(*b"bit/fund");
}

// Multi-currency tokens pallet; dust is swept into the treasury account.
impl orml_tokens::Config for Runtime {
    type Event = Event;
    type Balance = Balance;
    type Amount = Amount;
    type CurrencyId = FungibleTokenId;
    type WeightInfo = ();
    type ExistentialDeposits = ExistentialDeposits;
    type OnDust = orml_tokens::TransferDust<Runtime, TreasuryModuleAccount>;
}

parameter_types! {
    pub const AuctionTimeToClose: u64 = 100; //Test auction end within 100 blocks
    pub const MinimumAuctionDuration: u64 = 10; // Minimum auction length of 10 blocks
}
pub struct BitCountryInfoSource {}
impl BitCountryTrait<AccountId> for BitCountryInfoSource {
fn check_ownership(who: &AccountId, country_id: &BitCountryId) -> bool {
match *who {
ALICE => *country_id == ALICE_COUNTRY_ID,
BOB => *country_id == BOB_COUNTRY_ID,
_ => false,
}
}
fn get_bitcountry(bitcountry_id: u64) -> Option<BitCountryStruct<u128>> {
None
}
fn get_bitcountry_token(bitcountry_id: u64) -> Option<FungibleTokenId> {
None
}
fn update_bitcountry_token(
bitcountry_id: u64,
currency_id: FungibleTokenId,
) -> Result<(), DispatchError> {
Ok(())
}
}
// The auction pallet under test, wired to the mocks defined above.
impl Config for Runtime {
    type Event = Event;
    type AuctionTimeToClose = AuctionTimeToClose;
    type Handler = Handler;
    type Currency = Balances;
    type ContinuumHandler = Continuumm;
    type FungibleTokenCurrency = Tokens;
    type BitCountryInfoSource = BitCountryInfoSource;
    type MinimumAuctionDuration = MinimumAuctionDuration;
}

parameter_types! {
    pub CreateClassDeposit: Balance = 2;
    pub CreateAssetDeposit: Balance = 1;
    pub NftModuleId: ModuleId = ModuleId(*b"bit/bNFT");
}

// NFT pallet, routed through the no-op MockAuctionManager.
impl pallet_nft::Config for Runtime {
    type Event = Event;
    type CreateClassDeposit = CreateClassDeposit;
    type CreateAssetDeposit = CreateAssetDeposit;
    type Currency = Balances;
    type ModuleId = NftModuleId;
    type WeightInfo = ();
    type AuctionHandler = MockAuctionManager;
    type AssetsHandler = NftAssetHandler;
}

// Underlying orml_nft storage used by pallet_nft.
impl orml_nft::Config for Runtime {
    type ClassId = u32;
    type TokenId = u64;
    type ClassData = pallet_nft::NftClassData<Balance>;
    type TokenData = pallet_nft::NftAssetData<Balance>;
}
type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Runtime>;
type Block = frame_system::mocking::MockBlock<Runtime>;

// Assembles the mock runtime from the pallets configured above.
construct_runtime!(
    pub enum Runtime where
        Block = Block,
        NodeBlock = Block,
        UncheckedExtrinsic = UncheckedExtrinsic
    {
        System: frame_system::{Module, Call, Config, Storage, Event<T>},
        Balances: pallet_balances::{Module, Call, Storage, Config<T>, Event<T>},
        Tokens: orml_tokens::{Module, Call, Storage, Config<T>, Event<T>},
        NFTModule: pallet_nft::{Module, Storage ,Call, Event<T>},
        OrmlNft: orml_nft::{Module, Storage, Config<T>},
        NftAuctionModule: auction::{Module, Call, Storage, Event<T>},
    }
);
/// Builder for test externalities with ALICE and BOB pre-funded.
pub struct ExtBuilder;

impl Default for ExtBuilder {
    fn default() -> Self {
        ExtBuilder
    }
}

impl ExtBuilder {
    /// Builds externalities starting at block 1.
    pub fn build(self) -> sp_io::TestExternalities {
        self.build_with_block_number(1)
    }

    /// Builds externalities with genesis balances applied and the block
    /// number preset to `block_number`.
    pub fn build_with_block_number(self, block_number: u64) -> sp_io::TestExternalities {
        let mut t = frame_system::GenesisConfig::default()
            .build_storage::<Runtime>()
            .unwrap();

        // Endow the two test accounts.
        pallet_balances::GenesisConfig::<Runtime> {
            balances: vec![(ALICE, 100000), (BOB, 500)],
        }
        .assimilate_storage(&mut t)
        .unwrap();

        let mut ext = sp_io::TestExternalities::new(t);
        ext.execute_with(|| System::set_block_number(block_number));
        ext
    }
}
/// Returns the most recently deposited runtime event.
/// Panics if no event has been emitted yet.
pub fn last_event() -> Event {
    frame_system::Module::<Runtime>::events()
        .pop()
        .expect("Event expected")
        .event
}
/// Advances the chain to block `n`, invoking the auction pallet's and
/// System's `on_finalize`/`on_initialize` hooks for every block in between.
pub fn run_to_block(n: u64) {
    while System::block_number() < n {
        // Finalize the current block before moving on.
        NftAuctionModule::on_finalize(System::block_number());
        System::on_finalize(System::block_number());
        System::set_block_number(System::block_number() + 1);
        // Initialize the next block.
        System::on_initialize(System::block_number());
        NftAuctionModule::on_initialize(System::block_number());
    }
}
/// No-op auction manager used where a pallet requires an `Auction`
/// implementation but the tests never exercise it.
pub struct MockAuctionManager;

impl Auction<AccountId, BlockNumber> for MockAuctionManager {
    type Balance = Balance;

    fn auction_info(id: u64) -> Option<AuctionInfo<u128, Self::Balance, u64>> {
        None
    }

    // NOTE: the previous stubs returned `None` from methods whose return
    // types are `DispatchResult` / `Result<u64, DispatchError>` / `()`,
    // which does not type-check; they now return the matching success
    // values instead.
    fn update_auction(id: u64, info: AuctionInfo<u128, Self::Balance, u64>) -> DispatchResult {
        Ok(())
    }

    fn new_auction(
        recipient: u128,
        initial_amount: Self::Balance,
        start: u64,
        end: Option<u64>,
    ) -> Result<u64, DispatchError> {
        Ok(0)
    }

    fn create_auction(
        auction_type: AuctionType,
        item_id: ItemId,
        end: Option<u64>,
        recipient: u128,
        initial_amount: Self::Balance,
        start: u64,
        listing_level: ListingLevel,
    ) -> Result<u64, DispatchError> {
        Ok(0)
    }

    fn remove_auction(id: u64, item_id: ItemId) {}

    fn auction_bid_handler(
        _now: u64,
        id: u64,
        new_bid: (u128, Self::Balance),
        last_bid: Option<(u128, Self::Balance)>,
    ) -> DispatchResult {
        Ok(())
    }

    fn local_auction_bid_handler(
        _now: u64,
        id: u64,
        new_bid: (u128, Self::Balance),
        last_bid: Option<(u128, Self::Balance)>,
        social_currency_id: FungibleTokenId,
    ) -> DispatchResult {
        Ok(())
    }

    fn check_item_in_auction(asset_id: AssetId) -> bool {
        false
    }
}
|
/// Return `s` with all of its characters in reverse order (newlines
/// included, so lines swap position as well as content).
fn rot(s: &str) -> String {
    let mut reversed = String::with_capacity(s.len());
    for ch in s.chars().rev() {
        reversed.push(ch);
    }
    reversed
}
/// Append to every newline-separated line of `s` as many dots as the line
/// has characters (so each line doubles in length).
fn add_dots(s: &str) -> String {
    let padded: Vec<String> = s
        .split('\n')
        .map(|line| format!("{}{}", line, ".".repeat(line.len())))
        .collect();
    padded.join("\n")
}
/// Dot-pad every line of `s`, then append the fully reversed copy of that
/// padded text below it, separated by a newline.
fn selfie_and_rot(s: &str) -> String {
    // Pad each line with as many dots as it has characters.
    let dotted = s
        .split('\n')
        .map(|line| format!("{}{}", line, ".".repeat(line.len())))
        .collect::<Vec<_>>()
        .join("\n");
    // Reversing the padded text flips both line order and line content.
    let mirrored: String = dotted.chars().rev().collect();
    format!("{}\n{}", dotted, mirrored)
}
// first parameter: dots have to be replaced by function of one variable
/// Apply the supplied transformation `f` to `s` through a plain function
/// pointer (indirection exercised by the kata's tests).
fn oper(f: fn(&str) -> String, s: &str) -> String {
    let transform = f;
    transform(s)
}
/// Assert that `oper(rot, s)` produces `exp`.
fn testing1(s: &str, exp: &str) -> () {
    let actual = oper(rot, s);
    let expected = exp.to_string();
    assert_eq!(actual, expected)
}
/// Assert that `oper(selfie_and_rot, s)` produces `exp`.
fn testing2(s: &str, exp: &str) -> () {
    let actual = oper(selfie_and_rot, s);
    let expected = exp.to_string();
    assert_eq!(actual, expected)
}
// Acceptance tests driving `rot` / `selfie_and_rot` through the `oper`
// function-pointer indirection.
#[test]
fn basics_oper() {
    testing1("fijuoo\nCqYVct\nDrPmMJ\nerfpBA\nkWjFUG\nCVUfyL",
             "LyfUVC\nGUFjWk\nABpfre\nJMmPrD\ntcVYqC\nooujif");
    testing1("rkKv\ncofM\nzXkh\nflCB", "BClf\nhkXz\nMfoc\nvKkr");
    testing2("xZBV\njsbS\nJcpN\nfVnP",
             "xZBV....\njsbS....\nJcpN....\nfVnP....\n....PnVf\n....NpcJ\n....Sbsj\n....VBZx");
    testing2("uLcq\nJkuL\nYirX\nnwMB",
             "uLcq....\nJkuL....\nYirX....\nnwMB....\n....BMwn\n....XriY\n....LukJ\n....qcLu");
}

// Unit tests for the individual helpers.
#[test]
fn test_rot() {
    assert_eq!(rot("abcd\nefgh\nijkl\nmnop"), "ponm\nlkji\nhgfe\ndcba")
}

#[test]
fn test_add_dots() {
    assert_eq!(add_dots("abcd\nefgh\nijkl\nmnop"), "abcd....\nefgh....\nijkl....\nmnop....")
}

#[test]
fn test_selfie_and_rot() {
    assert_eq!(selfie_and_rot("abcd\nefgh\nijkl\nmnop"), "abcd....\nefgh....\nijkl....\nmnop....\n....ponm\n....lkji\n....hgfe\n....dcba")
}
|
use std::str
fn main() {
let bytestring: &[u8; 20] = b"this is a bytestring";
println!("A bytestring: {:?}", bytestring);
let escaped = b"\x52\x75\x73\x74 as bytes";
println!("Some escaped bytes: {:?}", escaped);
let raw_bytestring = br"\u{211D} is not escaped here";
println!("{:?}", raw_bytestring);
if let Ok(my_str) = str::from_utf8(raw_bytestring) {
println!("And the same as text: '{}'", my_str);
}
let quotes = br#"You can also use "fancier" formatting, \
like with normal raw strings"#;
let shift_jis = b"\x82\xe6\x82\xa8\x82\xb1\x82";
match str::from_utf8(shift_jis) {
Ok(my_str) => println!("Conversion successful: '{}'", my_str),
Err(e) => println!("Conversion failed: {:?}", e),
};
} |
use std::env;
use std::process;
use minigrep::Config;
use minigrep::run;
/// CLI entry point for minigrep: parse arguments, run the search, and exit
/// non-zero on any failure.
///
/// Fix: error diagnostics now go to **stderr** (`eprintln!`) instead of
/// stdout, so redirected/piped search output is not polluted by error
/// messages.
fn main() {
    // accepting arguments (program name is element 0)
    let arguments: Vec<String> = env::args().collect();

    let config = Config::new(&arguments).unwrap_or_else(|error| {
        eprintln!("Problem in parsing arguments: \n\t - {}", error);
        process::exit(1);
    });

    // Status line stays on stdout as part of normal program output.
    println!(
        "searching for \"{}\" in file: \"{}\" ...",
        config.query, config.filename
    );

    if let Err(error) = run(config) {
        eprintln!("Something went wrong: \n\t - {}", error);
        process::exit(1);
    }
}
|
//! [Advanced Traits]
//!
//! # Examples
//!
//! Simplified [`Iterator`] example
//!
//! ```
//! use the_book::ch19::sec02::{Counter, Iterator};
//!
//! let mut counter = Counter::new(3);
//! assert_eq!(Some(1), counter.next());
//! assert_eq!(Some(2), counter.next());
//! assert_eq!(Some(3), counter.next());
//! assert_eq!(None, counter.next());
//! ```
//!
//! Operator Overloading example
//!
//! ```
//! use the_book::ch19::sec02::Point;
//!
//! let p1 = Point(0, 1);
//! let p2 = Point(9, 3);
//! let p3 = p1 + p2;
//! assert_eq!(9, p3.0);
//! assert_eq!(4, p3.1);
//! ```
//!
//! [Default Generic Type Parameters] example
//!
//! ```
//! use the_book::ch19::sec02::{Meters, Millimeters};
//!
//! let meter = Meters(10.0);
//! let milli = Millimeters(2.0);
//!
//! let got = meter + milli;
//! assert_eq!(Meters(10.002), got);
//!
//! let got = milli + meter;
//! assert_eq!(Millimeters(10_002.0), got);
//! ```
//!
//! [`iterator`]: trait.Iterator.html
//! [advanced traits]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html
//! [default generic type parameters]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#default-generic-type-parameters-and-operator-overloading
/// Simple `Iterator` example.
///
/// [`iterator`]: https://doc.rust-lang.org/std/iter/trait.Iterator.html
pub trait Iterator {
    /// The type of element this iterator yields.
    type Item;
    /// Advance the iterator, returning `None` once exhausted.
    fn next(&mut self) -> Option<Self::Item>;
}
/// Simple `Counter` to demonstrate [`Iterator`] implementation.
///
/// # Examples
///
/// ```
/// use the_book::ch19::sec02::{Counter, Iterator};
///
/// let mut counter = Counter::new(3);
/// assert_eq!(Some(1), counter.next());
/// assert_eq!(Some(2), counter.next());
/// assert_eq!(Some(3), counter.next());
/// assert_eq!(None, counter.next());
/// ```
/// [`iterator`]: trait.Iterator.html
pub struct Counter {
    // Last value yielded (0 before iteration starts).
    count: u32,
    // Inclusive upper bound; iteration stops after yielding `max`.
    max: u32,
}
impl Counter {
pub fn new(max: u32) -> Self {
Self { count: 0, max }
}
}
impl Iterator for Counter {
    type Item = u32;

    /// Yield 1, 2, ..., `max`, then `None` on every subsequent call.
    fn next(&mut self) -> Option<Self::Item> {
        if self.count >= self.max {
            return None;
        }
        self.count += 1;
        Some(self.count)
    }
}
/// [Default Generic Type Parameters] and Operator Overloading
///
/// # Examples
///
/// ```
/// use the_book::ch19::sec02::Point;
///
/// let p1 = Point(0, 1);
/// let p2 = Point(9, 3);
/// let p3 = p1 + p2;
/// assert_eq!(9, p3.0);
/// assert_eq!(4, p3.1);
/// ```
/// [default generic type parameters]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#default-generic-type-parameters-and-operator-overloading
#[derive(Debug, PartialEq)]
// Tuple struct holding two i32 components, added pairwise by `Add`.
pub struct Point(pub i32, pub i32);
use std::ops::Add;
impl Add for Point {
type Output = Self;
fn add(self, other: Self) -> Self {
Self(self.0 + other.0, self.1 + other.1)
}
}
/// [Default Generic Type Parameters] and Operator Overloading
///
/// # Examples
///
/// ```
/// use the_book::ch19::sec02::{Meters, Millimeters};
///
/// let meter = Meters(10.0);
/// let milli = Millimeters(2.0);
///
/// let got = meter + milli;
/// assert_eq!(Meters(10.002), got);
///
/// let got = milli + meter;
/// assert_eq!(Millimeters(10_002.0), got);
/// ```
/// [default generic type parameters]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#default-generic-type-parameters-and-operator-overloading
#[derive(Debug, PartialEq, Copy, Clone)]
// f32 newtype; the `Add<Meters>` impl converts meters with a x1000 factor.
pub struct Millimeters(pub f32);
impl Add<Meters> for Millimeters {
type Output = Self;
fn add(self, other: Meters) -> Self {
Self(self.0 + other.0 * 1000.0)
}
}
/// [Default Generic Type Parameters] and Operator Overloading
///
/// # Examples
///
/// ```
/// use the_book::ch19::sec02::{Meters, Millimeters};
///
/// let meter = Meters(10.0);
/// let milli = Millimeters(2.0);
///
/// let got = meter + milli;
/// assert_eq!(Meters(10.002), got);
///
/// let got = milli + meter;
/// assert_eq!(Millimeters(10_002.0), got);
/// ```
/// [default generic type parameters]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#default-generic-type-parameters-and-operator-overloading
#[derive(Debug, PartialEq, Copy, Clone)]
// f32 newtype; the `Add<Millimeters>` impl converts mm with a /1000 factor.
pub struct Meters(pub f32);
impl Add<Millimeters> for Meters {
type Output = Self;
fn add(self, other: Millimeters) -> Self {
Self(self.0 + other.0 / 1000.0)
}
}
|
/**********************************************************\
| |
| hprose |
| |
| Official WebSite: http://www.hprose.com/ |
| http://www.hprose.org/ |
| |
\**********************************************************/
/**********************************************************\
* *
* rpc/base_client.rs *
* *
* hprose rpc base client for Rust. *
* *
* LastModified: Oct 8, 2016 *
* Author: Chen Fei <cf@hprose.com> *
* *
\**********************************************************/
use std::time::Duration;
use io;
use io::{Writer, Encoder, Encodable, Reader, Decoder, Decodable};
use io::tags::*;
use super::*;
/// BaseClient is the hprose base client
pub struct BaseClient<T: Transporter> {
trans: T,
filter_manager: FilterManager,
uri: String,
timeout: Option<Duration>
}
impl<T: Transporter> BaseClient<T> {
    /// Construct a client for `uri` with a default 30-second timeout.
    #[inline]
    pub fn new(trans: T, uri: String) -> BaseClient<T> {
        BaseClient {
            trans: trans,
            filter_manager: FilterManager::new(),
            uri: uri,
            timeout: Some(Duration::from_secs(30))
        }
    }

    /// Invoke the remote function `name` with `args`: encode the request,
    /// send it over the transport, then decode the response.
    pub fn invoke<R: Decodable, A: Encodable>(&self, name: &str, args: &mut Vec<A>, settings: Option<InvokeSettings>) -> InvokeResult<R> {
        let odata = self.encode(name, args, settings);
        self.trans.send_and_receive(&self.uri, &odata).and_then(|idata| self.decode(idata, args))
    }

    /// Serialize a call request: TAG_CALL, the function name, the argument
    /// sequence (written when args exist or the call is by-reference), and
    /// a closing TAG_END.
    fn encode<A: Encodable>(&self, name: &str, args: &mut Vec<A>, settings: Option<InvokeSettings>) -> Vec<u8> {
        let mut w = Writer::new(true);
        w.write_byte(TAG_CALL);
        w.write_str(name);
        let by_ref = settings.map_or(false, |s| s.by_ref);
        let len = args.len();
        if len > 0 || by_ref {
            w.write_seq(args.len(), |w| {
                for e in args {
                    e.encode(w);
                }
            });
            // Trailing `true` flags a by-reference call. NOTE(review):
            // exact wire semantics inferred — confirm against the hprose
            // protocol spec.
            if by_ref {
                w.write_bool(true);
            }
        }
        w.write_byte(TAG_END);
        w.into_bytes()
    }

    /// Parse a response: TAG_RESULT precedes the return value, TAG_ERROR a
    /// remote error string; any other first byte is a malformed response.
    fn decode<R: Decodable, A: Encodable>(&self, data: Vec<u8>, args: &mut Vec<A>) -> InvokeResult<R> {
        let mut r = Reader::new(&data, false);
        r.byte_reader.read_byte()
            .map_err(|e| InvokeError::DecoderError(io::DecoderError::ParserError(e)))
            .and_then(|tag| match tag {
                TAG_RESULT => r.unserialize::<R>().map_err(|e| InvokeError::DecoderError(e)),
                // TAG_ARGUMENT => (),  // TODO: by-ref argument sync is not implemented yet
                TAG_ERROR => r.read_string().map_err(|e| InvokeError::DecoderError(e)).and_then(|s| Err(InvokeError::RemoteError(s))),
                _ => Err(InvokeError::WrongResponse(data.clone())),
            })
    }

    // fn before_filter(&self, request: &[u8]) -> &[u8] {
    //     let request = self.filter_manager.output(request);
    //     let response = self.after_filter(request);
    //     self.filter_manager.input(response)
    // }
    //
    // fn after_filter(&self, request: &[u8]) -> &[u8] {
    //     self.trans.send_and_receive(&self.uri, request)
    // }
}
|
use std::io;
use std::os::unix::net::SocketAddr;
use std::path::PathBuf;
use futures::{Async, Poll, Stream, Sink, StartSend, AsyncSink};
use UnixDatagram;
/// Encoding of frames via buffers.
///
/// This trait is used when constructing an instance of `UnixDatagramFramed` and
/// provides the `In` and `Out` types which are decoded and encoded from the
/// socket, respectively.
///
/// Because Unix datagrams are a connectionless protocol, the `decode` method
/// receives the address where data came from and the `encode` method is also
/// responsible for determining the remote host to which the datagram should be
/// sent
///
/// The trait itself is implemented on a type that can track state for decoding
/// or encoding, which is particularly useful for streaming parsers. In many
/// cases, though, this type will simply be a unit struct (e.g. `struct
/// HttpCodec`).
pub trait UnixDatagramCodec {
    /// The type of decoded frames.
    type In;
    /// The type of frames to be encoded.
    type Out;
    /// Attempts to decode a frame from the provided buffer of bytes.
    ///
    /// This method is called by `UnixDatagramFramed` on a single datagram which
    /// has been read from a socket. The `buf` argument contains the data that
    /// was received from the remote address, and `src` is the address the data
    /// came from. Note that typically this method should require the entire
    /// contents of `buf` to be valid or otherwise return an error with
    /// trailing data.
    ///
    /// Finally, if the bytes in the buffer are malformed then an error is
    /// returned indicating why. This informs `Framed` that the stream is now
    /// corrupt and should be terminated.
    fn decode(&mut self, src: &SocketAddr, buf: &[u8]) -> io::Result<Self::In>;
    /// Encodes a frame into the buffer provided.
    ///
    /// This method will encode `msg` into the byte buffer provided by `buf`.
    /// The `buf` provided is an internal buffer of the `Framed` instance and
    /// will be written out when possible.
    ///
    /// The encode method also determines the destination to which the buffer
    /// should be directed, which will be returned as a `SocketAddr`.
    ///
    /// The returned path is used by `UnixDatagramFramed` as the `send_to`
    /// destination when the buffered datagram is flushed.
    fn encode(&mut self, msg: Self::Out, buf: &mut Vec<u8>)
              -> io::Result<PathBuf>;
}
/// A unified `Stream` and `Sink` interface to an underlying
/// `UnixDatagramSocket`, using the `UnixDatagramCodec` trait to encode and
/// decode frames.
///
/// You can acquire a `UnixDatagramFramed` instance by using the
/// `UnixDatagramSocket::framed` adapter.
pub struct UnixDatagramFramed<C> {
    // Underlying datagram socket.
    socket: UnixDatagram,
    // User-supplied frame encoder/decoder.
    codec: C,
    // Receive buffer (pre-sized; one datagram is decoded per poll).
    rd: Vec<u8>,
    // Pending outgoing datagram; empty when nothing is buffered.
    wr: Vec<u8>,
    // Destination path for the datagram currently in `wr`.
    out_addr: PathBuf,
}
impl<C: UnixDatagramCodec> Stream for UnixDatagramFramed<C> {
    type Item = C::In;
    type Error = io::Error;

    /// Receive one datagram and decode it into a single frame.
    fn poll(&mut self) -> Poll<Option<C::In>, io::Error> {
        // try_nb! presumably maps `WouldBlock` to `Async::NotReady`
        // (project macro) — other I/O errors propagate as `Err`.
        let (n, addr) = try_nb!(self.socket.recv_from(&mut self.rd));
        trace!("received {} bytes, decoding", n);
        // One datagram == one frame; only the bytes actually received are
        // handed to the codec.
        let frame = try!(self.codec.decode(&addr, &self.rd[..n]));
        trace!("frame decoded from buffer");
        Ok(Async::Ready(Some(frame)))
    }
}
impl<C: UnixDatagramCodec> Sink for UnixDatagramFramed<C> {
    type SinkItem = C::Out;
    type SinkError = io::Error;

    /// Encode `item` into the write buffer, first flushing any datagram
    /// still pending from an earlier call.
    fn start_send(&mut self, item: C::Out) -> StartSend<C::Out, io::Error> {
        if self.wr.len() > 0 {
            try!(self.poll_complete());
            // Still not flushed — hand the item back so the caller retries.
            if self.wr.len() > 0 {
                return Ok(AsyncSink::NotReady(item));
            }
        }
        // The codec fills `wr` and returns the destination path.
        self.out_addr = try!(self.codec.encode(item, &mut self.wr));
        Ok(AsyncSink::Ready)
    }

    /// Attempt to send the buffered datagram to `out_addr`.
    fn poll_complete(&mut self) -> Poll<(), io::Error> {
        trace!("flushing framed transport");
        if self.wr.is_empty() {
            return Ok(Async::Ready(()))
        }
        trace!("writing; remaining={}", self.wr.len());
        let n = try_nb!(self.socket.send_to(&self.wr, &self.out_addr));
        trace!("written {}", n);
        // Datagrams are all-or-nothing: a short write means the frame was
        // truncated, which is surfaced as an error (the buffer is cleared
        // either way so the sink does not wedge).
        let wrote_all = n == self.wr.len();
        self.wr.clear();
        if wrote_all {
            Ok(Async::Ready(()))
        } else {
            Err(io::Error::new(io::ErrorKind::Other,
                               "failed to write entire datagram to socket"))
        }
    }

    /// Flush outstanding data; datagram sockets have no shutdown handshake.
    fn close(&mut self) -> Poll<(), io::Error> {
        try_ready!(self.poll_complete());
        Ok(().into())
    }
}
/// Wrap `socket` and `codec` into a `UnixDatagramFramed` with a 64 KiB
/// receive buffer and an 8 KiB initial write buffer.
pub fn new<C: UnixDatagramCodec>(socket: UnixDatagram, codec: C) -> UnixDatagramFramed<C> {
    // Zero-filled receive buffer: recv_from writes into it in place.
    let mut read_buf = Vec::new();
    read_buf.resize(64 * 1024, 0);
    UnixDatagramFramed {
        rd: read_buf,
        wr: Vec::with_capacity(8 * 1024),
        out_addr: PathBuf::new(),
        socket: socket,
        codec: codec,
    }
}
impl<C> UnixDatagramFramed<C> {
    /// Returns a reference to the underlying I/O stream wrapped by `Framed`.
    ///
    /// Note that care should be taken to not tamper with the underlying stream
    /// of data coming in as it may corrupt the stream of frames otherwise being
    /// worked with.
    pub fn get_ref(&self) -> &UnixDatagram {
        &self.socket
    }

    /// Returns a mutable reference to the underlying I/O stream wrapped by
    /// `Framed`.
    ///
    /// Note that care should be taken to not tamper with the underlying stream
    /// of data coming in as it may corrupt the stream of frames otherwise being
    /// worked with.
    pub fn get_mut(&mut self) -> &mut UnixDatagram {
        &mut self.socket
    }

    /// Consumes the `Framed`, returning its underlying I/O stream.
    ///
    /// Note that care should be taken to not tamper with the underlying stream
    /// of data coming in as it may corrupt the stream of frames otherwise being
    /// worked with.
    ///
    /// Any data still buffered in `rd`/`wr` is dropped with `self`.
    pub fn into_inner(self) -> UnixDatagram {
        self.socket
    }
}
|
// Copyright 2017 Dmitry Tantsur <divius.inside@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! OpenStack Identity V3 API support for access tokens.
use std::collections::hash_map::DefaultHasher;
use std::fmt;
use std::hash::{Hash, Hasher};
use chrono::{Duration, Local};
use reqwest::{Client, IntoUrl, Method, Response, StatusCode, Url, UrlError};
use reqwest::header::{ContentType, Headers};
use super::super::{Error, ErrorKind, Result};
use super::super::identity::{catalog, protocol};
use super::super::session::RequestBuilder;
use super::super::utils::ValueCache;
use super::AuthMethod;
// User-facing error strings for misconfigured `Identity` builders and
// malformed server responses.
const MISSING_USER: &'static str = "User information required";
const MISSING_SCOPE: &'static str = "Unscoped tokens are not supported now";
const MISSING_SUBJECT_HEADER: &'static str =
    "Missing X-Subject-Token header";

// Required validity time in minutes. Here we refresh the token if it expires
// in 10 minutes or less.
const TOKEN_MIN_VALIDITY: i64 = 10;
/// Plain authentication token without additional details.
#[derive(Clone)]
struct Token {
    // Raw token value (the X-Subject-Token header contents); treated as a
    // secret — the Debug impl prints only a hash of it.
    value: String,
    // Parsed token body (expiry time, service catalog, ...).
    body: protocol::Token
}
impl fmt::Debug for Token {
    /// Debug-print the token with its value replaced by a hash, so the
    /// secret itself never lands in logs.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut hasher = DefaultHasher::new();
        self.value.hash(&mut hasher);
        let digest = hasher.finish();
        write!(f, "Token {{ value: {}, body: {:?} }}", digest, self.body)
    }
}
/// Authentication method factory using Identity API V3.
#[derive(Clone, Debug)]
pub struct Identity {
    // HTTP client reused by the resulting `PasswordAuth`.
    client: Client,
    // Base URL of the Identity service.
    auth_url: Url,
    // Optional region filter for catalog lookups.
    region: Option<String>,
    // Set by `with_user`; required by `create`.
    password_identity: Option<protocol::PasswordIdentity>,
    // Set by `with_project_scope`; required by `create`.
    project_scope: Option<protocol::ProjectScope>
}
/// Password authentication using Identity API V3.
///
/// Has to be created via [Identity object](struct.Identity.html) methods.
#[derive(Clone, Debug)]
pub struct PasswordAuth {
    // HTTP client used for token requests and authenticated calls.
    client: Client,
    // Base URL of the Identity service.
    auth_url: Url,
    // Optional region filter for catalog lookups.
    region: Option<String>,
    // Pre-built request body sent on every (re)authentication.
    body: protocol::ProjectScopedAuthRoot,
    // Fully resolved ".../v3/auth/tokens" URL.
    token_endpoint: String,
    // Cached token, refreshed when close to expiry.
    cached_token: ValueCache<Token>
}
impl Identity {
/// Get a reference to the auth URL.
pub fn auth_url(&self) -> &Url {
&self.auth_url
}
/// Create a password authentication against the given Identity service.
pub fn new<U>(auth_url: U) -> ::std::result::Result<Identity, UrlError>
where U: IntoUrl {
Identity::new_with_client(auth_url, Client::new())
}
/// Create a password authentication against the given Identity service.
pub fn new_with_region<U>(auth_url: U, region: String)
-> ::std::result::Result<Identity, UrlError> where U: IntoUrl {
Ok(Identity {
client: Client::new(),
auth_url: auth_url.into_url()?,
region: Some(region),
password_identity: None,
project_scope: None,
})
}
/// Create a password authentication against the given Identity service.
pub fn new_with_client<U>(auth_url: U, client: Client)
-> ::std::result::Result<Identity, UrlError> where U: IntoUrl {
Ok(Identity {
client: client,
auth_url: auth_url.into_url()?,
region: None,
password_identity: None,
project_scope: None,
})
}
/// Add authentication based on user name and password.
pub fn with_user<S1, S2, S3>(self, user_name: S1, password: S2,
domain_name: S3) -> Identity
where S1: Into<String>, S2: Into<String>, S3: Into<String> {
Identity {
password_identity: Some(protocol::PasswordIdentity::new(user_name,
password,
domain_name)),
.. self
}
}
/// Request a token scoped to the given project.
pub fn with_project_scope<S1, S2>(self, project_name: S1, domain_name: S2)
-> Identity where S1: Into<String>, S2: Into<String> {
Identity {
project_scope: Some(protocol::ProjectScope::new(project_name,
domain_name)),
.. self
}
}
/// Create an authentication method based on provided information.
pub fn create(self) -> Result<PasswordAuth> {
// TODO: support more authentication methods (at least a token)
let password_identity = match self.password_identity {
Some(p) => p,
None =>
return Err(Error::new(ErrorKind::InvalidInput, MISSING_USER))
};
// TODO: support unscoped tokens
let project_scope = match self.project_scope {
Some(p) => p,
None =>
return Err(Error::new(ErrorKind::InvalidInput, MISSING_SCOPE))
};
Ok(PasswordAuth::new(self.auth_url, self.region, password_identity,
project_scope, self.client))
}
}
/// Pull the token value out of the `X-Subject-Token` response header,
/// if present.
#[inline]
fn extract_subject_token(headers: &Headers) -> Option<String> {
    // TODO: replace with a typed header
    match headers.get_raw("x-subject-token").and_then(|h| h.one()) {
        Some(buf) => Some(String::from_utf8_lossy(buf).into_owned()),
        None => None,
    }
}
impl PasswordAuth {
    /// Get a reference to the auth URL.
    pub fn auth_url(&self) -> &Url {
        &self.auth_url
    }

    /// Build the auth request body and derive the `/v3/auth/tokens`
    /// endpoint from the auth URL.
    fn new(auth_url: Url, region: Option<String>,
           password_identity: protocol::PasswordIdentity,
           project_scope: protocol::ProjectScope,
           client: Client) -> PasswordAuth {
        let body = protocol::ProjectScopedAuthRoot::new(password_identity,
                                                        project_scope);
        // TODO: more robust logic?
        // Avoid doubling the version segment when the URL already ends
        // in "/v3".
        let token_endpoint = if auth_url.path().ends_with("/v3") {
            format!("{}/auth/tokens", auth_url)
        } else {
            format!("{}/v3/auth/tokens", auth_url)
        };
        PasswordAuth {
            client: client,
            auth_url: auth_url,
            region: region,
            body: body,
            token_endpoint: token_endpoint,
            cached_token: ValueCache::new(None)
        }
    }

    /// Turn an HTTP response into a `Token`, mapping auth failures and a
    /// missing subject header to typed errors.
    fn token_from_response(&self, mut resp: Response) -> Result<Token> {
        // The token value travels in the X-Subject-Token header, not in
        // the JSON body.
        let token_value = match resp.status() {
            StatusCode::Ok | StatusCode::Created => {
                match extract_subject_token(resp.headers()) {
                    Some(value) => value,
                    None => {
                        error!("No X-Subject-Token header received from {}",
                               self.token_endpoint);
                        return Err(Error::new(ErrorKind::InvalidResponse,
                                              MISSING_SUBJECT_HEADER));
                    }
                }
            },
            StatusCode::Unauthorized => {
                error!("Invalid credentials for user {}",
                       self.body.auth.identity.password.user.name);
                return Err(Error::new_with_details(
                    ErrorKind::AuthenticationFailed,
                    Some(resp.status()),
                    Some(String::from("Unable to authenticate"))
                ));
            },
            other => {
                error!("Unexpected HTTP error {} when getting a token for {}",
                       other, self.body.auth.identity.password.user.name);
                return Err(Error::new_with_details(
                    ErrorKind::AuthenticationFailed,
                    Some(resp.status()),
                    Some(format!("Unexpected HTTP code {} when authenticating",
                                 resp.status()))
                ));
            }
        };
        // The JSON body carries the expiry time and the service catalog.
        let body = resp.json::<protocol::TokenRoot>()?.token;
        debug!("Received a token for user {} from {} expiring at {}",
               self.body.auth.identity.password.user.name,
               self.token_endpoint, body.expires_at);
        trace!("Received catalog: {:?}", body.catalog);
        Ok(Token {
            value: token_value,
            body: body
        })
    }

    /// Ensure the cached token remains valid for more than
    /// `TOKEN_MIN_VALIDITY` minutes, re-authenticating otherwise.
    fn refresh_token(&self) -> Result<()> {
        self.cached_token.validate_and_ensure_value(|val| {
            let validity_time_left = val.body.expires_at.clone()
                .signed_duration_since(Local::now());
            trace!("Token is valid for {:?}", validity_time_left);
            return validity_time_left > Duration::minutes(TOKEN_MIN_VALIDITY);
        }, || {
            debug!("Requesting a token for user {} from {}",
                   self.body.auth.identity.password.user.name,
                   self.token_endpoint);
            let resp = self.client.post(&self.token_endpoint).json(&self.body)
                .header(ContentType::json()).send()?.error_for_status()?;
            self.token_from_response(resp)
        })
    }

    /// Return the current token value, refreshing it first when needed.
    fn get_token(&self) -> Result<String> {
        self.refresh_token()?;
        Ok(self.cached_token.extract(|t| t.value.clone()).unwrap())
    }

    /// Return the service catalog from the current token.
    fn get_catalog(&self) -> Result<Vec<protocol::CatalogRecord>> {
        self.refresh_token()?;
        Ok(self.cached_token.extract(|t| t.body.catalog.clone()).unwrap())
    }
}
impl AuthMethod for PasswordAuth {
    /// Get region.
    fn region(&self) -> Option<String> { self.region.clone() }

    /// Create an authenticated request.
    ///
    /// Refreshes the cached token when needed and attaches it via the
    /// `x-auth-token` header.
    fn request(&self, method: Method, url: Url) -> Result<RequestBuilder> {
        let token = self.get_token()?;
        let mut headers = Headers::new();
        // TODO: replace with a typed header
        headers.set_raw("x-auth-token", token);
        let mut builder = self.client.request(method, url);
        {
            // `headers()` mutates the builder in place; the returned
            // reference is intentionally discarded.
            let _unused = builder.headers(headers);
        }
        Ok(RequestBuilder::new(builder))
    }

    /// Get a URL for the requested service.
    ///
    /// Looks the service up in the token's catalog using the requested (or
    /// default) endpoint interface and the configured region.
    fn get_endpoint(&self, service_type: String,
                    endpoint_interface: Option<String>) -> Result<Url> {
        let real_interface = endpoint_interface.unwrap_or(
            self.default_endpoint_interface());
        debug!("Requesting a catalog endpoint for service '{}', interface \
               '{}' from region {:?}", service_type, real_interface,
               self.region);
        let cat = self.get_catalog()?;
        let endp = catalog::find_endpoint(&cat, &service_type,
                                          &real_interface,
                                          &self.region)?;
        debug!("Received {:?} for {}", endp, service_type);
        Url::parse(&endp.url).map_err(|e| {
            error!("Invalid URL {} received from service catalog for service \
                   '{}', interface '{}' from region {:?}: {}",
                   endp.url, service_type, real_interface, self.region, e);
            Error::new(ErrorKind::InvalidResponse,
                       format!("Invalid URL {} for {} - {}",
                               endp.url, service_type, e))
        })
    }

    /// Drop the cached token and fetch a fresh one immediately.
    fn refresh(&mut self) -> Result<()> {
        self.cached_token = ValueCache::new(None);
        self.refresh_token()
    }
}
// Unit tests for the Identity builder (no network access required; only
// construction and validation paths are exercised).
#[cfg(test)]
pub mod test {
    #![allow(unused_results)]

    use super::super::AuthMethod;
    use super::Identity;

    #[test]
    fn test_identity_new() {
        let id = Identity::new("http://127.0.0.1:8080/").unwrap();
        let e = id.auth_url;
        assert_eq!(e.scheme(), "http");
        assert_eq!(e.host_str().unwrap(), "127.0.0.1");
        assert_eq!(e.port().unwrap(), 8080u16);
        assert_eq!(e.path(), "/");
    }

    #[test]
    fn test_identity_new_invalid() {
        // Space in the authority makes the URL unparsable.
        Identity::new("http://127.0.0.1 8080/").err().unwrap();
    }

    #[test]
    fn test_identity_create() {
        let id = Identity::new("http://127.0.0.1:8080/identity").unwrap()
            .with_user("user", "pa$$w0rd", "example.com")
            .with_project_scope("cool project", "example.com")
            .create().unwrap();
        assert_eq!(&id.auth_url.to_string(), "http://127.0.0.1:8080/identity");
        assert_eq!(id.auth_url().to_string(),
                   "http://127.0.0.1:8080/identity");
        assert_eq!(&id.body.auth.identity.password.user.name, "user");
        assert_eq!(&id.body.auth.identity.password.user.password, "pa$$w0rd");
        assert_eq!(&id.body.auth.identity.password.user.domain.name,
                   "example.com");
        assert_eq!(id.body.auth.identity.methods,
                   vec![String::from("password")]);
        assert_eq!(&id.body.auth.scope.project.name, "cool project");
        assert_eq!(&id.body.auth.scope.project.domain.name, "example.com");
        // "/v3" is appended because the auth URL does not end with it.
        assert_eq!(&id.token_endpoint,
                   "http://127.0.0.1:8080/identity/v3/auth/tokens");
        assert_eq!(id.region(), None);
    }

    #[test]
    fn test_identity_create_no_scope() {
        // Missing project scope must be rejected by `create`.
        Identity::new("http://127.0.0.1:8080/identity").unwrap()
            .with_user("user", "pa$$w0rd", "example.com")
            .create().err().unwrap();
    }

    #[test]
    fn test_identity_create_no_user() {
        // Missing user credentials must be rejected by `create`.
        Identity::new("http://127.0.0.1:8080/identity").unwrap()
            .with_project_scope("cool project", "example.com")
            .create().err().unwrap();
    }
}
|
//! Copyright (c) 2020, Shoyo Inokuchi
//!
//! A simple sudoku solver written in Rust.
//! Feel free to refer to the repository at https://github.com/shoyo/sudoku for more
//! information.
use std::fmt::{Display, Error, Formatter};
mod boards;
// Standard 9x9 board dimensions.
const ROWS: usize = 9;
const COLS: usize = 9;
// Side length of a 3x3 "cage" (sub-square); since 9 / 3 = 3 this is also
// the number of cages per axis.
const CAGE_ROWS: usize = 3;
const CAGE_COLS: usize = 3;
struct Sudoku {
    // 9x9 grid; `None` marks an empty cell, `Some(1..=9)` a filled one.
    board: Vec<Vec<Option<u8>>>,
}
impl Sudoku {
    /// Initialize a sudoku board.
    ///
    /// Takes in an initial board state defined as a vector of tuples of the
    /// form `(row index, column index, value)`; values must be integers
    /// between 1 and 9.
    ///
    /// Returns an `Err` when a tuple is out of range or targets an already
    /// occupied cell.
    fn new(initial: Vec<(usize, usize, u8)>) -> Result<Self, String> {
        // 9x9 grid of empty cells (replaces the original push loops).
        let mut board = vec![vec![None; COLS]; ROWS];
        for (row, col, val) in initial {
            if row >= ROWS || col >= COLS || !(1..=9).contains(&val) {
                return Err(format!(
                    "Value: {} at position ({}, {}) is invalid.",
                    val, row, col
                ));
            }
            if board[row][col].is_some() {
                return Err(format!(
                    "Value already exists at position ({}, {}).",
                    row, col
                ));
            }
            board[row][col] = Some(val);
        }
        Ok(Self { board })
    }

    /// Solve the sudoku board with backtracking and return `Ok` if
    /// successful; return `Err` when the board cannot be solved.
    /// Mutates the internal board representation in place.
    fn solve(&mut self) -> Result<(), ()> {
        // No open cell left means the board is complete.
        let (row, col) = match self.find_open_cell_() {
            Some(cell) => cell,
            None => return Ok(()),
        };
        for val in 1..=9 {
            if self.valid_insert(row, col, val) {
                self.board[row][col] = Some(val);
                match self.solve() {
                    Ok(_) => return Ok(()),
                    // Dead end: undo the guess and try the next value.
                    Err(_) => self.board[row][col] = None,
                }
            }
        }
        Err(())
    }

    /// Return true iff the board is complete and correct.
    fn verify(&self) -> bool {
        for i in 0..ROWS {
            if !self.verify_row_(i) {
                return false;
            }
        }
        for j in 0..COLS {
            if !self.verify_col_(j) {
                return false;
            }
        }
        for ci in 0..CAGE_ROWS {
            for cj in 0..CAGE_COLS {
                if !self.verify_cage_(ci, cj) {
                    return false;
                }
            }
        }
        true
    }

    /// Return true iff the given row on the board is complete and correct.
    fn verify_row_(&self, row: usize) -> bool {
        let mut seen = [false; 10];
        for col in 0..COLS {
            let val = match self.board[row][col] {
                Some(val) => val as usize,
                None => return false,
            };
            // Range check BEFORE indexing `seen`: the original evaluated
            // `seen[val] || val > 9`, which would panic on an out-of-range
            // value instead of reporting failure.
            if val > 9 || seen[val] {
                return false;
            }
            seen[val] = true;
        }
        true
    }

    /// Return true iff the given column on the board is complete and correct.
    fn verify_col_(&self, col: usize) -> bool {
        let mut seen = [false; 10];
        for row in 0..ROWS {
            let val = match self.board[row][col] {
                Some(val) => val as usize,
                None => return false,
            };
            // Range check before indexing (see `verify_row_`).
            if val > 9 || seen[val] {
                return false;
            }
            seen[val] = true;
        }
        true
    }

    /// Return true iff the given cage on the board is complete and correct.
    /// A cage refers to a 3-by-3 square on the board with the sudoku constraint.
    fn verify_cage_(&self, cage_row: usize, cage_col: usize) -> bool {
        let mut seen = [false; 10];
        for i in 0..CAGE_ROWS {
            for j in 0..CAGE_COLS {
                let val = match self.board[cage_row * CAGE_ROWS + i][cage_col * CAGE_COLS + j] {
                    Some(val) => val as usize,
                    None => return false,
                };
                // Range check before indexing (see `verify_row_`).
                if val > 9 || seen[val] {
                    return false;
                }
                seen[val] = true;
            }
        }
        true
    }

    /// Return the row and column indexes for a cell that does not contain a
    /// value. If all cells are filled, return `None`.
    fn find_open_cell_(&self) -> Option<(usize, usize)> {
        for i in 0..ROWS {
            for j in 0..COLS {
                if self.board[i][j].is_none() {
                    return Some((i, j));
                }
            }
        }
        None
    }

    /// Return true iff the given value can be placed in the given cell.
    fn valid_insert(&self, row: usize, col: usize, val: u8) -> bool {
        self.board[row][col].is_none()
            && self.valid_row_insert_(row, val)
            && self.valid_col_insert_(col, val)
            && self.valid_cage_insert_(row / CAGE_ROWS, col / CAGE_COLS, val)
    }

    /// Return true iff the given value can be placed in the given row.
    fn valid_row_insert_(&self, row: usize, val: u8) -> bool {
        self.board[row].iter().all(|&cell| cell != Some(val))
    }

    /// Return true iff the given value can be placed in the given column.
    fn valid_col_insert_(&self, col: usize, val: u8) -> bool {
        self.board.iter().all(|r| r[col] != Some(val))
    }

    /// Return true iff the given value can be placed in the given cage.
    /// A cage refers to a 3-by-3 square on the board with the sudoku constraint.
    fn valid_cage_insert_(&self, cage_row: usize, cage_col: usize, val: u8) -> bool {
        for i in 0..CAGE_ROWS {
            for j in 0..CAGE_COLS {
                if self.board[cage_row * CAGE_ROWS + i][cage_col * CAGE_COLS + j] == Some(val) {
                    return false;
                }
            }
        }
        true
    }
}
impl Display for Sudoku {
    /// Write the board as a 9x9 grid, one row per line; empty cells are
    /// rendered as `-`.
    ///
    /// Fix: the original implementation printed directly to stdout with
    /// `print!`/`println!` and ignored the `Formatter`, so
    /// `format!("{}", sudoku)` returned an empty string and printing was a
    /// hidden side effect. Writing into the `Formatter` honors the
    /// `Display` contract; the visible output through `println!("{}", _)`
    /// is unchanged.
    fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), Error> {
        for i in 0..ROWS {
            for j in 0..COLS {
                match self.board[i][j] {
                    Some(num) => write!(fmt, " {} ", num)?,
                    None => write!(fmt, " - ")?,
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}
/// Example usage of Sudoku API.
fn main() {
    // Initialize puzzle from one of the bundled example boards.
    let initial_state = boards::VALID_PUZZLE_1.to_vec();
    let mut puzzle = Sudoku::new(initial_state).unwrap();
    println!("BEFORE:");
    println!("{}", puzzle);
    // Solve puzzle and report the outcome.
    if puzzle.solve().is_ok() {
        println!("AFTER:");
        println!("{}", puzzle);
    } else {
        println!("Invalid puzzle.");
    }
}
/// Unit tests.
// Fix: replaced every `assert_eq!(expr, true/false)` with the idiomatic
// `assert!(expr)` / `assert!(!expr)` (clippy `bool_assert_comparison`);
// test coverage and semantics are unchanged.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn create_puzzle() {
        let puzzle = Sudoku::new(vec![(0, 1, 3), (5, 3, 8), (8, 8, 4)]);
        assert!(puzzle.is_ok());
    }
    #[test]
    fn create_invalid_puzzle() {
        // Cell values above 9 must be rejected by the constructor.
        let puzzle = Sudoku::new(vec![(0, 0, 10)]);
        assert!(puzzle.is_err());
    }
    #[test]
    fn verify_valid_solution() {
        let puzzle = Sudoku::new(boards::VALID_SOLUTION.to_vec()).unwrap();
        assert!(puzzle.verify());
    }
    #[test]
    fn verify_invalid_solution() {
        let puzzle = Sudoku::new(boards::INVALID_SOLUTION.to_vec()).unwrap();
        assert!(!puzzle.verify());
    }
    #[test]
    fn verify_valid_row() {
        let puzzle = Sudoku::new(boards::VALID_ROW.to_vec()).unwrap();
        assert!(puzzle.verify_row_(4));
    }
    #[test]
    fn verify_invalid_row() {
        let puzzle = Sudoku::new(boards::INVALID_ROW.to_vec()).unwrap();
        assert!(!puzzle.verify_row_(4));
    }
    #[test]
    fn verify_valid_col() {
        let puzzle = Sudoku::new(boards::VALID_COL.to_vec()).unwrap();
        assert!(puzzle.verify_col_(4));
    }
    #[test]
    fn verify_invalid_col() {
        let puzzle = Sudoku::new(boards::INVALID_COL.to_vec()).unwrap();
        assert!(!puzzle.verify_col_(4));
    }
    #[test]
    fn verify_valid_cage() {
        let puzzle = Sudoku::new(boards::VALID_CAGE.to_vec()).unwrap();
        assert!(puzzle.verify_cage_(0, 0));
    }
    #[test]
    fn verify_invalid_cage() {
        let puzzle = Sudoku::new(boards::INVALID_CAGE.to_vec()).unwrap();
        assert!(!puzzle.verify_cage_(0, 0));
    }
    #[test]
    fn try_valid_row_insert() {
        // An empty board accepts any value anywhere.
        let puzzle = Sudoku::new(Vec::new()).unwrap();
        assert!(puzzle.valid_row_insert_(0, 1));
    }
    #[test]
    fn try_invalid_row_insert() {
        let puzzle = Sudoku::new(boards::VALID_ROW.to_vec()).unwrap();
        assert!(!puzzle.valid_row_insert_(4, 1));
    }
    #[test]
    fn try_valid_col_insert() {
        let puzzle = Sudoku::new(Vec::new()).unwrap();
        assert!(puzzle.valid_col_insert_(0, 1));
    }
    #[test]
    fn try_invalid_col_insert() {
        let puzzle = Sudoku::new(boards::VALID_COL.to_vec()).unwrap();
        assert!(!puzzle.valid_col_insert_(4, 1));
    }
    #[test]
    fn try_valid_cage_insert() {
        let puzzle = Sudoku::new(Vec::new()).unwrap();
        assert!(puzzle.valid_cage_insert_(0, 0, 1));
    }
    #[test]
    fn try_invalid_cage_insert() {
        let puzzle = Sudoku::new(boards::VALID_CAGE.to_vec()).unwrap();
        assert!(!puzzle.valid_cage_insert_(0, 0, 1));
    }
    #[test]
    fn solve_valid_puzzle_1() {
        let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_1.to_vec()).unwrap();
        assert!(!puzzle.verify());
        let _ = puzzle.solve();
        assert!(puzzle.verify());
    }
    #[test]
    fn solve_valid_puzzle_2() {
        let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_2.to_vec()).unwrap();
        assert!(!puzzle.verify());
        let _ = puzzle.solve();
        assert!(puzzle.verify());
    }
    #[test]
    fn solve_valid_puzzle_3() {
        let mut puzzle = Sudoku::new(boards::VALID_PUZZLE_3.to_vec()).unwrap();
        assert!(!puzzle.verify());
        let _ = puzzle.solve();
        assert!(puzzle.verify());
    }
}
|
pub use self::ball::Ball;
pub use self::collision_box::CollisionBox;
pub use self::human::Human;
pub use self::movement_state::MovementState;
pub use self::net::Net;
pub use self::player::Player;
pub use self::player::PlayerType;
pub use self::robot::Robot;
pub mod ball;
pub mod collision_box;
pub mod human;
pub mod movement_state;
pub mod net;
pub mod player;
pub mod robot;
|
use hymns::input::parse_char_delimited_numbers;
use hymns::runner::timed_run;
const INPUT: &str = include_str!("../input.txt");
/// Part 1: minimum total fuel when each move costs 1 unit.
fn part1() -> i32 {
    let crabs: Vec<i32> = parse_char_delimited_numbers(INPUT, ',').collect();
    // Find the span of candidate alignment positions in one pass.
    let (min, max) = crabs
        .iter()
        .fold((i32::MAX, i32::MIN), |(lo, hi), &crab| {
            (lo.min(crab), hi.max(crab))
        });
    // Cost of a position is the sum of absolute distances to it.
    (min..=max)
        .map(|pos| crabs.iter().map(|c| (c - pos).abs()).sum::<i32>())
        .min()
        .unwrap_or(i32::MAX)
}
/// Part 2: minimum total fuel when the k-th step of a move costs k units.
fn part2() -> i32 {
    let crabs: Vec<i32> = parse_char_delimited_numbers(INPUT, ',').collect();
    // Find the span of candidate alignment positions in one pass.
    let (min, max) = crabs
        .iter()
        .fold((i32::MAX, i32::MIN), |(lo, hi), &crab| {
            (lo.min(crab), hi.max(crab))
        });
    (min..=max)
        .map(|pos| {
            crabs
                .iter()
                .map(|c| {
                    let steps = (c - pos).abs();
                    // Formula for sum of first n integers
                    steps * (steps + 1) / 2
                })
                .sum::<i32>()
        })
        .min()
        .unwrap_or(i32::MAX)
}
// Entry point: run both parts through the shared harness.
// NOTE(review): `timed_run` comes from the `hymns` crate; presumably it
// labels and times each closure - confirm against hymns::runner.
fn main() {
    timed_run(1, part1);
    timed_run(2, part2);
}
#[cfg(test)]
mod tests {
    use super::*;
    // Regression tests: the expected values are the known answers for
    // the committed `input.txt`; they will differ for any other input.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 349769);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 99540554);
    }
}
|
pub mod sprite_sheet;
pub mod prefab; |
/// Return the length of the longest run of consecutive integers
/// (e.g. 1,2,3,4) contained anywhere in `nums`, in O(n) expected time.
/// Returns 0 for an empty input.
pub fn longest_consecutive(nums: Vec<i32>) -> i32 {
    use std::collections::HashSet;
    // Collect once for O(1) membership tests; this also deduplicates.
    let set: HashSet<i32> = nums.into_iter().collect();
    let mut longest = 0;
    // Iterate the deduplicated set rather than the raw input: the
    // original walked `nums`, so repeated values re-ran the inner scan
    // and broke the O(n) bound on duplicate-heavy input.
    for &num in &set {
        // Only start counting from the smallest element of each run;
        // every other element is reached by some start's inner walk.
        if !set.contains(&(num - 1)) {
            let mut current = num;
            while set.contains(&(current + 1)) {
                current += 1;
            }
            longest = longest.max(current - num + 1);
        }
    }
    longest
}
use crate::errors::ApiError;
use crate::models::games::*;
use crate::models::teams::*;
use crate::schema::{games::dsl::*, matches, teams::dsl::*};
use diesel::{prelude::*, SqliteConnection};
/// A match between two teams, as stored in the `matches` table.
#[derive(
    serde_derive::Serialize, serde_derive::Deserialize, Clone, Queryable,
)]
pub struct Match {
    // Primary key of the match row.
    pub id: i32,
    // Foreign key into `teams` for the first team.
    pub team_1: i32,
    // Foreign key into `teams` for the second team.
    pub team_2: i32,
    // When the match takes place. NOTE(review): `NaiveDateTime` carries
    // no timezone - presumably stored as UTC; confirm with the writers.
    pub timestamp: chrono::NaiveDateTime,
}
impl Match {
    /// Shared lookup: fetch a team row by primary key, mapping any
    /// diesel error to a 404 `ApiError` with the given message.
    /// (Consolidates the previously duplicated bodies of
    /// `get_team_1` and `get_team_2`.)
    fn find_team(
        conn: &SqliteConnection,
        team: i32,
        not_found_msg: &str,
    ) -> Result<Team, ApiError> {
        teams
            .find(team)
            .first(conn)
            .map_err(|_| ApiError::new(not_found_msg, 404))
    }
    /// Fetch the first team of this match (404 if missing).
    pub fn get_team_1(
        &self,
        conn: &SqliteConnection,
    ) -> Result<Team, ApiError> {
        Self::find_team(conn, self.team_1, "Could not find team_1 of match")
    }
    /// Fetch the second team of this match (404 if missing).
    pub fn get_team_2(
        &self,
        conn: &SqliteConnection,
    ) -> Result<Team, ApiError> {
        Self::find_team(conn, self.team_2, "Could not find team_2 of match")
    }
    /// Fetch all games recorded for this match (404 on query failure).
    pub fn get_games(
        &self,
        conn: &SqliteConnection,
    ) -> Result<Vec<Game>, ApiError> {
        games
            .filter(match_id.eq(self.id))
            .get_results(conn)
            .map_err(|_| ApiError::new("Could not get games for match", 404))
    }
}
/// Insertable payload for creating a `matches` row; the id and
/// timestamp are supplied by the database layer, not the client.
#[derive(serde_derive::Deserialize, Insertable)]
#[table_name = "matches"]
pub struct NewMatch {
    pub team_1: i32,
    pub team_2: i32,
}
|
/// Print a greeting, then count from 1 to 99, printing each value.
fn main() {
    let hi = "Hi";
    // Fix: `1i` / `100i` were pre-1.0 integer-suffix syntax and no
    // longer compile; bare literals default to `i32` here.
    let mut counter = 1;
    let max_counter = 100;
    println!("{}", hi);
    while counter < max_counter {
        println!("count is now {}", counter);
        counter += 1;
    }
}
|
#![cfg_attr(not(target_arch = "x86_64"), no_std)]
#![cfg_attr(not(target_arch = "x86_64"), no_main)]
#![cfg_attr(not(target_arch = "x86_64"),feature(custom_test_frameworks, lang_items, start))]
#![cfg_attr(not(target_arch = "x86_64"),test_runner(crate::test_runner))]
extern crate libc;
extern crate rand;
extern crate rand_core;
extern crate rand_isaac;
pub mod chapter2;
pub mod chapter3;
pub mod chapter4;
pub mod chapter5;
pub mod chapter6;
pub mod chapter7;
pub mod chapter8;
pub mod chapter9;
pub mod chapter10;
pub mod chapter11;
pub mod chapter12;
pub mod chapter15;
pub mod chapter16;
pub mod chapter17;
#[cfg(target_arch = "x86_64")]
use std::{ops::{Add, BitAnd, BitOr, BitOrAssign, BitXor, Not, Shl, ShlAssign, Shr, ShrAssign, Sub}, hash::{Hash, Hasher}, cmp::{Ord, Ordering, PartialOrd}, fmt::{Binary, Display, Formatter, LowerHex, Octal, UpperHex}};
#[cfg(target_arch = "riscv64")]
use core::{ops::{Add, BitAnd, BitOr, BitOrAssign, BitXor, Not, Shl, ShlAssign, Shr, ShrAssign, Sub}, hash::{Hash, Hasher}, cmp::{Ord, Ordering, PartialOrd}, fmt::{Binary, Display, Formatter, LowerHex, Octal, UpperHex}};
#[cfg(target_arch = "x86_64")]
pub use std::fmt::Error;
#[cfg(target_arch = "riscv64")]
pub use core::fmt::Error;
/// modified from https://docs.rs/ux/0.0.1/src/ux/lib.rs.html#63
// Generates `From<$from> for $name` impls. The three bracketed arms fan
// lists of names/sources out into pairwise invocations of the base
// `{$name, $from}` arm, which wraps `x.into()` - so the backing type of
// `$name` must itself implement `From<$from>`.
macro_rules! implement_from {
    {[$($name:ident),*], [$($from:ident),*] } => {$(implement_from!($name, $from);)*};
    {$name:ident, [$($from:ident),*] } => {$(implement_from!($name, $from);)*};
    {[$($name:ident),*], $from:ident } => {$(implement_from!($name, $from);)*};
    {$name:ident, $from:ty} => {
        impl From<$from> for $name {
            fn from(x: $from) -> $name {
                $name(x.into())
            }
        }
    };
}
// Only implement if $type can be converted from $name lossless
// Generates `From<$name> for $into` impls (the reverse direction of
// `implement_from!`): unwraps the newtype and widens via
// `$into::from(x.0)`, so `$into: From<$type>` must hold.
macro_rules! implement_into {
    {[$($name:ident),*], $from:ident } => {$(implement_into!($name, $from);)*};
    {$name:ident, $into:ident} => {
        impl From<$name> for $into {
            fn from(x: $name) -> $into {
                $into::from(x.0)
            }
        }
    };
}
// Implements `BitXnor` for primitive integer types using the identity
// `xnor(a, b) = (a & b) | (!a & !b)`.
// NOTE(review): the macro's name `not_impl` is misleading - it
// implements XNOR, not NOT. Renaming would require updating the
// `not_impl! { i8 i16 }` invocation below, so it is only flagged here.
macro_rules! not_impl {
    ($($t:ty)*) => ($(
        impl BitXnor for $t {
            type Output = $t;
            #[inline]
            fn xnor(self, rhs: $t) -> $t { self & rhs | !self & !rhs }
        }
    )*)
}
// Defines a masked fixed-width signed integer newtype `$name` that
// keeps `$bits` bits of two's-complement range inside the wider
// primitive `$type`, then pulls in all shared impls via
// `implement_common!`.
macro_rules! define_c {
    // No-doc convenience arm delegates to the documented arm.
    ($name:ident, $bits:expr, $type:ident) => {define_c!(#[doc=""], $name, $bits, $type);};
    (#[$doc:meta], $name:ident, $bits:expr, $type:ident) => {
        #[$doc]
        #[allow(non_camel_case_types)]
        #[derive(Default, Clone, Copy, Debug)]
        pub struct $name($type);
        #[$doc]
        impl $name {
            // Largest representable value: 2^($bits - 1) - 1.
            pub const MAX: Self = $name(((1 as $type) << ($bits - 1)) - 1);
            // Smallest representable value: -(2^($bits - 1)).
            pub const MIN: Self = $name(-((1 as $type) << ($bits - 1)));
            // Truncate/sign-extend the backing value to `$bits` bits:
            // if the logical sign bit is clear, zero the high bits of
            // `$type`; otherwise set them - keeping the stored `$type`
            // value numerically equal to the `$bits`-bit interpretation.
            fn mask(self) -> Self {
                if ( self.0 & (1<<($bits-1)) ) == 0 {
                    $name(self.0 & ( ((1 as $type) << $bits).overflowing_sub(1).0))
                } else {
                    $name(self.0 | !( ((1 as $type) << $bits).overflowing_sub(1).0))
                }
            }
        }
        implement_common!($name, $bits, $type);
    }
}
// Generates the shared API for a `define_c!` newtype: constructors,
// wrapping arithmetic, comparison/hashing, formatting, and bitwise
// operators.
macro_rules! implement_common {
    ($name:ident, $bits:expr, $type:ident) => {
        impl $name {
            /// Returns the smallest value that can be represented by this integer type.
            pub fn min_value() -> $name {
                $name::MIN
            }
            /// Returns the largest value that can be represented by this integer type.
            pub fn max_value() -> $name {
                $name::MAX
            }
            // Checked constructor: panics when `value` is out of range.
            pub fn new(value: $type) -> $name {
                assert!(value <= $name::MAX.0 && value >= $name::MIN.0);
                $name(value)
            }
            // Wrapping ops defer to the backing type, then re-mask so
            // the result stays within the `$bits`-bit representation.
            pub fn wrapping_sub(self, rhs: Self) -> Self {
                $name(self.0.wrapping_sub(rhs.0)).mask()
            }
            pub fn wrapping_add(self, rhs: Self) -> Self {
                $name(self.0.wrapping_add(rhs.0)).mask()
            }
        }
        // Comparison and hashing operate on masked values, so two
        // values with different raw backing bits but equal `$bits`-bit
        // interpretations compare (and hash) equal.
        impl PartialEq for $name {
            fn eq(&self, other: &Self) -> bool {
                self.mask().0 == other.mask().0
            }
        }
        impl Eq for $name {}
        impl PartialOrd for $name {
            fn partial_cmp(&self, other: &$name) -> Option<Ordering> {
                self.mask().0.partial_cmp(&other.mask().0)
            }
        }
        impl Ord for $name {
            fn cmp(&self, other: &$name) -> Ordering {
                self.mask().0.cmp(&other.mask().0)
            }
        }
        impl Hash for $name {
            fn hash<H: Hasher>(&self, h: &mut H) {
                self.mask().0.hash(h)
            }
        }
        // Implement formatting functions
        // Each delegates to the backing type's formatter on the raw
        // stored value.
        impl Display for $name {
            fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
                let &$name(ref value) = self;
                <$type as Display>::fmt(value, f)
            }
        }
        impl UpperHex for $name {
            fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
                let &$name(ref value) = self;
                <$type as UpperHex>::fmt(value, f)
            }
        }
        impl LowerHex for $name {
            fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
                let &$name(ref value) = self;
                <$type as LowerHex>::fmt(value, f)
            }
        }
        impl Octal for $name {
            fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
                let &$name(ref value) = self;
                <$type as Octal>::fmt(value, f)
            }
        }
        impl Binary for $name {
            fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
                let &$name(ref value) = self;
                <$type as Binary>::fmt(value, f)
            }
        }
        // Every operator below follows the same pattern: compute on
        // masked backing values, and when the masked result is
        // negative, shift it into the non-negative range by adding
        // MAX + 1 (itself going through the masked `Add` impl).
        // NOTE(review): the intent of this negative-normalization is
        // not evident from this file alone - confirm against the
        // crate's documentation before changing it.
        impl BitXor for $name {
            type Output = $name;
            fn bitxor(self, other: $name) -> Self::Output {
                let result = $name(self.mask().0.bitxor(&other.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<T> Shr<T> for $name
        where
            $type: Shr<T, Output = $type>,
        {
            type Output = $name;
            fn shr(self, rhs: T) -> $name {
                let result = $name(self.mask().0.shr(rhs));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<T> Shl<T> for $name
        where
            $type: Shl<T, Output = $type>,
        {
            type Output = $name;
            fn shl(self, rhs: T) -> $name {
                let result = $name(self.mask().0.shl(rhs));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        // Assigning shifts mask first, then shift the raw value in
        // place (the result is re-masked on the next observation).
        impl<T> ShrAssign<T> for $name
        where
            $type: ShrAssign<T>,
        {
            fn shr_assign(&mut self, rhs: T) {
                *self = self.mask();
                self.0.shr_assign(rhs);
            }
        }
        impl<T> ShlAssign<T> for $name
        where
            $type: ShlAssign<T>,
        {
            fn shl_assign(&mut self, rhs: T) {
                *self = self.mask();
                self.0.shl_assign(rhs);
            }
        }
        // BitOr for all four value/reference receiver combinations.
        impl BitOr<$name> for $name {
            type Output = $name;
            fn bitor(self, rhs: $name) -> Self::Output {
                let result = $name(self.mask().0.bitor(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<'a> BitOr<&'a $name> for $name {
            type Output = <$name as BitOr<$name>>::Output;
            fn bitor(self, rhs: &'a $name) -> Self::Output {
                let result = $name(self.mask().0.bitor(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<'a> BitOr<$name> for &'a $name {
            type Output = <$name as BitOr<$name>>::Output;
            fn bitor(self, rhs: $name) -> Self::Output {
                let result = $name(self.mask().0.bitor(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<'a> BitOr<&'a $name> for &'a $name {
            type Output = <$name as BitOr<$name>>::Output;
            fn bitor(self, rhs: &'a $name) -> Self::Output {
                let result = $name(self.mask().0.bitor(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl BitOrAssign<$name> for $name {
            fn bitor_assign(&mut self, other: $name) {
                *self = self.mask();
                self.0.bitor_assign(other.mask().0);
            }
        }
        // BitAnd for all four value/reference receiver combinations.
        impl BitAnd<$name> for $name {
            type Output = $name;
            fn bitand(self, rhs: $name) -> Self::Output {
                let result = $name(self.mask().0.bitand(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<'a> BitAnd<&'a $name> for $name {
            type Output = <$name as BitAnd<$name>>::Output;
            fn bitand(self, rhs: &'a $name) -> Self::Output {
                let result = $name(self.mask().0.bitand(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<'a> BitAnd<$name> for &'a $name {
            type Output = <$name as BitAnd<$name>>::Output;
            fn bitand(self, rhs: $name) -> Self::Output {
                let result = $name(self.mask().0.bitand(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl<'a> BitAnd<&'a $name> for &'a $name {
            type Output = <$name as BitAnd<$name>>::Output;
            fn bitand(self, rhs: &'a $name) -> Self::Output {
                let result = $name(self.mask().0.bitand(rhs.mask().0));
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl Add for $name {
            type Output = Self;
            fn add(self, _rhs: Self) -> $name {
                let result = $name(self.0.wrapping_add(_rhs.0)).mask();
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl Not for $name {
            type Output = Self;
            fn not(self) -> $name {
                let result = $name(self.mask().0.not());
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        impl Sub for $name {
            type Output = Self;
            fn sub(self, _rhs: Self) -> $name {
                let result = $name(self.0.wrapping_sub(_rhs.0)).mask();
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
        // XNOR via the identity (a & b) | (!a & !b) on the raw values.
        impl BitXnor for $name {
            type Output = Self;
            fn xnor(self, _rhs: Self) -> $name {
                let result = $name(self.0 & _rhs.0 | !self.0 & !_rhs.0);
                if result.mask().0 < 0 {
                    result + $name::MAX + $name(1)
                } else {
                    result
                }
            }
        }
    }
}
/// Bitwise XNOR (equivalence): the result has a 1 in every bit
/// position where `self` and `rhs` agree.
pub trait BitXnor<Rhs = Self> {
    type Output;
    fn xnor(self, rhs: Rhs) -> Self::Output;
}
// Give the backing primitives an XNOR so the generated types can use it.
not_impl! { i8 i16 }
// Fix: the original `#[doc]` strings all read "The 8-bit unsigned
// integer type.", which matched none of these definitions (they are
// masked *signed* types of varying widths). The docs below are derived
// from each invocation's bit count and backing primitive.
define_c!(#[doc="Masked 17-bit signed integer stored in an i32. NOTE(review): every other `cN` here uses N+1 bits; confirm 17 (vs 16) is intentional."], c15, 17, i32);
define_c!(#[doc="Masked 15-bit signed integer stored in an i16."], c14, 15, i16);
define_c!(#[doc="Masked 14-bit signed integer stored in an i16."], c13, 14, i16);
define_c!(#[doc="Masked 13-bit signed integer stored in an i16."], c12, 13, i16);
define_c!(#[doc="Masked 12-bit signed integer stored in an i16."], c11, 12, i16);
define_c!(#[doc="Masked 11-bit signed integer stored in an i16."], c10, 11, i16);
define_c!(#[doc="Masked 10-bit signed integer stored in an i16."], c9, 10, i16);
define_c!(#[doc="Masked 9-bit signed integer stored in an i16."], c8, 9, i16);
define_c!(#[doc="Masked 7-bit signed integer stored in an i8."], c6, 7, i8);
define_c!(#[doc="Masked 6-bit signed integer stored in an i8."], c5, 6, i8);
define_c!(#[doc="Masked 5-bit signed integer stored in an i8."], c4, 5, i8);
define_c!(#[doc="Masked 4-bit signed integer stored in an i8."], c3, 4, i8);
define_c!(#[doc="Masked 3-bit signed integer stored in an i8."], c2, 3, i8);
define_c!(#[doc="Masked 2-bit signed integer stored in an i8."], c1, 2, i8);
// Lossless widening conversions between the generated types and
// primitives.
implement_into!([c1, c2, c3, c4, c5, c6], i8);
implement_into!([c1, c2, c3, c4, c5, c6], i64);
implement_into!([c8,c9,c10,c11,c12,c13,c14], i16);
implement_from!(c6, [c1, c2, c3, c4, c5]);
/// Minimal custom-test-framework runner: invokes every registered
/// test closure, in order.
#[cfg(test)]
fn test_runner(tests: &[&dyn Fn()]) {
    tests.iter().for_each(|test| test());
}
// Bare-metal (riscv64) runtime shims: with `no_std`/`no_main` active on
// this target, the crate must supply its own entry point, panic
// handler, and unwinding personality.
#[cfg(target_arch = "riscv64")]
#[start]
pub extern "C" fn _start() -> ! {
    // Entry point stub: spin forever.
    loop {}
}
#[cfg(target_arch = "riscv64")]
#[panic_handler]
fn my_panic(_info: &core::panic::PanicInfo) -> ! {
    // No console on this target - halt by spinning.
    loop {}
}
// Unwinding personality stub; this target never unwinds.
#[cfg(target_arch = "riscv64")]
#[lang = "eh_personality"]
extern "C" fn eh_personality() {}
|
pub const WORDLIST: &'static [&'static str] = &[
"ábaco",
"abdomen",
"abeja",
"abierto",
"abogado",
"abono",
"aborto",
"abrazo",
"abrir",
"abuelo",
"abuso",
"acabar",
"academia",
"acceso",
"acción",
"aceite",
"acelga",
"acento",
"aceptar",
"ácido",
"aclarar",
"acné",
"acoger",
"acoso",
"activo",
"acto",
"actriz",
"actuar",
"acudir",
"acuerdo",
"acusar",
"adicto",
"admitir",
"adoptar",
"adorno",
"aduana",
"adulto",
"aéreo",
"afectar",
"afición",
"afinar",
"afirmar",
"ágil",
"agitar",
"agonía",
"agosto",
"agotar",
"agregar",
"agrio",
"agua",
"agudo",
"águila",
"aguja",
"ahogo",
"ahorro",
"aire",
"aislar",
"ajedrez",
"ajeno",
"ajuste",
"alacrán",
"alambre",
"alarma",
"alba",
"álbum",
"alcalde",
"aldea",
"alegre",
"alejar",
"alerta",
"aleta",
"alfiler",
"alga",
"algodón",
"aliado",
"aliento",
"alivio",
"alma",
"almeja",
"almíbar",
"altar",
"alteza",
"altivo",
"alto",
"altura",
"alumno",
"alzar",
"amable",
"amante",
"amapola",
"amargo",
"amasar",
"ámbar",
"ámbito",
"ameno",
"amigo",
"amistad",
"amor",
"amparo",
"amplio",
"ancho",
"anciano",
"ancla",
"andar",
"andén",
"anemia",
"ángulo",
"anillo",
"ánimo",
"anís",
"anotar",
"antena",
"antiguo",
"antojo",
"anual",
"anular",
"anuncio",
"añadir",
"añejo",
"año",
"apagar",
"aparato",
"apetito",
"apio",
"aplicar",
"apodo",
"aporte",
"apoyo",
"aprender",
"aprobar",
"apuesta",
"apuro",
"arado",
"araña",
"arar",
"árbitro",
"árbol",
"arbusto",
"archivo",
"arco",
"arder",
"ardilla",
"arduo",
"área",
"árido",
"aries",
"armonía",
"arnés",
"aroma",
"arpa",
"arpón",
"arreglo",
"arroz",
"arruga",
"arte",
"artista",
"asa",
"asado",
"asalto",
"ascenso",
"asegurar",
"aseo",
"asesor",
"asiento",
"asilo",
"asistir",
"asno",
"asombro",
"áspero",
"astilla",
"astro",
"astuto",
"asumir",
"asunto",
"atajo",
"ataque",
"atar",
"atento",
"ateo",
"ático",
"atleta",
"átomo",
"atraer",
"atroz",
"atún",
"audaz",
"audio",
"auge",
"aula",
"aumento",
"ausente",
"autor",
"aval",
"avance",
"avaro",
"ave",
"avellana",
"avena",
"avestruz",
"avión",
"aviso",
"ayer",
"ayuda",
"ayuno",
"azafrán",
"azar",
"azote",
"azúcar",
"azufre",
"azul",
"baba",
"babor",
"bache",
"bahía",
"baile",
"bajar",
"balanza",
"balcón",
"balde",
"bambú",
"banco",
"banda",
"baño",
"barba",
"barco",
"barniz",
"barro",
"báscula",
"bastón",
"basura",
"batalla",
"batería",
"batir",
"batuta",
"baúl",
"bazar",
"bebé",
"bebida",
"bello",
"besar",
"beso",
"bestia",
"bicho",
"bien",
"bingo",
"blanco",
"bloque",
"blusa",
"boa",
"bobina",
"bobo",
"boca",
"bocina",
"boda",
"bodega",
"boina",
"bola",
"bolero",
"bolsa",
"bomba",
"bondad",
"bonito",
"bono",
"bonsái",
"borde",
"borrar",
"bosque",
"bote",
"botín",
"bóveda",
"bozal",
"bravo",
"brazo",
"brecha",
"breve",
"brillo",
"brinco",
"brisa",
"broca",
"broma",
"bronce",
"brote",
"bruja",
"brusco",
"bruto",
"buceo",
"bucle",
"bueno",
"buey",
"bufanda",
"bufón",
"búho",
"buitre",
"bulto",
"burbuja",
"burla",
"burro",
"buscar",
"butaca",
"buzón",
"caballo",
"cabeza",
"cabina",
"cabra",
"cacao",
"cadáver",
"cadena",
"caer",
"café",
"caída",
"caimán",
"caja",
"cajón",
"cal",
"calamar",
"calcio",
"caldo",
"calidad",
"calle",
"calma",
"calor",
"calvo",
"cama",
"cambio",
"camello",
"camino",
"campo",
"cáncer",
"candil",
"canela",
"canguro",
"canica",
"canto",
"caña",
"cañón",
"caoba",
"caos",
"capaz",
"capitán",
"capote",
"captar",
"capucha",
"cara",
"carbón",
"cárcel",
"careta",
"carga",
"cariño",
"carne",
"carpeta",
"carro",
"carta",
"casa",
"casco",
"casero",
"caspa",
"castor",
"catorce",
"catre",
"caudal",
"causa",
"cazo",
"cebolla",
"ceder",
"cedro",
"celda",
"célebre",
"celoso",
"célula",
"cemento",
"ceniza",
"centro",
"cerca",
"cerdo",
"cereza",
"cero",
"cerrar",
"certeza",
"césped",
"cetro",
"chacal",
"chaleco",
"champú",
"chancla",
"chapa",
"charla",
"chico",
"chiste",
"chivo",
"choque",
"choza",
"chuleta",
"chupar",
"ciclón",
"ciego",
"cielo",
"cien",
"cierto",
"cifra",
"cigarro",
"cima",
"cinco",
"cine",
"cinta",
"ciprés",
"circo",
"ciruela",
"cisne",
"cita",
"ciudad",
"clamor",
"clan",
"claro",
"clase",
"clave",
"cliente",
"clima",
"clínica",
"cobre",
"cocción",
"cochino",
"cocina",
"coco",
"código",
"codo",
"cofre",
"coger",
"cohete",
"cojín",
"cojo",
"cola",
"colcha",
"colegio",
"colgar",
"colina",
"collar",
"colmo",
"columna",
"combate",
"comer",
"comida",
"cómodo",
"compra",
"conde",
"conejo",
"conga",
"conocer",
"consejo",
"contar",
"copa",
"copia",
"corazón",
"corbata",
"corcho",
"cordón",
"corona",
"correr",
"coser",
"cosmos",
"costa",
"cráneo",
"cráter",
"crear",
"crecer",
"creído",
"crema",
"cría",
"crimen",
"cripta",
"crisis",
"cromo",
"crónica",
"croqueta",
"crudo",
"cruz",
"cuadro",
"cuarto",
"cuatro",
"cubo",
"cubrir",
"cuchara",
"cuello",
"cuento",
"cuerda",
"cuesta",
"cueva",
"cuidar",
"culebra",
"culpa",
"culto",
"cumbre",
"cumplir",
"cuna",
"cuneta",
"cuota",
"cupón",
"cúpula",
"curar",
"curioso",
"curso",
"curva",
"cutis",
"dama",
"danza",
"dar",
"dardo",
"dátil",
"deber",
"débil",
"década",
"decir",
"dedo",
"defensa",
"definir",
"dejar",
"delfín",
"delgado",
"delito",
"demora",
"denso",
"dental",
"deporte",
"derecho",
"derrota",
"desayuno",
"deseo",
"desfile",
"desnudo",
"destino",
"desvío",
"detalle",
"detener",
"deuda",
"día",
"diablo",
"diadema",
"diamante",
"diana",
"diario",
"dibujo",
"dictar",
"diente",
"dieta",
"diez",
"difícil",
"digno",
"dilema",
"diluir",
"dinero",
"directo",
"dirigir",
"disco",
"diseño",
"disfraz",
"diva",
"divino",
"doble",
"doce",
"dolor",
"domingo",
"don",
"donar",
"dorado",
"dormir",
"dorso",
"dos",
"dosis",
"dragón",
"droga",
"ducha",
"duda",
"duelo",
"dueño",
"dulce",
"dúo",
"duque",
"durar",
"dureza",
"duro",
"ébano",
"ebrio",
"echar",
"eco",
"ecuador",
"edad",
"edición",
"edificio",
"editor",
"educar",
"efecto",
"eficaz",
"eje",
"ejemplo",
"elefante",
"elegir",
"elemento",
"elevar",
"elipse",
"élite",
"elixir",
"elogio",
"eludir",
"embudo",
"emitir",
"emoción",
"empate",
"empeño",
"empleo",
"empresa",
"enano",
"encargo",
"enchufe",
"encía",
"enemigo",
"enero",
"enfado",
"enfermo",
"engaño",
"enigma",
"enlace",
"enorme",
"enredo",
"ensayo",
"enseñar",
"entero",
"entrar",
"envase",
"envío",
"época",
"equipo",
"erizo",
"escala",
"escena",
"escolar",
"escribir",
"escudo",
"esencia",
"esfera",
"esfuerzo",
"espada",
"espejo",
"espía",
"esposa",
"espuma",
"esquí",
"estar",
"este",
"estilo",
"estufa",
"etapa",
"eterno",
"ética",
"etnia",
"evadir",
"evaluar",
"evento",
"evitar",
"exacto",
"examen",
"exceso",
"excusa",
"exento",
"exigir",
"exilio",
"existir",
"éxito",
"experto",
"explicar",
"exponer",
"extremo",
"fábrica",
"fábula",
"fachada",
"fácil",
"factor",
"faena",
"faja",
"falda",
"fallo",
"falso",
"faltar",
"fama",
"familia",
"famoso",
"faraón",
"farmacia",
"farol",
"farsa",
"fase",
"fatiga",
"fauna",
"favor",
"fax",
"febrero",
"fecha",
"feliz",
"feo",
"feria",
"feroz",
"fértil",
"fervor",
"festín",
"fiable",
"fianza",
"fiar",
"fibra",
"ficción",
"ficha",
"fideo",
"fiebre",
"fiel",
"fiera",
"fiesta",
"figura",
"fijar",
"fijo",
"fila",
"filete",
"filial",
"filtro",
"fin",
"finca",
"fingir",
"finito",
"firma",
"flaco",
"flauta",
"flecha",
"flor",
"flota",
"fluir",
"flujo",
"flúor",
"fobia",
"foca",
"fogata",
"fogón",
"folio",
"folleto",
"fondo",
"forma",
"forro",
"fortuna",
"forzar",
"fosa",
"foto",
"fracaso",
"frágil",
"franja",
"frase",
"fraude",
"freír",
"freno",
"fresa",
"frío",
"frito",
"fruta",
"fuego",
"fuente",
"fuerza",
"fuga",
"fumar",
"función",
"funda",
"furgón",
"furia",
"fusil",
"fútbol",
"futuro",
"gacela",
"gafas",
"gaita",
"gajo",
"gala",
"galería",
"gallo",
"gamba",
"ganar",
"gancho",
"ganga",
"ganso",
"garaje",
"garza",
"gasolina",
"gastar",
"gato",
"gavilán",
"gemelo",
"gemir",
"gen",
"género",
"genio",
"gente",
"geranio",
"gerente",
"germen",
"gesto",
"gigante",
"gimnasio",
"girar",
"giro",
"glaciar",
"globo",
"gloria",
"gol",
"golfo",
"goloso",
"golpe",
"goma",
"gordo",
"gorila",
"gorra",
"gota",
"goteo",
"gozar",
"grada",
"gráfico",
"grano",
"grasa",
"gratis",
"grave",
"grieta",
"grillo",
"gripe",
"gris",
"grito",
"grosor",
"grúa",
"grueso",
"grumo",
"grupo",
"guante",
"guapo",
"guardia",
"guerra",
"guía",
"guiño",
"guion",
"guiso",
"guitarra",
"gusano",
"gustar",
"haber",
"hábil",
"hablar",
"hacer",
"hacha",
"hada",
"hallar",
"hamaca",
"harina",
"haz",
"hazaña",
"hebilla",
"hebra",
"hecho",
"helado",
"helio",
"hembra",
"herir",
"hermano",
"héroe",
"hervir",
"hielo",
"hierro",
"hígado",
"higiene",
"hijo",
"himno",
"historia",
"hocico",
"hogar",
"hoguera",
"hoja",
"hombre",
"hongo",
"honor",
"honra",
"hora",
"hormiga",
"horno",
"hostil",
"hoyo",
"hueco",
"huelga",
"huerta",
"hueso",
"huevo",
"huida",
"huir",
"humano",
"húmedo",
"humilde",
"humo",
"hundir",
"huracán",
"hurto",
"icono",
"ideal",
"idioma",
"ídolo",
"iglesia",
"iglú",
"igual",
"ilegal",
"ilusión",
"imagen",
"imán",
"imitar",
"impar",
"imperio",
"imponer",
"impulso",
"incapaz",
"índice",
"inerte",
"infiel",
"informe",
"ingenio",
"inicio",
"inmenso",
"inmune",
"innato",
"insecto",
"instante",
"interés",
"íntimo",
"intuir",
"inútil",
"invierno",
"ira",
"iris",
"ironía",
"isla",
"islote",
"jabalí",
"jabón",
"jamón",
"jarabe",
"jardín",
"jarra",
"jaula",
"jazmín",
"jefe",
"jeringa",
"jinete",
"jornada",
"joroba",
"joven",
"joya",
"juerga",
"jueves",
"juez",
"jugador",
"jugo",
"juguete",
"juicio",
"junco",
"jungla",
"junio",
"juntar",
"júpiter",
"jurar",
"justo",
"juvenil",
"juzgar",
"kilo",
"koala",
"labio",
"lacio",
"lacra",
"lado",
"ladrón",
"lagarto",
"lágrima",
"laguna",
"laico",
"lamer",
"lámina",
"lámpara",
"lana",
"lancha",
"langosta",
"lanza",
"lápiz",
"largo",
"larva",
"lástima",
"lata",
"látex",
"latir",
"laurel",
"lavar",
"lazo",
"leal",
"lección",
"leche",
"lector",
"leer",
"legión",
"legumbre",
"lejano",
"lengua",
"lento",
"leña",
"león",
"leopardo",
"lesión",
"letal",
"letra",
"leve",
"leyenda",
"libertad",
"libro",
"licor",
"líder",
"lidiar",
"lienzo",
"liga",
"ligero",
"lima",
"límite",
"limón",
"limpio",
"lince",
"lindo",
"línea",
"lingote",
"lino",
"linterna",
"líquido",
"liso",
"lista",
"litera",
"litio",
"litro",
"llaga",
"llama",
"llanto",
"llave",
"llegar",
"llenar",
"llevar",
"llorar",
"llover",
"lluvia",
"lobo",
"loción",
"loco",
"locura",
"lógica",
"logro",
"lombriz",
"lomo",
"lonja",
"lote",
"lucha",
"lucir",
"lugar",
"lujo",
"luna",
"lunes",
"lupa",
"lustro",
"luto",
"luz",
"maceta",
"macho",
"madera",
"madre",
"maduro",
"maestro",
"mafia",
"magia",
"mago",
"maíz",
"maldad",
"maleta",
"malla",
"malo",
"mamá",
"mambo",
"mamut",
"manco",
"mando",
"manejar",
"manga",
"maniquí",
"manjar",
"mano",
"manso",
"manta",
"mañana",
"mapa",
"máquina",
"mar",
"marco",
"marea",
"marfil",
"margen",
"marido",
"mármol",
"marrón",
"martes",
"marzo",
"masa",
"máscara",
"masivo",
"matar",
"materia",
"matiz",
"matriz",
"máximo",
"mayor",
"mazorca",
"mecha",
"medalla",
"medio",
"médula",
"mejilla",
"mejor",
"melena",
"melón",
"memoria",
"menor",
"mensaje",
"mente",
"menú",
"mercado",
"merengue",
"mérito",
"mes",
"mesón",
"meta",
"meter",
"método",
"metro",
"mezcla",
"miedo",
"miel",
"miembro",
"miga",
"mil",
"milagro",
"militar",
"millón",
"mimo",
"mina",
"minero",
"mínimo",
"minuto",
"miope",
"mirar",
"misa",
"miseria",
"misil",
"mismo",
"mitad",
"mito",
"mochila",
"moción",
"moda",
"modelo",
"moho",
"mojar",
"molde",
"moler",
"molino",
"momento",
"momia",
"monarca",
"moneda",
"monja",
"monto",
"moño",
"morada",
"morder",
"moreno",
"morir",
"morro",
"morsa",
"mortal",
"mosca",
"mostrar",
"motivo",
"mover",
"móvil",
"mozo",
"mucho",
"mudar",
"mueble",
"muela",
"muerte",
"muestra",
"mugre",
"mujer",
"mula",
"muleta",
"multa",
"mundo",
"muñeca",
"mural",
"muro",
"músculo",
"museo",
"musgo",
"música",
"muslo",
"nácar",
"nación",
"nadar",
"naipe",
"naranja",
"nariz",
"narrar",
"nasal",
"natal",
"nativo",
"natural",
"náusea",
"naval",
"nave",
"navidad",
"necio",
"néctar",
"negar",
"negocio",
"negro",
"neón",
"nervio",
"neto",
"neutro",
"nevar",
"nevera",
"nicho",
"nido",
"niebla",
"nieto",
"niñez",
"niño",
"nítido",
"nivel",
"nobleza",
"noche",
"nómina",
"noria",
"norma",
"norte",
"nota",
"noticia",
"novato",
"novela",
"novio",
"nube",
"nuca",
"núcleo",
"nudillo",
"nudo",
"nuera",
"nueve",
"nuez",
"nulo",
"número",
"nutria",
"oasis",
"obeso",
"obispo",
"objeto",
"obra",
"obrero",
"observar",
"obtener",
"obvio",
"oca",
"ocaso",
"océano",
"ochenta",
"ocho",
"ocio",
"ocre",
"octavo",
"octubre",
"oculto",
"ocupar",
"ocurrir",
"odiar",
"odio",
"odisea",
"oeste",
"ofensa",
"oferta",
"oficio",
"ofrecer",
"ogro",
"oído",
"oír",
"ojo",
"ola",
"oleada",
"olfato",
"olivo",
"olla",
"olmo",
"olor",
"olvido",
"ombligo",
"onda",
"onza",
"opaco",
"opción",
"ópera",
"opinar",
"oponer",
"optar",
"óptica",
"opuesto",
"oración",
"orador",
"oral",
"órbita",
"orca",
"orden",
"oreja",
"órgano",
"orgía",
"orgullo",
"oriente",
"origen",
"orilla",
"oro",
"orquesta",
"oruga",
"osadía",
"oscuro",
"osezno",
"oso",
"ostra",
"otoño",
"otro",
"oveja",
"óvulo",
"óxido",
"oxígeno",
"oyente",
"ozono",
"pacto",
"padre",
"paella",
"página",
"pago",
"país",
"pájaro",
"palabra",
"palco",
"paleta",
"pálido",
"palma",
"paloma",
"palpar",
"pan",
"panal",
"pánico",
"pantera",
"pañuelo",
"papá",
"papel",
"papilla",
"paquete",
"parar",
"parcela",
"pared",
"parir",
"paro",
"párpado",
"parque",
"párrafo",
"parte",
"pasar",
"paseo",
"pasión",
"paso",
"pasta",
"pata",
"patio",
"patria",
"pausa",
"pauta",
"pavo",
"payaso",
"peatón",
"pecado",
"pecera",
"pecho",
"pedal",
"pedir",
"pegar",
"peine",
"pelar",
"peldaño",
"pelea",
"peligro",
"pellejo",
"pelo",
"peluca",
"pena",
"pensar",
"peñón",
"peón",
"peor",
"pepino",
"pequeño",
"pera",
"percha",
"perder",
"pereza",
"perfil",
"perico",
"perla",
"permiso",
"perro",
"persona",
"pesa",
"pesca",
"pésimo",
"pestaña",
"pétalo",
"petróleo",
"pez",
"pezuña",
"picar",
"pichón",
"pie",
"piedra",
"pierna",
"pieza",
"pijama",
"pilar",
"piloto",
"pimienta",
"pino",
"pintor",
"pinza",
"piña",
"piojo",
"pipa",
"pirata",
"pisar",
"piscina",
"piso",
"pista",
"pitón",
"pizca",
"placa",
"plan",
"plata",
"playa",
"plaza",
"pleito",
"pleno",
"plomo",
"pluma",
"plural",
"pobre",
"poco",
"poder",
"podio",
"poema",
"poesía",
"poeta",
"polen",
"policía",
"pollo",
"polvo",
"pomada",
"pomelo",
"pomo",
"pompa",
"poner",
"porción",
"portal",
"posada",
"poseer",
"posible",
"poste",
"potencia",
"potro",
"pozo",
"prado",
"precoz",
"pregunta",
"premio",
"prensa",
"preso",
"previo",
"primo",
"príncipe",
"prisión",
"privar",
"proa",
"probar",
"proceso",
"producto",
"proeza",
"profesor",
"programa",
"prole",
"promesa",
"pronto",
"propio",
"próximo",
"prueba",
"público",
"puchero",
"pudor",
"pueblo",
"puerta",
"puesto",
"pulga",
"pulir",
"pulmón",
"pulpo",
"pulso",
"puma",
"punto",
"puñal",
"puño",
"pupa",
"pupila",
"puré",
"quedar",
"queja",
"quemar",
"querer",
"queso",
"quieto",
"química",
"quince",
"quitar",
"rábano",
"rabia",
"rabo",
"ración",
"radical",
"raíz",
"rama",
"rampa",
"rancho",
"rango",
"rapaz",
"rápido",
"rapto",
"rasgo",
"raspa",
"rato",
"rayo",
"raza",
"razón",
"reacción",
"realidad",
"rebaño",
"rebote",
"recaer",
"receta",
"rechazo",
"recoger",
"recreo",
"recto",
"recurso",
"red",
"redondo",
"reducir",
"reflejo",
"reforma",
"refrán",
"refugio",
"regalo",
"regir",
"regla",
"regreso",
"rehén",
"reino",
"reír",
"reja",
"relato",
"relevo",
"relieve",
"relleno",
"reloj",
"remar",
"remedio",
"remo",
"rencor",
"rendir",
"renta",
"reparto",
"repetir",
"reposo",
"reptil",
"res",
"rescate",
"resina",
"respeto",
"resto",
"resumen",
"retiro",
"retorno",
"retrato",
"reunir",
"revés",
"revista",
"rey",
"rezar",
"rico",
"riego",
"rienda",
"riesgo",
"rifa",
"rígido",
"rigor",
"rincón",
"riñón",
"río",
"riqueza",
"risa",
"ritmo",
"rito"
];
|
mod api;
mod http;
mod utils;
mod ws;
pub use api::*;
pub use http::*;
pub use ws::*;
|
// Register reader accessors for HDP_VAL. NOTE(review): this follows the
// svd2rust generated-code layout - if it is generated, regenerate from
// the device SVD rather than editing by hand.
#[doc = "Reader of register HDP_VAL"]
pub type R = crate::R<u32, super::HDP_VAL>;
#[doc = "Reader of field `HDPVAL`"]
pub type HDPVAL_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:7 - HDPVAL"]
    #[inline(always)]
    pub fn hdpval(&self) -> HDPVAL_R {
        // Extract the low 8 bits of the register value as the field.
        HDPVAL_R::new((self.bits & 0xff) as u8)
    }
}
|
use proc_macro2::TokenStream;
use syn;
use helpers::{extract_meta, MetaIteratorHelpers};
/// Derive-macro back end for `EnumMessage`.
///
/// Generates an `impl ::strum::EnumMessage` for the enum in `ast`:
/// `get_message` / `get_detailed_message` come from the variants'
/// `#[strum(message = "...")]` / `#[strum(detailed_message = "...")]`
/// properties, and `get_serializations` from `#[strum(serialize = "...")]`
/// (falling back to the variant identifier when none is given).
///
/// # Panics
/// Panics if `ast` is not an enum.
pub fn enum_message_inner(ast: &syn::DeriveInput) -> TokenStream {
    let name = &ast.ident;
    let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
    let variants = match ast.data {
        syn::Data::Enum(ref v) => &v.variants,
        _ => panic!("EnumMessage only works on Enums"),
    };
    let mut arms = Vec::new();
    let mut detailed_arms = Vec::new();
    let mut serializations = Vec::new();
    for variant in variants {
        let meta = extract_meta(&variant.attrs);
        let messages = meta.find_unique_property("strum", "message");
        let detailed_messages = meta.find_unique_property("strum", "detailed_message");
        let ident = &variant.ident;
        use syn::Fields::*;
        // Pattern suffix that matches the variant regardless of its payload shape.
        let params = match variant.fields {
            Unit => quote! {},
            Unnamed(..) => quote! { (..) },
            Named(..) => quote! { {..} },
        };
        // You can't disable getting the serializations.
        {
            let mut serialization_variants = meta.find_properties("strum", "serialize");
            if serialization_variants.is_empty() {
                // No explicit serializations: fall back to the variant name.
                serialization_variants.push(ident.to_string());
            }
            let count = serialization_variants.len();
            serializations.push(quote! {
                &#name::#ident #params => {
                    static ARR: [&'static str; #count] = [#(#serialization_variants),*];
                    &ARR
                }
            });
        }
        // But you can disable the messages.
        if meta.is_disabled() {
            continue;
        }
        if let Some(msg) = messages {
            // `quote!` interpolates `#params` via `ToTokens` by reference, so no
            // clone of `params` is needed here.
            let tokens = quote! { &#name::#ident #params => ::std::option::Option::Some(#msg) };
            arms.push(tokens.clone());
            // The plain message doubles as the detailed one unless overridden.
            if detailed_messages.is_none() {
                detailed_arms.push(tokens);
            }
        }
        if let Some(msg) = detailed_messages {
            detailed_arms
                .push(quote! { &#name::#ident #params => ::std::option::Option::Some(#msg) });
        }
    }
    // Variants without a message fall through to None.
    if arms.len() < variants.len() {
        arms.push(quote! { _ => ::std::option::Option::None });
    }
    if detailed_arms.len() < variants.len() {
        detailed_arms.push(quote! { _ => ::std::option::Option::None });
    }
    quote! {
        impl #impl_generics ::strum::EnumMessage for #name #ty_generics #where_clause {
            fn get_message(&self) -> ::std::option::Option<&str> {
                match self {
                    #(#arms),*
                }
            }
            fn get_detailed_message(&self) -> ::std::option::Option<&str> {
                match self {
                    #(#detailed_arms),*
                }
            }
            fn get_serializations(&self) -> &[&str] {
                match self {
                    #(#serializations),*
                }
            }
        }
    }
}
|
#![warn(rust_2018_idioms)]
#![allow(unused_imports)]
use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
    /// Error surfaced by the Kubernetes API client.
    #[error("Kube Api Error: {0}")]
    KubeError(#[source] kube::Error),
    /// JSON (de)serialization failure.
    #[error("SerializationError: {0}")]
    SerializationError(#[source] serde_json::Error),
}
/// Crate-wide result alias defaulting to this crate's [`Error`].
pub type Result<T, E = Error> = std::result::Result<T, E>;
/// State machinery for kube, as exposable to actix
pub mod manager;
pub mod commands;
mod k8sserver;
|
extern crate rand;
use std::f32::consts::PI;
use vector_2d::V2;
use X_LEN;
use Y_LEN;
#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
/// How `Momentum::move_position` treats the `[0, X_LEN] x [0, Y_LEN]` boundary.
pub enum EdgeBehaviour {
    /// Ignore the boundary; the position may leave the field freely.
    Pass,
    /// Wrap around toroidally (position taken modulo `X_LEN` / `Y_LEN`).
    PacMan,
    /// Clamp to the boundary and zero the velocity component that hit it.
    Wall,
}
use self::EdgeBehaviour::*;
#[derive(Debug, Clone)]
/// Translational and rotational state of a simulated body.
pub struct Momentum {
    pub pos: V2,
    pub vel: V2,
    /// Orientation in radians; `move_position` keeps it in `[0, 2*pi)`.
    pub theta: f32,
    /// Angular velocity in radians per unit time.
    pub omega: f32,
    pub mass: f32, // also moment of inertia
    /// Multiplicative damping applied to `vel` per unit time (1.0 = none).
    speed_decay: f32,
    /// Multiplicative damping applied to `omega` per unit time (1.0 = none).
    rotation_decay: f32,
    /// What happens when the position crosses the field boundary.
    edge_behavior: EdgeBehaviour,
}
impl Momentum {
    /// Integrates position by `time`, applies the configured edge behaviour,
    /// then advances and normalizes the heading.
    pub fn move_position(&mut self, time: f32) {
        self.pos = self.pos + self.vel.scale(time);
        match self.edge_behavior {
            Pass => {}
            PacMan => self.pos = self.pos.mod_euc(X_LEN, Y_LEN),
            Wall => {
                // Clamp each axis independently; a clamped axis also loses
                // its velocity component.
                if self.pos.0 < 0.0 {
                    self.pos.0 = 0.0;
                    self.vel.0 = 0.0;
                } else if self.pos.0 > X_LEN {
                    self.pos.0 = X_LEN;
                    self.vel.0 = 0.0;
                }
                if self.pos.1 < 0.0 {
                    self.pos.1 = 0.0;
                    self.vel.1 = 0.0;
                } else if self.pos.1 > Y_LEN {
                    self.pos.1 = Y_LEN;
                    self.vel.1 = 0.0;
                }
            }
        }
        // Keep the heading normalized to [0, 2*pi).
        self.theta = (self.theta + self.omega * time).mod_euc(2.0 * PI);
    }

    /// Applies `force` and `torque` for `time`, then damps the linear and
    /// angular velocities by the configured per-unit-time decay factors.
    pub fn impart(&mut self, force: V2, torque: f32, time: f32) {
        let linear_damping = self.speed_decay.powf(time);
        let angular_damping = self.rotation_decay.powf(time);
        self.vel += force.scale(time / self.mass);
        self.vel = self.vel.scale(linear_damping);
        self.omega = (self.omega + torque * time / self.mass) * angular_damping;
    }

    /// True while the position lies inside the field, boundary inclusive.
    pub fn in_bounds(&self) -> bool {
        let V2(x, y) = self.pos;
        x >= 0.0 && x <= X_LEN && y >= 0.0 && y <= Y_LEN
    }

    /// A heavy, damped body at the center of the field that wraps at edges.
    pub fn new_centered() -> Momentum {
        Momentum {
            pos: V2(X_LEN / 2.0, Y_LEN / 2.0),
            vel: V2(0.0, 0.0),
            theta: PI,
            omega: 0.0,
            mass: 10.0,
            speed_decay: 0.6,
            rotation_decay: 0.3,
            edge_behavior: PacMan,
        }
    }

    /// An undamped body spawned on a field edge, headed into the field at
    /// `speed` along a random direction with a random spin.
    pub fn new_random_edge<R: rand::Rng>(rng: &mut R, speed: f32, mass: f32) -> Momentum {
        let theta = rng.gen_range(0.0, 2.0 * PI);
        let vel = V2(0.0, speed).rotate(theta);
        // Pick the axis with the larger speed component and place the body on
        // the boundary the velocity points away from.
        let pos = if vel.0.abs() < vel.1.abs() {
            let y = if vel.1 > 0.0 { 0.0 } else { Y_LEN };
            V2(rng.gen_range(0.0, X_LEN), y)
        } else {
            let x = if vel.0 > 0.0 { 0.0 } else { X_LEN };
            V2(x, rng.gen_range(0.0, Y_LEN))
        };
        let omega = rng.gen_range(-PI, PI);
        Momentum {
            pos,
            vel,
            theta,
            omega,
            mass,
            speed_decay: 1.0,
            rotation_decay: 1.0,
            edge_behavior: Pass,
        }
    }

    /// An undamped body whose offset and velocity are given in this body's
    /// local (rotated) frame; it shares this body's heading.
    pub fn new_relative(&self, pos: V2, vel: V2, mass: f32) -> Momentum {
        let world_pos = self.pos + pos.rotate(self.theta);
        let world_vel = self.vel + vel.rotate(self.theta);
        Momentum {
            pos: world_pos,
            vel: world_vel,
            theta: self.theta,
            omega: 0.0,
            mass,
            speed_decay: 1.0,
            rotation_decay: 1.0,
            edge_behavior: Pass,
        }
    }

    /// All-zero momentum for tests. Note `mass` is zero, so `impart` would
    /// divide by zero and produce non-finite values — only use it where no
    /// forces are applied.
    #[cfg(test)]
    pub fn new_zero() -> Momentum {
        Momentum {
            pos: V2(0.0, 0.0),
            vel: V2(0.0, 0.0),
            theta: 0.0,
            omega: 0.0,
            mass: 0.0,
            speed_decay: 0.0,
            rotation_decay: 0.0,
            edge_behavior: Pass,
        }
    }
}
|
#[doc = "Reader of register DDRCTRL_MRCTRL0"]
pub type R = crate::R<u32, super::DDRCTRL_MRCTRL0>;
#[doc = "Writer for register DDRCTRL_MRCTRL0"]
pub type W = crate::W<u32, super::DDRCTRL_MRCTRL0>;
// Register API for DDRCTRL_MRCTRL0 (appears to be svd2rust-generated code).
#[doc = "Register DDRCTRL_MRCTRL0 `reset()`'s with value 0x10"]
impl crate::ResetValue for super::DDRCTRL_MRCTRL0 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x10
    }
}
#[doc = "Reader of field `MR_TYPE`"]
pub type MR_TYPE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MR_TYPE`"]
// Single-bit write proxy for bit 0.
pub struct MR_TYPE_W<'a> {
    w: &'a mut W,
}
impl<'a> MR_TYPE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then set it from `value`.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `MR_RANK`"]
pub type MR_RANK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MR_RANK`"]
// Single-bit write proxy for bit 4.
pub struct MR_RANK_W<'a> {
    w: &'a mut W,
}
impl<'a> MR_RANK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 4, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `MR_ADDR`"]
pub type MR_ADDR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `MR_ADDR`"]
// 4-bit write proxy for bits 12:15.
pub struct MR_ADDR_W<'a> {
    w: &'a mut W,
}
impl<'a> MR_ADDR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 12:15, then set them from the low nibble of `value`.
        self.w.bits = (self.w.bits & !(0x0f << 12)) | (((value as u32) & 0x0f) << 12);
        self.w
    }
}
#[doc = "Reader of field `MR_WR`"]
pub type MR_WR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MR_WR`"]
// Single-bit write proxy for bit 31.
pub struct MR_WR_W<'a> {
    w: &'a mut W,
}
impl<'a> MR_WR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 31, then set it from `value`.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
// Read accessors: each extracts its field from `self.bits`.
impl R {
    #[doc = "Bit 0 - MR_TYPE"]
    #[inline(always)]
    pub fn mr_type(&self) -> MR_TYPE_R {
        MR_TYPE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 4 - MR_RANK"]
    #[inline(always)]
    pub fn mr_rank(&self) -> MR_RANK_R {
        MR_RANK_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bits 12:15 - MR_ADDR"]
    #[inline(always)]
    pub fn mr_addr(&self) -> MR_ADDR_R {
        MR_ADDR_R::new(((self.bits >> 12) & 0x0f) as u8)
    }
    #[doc = "Bit 31 - MR_WR"]
    #[inline(always)]
    pub fn mr_wr(&self) -> MR_WR_R {
        MR_WR_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// Write accessors: each returns a proxy that updates its field in `self.bits`.
impl W {
    #[doc = "Bit 0 - MR_TYPE"]
    #[inline(always)]
    pub fn mr_type(&mut self) -> MR_TYPE_W {
        MR_TYPE_W { w: self }
    }
    #[doc = "Bit 4 - MR_RANK"]
    #[inline(always)]
    pub fn mr_rank(&mut self) -> MR_RANK_W {
        MR_RANK_W { w: self }
    }
    #[doc = "Bits 12:15 - MR_ADDR"]
    #[inline(always)]
    pub fn mr_addr(&mut self) -> MR_ADDR_W {
        MR_ADDR_W { w: self }
    }
    #[doc = "Bit 31 - MR_WR"]
    #[inline(always)]
    pub fn mr_wr(&mut self) -> MR_WR_W {
        MR_WR_W { w: self }
    }
}
|
#![allow(unused_variables)]
pub mod mock {
    use pattern::{Pattern, StepArgument};
    use regex::Regex;
    use std::collections::HashMap;

    /// The single pattern text every mock in this module exposes.
    const A_PATTERN: &str = "I have cukes in my belly";

    /// Minimal `Pattern` implementation backed by [`A_PATTERN`].
    pub struct PatternMock {}

    impl Pattern for PatternMock {
        fn to_regex(&self) -> Regex {
            // A_PATTERN contains no regex metacharacters, so compilation
            // cannot fail.
            Regex::new(A_PATTERN).unwrap()
        }

        fn to_string(&self) -> String {
            String::from(A_PATTERN)
        }
    }

    /// Step handler that accepts any arguments and does nothing.
    pub fn step_handler(arguments: HashMap<String, StepArgument>) {}

    /// Step handler that always fails by panicking.
    pub fn failing_step_handler(arguments: HashMap<String, StepArgument>) {
        panic!();
    }

    /// Step handler that trivially succeeds.
    pub fn passing_step_handler(arguments: HashMap<String, StepArgument>) {}
}
|
use std::io;
use std::io::Write;
/// A triangle described by its three side lengths.
struct Triangle {
    a: f64,
    b: f64,
    c: f64,
}

impl Triangle {
    /// Area via Heron's formula: sqrt(s(s-a)(s-b)(s-c)), s = semiperimeter.
    fn area(&self) -> f64 {
        let semi = 0.5 * (self.a + self.b + self.c);
        let product = semi * (semi - self.a) * (semi - self.b) * (semi - self.c);
        product.sqrt()
    }
}

/// Step-by-step construction of a [`Triangle`], starting from 3-4-5 defaults.
struct TriangleBuilder {
    a: f64,
    b: f64,
    c: f64,
}

impl TriangleBuilder {
    /// Starts from the default 3-4-5 right triangle.
    fn new() -> TriangleBuilder {
        TriangleBuilder {
            a: 3.0,
            b: 4.0,
            c: 5.0,
        }
    }

    /// Sets side `a` and returns the builder for chaining.
    fn a(&mut self, arg: f64) -> &mut TriangleBuilder {
        self.a = arg;
        self
    }

    /// Sets side `b` and returns the builder for chaining.
    fn b(&mut self, arg: f64) -> &mut TriangleBuilder {
        self.b = arg;
        self
    }

    /// Sets side `c` and returns the builder for chaining.
    fn c(&mut self, arg: f64) -> &mut TriangleBuilder {
        self.c = arg;
        self
    }

    /// Materializes a triangle from the current side lengths.
    fn finalize(&self) -> Triangle {
        Triangle {
            a: self.a,
            b: self.b,
            c: self.c,
        }
    }
}
/// Reads one line from stdin and parses it as `f64`.
/// A value that fails to parse falls back to 1.0; a read failure panics.
fn input_data() -> f64 {
    let mut line = String::new();
    io::stdin()
        .read_line(&mut line)
        .expect("Failed to read line");
    line.trim().parse().unwrap_or(1.0)
}
/// Prompts for the three side lengths, then prints the triangle's area.
fn main() {
    let mut sides = [0.0_f64; 3];
    for (label, side) in ["a", "b", "c"].iter().zip(sides.iter_mut()) {
        // Flush so the prompt appears before we block on stdin.
        print!("input {}: ", label);
        io::stdout().flush().unwrap();
        *side = input_data();
    }
    let tri = TriangleBuilder::new()
        .a(sides[0])
        .b(sides[1])
        .c(sides[2])
        .finalize();
    println!("this triangle's area: {}", tri.area());
}
////////////////////////////////////////////////////////////////////
#[test]
fn triangle_test() {
    // A 3-4-5 right triangle has area (3 * 4) / 2 = 6, exact in f64.
    let tri = Triangle { a: 3.0, b: 4.0, c: 5.0 };
    assert_eq!(6.0, tri.area());
}
|
use std::{
boxed::Box,
cell::UnsafeCell,
marker::PhantomPinned,
mem::MaybeUninit,
ops::{Deref, DerefMut},
pin::Pin,
};
/// Projects through a `Pin` to a plain mutable reference of the pointee.
///
/// # Safety
/// The caller must uphold the pinning contract: the returned reference must
/// not be used to move the pointee or otherwise invalidate its pinned state.
unsafe fn pin_dance<'a, R, T>(pin: &'a mut Pin<R>) -> &'a mut T
where
    R: DerefMut<Target = T>,
{
    let mut_pin = Pin::as_mut(pin);
    Pin::get_unchecked_mut(mut_pin)
}
/// Marker trait unifying `Dereference` and `DereferenceMut` over their
/// (referee, referent) type pair, so `map0`/`map_into0` can abstract over both.
trait Poly2<A, B> {}
impl<R, T> Poly2<R, T> for Dereference<R, T> {}
impl<R, T> Poly2<R, T> for DereferenceMut<R, T> {}
/// Type-level map: the same carrier type with its referent replaced by `N`.
/// Used to justify the `transmute` in `map_into0`.
trait Transmutable<N> {
    type Into;
}
impl<R, T, N> Transmutable<N> for Dereference<R, T> {
    type Into = Dereference<R, N>;
}
impl<R, T, N> Transmutable<N> for DereferenceMut<R, T> {
    type Into = DereferenceMut<R, N>;
}
/// Shared implementation behind the borrowing `map`/`map_mut` combinators.
///
/// Unwraps the old pinned value, wraps it as the referee of a new shell built
/// by `cons`, derives the new referent via `referent_fn` from the projection
/// `get_t`, and finally installs it with `pin`.
fn map0<'a, F, FOuterToTn, R, T, Tn, N, DOuter, DInner, Cons, Pinnit>(
    this: Pin<Box<DInner>>,
    referent_fn: F,
    cons: Cons,
    get_t: FOuterToTn,
    pin: Pinnit,
) -> Pin<Box<DOuter>>
where
    F: Fn(Tn) -> N,
    // Tn: 'a,
    DInner: Poly2<R, T>,
    FOuterToTn: Fn(&mut DOuter) -> Tn,
    DOuter: Poly2<Box<DInner>, N>,
    Cons: Fn(Box<DInner>) -> Pin<Box<DOuter>>,
    Pinnit: Fn(N, Pin<Box<DOuter>>) -> Pin<Box<DOuter>>,
{
    // The inner value is re-pinned immediately inside the new shell, so it is
    // never moved while observably pinned.
    let rt = unsafe { Pin::into_inner_unchecked(this) };
    let mut d = cons(rt);
    let n = unsafe {
        let mut mut_d = pin_dance(&mut d);
        // This bypasses normal borrow checking
        // We're guaranteeing that the referee lives as long as the produced value and won't be mutated
        referent_fn(get_t(&mut mut_d))
    };
    pin(n, d)
}
/// Shared implementation behind the consuming `map_into` combinators.
///
/// Moves the current referent out of its slot, converts it with
/// `referent_fn` (which may also see the referee via `get_r`), transmutes the
/// shell to the new referent type, and re-installs the converted value.
fn map_into0<F, R, Rn, T, N, DIn, DOut, GetT, InnerInit, UnwrapInnerInit, GetR, Pinnit>(
    mut this: Pin<Box<DIn>>,
    referent_fn: F,
    get_t: GetT,
    unwrap_inner: UnwrapInnerInit,
    get_r: GetR,
    pin: Pinnit,
) -> Pin<Box<DOut>>
where
    F: Fn(&Rn, T) -> N,
    DIn: Poly2<R, T> + Transmutable<N>,
    DOut: Poly2<R, N>,
    GetT: Fn(&mut DIn) -> &mut MaybeUninit<InnerInit>,
    UnwrapInnerInit: Fn(InnerInit) -> T,
    GetR: Fn(&DIn) -> &Rn,
    Pinnit: Fn(N, Pin<Box<DOut>>) -> Pin<Box<DOut>>,
{
    unsafe {
        // Get inside the pin
        let mut_ref = pin_dance(&mut this);
        // prepare a landing zone for our current referent
        let mut t = MaybeUninit::uninit();
        // And swap it out for the uninitialized value
        std::mem::swap(&mut t, get_t(mut_ref));
        // And feed it into the user supplied conversion
        let n = referent_fn(get_r(mut_ref), unwrap_inner(t.assume_init()));
        // Transmute ourselves into the new type
        // This is safe because the referent is boxed so the new type will be the same size as the old one
        // Also we've already de-initialized the referent with the swap, so no incorrectly typed valid memory
        // NOTE(review): soundness rests on `DIn`/`DOut` having identical layout
        // (the `Transmutable` relationship) — confirm this holds for all instantiations.
        let d_n = std::mem::transmute(this);
        //Install the referent and return
        pin(n, d_n)
    }
}
/// Self-referential pair: owns `referee` and a boxed `referent` that may
/// borrow from it. Always handled behind `Pin<Box<_>>` so the borrow stays valid.
pub struct Dereference<R, T> {
    // Boxed so the value's address is stable; may be uninitialized between
    // `new0` and `pinnit`.
    referent: MaybeUninit<Box<T>>,
    referee: R, // Must come second for drop order to be safe
    _pin: PhantomPinned,
}
impl<R, T> Dereference<R, T> {
    /// Allocates the pinned shell with an uninitialized referent slot.
    fn new0(referee: R) -> Pin<Box<Self>> {
        Box::pin(Dereference {
            referee,
            referent: MaybeUninit::uninit(),
            _pin: PhantomPinned,
        })
    }
    /// Installs `t` as the referent of an already-pinned shell.
    fn pinnit(t: T, mut this: Pin<Box<Self>>) -> Pin<Box<Self>> {
        unsafe {
            pin_dance(&mut this).referent = MaybeUninit::new(Box::new(t));
        }
        this
    }
    /// Builds a self-referential value: `referent_fn` may borrow the stored
    /// `referee`, and the produced referent is kept alongside it.
    pub fn new<'a, F>(referee: R, referent_fn: F) -> Pin<Box<Self>>
    where
        R: 'a,
        F: Fn(&'a R) -> T,
    {
        let d = Self::new0(referee);
        let t = unsafe {
            // This bypasses normal borrow checking
            // We're guaranteeing that the referee lives as long as the produced value and won't be mutated
            let r_ptr: *const R = &d.referee;
            referent_fn(&*r_ptr)
        };
        Self::pinnit(t, d)
    }
    /// Wraps `this` as the referee of a new `Dereference` whose referent is
    /// derived by shared-borrowing the old referent.
    pub fn map<'a, F, N>(
        this: Pin<Box<Self>>,
        referent_fn: F,
    ) -> Pin<Box<Dereference<Box<Self>, N>>>
    where
        Self: 'a,
        F: Fn(&'a T) -> N,
    {
        map0(
            this,
            referent_fn,
            Dereference::new0,
            |m_d| unsafe { &*m_d.referee.referent.as_ptr() },
            Dereference::pinnit,
        )
    }
    /// Like [`Self::map`], but hands the closure a mutable borrow and yields
    /// a `DereferenceMut`.
    pub fn map_mut<'a, F, N>(
        this: Pin<Box<Self>>,
        referent_fn: F,
    ) -> Pin<Box<DereferenceMut<Box<Self>, N>>>
    where
        Self: 'a,
        F: Fn(&'a mut T) -> N,
    {
        map0(
            this,
            referent_fn,
            DereferenceMut::new0,
            |m_d| unsafe { &mut *m_d.referee.referent.as_mut_ptr() },
            DereferenceMut::pinnit,
        )
    }
    /// Consumes the current referent, converting it (with read access to the
    /// referee) into a new referent of type `N`; the referee is kept.
    pub fn map_into<'a, F, N>(this: Pin<Box<Self>>, referent_fn: F) -> Pin<Box<Dereference<R, N>>>
    where
        Self: 'a,
        F: Fn(&R, T) -> N,
    {
        map_into0(
            this,
            referent_fn,
            |m_r| &mut m_r.referent,
            |boxed| *boxed,
            |m_r| &m_r.referee,
            Dereference::pinnit,
        )
    }
    /// Shared access to the stored referee.
    pub fn ref_referee(&self) -> &R {
        &self.referee
    }
}
impl<R, T> Deref for Dereference<R, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // safety guranteed by construction
        unsafe { &*self.referent.as_ptr() }
    }
}
impl<R, T> Drop for Dereference<R, T> {
    fn drop(&mut self) {
        // Swap the (possibly initialized) referent slot out and forget it.
        // NOTE(review): `MaybeUninit` never drops its contents, so the boxed
        // referent appears to be leaked here — presumably deliberate, since
        // the slot may legitimately be uninitialized (e.g. after a panic in a
        // constructor before `pinnit`); confirm the leak is intended.
        let mut t = MaybeUninit::uninit();
        std::mem::swap(&mut t, &mut self.referent);
        std::mem::forget(t);
    }
}
/* Mutable Variant (cannot borrow referee externally) */
/* Mutable Variant (cannot borrow referee externally) */
/// Like [`Dereference`], but the referent may hold a mutable borrow of the
/// referee; the `UnsafeCell` permits interior mutation of the referent.
pub struct DereferenceMut<R, T> {
    referent: MaybeUninit<Box<UnsafeCell<T>>>,
    referee: R, // Must come second for drop order to be safe
    _pin: PhantomPinned,
}
impl<R, T> DereferenceMut<R, T> {
    /// Allocates the pinned shell with an uninitialized referent slot.
    fn new0(referee: R) -> Pin<Box<Self>> {
        Box::pin(DereferenceMut {
            referee,
            referent: MaybeUninit::uninit(),
            _pin: PhantomPinned,
        })
    }
    /// Wraps `t` in the slot's `MaybeUninit<Box<UnsafeCell<_>>>` shape.
    fn uninit_box_cell(t: T) -> MaybeUninit<Box<UnsafeCell<T>>> {
        MaybeUninit::new(Box::new(UnsafeCell::new(t)))
    }
    /// Installs `t` as the referent of an already-pinned shell.
    fn pinnit(t: T, mut this: Pin<Box<Self>>) -> Pin<Box<Self>> {
        unsafe {
            pin_dance(&mut this).referent = Self::uninit_box_cell(t);
        }
        this
    }
    /// Builds a self-referential value whose referent holds a *mutable*
    /// borrow of the stored referee.
    pub fn new_mut<'a, F>(referee: R, referentr_fn: F) -> Pin<Box<Self>>
    where
        R: 'a,
        F: Fn(&'a mut R) -> T,
    {
        let mut d = Self::new0(referee);
        unsafe {
            let mut_d = pin_dance(&mut d);
            // Raw pointer sidesteps the borrow checker: the referee is pinned
            // alongside the referent, so the borrow is kept alive by construction.
            let r_ptr: *mut R = &mut mut_d.referee;
            mut_d.referent = Self::uninit_box_cell(referentr_fn(&mut *r_ptr));
        };
        d
    }
    /// Wraps `this` as the referee of a new `DereferenceMut` whose referent
    /// is derived by mutably borrowing the old referent.
    pub fn map_mut<'a, F, N>(
        this: Pin<Box<Self>>,
        referent_fn: F,
    ) -> Pin<Box<DereferenceMut<Box<Self>, N>>>
    where
        Self: 'a,
        F: Fn(&'a mut T) -> N,
    {
        map0(
            this,
            referent_fn,
            DereferenceMut::new0,
            |m_d| unsafe { &mut *(*m_d.referee.referent.as_ptr()).get() },
            DereferenceMut::pinnit,
        )
    }
    /// Like [`Self::map_mut`], but with a shared borrow, yielding a read-only
    /// `Dereference`.
    pub fn map<F, N>(this: Pin<Box<Self>>, referent_fn: F) -> Pin<Box<Dereference<Box<Self>, N>>>
    where
        F: Fn(&T) -> N,
    {
        map0(
            this,
            referent_fn,
            Dereference::new0,
            |m_d| unsafe { &*(*m_d.referee.referent.as_ptr()).get() },
            Dereference::pinnit,
        )
    }
    /// Consumes the current referent and converts it into a new referent of
    /// type `N`; the referee is kept. (Unlike `Dereference::map_into`, the
    /// conversion gets no referee access — it may already be mutably borrowed.)
    pub fn map_into<'a, F, N>(
        this: Pin<Box<Self>>,
        referent_fn: F,
    ) -> Pin<Box<DereferenceMut<R, N>>>
    where
        Self: 'a,
        F: Fn(T) -> N,
    {
        map_into0(
            this,
            |_: &(), x| referent_fn(x),
            |m_r| &mut m_r.referent,
            |box_cell| (*box_cell).into_inner(),
            |_| &(),
            DereferenceMut::pinnit,
        )
    }
    /// Mutable access to the referent of a pinned `DereferenceMut`.
    pub fn deref_mut<'a>(this: &'a mut Pin<Box<DereferenceMut<R, T>>>) -> &'a mut T {
        unsafe {
            let x = &*this.referent.as_ptr();
            &mut *x.as_ref().get()
        }
    }
}
impl<R, T> Deref for DereferenceMut<R, T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        // safety guranteed by construction
        unsafe {
            let b = &*self.referent.as_ptr();
            &*b.as_ref().get()
        }
    }
}
impl<R, T> Drop for DereferenceMut<R, T> {
    fn drop(&mut self) {
        // Same pattern as `Dereference::drop`: the slot is swapped out and
        // forgotten, so the boxed referent appears to be leaked.
        // NOTE(review): confirm the leak is intended (the slot may be
        // uninitialized, so it cannot be unconditionally `assume_init`-dropped).
        let mut t = MaybeUninit::uninit();
        std::mem::swap(&mut t, &mut self.referent);
        std::mem::forget(t);
    }
}
/// Forwards iteration to the referent, so a pinned `DereferenceMut` around an
/// iterator is itself usable as an iterator.
impl<R, T> Iterator for Pin<Box<DereferenceMut<R, T>>>
where
    T: Iterator,
{
    type Item = T::Item;
    fn next(&mut self) -> Option<Self::Item> {
        <DereferenceMut<R, T>>::deref_mut(self).next()
    }
}
#[cfg(test)]
mod tests {
    use std::ops::Deref;
    use super::*;
    // Chain map_mut over a self-referential value and read all three layers back.
    #[test]
    fn it_works() {
        let a = Dereference::new(0, |z| (z, 0));
        let b = Dereference::map_mut(a, |x: &mut (&i32, i32)| {
            x.1 = 1;
            (x, 2)
        });
        let ((x, y), z) = DereferenceMut::deref(&b);
        assert_eq!(**x, 0);
        assert_eq!(*y, 1);
        assert_eq!(*z, 2);
    }
    // map_into must convert the referent in place while keeping the referee.
    #[test]
    fn into_works() {
        let a = Dereference::new(0, |z| (z, 0));
        let b = Dereference::map_into(a, |_, mut x: (&i32, i32)| {
            x.1 = 1;
            (x, 2u64)
        });
        let ((x, y), z) = Dereference::deref(&b);
        assert_eq!(**x, 0);
        assert_eq!(*y, 1);
        assert_eq!(*z, 2);
    }
}
|
/// Returns the `k`-th (1-indexed) permutation of the digits `1..=n` in
/// lexicographic order (LeetCode 60, "Permutation Sequence").
///
/// Works digit by digit: with `i` positions left to fill there are
/// `fact = i!` permutations per choice of leading digit, so the next digit is
/// the `(k-1)/fact`-th smallest digit not yet used.
///
/// # Panics
/// Panics if `n > 9` (a digit would no longer be a single character) or if
/// `k` is outside `1..=n!`.
pub fn get_permutation(n: i32, k: i32) -> String {
    let n = n as usize;
    let mut k = k as usize;
    let mut taken = vec![false; n];
    let mut result = String::with_capacity(n);

    // Marks and returns the index of the `l`-th (0-based) unused digit.
    fn take_lth_free(taken: &mut [bool], l: usize) -> usize {
        let mut free_seen = 0;
        for (j, slot) in taken.iter_mut().enumerate() {
            if !*slot {
                if free_seen == l {
                    *slot = true;
                    return j;
                }
                free_seen += 1;
            }
        }
        unreachable!("l exceeds the number of unused digits")
    }

    // (n-1)!: permutations remaining per choice of the first digit.
    let mut fact: usize = (1..n).product();
    for i in (0..n).rev() {
        let l = (k - 1) / fact;
        let digit_index = take_lth_free(&mut taken, l);
        result.push(std::char::from_digit(digit_index as u32 + 1, 10).expect("n must be <= 9"));
        k -= l * fact;
        // For the next position: (i-1)! = i! / i, guarding against i == 0.
        fact /= std::cmp::max(i, 1);
    }
    result
}
#[test]
fn test_get_permutation() {
assert_eq!(get_permutation(3, 3), "213".to_string());
assert_eq!(get_permutation(4, 9), "2314".to_string());
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.