text stringlengths 8 4.13M |
|---|
import std::ivec;
import std::option;
import std::map::hashmap;
import driver::session::session;
import codemap::span;
import std::map::new_str_hash;
import codemap;
// NOTE(review): this file is pre-1.0 Rust (circa 2011): `tag` declares an
// enum, `@T` is a managed pointer, and `option::t[str]` is the old bracket
// syntax for a generic Option.

// Signature of a normal syntax extension: given the extension context, the
// invocation span, the argument expression, and an optional body string, it
// returns the expanded expression.
type syntax_expander =
    fn(&ext_ctxt, span, @ast::expr, option::t[str]) -> @ast::expr ;

// A macro definition produced by a macro-defining extension: the new
// macro's name plus the extension that implements it.
type macro_def = {ident: str, ext: syntax_extension};

// Signature of an extension that defines a new macro rather than expanding
// in place.
type macro_definer =
    fn(&ext_ctxt, span, @ast::expr, option::t[str]) -> macro_def ;

// The two supported kinds of syntax extension.
tag syntax_extension {
    // Expands directly to an expression.
    normal(syntax_expander);
    // Defines a new macro when invoked (used by "macro" below).
    macro_defining(macro_definer);
}
// A temporary hard-coded map of methods for expanding syntax extension
// AST nodes into full ASTs
// Builds the table of built-in syntax extensions, keyed by the name used at
// the invocation site. "macro" is special: invoking it defines a new
// extension rather than expanding in place.
fn syntax_expander_table() -> hashmap[str, syntax_extension] {
    let syntax_expanders = new_str_hash[syntax_extension]();
    syntax_expanders.insert("fmt", normal(ext::fmt::expand_syntax_ext));
    syntax_expanders.insert("env", normal(ext::env::expand_syntax_ext));
    syntax_expanders.insert("macro",
                            macro_defining(ext::simplext::add_new_extension));
    syntax_expanders.insert("concat_idents",
                            normal(ext::concat_idents::expand_syntax_ext));
    syntax_expanders.insert("ident_to_str",
                            normal(ext::ident_to_str::expand_syntax_ext));
    ret syntax_expanders;
}
// Diagnostic callback that reports at a source span and never returns.
type span_msg_fn = fn(span, str) -> ! ;
// Diagnostic callback with no location information; never returns.
type msg_fn = fn(str) -> ! ;
// Allocates a fresh AST node id.
type next_id_fn = fn() -> ast::node_id ;
// Provides a limited set of services necessary for syntax extensions
// to do their thing
type ext_ctxt =
    {crate_file_name_hack: str,
     span_fatal: span_msg_fn,
     span_unimpl: span_msg_fn,
     span_bug: span_msg_fn,
     bug: msg_fn,
     next_id: next_id_fn};
// Builds an `ext_ctxt` for the given session by wrapping the session's
// diagnostic and node-id services in bound closures.
fn mk_ctxt(sess: &session) -> ext_ctxt {
    fn ext_span_fatal_(sess: &session, sp: span, msg: str) -> ! {
        sess.span_err(sp, msg);
        fail;
    }
    let ext_span_fatal = bind ext_span_fatal_(sess, _, _);
    fn ext_span_unimpl_(sess: &session, sp: span, msg: str) -> ! {
        sess.span_err(sp, "unimplemented " + msg);
        fail;
    }
    // NOTE(review): `ext_span_bug` is bound here while `ext_span_unimpl` is
    // bound below its helper; the interleaved order is harmless but looks
    // accidental.
    let ext_span_bug = bind ext_span_bug_(sess, _, _);
    fn ext_span_bug_(sess: &session, sp: span, msg: str) -> ! {
        sess.span_bug(sp, msg);
    }
    let ext_span_unimpl = bind ext_span_unimpl_(sess, _, _);
    fn ext_bug_(sess: &session, msg: str) -> ! { sess.bug(msg); }
    let ext_bug = bind ext_bug_(sess, _);
    // FIXME: Some extensions work by building ASTs with paths to functions
    // they need to call at runtime. As those functions live in the std crate,
    // the paths are prefixed with "std::". Unfortunately, these paths can't
    // work for code called from inside the standard library, so here we pass
    // the extensions the file name of the crate being compiled so they can
    // use it to guess whether paths should be prepended with "std::". This is
    // super-ugly and needs a better solution.
    let crate_file_name_hack = sess.get_codemap().files.(0).name;
    fn ext_next_id_(sess: &session) -> ast::node_id {
        ret sess.next_node_id(); // temporary, until bind works better
    }
    let ext_next_id = bind ext_next_id_(sess);
    ret {crate_file_name_hack: crate_file_name_hack,
         span_fatal: ext_span_fatal,
         span_unimpl: ext_span_unimpl,
         span_bug: ext_span_bug,
         bug: ext_bug,
         next_id: ext_next_id};
}
// Extracts the string from a string-literal expression; any other
// expression reports `error` via `span_fatal`, which never returns.
fn expr_to_str(cx: &ext_ctxt, expr: @ast::expr, error: str) -> str {
    alt expr.node {
      ast::expr_lit(l) {
        alt l.node {
          ast::lit_str(s, _) { ret s; }
          _ { cx.span_fatal(l.span, error); }
        }
      }
      _ { cx.span_fatal(expr.span, error); }
    }
}
// Extracts a plain single-segment identifier from a path expression.
// Paths carrying type parameters or more than one segment are rejected
// with `error` via `span_fatal`.
fn expr_to_ident(cx: &ext_ctxt, expr: @ast::expr, error: str) -> ast::ident {
    alt expr.node {
      ast::expr_path(p) {
        if ivec::len(p.node.types) > 0u || ivec::len(p.node.idents) != 1u {
            cx.span_fatal(expr.span, error);
        } else { ret p.node.idents.(0); }
      }
      _ { cx.span_fatal(expr.span, error); }
    }
}
// Wraps a literal in a freshly-id'd expression node spanning `sp`.
fn make_new_lit(cx: &ext_ctxt, sp: codemap::span, lit: ast::lit_) ->
   @ast::expr {
    let sp_lit = @{node: lit, span: sp};
    ret @{id: cx.next_id(), node: ast::expr_lit(sp_lit), span: sp};
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
//
|
use crate::error::HandleError;
use crate::filter::{account_by_session, changed_visibility_by_query, with_db};
use model::Account;
use mysql::MySqlPool;
use repository::ChangeAccountVisibility;
use warp::filters::BoxedFilter;
use warp::path;
use warp::{Filter, Rejection, Reply};
/// `POST /user/visibility` — updates the visibility of the account resolved
/// from the session.
///
/// Filter order is significant: it fixes the argument order of
/// `change_visibility_handler` (db repo, account, new visibility flag).
pub fn route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {
    warp::post()
        .and(path("user"))
        .and(path("visibility"))
        .and(with_db(db_pool))
        .and(account_by_session(db_pool))
        .and(changed_visibility_by_query())
        .and_then(change_visibility_handler)
        .boxed()
}
/// Applies the visibility flag from the query string to the session's
/// account, persists the change, and replies with the updated account as a
/// JSON string.
///
/// Persistence failures propagate as a `Rejection` via the `?` on
/// `change_visibility`.
async fn change_visibility_handler<C: ChangeAccountVisibility>(
    repos: C,
    mut account: Account,
    changed_visibility: bool,
) -> Result<impl Reply, Rejection> {
    account.set_visibility(changed_visibility);
    change_visibility(repos, &account)?;
    // Serializing our own `Account` model should be infallible; state the
    // invariant instead of a bare `unwrap()`.
    Ok(serde_json::to_string(&account).expect("Account must serialize to JSON"))
}
/// Persists the account's current visibility through the repository,
/// converting the repository error into `HandleError`.
fn change_visibility<C: ChangeAccountVisibility>(
    mut repos: C,
    account: &Account,
) -> Result<(), HandleError> {
    repos.change_visibility(account)?;
    Ok(())
}
|
use std::env::var;
use solana_client::rpc_client::RpcClient;
use solana_sdk::pubkey::Pubkey;
use solana_sdk::signer::{
keypair::{read_keypair_file, Keypair},
Signer,
};
/// Errors that can occur while building a `SolanaClient` from environment
/// configuration. Each variant carries a human-readable message.
#[derive(Debug)]
pub enum StateError {
    // `SPLIFF_SOLANA_API_URL` was not set.
    SolanaMissingAPIUrl(String),
    // `SPLIFF_SOLANA_KEYPAIR_PATH` was not set.
    SolanaMissingKeypairPath(String),
    // The keypair file at the configured path could not be read/parsed.
    SolanaKeypairLoadError(String),
}
/// A Solana RPC handle together with the local signing identity.
pub struct SolanaClient {
    // JSON-RPC client pointed at the configured API URL.
    pub client: RpcClient,
    // Keypair loaded from the configured keypair file.
    pub keypair: Keypair,
    // Public key derived from `keypair`.
    pub pubkey: Pubkey,
}
impl SolanaClient {
    /// Builds a client from the `SPLIFF_SOLANA_API_URL` and
    /// `SPLIFF_SOLANA_KEYPAIR_PATH` environment variables.
    ///
    /// # Errors
    /// Returns a `StateError` variant naming the missing environment
    /// variable, or `SolanaKeypairLoadError` when the keypair file cannot
    /// be loaded from the configured path.
    pub fn from_env() -> Result<SolanaClient, StateError> {
        // `map_err` + `?` replaces the match-and-early-return boilerplate.
        let solana_api_url = var("SPLIFF_SOLANA_API_URL").map_err(|_| {
            StateError::SolanaMissingAPIUrl(
                "SPLIFF_SOLANA_API_URL environment variable not set".to_string(),
            )
        })?;
        let rpc_client = RpcClient::new(solana_api_url);
        let solana_keypair_path = var("SPLIFF_SOLANA_KEYPAIR_PATH").map_err(|_| {
            StateError::SolanaMissingKeypairPath(
                "SPLIFF_SOLANA_KEYPAIR_PATH environment variable not set".to_string(),
            )
        })?;
        let solana_keypair = read_keypair_file(&solana_keypair_path).map_err(|_| {
            StateError::SolanaKeypairLoadError(format!(
                "Failed to load keypair from path {}",
                solana_keypair_path
            ))
        })?;
        let solana_pubkey = solana_keypair.pubkey();
        Ok(SolanaClient {
            client: rpc_client,
            keypair: solana_keypair,
            pubkey: solana_pubkey,
        })
    }
}
|
use day01;
fn main() {
    // Puzzle input is bundled at compile time and handed to both parts as
    // a slice of lines.
    let lines: Vec<&str> = include_str!("../../input/2018/day1.txt").lines().collect();
    println!("Part 1: {}", day01::part1(&lines));
    println!("Part 2: {}", day01::part2_functional(&lines));
}
|
pub mod closed_range;
pub mod open_range;
// Operations that concern only the range itself
/// Operations on a single integer range with `i8` bounds.
pub trait SelfRange {
    /// Constructs a range from its lower and upper bounds.
    fn new(lower: i8, upper: i8) -> Self;
    /// Renders the range as a human-readable string.
    fn to_string(&self) -> String;
    /// Whether `number` lies inside the range.
    fn contains(&self, number: i8) -> bool;
    /// Whether every element of `numbers` lies inside the range.
    /// An empty vector vacuously returns `true`.
    fn contains_all(&self, numbers: Vec<i8>) -> bool {
        // Iterator form of the original manual loop; short-circuits on the
        // first element outside the range.
        numbers.into_iter().all(|number| self.contains(number))
    }
    /// Parses a range back from its string representation.
    fn parse(string: String) -> Self;
}
// Operations that involve another struct (range-to-range operations)
/// Operations relating this range to another range type `T`.
pub trait MultiRange<T> {
    /// Whether the two ranges are equal.
    fn equals(&self, range: &T) -> bool;
    /// Whether the two ranges overlap, i.e. an intersection exists.
    fn is_connected_to(&self, range: &T) -> bool {
        // `is_ok()` replaces the match-on-Result-to-bool boilerplate.
        self.intersection(range).is_ok()
    }
    /// Returns the intersection's string form.
    ///
    /// # Panics
    /// Panics when the ranges do not intersect; check `is_connected_to`
    /// first if that is not acceptable.
    fn get_intersection(&self, range: &T) -> String {
        self.intersection(range).unwrap()
    }
    /// Computes the intersection, or an error when the ranges are disjoint.
    fn intersection(&self, range: &T) -> Result<String, String>;
}
|
use crate::error::{Error, ParserError, ParserErrorKind as PEK};
use crate::iter::MultiPeek;
use crate::lexer::{TokenIterator, Token, TokenKind as TK};
use crate::position::Position;
use std::collections::BTreeMap;
use std::rc::Rc;
use ExprKind as EK;
/// A parsed function literal: parameter names plus the body expression.
/// Stored behind `Rc` in `ExprKind::Function` so AST clones share it.
#[derive(Clone, Debug)]
pub struct FunctionBean {
    // Parameter names in declaration order.
    pub params: Vec<String>,
    // The function body, a single expression (blocks are `ExprKind::Block`).
    pub body: Box<Expr>,
}
/// A sequence of expressions, as produced for `{ ... }` bodies and scripts.
pub type Block = Vec<Expr>;
/// An expression together with the source position where it starts.
#[derive(Clone, Debug)]
pub struct Expr {
    pub kind: ExprKind,
    pub position: Position,
}
/// Every kind of expression the parser can produce. The language is
/// expression-oriented: statements, blocks and control flow are all exprs.
#[derive(Clone, Debug)]
pub enum ExprKind {
    // Statement-like (can only appear in a semicolon-delimited list of expressions; require
    // semicolon afterwards):
    Continue(Option<Box<Expr>>),
    Break(Option<Box<Expr>>),
    Return(Option<Box<Expr>>),
    Throw(Box<Expr>),
    // (name, optional initializer)
    Let(String, Option<Box<Expr>>),
    // Primary:
    Identifier(String),
    Function(Rc<FunctionBean>),
    Boolean(bool),
    Number(f64),
    String(String),
    Array(Vec<Expr>),
    Object(BTreeMap<String, Expr>),
    Null,
    Paren(Box<Expr>),
    // (condition, body) clauses in order, plus the optional else block.
    If(Vec<(Expr, Block)>, Option<Block>),
    // (try block, (catch binding name, catch block))
    Try(Block, (String, Block)),
    While(Box<Expr>, Block),
    Loop(Block),
    // (loop variable, iterable expression, body)
    For(String, Box<Expr>, Block),
    Block(Block),
    // Primary suffix:
    Call(Box<Expr>, Vec<Expr>),
    Index(Box<Expr>, Box<Expr>),
    Dot(Box<Expr>, String),
    // Unary:
    Negative(Box<Expr>),
    Positive(Box<Expr>),
    BoolNot(Box<Expr>),
    BitNegate(Box<Expr>),
    // Binary:
    Equal(Box<Expr>, Box<Expr>),
    NotEqual(Box<Expr>, Box<Expr>),
    LessThan(Box<Expr>, Box<Expr>),
    LessThanEqual(Box<Expr>, Box<Expr>),
    GreaterThan(Box<Expr>, Box<Expr>),
    GreaterThanEqual(Box<Expr>, Box<Expr>),
    Add(Box<Expr>, Box<Expr>),
    Subtract(Box<Expr>, Box<Expr>),
    Multiply(Box<Expr>, Box<Expr>),
    Divide(Box<Expr>, Box<Expr>),
    Modulo(Box<Expr>, Box<Expr>),
    LeftShift(Box<Expr>, Box<Expr>),
    RightShift(Box<Expr>, Box<Expr>),
    BoolOr(Box<Expr>, Box<Expr>),
    BoolAnd(Box<Expr>, Box<Expr>),
    BoolXor(Box<Expr>, Box<Expr>),
    BitOr(Box<Expr>, Box<Expr>),
    BitAnd(Box<Expr>, Box<Expr>),
    BitXor(Box<Expr>, Box<Expr>),
    // Binary (assignment):
    Assign(Box<Expr>, Box<Expr>),
    LeftShiftAssign(Box<Expr>, Box<Expr>),
    RightShiftAssign(Box<Expr>, Box<Expr>),
    PlusAssign(Box<Expr>, Box<Expr>),
    MinusAssign(Box<Expr>, Box<Expr>),
    MultiplyAssign(Box<Expr>, Box<Expr>),
    DivideAssign(Box<Expr>, Box<Expr>),
    ModuloAssign(Box<Expr>, Box<Expr>),
    OrAssign(Box<Expr>, Box<Expr>),
    AndAssign(Box<Expr>, Box<Expr>),
    XorAssign(Box<Expr>, Box<Expr>),
}
/// Recursive-descent parser state over a multi-peekable token stream.
pub struct Parser<'a> {
    tokens: MultiPeek<TokenIterator<'a>>,
    // Position of the most recently consumed token (updated by `next_token`).
    prev_pos: Position,
    // Position where the next token will start.
    next_pos: Position,
}
impl<'a> Parser<'a> {
    /// Parses an entire script into a top-level expression list.
    pub fn parse_script(script: &str) -> Result<Block, Error> {
        let tokens = MultiPeek::new(TokenIterator::new(script));
        let prev_pos = Position { line: 1, column: 0 };
        let next_pos = tokens.inner().next_pos();
        parse_script(&mut Parser { tokens, prev_pos, next_pos })
    }
    /// Consumes one token, updating `prev_pos`/`next_pos`.
    /// Returns `Ok(None)` at end of input; lexer errors pass through.
    fn next_token(&mut self) -> Result<Option<TK>, Error> {
        match self.tokens.next() {
            Some(Ok(Token { kind, position })) => {
                self.prev_pos = position;
                self.next_pos = self.tokens.inner().next_pos();
                Ok(Some(kind))
            },
            Some(Err(err)) => Err(err),
            None => Ok(None),
        }
    }
    /// Peeks `index` tokens ahead without consuming anything; the position
    /// fields are left untouched.
    fn peek_token(&mut self, index: usize) -> Result<Option<&TK>, Error> {
        match self.tokens.peek(index) {
            Some(Ok(Token { ref kind, .. })) => {
                Ok(Some(kind))
            },
            // Cloned because the peeked error stays in the peek buffer.
            Some(Err(err)) => Err(err.clone()),
            None => Ok(None),
        }
    }
    /// Wraps an error kind and position into the crate-wide `Error`.
    #[inline]
    fn err(&self, kind: PEK, position: Position) -> Error {
        Error::Parser(ParserError { kind, position })
    }
}
/// Consumes the next token and matches it against the supplied arm(s).
/// Any other token is `UnexpectedToken` at the consumed token's position;
/// end of input is `UnexpectedEOF`. Must expand inside a function that
/// returns `Result<_, Error>` (uses `?` and early `return`).
macro_rules! expect_next {
    ($parser:expr, $($match_arm:tt)+) => {
        match $parser.next_token()? {
            $($match_arm)+,
            Some(_) => return Err($parser.err(PEK::UnexpectedToken, $parser.prev_pos)),
            None => return Err($parser.err(PEK::UnexpectedEOF, $parser.next_pos)),
        }
    }
}
/// Requires the next token to be `;` without consuming it — callers'
/// block loops consume the semicolon themselves.
fn expect_semicolon<'a>(parser: &mut Parser<'a>) -> Result<(), Error> {
    let position = parser.next_pos;
    match parser.peek_token(0)? {
        Some(TK::Semicolon) => Ok(()),
        Some(_) => Err(parser.err(PEK::ExpectedSemicolon, position)),
        None => Err(parser.err(PEK::UnexpectedEOF, position)),
    }
}
/// Parses a `{ ... }` block: requires the opening `{`, then delegates to
/// `parse_block_no_lcurly` for the interior and closing brace.
fn parse_block<'a>(parser: &mut Parser<'a>) -> Result<Block, Error> {
    if let Some(TK::LCurly) = parser.next_token()? {
        parse_block_no_lcurly(parser)
    } else {
        Err(parser.err(PEK::ExpectedLCurly, parser.prev_pos))
    }
}
/// Parses expressions until the closing `}` (the `{` is already consumed).
/// A trailing semicolon — or an empty block — appends an implicit `Null`,
/// so `{ a; }` evaluates to null while `{ a }` evaluates to `a`.
fn parse_block_no_lcurly<'a>(parser: &mut Parser<'a>) -> Result<Block, Error> {
    let mut exprs = Vec::new();
    // True whenever a new expression may legally start (after `{` or `;`).
    let mut saw_semicolon = true;
    loop {
        match parser.peek_token(0)? {
            Some(TK::RCurly) => {
                parser.next_token()?;
                break;
            },
            Some(TK::Semicolon) => {
                parser.next_token()?;
                saw_semicolon = true;
                continue;
            },
            None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
            Some(_) => {
                if saw_semicolon {
                    exprs.push(parse_block_expr(parser)?);
                    saw_semicolon = false;
                } else {
                    return Err(parser.err(PEK::ExpectedSemicolon, parser.next_pos));
                }
            },
        }
    }
    // Implicit null result when the last expression was terminated.
    if saw_semicolon {
        exprs.push(Expr { kind: EK::Null, position: parser.prev_pos });
    }
    Ok(exprs)
}
/// Like `parse_block_no_lcurly`, but terminated by end of input instead of
/// a `}`: parses the whole top-level script, with the same implicit-`Null`
/// rule for trailing semicolons and empty input.
fn parse_script<'a>(parser: &mut Parser<'a>) -> Result<Block, Error> {
    let mut exprs = Vec::new();
    // True whenever a new expression may legally start.
    let mut saw_semicolon = true;
    loop {
        match parser.peek_token(0)? {
            Some(TK::Semicolon) => {
                parser.next_token()?;
                saw_semicolon = true;
                continue;
            },
            None => break,
            Some(_) => {
                if saw_semicolon {
                    exprs.push(parse_block_expr(parser)?);
                    saw_semicolon = false;
                } else {
                    return Err(parser.err(PEK::ExpectedSemicolon, parser.next_pos));
                }
            },
        }
    }
    if saw_semicolon {
        exprs.push(Expr { kind: EK::Null, position: parser.prev_pos });
    }
    Ok(exprs)
}
/// Parses one expression in statement position: the statement-only forms
/// (`break`/`continue`/`return`/`throw`/`let`) or a plain expression.
/// Callers only invoke this after peeking a token, hence `unreachable!()`
/// for the `None` case.
fn parse_block_expr<'a>(parser: &mut Parser<'a>) -> Result<Expr, Error> {
    let position = parser.next_pos;
    let kind = match parser.peek_token(0)? {
        Some(TK::Break) => parse_flow(parser, TK::Break)?,
        Some(TK::Continue) => parse_flow(parser, TK::Continue)?,
        Some(TK::Return) => parse_flow(parser, TK::Return)?,
        Some(TK::Throw) => parse_throw(parser)?,
        Some(TK::Let) => parse_let(parser)?,
        Some(_) => return parse_expr(parser),
        None => unreachable!(),
    };
    Ok(Expr { kind, position })
}
/// Parses a primary expression — literals, identifiers, grouping, function
/// literals and the control-flow forms — followed by any chain of
/// call/index/dot suffixes.
fn parse_primary<'a>(parser: &mut Parser<'a>) -> Result<Expr, Error> {
    let position = parser.next_pos;
    let kind = match parser.next_token()? {
        Some(TK::Number(value)) => EK::Number(value),
        Some(TK::String(value)) => EK::String(value),
        Some(TK::Identifier(ident)) => EK::Identifier(ident),
        Some(TK::True) => EK::Boolean(true),
        Some(TK::False) => EK::Boolean(false),
        Some(TK::Null) => EK::Null,
        // `|params| body`; a leading `TK::Or` (`||`) means an empty
        // parameter list, so parameter parsing is skipped.
        Some(TK::Pipe) => parse_function(parser, false)?,
        Some(TK::Or) => parse_function(parser, true)?,
        Some(TK::LSquare) => parse_array(parser)?,
        Some(TK::LCurly) => parse_block_or_object(parser)?,
        Some(TK::If) => parse_if(parser)?,
        Some(TK::While) => parse_while(parser)?,
        Some(TK::For) => parse_for(parser)?,
        Some(TK::Try) => parse_try(parser)?,
        Some(TK::Loop) => parse_loop(parser)?,
        Some(TK::LParen) => parse_paren(parser)?,
        None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
        _ => return Err(parser.err(PEK::UnexpectedToken, position)),
    };
    let mut expr = Expr { kind, position };
    // Fold in postfix operations: `f(...)`, `x[...]`, `x.field`.
    loop {
        match parser.peek_token(0)? {
            Some(TK::LParen) => expr = parse_call(parser, expr)?,
            Some(TK::LSquare) => expr = parse_index(parser, expr)?,
            Some(TK::Period) => expr = parse_dot(parser, expr)?,
            _ => return Ok(expr),
        }
    }
}
/// Parses a call argument list; the callee `expr` is already parsed and
/// the `(` is the next token. A trailing comma before `)` is accepted.
fn parse_call<'a>(parser: &mut Parser<'a>, expr: Expr) -> Result<Expr, Error> {
    parser.next_token()?; // consume `(`
    let position = parser.prev_pos;
    let mut arguments = Vec::new();
    loop {
        match parser.peek_token(0)? {
            Some(TK::RParen) => {
                parser.next_token()?;
                break;
            },
            Some(_) => {
                arguments.push(parse_expr(parser)?);
                match parser.next_token()? {
                    Some(TK::RParen) => break,
                    Some(TK::Comma) => continue,
                    None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
                    _ => return Err(parser.err(PEK::UnexpectedToken, parser.prev_pos)),
                }
            },
            None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
        }
    }
    Ok(Expr { kind: EK::Call(Box::new(expr), arguments), position })
}
/// Parses `expr[subscript]`; the `[` is the next token.
fn parse_index<'a>(parser: &mut Parser<'a>, expr: Expr) -> Result<Expr, Error> {
    parser.next_token()?; // consume `[`
    let position = parser.prev_pos;
    let subscript = Box::new(parse_expr(parser)?);
    expect_next!(parser, Some(TK::RSquare) => ());
    let kind = EK::Index(Box::new(expr), subscript);
    Ok(Expr { kind, position })
}
/// Parses `expr.field`; the `.` is the next token.
fn parse_dot<'a>(parser: &mut Parser<'a>, expr: Expr) -> Result<Expr, Error> {
    parser.next_token()?; // consume `.`
    let position = parser.prev_pos;
    let field = expect_next!(parser, Some(TK::Identifier(ident)) => ident);
    let kind = EK::Dot(Box::new(expr), field);
    Ok(Expr { kind, position })
}
/// Parses an `if cond { } else if cond { } ... else { }` chain into a list
/// of (condition, body) clauses plus an optional final else block. The
/// leading `if` token is already consumed.
fn parse_if<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let mut clauses = Vec::new();
    let mut else_block = None;
    loop {
        clauses.push((parse_expr(parser)?, parse_block(parser)?));
        // No `else`: the chain ends here.
        if let Some(TK::Else) = parser.peek_token(0)? {
            parser.next_token()?;
        } else {
            break;
        }
        // `else if` continues the clause list; a bare `else` takes a block.
        if let Some(TK::If) = parser.peek_token(0)? {
            parser.next_token()?;
        } else {
            else_block = Some(parse_block(parser)?);
            break;
        }
    }
    Ok(EK::If(clauses, else_block))
}
/// Parses `loop { ... }`; the `loop` keyword is already consumed.
fn parse_loop<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let body = parse_block(parser)?;
    Ok(EK::Loop(body))
}
/// Parses `while cond { ... }`; the `while` keyword is already consumed.
fn parse_while<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let condition = Box::new(parse_expr(parser)?);
    let body = parse_block(parser)?;
    Ok(EK::While(condition, body))
}
/// Parses `for ident in expr { ... }`; the `for` keyword is already
/// consumed.
fn parse_for<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let binding = expect_next!(parser, Some(TK::Identifier(ident)) => ident);
    expect_next!(parser, Some(TK::In) => ());
    let iterable = Box::new(parse_expr(parser)?);
    let body = parse_block(parser)?;
    Ok(EK::For(binding, iterable, body))
}
/// Parses `try { ... } catch ident { ... }`; the `try` keyword is already
/// consumed.
fn parse_try<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let try_block = parse_block(parser)?;
    expect_next!(parser, Some(TK::Catch) => ());
    let caught = expect_next!(parser, Some(TK::Identifier(ident)) => ident);
    let catch_block = parse_block(parser)?;
    Ok(EK::Try(try_block, (caught, catch_block)))
}
/// Parses `break`/`continue`/`return` with an optional value expression,
/// requiring a terminating semicolon. `kind` selects which token was peeked
/// and therefore which variant to build; the keyword is the next token.
fn parse_flow<'a>(parser: &mut Parser<'a>, kind: TK) -> Result<EK, Error> {
    parser.next_token()?; // consume the keyword
    let expr = match parser.peek_token(0)? {
        Some(TK::Semicolon) => None,
        Some(_) => Some(Box::new(parse_expr(parser)?)),
        None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
    };
    expect_semicolon(parser)?;
    match kind {
        TK::Continue => Ok(EK::Continue(expr)),
        TK::Break => Ok(EK::Break(expr)),
        TK::Return => Ok(EK::Return(expr)),
        _ => unreachable!(),
    }
}
/// Parses `throw expr;`; the `throw` keyword is the next token. Unlike the
/// flow forms, the thrown expression is mandatory.
fn parse_throw<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    parser.next_token()?; // consume `throw`
    let thrown = Box::new(parse_expr(parser)?);
    expect_semicolon(parser)?;
    Ok(EK::Throw(thrown))
}
/// Parses `let ident;` or `let ident = expr;`; the `let` keyword is the
/// next token.
fn parse_let<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    parser.next_token()?; // consume `let`
    let ident = expect_next!(parser, Some(TK::Identifier(ident)) => ident);
    // Initializer is optional; its absence leaves `None`.
    let initial_value = if let Some(TK::Assign) = parser.peek_token(0)? {
        parser.next_token()?;
        Some(Box::new(parse_expr(parser)?))
    } else {
        None
    };
    expect_semicolon(parser)?;
    Ok(EK::Let(ident, initial_value))
}
/// Parses a function literal after its opening `|` (or after `||`, in which
/// case `skip_params` is true and the parameter list is empty).
fn parse_function<'a>(parser: &mut Parser<'a>, skip_params: bool) -> Result<EK, Error> {
    let mut params = Vec::new();
    if !skip_params {
        // Comma-separated identifiers up to the closing `|`.
        loop {
            match parser.next_token()? {
                Some(TK::Pipe) => break,
                Some(TK::Identifier(param)) => {
                    params.push(param);
                    match parser.next_token()? {
                        Some(TK::Pipe) => break,
                        Some(TK::Comma) => continue,
                        None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
                        _ => return Err(parser.err(PEK::UnexpectedToken, parser.prev_pos)),
                    }
                },
                None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
                _ => return Err(parser.err(PEK::UnexpectedToken, parser.prev_pos)),
            }
        }
    }
    // Typically, an empty curly brace pair (`{}`) is interpreted as an empty object literal. But in
    // this particular context, we want to treat `{}` as a blank function body.
    //
    // To have the function return an empty object, either `{{}}` or `({})` will suffice. The latter
    // is how it would be done in JavaScript, where a similar case of ambiguity between block and
    // object literal exists.
    let body = if is_next_curly_pair(parser)? {
        let position = parser.next_pos;
        let kind = EK::Block(parse_block(parser)?);
        Expr { kind, position }
    } else {
        parse_expr(parser)?
    };
    Ok(EK::Function(Rc::new(FunctionBean { params, body: Box::new(body) })))
}
/// True when the next two tokens are `{` immediately followed by `}` — an
/// empty curly pair. Pure lookahead; consumes nothing.
fn is_next_curly_pair<'a>(parser: &mut Parser<'a>) -> Result<bool, Error> {
    // Nested `if let` replaces the match-to-unit plus match-to-bool
    // boilerplate; `peek_token(1)` is only evaluated after `{` is seen,
    // exactly as before.
    if let Some(TK::LCurly) = parser.peek_token(0)? {
        if let Some(TK::RCurly) = parser.peek_token(1)? {
            return Ok(true);
        }
    }
    Ok(false)
}
/// Parses an array literal's elements; the `[` is already consumed.
/// A trailing comma before `]` is accepted.
fn parse_array<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let mut elements = Vec::new();
    loop {
        match parser.peek_token(0)? {
            Some(TK::RSquare) => {
                parser.next_token()?;
                break;
            },
            Some(_) => {
                elements.push(parse_expr(parser)?);
                match parser.next_token()? {
                    Some(TK::RSquare) => break,
                    Some(TK::Comma) => continue,
                    None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
                    _ => return Err(parser.err(PEK::UnexpectedToken, parser.prev_pos)),
                }
            },
            None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
        }
    }
    Ok(EK::Array(elements))
}
/// Disambiguates a `{` (already consumed) between a block and an object
/// literal using two-token lookahead: `{}`, `{ident:` or `{"str":` mean
/// object; anything else is a block. The single-pass `loop` is used only
/// as a breakable scope.
fn parse_block_or_object<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let mut is_object = false;
    loop {
        // First token must look like an object key (or immediately `}`).
        match parser.peek_token(0)? {
            Some(TK::Identifier(_)) | Some(TK::String(_)) => (),
            Some(TK::RCurly) => {
                // `{}` counts as the empty object literal (see the note in
                // `parse_function` for the one context that overrides this).
                is_object = true;
                break;
            },
            _ => break,
        }
        // The key candidate must be followed by `:`.
        match parser.peek_token(1)? {
            Some(TK::Colon) => (),
            _ => break,
        }
        is_object = true;
        break;
    }
    if is_object {
        parse_object(parser)
    } else {
        Ok(EK::Block(parse_block_no_lcurly(parser)?))
    }
}
/// Parses object literal pairs after the `{`: comma-separated `key: expr`
/// entries whose keys are identifiers or strings. Duplicate keys keep the
/// last value (`BTreeMap::insert` overwrites).
fn parse_object<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let mut pairs = BTreeMap::new();
    loop {
        let key = match parser.next_token()? {
            Some(TK::RCurly) => break,
            Some(TK::Identifier(key)) | Some(TK::String(key)) => key,
            _ => return Err(parser.err(PEK::UnexpectedToken, parser.prev_pos)),
        };
        expect_next!(parser, Some(TK::Colon) => ());
        let value = parse_expr(parser)?;
        pairs.insert(key, value);
        match parser.next_token()? {
            Some(TK::RCurly) => break,
            Some(TK::Comma) => continue,
            None => return Err(parser.err(PEK::UnexpectedEOF, parser.next_pos)),
            _ => return Err(parser.err(PEK::UnexpectedToken, parser.prev_pos)),
        }
    }
    Ok(EK::Object(pairs))
}
/// Parses a parenthesized expression; the `(` is already consumed, and the
/// matching `)` is required.
fn parse_paren<'a>(parser: &mut Parser<'a>) -> Result<EK, Error> {
    let inner = parse_expr(parser)?;
    if let Some(TK::RParen) = parser.next_token()? {
        Ok(EK::Paren(Box::new(inner)))
    } else {
        Err(parser.err(PEK::ExpectedRParen, parser.prev_pos))
    }
}
/// Parses prefix operators (`-`, `+`, `!`, `~`), recursing so stacked
/// prefixes bind right-to-left, then falls through to a primary expression.
fn parse_unary<'a>(parser: &mut Parser<'a>) -> Result<Expr, Error> {
    let position = parser.next_pos;
    let kind = match parser.peek_token(0)? {
        Some(TK::Minus) => {
            parser.next_token()?;
            EK::Negative(Box::new(parse_unary(parser)?))
        },
        Some(TK::Plus) => {
            parser.next_token()?;
            EK::Positive(Box::new(parse_unary(parser)?))
        },
        Some(TK::Bang) => {
            parser.next_token()?;
            EK::BoolNot(Box::new(parse_unary(parser)?))
        },
        Some(TK::Tilde) => {
            parser.next_token()?;
            EK::BitNegate(Box::new(parse_unary(parser)?))
        },
        _ => return parse_primary(parser),
    };
    Ok(Expr { kind, position })
}
/// Binding direction for binary operators of equal precedence.
#[derive(PartialEq, Eq)]
enum Associativity {
    // Left-to-right, e.g. `a - b - c` groups as `(a - b) - c`.
    LTR,
    // Right-to-left, e.g. `a = b = c` groups as `a = (b = c)`.
    RTL,
}
/// Precedence (higher binds tighter) and associativity for binary-operator
/// tokens. Non-operator tokens return -1, which never outranks a real
/// operator and therefore terminates the precedence climb.
fn get_precedence(token: &TK) -> (i8, Associativity) {
    use Associativity::*;
    match *token {
        TK::Divide | TK::Multiply | TK::Modulo => (70, LTR),
        TK::Plus | TK::Minus => (60, LTR),
        TK::LeftShift | TK::RightShift => (50, LTR),
        TK::LessThan | TK::LessThanEqual | TK::GreaterThan | TK::GreaterThanEqual => (41, LTR),
        TK::Equal | TK::NotEqual => (40, LTR),
        TK::Ampersand => (32, LTR),
        TK::Caret => (31, LTR),
        TK::Pipe => (30, LTR),
        TK::And => (22, LTR),
        TK::Xor => (21, LTR),
        TK::Or => (20, LTR),
        // Assignment binds loosest and associates right-to-left.
        TK::Assign | TK::PlusAssign | TK::MinusAssign | TK::MultiplyAssign | TK::DivideAssign |
        TK::LeftShiftAssign | TK::RightShiftAssign | TK::AndAssign | TK::OrAssign | TK::XorAssign |
        TK::ModuloAssign => (10, RTL),
        _ => (-1, LTR),
    }
}
// Precedence climbing method taken from Richards and Whitby-Strevens.
//
// This function is initially called with `prev_prec` as `0`.
/// Precedence-climbing loop: repeatedly absorbs operators whose precedence
/// is at least `prev_prec`, recursing for right-hand sides whose next
/// operator binds tighter (or equally, when right-associative).
fn parse_binary<'a>(parser: &mut Parser<'a>, prev_prec: i8, mut lhs: Expr) -> Result<Expr, Error> {
    loop {
        let (prec, _) = match parser.peek_token(0)? {
            Some(curr_op) => get_precedence(curr_op),
            None => break,
        };
        // Also terminates on non-operators, whose precedence is -1.
        if prec < prev_prec {
            break;
        }
        // We can `unwrap()` here because a next token is guaranteed to exist; if `peek_token()`
        // returned `None`, we would not be here.
        let operator = parser.next_token()?.unwrap();
        let position = parser.prev_pos;
        let mut rhs = parse_unary(parser)?;
        // Let tighter-binding (or equal right-associative) operators claim
        // the right-hand side before we combine.
        loop {
            let (next_prec, next_assoc) = match parser.peek_token(0)? {
                Some(next_op) => get_precedence(next_op),
                None => break,
            };
            if prec < next_prec || (prec == next_prec && next_assoc == Associativity::RTL) {
                rhs = parse_binary(parser, next_prec, rhs)?;
            } else {
                break;
            }
        }
        // Map the operator token to its AST node.
        let lhs_kind = match operator {
            TK::Multiply => EK::Multiply(Box::new(lhs), Box::new(rhs)),
            TK::Divide => EK::Divide(Box::new(lhs), Box::new(rhs)),
            TK::Modulo => EK::Modulo(Box::new(lhs), Box::new(rhs)),
            TK::Plus => EK::Add(Box::new(lhs), Box::new(rhs)),
            TK::Minus => EK::Subtract(Box::new(lhs), Box::new(rhs)),
            TK::LeftShift => EK::LeftShift(Box::new(lhs), Box::new(rhs)),
            TK::RightShift => EK::RightShift(Box::new(lhs), Box::new(rhs)),
            TK::LessThan => EK::LessThan(Box::new(lhs), Box::new(rhs)),
            TK::LessThanEqual => EK::LessThanEqual(Box::new(lhs), Box::new(rhs)),
            TK::GreaterThan => EK::GreaterThan(Box::new(lhs), Box::new(rhs)),
            TK::GreaterThanEqual => EK::GreaterThanEqual(Box::new(lhs), Box::new(rhs)),
            TK::Equal => EK::Equal(Box::new(lhs), Box::new(rhs)),
            TK::NotEqual => EK::NotEqual(Box::new(lhs), Box::new(rhs)),
            TK::Ampersand => EK::BitAnd(Box::new(lhs), Box::new(rhs)),
            TK::Caret => EK::BitXor(Box::new(lhs), Box::new(rhs)),
            TK::Pipe => EK::BitOr(Box::new(lhs), Box::new(rhs)),
            TK::And => EK::BoolAnd(Box::new(lhs), Box::new(rhs)),
            TK::Xor => EK::BoolXor(Box::new(lhs), Box::new(rhs)),
            TK::Or => EK::BoolOr(Box::new(lhs), Box::new(rhs)),
            TK::Assign => EK::Assign(Box::new(lhs), Box::new(rhs)),
            TK::PlusAssign => EK::PlusAssign(Box::new(lhs), Box::new(rhs)),
            TK::MinusAssign => EK::MinusAssign(Box::new(lhs), Box::new(rhs)),
            TK::MultiplyAssign => EK::MultiplyAssign(Box::new(lhs), Box::new(rhs)),
            TK::DivideAssign => EK::DivideAssign(Box::new(lhs), Box::new(rhs)),
            TK::LeftShiftAssign => EK::LeftShiftAssign(Box::new(lhs), Box::new(rhs)),
            TK::RightShiftAssign => EK::RightShiftAssign(Box::new(lhs), Box::new(rhs)),
            TK::AndAssign => EK::AndAssign(Box::new(lhs), Box::new(rhs)),
            TK::OrAssign => EK::OrAssign(Box::new(lhs), Box::new(rhs)),
            TK::XorAssign => EK::XorAssign(Box::new(lhs), Box::new(rhs)),
            TK::ModuloAssign => EK::ModuloAssign(Box::new(lhs), Box::new(rhs)),
            _ => return Err(parser.err(PEK::UnexpectedToken, position)),
        };
        lhs = Expr { kind: lhs_kind, position };
    }
    Ok(lhs)
}
/// Entry point for expression parsing: a unary/primary seed followed by the
/// binary-operator precedence climb, starting at precedence 0.
fn parse_expr<'a>(parser: &mut Parser<'a>) -> Result<Expr, Error> {
    let seed = parse_unary(parser)?;
    parse_binary(parser, 0, seed)
}
|
use std::marker::PhantomData;
use actix_codec::{AsyncRead, AsyncWrite};
use actix_service::{NewService, Service};
use futures::{future::ok, future::FutureResult, Async, Future, Poll};
use openssl::ssl::{HandshakeError, SslConnector};
use tokio_openssl::{ConnectAsync, SslConnectorExt, SslStream};
use crate::resolver::RequestHost;
/// Openssl connector factory
pub struct OpensslConnector<R, T, E> {
    connector: SslConnector,
    // Pins the request/stream/init-error type parameters without storing
    // values of those types.
    _t: PhantomData<(R, T, E)>,
}
impl<R, T, E> OpensslConnector<R, T, E> {
    /// Creates a factory around a pre-configured `SslConnector`.
    pub fn new(connector: SslConnector) -> Self {
        Self { connector, _t: PhantomData }
    }
}
impl<R: RequestHost, T: AsyncRead + AsyncWrite> OpensslConnector<R, T, ()> {
    /// Builds a ready-to-use connector service directly, bypassing the
    /// `NewService` factory step.
    pub fn service(
        connector: SslConnector,
    ) -> impl Service<Request = (R, T), Response = (R, SslStream<T>), Error = HandshakeError<T>>
    {
        OpensslConnectorService {
            // Field-init shorthand; `connector: connector` was redundant.
            connector,
            _t: PhantomData,
        }
    }
}
impl<R, T, E> Clone for OpensslConnector<R, T, E> {
    /// Clones the factory by cloning its inner `SslConnector` handle.
    fn clone(&self) -> Self {
        let connector = self.connector.clone();
        Self { connector, _t: PhantomData }
    }
}
// Factory protocol: every `new_service` call hands out a fresh
// `OpensslConnectorService` sharing the same `SslConnector` configuration.
impl<R: RequestHost, T: AsyncRead + AsyncWrite, E> NewService<()>
    for OpensslConnector<R, T, E>
{
    type Request = (R, T);
    type Response = (R, SslStream<T>);
    type Error = HandshakeError<T>;
    type Service = OpensslConnectorService<R, T>;
    type InitError = E;
    type Future = FutureResult<Self::Service, Self::InitError>;
    fn new_service(&self, _: &()) -> Self::Future {
        // Service creation never fails, hence the immediately-ready `ok`.
        ok(OpensslConnectorService {
            connector: self.connector.clone(),
            _t: PhantomData,
        })
    }
}
/// The working service: wraps a plain stream in a TLS handshake per call.
pub struct OpensslConnectorService<R, T> {
    connector: SslConnector,
    _t: PhantomData<(R, T)>,
}
impl<R: RequestHost, T: AsyncRead + AsyncWrite> Service for OpensslConnectorService<R, T> {
    type Request = (R, T);
    type Response = (R, SslStream<T>);
    type Error = HandshakeError<T>;
    type Future = ConnectAsyncExt<R, T>;
    // Always ready: each call starts an independent handshake, there is no
    // shared capacity to wait on.
    fn poll_ready(&mut self) -> Poll<(), Self::Error> {
        Ok(Async::Ready(()))
    }
    // Starts a TLS handshake using the host name taken from the request,
    // returning a future that yields the request back together with the
    // encrypted stream.
    fn call(&mut self, (req, stream): (R, T)) -> Self::Future {
        ConnectAsyncExt {
            fut: SslConnectorExt::connect_async(&self.connector, req.host(), stream),
            req: Some(req),
        }
    }
}
/// Future adapter pairing the in-flight handshake with the original request.
pub struct ConnectAsyncExt<R, T> {
    // `Option` so the request can be moved out exactly once on completion.
    req: Option<R>,
    fut: ConnectAsync<T>,
}
impl<R, T> Future for ConnectAsyncExt<R, T>
where
    R: RequestHost,
    T: AsyncRead + AsyncWrite,
{
    type Item = (R, SslStream<T>);
    type Error = HandshakeError<T>;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // Handshake errors propagate via `?`.
        match self.fut.poll()? {
            // `req` is taken only here, so `unwrap` holds on the first
            // `Ready`; polling again after completion would panic, per the
            // futures-0.1 contract of not polling finished futures.
            Async::Ready(stream) => Ok(Async::Ready((self.req.take().unwrap(), stream))),
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
|
use P25::random_permute;
fn main() {
let li = vec!['a', 'b', 'c', 'd', 'e', 'f'];
println!("{:?}", random_permute(&li));
println!("{:?}", random_permute(&li));
println!("{:?}", random_permute(&li));
}
|
extern crate bmp;
extern crate rusterizer;
use bmp::{Image, Pixel};
use rusterizer::geometry::{Drawable, Line, Rect};
/*
A bare bones program drawing some lines and rectangles
to a BMP context which then renders into a BMP file
*/
fn main() {
let imgx : u32 = 300;
let imgy : u32 = 300;
let mut img = Image::new(imgx, imgy);
let white : Pixel = Pixel{r: 255, g: 255, b: 255};
for x in 0..100 {
Line::new(0, x*4,x*5, x*4).draw(white, &mut img);
}
for y in 0..10 {
Rect{x: y*10, y: y*10, w: y*10, h: y*7}.draw(white, &mut img);
}
let _ = img.save("test.bmp");
}
// end
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// This test ensures that each pointer type `P<X>` is covariant in `X`.
use std::rc::Rc;
use std::sync::Arc;
// Each function checks covariance of one pointer type `P<X>` in `X` by
// coercing a pointer to `&'static str` into a pointer to the shorter-lived
// `&'r str`. If the type were not covariant, these bodies would not
// compile — the test passes by compiling.
fn a<'r>(x: Box<&'static str>) -> Box<&'r str> {
    x
}
fn b<'r, 'w>(x: &'w &'static str) -> &'w &'r str {
    x
}
fn c<'r>(x: Arc<&'static str>) -> Arc<&'r str> {
    x
}
fn d<'r>(x: Rc<&'static str>) -> Rc<&'r str> {
    x
}
// Compile-time-only test: nothing to run.
fn main() {}
|
use std::{
collections::HashMap,
io::{Write},
};
use rbx_xml::{EncodeError};
use rbx_dom_weak::{RbxValue, RbxTree, RbxInstanceProperties};
/// A Roblox place tree prepared for run-in-roblox: HTTP requests enabled
/// and the port marker instance injected.
pub struct RunInRbxPlace {
    tree: RbxTree,
}
impl RunInRbxPlace {
    /// Takes ownership of a place tree and prepares it: enables
    /// `HttpService` and records `port` in a marker instance.
    pub fn new(mut tree: RbxTree, port: u16) -> RunInRbxPlace {
        enable_http(&mut tree);
        add_plugin_marker(&mut tree, port);
        RunInRbxPlace {
            tree
        }
    }
    /// Serializes the prepared place as Roblox XML to `output`.
    pub fn write<W: Write>(&self, output: W) -> Result<(), EncodeError> {
        // Serialize the root's children rather than the root itself.
        // NOTE(review): `unwrap` assumes the root id always resolves, which
        // holds for an id just obtained from `get_root_id`.
        let root_id = self.tree.get_root_id();
        let top_level_ids = self.tree.get_instance(root_id).unwrap().get_children_ids();
        rbx_xml::to_writer_default(output, &self.tree, top_level_ids)
    }
}
/// Inserts an `IntValue` named `RUN_IN_ROBLOX_PORT` under the tree root,
/// carrying the local port the companion plugin should talk to.
fn add_plugin_marker(tree: &mut RbxTree, port: u16) {
    let mut marker_properties = HashMap::new();
    marker_properties.insert(String::from("Value"), RbxValue::Int32 { value: port as i32 });
    let root_id = tree.get_root_id();
    tree.insert_instance(
        RbxInstanceProperties {
            name: String::from("RUN_IN_ROBLOX_PORT"),
            class_name: String::from("IntValue"),
            properties: marker_properties,
        },
        root_id,
    );
}
/// Ensures the place has an `HttpService` with `HttpEnabled` set, inserting
/// the service when it is missing entirely.
fn enable_http(tree: &mut RbxTree) {
    // `Option::map` replaces the match that re-wrapped Some/None by hand.
    let http_service_id = tree
        .descendants(tree.get_root_id())
        .find(|descendant| descendant.class_name == "HttpService")
        .map(|http_service| http_service.get_id());
    match http_service_id {
        Some(instance_id) => {
            let http_service = tree.get_instance_mut(instance_id)
                .expect("HttpService has disappeared suddenly");
            // Only inserts when absent, so an explicitly pre-set value is
            // preserved. NOTE(review): a pre-existing `false` is kept as-is;
            // confirm that is intended.
            http_service.properties.entry("HttpEnabled".to_string())
                .or_insert(RbxValue::Bool { value : true });
        },
        None => insert_http_service(tree),
    }
}
/// Adds a fresh `HttpService` instance (with `HttpEnabled = true`) under
/// the tree root.
fn insert_http_service(tree: &mut RbxTree) {
    let mut properties = HashMap::new();
    properties.insert(String::from("HttpEnabled"), RbxValue::Bool { value: true });
    let http_service = RbxInstanceProperties {
        name: String::from("HttpService"),
        class_name: String::from("HttpService"),
        properties,
    };
    let root_id = tree.get_root_id();
    tree.insert_instance(http_service, root_id);
}
|
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT.
/// Why was a `CAA` record ignored?
///
/// Each variant carries the offending datum from the record that was skipped,
/// borrowed from the parsed message (lifetime `'a`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum CertificateAuthorityAuthorizationResourceRecordIgnoredBecauseReason<'a>
{
    /// The tag length exceeded 15.
    ///
    /// The payload is presumably the length actually encountered — confirm at the construction site.
    TagLengthExceeded15(usize),
    /// The flag bits contained unassigned values.
    UseOfUnassignedFlagBits(u8),
    /// RFC Errata 3547 clarified that RFC 6844 reserves the property names `auth`, `path` and `policy`.
    TagReservedByRfcErrata3547(&'a [u8]),
    /// Unassigned property name.
    TagUnassigned(&'a [u8]),
}
|
use super::*;
// `min/2` tests where the second argument is another integer. The closure
// builds the second term; `First`/`Second` names which term the operation is
// expected to return (see the `min` helper in this module).
#[test]
fn with_lesser_small_integer_second_returns_second() {
    min(|_, process| process.integer(-1), Second);
}
#[test]
fn with_same_small_integer_second_returns_first() {
    // Second term is literally the first term (identical, not just equal).
    min(|first, _| first, First);
}
#[test]
fn with_same_value_small_integer_second_returns_first() {
    // Equal value but a separately-built term: ties go to the first term.
    min(|_, process| process.integer(0), First);
}
#[test]
fn with_greater_small_integer_second_returns_first() {
    min(|_, process| process.integer(1), First);
}
#[test]
fn with_lesser_big_integer_second_returns_second() {
    // One past SmallInteger::MIN_VALUE forces big-integer representation.
    min(
        |_, process| process.integer(SmallInteger::MIN_VALUE - 1),
        Second,
    )
}
#[test]
fn with_greater_big_integer_second_returns_first() {
    // One past SmallInteger::MAX_VALUE forces big-integer representation.
    min(
        |_, process| process.integer(SmallInteger::MAX_VALUE + 1),
        First,
    )
}
// `min/2` tests with a float second argument (cross-type numeric comparison),
// plus a property-based test over non-number second terms.
#[test]
fn with_lesser_float_second_returns_second() {
    min(|_, process| process.float(-1.0), Second)
}
#[test]
fn with_same_value_float_second_returns_first() {
    // 0.0 compares equal to the integer 0 first term; ties keep the first.
    min(|_, process| process.float(0.0), First)
}
#[test]
fn with_greater_float_second_returns_first() {
    min(|_, process| process.float(1.0), First)
}
// Property-based: for a small-integer first term and any non-number second
// term, the assertion expects the first term back (presumably because numbers
// sort before other terms in term order — confirm against `result`'s docs).
#[test]
fn without_number_second_returns_first() {
    run!(
        |arc_process| {
            (
                strategy::term::integer::small(arc_process.clone()),
                strategy::term::is_not_number(arc_process.clone()),
            )
        },
        |(first, second)| {
            prop_assert_eq!(result(first, second), first);
            Ok(())
        },
    );
}
/// Shared driver for the tests above.
///
/// `second` builds the second argument from the first term and the process;
/// `which` names the term (`First` or `Second`) that the test expects the
/// operation to return. The first argument is always the small integer 0.
fn min<R>(second: R, which: FirstSecond)
where
    R: FnOnce(Term, &Process) -> Term,
{
    super::min(|process| process.integer(0), second, which);
}
|
use std::fs::File;
use std::io::prelude::*;
use std::{thread, time};
/// Blinks GPIO 0 via the Linux sysfs GPIO interface: export the pin, set it
/// as an output, then toggle its value every 500 ms forever.
fn main() {
    // Export GPIO 0 to user space so /sys/class/gpio/gpio0/ appears.
    // Best-effort: the write fails (e.g. EBUSY) if the pin is already
    // exported, which is fine — report and continue.
    let mut f1 = File::create("/sys/class/gpio/export").unwrap();
    if let Err(e) = f1.write_all(b"0") {
        eprintln!("gpio export: {} (already exported?)", e);
    }
    // Configure the pin as an output; without this, value writes are useless.
    let mut f2 = File::create("/sys/class/gpio/gpio0/direction").unwrap();
    f2.write_all(b"out").expect("failed to set gpio0 direction");
    let mut f3 = File::create("/sys/class/gpio/gpio0/value").unwrap();
    // 500 milli seconds
    let delay = time::Duration::from_millis(500);
    // Blink forever: drive the pin high, wait, drive it low, wait.
    // (Previously the loop body was empty; the comments described this
    // behavior but it was never implemented, leaving f3/delay unused.)
    loop {
        f3.write_all(b"1").expect("failed to write gpio0 value");
        thread::sleep(delay);
        f3.write_all(b"0").expect("failed to write gpio0 value");
        thread::sleep(delay);
    }
}
|
// call all intro 1 functions
/// Entry point for these notes: runs every demo function below, in order.
/// Each callee only prints to stdout.
pub fn _intro_1_notes() {
    _mutability();
    _tuples();
    _arrays();
    _strings();
    _ownership();
    _structures();
    _control_flow();
    _enums_and_options();
    _vectors_and_hashmaps();
    _casting_and_lets_and_result();
}
// underscore suppresses the "unused" warning.
/// Demonstrates that bindings are immutable by default and that `mut`
/// opts in to reassignment. Prints 5 and then 10.
pub fn _mutability() {
    // `mut` is required for the reassignment below to compile.
    let mut value: u32 = 5;
    println!("{}", value);
    // Reassigning is only legal because `value` was declared `mut`.
    value = 10;
    println!("{}", value);
}
/// Demonstrates tuples: fixed-size groups of possibly mixed types,
/// accessed by index, printed via Display and Debug formatting.
pub fn _tuples() {
    // Element types can differ freely within one tuple.
    let pair = (1, "fifty");
    // Display formatting of a single element.
    println!("{}", pair.1);
    // Debug formatting wraps strings in quotes (").
    println!("{:?}", pair.1);
    // Debug the whole tuple at once.
    println!("{:?}", pair);
    // Pretty-printed (multi-line) debug output.
    println!("{:#?}", pair);
    // Tuples that are too long do not implement Debug and cannot be printed:
    // let too_long = (1,2,3,4,5,6,7,8,9,10,11,12,13);
    // println!("{:?}", too_long);
}
/// Demonstrates fixed-size arrays and slicing.
pub fn _arrays() {
    // The length is part of the type: [i32; 5].
    let nums: [i32; 5] = [1, 2, 3, 4, 5];
    println!("{:?}", nums);
    // Index one element.
    println!("{:?}", nums[0]);
    // Number of elements.
    println!("{:?}", nums.len());
    // Function-scope import from the standard library.
    use std::mem;
    // Total size of the array in bytes.
    println!("{:?}", mem::size_of_val(&nums));
    // Slice of items 2, 3 and 4: start index inclusive, end index exclusive.
    let window = &nums[1..4];
    println!("{:?}", window);
}
/// Demonstrates owned `String` values versus string-slice literals (`&str`)
/// and concatenation with the `+` operator.
pub fn _strings() {
    // A string literal ("mystring") has type &str — a slice view of string
    // data. `String` is the owned, growable type built on such slices, so a
    // String can be used anywhere a slice is expected.
    let greeting = String::from("Hello ");
    println!("{:?}", greeting);
    // `String + &str`: the left operand is moved, the right one borrowed —
    // hence the reference on `audience`.
    let audience = String::from("World!");
    let message = greeting + &audience;
    println!("{}", message);
}
/// This function covers notes about ownership and borrowing, with some
/// introduction to memory references and dereferencing.
pub fn _ownership() {
    // scope ends at the closing curly brace, so we cannot print _a after its scope ends.
    {
        let _a = 10;
    }
    // println!("{}", _a);
    // variables own values. x owns 1, stored on the stack.
    // let x = 1;
    // "borrowing" allows another variable to use its owned value.
    let strin = String::from("myString");
    let other_strin_var = &strin;
    println!("{}", other_strin_var);
    // "moving" a value as a parameter to a function causes us to lose access
    // to it since we gave it to the function.
    // Vec is like an expandable array (think python list). It is stored in
    // the heap, so it is a referenced value, rather than primitives which are
    // stored in the stack.
    println!("Moving example start.");
    let mut v = Vec::new();
    for i in 1..100 {
        v.push(i);
    }
    // Takes the Vec by value, so ownership moves into this function.
    fn move_v(v: Vec<i32>) {
        println!("_move() took v: {}", v[10] + v[15]);
    }
    move_v(v);
    // If we do not get ownership of the referenced v back, then we can no
    // longer use it since we gave it to move_v().
    // println!("{}", v[0]);
    println!("Moving example end.");
    // "copying" example. Similar to "move", but a parameter is passed by copy
    // of the value. This is what happens with primitives (this example uses i32)
    // with the same syntax as the moving example.
    println!("Copying example start.");
    let a = 20;
    let b = 30;
    println!("before calling _copy() main has a: {} and b: {}", a, b);
    // i32 is Copy, so the caller keeps its values after this call.
    fn copy(a: i32, b: i32) {
        println!("_copy() combined a and b to make: {}", a + b);
    }
    copy(a, b);
    println!("After calling _copy() main still has a: {} and b: {}", a, b);
    println!("Copying example end.");
    // Bigger "borrowing" example.
    println!("Borrowing example start.");
    let mut v2 = Vec::new();
    for i in 1..100 {
        v2.push(i);
    }
    println!("created v2. for testing, the item with index 50 is: {}", v2[50]);
    // v2 gets returned after it is borrowed by the function,
    // so v2 has ownership of its value again.
    fn return_after_borrowing(v: Vec<i32>) -> Vec<i32> {
        println!("_return_after_borrowing() borrowed and will return v2: {}", v[50] + v[51]);
        v
    }
    v2 = return_after_borrowing(v2);
    println!("after being returned from a function that took v2 as Vec as a param: {}", v2[50]);
    // pass a reference to the function. The function will dereference v2 to access it.
    // When the function is finished, then v2 will own the vector again.
    fn borrow_dereference(v: &Vec<i32>) {
        println!("_borrow_dereference borrowed and dereferenced v2: {}", (*v)[50] + (*v)[51]);
    }
    borrow_dereference(&v2);
    println!("after being passed as a reference to a function that took v2 and dereferenced it, v2 can be accessed in main: {}", v2[50]);
    // pass a reference to the function. It sounds like there is
    // some automatic dereferencing happening here.
    // When the function is finished, then v2 will own the vector again.
    fn borrow_without_dereference(v: &Vec<i32>) {
        println!("borrow_without_dereference borrowed v2: {}", v[50] + v[51]);
    }
    borrow_without_dereference(&v2);
    println!("after being passed as a reference to a function, v2 can be accessed in main: {}", v2[50]);
    println!("Borrowing example end.");
    // example of borrowing with loop and function.
    // loop borrows, then function borrows.
    println!("Loop and function borrowing example start.");
    // vec! is a macro for creating a vector.
    let v3 = vec![4, 5, 3, 6, 7, 4, 8, 6, 4, 2, 4, 2, 5, 3, 7, 7];
    println!("created v3: {}", v3[0]);
    // Counts how many elements equal `value`; only borrows the Vec.
    fn count(v: &Vec<i32>, value: i32) -> usize {
        v.into_iter().filter(|&&x| x == value).count()
    }
    for &i in &v3 {
        let i_count = count(&v3, i);
        println!("{} is repeated {} times", i, i_count);
    }
    // v3 is owned after the loop completes because it was only passed by
    // reference to the loop and function.
    println!("v3 is owned after the loop: {}", v3[0]);
    println!("Loop and function borrowing example end.");
}
/// This function covers notes about structures, methods, related functions
/// (like Java static methods), and display/debug traits.
pub fn _structures() {
    // struct contains data (properties).
    // The derive annotation is for deriving the Debug trait, used later for
    // printing a Rectangle with debug info in println!.
    #[derive(Debug)]
    struct Rectangle {
        width: u32,
        height: u32,
    }
    // access properties with a period (.)
    // (free-function form; the method form in the impl below is equivalent)
    fn area(rect: &Rectangle) -> u32 {
        rect.width * rect.height
    }
    // instantiate a Rectangle structure.
    let rect = Rectangle {
        width: 35,
        height: 55,
    };
    println!("Rectangle with {}x{} has area: {}", rect.width, rect.height, area(&rect));
    // Create an implementation of Rectangle that has a method.
    impl Rectangle {
        fn area(&self) -> u32 {
            self.width * self.height
        }
        fn show(&self) {
            println!("Rectangle show: {}x{} has area: {}", self.width, self.height, self.area());
        }
    }
    println!("Rectangle area method: {}x{} has area: {}", rect.width, rect.height, rect.area());
    // Related functions are like static methods in Java. Related functions
    // can be defined in the same impl block, or in a separate one like this.
    // Separating related functions and methods like this is encouraged.
    impl Rectangle {
        // related function that creates a Rectangle.
        fn new(width: u32, height: u32) -> Rectangle {
            Rectangle {
                // Note that if the parameter names are the same as
                // the properties, you don't need to give the value.
                width,//: width,
                height,//: height,
            }
        }
    }
    // Use the related function to create a new rectangle.
    let new_rect = Rectangle::new(57, 83);
    new_rect.show();
    // Because of the derived Debug trait on Rectangle, we can print it with
    // debug info.
    println!("Debug new_rect: {:#?}", new_rect);
    // import for the Display trait.
    use std::fmt;
    // This impl is for adding the Display trait to the Rectangle.
    impl fmt::Display for Rectangle {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            // write! macro is used to write strings to arbitrary writers.
            // Just like Java, we can use it to write to files or buffers
            // or fun things like that.
            write!(f, "({}, {}) and area: {}", self.width, self.height, self.area())
        }
    }
    // With the Display trait, we can print it without debug.
    println!("Print (Display) new_rect: {}", new_rect);
}
/// Notes on control flow: if/else, loops (with labels and loop-binding),
/// while, for over collections and ranges, and `match` in its many forms.
///
/// Fix in this revision: the deprecated `...` range-pattern syntax is
/// replaced by `..=` (same meaning; `...` is a hard error in edition 2021,
/// and this file already uses `..=` in the inclusive-range for loop).
pub fn _control_flow() {
    // logical operators: == != < > >= <=
    let num = 6;
    // if conditions need to evaluate to a boolean.
    // No weird type coercion for if conditions like in javascript.
    // if block
    if num < 10 {
        println!("{} is less than 10", num);
    }
    // if with else if and else
    if num % 4 == 0 {
        println!("{} is divisible by 4", num);
    } else if num % 3 == 0 {
        println!("{} is divisible by 3", num);
    } else {
        println!("example of an else block.");
    }
    // binding (initialize/set a variable) with if and else.
    // This kind of seems like ternary operators in other languages.
    let condition = true;
    let my_num = if condition {
        50
    } else {
        76
    };
    println!("myNum: {}", my_num);
    // Infinite loop example
    // loop {
    //     println!("infinite loop");
    // }
    // loop example
    let mut count = 0;
    loop {
        println!("in loop, count: {}", count);
        count += 1;
        if count >= 10 {
            // it looks like you have to explicitly break loops.
            // there is no "while condition".
            // break, without any labels given, breaks the innermost loop
            // (assuming nested loops).
            break;
        }
    }
    // nested loop with labels example. Nested loops can break by label.
    let mut end_condition = false;
    'a: loop {
        println!("loop a");
        'b: loop {
            println!("loop b");
            'c: loop {
                println!("loop c");
                if end_condition {
                    // end our loops by breaking the outermost one.
                    break 'a;
                }
                end_condition = true;
                // break loop b, meaning we start the next line in loop a.
                break 'b;
            }
            // println!("This print in loop b will not run since c will break b or a.");
        }
        // continue can also be given a label to start the first line in loop a.
        continue 'a;
        // println!("This print will not run.");
    }
    // binding loops
    let x = loop {
        // break acts like "return" in this context.
        break 10;
    };
    println!("x = {}", x);
    // while loop example
    let mut get_to_zero = 10;
    while get_to_zero != 0 {
        println!("while loop: {}!", get_to_zero);
        get_to_zero = get_to_zero - 1;
    }
    // for loop example. for loops look similar to python.
    let my_vec = vec![1, 2, 3, 4, 5];
    for i in my_vec {
        println!("for loop vec example i: {}", i);
    }
    // for loop without a list/array/vector.
    // Note that the range does not include the endpoint
    // (prints to 9 rather than 10).
    for i in 1..10 {
        println!("for loop with range example i: {}", i);
    }
    // for loop with inclusive range.
    // Was experimental when the tutorial was published, but it seems to work now.
    for i in 1..=10 {
        println!("for loop with range INCLUSIVE example i: {}", i);
    }
    // match example. Similar to switch statements in other languages.
    let mat = 5;
    match mat {
        1 => println!("one"),
        2 => println!("two"),
        3 => println!("three"),
        4 => println!("four"),
        5 => println!("five"),
        _ => println!("something else. This is like a default."),
    }
    // match example with multiple conditions leading to the same case.
    // includes single bar (|) or-like options and an inclusive range using ..=
    // (this used to be written `...`, which is now deprecated).
    let mat2 = 19;
    println!("Extra matching example, mat2: {}", mat2);
    match mat2 {
        1 => println!("one"),
        2 | 3 | 5 | 7 | 11 => println!("This is a prime"),
        // note this range is inclusive, including 19
        13..=19 => println!("This is a \"teen\"", ),
        _ => println!("This is not special", ),
    }
    // match with conditions using tuples. We can match on one index,
    // then retrieve the other index for use in a case
    // (in this example, print it out).
    let my_tuple = (0, -2);
    println!("matching example with tuples: {:?}", my_tuple);
    match my_tuple {
        (0, y) => println!("first index is 0, y: {}", y),
        (x, 0) => println!("second index is 0, x: {}", x),
        _ => println!("tuple has no match"),
    }
    // match with extra conditions.
    let pair = (5, -5);
    println!("matching with extra conditions: {:?}", pair);
    match pair {
        (x, y) if x == y => println!("x equals y"),
        (x, y) if x + y == 0 => println!("x plus y equals zero"),
        (x, _) if x % 2 == 0 => println!("x is even"),
        _ => println!("no match"),
    }
    // match with binding a variable to the matched value
    // good for accessing a value that we do not have ownership of,
    // since the bound variable (n) is basically a clone of the given value (p).
    let p = 5;
    println!("matching with binding a variable to the match: {}", p);
    match p {
        n @ 1..=12 => println!("n is between 1-12: {}", n),
        n @ 13..=19 => println!("n is between 13-19: {}", n),
        _ => println!("no match"),
    }
    // match can also be used to bind a variable.
    let p2 = 14;
    println!("binding a variable with a match: {}", p2);
    let n2 = match p2 {
        n @ 1..=12 => n + 1,
        n @ 13..=19 => n + 2,
        _ => 0,
    };
    println!("n2 was assigned: {}", n2);
}
/// Notes on enums (including struct-like variants), impl blocks on enums,
/// the `ref` keyword, enum-based polymorphism, and the `Option` type.
///
/// Fix in this revision: the square and circle area prints were copy-pasted
/// with the label "rect area:"; the labels now name the right shape.
pub fn _enums_and_options() {
    // allows dead, unused code. Another annotation will disallow dead code.
    //#![allow(dead_code)]
    // define an enum with its types.
    // enum Direction {
    //     types in an enum can be structs, containing data
    //     Up(u32),
    //     Down {x: u32, y: f64},
    //     types can also be unit structs which contain no data
    //     Left,
    //     Right,
    // }
    #[derive(Debug)]
    struct Point {
        x: i32,
        y: i32,
    }
    #[derive(Debug)]
    enum Direction {
        _Up(Point),
        _Down(Point),
        _Left(Point),
        _Right(Point),
    }
    #[derive(Debug)]
    enum Keys {
        _UpKey(String),
        _DownKey(String),
        _LeftKey(String),
        _RightKey(String),
    }
    // impl of Direction that matches a direction and returns a key.
    impl Direction {
        fn match_direction(&self) -> Keys {
            match *self {
                Direction::_Up(_) => Keys::_UpKey(String::from("Pressed w")),
                Direction::_Down(_) => Keys::_DownKey(String::from("Pressed s")),
                Direction::_Left(_) => Keys::_LeftKey(String::from("Pressed a")),
                Direction::_Right(_) => Keys::_RightKey(String::from("Pressed d")),
            }
        }
    }
    impl Keys {
        fn destruct(&self) -> &String {
            match *self {
                Keys::_UpKey(ref s) => s,
                Keys::_DownKey(ref s) => s,
                Keys::_LeftKey(ref s) => s,
                Keys::_RightKey(ref s) => s,
            }
        }
    }
    // create an instance of the up direction
    let up_dir = Direction::_Up(Point {x: 0, y: 1});
    let up_key = up_dir.match_direction();
    println!("up_key: {:?}", up_key);
    let up_key_string = up_key.destruct();
    println!("up_key_string from destruct(): {}", up_key_string);
    // intro to ref keyword. ref keyword creates a reference to a value.
    let u = 10; // u owns 10
    let v = &u; // v gets a reference to the 10 that u owns
    let ref z = u; // z gets a reference to the 10 that u owns
    println!("ref keyword with debug values: u: {:?} v: {:?} z: {:?}", u, v, z);
    // prove that z and v are the same. You cannot compare u == v because a
    // reference to an i32 is a different type from an i32.
    if z == v {
        println!("z == v evaluated to true.");
    } else {
        println!("z == v evaluated to false.");
    }
    // Polymorphism example, implemented using match and enums.
    enum Shape {
        Rectangle {width: u32, height: u32},
        Square(u32),
        Circle(f64),
    }
    // define method that any Shape can use.
    impl Shape {
        fn area(&self) -> f64 {
            match *self {
                // note the type cast to f64. This is done to keep the
                // returned type consistent since the circle will return an f64.
                Shape::Rectangle {width, height} => (width * height) as f64,
                Shape::Square(ref s) => (s * s) as f64,
                // NOTE(review): 3.14 approximates pi; std::f64::consts::PI
                // would be exact, but it is kept as-is to preserve this
                // tutorial's printed output.
                Shape::Circle(ref r) => 3.14 * (r * r),
            }
        }
    }
    // Because each of these objects are shapes (types from the Shape enum) they
    // all have the area method.
    let rect = Shape::Rectangle{width: 10, height: 70};
    let square = Shape::Square(10);
    let circle = Shape::Circle(4.5);
    println!("polymorphism example:", );
    let rect_area = rect.area();
    println!("rect area: {}", rect_area);
    let square_area = square.area();
    println!("square area: {}", square_area);
    let circle_area = circle.area();
    println!("circle area: {}", circle_area);
    // options. Option enum from std library looks like:
    // enum Option<T> {
    //     Some(T),
    //     None,
    // }
    // division returns an optional f64.
    fn division(x: f64, y: f64) -> Option<f64> {
        if y == 0.0 {
            None
        } else {
            Some(x / y)
        }
    }
    // optionals checked via match. If None is returned (think null or nil)
    // then do something to handle it. It looks like this prevents situations
    // where null is an unhandled case.
    let res = division(5.0, 7.0);
    // let res = division(5.0, 0.0);
    match res {
        // display 7 places past the decimal point.
        Some(x) => println!("{:.7}", x),
        None => println!("cannot divide by 0"),
    }
}
/// Notes on `Vec` (growable arrays) and `HashMap` (key/value storage).
pub fn _vectors_and_hashmaps() {
    // vectors have variable length and can grow or shrink.
    // slices (think strings) are very similar.
    // they are like lists in python.
    // let x = vec![1,2,3,4];
    let mut v: Vec<i32> = Vec::new();
    v.push(5);
    v.push(6);
    v.push(7);
    v.push(8);
    // iterating over elements in a vector
    for elem in &v {
        println!("{}", elem);
    }
    // list everything in the vector with debug info.
    // The length and capacity can be accessed with these methods.
    println!("v: {:?} length: {} capacity: {}", &v, v.len(), v.capacity());
    // see the vector grow
    v.push(9);
    println!("v: {:?} length: {} capacity: {}", &v, v.len(), v.capacity());
    // vectors have the pop method (think stacks) to remove the last element
    // from the vector and returns an option containing the element.
    // Note that since it is an option, it may return None.
    println!("{:?}", v.pop());
    println!("v: {:?}", &v);
    // empty vector test
    println!("empty vector:");
    let mut empty_vec: Vec<i32> = Vec::new();
    println!("empty_vec: {:?} length: {} capacity: {}", &empty_vec, empty_vec.len(), empty_vec.capacity());
    println!("empty_vec.pop(): {:?}", empty_vec.pop());
    // example of using polymorphism through enums to get multiple types
    // into the same vector.
    #[derive(Debug)]
    enum Example {
        Int(i32),
        Float(f64),
        Text(String),
    }
    let multitype_vec = vec![
        Example::Int(142),
        Example::Float(12.32),
        Example::Text(String::from("string")),
    ];
    println!("multitype example: {:?}", &multitype_vec);
    // hashmaps
    use std::collections::HashMap;
    // create a new hashmap
    let mut hm = HashMap::new();
    // add items with a key and value.
    hm.insert(String::from("random"), 12);
    hm.insert(String::from("strings"), 49);
    // iterate over the keys and values.
    // NOTE: HashMap iteration order is unspecified and can vary run to run.
    println!("Hashmap hm key: value list");
    for (k, v) in &hm {
        println!("{}: {}", k, v);
    }
    // get a value (in an option) by giving a key.
    // use match so you can cover the case where a bad key is given,
    // which will cause get to return None.
    println!("using hm.get(\"random\"):");
    match hm.get(&String::from("random")) {
        Some(&n) => println!("{}", n),
        _ => println!("no match"),
    }
}
/// Notes on `if let` / `while let`, `as` casting, and the `Result` type.
pub fn _casting_and_lets_and_result() {
    // This is what we used so far to handle options:
    let s = Some('c');
    // match s {
    //     Some(i) => println!("{}", i),
    //     _ => {},
    // }
    // the if-let will give us some easier to read syntax when we are working
    // with options that will either be something or None.
    if let Some(i) = s {
        println!("if-let example s: {}", i);
    }// else {
    //     {}
    // }
    // example loop with a counter inside an option.
    // counts by 2 and then quits at 20.
    println!("while-let example: ");
    let mut s = Some(0);
    // loop {
    //     match s {
    //         Some(i) => if i > 19 {
    //             println!("Quit");
    //             s = None;
    //         } else {
    //             println!("{}", i);
    //             s = Some(i + 2);
    //         },
    //         _ => {
    //             break;
    //         }
    //     }
    // }
    // example of a while-let loop, does the same thing as the above loop
    while let Some(i) = s {
        if i > 19 {
            println!("Quit");
            s = None;
        } else {
            println!("{}", i);
            s = Some(i + 2);
        }
    }
    // casting is done with the as keyword.
    let f = 24.4321_f32;
    // float-to-int `as` casts truncate toward zero.
    let i = f as u8;
    let c = i as char;
    // c ends up as some weird character here.
    println!("casting example: f, i, c");
    println!("{} {} {}", f, i, c);
    // only u8 can be cast to char. 256 is out of range of char,
    // but 255 is in range.
    // println!("{}", 256 as char);
    // Result looks like this. It is similar to Option,
    // except that instead of None we can have an Err that has some data inside it.
    // enum Result<T, E> {
    //     Ok(T),
    //     Err(E),
    // }
    // example with Result. Includes opening a File and panic!.
    println!("Result example (hopefully we don't panic!)", );
    use std::fs::File;
    // it looks like rust File can use slashes (unix style) or
    // backslashes (windows style) when referencing a path to a file.
    let f = File::open("res/test_result.txt");
    let _f = match f {
        Ok(file) => file,
        Err(error) => {
            panic!("Result example: There was a problem opening the file: {:?}", error)
        }
    };
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Machine-generated COM interop binding (windows-rs style). The tuple struct
// is #[repr(transparent)], making it ABI-identical to the wrapped IUnknown
// pointer — do not add fields or reorder anything below.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IRadialControllerConfigurationInterop(pub ::windows::core::IUnknown);
impl IRadialControllerConfigurationInterop {
    // Invokes vtable slot 6 with the given HWND and the IID of `T`, then
    // converts the returned (HRESULT, Option) pair into a Result via `and_some`.
    #[cfg(feature = "Win32_Foundation")]
    pub unsafe fn GetForWindow<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HWND>, T: ::windows::core::Interface>(&self, hwnd: Param0) -> ::windows::core::Result<T> {
        let mut result__ = ::core::option::Option::None;
        (::windows::core::Interface::vtable(self).6)(::core::mem::transmute_copy(self), hwnd.into_param().abi(), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
    }
}
// Associates the wrapper with its vtable layout and its COM interface ID.
unsafe impl ::windows::core::Interface for IRadialControllerConfigurationInterop {
    type Vtable = IRadialControllerConfigurationInterop_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x787cdaac_3186_476d_87e4_b9374a7b9970);
}
// Conversion and parameter-passing glue so the wrapper can be used anywhere
// an IUnknown is expected (owned for by-value, borrowed for by-reference).
impl ::core::convert::From<IRadialControllerConfigurationInterop> for ::windows::core::IUnknown {
    fn from(value: IRadialControllerConfigurationInterop) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IRadialControllerConfigurationInterop> for ::windows::core::IUnknown {
    fn from(value: &IRadialControllerConfigurationInterop) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IRadialControllerConfigurationInterop {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IRadialControllerConfigurationInterop {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Raw vtable. Slots 0-2 have IUnknown's shape and slots 3-5 look like
// IInspectable (typical interop layout — NOTE(review): confirm against the
// IDL); slot 6 is the HWND-based method called above. When Win32_Foundation
// is disabled, the slot is replaced by a `usize` pad so indices stay stable.
#[repr(C)]
#[doc(hidden)]
pub struct IRadialControllerConfigurationInterop_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Win32_Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, hwnd: super::super::super::Foundation::HWND, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Win32_Foundation"))] usize,
);
// Machine-generated COM interop binding; same shape as the configuration
// interop above but with a factory method (CreateForWindow) and its own IID.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IRadialControllerIndependentInputSourceInterop(pub ::windows::core::IUnknown);
impl IRadialControllerIndependentInputSourceInterop {
    // Invokes vtable slot 6 with the given HWND and the IID of `T`;
    // `and_some` converts the (HRESULT, Option) pair into a Result.
    #[cfg(feature = "Win32_Foundation")]
    pub unsafe fn CreateForWindow<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HWND>, T: ::windows::core::Interface>(&self, hwnd: Param0) -> ::windows::core::Result<T> {
        let mut result__ = ::core::option::Option::None;
        (::windows::core::Interface::vtable(self).6)(::core::mem::transmute_copy(self), hwnd.into_param().abi(), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
    }
}
// Associates the wrapper with its vtable layout and its COM interface ID.
unsafe impl ::windows::core::Interface for IRadialControllerIndependentInputSourceInterop {
    type Vtable = IRadialControllerIndependentInputSourceInterop_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x3d577eff_4cee_11e6_b535_001bdc06ab3b);
}
// IUnknown conversion and parameter-passing glue.
impl ::core::convert::From<IRadialControllerIndependentInputSourceInterop> for ::windows::core::IUnknown {
    fn from(value: IRadialControllerIndependentInputSourceInterop) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IRadialControllerIndependentInputSourceInterop> for ::windows::core::IUnknown {
    fn from(value: &IRadialControllerIndependentInputSourceInterop) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IRadialControllerIndependentInputSourceInterop {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IRadialControllerIndependentInputSourceInterop {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Raw vtable; layout notes as for the configuration interop above.
#[repr(C)]
#[doc(hidden)]
pub struct IRadialControllerIndependentInputSourceInterop_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Win32_Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, hwnd: super::super::super::Foundation::HWND, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Win32_Foundation"))] usize,
);
// Machine-generated COM interop binding; same shape as the two interops
// above (CreateForWindow factory method), with its own IID.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct IRadialControllerInterop(pub ::windows::core::IUnknown);
impl IRadialControllerInterop {
    // Invokes vtable slot 6 with the given HWND and the IID of `T`;
    // `and_some` converts the (HRESULT, Option) pair into a Result.
    #[cfg(feature = "Win32_Foundation")]
    pub unsafe fn CreateForWindow<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HWND>, T: ::windows::core::Interface>(&self, hwnd: Param0) -> ::windows::core::Result<T> {
        let mut result__ = ::core::option::Option::None;
        (::windows::core::Interface::vtable(self).6)(::core::mem::transmute_copy(self), hwnd.into_param().abi(), &<T as ::windows::core::Interface>::IID, &mut result__ as *mut _ as *mut _).and_some(result__)
    }
}
// Associates the wrapper with its vtable layout and its COM interface ID.
unsafe impl ::windows::core::Interface for IRadialControllerInterop {
    type Vtable = IRadialControllerInterop_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x1b0535c9_57ad_45c1_9d79_ad5c34360513);
}
// IUnknown conversion and parameter-passing glue.
impl ::core::convert::From<IRadialControllerInterop> for ::windows::core::IUnknown {
    fn from(value: IRadialControllerInterop) -> Self {
        value.0
    }
}
impl ::core::convert::From<&IRadialControllerInterop> for ::windows::core::IUnknown {
    fn from(value: &IRadialControllerInterop) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for IRadialControllerInterop {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a IRadialControllerInterop {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Raw vtable; layout notes as for the configuration interop above.
#[repr(C)]
#[doc(hidden)]
pub struct IRadialControllerInterop_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Win32_Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, hwnd: super::super::super::Foundation::HWND, riid: *const ::windows::core::GUID, ppv: *mut *mut ::core::ffi::c_void) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Win32_Foundation"))] usize,
);
|
use crate::metadata::Metadata;
use crate::spatial_ref::SpatialRef;
use crate::utils::{_last_null_pointer_err, _string};
use crate::vector::defn::Defn;
use crate::vector::{Feature, FieldValue, Geometry};
use crate::{dataset::Dataset, gdal_major_object::MajorObject};
use gdal_sys::{
self, GDALMajorObjectH, OGREnvelope, OGRErr, OGRFieldDefnH, OGRFieldType, OGRLayerH,
};
use libc::c_int;
use std::ptr::null_mut;
use std::{convert::TryInto, ffi::CString, marker::PhantomData};
use crate::errors::*;
/// Layer capabilities
///
/// Each variant mirrors one of GDAL's `OLC*` capability name strings; see
/// [`into_cstring`](LayerCaps::into_cstring) for the exact mapping.
#[allow(clippy::upper_case_acronyms)]
pub enum LayerCaps {
    /// Layer capability for random read
    OLCRandomRead,
    /// Layer capability for sequential write
    OLCSequentialWrite,
    /// Layer capability for random write
    OLCRandomWrite,
    /// Layer capability for fast spatial filter
    OLCFastSpatialFilter,
    /// Layer capability for fast feature count retrieval
    OLCFastFeatureCount,
    /// Layer capability for fast extent retrieval
    OLCFastGetExtent,
    /// Layer capability for field creation
    OLCCreateField,
    /// Layer capability for field deletion
    OLCDeleteField,
    /// Layer capability for field reordering
    OLCReorderFields,
    /// Layer capability for field alteration
    OLCAlterFieldDefn,
    /// Layer capability for transactions
    OLCTransactions,
    /// Layer capability for feature deletion
    OLCDeleteFeature,
    /// Layer capability for setting next feature index
    OLCFastSetNextByIndex,
    /// Layer capability for strings returned with UTF-8 encoding
    OLCStringsAsUTF8,
    /// Layer capability for field ignoring
    OLCIgnoreFields,
    /// Layer capability for geometry field creation
    OLCCreateGeomField,
    /// Layer capability for curve geometries support
    OLCCurveGeometries,
    /// Layer capability for measured geometries support
    OLCMeasuredGeometries,
}
// Conversion of capability variants to the names the GDAL C API expects.
impl LayerCaps {
    /// Returns the capability's GDAL name (e.g. `"RandomRead"`) as a `CString`.
    fn into_cstring(self) -> CString {
        let name = match self {
            Self::OLCRandomRead => "RandomRead",
            Self::OLCSequentialWrite => "SequentialWrite",
            Self::OLCRandomWrite => "RandomWrite",
            Self::OLCFastSpatialFilter => "FastSpatialFilter",
            Self::OLCFastFeatureCount => "FastFeatureCount",
            Self::OLCFastGetExtent => "FastGetExtent",
            Self::OLCCreateField => "CreateField",
            Self::OLCDeleteField => "DeleteField",
            Self::OLCReorderFields => "ReorderFields",
            Self::OLCAlterFieldDefn => "AlterFieldDefn",
            Self::OLCTransactions => "Transactions",
            Self::OLCDeleteFeature => "DeleteFeature",
            Self::OLCFastSetNextByIndex => "FastSetNextByIndex",
            Self::OLCStringsAsUTF8 => "StringsAsUTF8",
            Self::OLCIgnoreFields => "IgnoreFields",
            Self::OLCCreateGeomField => "CreateGeomField",
            Self::OLCCurveGeometries => "CurveGeometries",
            Self::OLCMeasuredGeometries => "MeasuredGeometries",
        };
        // The names are fixed ASCII literals without interior NUL bytes,
        // so CString::new cannot fail here.
        CString::new(name).unwrap()
    }
}
/// Layer in a vector dataset
///
/// ```
/// use std::path::Path;
/// use gdal::Dataset;
///
/// let dataset = Dataset::open(Path::new("fixtures/roads.geojson")).unwrap();
/// let mut layer = dataset.layer(0).unwrap();
/// for feature in layer.features() {
///     // do something with each feature
/// }
/// ```
#[derive(Debug)]
pub struct Layer<'a> {
    // Raw OGR layer handle; owned by the parent dataset, not by this struct.
    c_layer: OGRLayerH,
    // Cached feature definition (schema) fetched once at construction.
    defn: Defn,
    // Ties this layer's lifetime to the `Dataset` it came from so the raw
    // handle cannot outlive the dataset.
    phantom: PhantomData<&'a Dataset>,
}
impl<'a> MajorObject for Layer<'a> {
    // The OGR layer handle doubles as the GDAL major-object handle.
    unsafe fn gdal_object_ptr(&self) -> GDALMajorObjectH {
        self.c_layer
    }
}
// Metadata access comes entirely from the trait's default methods.
impl<'a> Metadata for Layer<'a> {}
impl<'a> Layer<'a> {
    /// Creates a new Layer from a GDAL layer pointer
    ///
    /// # Safety
    /// This method operates on a raw C pointer
    pub unsafe fn from_c_layer(_: &'a Dataset, c_layer: OGRLayerH) -> Layer<'a> {
        // Fetch and cache the layer schema up front so `defn()` can return borrows.
        let c_defn = gdal_sys::OGR_L_GetLayerDefn(c_layer);
        let defn = Defn::from_c_defn(c_defn);
        Layer {
            c_layer,
            defn,
            phantom: PhantomData,
        }
    }
    /// Returns the C wrapped pointer
    ///
    /// # Safety
    /// This method returns a raw C pointer
    pub unsafe fn c_layer(&self) -> OGRLayerH {
        self.c_layer
    }
    /// Returns the feature with the given feature id `fid`, or `None` if not found.
    ///
    /// This function is unaffected by the spatial or attribute filters.
    ///
    /// Not all drivers support this efficiently; however, the call should always work if the
    /// feature exists, as a fallback implementation just scans all the features in the layer
    /// looking for the desired feature.
    pub fn feature(&self, fid: u64) -> Option<Feature> {
        // NOTE: the OGR API takes a signed id, so fids above i64::MAX would wrap here.
        let c_feature = unsafe { gdal_sys::OGR_L_GetFeature(self.c_layer, fid as i64) };
        if c_feature.is_null() {
            None
        } else {
            Some(unsafe { Feature::from_c_feature(self.defn(), c_feature) })
        }
    }
    /// Returns iterator over the features in this layer.
    ///
    /// **Note.** This method resets the current index to
    /// the beginning before iteration. It also borrows the
    /// layer mutably, preventing any overlapping borrows.
    pub fn features(&mut self) -> FeatureIterator {
        self.reset_feature_reading();
        FeatureIterator::_with_layer(self)
    }
    /// Set a spatial filter on this layer.
    ///
    /// Refer [OGR_L_SetSpatialFilter](https://gdal.org/doxygen/classOGRLayer.html#a75c06b4993f8eb76b569f37365cd19ab)
    pub fn set_spatial_filter(&mut self, geometry: &Geometry) {
        unsafe { gdal_sys::OGR_L_SetSpatialFilter(self.c_layer, geometry.c_geometry()) };
    }
    /// Set a spatial rectangle filter on this layer by specifying the bounds of a rectangle.
    pub fn set_spatial_filter_rect(&mut self, min_x: f64, min_y: f64, max_x: f64, max_y: f64) {
        unsafe { gdal_sys::OGR_L_SetSpatialFilterRect(self.c_layer, min_x, min_y, max_x, max_y) };
    }
    /// Clear spatial filters set on this layer.
    pub fn clear_spatial_filter(&mut self) {
        // Passing a null geometry removes the filter.
        unsafe { gdal_sys::OGR_L_SetSpatialFilter(self.c_layer, null_mut()) };
    }
    /// Get the name of this layer.
    pub fn name(&self) -> String {
        let rv = unsafe { gdal_sys::OGR_L_GetName(self.c_layer) };
        _string(rv)
    }
    /// Returns `true` when the layer reports support for `capability`.
    pub fn has_capability(&self, capability: LayerCaps) -> bool {
        unsafe {
            gdal_sys::OGR_L_TestCapability(self.c_layer, capability.into_cstring().as_ptr()) == 1
        }
    }
    /// Returns the cached feature definition (schema) of this layer.
    pub fn defn(&self) -> &Defn {
        &self.defn
    }
    /// Creates one field on this layer per `(name, type)` pair in `fields_def`.
    pub fn create_defn_fields(&self, fields_def: &[(&str, OGRFieldType::Type)]) -> Result<()> {
        for &(name, field_type) in fields_def {
            let fdefn = FieldDefn::new(name, field_type)?;
            fdefn.add_to_layer(self)?;
        }
        Ok(())
    }
    /// Creates a new feature with the given `geometry` and writes it to this layer.
    pub fn create_feature(&mut self, geometry: Geometry) -> Result<()> {
        let feature = Feature::new(&self.defn)?;
        // `into_c_geometry` transfers ownership of the geometry to the feature.
        let c_geometry = unsafe { geometry.into_c_geometry() };
        let rv = unsafe { gdal_sys::OGR_F_SetGeometryDirectly(feature.c_feature(), c_geometry) };
        if rv != OGRErr::OGRERR_NONE {
            return Err(GdalError::OgrError {
                err: rv,
                method_name: "OGR_F_SetGeometryDirectly",
            });
        }
        let rv = unsafe { gdal_sys::OGR_L_CreateFeature(self.c_layer, feature.c_feature()) };
        if rv != OGRErr::OGRERR_NONE {
            return Err(GdalError::OgrError {
                err: rv,
                method_name: "OGR_L_CreateFeature",
            });
        }
        Ok(())
    }
    /// Creates a new feature with `geometry` and the given field values, then
    /// writes it to this layer. `field_names` and `values` are paired by
    /// position; extra entries in the longer slice are ignored.
    pub fn create_feature_fields(
        &mut self,
        geometry: Geometry,
        field_names: &[&str],
        values: &[FieldValue],
    ) -> Result<()> {
        let mut ft = Feature::new(&self.defn)?;
        ft.set_geometry(geometry)?;
        for (fd, val) in field_names.iter().zip(values.iter()) {
            ft.set_field(fd, val)?;
        }
        ft.create(self)?;
        Ok(())
    }
    /// Returns the number of features in this layer, even if it requires expensive calculation.
    ///
    /// Some drivers will actually scan the entire layer once to count objects.
    ///
    /// The returned count takes the [spatial filter](`Layer::set_spatial_filter`) into account.
    /// For dynamic databases the count may not be exact.
    pub fn feature_count(&self) -> u64 {
        // force = 1: ask OGR to compute the count even if it has to scan.
        (unsafe { gdal_sys::OGR_L_GetFeatureCount(self.c_layer, 1) }) as u64
    }
    /// Returns the number of features in this layer, if it is possible to compute this
    /// efficiently.
    ///
    /// For some drivers, it would be expensive to establish the feature count, in which case
    /// [`None`] will be returned.
    ///
    /// The returned count takes the [spatial filter](`Layer::set_spatial_filter`) into account.
    /// For dynamic databases the count may not be exact.
    pub fn try_feature_count(&self) -> Option<u64> {
        // force = 0: OGR returns a negative value instead of scanning.
        let rv = unsafe { gdal_sys::OGR_L_GetFeatureCount(self.c_layer, 0) };
        if rv < 0 {
            None
        } else {
            Some(rv as u64)
        }
    }
    /// Returns the extent of this layer as an axis-aligned bounding box, even if it requires
    /// expensive calculation.
    ///
    /// Some drivers will actually scan the entire layer once to count objects.
    ///
    /// Depending on the driver, the returned extent may or may not take the [spatial
    /// filter](`Layer::set_spatial_filter`) into account. So it is safer to call `get_extent`
    /// without setting a spatial filter.
    ///
    /// Layers without any geometry may return [`OGRErr::OGRERR_FAILURE`] to indicate that no
    /// meaningful extents could be collected.
    pub fn get_extent(&self) -> Result<gdal_sys::OGREnvelope> {
        let mut envelope = OGREnvelope {
            MinX: 0.0,
            MaxX: 0.0,
            MinY: 0.0,
            MaxY: 0.0,
        };
        let force = 1;
        let rv = unsafe { gdal_sys::OGR_L_GetExtent(self.c_layer, &mut envelope, force) };
        if rv != OGRErr::OGRERR_NONE {
            return Err(GdalError::OgrError {
                err: rv,
                method_name: "OGR_L_GetExtent",
            });
        }
        Ok(envelope)
    }
    /// Returns the extent of this layer as an axis-aligned bounding box, if it is possible to
    /// compute this efficiently.
    ///
    /// For some drivers, it would be expensive to calculate the extent, in which case [`None`]
    /// will be returned.
    ///
    /// Depending on the driver, the returned extent may or may not take the [spatial
    /// filter](`Layer::set_spatial_filter`) into account. So it is safer to call `try_get_extent`
    /// without setting a spatial filter.
    pub fn try_get_extent(&self) -> Result<Option<gdal_sys::OGREnvelope>> {
        let mut envelope = OGREnvelope {
            MinX: 0.0,
            MaxX: 0.0,
            MinY: 0.0,
            MaxY: 0.0,
        };
        let force = 0;
        let rv = unsafe { gdal_sys::OGR_L_GetExtent(self.c_layer, &mut envelope, force) };
        // OGRERR_FAILURE here means "not computable cheaply", not a real error.
        if rv == OGRErr::OGRERR_FAILURE {
            Ok(None)
        } else if rv != OGRErr::OGRERR_NONE {
            Err(GdalError::OgrError {
                err: rv,
                method_name: "OGR_L_GetExtent",
            })
        } else {
            Ok(Some(envelope))
        }
    }
    /// Fetch the spatial reference system for this layer.
    ///
    /// Refer [OGR_L_GetSpatialRef](https://gdal.org/doxygen/classOGRLayer.html#a75c06b4993f8eb76b569f37365cd19ab)
    pub fn spatial_ref(&self) -> Result<SpatialRef> {
        let c_obj = unsafe { gdal_sys::OGR_L_GetSpatialRef(self.c_layer) };
        if c_obj.is_null() {
            return Err(_last_null_pointer_err("OGR_L_GetSpatialRef"));
        }
        SpatialRef::from_c_obj(c_obj)
    }
    // Rewinds the layer's internal cursor so the next read starts at feature 0.
    fn reset_feature_reading(&mut self) {
        unsafe {
            gdal_sys::OGR_L_ResetReading(self.c_layer);
        }
    }
    /// Set a new attribute query that restricts features when using the feature iterator.
    ///
    /// From the GDAL docs: Note that installing a query string will generally result in resetting the current reading position
    ///
    /// Parameters:
    /// - `query` in restricted SQL WHERE format
    ///
    pub fn set_attribute_filter(&mut self, query: &str) -> Result<()> {
        let c_str = CString::new(query)?;
        let rv = unsafe { gdal_sys::OGR_L_SetAttributeFilter(self.c_layer, c_str.as_ptr()) };
        if rv != OGRErr::OGRERR_NONE {
            return Err(GdalError::OgrError {
                err: rv,
                method_name: "OGR_L_SetAttributeFilter",
            });
        }
        Ok(())
    }
    /// Clear the attribute filter set on this layer
    ///
    /// From the GDAL docs: Note that installing a query string will generally result in resetting the current reading position
    ///
    pub fn clear_attribute_filter(&mut self) {
        // A null query string removes the filter.
        unsafe {
            gdal_sys::OGR_L_SetAttributeFilter(self.c_layer, null_mut());
        }
    }
}
/// Iterator over the features of a [`Layer`].
pub struct FeatureIterator<'a> {
    // Schema shared by all features this iterator yields.
    defn: &'a Defn,
    // Raw handle of the layer being iterated.
    c_layer: OGRLayerH,
    // Feature count captured at construction, if cheaply available.
    size_hint: Option<usize>,
}
impl<'a> Iterator for FeatureIterator<'a> {
    type Item = Feature<'a>;
    /// Yields the next feature via OGR's internal read cursor, or `None` when
    /// the layer is exhausted.
    #[inline]
    fn next(&mut self) -> Option<Feature<'a>> {
        let c_feature = unsafe { gdal_sys::OGR_L_GetNextFeature(self.c_layer) };
        if c_feature.is_null() {
            return None;
        }
        Some(unsafe { Feature::from_c_feature(self.defn, c_feature) })
    }
    /// Exact bounds when the feature count was known up front; otherwise unbounded.
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.size_hint.map_or((0, None), |n| (n, Some(n)))
    }
}
impl<'a> FeatureIterator<'a> {
    /// Builds an iterator over `layer`, caching the layer's feature count
    /// (when cheaply available) so `size_hint` can report exact bounds.
    pub fn _with_layer(layer: &'a Layer) -> FeatureIterator<'a> {
        let defn = layer.defn();
        // The count is a u64; drop the hint if it does not fit in usize.
        // (`and_then` replaces the previous `.map(..).flatten()` — clippy::map_flatten.)
        let size_hint = layer
            .try_feature_count()
            .and_then(|s| s.try_into().ok());
        FeatureIterator {
            c_layer: layer.c_layer,
            size_hint,
            defn,
        }
    }
}
/// Owned wrapper around an OGR field definition handle.
pub struct FieldDefn {
    // Raw handle; created in `new` and released in `Drop`.
    c_obj: OGRFieldDefnH,
}
impl Drop for FieldDefn {
    // Releases the underlying OGR field definition.
    fn drop(&mut self) {
        unsafe { gdal_sys::OGR_Fld_Destroy(self.c_obj) };
    }
}
impl MajorObject for FieldDefn {
    // The field-definition handle doubles as the GDAL major-object handle.
    unsafe fn gdal_object_ptr(&self) -> GDALMajorObjectH {
        self.c_obj
    }
}
impl FieldDefn {
    /// Creates a field definition with the given `name` and OGR `field_type`.
    ///
    /// Returns an error if `name` contains a NUL byte or OGR fails to allocate
    /// the definition.
    pub fn new(name: &str, field_type: OGRFieldType::Type) -> Result<FieldDefn> {
        let c_str = CString::new(name)?;
        let c_obj = unsafe { gdal_sys::OGR_Fld_Create(c_str.as_ptr(), field_type) };
        if c_obj.is_null() {
            return Err(_last_null_pointer_err("OGR_Fld_Create"));
        };
        Ok(FieldDefn { c_obj })
    }
    /// Sets the formatting width of this field (e.g. column width for strings).
    pub fn set_width(&self, width: i32) {
        unsafe { gdal_sys::OGR_Fld_SetWidth(self.c_obj, width as c_int) };
    }
    /// Sets the formatting precision (digits after the decimal point).
    pub fn set_precision(&self, precision: i32) {
        unsafe { gdal_sys::OGR_Fld_SetPrecision(self.c_obj, precision as c_int) };
    }
    /// Creates this field on `layer`. The layer keeps its own copy of the
    /// definition; `self` remains owned by the caller.
    pub fn add_to_layer(&self, layer: &Layer) -> Result<()> {
        let rv = unsafe { gdal_sys::OGR_L_CreateField(layer.c_layer(), self.c_obj, 1) };
        if rv != OGRErr::OGRERR_NONE {
            return Err(GdalError::OgrError {
                err: rv,
                // Bug fix: previously reported "OGR_L_CreateFeature" even though
                // the failing call is OGR_L_CreateField.
                method_name: "OGR_L_CreateField",
            });
        }
        Ok(())
    }
}
|
extern crate regex;
use std::io;
use std::fs;
use std::io::BufRead;
use std::path::Path;
use std::collections::HashSet;
/// Day 5: decode boarding passes, print the demo seat, the maximum seat id,
/// and the first free seat between the lowest and highest taken ones.
fn main() {
    let input = parse_input();
    let sample = "FBFBBFFRLR";
    println!("{} => {}", sample, line_to_seat_number(sample));
    let min = *input.taken_seats.iter().min().unwrap();
    let max = *input.taken_seats.iter().max().unwrap();
    println!("Max seat is {}", max);
    // Our seat is the only gap in the contiguous range of taken seats.
    if let Some(seat) = (min..max).find(|s| !input.taken_seats.contains(s)) {
        println!("My seat is {}", seat);
    }
}
// Parsed puzzle input: the set of seat ids found in the input file.
struct InputData {
    taken_seats: HashSet<usize>,
}
// No inherent methods yet; construction happens in `parse_input`.
impl InputData {
}
/// Decodes a boarding-pass string into its seat id.
///
/// Each character is one binary digit (most significant first): `B`/`R` are 1,
/// `F`/`L` are 0. Any other character is ignored, leaving the accumulator
/// unchanged — same behavior as the original loop. An empty string yields 0.
fn line_to_seat_number(line: &str) -> usize {
    line.chars().fold(0, |seat, c| match c {
        // The two one-bits and the two zero-bits were previously four
        // duplicate arms; merged here.
        'B' | 'R' => seat * 2 + 1,
        'F' | 'L' => seat * 2,
        _ => seat,
    })
}
/// Reads `input/day5.txt` and decodes every line into a seat id.
///
/// Panics with the same messages as before when the file cannot be opened or
/// a line cannot be read. Fixes the redundant `taken_seats: taken_seats`
/// field init and flattens the nested matches.
fn parse_input() -> InputData {
    let lines = match lines_in_file("input/day5.txt") {
        Ok(lines) => lines,
        Err(_) => panic!("Error reading file"),
    };
    let taken_seats = lines
        .map(|line| match line {
            Ok(text) => line_to_seat_number(&text),
            Err(_) => panic!("Error reading line"),
        })
        .collect();
    InputData { taken_seats }
}
/// Opens `file_path` and returns a buffered iterator over its lines.
fn lines_in_file<P: AsRef<Path>>(file_path: P) -> io::Result<io::Lines<io::BufReader<fs::File>>> {
    fs::File::open(file_path).map(|file| io::BufReader::new(file).lines())
}
|
use super::{run_data_test, InfluxRpcTest};
use async_trait::async_trait;
use data_types::{MAX_NANO_TIME, MIN_NANO_TIME};
use futures::{prelude::*, FutureExt};
use std::sync::Arc;
use test_helpers_end_to_end::{DataGenerator, GrpcRequestBuilder, MiniCluster, StepTestState};
// End-to-end check: with a full-range timestamp predicate, the storage gRPC
// `measurement_names` call returns every table written by `DataGenerator`.
#[tokio::test]
async fn measurement_names() {
    let generator = Arc::new(DataGenerator::new());
    run_data_test(
        Arc::clone(&generator),
        Box::new(move |state: &mut StepTestState| {
            let generator = Arc::clone(&generator);
            async move {
                let mut storage_client = state.cluster().querier_storage_client();
                // Build the request against the cluster under test, spanning
                // the generator's full time range.
                let measurement_names_request = GrpcRequestBuilder::new()
                    .source(state.cluster())
                    .timestamp_range(generator.min_time(), generator.max_time())
                    .build_measurement_names();
                let measurement_names_response = storage_client
                    .measurement_names(measurement_names_request)
                    .await
                    .unwrap();
                let responses: Vec<_> = measurement_names_response
                    .into_inner()
                    .try_collect()
                    .await
                    .unwrap();
                // Names come back as raw bytes; decode before comparing.
                let values = &responses[0].values;
                let values: Vec<_> = values
                    .iter()
                    .map(|s| std::str::from_utf8(s).unwrap())
                    .collect();
                assert_eq!(
                    values,
                    vec!["attributes", "cpu_load_short", "status", "swap", "system"]
                );
            }
            .boxed()
        }),
    )
    .await
}
// Without any predicate, all measurements of the setup are returned.
#[tokio::test]
async fn no_predicate() {
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurements",
        request: GrpcRequestBuilder::new(),
        expected_names: vec!["cpu", "disk"],
    })
    .run()
    .await;
}
// A timestamp range matching no rows yields an empty name list.
#[tokio::test]
async fn predicate_no_results() {
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurementsManyFields",
        // no rows pass this predicate
        request: GrpcRequestBuilder::new().timestamp_range(10_000_000, 20_000_000),
        expected_names: vec![],
    })
    .run()
    .await;
}
// Rows that match the predicate but hold only null values for the requested
// field must not contribute their table name.
#[tokio::test]
async fn predicate_no_non_null_results() {
    // only a single row with a null field passes this predicate (expect no table names)
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurementsManyFields",
        // no rows pass this predicate
        request: GrpcRequestBuilder::new()
            // only get last row of o2 (timestamp = 300)
            .timestamp_range(200, 400)
            // model predicate like _field='reading' which last row does not have
            .field_predicate("reading")
            .measurement_predicate("o2"),
        expected_names: vec![],
    })
    .run()
    .await;
}
// Same scenario as `predicate_no_non_null_results`, but the extra tag
// predicate forces the querier down the generic (non-specialized) plan path.
#[tokio::test]
async fn generic_plan_predicate_no_non_null_results() {
    // only a single row with a null field passes this predicate (expect no table names) -- has a
    // general purpose predicate to force a generic plan
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurementsManyFields",
        request: GrpcRequestBuilder::new()
            // only get last row of o2 (timestamp = 300)
            .timestamp_range(200, 400)
            // model predicate like _field='reading' which last row does not have
            .field_predicate("reading")
            .measurement_predicate("o2")
            .tag_predicate("state", "CA"),
        expected_names: vec![],
    })
    .run()
    .await;
}
// A range covering every row returns both measurements.
#[tokio::test]
async fn timestamp_range_includes_all_measurements() {
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurements",
        request: GrpcRequestBuilder::new().timestamp_range(0, 201),
        expected_names: vec!["cpu", "disk"],
    })
    .run()
    .await;
}
// The end of the range is exclusive, so the row at t=200 (disk) is cut off.
#[tokio::test]
async fn timestamp_range_includes_some_measurements() {
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurements",
        request: GrpcRequestBuilder::new().timestamp_range(0, 200),
        expected_names: vec!["cpu"],
    })
    .run()
    .await;
}
// A range beyond all rows returns no measurement names.
#[tokio::test]
async fn timestamp_range_includes_no_measurements() {
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurements",
        request: GrpcRequestBuilder::new().timestamp_range(250, 350),
        expected_names: vec![],
    })
    .run()
    .await;
}
// Upper bound MAX_NANO_TIME + 1 (exclusive) includes a row at MAX_NANO_TIME.
#[tokio::test]
async fn timestamp_range_max_time_included() {
    Arc::new(MeasurementNamesTest {
        setup_name: "MeasurementWithMaxTime",
        request: GrpcRequestBuilder::new().timestamp_range(MIN_NANO_TIME + 1, MAX_NANO_TIME + 1),
        expected_names: vec!["cpu"],
    })
    .run()
    .await;
}
// Upper bound MAX_NANO_TIME (exclusive) excludes the row at MAX_NANO_TIME.
#[tokio::test]
async fn timestamp_range_max_time_excluded() {
    Arc::new(MeasurementNamesTest {
        setup_name: "MeasurementWithMaxTime",
        request: GrpcRequestBuilder::new().timestamp_range(MIN_NANO_TIME + 1, MAX_NANO_TIME),
        expected_names: vec![],
    })
    .run()
    .await;
}
// The full [MIN_NANO_TIME, MAX_NANO_TIME] span matches everything.
#[tokio::test]
async fn timestamp_range_all_time() {
    Arc::new(MeasurementNamesTest {
        setup_name: "MeasurementWithMaxTime",
        request: GrpcRequestBuilder::new().timestamp_range(MIN_NANO_TIME, MAX_NANO_TIME + 1),
        expected_names: vec!["cpu"],
    })
    .run()
    .await;
}
// Measurement names containing periods must survive the round trip intact.
#[tokio::test]
async fn periods() {
    Arc::new(MeasurementNamesTest {
        setup_name: "PeriodsInNames",
        request: GrpcRequestBuilder::new().timestamp_range(MIN_NANO_TIME, MAX_NANO_TIME + 1),
        expected_names: vec!["measurement.one"],
    })
    .run()
    .await;
}
// A field + field-value predicate narrows the result to the one matching table.
#[tokio::test]
async fn generic_predicate() {
    Arc::new(MeasurementNamesTest {
        setup_name: "TwoMeasurements",
        request: GrpcRequestBuilder::new()
            .field_predicate("bytes")
            .field_value_predicate(99),
        expected_names: vec!["disk"],
    })
    .run()
    .await;
}
// Parameterized fixture shared by the tests above: load `setup_name`, issue
// `request` as a measurement_names call, and expect `expected_names` back.
#[derive(Debug)]
struct MeasurementNamesTest {
    // Name of the pre-canned data setup to load into the cluster.
    setup_name: &'static str,
    // Request to send, minus the source (filled in per cluster).
    request: GrpcRequestBuilder,
    // Measurement names the response must equal, in order.
    expected_names: Vec<&'static str>,
}
#[async_trait]
impl InfluxRpcTest for MeasurementNamesTest {
    fn setup_name(&self) -> &'static str {
        self.setup_name
    }
    // Issues the configured measurement_names request against `cluster` and
    // asserts the decoded names match `expected_names` exactly.
    async fn request_and_assert(&self, cluster: &MiniCluster) {
        let mut storage_client = cluster.querier_storage_client();
        // Clone so the fixture can be reused; attach the cluster as source.
        let measurement_names_request = self
            .request
            .clone()
            .source(cluster)
            .build_measurement_names();
        let measurement_names_response = storage_client
            .measurement_names(measurement_names_request)
            .await
            .unwrap();
        let responses: Vec<_> = measurement_names_response
            .into_inner()
            .try_collect()
            .await
            .unwrap();
        // Names arrive as raw bytes; decode to &str for comparison.
        let names = &responses[0].values;
        let names: Vec<_> = names
            .iter()
            .map(|s| std::str::from_utf8(s).unwrap())
            .collect();
        assert_eq!(names, self.expected_names);
    }
}
|
use super::*;
impl Mask {
    /// Returns an iterator yielding each set bit as its own single-bit `Mask`.
    pub fn single_bits(self) -> MaskIter {
        MaskIter(self)
    }
    /// Returns an iterator yielding the index of each set bit.
    pub fn single_bit_indices(self) -> IndexIter {
        IndexIter(self)
    }
}
/// Iterator over the set bits of a `Mask`, yielding one single-bit mask per step.
#[derive(Eq, Copy, Clone, Debug, PartialEq)]
pub struct MaskIter(Mask);
impl Iterator for MaskIter {
    type Item = Mask;
    // Yields set bits from least to most significant.
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == masks::EMPTY {
            None
        } else {
            let mask = self.0;
            let result = mask.least_significant_bit();
            // `x & (x - 1)` clears the lowest set bit, advancing the iterator.
            self.0 = Mask(mask.0 & mask.0.wrapping_sub(1));
            Some(result)
        }
    }
}
impl DoubleEndedIterator for MaskIter {
    // Yields set bits from most to least significant.
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.0 == masks::EMPTY {
            None
        } else {
            let mask = self.0;
            let result = mask.most_significant_bit();
            // XOR removes exactly the single bit just yielded.
            self.0 = Mask(mask.0 ^ result.0);
            Some(result)
        }
    }
}
/// Iterator over the set bits of a `Mask`, yielding each bit's index.
#[derive(Eq, Copy, Clone, Debug, PartialEq)]
pub struct IndexIter(Mask);
impl Iterator for IndexIter {
    type Item = u32;
    // Yields bit indices from least to most significant.
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == masks::EMPTY {
            None
        } else {
            let mask = self.0;
            let result = mask.index_of_least_significant_bit();
            // `x & (x - 1)` clears the lowest set bit, advancing the iterator.
            self.0 = Mask(mask.0 & mask.0.wrapping_sub(1));
            Some(result)
        }
    }
}
impl DoubleEndedIterator for IndexIter {
    // Yields bit indices from most to least significant.
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.0 == masks::EMPTY {
            None
        } else {
            let mask = self.0;
            let result = mask.index_of_most_significant_bit();
            // Clear the bit at `result` by XORing its single-bit mask.
            self.0 = Mask(mask.0 ^ (1u64 << result));
            // Sanity check: the popcount must strictly decrease each step.
            debug_assert!(mask.count() > self.0.count(),
                          "{:X}, {:X}",
                          mask.0,
                          (self.0).0);
            Some(result)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::super::*;
    use quickcheck::*;
    // Property tests: both iterators must visit exactly `count()` items for
    // any mask.
    quickcheck! {
        fn count_should_match_single_bits(m : Mask) -> bool {
            m.count() as usize== m.single_bits().count()
        }
        fn count_should_match_single_bit_indices(m : Mask) -> bool {
            m.count() as usize== m.single_bit_indices().count()
        }
    }
    // Lets quickcheck generate arbitrary masks from random u64 values.
    impl Arbitrary for Mask {
        fn arbitrary<G: Gen>(g: &mut G) -> Self {
            Mask(g.next_u64())
        }
    }
}
use std::collections::HashSet;
use divisors::Divisors;
use proconio::input;
fn main() {
    // n: number of indices; a: m special indices — presumably from a toggle
    // puzzle where pressing i flips all divisors of i (TODO confirm against
    // the problem statement).
    input! {
        n: usize,
        m: usize,
        a: [usize; m],
    };
    let set: HashSet<usize> = a.into_iter().collect();
    // open[i] == true when i has been flipped an odd number of times by the
    // divisor toggles performed for indices larger than i.
    let mut open = vec![false; n + 1];
    let mut ans = n;
    // Sweep from n down to 1 so each decision depends only on already-settled
    // larger indices.
    for i in (1..=n).rev() {
        // Membership in `set` inverts the accumulated toggle state (XOR).
        let yes = if set.contains(&i) {
            !open[i]
        } else {
            open[i]
        };
        if yes {
            ans -= 1;
            // Flip every divisor of i.
            for d in i.divisors() {
                open[d] = !open[d];
            }
        }
    }
    println!("{}", ans);
}
|
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_context;
use clippy_utils::ty::implements_trait;
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, Pat, PatKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
    /// ### What it does
    /// Checks for pattern matches that can be expressed using equality.
    ///
    /// ### Why is this bad?
    ///
    /// * It reads better and has less cognitive load because equality won't cause binding.
    /// * It is a [Yoda condition](https://en.wikipedia.org/wiki/Yoda_conditions). Yoda conditions are widely
    /// criticized for increasing the cognitive load of reading the code.
    /// * Equality is a simple bool expression and can be merged with `&&` and `||` and
    /// reuse if blocks
    ///
    /// ### Example
    /// ```rust,ignore
    /// if let Some(2) = x {
    ///     do_thing();
    /// }
    /// ```
    /// Should be written
    /// ```rust,ignore
    /// if x == Some(2) {
    ///     do_thing();
    /// }
    /// ```
    #[clippy::version = "1.57.0"]
    pub EQUATABLE_IF_LET,
    nursery,
    "using pattern matching instead of equality"
}
// Registers the lint pass that owns EQUATABLE_IF_LET.
declare_lint_pass!(PatternEquality => [EQUATABLE_IF_LET]);
/// detects if pattern matches just one thing
fn unary_pattern(pat: &Pat<'_>) -> bool {
fn array_rec(pats: &[Pat<'_>]) -> bool {
pats.iter().all(unary_pattern)
}
match &pat.kind {
PatKind::Slice(_, _, _) | PatKind::Range(_, _, _) | PatKind::Binding(..) | PatKind::Wild | PatKind::Or(_) => {
false
},
PatKind::Struct(_, a, etc) => !etc && a.iter().all(|x| unary_pattern(x.pat)),
PatKind::Tuple(a, etc) | PatKind::TupleStruct(_, a, etc) => !etc.is_some() && array_rec(a),
PatKind::Ref(x, _) | PatKind::Box(x) => unary_pattern(x),
PatKind::Path(_) | PatKind::Lit(_) => true,
}
}
/// Returns whether `ty == other` is usable, i.e. `ty` implements the `Eq`
/// lang-item trait (`PartialEq`) for `other`; `false` when the lang item is
/// missing entirely.
// NOTE(review): `'tcx` is used but not declared on this fn — presumably the
// crate enables in-band lifetimes; confirm before reusing elsewhere.
fn is_structural_partial_eq(cx: &LateContext<'tcx>, ty: Ty<'tcx>, other: Ty<'tcx>) -> bool {
    if let Some(def_id) = cx.tcx.lang_items().eq_trait() {
        implements_trait(cx, ty, def_id, &[other.into()])
    } else {
        false
    }
}
impl<'tcx> LateLintPass<'tcx> for PatternEquality {
    // Fires on `let <pat> = <exp>` expressions (i.e. `if let`/`while let`
    // scrutinee positions) where the pattern is unary and the scrutinee's
    // type supports `==` against the pattern's type.
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
        if_chain! {
            if let ExprKind::Let(pat, exp, _) = expr.kind;
            if unary_pattern(pat);
            let exp_ty = cx.typeck_results().expr_ty(exp);
            let pat_ty = cx.typeck_results().pat_ty(pat);
            if is_structural_partial_eq(cx, exp_ty, pat_ty);
            then {
                let mut applicability = Applicability::MachineApplicable;
                // Struct literals need parentheses to stay parseable on the
                // right-hand side of `==`.
                let pat_str = match pat.kind {
                    PatKind::Struct(..) => format!(
                        "({})",
                        snippet_with_context(cx, pat.span, expr.span.ctxt(), "..", &mut applicability).0,
                    ),
                    _ => snippet_with_context(cx, pat.span, expr.span.ctxt(), "..", &mut applicability).0.to_string(),
                };
                span_lint_and_sugg(
                    cx,
                    EQUATABLE_IF_LET,
                    expr.span,
                    "this pattern matching can be expressed using equality",
                    "try",
                    format!(
                        "{} == {}",
                        snippet_with_context(cx, exp.span, expr.span.ctxt(), "..", &mut applicability).0,
                        pat_str,
                    ),
                    applicability,
                );
            }
        }
    }
}
|
use async_channel::{Receiver, Sender};
use net::{packets::*, Runtime};
mod server_net;
use server_net::server;
mod database;
use database::*;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
use simple_logger::SimpleLogger;
// A country owned by a player. NOTE(review): currently unused in this file's
// visible code — presumably consumed by RequestCountryInfo once implemented.
#[derive(PartialEq, Eq, Hash)]
struct Country {
    owner: UserId,
}
/// Decodes one client packet from `read_buffer` and queues the appropriate
/// response on `write_buf_sender`.
///
/// Returns `Err` only when the packet cannot be deserialized; protocol-level
/// failures (bad login, weak password) are reported to the client instead.
async fn handle_packet(
    read_buffer: &mut ReadBuffer,
    write_buf_sender: Sender<WriteBuf>,
    db: &mut Database,
) -> Result<(), ReadValueError> {
    let packet = read_buffer.read_client_packet()?;
    // Log-message typo fixed ("Recieved" -> "Received").
    info!("Received packet {:?} from client", packet);
    match packet {
        ClientPacket::Connect => {
            write_buf_sender
                .send(
                    WriteBuf::new_server_packet(ServerPacket::ServerInfo).push(ServerInfo {
                        name: "Alpha server".to_string(),
                        description: "The testing server".to_string(),
                        host: "James".to_string(),
                    }),
                )
                .await
                .unwrap();
        }
        ClientPacket::SignUp => {
            let auth = Authentication::deserialize(read_buffer)?;
            info!("signup Auth {:?}", auth);
            // Reject weak passwords and duplicate usernames.
            let response = if PlayerData::pass_secure(&auth) {
                if db.get_player_by_username(&auth.username).is_some() {
                    ServerPacket::InvalidSignup
                } else {
                    db.players.push(PlayerData::new(auth));
                    ServerPacket::SucessfulSignup
                }
            } else {
                ServerPacket::InvalidSignup
            };
            write_buf_sender
                .send(WriteBuf::new_server_packet(response))
                .await
                .unwrap();
        }
        ClientPacket::Login => {
            let auth = Authentication::deserialize(read_buffer)?;
            info!("login Auth {:?}", auth);
            // A missing user and a wrong password yield the same response so
            // clients cannot probe which usernames exist.
            let response = if let Some(user) = db.get_player_by_username(&auth.username) {
                if user.check_pass(auth.password) {
                    ServerPacket::SucessfulLogin
                } else {
                    ServerPacket::InvalidLogin
                }
            } else {
                ServerPacket::InvalidLogin
            };
            write_buf_sender
                .send(WriteBuf::new_server_packet(response))
                .await
                .unwrap();
        }
        ClientPacket::RequestCountryInfo => todo!(),
    }
    Ok(())
}
/// Drains the channel of decoded client packets, dispatching each one to
/// `handle_packet` against a single shared database instance. Deserialization
/// errors are logged and the loop continues.
// The unused `<'a>` lifetime parameter was removed (clippy::extra_unused_lifetimes);
// no caller names the lifetime, so the signature stays compatible.
async fn handle_packets(read_buf_reciever: Receiver<(ReadBuffer, Sender<WriteBuf>)>) {
    let mut db = Database::construct();
    while let Ok(mut data) = read_buf_reciever.recv().await {
        handle_packet(&mut data.0, data.1, &mut db)
            .await
            .unwrap_or_else(|e| error!("error reading data from client {}", e))
    }
}
fn main() {
    SimpleLogger::new().init().unwrap();
    info!("Spawning runtime");
    let rt = Runtime::new().unwrap();
    // Network task pushes (packet, reply-channel) pairs; the handler task
    // consumes them. Unbounded: backpressure is not applied here.
    let (read_buf_sender, read_buf_reciever) =
        async_channel::unbounded::<(ReadBuffer, Sender<WriteBuf>)>();
    rt.spawn(server("0.0.0.0:2453", read_buf_sender));
    // Blocks forever processing packets.
    rt.block_on(handle_packets(read_buf_reciever));
}
|
use actix::prelude::*;
use failure::{err_msg, Fallible};
use std::collections::{HashMap, VecDeque};
use std::time::Duration;
use ycommon::runner_proto as proto;
use super::client_proto::*;
use super::runner_proxy::{self, MsgRunnerEvent, RunnerProxy};
use super::*;
use crate::{
app::{self, api},
db,
};
// Address a room uses to push events to one connected client.
pub type RoomClientListener = Recipient<ClientEvents>;
// Events a room delivers to its clients.
#[derive(Debug, Clone, Message)]
pub enum ClientEvents {
    Packet(ClientResponses),
    Destroy, // destroy current client
}
// Per-client bookkeeping inside a room.
struct RoomClient {
    // Room-local id assigned at join time.
    id: u32,
    name: String,
    // Channel for pushing ClientEvents back to this client.
    recipient: RoomClientListener,
}
// Lifecycle notifications a room emits to its owner (e.g. the room manager).
#[derive(Debug, Clone, Message)]
pub enum RoomEvents {
    Closed(String), // room_key
}
// Address that receives RoomEvents.
pub type RoomEventListener = Recipient<RoomEvents>;
/// Actor representing one collaborative pad/terminal session identified by
/// `room_key`. It owns the connected clients, the code buffer, and a proxy to
/// the remote runner service.
pub struct Room {
    room_key: String,
    // Run environment resolved from the pad's language once loaded.
    room_run_env: Option<proto::RunEnv>,
    // Notified when this room closes.
    listener: RoomEventListener,
    clients: HashMap<u32, RoomClient>,
    next_client_id: u32,
    runner_proxy_addr: Option<Addr<RunnerProxy>>,
    // cache output
    terminal_stdout: VecDeque<String>,
    // room stop handler
    stop_handle: Option<SpawnHandle>,
    // current code buffer
    code_buffer: TextBuffer,
    // pad with content
    pad: Option<db::Pad>,
}
impl Room {
    /// Creates an empty room for `room_key`; the pad content and runner proxy
    /// are loaded lazily in `Actor::started`.
    //
    // Takes `&str` rather than `&String` (clippy::ptr_arg). Existing callers
    // passing `&String` keep compiling via deref coercion.
    pub fn new(room_key: &str, listener: RoomEventListener) -> Room {
        Room {
            room_key: room_key.to_owned(),
            room_run_env: None,
            listener,
            clients: HashMap::new(),
            next_client_id: 1,
            runner_proxy_addr: None,
            terminal_stdout: VecDeque::new(),
            stop_handle: None,
            code_buffer: TextBuffer::new(),
            pad: None,
        }
    }
    /// Convenience accessor for the process-wide room configuration.
    pub fn room_config() -> &'static app::config::RoomConfig {
        &app::config().room
    }
}
impl Actor for Room {
    type Context = Context<Self>;
    // On start: synchronously (via `.wait`) load the pad from the database,
    // seed the code buffer, and spawn a runner proxy for the pad's language.
    // Any failure stops the actor. Afterwards, schedule periodic auto-save.
    fn started(&mut self, ctx: &mut <Self as Actor>::Context) {
        let runner_service_url = Self::room_config().runner_service_url.to_owned();
        let recipient = ctx.address().recipient();
        let room_key = self.room_key.to_owned();
        let api_executor = api::api_executor();
        let req = api::ReqQueryPad {
            hash: room_key.clone(),
        };
        api_executor
            .send(req)
            .into_actor(self)
            .then(|res, this: &mut Self, context| {
                // Mailbox/delivery error from the API executor.
                let resp = match res {
                    Err(err) => {
                        warn!("load pad error = {:?}", err);
                        context.stop();
                        return fut::err(());
                    }
                    Ok(val) => val,
                };
                // Database-level error.
                let data: api::RespQueryPad = match resp {
                    Err(err) => {
                        warn!("load pad database err = {:?}", err);
                        context.stop();
                        return fut::err(());
                    }
                    Ok(data) => data,
                };
                // pad loaded
                let curr_pad: &db::Pad = match data.pad.as_ref() {
                    None => {
                        warn!("pad not found: {}", data.hash);
                        context.stop();
                        return fut::err(());
                    }
                    Some(val) => val,
                };
                let pad_language = curr_pad.language.to_owned();
                info!("pad loaded: {}, language = {}", curr_pad.hash, pad_language);
                this.pad = data.pad;
                // Seed the shared editor buffer with the saved code, if any.
                if let Some(content) = data.content {
                    this.code_buffer.set_text(content.code.as_str());
                }
                // Window size is unknown until a client attaches; use 0x0.
                let run_env = proto::RunEnv {
                    win_size: proto::WinSize { row: 0, col: 0 },
                    language: pad_language,
                    boot: None,
                };
                this.room_run_env = Some(run_env.clone());
                let proxy_addr =
                    RunnerProxy::new(room_key, run_env, recipient, runner_service_url).start();
                this.runner_proxy_addr = Some(proxy_addr);
                fut::ok(())
            })
            .wait(ctx);
        // start auto save
        let cfg = Self::room_config();
        if cfg.auto_save_seconds > 0 {
            let save_interval = Duration::from_secs(cfg.auto_save_seconds);
            ctx.run_interval(save_interval, |this: &mut Self, context| {
                this.save_pad_content(context);
            });
        }
    }
    // Flush content, tear down clients and the runner proxy, and notify the
    // owner before the actor fully stops.
    fn stopping(&mut self, ctx: &mut Self::Context) -> Running {
        info!("room stopping: {}", self.room_key);
        self.save_pad_content(ctx);
        self.destroy_clients(ctx);
        self.destroy_proxy(ctx);
        let _ = self
            .listener
            .do_send(RoomEvents::Closed(self.room_key.clone()));
        Running::Stop
    }
    fn stopped(&mut self, ctx: &mut Self::Context) {
        info!("room stopped: {}", self.room_key);
    }
}
impl Room {
pub fn broadcast_all(&self, msg: ClientEvents) {
self.broadcast_excludes(msg, vec![]);
}
pub fn broadcast_excludes(&self, msg: ClientEvents, excludes: Vec<u32>) {
for client in self.clients.values() {
if excludes.contains(&client.id) {
continue;
}
let res = client.recipient.do_send(msg.clone());
match res {
Err(err) => info!("send recipient {} error: {:?}", client.id, err),
Ok(_) => (),
}
}
}
fn destroy_clients(&mut self, ctx: &mut <Self as Actor>::Context) {
self.broadcast_all(ClientEvents::Destroy);
}
fn destroy_proxy(&mut self, ctx: &mut <Self as Actor>::Context) {
if let Some(runner_proxy_addr) = self.runner_proxy_addr.as_ref() {
info!("sending stop signal to runner_proxy");
let _ = runner_proxy_addr.do_send(runner_proxy::ReqStop);
}
}
fn save_pad_content(&mut self, ctx: &mut <Self as Actor>::Context) {
let pad: &db::Pad = match self.pad.as_ref() {
None => {
info!("pad not found, skip saving content: {}", self.room_key);
return;
}
Some(val) => val,
};
let code = self.code_buffer.text();
let req = api::ReqSavePadContent {
content: db::NewPadContent {
pad_id: pad.id,
code: code,
},
};
let api_executor = api::api_executor();
api_executor.do_send(req);
}
}
// Request for a client to join the room; resolves to the assigned client id.
pub struct MsgJoinRoom {
    pub name: String,
    pub client: RoomClientListener,
}
impl Message for MsgJoinRoom {
    type Result = u32; // client id
}
impl Handler<MsgJoinRoom> for Room {
    type Result = MessageResult<MsgJoinRoom>;
    // Registers the client, replays current room state to it (language,
    // editor text, cached terminal output), and cancels any pending
    // empty-room shutdown.
    fn handle(&mut self, msg: MsgJoinRoom, ctx: &mut Self::Context) -> Self::Result {
        let client_id = self.get_next_client_id();
        let room_client = RoomClient {
            id: client_id,
            name: msg.name,
            recipient: msg.client,
        };
        info!("client({}) joins room: {}", client_id, self.room_key);
        // send language to client
        let language = match self.pad.as_ref() {
            None => "plaintext".to_owned(),
            Some(pad) => pad.language.to_owned(),
        };
        let msg_lang = ClientResponses::Command(CommandResponseParams::SetLang(language));
        let _ = room_client
            .recipient
            .do_send(ClientEvents::Packet(msg_lang));
        // send editor text to client
        let curr_code = self.code_buffer.text();
        let msg_code = ClientResponses::Editor(EditorSyncParams::Text(curr_code));
        let _ = room_client
            .recipient
            .do_send(ClientEvents::Packet(msg_code));
        // send cached output to client
        for line in self.terminal_stdout.iter() {
            let value = TerminalResponseParams::Stdout(line.to_owned());
            let msg_stdout = ClientResponses::Terminal(value);
            let _ = room_client
                .recipient
                .do_send(ClientEvents::Packet(msg_stdout));
        }
        self.clients.insert(client_id, room_client);
        // check stop handle
        if let Some(stop_handle) = self.stop_handle.take() {
            info!(
                "client({}) joined room, cancelling stop handle: {}",
                client_id, self.room_key
            );
            ctx.cancel_future(stop_handle);
        }
        MessageResult(client_id)
    }
}
impl Room {
    /// Allocate the next client id (monotonically increasing, first id is 1).
    fn get_next_client_id(&mut self) -> u32 {
        self.next_client_id += 1;
        // Tail expression instead of an explicit `return`.
        self.next_client_id
    }
}
/// Actor message: a client is leaving the room.
pub struct MsgLeaveRoom {
    /// Id previously assigned by `MsgJoinRoom`.
    pub client_id: u32,
}
impl Message for MsgLeaveRoom {
    // Replies with the removed client's id (0 when it was not present).
    type Result = u32;
}
impl Handler<MsgLeaveRoom> for Room {
    type Result = MessageResult<MsgLeaveRoom>;

    /// Remove a client. If the room becomes empty, schedule a delayed stop
    /// (cancelled if someone rejoins); always broadcast a cursor-clear so
    /// remaining peers drop the leaver's cursors.
    fn handle(&mut self, msg: MsgLeaveRoom, ctx: &mut Self::Context) -> Self::Result {
        info!("client({}) leaves room: {}", msg.client_id, self.room_key);
        let client_option = self.clients.remove(&msg.client_id);
        if self.clients.is_empty() {
            // if room is empty, destroy room after a few seconds
            let delay_ms = Self::room_config().close_delay_ms;
            info!("room is empty now, waiting for {}ms to stop", delay_ms);
            let stop_handle = ctx.run_later(
                Duration::from_millis(delay_ms),
                |this: &mut Self, context: &mut Self::Context| {
                    info!("stop delay timeout, call stop room now: {}", this.room_key);
                    context.stop();
                },
            );
            // Kept so a subsequent join can cancel the pending shutdown.
            self.stop_handle = Some(stop_handle);
        }
        // broadcast remove all cursors: a cursor event with no positions
        // tells peers to clear this client's cursor.
        let payload = EditorSyncParams::Cursor(CursorChangedEvent {
            peer_id: msg.client_id,
            position: None,
            secondary_positions: vec![],
        });
        let new_msg = ClientResponses::Editor(payload);
        self.broadcast_excludes(ClientEvents::Packet(new_msg), vec![msg.client_id]);
        match client_option {
            None => MessageResult(0),
            Some(client) => MessageResult(client.id),
        }
    }
}
/// Actor message: a raw client request, tagged with the sender's id.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct MsgRoomClientRequest {
    /// Id of the client that sent the request.
    pub client_id: u32,
    /// The decoded request payload.
    pub client_request: ClientRequests,
}
impl Message for MsgRoomClientRequest {
    // The reply value is currently unused (always 0).
    type Result = u32;
}
impl Handler<MsgRoomClientRequest> for Room {
    type Result = MessageResult<MsgRoomClientRequest>;

    /// Dispatch an incoming client packet to the matching room handler.
    /// Handler errors are logged but never fail the message.
    fn handle(&mut self, msg: MsgRoomClientRequest, ctx: &mut Self::Context) -> Self::Result {
        info!("room: {}, client msg: {:?}", self.room_key, msg);
        let packet = msg.client_request;
        match packet {
            ClientRequests::Editor(payload) => {
                if let Err(err) = self.on_editor(ctx, msg.client_id, payload) {
                    warn!("on_editor err = {:?}", err);
                }
            }
            ClientRequests::Command(payload) => {
                if let Err(err) = self.on_command(ctx, msg.client_id, payload) {
                    warn!("on_command err = {:?}", err);
                }
            }
            ClientRequests::Terminal(payload) => {
                if let Err(err) = self.on_terminal(ctx, msg.client_id, payload) {
                    warn!("on_terminal err = {:?}", err);
                }
            }
        }
        MessageResult(0)
    }
}
impl Room {
    /// Apply an editor sync packet from `client_id` to the shared code
    /// buffer, then re-broadcast it to every other client.
    ///
    /// Invalid edits (out-of-range positions) are logged and skipped rather
    /// than failing the whole packet.
    pub fn on_editor(
        &mut self,
        ctx: &mut <Self as Actor>::Context,
        client_id: u32,
        mut payload: EditorSyncParams,
    ) -> Fallible<()> {
        info!("on_editor: client = {}, msg = {:?}", client_id, payload);
        // sync to code buffer
        // TODO: port teletype algorithms here
        match &mut payload {
            // `_text`: binding deliberately unused (was an unused-var warning).
            EditorSyncParams::Text(_text) => {
                warn!("not supported text input from client");
            }
            EditorSyncParams::Changed(changed) => {
                for change in changed.changes.iter() {
                    let r = &change.range;
                    let start = TextPosition::new(r.start_line as usize, r.start_column as usize);
                    let end = TextPosition::new(r.end_line as usize, r.end_column as usize);
                    let res = self.code_buffer.edit(&start, &end, change.text.as_str());
                    if let Err(err) = res {
                        warn!("invalid edit: err = {:?}, change = {:?}", err, change);
                    }
                }
            }
            EditorSyncParams::Cursor(cursor) => {
                // Stamp the sender id so peers attribute the cursor correctly.
                cursor.peer_id = client_id;
            }
        };
        let msg = ClientResponses::Editor(payload);
        self.broadcast_excludes(ClientEvents::Packet(msg), vec![client_id]);
        Ok(())
    }

    /// Handle a command packet (reset / run / set-language) by forwarding a
    /// request to the runner proxy. Errors when the runner or run env is not
    /// ready.
    pub fn on_command(
        &mut self,
        ctx: &mut <Self as Actor>::Context,
        client_id: u32,
        payload: CommandRequestParams,
    ) -> Fallible<()> {
        info!("on_command: client = {}, msg = {:?}", client_id, payload);
        let runner_proxy_addr = self
            .runner_proxy_addr
            .as_ref()
            // `ok_or_else` so the error is only built on the failure path.
            .ok_or_else(|| err_msg("runner not ready"))?
            .clone();
        let msg = match payload {
            CommandRequestParams::Reset() => match self.room_run_env.as_ref() {
                None => {
                    warn!("room run env not set: {}", self.room_key);
                    return Err(err_msg("run_env not set"));
                }
                Some(run_env) => proto::ServiceRequests::Reset(run_env.clone()),
            },
            CommandRequestParams::RunCode(content) => match self.room_run_env.as_ref() {
                None => {
                    warn!("room run env not found: {}", self.room_key);
                    return Err(err_msg("run_env not set"));
                }
                Some(run_env) => proto::ServiceRequests::Run(proto::Code {
                    id: 1,
                    language: run_env.language.clone(),
                    filename: "source".to_owned(),
                    content,
                }),
            },
            CommandRequestParams::SetLang(lang) => {
                self.update_room_language(lang.as_str());
                // broadcast to clients
                let msg_lang =
                    ClientResponses::Command(CommandResponseParams::SetLang(lang.clone()));
                self.broadcast_all(ClientEvents::Packet(msg_lang));
                // Was an `unwrap()` that panicked when no run env was set;
                // now reports the same error as the other arms.
                let run_env = self
                    .room_run_env
                    .as_ref()
                    .ok_or_else(|| err_msg("run_env not set"))?;
                proto::ServiceRequests::Reset(run_env.clone())
            }
        };
        // Fire-and-forget: the proxy's reply is only logged.
        runner_proxy_addr
            .send(msg)
            .into_actor(self)
            .then(|res, act, context| {
                debug!("runner proxy response = {:?}", res);
                fut::ok(())
            })
            .spawn(ctx);
        Ok(())
    }

    /// Record a language change on the run env and (when a pad exists)
    /// persist it via the API executor.
    fn update_room_language(&mut self, language: &str) {
        // save run_env; clearing `boot` forces re-initialization on next run
        if let Some(run_env) = self.room_run_env.as_mut() {
            run_env.language = language.to_owned();
            run_env.boot = None;
        }
        // save language to pad
        if let Some(pad) = self.pad.as_mut() {
            let pad: &mut db::Pad = pad;
            pad.language = language.to_owned();
            let req = api::ReqUpdatePad {
                pad_id: pad.id,
                changeset: db::PadChangeset {
                    language: Some(language.to_owned()),
                    status: None,
                    title: None,
                },
            };
            api::api_executor().do_send(req);
        }
    }

    /// Forward a terminal packet (resize / stdin) to the runner proxy.
    pub fn on_terminal(
        &mut self,
        ctx: &mut <Self as Actor>::Context,
        client_id: u32,
        payload: TerminalRequestParams,
    ) -> Fallible<()> {
        info!("on_terminal: client = {}, msg = {:?}", client_id, payload);
        let runner_proxy_addr = self
            .runner_proxy_addr
            .as_ref()
            .ok_or_else(|| err_msg("runner not ready"))?;
        let msg = match payload {
            TerminalRequestParams::SetSize(row, col) => {
                proto::ServiceRequests::WinSize(proto::WinSize { row, col })
            }
            TerminalRequestParams::Stdin(input) => proto::ServiceRequests::Stdin(input),
        };
        runner_proxy_addr
            .send(msg)
            .into_actor(self)
            .then(|res, act, context| {
                debug!("runner proxy response = {:?}", res);
                fut::ok(())
            })
            .spawn(ctx);
        Ok(())
    }
}
impl Handler<MsgRunnerEvent> for Room {
    type Result = MessageResult<MsgRunnerEvent>;

    /// Fan runner output out to all clients, caching it first so that late
    /// joiners can be replayed the terminal history.
    fn handle(&mut self, msg: MsgRunnerEvent, ctx: &mut Self::Context) -> Self::Result {
        match msg {
            MsgRunnerEvent::Stdout(payload) => {
                self.push_terminal_stdout(&payload);
                let value = TerminalResponseParams::Stdout(payload);
                let msg = ClientResponses::Terminal(value);
                self.broadcast_all(ClientEvents::Packet(msg));
            }
        }
        MessageResult(())
    }
}
impl Room {
    /// Append runner stdout to the bounded scrollback cache.
    ///
    /// Output is coalesced into lines: the payload is appended to the last
    /// cached entry unless that entry is already closed (ends with '\n') or
    /// has grown past 256 bytes, in which case a new entry is started. The
    /// cache is capped at `cache_lines` entries (FIFO eviction).
    ///
    /// Takes `&str` (was `&String`); existing `&String` call sites coerce.
    fn push_terminal_stdout(&mut self, payload: &str) {
        match self.terminal_stdout.back_mut() {
            None => self.terminal_stdout.push_back(payload.to_owned()),
            Some(last) => {
                // Start a fresh entry when the current one is full or closed;
                // otherwise keep accumulating into it. (The two push_back
                // branches were duplicated; merged into one condition.)
                if last.len() >= 256 || last.ends_with('\n') {
                    self.terminal_stdout.push_back(payload.to_owned());
                } else {
                    last.push_str(payload);
                }
            }
        }
        let cache_lines = Self::room_config().cache_lines;
        if self.terminal_stdout.len() > cache_lines {
            self.terminal_stdout.pop_front();
        }
    }
}
|
use crate::{FunctionData, Instruction, Value, Location, Set};
/// Action computed for a single instruction while propagating `undefined`.
enum Prop {
    /// Replace the instruction with an alias to this known value.
    Value(Value),
    /// The instruction's output becomes undefined.
    Undef,
    /// Remove the instruction (replace with a no-op).
    Nop,
    /// Leave the instruction untouched.
    None,
}
/// Optimization pass that folds and propagates `undefined` values.
pub struct UndefinedPropagatePass;
impl super::Pass for UndefinedPropagatePass {
    fn name(&self) -> &str {
        "undefined propagation"
    }

    fn time(&self) -> crate::timing::TimedBlock {
        crate::timing::undefined_propagate()
    }

    /// Walk every reachable block and rewrite instructions whose inputs are
    /// undefined. Returns true when at least one instruction changed.
    fn run_on_function(&self, function: &mut FunctionData) -> bool {
        let mut did_something = false;
        // Values that became undefined during this run, in addition to the
        // ones `function` already knows about.
        let mut undefined = Set::default();
        // Propagate undefined values.
        //
        // v5 = select u1 v4, u32 v2, undefined
        // Becomes:
        // v5 = alias v2
        //
        // v3 = add u32 v1, undefined
        // Becomes:
        // v3 = undefined
        macro_rules! is_undefined {
            ($value: expr) => {
                function.is_value_undefined($value) || undefined.contains(&$value)
            }
        }
        for label in function.reachable_labels() {
            let mut body = &function.blocks[&label];
            let body_len = body.len();
            for inst_id in 0..body_len {
                let instruction = &body[inst_id];
                // Decide what (if anything) to do with this instruction.
                let mut prop = Prop::None;
                match instruction {
                    Instruction::ArithmeticUnary { .. } | Instruction::ArithmeticBinary { .. } |
                    Instruction::IntCompare { .. } | Instruction::Load { .. } |
                    Instruction::GetElementPtr { .. } | Instruction::Cast { .. } => {
                        // If any input is undefined then whole result is undefined.
                        for input in instruction.read_values() {
                            if is_undefined!(input) {
                                prop = Prop::Undef;
                                break;
                            }
                        }
                    }
                    Instruction::Store { .. } => {
                        // If any input is undefined then remove the store.
                        for input in instruction.read_values() {
                            if is_undefined!(input) {
                                prop = Prop::Nop;
                                break;
                            }
                        }
                    }
                    Instruction::Select { on_true, on_false, .. } => {
                        // If there is a single non-undefined value then select
                        // becomes alias to that non-undefined value.
                        if is_undefined!(*on_true) {
                            prop = Prop::Value(*on_false);
                        }
                        if is_undefined!(*on_false) {
                            prop = Prop::Value(*on_true);
                        }
                    }
                    _ => {}
                }
                let replacement = match prop {
                    Prop::None => None,
                    Prop::Nop => Some(Instruction::Nop),
                    Prop::Value(value) => {
                        let output = instruction.created_value().unwrap();
                        Some(Instruction::Alias {
                            dst: output,
                            value,
                        })
                    }
                    Prop::Undef => {
                        let output = instruction.created_value().unwrap();
                        let ty = function.value_type(output);
                        let undef = function.undefined_value(ty);
                        undefined.insert(output);
                        // Re-establish the shared borrow of the block after
                        // the calls on `function` above.
                        body = &function.blocks[&label];
                        Some(Instruction::Alias {
                            dst: output,
                            value: undef,
                        })
                    }
                };
                if let Some(replacement) = replacement {
                    let location = Location::new(label, inst_id);
                    *function.instruction_mut(location) = replacement;
                    // Re-establish the borrow after the mutable access above.
                    body = &function.blocks[&label];
                    did_something = true;
                }
            }
        }
        did_something
    }
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Compile-fail (UI) test: `#[repr(transparent)]` is mutually exclusive with
// every other repr hint. Each `//~ ERROR` comment is an expected-diagnostic
// marker checked by the test harness — keep them on the lines they annotate.
#![feature(repr_align, attr_literals)]
// See also repr-transparent.rs
#[repr(transparent, C)] //~ ERROR cannot have other repr
struct TransparentPlusC {
    ptr: *const u8
}
#[repr(transparent, packed)] //~ ERROR cannot have other repr
struct TransparentPlusPacked(*const u8);
#[repr(transparent, align(2))] //~ ERROR cannot have other repr
struct TransparentPlusAlign(u8);
// The conflict must also be detected across separate attributes, not just
// within one `#[repr(...)]` list.
#[repr(transparent)] //~ ERROR cannot have other repr
#[repr(C)]
struct SeparateAttributes(*mut u8);
|
use std::borrow::Cow;
use std::convert::TryFrom;
use byteorder::{ByteOrder, LittleEndian};
use time::{Date, OffsetDateTime, PrimitiveDateTime, Time, UtcOffset};
use crate::decode::Decode;
use crate::encode::Encode;
use crate::io::{Buf, BufMut};
use crate::mysql::protocol::TypeId;
use crate::mysql::type_info::MySqlTypeInfo;
use crate::mysql::{MySql, MySqlData, MySqlValue};
use crate::types::Type;
impl Type<MySql> for OffsetDateTime {
    // Timezone-aware datetimes map to MySQL TIMESTAMP.
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::TIMESTAMP)
    }
}
impl Encode<MySql> for OffsetDateTime {
    /// Encode by normalizing to UTC and delegating to the
    /// `PrimitiveDateTime` binary encoder.
    fn encode(&self, buf: &mut Vec<u8>) {
        let utc = self.to_offset(UtcOffset::UTC);
        let naive = PrimitiveDateTime::new(utc.date(), utc.time());
        Encode::<MySql>::encode(&naive, buf);
    }
}
impl<'de> Decode<'de, MySql> for OffsetDateTime {
    /// Decode the naive datetime and interpret it as UTC.
    fn decode(value: MySqlValue<'de>) -> crate::Result<Self> {
        Decode::<MySql>::decode(value).map(|naive: PrimitiveDateTime| naive.assume_utc())
    }
}
impl Type<MySql> for Time {
    // Time-of-day values map to MySQL TIME.
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::TIME)
    }
}
impl Encode<MySql> for Time {
    /// Encode in the MySQL binary TIME layout: length byte, sign byte,
    /// a 4-byte day field, then H/M/S and optional microseconds.
    fn encode(&self, buf: &mut Vec<u8>) {
        // Payload length, excluding the length byte itself.
        let len = Encode::<MySql>::size_hint(self) - 1;
        buf.push(len as u8);
        // Time is not negative
        buf.push(0);
        // "date on 4 bytes little-endian format" (?)
        // https://mariadb.com/kb/en/resultset-row/#timestamp-binary-encoding
        buf.advance(4);
        // Microseconds are present only in the long (12-byte) form.
        encode_time(self, len > 9, buf);
    }

    fn size_hint(&self) -> usize {
        // Values include the leading length byte.
        if self.nanosecond() == 0 {
            // if micro_seconds is 0, length is 8 and micro_seconds is not sent
            9
        } else {
            // otherwise length is 12
            13
        }
    }
}
impl<'de> Decode<'de, MySql> for Time {
    /// Decode TIME from either the binary protocol (length-prefixed packet)
    /// or the text protocol (string parse with nanosecond zero-padding).
    fn decode(value: MySqlValue<'de>) -> crate::Result<Self> {
        match value.try_get()? {
            MySqlData::Binary(mut buf) => {
                // data length, expecting 8 or 12 (fractional seconds)
                let len = buf.get_u8()?;
                // is negative : int<1>
                let is_negative = buf.get_u8()?;
                assert_eq!(is_negative, 0, "Negative dates/times are not supported");
                // "date on 4 bytes little-endian format" (?)
                // https://mariadb.com/kb/en/resultset-row/#timestamp-binary-encoding
                buf.advance(4);
                // `len` counts the payload; 5 bytes (sign + 4-byte date
                // field) have been consumed already.
                decode_time(len - 5, buf)
            }
            MySqlData::Text(buf) => {
                let s = from_utf8(buf).map_err(crate::Error::decode)?;
                // If there are less than 9 digits after the decimal point
                // We need to zero-pad
                // TODO: Ask [time] to add a parse % for less-than-fixed-9 nanos
                let s = if s.len() < 20 {
                    Cow::Owned(format!("{:0<19}", s))
                } else {
                    Cow::Borrowed(s)
                };
                Time::parse(&*s, "%H:%M:%S.%N").map_err(crate::Error::decode)
            }
        }
    }
}
impl Type<MySql> for Date {
    // Calendar dates map to MySQL DATE.
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::DATE)
    }
}
impl Encode<MySql> for Date {
    /// Encode as the 4-byte binary DATE form, prefixed with its length.
    fn encode(&self, buf: &mut Vec<u8>) {
        buf.push(4);
        encode_date(self, buf);
    }

    /// 1 length byte + 4 payload bytes.
    fn size_hint(&self) -> usize {
        5
    }
}
impl<'de> Decode<'de, MySql> for Date {
    /// Decode DATE from binary (skip the length byte, then y/m/d) or text.
    fn decode(value: MySqlValue<'de>) -> crate::Result<Self> {
        match value.try_get()? {
            // buf[0] is the length prefix; the date payload starts at 1.
            MySqlData::Binary(buf) => decode_date(&buf[1..]),
            MySqlData::Text(buf) => {
                let s = from_utf8(buf).map_err(crate::Error::decode)?;
                Date::parse(s, "%Y-%m-%d").map_err(crate::Error::decode)
            }
        }
    }
}
impl Type<MySql> for PrimitiveDateTime {
    // Naive (no timezone) datetimes map to MySQL DATETIME.
    fn type_info() -> MySqlTypeInfo {
        MySqlTypeInfo::new(TypeId::DATETIME)
    }
}
impl Encode<MySql> for PrimitiveDateTime {
    /// Encode DATETIME in its compressed binary form: the length byte is
    /// followed only by the components that are needed (see `size_hint`).
    fn encode(&self, buf: &mut Vec<u8>) {
        let len = Encode::<MySql>::size_hint(self) - 1;
        buf.push(len as u8);
        encode_date(&self.date(), buf);
        // Time components exist only in the 7- and 11-byte forms.
        if len > 4 {
            encode_time(&self.time(), len > 8, buf);
        }
    }

    fn size_hint(&self) -> usize {
        // to save space the packet can be compressed:
        match (self.hour(), self.minute(), self.second(), self.nanosecond()) {
            // if hour, minutes, seconds and micro_seconds are all 0,
            // length is 4 and no other field is sent
            (0, 0, 0, 0) => 5,
            // if micro_seconds is 0, length is 7
            // and micro_seconds is not sent
            (_, _, _, 0) => 8,
            // otherwise length is 11
            (_, _, _, _) => 12,
        }
    }
}
impl<'de> Decode<'de, MySql> for PrimitiveDateTime {
    /// Decode DATETIME from the binary protocol (compressed length-prefixed
    /// form) or the text protocol (string parse with nano zero-padding).
    fn decode(value: MySqlValue<'de>) -> crate::Result<Self> {
        match value.try_get()? {
            MySqlData::Binary(buf) => {
                let len = buf[0];
                let date = decode_date(&buf[1..])?;
                // Time bytes are present only when the length exceeds the
                // 4-byte date-only form; otherwise midnight is implied.
                let dt = if len > 4 {
                    date.with_time(decode_time(len - 4, &buf[5..])?)
                } else {
                    date.midnight()
                };
                Ok(dt)
            }
            MySqlData::Text(buf) => {
                let s = from_utf8(buf).map_err(crate::Error::decode)?;
                // If there are less than 9 digits after the decimal point
                // We need to zero-pad
                // TODO: Ask [time] to add a parse % for less-than-fixed-9 nanos
                let s = if s.len() < 31 {
                    if s.contains('.') {
                        Cow::Owned(format!("{:0<30}", s))
                    } else {
                        Cow::Owned(format!("{}.000000000", s))
                    }
                } else {
                    Cow::Borrowed(s)
                };
                PrimitiveDateTime::parse(&*s, "%Y-%m-%d %H:%M:%S.%N").map_err(crate::Error::decode)
            }
        }
    }
}
/// Write a `Date` in the MySQL binary wire layout:
/// little-endian u16 year, then month byte and day byte.
fn encode_date(date: &Date, buf: &mut Vec<u8>) {
    // MySQL supports years from 1000 - 9999; anything outside u16 is fatal.
    let year = match u16::try_from(date.year()) {
        Ok(year) => year,
        Err(_) => panic!("Date out of range for Mysql: {}", date),
    };
    buf.extend_from_slice(&year.to_le_bytes());
    buf.push(date.month());
    buf.push(date.day());
}
/// Read a `Date` from the MySQL binary wire layout
/// (little-endian u16 year, then month byte and day byte).
fn decode_date(buf: &[u8]) -> crate::Result<Date> {
    Date::try_from_ymd(
        LittleEndian::read_u16(buf) as i32,
        // `buf` is `&[u8]`, so indexing already yields `u8`; the previous
        // `as u8` casts were redundant.
        buf[2],
        buf[3],
    )
    .map_err(|e| decode_err!("Error while decoding Date: {}", e))
}
/// Append the H/M/S (and optionally microseconds) tail of a binary
/// time encoding; the caller decides whether micros are included.
fn encode_time(time: &Time, include_micros: bool, buf: &mut Vec<u8>) {
    buf.push(time.hour());
    buf.push(time.minute());
    buf.push(time.second());
    if include_micros {
        // Nanoseconds truncated to whole microseconds, little-endian u32.
        buf.put_u32::<LittleEndian>((time.nanosecond() / 1000) as u32);
    }
}
/// Decode the time tail of a binary TIME/DATETIME value. `len` is the number
/// of payload bytes remaining; anything beyond H/M/S is the little-endian
/// microsecond field.
fn decode_time(len: u8, mut buf: &[u8]) -> crate::Result<Time> {
    let hour = buf.get_u8()?;
    let minute = buf.get_u8()?;
    let seconds = buf.get_u8()?;
    let micros = if len > 3 {
        // microseconds : int<EOF>
        buf.get_uint::<LittleEndian>(buf.len())?
    } else {
        0
    };
    Time::try_from_hms_micro(hour, minute, seconds, micros as u32)
        .map_err(|e| decode_err!("Time out of range for MySQL: {}", e))
}
use std::str::from_utf8;
#[cfg(test)]
use time::{date, time};
/// Round-trip encoding for the 11-, 7- and 4-byte DATETIME forms.
#[test]
fn test_encode_date_time() {
    let mut buf = Vec::new();
    // test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html
    let date = PrimitiveDateTime::new(date!(2010 - 10 - 17), time!(19:27:30.000001));
    Encode::<MySql>::encode(&date, &mut buf);
    assert_eq!(*buf, [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0]);
    buf.clear();
    let date = PrimitiveDateTime::new(date!(2010 - 10 - 17), time!(19:27:30));
    Encode::<MySql>::encode(&date, &mut buf);
    assert_eq!(*buf, [7, 218, 7, 10, 17, 19, 27, 30]);
    buf.clear();
    let date = PrimitiveDateTime::new(date!(2010 - 10 - 17), time!(00:00:00));
    Encode::<MySql>::encode(&date, &mut buf);
    assert_eq!(*buf, [4, 218, 7, 10, 17]);
}

/// Decoding of the same three binary DATETIME forms.
#[test]
fn test_decode_date_time() {
    // test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html
    let buf = [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0];
    let date1 = <PrimitiveDateTime as Decode<MySql>>::decode(MySqlValue::binary(
        MySqlTypeInfo::default(),
        &buf,
    ))
    .unwrap();
    assert_eq!(date1.to_string(), "2010-10-17 19:27:30.000001");
    let buf = [7, 218, 7, 10, 17, 19, 27, 30];
    let date2 = <PrimitiveDateTime as Decode<MySql>>::decode(MySqlValue::binary(
        MySqlTypeInfo::default(),
        &buf,
    ))
    .unwrap();
    assert_eq!(date2.to_string(), "2010-10-17 19:27:30");
    let buf = [4, 218, 7, 10, 17];
    let date3 = <PrimitiveDateTime as Decode<MySql>>::decode(MySqlValue::binary(
        MySqlTypeInfo::default(),
        &buf,
    ))
    .unwrap();
    assert_eq!(date3.to_string(), "2010-10-17 0:00");
}

/// DATE encoding: 1 length byte + LE year + month + day.
#[test]
fn test_encode_date() {
    let mut buf = Vec::new();
    let date: Date = date!(2010 - 10 - 17);
    Encode::<MySql>::encode(&date, &mut buf);
    assert_eq!(*buf, [4, 218, 7, 10, 17]);
}

/// DATE decoding of the same wire bytes.
#[test]
fn test_decode_date() {
    let buf = [4, 218, 7, 10, 17];
    let date = <Date as Decode<MySql>>::decode(MySqlValue::binary(MySqlTypeInfo::default(), &buf))
        .unwrap();
    assert_eq!(date, date!(2010 - 10 - 17));
}
|
use std::any::type_name;
use std::fmt::{Formatter, Display, Result};
/// Swap the elements of an `(i32, &str)` pair.
fn reverse(pair: (i32, &'static str)) -> (&'static str, i32) {
    // Tail expression instead of an explicit `return`.
    (pair.1, pair.0)
}
/// Swap the elements of an `(i32, &str)` pair (positional-access variant).
fn reverse2(pair: (i32, &'static str)) -> (&'static str, i32) {
    // Tuple indexing instead of destructuring into named parts.
    (pair.1, pair.0)
}
/// Transpose a 2x2 `Matrix` by swapping its off-diagonal entries.
fn transpose(m: Matrix) -> Matrix {
    // Tuple-field access instead of pattern destructuring.
    Matrix(m.0, m.2, m.1, m.3)
}
// named tuple
// named tuple: a 2x2 matrix stored as (m00, m01, m10, m11).
#[derive(Debug)]
struct Matrix(f32, f32, f32, f32);
impl Display for Matrix {
    /// Render the matrix as two lines: "(a),(b)" then "(c),(d)".
    fn fmt(&self, f: &mut Formatter) -> Result {
        let Matrix(a, b, c, d) = self;
        writeln!(f, "({a}),({b})")?;
        writeln!(f, "({c}),({d})")
    }
}
// Tour of tuple features: indexing, nesting, reversing, one-element tuples,
// destructuring, and a tuple struct with a custom Display.
fn main() {
    let long_tuple = (1i32, 'a', "test", true);
    // Tuple members are accessed by position with `.N`.
    println!("{}", long_tuple.0);
    println!("{}", long_tuple.1);
    println!("{}", long_tuple.2);
    println!("{}", long_tuple.3);
    // Tuples can nest other tuples.
    let tupa_tupe = ((1.0f32, 1.1f32), (1i32, 2i32), "rounded");
    println!("{tupa_tupe:?}");
    let pair = (1, "cheese");
    println!("{pair:?}");
    let reversed = reverse(pair);
    println!("reversed: {reversed:?}");
    let reversed2 = reverse2(pair);
    println!("reversed: {reversed2:?}");
    // one size tuple: the trailing comma is what distinguishes a 1-tuple
    // from a plain parenthesized expression.
    let solo = (5i32,);
    let just_integer = (5i32);
    println!("{}", type_of(solo));
    println!("{}", type_of(just_integer));
    let tuple = (1, "hello", 4.5, true);
    // destructure
    let (a, b, c, d) = tuple;
    println!("{:?}, {:?}, {:?}, {:?}", a, b, c, d);
    let matrix = Matrix(1.1, 1.2, 2.1, 2.2);
    println!("{:}", matrix);
    println!("Matrix:\n{}", matrix);
    println!("Transpose:\n{}", transpose(matrix));
}
/// Return the compiler's name for the type of the given value; the argument
/// is consumed only to drive type inference.
fn type_of<T>(_: T) -> &'static str {
    std::any::type_name::<T>()
}
use std::cmp;
use std::io::Read;
/// Read a graph of `n` politicians and `m` friendship relations from stdin
/// and print the size of the largest clique (only groups of size >= 2
/// count; prints 0 when no such group exists).
fn main() {
    let mut buf = String::new();
    // Read all of stdin into one buffer.
    std::io::stdin().read_to_string(&mut buf).unwrap();
    // Whitespace-separated token iterator over the input.
    let mut iter = buf.split_whitespace();
    let politician_number: usize = iter.next().unwrap().parse().unwrap();
    let relation_number: usize = iter.next().unwrap().parse().unwrap();
    // Adjacency matrix sized to the actual input
    // (was a hard-coded 12x12, which silently assumed n <= 12).
    let mut adjacency = vec![vec![0; politician_number]; politician_number];
    for _ in 0..relation_number {
        let x: usize = iter.next().unwrap().parse().unwrap();
        let y: usize = iter.next().unwrap().parse().unwrap();
        // Input is 1-based; the matrix is 0-based and symmetric.
        adjacency[x - 1][y - 1] = 1;
        adjacency[y - 1][x - 1] = 1;
    }
    // Enumerate every subset of the politicians via bitmasks.
    let mut combinations = vec![];
    for i in 0..2usize.pow(politician_number as u32) {
        let mut combination = vec![];
        for j in 0..politician_number {
            // Is bit j set in mask i?
            if ((i >> j) & 1) == 1 {
                combination.push(j);
            }
        }
        // Only groups of at least two people qualify.
        if combination.len() > 1 {
            combinations.push(combination);
        }
    }
    let mut result = 0;
    for combination in combinations {
        // The subset is a clique iff every unordered pair is connected.
        let mut check_results = vec![];
        for p in get_pattern(&combination) {
            let x = combination[p[0]];
            let y = combination[p[1]];
            check_results.push(adjacency[x][y] == 1 && adjacency[y][x] == 1);
        }
        // `all(|&r| r)` instead of comparing a bool with `== true`.
        if check_results.iter().all(|&r| r) {
            result = cmp::max(combination.len(), result);
        }
    }
    println!("{}", result);
}
/// Return every 2-element index pair `[i, j]` (i < j) into `comb`, i.e. all
/// pairs needed to test pairwise connectivity. Only the length of `comb` is
/// used.
///
/// Accepts any slice (was `&Vec<usize>`); existing `&Vec` callers coerce.
fn get_pattern(comb: &[usize]) -> Vec<Vec<usize>> {
    let mut results: Vec<Vec<usize>> = vec![];
    // Enumerate all bit subsets of the positions and keep those of size 2.
    for i in 0..2usize.pow(comb.len() as u32) {
        let mut result = vec![];
        for j in 0..comb.len() {
            // Is bit j set in mask i?
            if ((i >> j) & 1) == 1 {
                result.push(j);
            }
        }
        if result.len() == 2 {
            results.push(result);
        }
    }
    // Tail expression instead of `return results;`.
    results
}
|
#[path = "spawn_monitor_1/with_function.rs"]
pub mod with_function;
// `without_function_errors_badarg` in unit tests
|
pub mod controller;
pub mod models;
pub mod routes;
pub mod service;
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Machine-generated Windows Metadata bindings — edits restricted to comments.
pub type CompositeTransform3D = *mut ::core::ffi::c_void;
/// 4x4 matrix of f64; the translation components are exposed as
/// OffsetX/OffsetY/OffsetZ in place of M41/M42/M43.
#[repr(C)]
pub struct Matrix3D {
    pub M11: f64,
    pub M12: f64,
    pub M13: f64,
    pub M14: f64,
    pub M21: f64,
    pub M22: f64,
    pub M23: f64,
    pub M24: f64,
    pub M31: f64,
    pub M32: f64,
    pub M33: f64,
    pub M34: f64,
    pub OffsetX: f64,
    pub OffsetY: f64,
    pub OffsetZ: f64,
    pub M44: f64,
}
// Plain-old-data struct: a bitwise copy is the correct clone.
impl ::core::marker::Copy for Matrix3D {}
impl ::core::clone::Clone for Matrix3D {
    fn clone(&self) -> Self {
        *self
    }
}
// Opaque runtime-class handles (void pointers).
pub type Matrix3DHelper = *mut ::core::ffi::c_void;
pub type PerspectiveTransform3D = *mut ::core::ffi::c_void;
pub type Transform3D = *mut ::core::ffi::c_void;
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Run-pass test for raw identifiers (`r#...`) in attribute and path
// positions: `r#repr`, `r#C`, `r#packed`, `r#derive`, `r#Debug` must behave
// exactly like their unprefixed forms.
#![feature(raw_identifiers)]
use std::mem;
#[r#repr(r#C, r#packed)]
struct Test {
    a: bool, b: u64
}
#[r#derive(r#Debug)]
struct Test2(u32);
pub fn main() {
    // packed C layout: 1 (bool) + 8 (u64) with no padding = 9 bytes.
    assert_eq!(mem::size_of::<Test>(), 9);
    assert_eq!("Test2(123)", format!("{:?}", Test2(123)));
}
|
use image::ImageResult;
use std::env;
/// Parse a decimal string into a `u32`; `None` if it is not a valid number.
fn to_u32(v: String) -> Option<u32> {
    v.parse::<u32>().ok()
}
/// CLI: `<input> <width> <height> <output>` — load an image, shrink it to
/// fit within width x height (aspect ratio preserved), and save it.
fn main() -> ImageResult<()> {
    let mut args = env::args().skip(1);
    // Fail fast with descriptive messages on missing/invalid arguments
    // instead of bare `unwrap` panics.
    let file = args.next().expect("usage: <input> <width> <height> <output>");
    let w = args
        .next()
        .and_then(to_u32)
        .expect("width must be a positive integer");
    let h = args
        .next()
        .and_then(to_u32)
        .expect("height must be a positive integer");
    let dest = args.next().expect("missing output path");
    let img = image::open(file)?;
    let img_n = img.thumbnail(w, h);
    img_n.save(dest)?;
    Ok(())
}
|
// Compile the README's fenced code examples as doctests so the docs stay in
// sync with the crate's API.
use doc_comment::doctest;
doctest!("../../README.md");
|
// xfail-stage0
// -*- rust -*-
use std;
import std.rand;
// NOTE(review): pre-1.0 Rust syntax (`import`, `auto`, `log`, type-first
// `let` declarations) — this will not compile with any modern toolchain and
// is preserved verbatim. It exercises two independently-seeded RNG handles,
// one created in an inner scope.
fn main() {
    let rand.rng r1 = rand.mk_rng();
    log r1.next();
    log r1.next();
    {
        auto r2 = rand.mk_rng();
        log r1.next();
        log r2.next();
        log r1.next();
        log r1.next();
        log r2.next();
        log r2.next();
        log r1.next();
        log r1.next();
        log r1.next();
        log r2.next();
        log r2.next();
        log r2.next();
    }
    log r1.next();
    log r1.next();
}
|
use super::BufferInfo;
use super::TextureInfo;
/// Coarse usage classes a texture can be in between passes.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CommonTextureUsage {
    /// Sampled in a shader.
    Sample,
    /// Read as a storage image.
    StorageRead,
    /// Source of a multisample resolve.
    ResolveSrc,
    /// Source of a blit.
    BlitSrc,
    /// Depth read-only access.
    DepthRead
}
/// Description of a texture resource plus the usage it is expected to hold
/// outside of pass-specific states.
pub struct TextureResourceInfo {
    pub texture_info: TextureInfo,
    pub common_usage: CommonTextureUsage
}

/// Marker trait for texture-backed resources.
pub trait TextureResource {}

/// Description of a buffer resource.
pub struct BufferResourceInfo {
    pub buffer_info: BufferInfo
}

/// Marker trait for buffer-backed resources.
pub trait BufferResource {}
|
use super::error::Error;
use super::events::{validate_message, validate_thread};
use super::http_multipart::extract_file;
use super::image;
use super::limits::{Limits, LIMITS};
use data::{Config, FullThread, NewMessage, NewThread, Threads};
use db::Db;
use rocket::fairing::AdHoc;
use rocket::http::ContentType;
use rocket::{Data, State};
use rocket_contrib::json::{Json, JsonValue};
use std::path::PathBuf;
use tripcode::file_sha512;
/// GET /threads — list threads, filtered by at most one of `before`
/// (timestamp) or `offset`; `limit` defaults to 100.
///
/// NOTE(review): `after` and `tag` are parsed from the query string but no
/// match arm below uses them — supplying `after` always yields
/// `Error::ParamComb` and `tag` is ignored entirely. Confirm intended.
#[get("/threads?<before>&<after>&<offset>&<limit>&<tag>")]
fn threads_list(
    db: Db,
    before: Option<u32>, // timestamp
    after: Option<u32>, // timestamp
    offset: Option<u32>,
    limit: Option<u32>,
    tag: Option<String>,
) -> Result<Json<Threads>, Error> {
    let limit = limit.unwrap_or(100);
    // Exactly one filter mode may be active; anything else is an error.
    let resp = match (before, after, offset) {
        (None, None, None) => db.get_threads_before(0, limit),
        (Some(ts), None, None) => db.get_threads_before(ts, limit),
        (None, None, Some(offset)) => db.get_threads_offset(offset, limit),
        _ => return Err(Error::ParamComb),
    };
    Ok(Json(resp))
}
/// GET /threads/<id> — fetch one thread with its messages; 404 when absent.
#[get("/threads/<id>")]
fn thread_id(db: Db, id: i32) -> Option<Json<FullThread>> {
    db.get_thread(id).map(Json)
}
/// POST /threads — validate and create a new thread; replies with an empty
/// JSON object on success.
#[post("/threads", format = "json", data = "<thr>")]
fn thread_new(db: Db, thr: Json<NewThread>) -> Result<&'static str, Error> {
    let thr = validate_thread(thr.0)?;
    db.new_thread(thr);
    Ok("{}")
}
/// POST /threads/<id> — validate and append a reply to a thread.
///
/// NOTE(review): the success type is `Error` (`Result<Error, Error>`) —
/// presumably `reply_thread` returns a status-like `Error` value; confirm.
#[post("/threads/<id>", format = "json", data = "<msg>")]
fn thread_reply(
    db: Db,
    id: i32,
    msg: Json<NewMessage>,
) -> Result<Error, Error> {
    let msg = validate_message(msg.0)?;
    Ok(db.reply_thread(id, msg))
}
#[post("/upload", format = "multipart", data = "<data>")]
fn api_post_upload(
db: Db,
config: State<Config>,
cont_type: &ContentType,
data: Data,
) -> Result<JsonValue, Error> {
let (_temp_dir, fname) = extract_file(cont_type, data, &config.tmp_dir)?;
let hash = file_sha512(&fname).ok_or_else(|| Error::Upload("ok"))?;
if !db.have_file(&hash) {
let info = image::get_info(&fname)
.ok_or_else(|| Error::Upload("Cant parse file"))?;
let (thumb_fname, thumb_type) = image::make_thumb(&fname)
.ok_or_else(|| Error::Upload("Cant generate_thumb"))?;
std::fs::rename(
fname,
config
.files_dir
.join(format!("{}.{}", &hash, info.type_.ext())),
)
.map_err(|_| Error::Upload("Can't rename"))?;
std::fs::rename(
thumb_fname,
config
.thumbs_dir
.join(format!("{}.{}", &hash, thumb_type.ext())),
)
.map_err(|_| Error::Upload("Can't rename"))?;
db.add_file(info, &hash);
}
Ok(json!({ "id": &hash }))
}
/// DELETE /threads/<id> — delete a whole thread, guarded by its password;
/// the returned `Error` doubles as a status value.
#[delete("/threads/<id>?<password>")]
fn api_delete_thread(db: Db, id: i32, password: String) -> Error {
    db.delete_thread(id, password)
}
/// DELETE /threads/<id>/replies/<no> — delete a single reply by its number
/// within the thread, guarded by its password.
#[delete("/threads/<id>/replies/<no>?<password>")]
fn api_delete_thread_reply(
    db: Db,
    id: i32,
    no: i32,
    password: String,
) -> Error {
    db.delete_message(id, no, password)
}
/// GET /limits — static upload/posting limits for clients.
#[get("/limits")]
fn limits() -> Json<Limits> {
    Json(LIMITS)
}
/// Build and launch the Rocket application: database fairing, storage
/// directories pulled from the Rocket config, and all API routes.
pub fn start() {
    rocket::ignite()
        .attach(Db::fairing())
        .attach(AdHoc::on_attach("Config", |rocket| {
            // Resolve the three storage directories from the config;
            // missing keys panic at startup via `unwrap`.
            let config = {
                let c = &rocket.config();
                Config {
                    tmp_dir: PathBuf::from(
                        c.get_str("tmp_dir").unwrap().to_string(),
                    ),
                    thumbs_dir: PathBuf::from(
                        c.get_str("thumbs_dir").unwrap().to_string(),
                    ),
                    files_dir: PathBuf::from(
                        c.get_str("files_dir").unwrap().to_string(),
                    ),
                }
            };
            Ok(rocket.manage(config))
        }))
        .mount(
            "/",
            routes![
                api_delete_thread,
                api_delete_thread_reply,
                api_post_upload,
                limits,
                thread_id,
                thread_new,
                thread_reply,
                threads_list,
            ],
        )
        .launch();
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(clippy::uninlined_format_args)]
//! sled_store implement a key-value like store backed by sled::Tree.
//!
//! It is used by raft for log and state machine storage.
pub use bytes_error::SledBytesError;
pub use db::get_sled_db;
pub use db::init_sled_db;
pub use db::init_temp_sled_db;
pub use openraft;
pub use sled;
pub use sled_iter::iter;
pub use sled_key_space::SledKeySpace;
pub use sled_serde::SledOrderedSerde;
pub use sled_serde::SledRangeSerde;
pub use sled_serde::SledSerde;
pub use sled_tree::AsKeySpace;
pub use sled_tree::SledAsRef;
pub use sled_tree::SledItem;
pub use sled_tree::SledTree;
pub use sled_tree::TransactionSledTree;
pub use sled_tree::TxnKeySpace;
pub use store::Store;
mod bytes_error;
mod db;
mod sled_iter;
mod sled_key_space;
mod sled_serde;
mod sled_serde_impl;
mod sled_tree;
mod store;
|
use std::io::{Read, Write};
use byteorder::{ReadBytesExt, WriteBytesExt};
pub mod field;
use record::field::FieldType;
use Error;
use std::convert::TryFrom;
/// Bit flags describing a dBASE field, stored as the raw on-disk flags byte.
#[derive(Copy, Clone)]
pub struct FieldFlags(u8);

impl FieldFlags {
    /// Empty flag set (no bits set).
    pub fn new() -> Self {
        // Tuple-struct constructor instead of the unusual `Self { 0: 0 }`.
        Self(0)
    }

    /// Bit 0x01: the column is a system (hidden) column.
    pub fn system_column(self) -> bool {
        (self.0 & 0x01) != 0
    }

    /// Bit 0x02: the column may store NULL.
    pub fn can_store_null(self) -> bool {
        (self.0 & 0x02) != 0
    }

    /// Bit 0x04: the column holds binary data.
    pub fn is_binary(self) -> bool {
        (self.0 & 0x04) != 0
    }

    /// Autoincrement flag.
    ///
    /// NOTE(review): the 0x0C mask also matches the binary bit (0x04), so a
    /// binary-only column reports as autoincrementing — verify against the
    /// dBASE field-flags spec whether this is intended.
    pub fn is_auto_incrementing(self) -> bool {
        (self.0 & 0x0C) != 0
    }
}
/// Struct giving the info for a record field
pub struct RecordFieldInfo {
/// The name of the field
pub name: String,
/// The field type
pub field_type: FieldType,
pub displacement_field: [u8; 4],
pub field_length: u8,
pub num_decimal_places: u8,
pub flags: FieldFlags,
pub autoincrement_next_val: [u8; 5],
pub autoincrement_step: u8,
}
impl RecordFieldInfo {
    /// On-disk size in bytes of one field descriptor.
    pub(crate) const SIZE: usize = 32;

    /// Creates a descriptor with the given name, type and length;
    /// every other attribute is zeroed / defaulted.
    pub(crate) fn new(name: String, field_type: FieldType, length: u8) -> Self {
        Self {
            name,
            field_type,
            displacement_field: [0u8; 4],
            field_length: length,
            num_decimal_places: 0,
            flags: FieldFlags::new(),
            autoincrement_next_val: [0u8; 5],
            autoincrement_step: 0u8,
        }
    }

    /// Reads one 32-byte field descriptor from `source`.
    ///
    /// # Errors
    /// Fails on I/O errors and when the type byte does not map to a
    /// known `FieldType`.
    pub(crate) fn read_from<T: Read>(source: &mut T) -> Result<Self, Error> {
        // Field name: 11 bytes, '\0'-padded.
        let mut name = [0u8; 11];
        source.read_exact(&mut name)?;
        let field_type = source.read_u8()?;
        let mut displacement_field = [0u8; 4];
        source.read_exact(&mut displacement_field)?;
        let record_length = source.read_u8()?;
        let num_decimal_places = source.read_u8()?;
        let flags = FieldFlags {
            0: source.read_u8()?,
        };
        let mut autoincrement_next_val = [0u8; 5];
        source.read_exact(&mut autoincrement_next_val)?;
        let autoincrement_step = source.read_u8()?;
        // 7 reserved bytes complete the 32-byte descriptor.
        let mut _reserved = [0u8; 7];
        source.read_exact(&mut _reserved)?;
        // Strip the '\0' padding around the name (interior bytes are kept as-is).
        let s = String::from_utf8_lossy(&name)
            .trim_matches(|c| c == '\u{0}')
            .to_owned();
        let field_type = FieldType::try_from(field_type as char)?;
        Ok(Self {
            name: s,
            field_type,
            displacement_field,
            field_length: record_length,
            num_decimal_places,
            flags,
            autoincrement_next_val,
            autoincrement_step,
        })
    }

    /// Writes this descriptor as a 32-byte record to `dest`.
    ///
    /// # Errors
    /// Returns `Error::FieldLengthTooLong` when the name exceeds the
    /// 10 characters that fit in the 11-byte, null-terminated name slot,
    /// and propagates any I/O error.
    pub(crate) fn write_to<T: Write>(&self, dest: &mut T) -> Result<(), Error> {
        let num_bytes = self.name.as_bytes().len();
        if num_bytes > 10 {
            return Err(Error::FieldLengthTooLong);
        }
        // The name slot is always 11 bytes: the name followed by '\0' padding.
        // (The previous version also set padding[10] = 0, which was a no-op
        // since the array is already zero-initialized.)
        dest.write_all(self.name.as_bytes())?;
        let padding = [0u8; 11];
        dest.write_all(&padding[0..11 - num_bytes])?;
        dest.write_u8(self.field_type as u8)?;
        dest.write_all(&self.displacement_field)?;
        dest.write_u8(self.field_length)?;
        dest.write_u8(self.num_decimal_places)?;
        dest.write_u8(self.flags.0)?;
        dest.write_all(&self.autoincrement_next_val)?;
        dest.write_u8(self.autoincrement_step)?;
        // Reserved tail bytes, always zero.
        let reserved = [0u8; 7];
        dest.write_all(&reserved)?;
        Ok(())
    }

    /// Builds the synthetic single-byte "DeletionFlag" pseudo-field that
    /// precedes every record's real fields.
    pub fn new_deletion_flag() -> Self {
        Self {
            name: "DeletionFlag".to_owned(),
            field_type: FieldType::Character,
            displacement_field: [0u8; 4],
            field_length: 1,
            num_decimal_places: 0,
            flags: FieldFlags { 0: 0u8 },
            autoincrement_next_val: [0u8; 5],
            autoincrement_step: 0u8,
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use header::Header;
    use std::fs::File;
    use std::io::{Cursor, Seek, SeekFrom};

    /// Round-trip check: a field descriptor read from a real .dbf file must
    /// serialize back to exactly the bytes it was parsed from.
    #[test]
    fn test_record_info_read_writing() {
        let mut file = File::open("tests/data/line.dbf").unwrap();
        // Field descriptors start right after the fixed-size header.
        file.seek(SeekFrom::Start(Header::SIZE as u64)).unwrap();

        let mut original_bytes = [0u8; RecordFieldInfo::SIZE];
        file.read_exact(&mut original_bytes).unwrap();

        let record_info = {
            let mut reader = Cursor::new(original_bytes);
            RecordFieldInfo::read_from(&mut reader).unwrap()
        };

        let mut writer = Cursor::new(Vec::<u8>::with_capacity(RecordFieldInfo::SIZE));
        record_info.write_to(&mut writer).unwrap();
        let written = writer.into_inner();

        assert_eq!(written.len(), original_bytes.len());
        assert_eq!(written, original_bytes);
    }
}
|
use std::{
collections::BTreeMap,
fmt::{self, Display, Formatter},
ops::{Deref, DerefMut},
};
use serde::{Deserialize, Deserializer, Serialize};
use crate::{ConstValue, Name};
/// Variables of a query.
///
/// A thin newtype over a sorted map from variable name to constant value;
/// `#[serde(transparent)]` makes it serialize exactly like the inner map.
#[derive(Debug, Clone, Default, Serialize, Eq, PartialEq)]
#[serde(transparent)]
pub struct Variables(BTreeMap<Name, ConstValue>);
impl Display for Variables {
    /// Renders the variables as `{name: value, other: value}`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str("{")?;
        let mut first = true;
        for (name, value) in &self.0 {
            if !first {
                f.write_str(", ")?;
            }
            first = false;
            write!(f, "{}: {}", name, value)?;
        }
        f.write_str("}")
    }
}
impl<'de> Deserialize<'de> for Variables {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
Ok(Self(
<Option<BTreeMap<Name, ConstValue>>>::deserialize(deserializer)?.unwrap_or_default(),
))
}
}
impl Variables {
    /// Get the variables from a GraphQL value.
    ///
    /// If the value is not a map, then no variables will be returned.
    #[must_use]
    pub fn from_value(value: ConstValue) -> Self {
        if let ConstValue::Object(obj) = value {
            Self(obj.into_iter().collect())
        } else {
            Self::default()
        }
    }

    /// Get the values from a JSON value.
    ///
    /// If the value is not a map or the keys of a map are not valid GraphQL
    /// names, then no variables will be returned.
    #[must_use]
    pub fn from_json(value: serde_json::Value) -> Self {
        match ConstValue::from_json(value) {
            Ok(v) => Self::from_value(v),
            Err(_) => Self::default(),
        }
    }

    /// Get the variables as a GraphQL value.
    #[must_use]
    pub fn into_value(self) -> ConstValue {
        let entries = self.0.into_iter().collect();
        ConstValue::Object(entries)
    }
}
impl From<Variables> for ConstValue {
    /// Converts by delegating to [`Variables::into_value`].
    fn from(variables: Variables) -> Self {
        variables.into_value()
    }
}
impl Deref for Variables {
    type Target = BTreeMap<Name, ConstValue>;

    /// Exposes the inner map read-only, so map methods work directly on `Variables`.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for Variables {
    /// Exposes the inner map mutably, so insert/remove work directly on `Variables`.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
|
//! Compute and represent local information on the different objects of the IR.
use crate::device::{Context, Device};
use crate::ir::{self, Statement};
use crate::model::{size, HwPressure};
use crate::search_space::{DimKind, Domain, Order, SearchSpace, ThreadMapping};
use fxhash::FxHashMap;
use itertools::Itertools;
use num::integer::lcm;
use utils::*;
/// Local information on the different objects.
#[derive(Debug)]
pub struct LocalInfo {
    /// The loops inside and outside each Stmt.
    pub nesting: FxHashMap<ir::StmtId, Nesting>,
    /// The pressure incurred by a single instance of each Stmt.
    pub hw_pressure: FxHashMap<ir::StmtId, HwPressure>,
    /// The pressure induced by a single iteration of each dimension and the exit latency
    /// of the loop.
    pub dim_overhead: FxHashMap<ir::DimId, (HwPressure, HwPressure)>,
    /// The overhead to initialize a thread.
    pub thread_overhead: HwPressure,
    /// Available parallelism in the kernel.
    pub parallelism: Parallelism,
}
impl LocalInfo {
    /// Compute the local information for the given search space, in the context.
    pub fn compute(space: &SearchSpace, context: &dyn Context) -> Self {
        // Size bounds of every dimension, shared by the pressure model and the
        // induction-variable accounting below.
        let dim_sizes = space
            .ir_instance()
            .dims()
            .map(|d| (d.id(), size::bounds(d.size(), space, context)))
            .collect();
        // Per-statement nesting info; computed first because the pressure maps
        // below consult `has_inner_thread_dims`.
        let nesting: FxHashMap<_, _> = space
            .ir_instance()
            .statements()
            .map(|stmt| (stmt.stmt_id(), Nesting::compute(space, stmt.stmt_id())))
            .collect();
        let mut hw_pressure = space
            .ir_instance()
            .statements()
            .map(|stmt| {
                let is_thread = if let ir::StmtId::Dim(id) = stmt.stmt_id() {
                    space.domain().get_dim_kind(id) == DimKind::THREAD
                } else {
                    false
                };
                // Only keep the pressure of innermost thread dimensions. Otherwise it
                // will be taken multiple times into account.
                let pressure =
                    if is_thread && nesting[&stmt.stmt_id()].has_inner_thread_dims {
                        HwPressure::zero(&*context.device())
                    } else {
                        context
                            .device()
                            .hw_pressure(space, &dim_sizes, &nesting, stmt, context)
                    };
                (stmt.stmt_id(), pressure)
            })
            .collect();
        let mut dim_overhead = space
            .ir_instance()
            .dims()
            .map(|d| {
                let kind = space.domain().get_dim_kind(d.id());
                if kind == DimKind::THREAD && nesting[&d.stmt_id()].has_inner_thread_dims
                {
                    // Only keep the overhead on innermost thread dimensions. Otherwise it
                    // will be taken multiple times into account.
                    let zero = HwPressure::zero(&*context.device());
                    (d.id(), (zero.clone(), zero))
                } else {
                    (d.id(), context.device().loop_iter_pressure(kind))
                }
            })
            .collect();
        let parallelism = parallelism(&nesting, space, context);
        // Add the pressure induced by induction variables.
        // `add_indvar_pressure` mutates all three accumulators in place.
        let mut thread_overhead = HwPressure::zero(&*context.device());
        for (_, var) in space.ir_instance().induction_vars() {
            add_indvar_pressure(
                &*context.device(),
                space,
                &dim_sizes,
                var,
                &mut hw_pressure,
                &mut dim_overhead,
                &mut thread_overhead,
            );
        }
        LocalInfo {
            nesting,
            hw_pressure,
            dim_overhead,
            thread_overhead,
            parallelism,
        }
    }
}
/// Accumulates the hardware pressure caused by one induction variable into the
/// per-statement pressure map, the per-dimension overheads and the per-thread
/// overhead. Vector dimensions are skipped entirely.
fn add_indvar_pressure(
    device: &dyn Device,
    space: &SearchSpace,
    dim_sizes: &FxHashMap<ir::DimId, size::Range>,
    indvar: &ir::InductionVar,
    hw_pressure: &mut FxHashMap<ir::StmtId, HwPressure>,
    dim_overhead: &mut FxHashMap<ir::DimId, (HwPressure, HwPressure)>,
    thread_overhead: &mut HwPressure,
) {
    for &(dim, _) in indvar.dims() {
        let dim_kind = space.domain().get_dim_kind(dim);
        if dim_kind.intersects(DimKind::VECTOR) {
            continue;
        }
        // Fall back to a 32-bit integer when the type cannot be lowered yet.
        let t = device
            .lower_type(indvar.base().t(), space)
            .unwrap_or(ir::Type::I(32));
        // Unrolled/sequential loops update the induction variable additively;
        // other kinds require a multiplication per level.
        let mut overhead = if dim_kind.intersects(DimKind::UNROLL | DimKind::LOOP) {
            // FIXME: do not add the latency if the induction level can statically computed.
            // This is the case when:
            // - the loop is unrolled
            // - the increment is a constant
            // - both the conditions are also true for an inner dimension.
            device.additive_indvar_pressure(&t)
        } else {
            device.multiplicative_indvar_pressure(&t)
        };
        let size = dim_sizes[&dim].min;
        if dim_kind.intersects(DimKind::THREAD | DimKind::BLOCK) {
            // Parallel dimensions pay the cost once per thread, not per iteration.
            thread_overhead.add_parallel(&overhead);
        } else if size > 1 {
            // First iteration counts as loop-entry overhead; the remaining
            // `size - 1` iterations are charged to the dimension body.
            unwrap!(dim_overhead.get_mut(&dim))
                .0
                .add_parallel(&overhead);
            overhead.repeat_parallel((size - 1) as f64);
            unwrap!(hw_pressure.get_mut(&dim.into())).add_parallel(&overhead);
        }
    }
}
/// Nesting of an object.
#[derive(Debug)]
pub struct Nesting {
    /// Dimensions nested inside the current Stmt.
    pub inner_dims: VecSet<ir::DimId>,
    /// Basic blocks nested inside the current Stmt.
    pub inner_stmts: VecSet<ir::StmtId>,
    /// Dimensions nested outside the current Stmt.
    pub outer_dims: VecSet<ir::DimId>,
    /// Dimensions to be processed before the current Stmt.
    pub before_self: VecSet<ir::DimId>,
    /// Dimensions that should not take the current Stmt into account when processed.
    pub after_self: VecSet<ir::DimId>,
    /// The dimensions that can be merged to this one and have a bigger ID.
    pub bigger_merged_dims: VecSet<ir::DimId>,
    /// Indicates if the block may have thread dimensions nested inside it.
    /// Only consider thread dimensions that are sure to be mapped to threads.
    has_inner_thread_dims: bool,
    /// Number of threads that are not represented in the active dimensions of the block.
    pub num_unmapped_threads: ir::PartialSize,
    /// Maximal number of threads this block can be in, considering only outer dimensions
    /// (and not mapped-out dimensions).
    pub max_threads_per_block: ir::PartialSize,
}
impl Nesting {
    /// Computes the nesting of a `Statement`.
    fn compute(space: &SearchSpace, stmt: ir::StmtId) -> Self {
        let mut inner_dims = Vec::new();
        let mut inner_stmts = Vec::new();
        let mut before_self = Vec::new();
        let mut after_self = Vec::new();
        let mut bigger_merged_dims = Vec::new();
        let mut has_inner_thread_dims = false;
        // Classify every other statement relative to `stmt` using the order domain.
        for other_stmt in space.ir_instance().statements() {
            if other_stmt.stmt_id() == stmt {
                continue;
            }
            let order = space.domain().get_order(other_stmt.stmt_id(), stmt);
            if Order::INNER.contains(order) {
                inner_stmts.push(other_stmt.stmt_id());
            }
            if let Some(dim) = other_stmt.as_dim() {
                let other_kind = space.domain().get_dim_kind(dim.id());
                if Order::INNER.contains(order) {
                    inner_dims.push(dim.id());
                }
                // `intersects` (not `contains`): a dimension that *may* be inner
                // and is a thread dimension is enough to set the flag.
                if order.intersects(Order::INNER) && other_kind == DimKind::THREAD {
                    has_inner_thread_dims = true;
                }
                if (Order::INNER | Order::BEFORE).contains(order) {
                    before_self.push(dim.id());
                }
                if (Order::OUTER | Order::AFTER).contains(order) {
                    after_self.push(dim.id());
                }
                if order.intersects(Order::MERGED) && other_stmt.stmt_id() > stmt {
                    bigger_merged_dims.push(dim.id());
                }
            }
        }
        let outer_dims = Self::get_iteration_dims(space, stmt);
        // Threads not covered by (or mapped to) an outer dimension of this block.
        let num_unmapped_threads = space
            .ir_instance()
            .thread_dims()
            .filter(|dim| {
                !outer_dims.iter().any(|&other| {
                    if dim.id() == other {
                        return true;
                    }
                    if space.ir_instance().dim(other).possible_sizes().is_none() {
                        return false;
                    }
                    let mapping = space.domain().get_thread_mapping(dim.id(), other);
                    mapping.intersects(ThreadMapping::MAPPED)
                })
            })
            .map(|d| d.size())
            .product::<ir::PartialSize>();
        // Product of the sizes of the outer dimensions that may be threads.
        let max_threads_per_block = outer_dims
            .iter()
            .cloned()
            .filter(|&d| space.domain().get_dim_kind(d).intersects(DimKind::THREAD))
            .map(|d| space.ir_instance().dim(d).size())
            .product::<ir::PartialSize>();
        Nesting {
            inner_dims: VecSet::new(inner_dims),
            inner_stmts: VecSet::new(inner_stmts),
            outer_dims,
            before_self: VecSet::new(before_self),
            after_self: VecSet::new(after_self),
            bigger_merged_dims: VecSet::new(bigger_merged_dims),
            has_inner_thread_dims,
            num_unmapped_threads,
            max_threads_per_block,
        }
    }

    /// Computes the list of iteration dimensions of a `Statement`.
    fn get_iteration_dims(space: &SearchSpace, stmt: ir::StmtId) -> VecSet<ir::DimId> {
        let dims = if let ir::StmtId::Inst(inst) = stmt {
            // Instructions carry their iteration dimensions explicitly.
            space
                .ir_instance()
                .inst(inst)
                .iteration_dims()
                .iter()
                .cloned()
                .collect()
        } else {
            // For other statements, collect outer dimensions while skipping
            // any dimension merged with one already kept.
            let mut outer = Vec::new();
            for dim in space.ir_instance().dims().map(|d| d.id()) {
                if stmt == dim.into() {
                    continue;
                }
                let order = space.domain().get_order(dim.into(), stmt);
                if Order::OUTER.contains(order)
                    && outer.iter().cloned().all(|outer: ir::DimId| {
                        let ord = space.domain().get_order(dim.into(), outer.into());
                        !ord.contains(Order::MERGED)
                    })
                {
                    outer.push(dim);
                }
            }
            outer
        };
        VecSet::new(dims)
    }
}
/// Minimum and maximum parallelism in the whole GPU.
#[derive(Debug)]
pub struct Parallelism {
    /// Minimal number of blocks.
    pub min_num_blocks: u64,
    /// Minimal number of threads per blocks.
    pub min_num_threads_per_blocks: u64,
    /// Minimal number of threads.
    pub min_num_threads: u64,
    /// A multiple of the number of blocks.
    pub lcm_num_blocks: u64,
}
impl Parallelism {
    /// Combines two `Parallelism` summaries computed on different instructions and computes the
    /// `Parallelism` of the union of the instructions: minima are merged with `min`,
    /// the block-count multiple with `lcm`.
    fn combine(self, rhs: &Self) -> Self {
        Parallelism {
            min_num_blocks: std::cmp::min(self.min_num_blocks, rhs.min_num_blocks),
            min_num_threads_per_blocks: std::cmp::min(
                self.min_num_threads_per_blocks,
                rhs.min_num_threads_per_blocks,
            ),
            min_num_threads: std::cmp::min(self.min_num_threads, rhs.min_num_threads),
            lcm_num_blocks: lcm(self.lcm_num_blocks, rhs.lcm_num_blocks),
        }
    }
}
impl Default for Parallelism {
    /// Neutral element for `combine`: one block, one thread, lcm of 1.
    fn default() -> Self {
        Parallelism {
            min_num_blocks: 1,
            min_num_threads_per_blocks: 1,
            min_num_threads: 1,
            lcm_num_blocks: 1,
        }
    }
}
/// Computes the minimal and maximal parallelism across instructions.
fn parallelism(
    nesting: &FxHashMap<ir::StmtId, Nesting>,
    space: &SearchSpace,
    ctx: &dyn Context,
) -> Parallelism {
    // Total number of threads per block is shared by every instruction.
    let size_thread_dims = space
        .ir_instance()
        .thread_dims()
        .map(|d| d.size())
        .product::<ir::PartialSize>();
    let min_threads_per_blocks = size::bounds(&size_thread_dims, space, ctx).min;
    space
        .ir_instance()
        .insts()
        .map(|inst| {
            // min: only dimensions that are certainly blocks;
            // max: every dimension that may be a block.
            let mut min_size_blocks = ir::PartialSize::default();
            let mut max_size_blocks = ir::PartialSize::default();
            for &dim in &nesting[&inst.stmt_id()].outer_dims {
                let kind = space.domain().get_dim_kind(dim);
                if kind.intersects(DimKind::BLOCK) {
                    let size = space.ir_instance().dim(dim).size();
                    max_size_blocks *= size;
                    if kind == DimKind::BLOCK {
                        min_size_blocks *= size;
                    }
                }
            }
            let min_num_blocks = size::bounds(&min_size_blocks, space, ctx).min;
            let lcm_num_blocks = size::factors(&max_size_blocks, space, ctx).lcm;
            let size_threads_and_blocks = min_size_blocks * &size_thread_dims;
            Parallelism {
                min_num_blocks,
                min_num_threads_per_blocks: min_threads_per_blocks,
                min_num_threads: size::bounds(&size_threads_and_blocks, space, ctx).min,
                lcm_num_blocks,
            }
        })
        // Reduce per-instruction summaries; an empty kernel yields the neutral default.
        .fold1(|lhs, rhs| lhs.combine(&rhs))
        .unwrap_or_default()
}
|
// Copyright 2019. The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::{
output_manager_service::TxId,
schema::{coinbase_transactions, completed_transactions, inbound_transactions, outbound_transactions},
transaction_service::{
error::TransactionStorageError,
storage::database::{
CompletedTransaction,
DbKey,
DbKeyValuePair,
DbValue,
InboundTransaction,
OutboundTransaction,
PendingCoinbaseTransaction,
TransactionBackend,
TransactionStatus,
WriteOperation,
},
},
};
use chrono::NaiveDateTime;
use diesel::{prelude::*, result::Error as DieselError, SqliteConnection};
use std::{
collections::HashMap,
convert::TryFrom,
sync::{Arc, Mutex, MutexGuard},
};
use tari_core::transactions::{
tari_amount::MicroTari,
types::{Commitment, PublicKey},
};
use tari_crypto::tari_utilities::ByteArray;
/// A Sqlite backend for the Transaction Service. The Backend is accessed via a connection pool to the Sqlite file.
#[derive(Clone)]
pub struct TransactionServiceSqliteDatabase {
    // Shared, mutex-guarded connection; clones of this struct share the same connection.
    database_connection: Arc<Mutex<SqliteConnection>>,
}
impl TransactionServiceSqliteDatabase {
    /// Creates a backend around a shared Sqlite connection.
    pub fn new(database_connection: Arc<Mutex<SqliteConnection>>) -> Self {
        Self { database_connection }
    }

    /// Inserts a new transaction row for the given key/value pair.
    ///
    /// # Errors
    /// Returns `DuplicateOutput` when a row with the same transaction id already
    /// exists, and propagates storage/serialization errors.
    fn insert(kvp: DbKeyValuePair, conn: MutexGuard<SqliteConnection>) -> Result<(), TransactionStorageError> {
        match kvp {
            DbKeyValuePair::PendingOutboundTransaction(k, v) => {
                if OutboundTransactionSql::find(k, &(*conn)).is_ok() {
                    return Err(TransactionStorageError::DuplicateOutput);
                }
                OutboundTransactionSql::try_from(*v)?.commit(&(*conn))?;
            },
            DbKeyValuePair::PendingInboundTransaction(k, v) => {
                if InboundTransactionSql::find(k, &(*conn)).is_ok() {
                    return Err(TransactionStorageError::DuplicateOutput);
                }
                InboundTransactionSql::try_from(*v)?.commit(&(*conn))?;
            },
            DbKeyValuePair::PendingCoinbaseTransaction(k, v) => {
                if PendingCoinbaseTransactionSql::find(k, &(*conn)).is_ok() {
                    return Err(TransactionStorageError::DuplicateOutput);
                }
                PendingCoinbaseTransactionSql::from(*v).commit(&(*conn))?;
            },
            DbKeyValuePair::CompletedTransaction(k, v) => {
                if CompletedTransactionSql::find(k, &(*conn)).is_ok() {
                    return Err(TransactionStorageError::DuplicateOutput);
                }
                CompletedTransactionSql::try_from(*v)?.commit(&(*conn))?;
            },
        }
        Ok(())
    }

    /// Removes the row addressed by `key` and returns the removed value, if any.
    ///
    /// # Errors
    /// Returns `ValueNotFound` with the *queried* key when no row matches
    /// (previously the inbound and coinbase arms wrongly reported a
    /// `PendingOutboundTransaction` key — fixed here), and
    /// `OperationNotSupported` for collection-level keys.
    fn remove(key: DbKey, conn: MutexGuard<SqliteConnection>) -> Result<Option<DbValue>, TransactionStorageError> {
        match key {
            DbKey::PendingOutboundTransaction(k) => match OutboundTransactionSql::find(k, &(*conn)) {
                Ok(v) => {
                    v.delete(&(*conn))?;
                    Ok(Some(DbValue::PendingOutboundTransaction(Box::new(
                        OutboundTransaction::try_from(v)?,
                    ))))
                },
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => Err(
                    TransactionStorageError::ValueNotFound(DbKey::PendingOutboundTransaction(k)),
                ),
                Err(e) => Err(e),
            },
            DbKey::PendingInboundTransaction(k) => match InboundTransactionSql::find(k, &(*conn)) {
                Ok(v) => {
                    v.delete(&(*conn))?;
                    Ok(Some(DbValue::PendingInboundTransaction(Box::new(
                        InboundTransaction::try_from(v)?,
                    ))))
                },
                // Report the key that was actually looked up.
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => Err(
                    TransactionStorageError::ValueNotFound(DbKey::PendingInboundTransaction(k)),
                ),
                Err(e) => Err(e),
            },
            DbKey::PendingCoinbaseTransaction(k) => match PendingCoinbaseTransactionSql::find(k, &(*conn)) {
                Ok(v) => {
                    v.delete(&(*conn))?;
                    Ok(Some(DbValue::PendingCoinbaseTransaction(Box::new(
                        PendingCoinbaseTransaction::try_from(v)?,
                    ))))
                },
                // Report the key that was actually looked up.
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => Err(
                    TransactionStorageError::ValueNotFound(DbKey::PendingCoinbaseTransaction(k)),
                ),
                Err(e) => Err(e),
            },
            DbKey::CompletedTransaction(k) => match CompletedTransactionSql::find(k, &(*conn)) {
                Ok(v) => {
                    v.delete(&(*conn))?;
                    Ok(Some(DbValue::CompletedTransaction(Box::new(
                        CompletedTransaction::try_from(v)?,
                    ))))
                },
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                    Err(TransactionStorageError::ValueNotFound(DbKey::CompletedTransaction(k)))
                },
                Err(e) => Err(e),
            },
            DbKey::PendingOutboundTransactions => Err(TransactionStorageError::OperationNotSupported),
            DbKey::PendingInboundTransactions => Err(TransactionStorageError::OperationNotSupported),
            DbKey::CompletedTransactions => Err(TransactionStorageError::OperationNotSupported),
            DbKey::PendingCoinbaseTransactions => Err(TransactionStorageError::OperationNotSupported),
        }
    }
}
impl TransactionBackend for TransactionServiceSqliteDatabase {
    /// Fetches a single transaction or a whole collection, depending on `key`.
    /// Single lookups return `Ok(None)` when the row does not exist.
    fn fetch(&self, key: &DbKey) -> Result<Option<DbValue>, TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        let result = match key {
            DbKey::PendingOutboundTransaction(t) => match OutboundTransactionSql::find(*t, &(*conn)) {
                Ok(o) => Some(DbValue::PendingOutboundTransaction(Box::new(
                    OutboundTransaction::try_from(o)?,
                ))),
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => None,
                Err(e) => return Err(e),
            },
            DbKey::PendingInboundTransaction(t) => match InboundTransactionSql::find(*t, &(*conn)) {
                Ok(o) => Some(DbValue::PendingInboundTransaction(Box::new(
                    InboundTransaction::try_from(o)?,
                ))),
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => None,
                Err(e) => return Err(e),
            },
            DbKey::PendingCoinbaseTransaction(t) => match PendingCoinbaseTransactionSql::find(*t, &(*conn)) {
                Ok(o) => Some(DbValue::PendingCoinbaseTransaction(Box::new(
                    PendingCoinbaseTransaction::try_from(o)?,
                ))),
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => None,
                Err(e) => return Err(e),
            },
            DbKey::CompletedTransaction(t) => match CompletedTransactionSql::find(*t, &(*conn)) {
                Ok(o) => Some(DbValue::CompletedTransaction(Box::new(CompletedTransaction::try_from(
                    o,
                )?))),
                Err(TransactionStorageError::DieselError(DieselError::NotFound)) => None,
                Err(e) => return Err(e),
            },
            // Collection fetches build a tx_id -> value map; rows that fail to
            // convert back to the domain type are silently skipped.
            DbKey::PendingOutboundTransactions => Some(DbValue::PendingOutboundTransactions(
                OutboundTransactionSql::index(&(*conn))?
                    .iter()
                    .fold(HashMap::new(), |mut acc, x| {
                        if let Ok(v) = OutboundTransaction::try_from((*x).clone()) {
                            acc.insert(x.tx_id as u64, v);
                        }
                        acc
                    }),
            )),
            DbKey::PendingInboundTransactions => Some(DbValue::PendingInboundTransactions(
                InboundTransactionSql::index(&(*conn))?
                    .iter()
                    .fold(HashMap::new(), |mut acc, x| {
                        if let Ok(v) = InboundTransaction::try_from((*x).clone()) {
                            acc.insert(x.tx_id as u64, v);
                        }
                        acc
                    }),
            )),
            DbKey::PendingCoinbaseTransactions => Some(DbValue::PendingCoinbaseTransactions(
                PendingCoinbaseTransactionSql::index(&(*conn))?
                    .iter()
                    .fold(HashMap::new(), |mut acc, x| {
                        if let Ok(v) = PendingCoinbaseTransaction::try_from((*x).clone()) {
                            acc.insert(x.tx_id as u64, v);
                        }
                        acc
                    }),
            )),
            DbKey::CompletedTransactions => Some(DbValue::CompletedTransactions(
                CompletedTransactionSql::index(&(*conn))?
                    .iter()
                    .fold(HashMap::new(), |mut acc, x| {
                        if let Ok(v) = CompletedTransaction::try_from((*x).clone()) {
                            acc.insert(x.tx_id as u64, v);
                        }
                        acc
                    }),
            )),
        };
        Ok(result)
    }

    /// Existence check for a single key; collection keys always report `false`.
    fn contains(&self, key: &DbKey) -> Result<bool, TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        let result = match key {
            DbKey::PendingOutboundTransaction(k) => OutboundTransactionSql::find(*k, &(*conn)).is_ok(),
            DbKey::PendingInboundTransaction(k) => InboundTransactionSql::find(*k, &(*conn)).is_ok(),
            DbKey::PendingCoinbaseTransaction(k) => PendingCoinbaseTransactionSql::find(*k, &(*conn)).is_ok(),
            DbKey::CompletedTransaction(k) => CompletedTransactionSql::find(*k, &(*conn)).is_ok(),
            DbKey::PendingOutboundTransactions => false,
            DbKey::PendingInboundTransactions => false,
            DbKey::CompletedTransactions => false,
            DbKey::PendingCoinbaseTransactions => false,
        };
        Ok(result)
    }

    /// Dispatches a write operation to `insert` / `remove` under one lock.
    fn write(&self, op: WriteOperation) -> Result<Option<DbValue>, TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        match op {
            WriteOperation::Insert(kvp) => TransactionServiceSqliteDatabase::insert(kvp, conn).map(|_| None),
            WriteOperation::Remove(key) => TransactionServiceSqliteDatabase::remove(key, conn),
        }
    }

    /// True when `tx_id` exists in any of the four transaction tables.
    fn transaction_exists(&self, tx_id: u64) -> Result<bool, TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        Ok(OutboundTransactionSql::find(tx_id, &(*conn)).is_ok() ||
            InboundTransactionSql::find(tx_id, &(*conn)).is_ok() ||
            PendingCoinbaseTransactionSql::find(tx_id, &(*conn)).is_ok() ||
            CompletedTransactionSql::find(tx_id, &(*conn)).is_ok())
    }

    /// Moves a pending outbound transaction to the completed table.
    /// Fails with `TransactionAlreadyExists` if the completed row already exists.
    fn complete_outbound_transaction(
        &self,
        tx_id: u64,
        completed_transaction: CompletedTransaction,
    ) -> Result<(), TransactionStorageError>
    {
        let conn = acquire_lock!(self.database_connection);
        if CompletedTransactionSql::find(tx_id, &(*conn)).is_ok() {
            return Err(TransactionStorageError::TransactionAlreadyExists);
        }
        match OutboundTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                // Convert before deleting so a conversion error leaves the row intact.
                let completed_tx_sql = CompletedTransactionSql::try_from(completed_transaction)?;
                v.delete(&(*conn))?;
                completed_tx_sql.commit(&(*conn))?;
            },
            Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                return Err(TransactionStorageError::ValueNotFound(
                    DbKey::PendingOutboundTransaction(tx_id),
                ))
            },
            Err(e) => return Err(e),
        };
        Ok(())
    }

    /// Moves a pending inbound transaction to the completed table.
    fn complete_inbound_transaction(
        &self,
        tx_id: u64,
        completed_transaction: CompletedTransaction,
    ) -> Result<(), TransactionStorageError>
    {
        let conn = acquire_lock!(self.database_connection);
        if CompletedTransactionSql::find(tx_id, &(*conn)).is_ok() {
            return Err(TransactionStorageError::TransactionAlreadyExists);
        }
        match InboundTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                let completed_tx_sql = CompletedTransactionSql::try_from(completed_transaction)?;
                v.delete(&(*conn))?;
                completed_tx_sql.commit(&(*conn))?;
            },
            Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                return Err(TransactionStorageError::ValueNotFound(
                    DbKey::PendingInboundTransaction(tx_id),
                ))
            },
            Err(e) => return Err(e),
        };
        Ok(())
    }

    /// Moves a pending coinbase transaction to the completed table.
    fn complete_coinbase_transaction(
        &self,
        tx_id: u64,
        completed_transaction: CompletedTransaction,
    ) -> Result<(), TransactionStorageError>
    {
        let conn = acquire_lock!(self.database_connection);
        if CompletedTransactionSql::find(tx_id, &(*conn)).is_ok() {
            return Err(TransactionStorageError::TransactionAlreadyExists);
        }
        match PendingCoinbaseTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                let completed_tx_sql = CompletedTransactionSql::try_from(completed_transaction)?;
                v.delete(&(*conn))?;
                completed_tx_sql.commit(&(*conn))?;
            },
            Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                return Err(TransactionStorageError::ValueNotFound(
                    DbKey::PendingCoinbaseTransaction(tx_id),
                ))
            },
            Err(e) => return Err(e),
        };
        Ok(())
    }

    /// Marks a Completed transaction as Broadcast. Transactions in any other
    /// status are left untouched (no error).
    fn broadcast_completed_transaction(&self, tx_id: u64) -> Result<(), TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        match CompletedTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                if TransactionStatus::try_from(v.status)? == TransactionStatus::Completed {
                    let _ = v.update(
                        UpdateCompletedTransaction {
                            status: Some(TransactionStatus::Broadcast),
                            timestamp: None,
                        },
                        &(*conn),
                    )?;
                }
            },
            Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                return Err(TransactionStorageError::ValueNotFound(DbKey::CompletedTransaction(
                    tx_id,
                )))
            },
            Err(e) => return Err(e),
        };
        Ok(())
    }

    /// Marks a completed transaction as Mined, regardless of its current status.
    fn mine_completed_transaction(&self, tx_id: u64) -> Result<(), TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        match CompletedTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                let _ = v.update(
                    UpdateCompletedTransaction {
                        status: Some(TransactionStatus::Mined),
                        timestamp: None,
                    },
                    &(*conn),
                )?;
            },
            Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                return Err(TransactionStorageError::ValueNotFound(DbKey::CompletedTransaction(
                    tx_id,
                )))
            },
            Err(e) => return Err(e),
        };
        Ok(())
    }

    /// Cancels (currently: deletes) a completed transaction.
    fn cancel_completed_transaction(&self, tx_id: u64) -> Result<(), TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        match CompletedTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                v.cancel(&(*conn))?;
            },
            Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                return Err(TransactionStorageError::ValueNotFound(DbKey::CompletedTransaction(
                    tx_id,
                )));
            },
            Err(e) => return Err(e),
        };
        Ok(())
    }

    /// Cancels a pending transaction, trying inbound first and falling back
    /// to outbound.
    // NOTE(review): the outer `Err(_)` swallows *any* inbound-lookup error
    // (not just NotFound) before falling back to the outbound table — confirm
    // that real DB errors should not be propagated here.
    fn cancel_pending_transaction(&self, tx_id: u64) -> Result<(), TransactionStorageError> {
        let conn = acquire_lock!(self.database_connection);
        match InboundTransactionSql::find(tx_id, &(*conn)) {
            Ok(v) => {
                let _ = v.cancel(&(*conn))?;
            },
            Err(_) => {
                match OutboundTransactionSql::find(tx_id, &(*conn)) {
                    Ok(v) => {
                        let _ = v.cancel(&(*conn))?;
                    },
                    Err(TransactionStorageError::DieselError(DieselError::NotFound)) => {
                        return Err(TransactionStorageError::ValuesNotFound);
                    },
                    Err(e) => return Err(e),
                };
            },
        };
        Ok(())
    }

    /// Test-only helper: overwrites the timestamp of a completed transaction.
    /// Lookup and update failures are deliberately ignored (best effort).
    #[cfg(feature = "test_harness")]
    fn update_completed_transaction_timestamp(
        &self,
        tx_id: u64,
        timestamp: NaiveDateTime,
    ) -> Result<(), TransactionStorageError>
    {
        let conn = acquire_lock!(self.database_connection);
        if let Ok(tx) = CompletedTransactionSql::find(tx_id, &(*conn)) {
            let _ = tx.update(
                UpdateCompletedTransaction {
                    status: None,
                    timestamp: Some(timestamp),
                },
                &(*conn),
            );
        }
        Ok(())
    }
}
/// Sql-compatible row representation of an `InboundTransaction`.
#[derive(Clone, Debug, Queryable, Insertable, PartialEq)]
#[table_name = "inbound_transactions"]
struct InboundTransactionSql {
    // Stored as i64 because Sqlite has no unsigned 64-bit column type.
    tx_id: i64,
    source_public_key: Vec<u8>,
    amount: i64,
    // JSON-serialized receiver protocol state.
    receiver_protocol: String,
    message: String,
    timestamp: NaiveDateTime,
}
impl InboundTransactionSql {
    /// Inserts this row into the `inbound_transactions` table.
    pub fn commit(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        let row = self.clone();
        diesel::insert_into(inbound_transactions::table)
            .values(row)
            .execute(conn)?;
        Ok(())
    }

    /// Loads every row of the table.
    pub fn index(conn: &SqliteConnection) -> Result<Vec<InboundTransactionSql>, TransactionStorageError> {
        let rows = inbound_transactions::table.load::<InboundTransactionSql>(conn)?;
        Ok(rows)
    }

    /// Looks up a single row by transaction id.
    pub fn find(tx_id: TxId, conn: &SqliteConnection) -> Result<InboundTransactionSql, TransactionStorageError> {
        let query = inbound_transactions::table.filter(inbound_transactions::tx_id.eq(tx_id as i64));
        Ok(query.first::<InboundTransactionSql>(conn)?)
    }

    /// Deletes this row; errors with `ValuesNotFound` when nothing matched.
    pub fn delete(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        let target = inbound_transactions::table.filter(inbound_transactions::tx_id.eq(&self.tx_id));
        match diesel::delete(target).execute(conn)? {
            0 => Err(TransactionStorageError::ValuesNotFound),
            _ => Ok(()),
        }
    }

    /// Cancels this transaction.
    // TODO Once sqlite migrations are implemented have cancellation be done with a Status flag
    pub fn cancel(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        self.delete(conn)
    }
}
impl TryFrom<InboundTransaction> for InboundTransactionSql {
    type Error = TransactionStorageError;

    /// Converts the domain type into its Sql row form, JSON-encoding the
    /// receiver protocol state.
    fn try_from(i: InboundTransaction) -> Result<Self, Self::Error> {
        let receiver_protocol = serde_json::to_string(&i.receiver_protocol)?;
        Ok(Self {
            tx_id: i.tx_id as i64,
            source_public_key: i.source_public_key.to_vec(),
            amount: u64::from(i.amount) as i64,
            receiver_protocol,
            message: i.message,
            timestamp: i.timestamp,
        })
    }
}
impl TryFrom<InboundTransactionSql> for InboundTransaction {
    type Error = TransactionStorageError;

    /// Rebuilds the domain type from a Sql row; the status is always
    /// `Pending` for inbound rows.
    fn try_from(i: InboundTransactionSql) -> Result<Self, Self::Error> {
        let source_public_key = PublicKey::from_vec(&i.source_public_key)
            .map_err(|_| TransactionStorageError::ConversionError)?;
        let receiver_protocol = serde_json::from_str(&i.receiver_protocol)?;
        Ok(Self {
            tx_id: i.tx_id as u64,
            source_public_key,
            amount: MicroTari::from(i.amount as u64),
            receiver_protocol,
            status: TransactionStatus::Pending,
            message: i.message,
            timestamp: i.timestamp,
        })
    }
}
/// A structure to represent a Sql compatible version of the OutboundTransaction struct
#[derive(Clone, Debug, Queryable, Insertable, PartialEq)]
#[table_name = "outbound_transactions"]
struct OutboundTransactionSql {
    // Stored as i64 because Sqlite has no unsigned 64-bit column type.
    tx_id: i64,
    destination_public_key: Vec<u8>,
    amount: i64,
    fee: i64,
    // JSON-serialized sender protocol state.
    sender_protocol: String,
    message: String,
    timestamp: NaiveDateTime,
}
impl OutboundTransactionSql {
pub fn commit(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
diesel::insert_into(outbound_transactions::table)
.values(self.clone())
.execute(conn)?;
Ok(())
}
pub fn index(conn: &SqliteConnection) -> Result<Vec<OutboundTransactionSql>, TransactionStorageError> {
Ok(outbound_transactions::table.load::<OutboundTransactionSql>(conn)?)
}
pub fn find(tx_id: TxId, conn: &SqliteConnection) -> Result<OutboundTransactionSql, TransactionStorageError> {
Ok(outbound_transactions::table
.filter(outbound_transactions::tx_id.eq(tx_id as i64))
.first::<OutboundTransactionSql>(conn)?)
}
pub fn delete(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
let num_deleted =
diesel::delete(outbound_transactions::table.filter(outbound_transactions::tx_id.eq(&self.tx_id)))
.execute(conn)?;
if num_deleted == 0 {
return Err(TransactionStorageError::ValuesNotFound);
}
Ok(())
}
pub fn cancel(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
// TODO Once sqlite migrations are implemented have cancellation be done with a Status flag
self.delete(conn)
}
}
impl TryFrom<OutboundTransaction> for OutboundTransactionSql {
    type Error = TransactionStorageError;

    /// Converts the domain type into its Sql row form, JSON-encoding the
    /// sender protocol state.
    fn try_from(i: OutboundTransaction) -> Result<Self, Self::Error> {
        let sender_protocol = serde_json::to_string(&i.sender_protocol)?;
        Ok(Self {
            tx_id: i.tx_id as i64,
            destination_public_key: i.destination_public_key.to_vec(),
            amount: u64::from(i.amount) as i64,
            fee: u64::from(i.fee) as i64,
            sender_protocol,
            message: i.message,
            timestamp: i.timestamp,
        })
    }
}
impl TryFrom<OutboundTransactionSql> for OutboundTransaction {
    type Error = TransactionStorageError;

    /// Rehydrate an `OutboundTransaction` from its Sql row, deserializing the
    /// destination public key and the JSON-encoded sender protocol.
    fn try_from(i: OutboundTransactionSql) -> Result<Self, Self::Error> {
        Ok(Self {
            tx_id: i.tx_id as u64,
            destination_public_key: PublicKey::from_vec(&i.destination_public_key)
                .map_err(|_| TransactionStorageError::ConversionError)?,
            amount: MicroTari::from(i.amount as u64),
            fee: MicroTari::from(i.fee as u64),
            sender_protocol: serde_json::from_str(&i.sender_protocol)?,
            // No status column in this table yet, so every row comes back
            // Pending — TODO confirm once migrations add a status flag.
            status: TransactionStatus::Pending,
            message: i.message,
            timestamp: i.timestamp,
        })
    }
}
/// Sql-compatible representation of a pending coinbase transaction.
#[derive(Clone, Debug, Queryable, Insertable, PartialEq)]
#[table_name = "coinbase_transactions"]
struct PendingCoinbaseTransactionSql {
    // Field order must match the `coinbase_transactions` column order
    // because `Queryable` maps columns positionally.
    tx_id: i64,
    amount: i64,
    commitment: Vec<u8>, // raw commitment bytes
    timestamp: NaiveDateTime,
}
impl PendingCoinbaseTransactionSql {
    /// Insert this record into the `coinbase_transactions` table.
    pub fn commit(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        diesel::insert_into(coinbase_transactions::table)
            .values(self.clone())
            .execute(conn)?;
        Ok(())
    }

    /// Return every pending coinbase transaction record.
    pub fn index(conn: &SqliteConnection) -> Result<Vec<PendingCoinbaseTransactionSql>, TransactionStorageError> {
        Ok(coinbase_transactions::table.load::<PendingCoinbaseTransactionSql>(conn)?)
    }

    /// Look up a single record by its transaction id.
    pub fn find(
        tx_id: TxId,
        conn: &SqliteConnection,
    ) -> Result<PendingCoinbaseTransactionSql, TransactionStorageError>
    {
        Ok(coinbase_transactions::table
            .filter(coinbase_transactions::tx_id.eq(tx_id as i64))
            .first::<PendingCoinbaseTransactionSql>(conn)?)
    }

    /// Delete this record, returning `ValuesNotFound` when no row matched.
    pub fn delete(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        let num_deleted =
            diesel::delete(coinbase_transactions::table.filter(coinbase_transactions::tx_id.eq(&self.tx_id)))
                .execute(conn)?;
        if num_deleted == 0 {
            return Err(TransactionStorageError::ValuesNotFound);
        }
        Ok(())
    }
}
impl From<PendingCoinbaseTransaction> for PendingCoinbaseTransactionSql {
    /// Map a pending coinbase transaction onto its Sql row form; this
    /// conversion is infallible.
    fn from(i: PendingCoinbaseTransaction) -> Self {
        let amount = u64::from(i.amount) as i64;
        let commitment = i.commitment.to_vec();
        Self {
            tx_id: i.tx_id as i64,
            amount,
            commitment,
            timestamp: i.timestamp,
        }
    }
}
impl TryFrom<PendingCoinbaseTransactionSql> for PendingCoinbaseTransaction {
    type Error = TransactionStorageError;

    /// Rehydrate a pending coinbase transaction from its Sql row, failing when
    /// the stored bytes do not form a valid commitment.
    fn try_from(i: PendingCoinbaseTransactionSql) -> Result<Self, Self::Error> {
        let commitment = Commitment::from_vec(&i.commitment)
            .map_err(|_| TransactionStorageError::ConversionError)?;
        Ok(Self {
            tx_id: i.tx_id as u64,
            amount: MicroTari::from(i.amount as u64),
            commitment,
            timestamp: i.timestamp,
        })
    }
}
/// A structure to represent a Sql compatible version of the CompletedTransaction struct
#[derive(Clone, Debug, Queryable, Insertable, PartialEq)]
#[table_name = "completed_transactions"]
struct CompletedTransactionSql {
    // Field order must match the `completed_transactions` column order
    // because `Queryable` maps columns positionally.
    tx_id: i64,
    source_public_key: Vec<u8>,      // raw public-key bytes
    destination_public_key: Vec<u8>, // raw public-key bytes
    amount: i64,
    fee: i64,
    transaction_protocol: String, // JSON-serialized transaction
    status: i32,                  // integer discriminant of TransactionStatus
    message: String,
    timestamp: NaiveDateTime,
}
impl CompletedTransactionSql {
    /// Insert this record into the `completed_transactions` table.
    pub fn commit(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        diesel::insert_into(completed_transactions::table)
            .values(self.clone())
            .execute(conn)?;
        Ok(())
    }

    /// Return every completed transaction that has not been cancelled.
    pub fn index(conn: &SqliteConnection) -> Result<Vec<CompletedTransactionSql>, TransactionStorageError> {
        Ok(completed_transactions::table
            .filter(completed_transactions::status.ne(TransactionStatus::Cancelled as i32))
            .load::<CompletedTransactionSql>(conn)?)
    }

    /// Look up a non-cancelled record by its transaction id.
    pub fn find(tx_id: TxId, conn: &SqliteConnection) -> Result<CompletedTransactionSql, TransactionStorageError> {
        Ok(completed_transactions::table
            .filter(completed_transactions::tx_id.eq(tx_id as i64))
            .filter(completed_transactions::status.ne(TransactionStatus::Cancelled as i32))
            .first::<CompletedTransactionSql>(conn)?)
    }

    /// Delete this record, returning `ValuesNotFound` when no row matched.
    pub fn delete(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        let num_deleted =
            diesel::delete(completed_transactions::table.filter(completed_transactions::tx_id.eq(&self.tx_id)))
                .execute(conn)?;
        if num_deleted == 0 {
            return Err(TransactionStorageError::ValuesNotFound);
        }
        Ok(())
    }

    /// Apply the given field updates to this row and return the freshly
    /// re-read record.
    pub fn update(
        &self,
        updated_tx: UpdateCompletedTransaction,
        conn: &SqliteConnection,
    ) -> Result<CompletedTransactionSql, TransactionStorageError>
    {
        let num_updated =
            diesel::update(completed_transactions::table.filter(completed_transactions::tx_id.eq(&self.tx_id)))
                .set(UpdateCompletedTransactionSql::from(updated_tx))
                .execute(conn)?;
        if num_updated == 0 {
            return Err(TransactionStorageError::UnexpectedResult(
                "Database update error".to_string(),
            ));
        }
        // NOTE(review): `find` filters out Cancelled rows, so an update that
        // sets the status to Cancelled will make this re-read fail — confirm
        // callers never pass Cancelled here (they use `cancel` instead).
        Ok(CompletedTransactionSql::find(self.tx_id as u64, conn)?)
    }

    /// Mark this transaction as cancelled via the status flag (the row is kept).
    pub fn cancel(&self, conn: &SqliteConnection) -> Result<(), TransactionStorageError> {
        let num_updated =
            diesel::update(completed_transactions::table.filter(completed_transactions::tx_id.eq(&self.tx_id)))
                .set(UpdateCompletedTransactionSql {
                    status: Some(TransactionStatus::Cancelled as i32),
                    timestamp: None,
                })
                .execute(conn)?;
        if num_updated == 0 {
            return Err(TransactionStorageError::UnexpectedResult(
                "Database update error".to_string(),
            ));
        }
        Ok(())
    }
}
impl TryFrom<CompletedTransaction> for CompletedTransactionSql {
    type Error = TransactionStorageError;

    /// Convert a completed transaction into its Sql row form; the transaction
    /// body is stored as JSON and the status as its integer discriminant.
    fn try_from(c: CompletedTransaction) -> Result<Self, Self::Error> {
        // Serialize first so a failure short-circuits before any field mapping.
        let transaction_protocol = serde_json::to_string(&c.transaction)?;
        let status = c.status as i32;
        Ok(Self {
            tx_id: c.tx_id as i64,
            source_public_key: c.source_public_key.to_vec(),
            destination_public_key: c.destination_public_key.to_vec(),
            amount: u64::from(c.amount) as i64,
            fee: u64::from(c.fee) as i64,
            transaction_protocol,
            status,
            message: c.message,
            timestamp: c.timestamp,
        })
    }
}
impl TryFrom<CompletedTransactionSql> for CompletedTransaction {
    type Error = TransactionStorageError;

    /// Rehydrate a `CompletedTransaction` from its Sql row, deserializing both
    /// public keys, the JSON transaction body, and the status discriminant.
    fn try_from(c: CompletedTransactionSql) -> Result<Self, Self::Error> {
        let source_public_key = PublicKey::from_vec(&c.source_public_key)
            .map_err(|_| TransactionStorageError::ConversionError)?;
        let destination_public_key = PublicKey::from_vec(&c.destination_public_key)
            .map_err(|_| TransactionStorageError::ConversionError)?;
        let status = TransactionStatus::try_from(c.status)?;
        Ok(Self {
            tx_id: c.tx_id as u64,
            source_public_key,
            destination_public_key,
            amount: MicroTari::from(c.amount as u64),
            fee: MicroTari::from(c.fee as u64),
            transaction: serde_json::from_str(&c.transaction_protocol)?,
            status,
            message: c.message,
            timestamp: c.timestamp,
        })
    }
}
/// These are the fields that can be updated for a Completed Transaction
pub struct UpdateCompletedTransaction {
    // New status, or None to leave the stored status unchanged.
    status: Option<TransactionStatus>,
    // New timestamp, or None to leave the stored timestamp unchanged.
    timestamp: Option<NaiveDateTime>,
}
/// Diesel changeset form of `UpdateCompletedTransaction`; `None` fields are
/// omitted from the generated UPDATE statement.
#[derive(AsChangeset)]
#[table_name = "completed_transactions"]
pub struct UpdateCompletedTransactionSql {
    status: Option<i32>,
    timestamp: Option<NaiveDateTime>,
}
/// Map a Rust friendly UpdateCompletedTransaction to the Sql data type form
impl From<UpdateCompletedTransaction> for UpdateCompletedTransactionSql {
    /// Translate the Rust-friendly update into its changeset form, mapping the
    /// status enum onto its i32 column value.
    fn from(u: UpdateCompletedTransaction) -> Self {
        let status = match u.status {
            Some(s) => Some(s as i32),
            None => None,
        };
        Self {
            status,
            timestamp: u.timestamp,
        }
    }
}
#[cfg(test)]
mod test {
    #[cfg(feature = "test_harness")]
    use crate::transaction_service::storage::sqlite_db::UpdateCompletedTransaction;
    use crate::transaction_service::storage::{
        database::{
            CompletedTransaction,
            InboundTransaction,
            OutboundTransaction,
            PendingCoinbaseTransaction,
            TransactionStatus,
        },
        sqlite_db::{
            CompletedTransactionSql,
            InboundTransactionSql,
            OutboundTransactionSql,
            PendingCoinbaseTransactionSql,
        },
    };
    use chrono::Utc;
    use diesel::{Connection, SqliteConnection};
    use rand::rngs::OsRng;
    use std::convert::TryFrom;
    use tari_core::transactions::{
        tari_amount::MicroTari,
        transaction::{OutputFeatures, Transaction, UnblindedOutput},
        transaction_protocol::sender::TransactionSenderMessage,
        types::{CommitmentFactory, CryptoFactories, HashDigest, PrivateKey, PublicKey},
        ReceiverTransactionProtocol,
        SenderTransactionProtocol,
    };
    use tari_crypto::{
        commitment::HomomorphicCommitmentFactory,
        keys::{PublicKey as PublicKeyTrait, SecretKey as SecretKeyTrait},
    };
    use tari_test_utils::random::string;
    use tempdir::TempDir;

    /// Exercises create/read/delete (and, with `test_harness`, update) round
    /// trips for every Sql row type against a freshly migrated sqlite database
    /// living in a temporary directory.
    #[test]
    fn test_crud() {
        let factories = CryptoFactories::default();
        // Fresh, uniquely named database file per run.
        let db_name = format!("{}.sqlite3", string(8).as_str());
        let temp_dir = TempDir::new(string(8).as_str()).unwrap();
        let db_folder = temp_dir.path().to_str().unwrap().to_string();
        let db_path = format!("{}{}", db_folder, db_name);
        embed_migrations!("./migrations");
        let conn = SqliteConnection::establish(&db_path).unwrap_or_else(|_| panic!("Error connecting to {}", db_path));
        embedded_migrations::run_with_output(&conn, &mut std::io::stdout()).expect("Migration failed");
        conn.execute("PRAGMA foreign_keys = ON").unwrap();

        // Build a valid sender protocol to embed inside the outbound transactions.
        let mut builder = SenderTransactionProtocol::builder(1);
        let amount = MicroTari::from(10_000);
        let input = UnblindedOutput::new(MicroTari::from(100_000), PrivateKey::random(&mut OsRng), None);
        builder
            .with_lock_height(0)
            .with_fee_per_gram(MicroTari::from(177))
            .with_offset(PrivateKey::random(&mut OsRng))
            .with_private_nonce(PrivateKey::random(&mut OsRng))
            .with_amount(0, amount)
            .with_message("Yo!".to_string())
            .with_input(
                input.as_transaction_input(&factories.commitment, OutputFeatures::default()),
                input.clone(),
            )
            .with_change_secret(PrivateKey::random(&mut OsRng));
        let stp = builder.build::<HashDigest>(&factories).unwrap();

        let outbound_tx1 = OutboundTransaction {
            tx_id: 1u64,
            destination_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            amount,
            fee: stp.clone().get_fee_amount().unwrap(),
            sender_protocol: stp.clone(),
            status: TransactionStatus::Pending,
            message: "Yo!".to_string(),
            timestamp: Utc::now().naive_utc(),
        };
        let outbound_tx2 = OutboundTransactionSql::try_from(OutboundTransaction {
            tx_id: 2u64,
            destination_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            amount,
            fee: stp.clone().get_fee_amount().unwrap(),
            sender_protocol: stp.clone(),
            status: TransactionStatus::Pending,
            message: "Hey!".to_string(),
            timestamp: Utc::now().naive_utc(),
        })
        .unwrap();
        // `try_from` already yields `OutboundTransactionSql`; the previous
        // `OutboundTransactionSql::from(...)` wrappers were identity conversions.
        OutboundTransactionSql::try_from(outbound_tx1.clone())
            .unwrap()
            .commit(&conn)
            .unwrap();
        outbound_tx2.commit(&conn).unwrap();

        let outbound_txs = OutboundTransactionSql::index(&conn).unwrap();
        assert_eq!(outbound_txs.len(), 2);
        // Round trip: Sql row -> domain type -> Sql row must be lossless.
        let returned_outbound_tx =
            OutboundTransaction::try_from(OutboundTransactionSql::find(1u64, &conn).unwrap()).unwrap();
        assert_eq!(
            OutboundTransactionSql::try_from(returned_outbound_tx).unwrap(),
            OutboundTransactionSql::try_from(outbound_tx1.clone()).unwrap()
        );

        let rtp = ReceiverTransactionProtocol::new(
            TransactionSenderMessage::Single(Box::new(stp.clone().build_single_round_message().unwrap())),
            PrivateKey::random(&mut OsRng),
            PrivateKey::random(&mut OsRng),
            OutputFeatures::default(),
            &factories,
        );
        let inbound_tx1 = InboundTransaction {
            tx_id: 2,
            source_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            amount,
            receiver_protocol: rtp.clone(),
            status: TransactionStatus::Pending,
            message: "Yo!".to_string(),
            timestamp: Utc::now().naive_utc(),
        };
        let inbound_tx2 = InboundTransaction {
            tx_id: 3,
            source_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            amount,
            receiver_protocol: rtp.clone(),
            status: TransactionStatus::Pending,
            message: "Hey!".to_string(),
            timestamp: Utc::now().naive_utc(),
        };
        InboundTransactionSql::try_from(inbound_tx1.clone())
            .unwrap()
            .commit(&conn)
            .unwrap();
        InboundTransactionSql::try_from(inbound_tx2)
            .unwrap()
            .commit(&conn)
            .unwrap();

        let inbound_txs = InboundTransactionSql::index(&conn).unwrap();
        assert_eq!(inbound_txs.len(), 2);
        let returned_inbound_tx =
            InboundTransaction::try_from(InboundTransactionSql::find(2u64, &conn).unwrap()).unwrap();
        assert_eq!(
            InboundTransactionSql::try_from(returned_inbound_tx).unwrap(),
            InboundTransactionSql::try_from(inbound_tx1.clone()).unwrap()
        );

        let tx = Transaction::new(vec![], vec![], vec![], PrivateKey::random(&mut OsRng));
        let completed_tx1 = CompletedTransaction {
            tx_id: 2,
            source_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            destination_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            amount,
            fee: MicroTari::from(100),
            transaction: tx.clone(),
            status: TransactionStatus::Mined,
            message: "Yo!".to_string(),
            timestamp: Utc::now().naive_utc(),
        };
        let completed_tx2 = CompletedTransaction {
            tx_id: 3,
            source_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            destination_public_key: PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)),
            amount,
            fee: MicroTari::from(100),
            transaction: tx.clone(),
            status: TransactionStatus::Broadcast,
            message: "Hey!".to_string(),
            timestamp: Utc::now().naive_utc(),
        };
        CompletedTransactionSql::try_from(completed_tx1.clone())
            .unwrap()
            .commit(&conn)
            .unwrap();
        // Committing the same primary key twice must fail.
        assert!(CompletedTransactionSql::try_from(completed_tx1.clone())
            .unwrap()
            .commit(&conn)
            .is_err());
        CompletedTransactionSql::try_from(completed_tx2.clone())
            .unwrap()
            .commit(&conn)
            .unwrap();

        let completed_txs = CompletedTransactionSql::index(&conn).unwrap();
        assert_eq!(completed_txs.len(), 2);
        let returned_completed_tx =
            CompletedTransaction::try_from(CompletedTransactionSql::find(2u64, &conn).unwrap()).unwrap();
        assert_eq!(
            CompletedTransactionSql::try_from(returned_completed_tx).unwrap(),
            CompletedTransactionSql::try_from(completed_tx1.clone()).unwrap()
        );

        // Deleting twice must fail the second time, and `find` must no longer
        // see the row.
        assert!(InboundTransactionSql::find(inbound_tx1.tx_id, &conn).is_ok());
        InboundTransactionSql::try_from(inbound_tx1.clone())
            .unwrap()
            .delete(&conn)
            .unwrap();
        assert!(InboundTransactionSql::try_from(inbound_tx1.clone())
            .unwrap()
            .delete(&conn)
            .is_err());
        assert!(InboundTransactionSql::find(inbound_tx1.tx_id, &conn).is_err());

        // BUG FIX: this previously checked `inbound_tx1.tx_id` against the
        // outbound table, which only passed because outbound_tx2 happens to
        // share that id; the record under test is outbound_tx1.
        assert!(OutboundTransactionSql::find(outbound_tx1.tx_id, &conn).is_ok());
        OutboundTransactionSql::try_from(outbound_tx1.clone())
            .unwrap()
            .delete(&conn)
            .unwrap();
        assert!(OutboundTransactionSql::try_from(outbound_tx1.clone())
            .unwrap()
            .delete(&conn)
            .is_err());
        assert!(OutboundTransactionSql::find(outbound_tx1.tx_id, &conn).is_err());

        assert!(CompletedTransactionSql::find(completed_tx1.tx_id, &conn).is_ok());
        CompletedTransactionSql::try_from(completed_tx1.clone())
            .unwrap()
            .delete(&conn)
            .unwrap();
        assert!(CompletedTransactionSql::try_from(completed_tx1.clone())
            .unwrap()
            .delete(&conn)
            .is_err());
        assert!(CompletedTransactionSql::find(completed_tx1.tx_id, &conn).is_err());

        let commitment_factory = CommitmentFactory::default();
        let coinbase1 = PendingCoinbaseTransaction {
            tx_id: 44,
            amount: MicroTari::from(5355),
            commitment: commitment_factory.zero(),
            timestamp: Utc::now().naive_utc(),
        };
        PendingCoinbaseTransactionSql::from(coinbase1.clone())
            .commit(&conn)
            .unwrap();
        assert_eq!(
            coinbase1,
            PendingCoinbaseTransaction::try_from(PendingCoinbaseTransactionSql::find(44u64, &conn).unwrap()).unwrap()
        );
        PendingCoinbaseTransactionSql::from(coinbase1.clone())
            .delete(&conn)
            .unwrap();
        assert!(PendingCoinbaseTransactionSql::find(44u64, &conn).is_err());

        #[cfg(feature = "test_harness")]
        let updated_tx = CompletedTransactionSql::find(completed_tx2.tx_id, &conn)
            .unwrap()
            .update(
                UpdateCompletedTransaction {
                    status: Some(TransactionStatus::Mined),
                    timestamp: None,
                },
                &conn,
            )
            .unwrap();
        // Use the enum discriminant instead of the magic number 2.
        #[cfg(feature = "test_harness")]
        assert_eq!(updated_tx.status, TransactionStatus::Mined as i32);
    }
}
|
use errors::*;
use regex;
use std;
use std::io::Read;
use tempfile;
/// A parsed changeset description: a mandatory title plus optional message
/// body, branch name, and pull-request metadata.
pub struct Changeset {
    // First non-comment, non-field line of the description.
    pub title: String,
    // Remaining description lines joined with '\n', if any.
    pub message: Option<String>,
    // Value of the "Branch name:" field, if present.
    pub branch: Option<String>,
    // Canonical PR URL parsed from the "Pull request:" field, if present.
    pub pr: Option<String>,
}
impl Changeset {
    /// Label prefix marking the optional branch-name field of a description.
    const BRANCH_FIELD_LABEL: &'static str = "Branch name:";
    /// Label prefix marking the optional pull-request field of a description.
    const PR_FIELD_LABEL: &'static str = "Pull request:";

    /// Open the user's editor on an empty temporary file and parse whatever
    /// was saved into a `Changeset`.
    ///
    /// The editor is taken from `$VISUAL`, then `$EDITOR`, falling back to
    /// `vi`. Fails if the editor exits unsuccessfully or the file cannot be
    /// read or parsed.
    pub fn new_from_editor(github_owner: &str, github_repo: &str) -> Result<Changeset> {
        let mut tmpfile =
            tempfile::NamedTempFile::new().chain_err(|| "Failed to create new temporary file.")?;
        let editor = std::env::var("VISUAL")
            .or_else(|_| {
                std::env::var("EDITOR").or_else(
                    // Infallible fallback, hence the unwrap below.
                    |_| -> std::result::Result<String, std::env::VarError> { Ok("vi".to_string()) },
                )
            })
            .unwrap();
        let rc = std::process::Command::new(&editor)
            .args(&[tmpfile.path()])
            .status()
            .chain_err(|| {
                format!(
                    "Could not open temporary file '{}' with editor '{}'.",
                    tmpfile.path().to_string_lossy(),
                    editor
                )
            })?;
        if rc.success() {
            let mut buf = String::new();
            tmpfile.read_to_string(&mut buf).chain_err(|| {
                format!(
                    "Could not read contents of temporary file '{}' opened with editor '{}'.",
                    tmpfile.path().to_string_lossy(),
                    editor
                )
            })?;
            Self::new_from_string(&buf, github_owner, github_repo)
        } else {
            match rc.code() {
                Some(code) => bail!(
                    "Editor '{}' exited with code '{}' after opening temporary file '{}'.",
                    editor,
                    code,
                    tmpfile.path().to_string_lossy()
                ),
                None => bail!(
                    "Editor '{}' terminated by signal after opening temporary file '{}'.",
                    editor,
                    tmpfile.path().to_string_lossy()
                ),
            }
        }
    }

    /// Parse a changeset description.
    ///
    /// Blank lines and lines starting with '#' are ignored; at most one
    /// "Branch name:" and one "Pull request:" field are allowed; the first
    /// remaining line becomes the title and every later one the message body.
    pub fn new_from_string(
        string: &str,
        github_owner: &str,
        github_repo: &str,
    ) -> Result<Changeset> {
        let lines = string.lines();
        let mut title = None;
        let mut message = Vec::<&str>::new();
        let mut branch = None;
        let mut pr = None;
        for line in lines {
            match line {
                x if x.is_empty() => continue,
                x if x.starts_with('#') => continue,
                x if x.starts_with(Self::BRANCH_FIELD_LABEL) => match branch {
                    Some(_) => bail!(
                        "Multiple 'Branch name' fields found in changeset description:\n{}",
                        string,
                    ),
                    None => branch = Some(x[Self::BRANCH_FIELD_LABEL.len()..].trim().to_string()),
                },
                x if x.starts_with(Self::PR_FIELD_LABEL) => match pr {
                    Some(_) => bail!(
                        "Multiple 'Pull request' fields found in changeset description:\n{}",
                        string,
                    ),
                    None => match Self::parse_pull_request(
                        &x[Self::PR_FIELD_LABEL.len()..],
                        github_owner,
                        github_repo,
                    ) {
                        Ok(pr_url) => pr = Some(pr_url),
                        Err(_) => bail!(
                            "Could not parse pull request number from 'Pull request' field: '{}'.",
                            x
                        ),
                    },
                },
                // First free-form line is the title; everything after it is body.
                x => match title {
                    Some(_) => message.push(x),
                    None => title = Some(x),
                },
            }
        }
        let title = title
            .ok_or_else(|| {
                format!(
                    "Could not parse title from changeset description:\n{}",
                    string
                )
            })?
            .to_string();
        let message = if message.is_empty() {
            None
        } else {
            Some(message.join("\n"))
        };
        Ok(Changeset {
            title,
            message,
            branch,
            pr,
        })
    }

    /// Normalize a "Pull request" field value — a bare number, '#N', or an
    /// http(s) GitHub PR URL for this repository — to the canonical https URL.
    fn parse_pull_request(string: &str, github_owner: &str, github_repo: &str) -> Result<String> {
        // BUG FIX: escape the owner/repo before interpolating them into the
        // pattern so regex metacharacters in repository names (e.g. '.') cannot
        // broaden or break the match.
        let owner = regex::escape(github_owner);
        let repo = regex::escape(github_repo);
        let pattern = format!(
            r"^\s*(https://github.com/{0}/{1}/pull/|http://github.com/{0}/{1}/pull/|#)?(?P<pr_number>[0-9]+)\s*$",
            owner, repo,
        );
        let re =
            regex::Regex::new(&pattern).chain_err(|| "Could not construct pull request regex.")?;
        let captures = re.captures(string).ok_or_else(|| {
            format!(
                "Could not extract pull request number in 'Pull request' field: '{}'.",
                string
            )
        })?;
        let pr_number = captures
            .name("pr_number")
            .ok_or_else(|| {
                format!(
                    "Could not find pull request number in 'Pull request' field: '{}'.",
                    string
                )
            })?
            .as_str();
        let pr_number = pr_number.parse::<u64>().chain_err(|| {
            format!(
                "Could not parse pull request number from 'Pull request' field: '{}'.",
                pr_number
            )
        })?;
        Ok(format!(
            "https://github.com/{}/{}/pull/{}",
            github_owner, github_repo, pr_number,
        ))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Representative description exercising comments, title, multi-line body,
    // and both metadata fields at once.
    const MESSAGE_FIXTURE: &str = indoc!(
        "
        # First comment
        This is the title.
        # Another comment
        This is the first line of the description.
        # This is a comment in the middle of the description
        This is the second line of the description.
        Branch name: hello
        Pull request: https://github.com/Coneko/stack/pull/4
        "
    );

    // An empty description has no title, so parsing must fail.
    #[test]
    fn new_from_string_cannot_create_from_empty_string() {
        let result = Changeset::new_from_string("", "Coneko", "stack");
        assert!(result.is_err());
    }

    // Only comments and metadata fields — still no title, so parsing fails.
    #[test]
    fn new_from_string_cannot_create_from_string_without_title() {
        let message = indoc!(
            "
            # comment
            Branch name: hello
            Pull request: https://github.com/Coneko/stack/pull/1
            "
        );
        let result = Changeset::new_from_string(message, "Coneko", "stack");
        assert!(result.is_err());
    }

    // A lone title is the minimal valid description.
    #[test]
    fn new_from_string_can_create_from_string_with_only_title() {
        let message = indoc!(
            "
            This is the title.
            "
        );
        let result = Changeset::new_from_string(message, "Coneko", "stack");
        assert!(result.is_ok());
        let result = result.unwrap();
        assert_eq!(result.title, "This is the title.");
    }

    #[test]
    fn new_from_string_can_read_title() {
        let result = Changeset::new_from_string(MESSAGE_FIXTURE, "Coneko", "stack");
        assert!(result.is_ok());
        let result = result.unwrap();
        assert_eq!(result.title, "This is the title.")
    }

    // Body lines are joined with '\n'; comment lines in between are dropped.
    #[test]
    fn new_from_string_can_read_message() {
        let result = Changeset::new_from_string(MESSAGE_FIXTURE, "Coneko", "stack");
        assert!(result.is_ok());
        let result = result.unwrap();
        assert!(result.message.is_some());
        let message = result.message.unwrap();
        assert_eq!(
            message,
            "This is the first line of the description.\nThis is the second line of the description.",
        );
    }

    #[test]
    fn new_from_string_can_read_branch() {
        let result = Changeset::new_from_string(MESSAGE_FIXTURE, "Coneko", "stack");
        assert!(result.is_ok());
        let result = result.unwrap();
        assert!(result.branch.is_some());
        let branch = result.branch.unwrap();
        assert_eq!(branch, "hello".to_string());
    }

    #[test]
    fn new_from_string_can_read_pr() {
        let result = Changeset::new_from_string(MESSAGE_FIXTURE, "Coneko", "stack");
        assert!(result.is_ok());
        let result = result.unwrap();
        assert!(result.pr.is_some());
        let pr = result.pr.unwrap();
        assert_eq!(pr, "https://github.com/Coneko/stack/pull/4");
    }

    // Duplicate "Pull request:" fields are rejected with a "Multiple" error.
    #[test]
    fn new_from_string_cannot_create_from_string_with_multiple_pr_fields() {
        let message = indoc!(
            "
            This is the title.
            Pull request: https://github.com/Coneko/stack/pull/1
            This is the longer description of the commit.
            Pull request: https://github.com/Coneko/stack/pull/1
            "
        );
        let result = Changeset::new_from_string(message, "Coneko", "stack");
        assert!(result.is_err());
        let result = result.err().unwrap();
        assert!(result.description().contains("Multiple"));
    }

    #[test]
    fn new_from_string_cannot_create_from_string_with_invalid_pr_field() {
        let message = indoc!(
            "
            This is the title.
            Pull request: hello
            "
        );
        let result = Changeset::new_from_string(message, "Coneko", "stack");
        assert!(result.is_err());
        let result = result.err().unwrap();
        assert!(result.description().contains("Could not parse"));
    }

    #[test]
    fn parse_pull_request_cannot_parse_pr_from_empty_string() {
        let result = Changeset::parse_pull_request("", "Coneko", "stack");
        assert!(result.is_err());
    }

    #[test]
    fn parse_pull_request_cannot_parse_invalid_pr_field() {
        let result = Changeset::parse_pull_request("not a valid PR reference", "Coneko", "stack");
        assert!(result.is_err());
    }

    // All accepted spellings normalize to the canonical https URL.
    #[test]
    fn parse_pull_request_can_parse_number() {
        let result = Changeset::parse_pull_request("1", "Coneko", "stack");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "https://github.com/Coneko/stack/pull/1");
    }

    #[test]
    fn parse_pull_request_can_parse_pr_reference() {
        let result = Changeset::parse_pull_request("#1", "Coneko", "stack");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "https://github.com/Coneko/stack/pull/1");
    }

    #[test]
    fn parse_pull_request_can_parse_https_url() {
        let result = Changeset::parse_pull_request(
            "https://github.com/Coneko/stack/pull/1",
            "Coneko",
            "stack",
        );
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "https://github.com/Coneko/stack/pull/1");
    }

    #[test]
    fn parse_pull_request_can_parse_http_url() {
        let result = Changeset::parse_pull_request(
            "http://github.com/Coneko/stack/pull/1",
            "Coneko",
            "stack",
        );
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "https://github.com/Coneko/stack/pull/1");
    }
}
|
use chrono::{TimeZone, Utc};
use jenkins_api::{
build::{BuildStatus, CommonBuild},
client::{Path, TreeBuilder},
Jenkins, JenkinsBuilder,
};
use serde::Deserialize;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// A job branch paired with its most recent build, if it has ever been built.
#[derive(Clone)]
struct BuildInfo {
    #[allow(dead_code)] // We currently don't use the job_name
    branch_name: String,
    // `None` when the branch has no builds at all.
    last_build: Option<CommonBuild>,
}
/// Minimal view of a Jenkins job as returned by the remote API.
#[derive(Deserialize, Clone)]
struct JenkinsJob {
    name: String,
    url: String,
}
/// Wrapper matching the `jobs` array of the Jenkins root object.
#[derive(Deserialize)]
struct JenkinsJobs {
    jobs: Vec<JenkinsJob>,
}
/// Fetch the named branch's job and, when it has a last build, resolve that
/// build to its full representation.
fn build_info(jenkins: &Jenkins, branch: &JenkinsJob) -> Result<BuildInfo> {
    let job = jenkins.get_job(&branch.name)?;
    // transpose() turns Option<Result<..>> into Result<Option<..>> so `?`
    // can propagate a fetch failure while `None` stays `None`.
    let last_build = job
        .last_build
        .map(|b| b.get_full_build(jenkins))
        .transpose()?;
    Ok(BuildInfo {
        branch_name: job.name,
        last_build,
    })
}
/// List all jobs on the Jenkins server, asking only for their names and URLs.
fn get_jobs(jenkins: &Jenkins) -> Result<Vec<JenkinsJob>> {
    let tree = TreeBuilder::object("jobs")
        .with_subfield("name")
        .with_subfield("url")
        .build();
    let response: JenkinsJobs = jenkins.get_object_as(Path::Home, tree)?;
    Ok(response.jobs)
}
/// Render a Jenkins build timestamp (milliseconds since the Unix epoch) as a
/// UTC date-time string.
fn format_timestamp(milliseconds: u64) -> String {
    let when = Utc.timestamp_millis(milliseconds as i64);
    when.to_string()
}
/// Print a one-line freshness report for a job based on its most recent
/// successful main/master build (or lack thereof).
fn report_on_job(job: JenkinsJob, last_success: Option<CommonBuild>) {
    println!("{}", job.name);
    match last_success {
        // No successful build found on any candidate branch.
        None => println!(" Warning: Last main branch build failed."),
        Some(i)
            if (i.timestamp as i64) >= Utc.ymd(2021, 1, 1).and_hms(0, 0, 0).timestamp_millis()
                && (i.timestamp as i64)
                    < Utc.ymd(2022, 1, 1).and_hms(0, 0, 0).timestamp_millis() =>
        {
            println!(" Built in 2021: {}", format_timestamp(i.timestamp))
        }
        // BUG FIX: this guard previously tested `< 2022-01-01`, which (after
        // the 2021 arm above) matched only pre-2021 builds yet labelled them
        // "Built in 2022". Builds from 2022 onwards belong here; anything
        // older falls through to the catch-all below.
        Some(i)
            if (i.timestamp as i64) >= Utc.ymd(2022, 1, 1).and_hms(0, 0, 0).timestamp_millis() =>
        {
            println!(" Built in 2022: {}", format_timestamp(i.timestamp))
        }
        _ => println!(" Not built."),
    }
}
const BRANCHES: [&str; 2] = ["main", "master"];
/// For every job on the Jenkins server at `$JENKINS_URL`, find the most recent
/// successful build across the candidate default branches and report on it.
fn main() -> Result<()> {
    let jenkins = JenkinsBuilder::new(&std::env::var("JENKINS_URL")?).build()?;
    let jobs = get_jobs(&jenkins)?;
    for job in jobs {
        // We want the branch at $JENKINS_URL/job/<job_name>/job/<branch_name>,
        // but get_object_as does not support getting sub-objects (using name: <job_name>/job/<branch_name> sanitises the / to %2F which does not work).
        // So we need to build a new root for getting branches
        let job_jenkins = JenkinsBuilder::new(&job.url).build()?;
        let mut last_success = None;
        for branch_name in &BRANCHES {
            // Synthetic job record; only the name matters for the lookup.
            let branch = JenkinsJob {
                name: branch_name.to_string(),
                url: "".to_string(),
            };
            // A branch that does not exist simply yields Err and is skipped.
            if let Ok(build_info) = build_info(&job_jenkins, &branch) {
                match build_info.last_build {
                    None => {}
                    Some(ref info) => match info.result {
                        Some(BuildStatus::Success) => {
                            // Keep whichever successful build is newest across branches.
                            last_success = match last_success {
                                Some(last) => {
                                    Some(std::cmp::max_by_key(last, info.clone(), |t| t.timestamp))
                                }
                                None => Some(info.clone()),
                            };
                        }
                        _ => {}
                    },
                };
            }
        }
        report_on_job(job, last_success);
    }
    Ok(())
}
|
use crate::interface::{
BlockHeight, BlockTimestamp, ContractBalances, ContractFinancials, EarningsDistribution,
};
//required in order for near_bindgen macro to work outside of lib.rs
use crate::config::CONTRACT_MIN_OPERATIONAL_BALANCE;
use crate::near::log;
use crate::*;
use near_sdk::near_bindgen;
#[near_bindgen]
impl ContractFinancials for Contract {
    /// Returns a snapshot of every tracked contract balance together with the
    /// block height and timestamp at which the snapshot was taken.
    fn balances(&self) -> ContractBalances {
        ContractBalances {
            total_contract_balance: env::account_balance().into(),
            total_contract_storage_usage_cost: self.total_contract_storage_usage_cost().into(),
            total_available_balance: self.total_available_balance().into(),
            total_user_accounts_balance: self.total_user_accounts_balance().into(),
            customer_batched_stake_deposits: self.customer_batched_stake_deposits().into(),
            total_available_unstaked_near: self.total_near.amount().into(),
            near_liquidity_pool: self.near_liquidity_pool.into(),
            total_account_storage_escrow: self.total_account_storage_escrow.into(),
            contract_owner_storage_usage_cost: self.contract_owner_storage_usage_cost().into(),
            contract_owner_available_balance: self.owner_available_balance().into(),
            contract_owner_balance: self.contract_owner_balance.into(),
            contract_earnings: self.contract_earnings().into(),
            contract_owner_earnings: self.contract_owner_earnings().into(),
            user_accounts_earnings: self.user_accounts_earnings().into(),
            collected_earnings: self.collected_earnings.into(),
            contract_required_operational_balance: CONTRACT_MIN_OPERATIONAL_BALANCE.into(),
            block_height: BlockHeight(env::block_index().into()),
            block_timestamp: BlockTimestamp(env::block_timestamp().into()),
        }
    }

    /// Adds the NEAR attached to this call to the collected-earnings pool and
    /// returns the pool's new total.
    ///
    /// BUG FIX: this previously added `env::account_balance()` — the entire
    /// contract account balance, which includes all user and owner funds — to
    /// `collected_earnings` on every call, massively double counting. A
    /// `#[payable]` deposit must only collect the attached deposit; this also
    /// keeps `contract_earnings()`'s balance decomposition consistent.
    #[payable]
    fn deposit_earnings(&mut self) -> interface::YoctoNear {
        *self.collected_earnings += env::attached_deposit();
        self.collected_earnings.into()
    }
}
impl Contract {
    /// Cost of the contract's current storage usage at the configured
    /// per-byte rate.
    pub fn total_contract_storage_usage_cost(&self) -> YoctoNear {
        (env::storage_usage() as u128 * self.config.storage_cost_per_byte().value()).into()
    }

    /// Account balance left over after reserving the storage usage cost.
    pub fn total_available_balance(&self) -> YoctoNear {
        (env::account_balance() - self.total_contract_storage_usage_cost().value()).into()
    }

    /// Sum of deposits sitting in the current and next stake batches.
    pub fn customer_batched_stake_deposits(&self) -> YoctoNear {
        (self
            .stake_batch
            .map_or(0, |batch| batch.balance().amount().value())
            + self
                .next_stake_batch
                .map_or(0, |batch| batch.balance().amount().value()))
        .into()
    }

    /// Total NEAR attributable to user accounts: batched stake deposits plus
    /// unstaked NEAR, the liquidity pool, and storage escrow.
    pub fn total_user_accounts_balance(&self) -> YoctoNear {
        (self.customer_batched_stake_deposits().value()
            + self.total_near.amount().value()
            + self.near_liquidity_pool.value()
            + self.total_account_storage_escrow.value())
        .into()
    }

    /// returns how much gas rewards the contract has accumulated
    ///
    /// Computed as the part of the account balance not attributable to the
    /// owner, the users, or already-collected earnings (saturating at zero).
    pub fn contract_earnings(&self) -> YoctoNear {
        env::account_balance()
            .saturating_sub(self.contract_owner_balance.value())
            .saturating_sub(self.total_user_accounts_balance().value())
            .saturating_sub(self.collected_earnings.value())
            .into()
    }

    /// Gas-reward earnings plus explicitly collected earnings.
    pub fn total_earnings(&self) -> YoctoNear {
        self.contract_earnings() + self.collected_earnings
    }

    /// percentage of earnings from contract gas rewards and collected earnings that are allotted to
    /// the contract owner
    pub fn contract_owner_earnings(&self) -> YoctoNear {
        self.contract_owner_share(self.total_earnings())
    }

    /// The owner's configured percentage cut of `amount`.
    // NOTE(review): dividing by 100 before multiplying truncates up to 99
    // yocto per distribution — presumably acceptable dust; confirm this
    // ordering is deliberate (it also avoids any overflow concern).
    fn contract_owner_share(&self, amount: YoctoNear) -> YoctoNear {
        let contract_owner_earnings_percentage =
            self.config.contract_owner_earnings_percentage() as u128;
        (amount.value() / 100 * contract_owner_earnings_percentage).into()
    }

    /// Earnings remainder allotted to user accounts.
    pub fn user_accounts_earnings(&self) -> YoctoNear {
        self.total_earnings() - self.contract_owner_earnings()
    }

    /// Storage cost attributed to the contract's initial storage footprint.
    pub fn contract_owner_storage_usage_cost(&self) -> YoctoNear {
        (self.contract_initial_storage_usage.value() as u128
            * self.config.storage_cost_per_byte().value())
        .into()
    }

    /// Owner balance that can be withdrawn: everything above the storage cost
    /// plus the minimum operational reserve, or zero if below the reserve.
    pub fn owner_available_balance(&self) -> YoctoNear {
        let balance = self.contract_owner_balance - self.contract_owner_storage_usage_cost();
        if balance > CONTRACT_MIN_OPERATIONAL_BALANCE {
            balance - CONTRACT_MIN_OPERATIONAL_BALANCE
        } else {
            0.into()
        }
    }

    /// Distribute accumulated earnings: the owner's share is added to the
    /// owner balance, the users' share to the NEAR liquidity pool, and the
    /// collected-earnings pool is then reset. Emits an `EarningsDistribution`
    /// log event.
    pub fn distribute_earnings(&mut self) {
        let contract_owner_earnings = self.contract_owner_earnings();
        let user_accounts_earnings = self.user_accounts_earnings();
        self.contract_owner_balance = self
            .contract_owner_balance
            .saturating_add(contract_owner_earnings.value())
            .into();
        // funds added to liquidity pool distributes earnings to the user
        self.near_liquidity_pool = self
            .near_liquidity_pool
            .saturating_add(user_accounts_earnings.value())
            .into();
        // collected earnings have been distributed
        self.collected_earnings = 0.into();
        log(EarningsDistribution {
            contract_owner_earnings: contract_owner_earnings.into(),
            user_accounts_earnings: user_accounts_earnings.into(),
        })
    }
}
|
/*
A few simple abstractions for distributed communication over a network.
*/
use super::util::sleep_for_secs;
use std::io::{Read, Write};
use std::net::{Ipv4Addr, SocketAddrV4, TcpListener, TcpStream};
use std::str;
/// Resolve a host string to an IPv4 address.
///
/// "localhost" (any casing) maps to 127.0.0.1; anything else must parse as a
/// dotted-quad IPv4 address, otherwise we panic with a descriptive message.
fn parse_host(host: &str) -> Ipv4Addr {
    if host.to_lowercase() == "localhost" {
        return Ipv4Addr::LOCALHOST;
    }
    host.parse()
        .unwrap_or_else(|err| panic!("Unable to parse '{}' as an IPV4 address: {}", host, err))
}
/// Build an IPv4 socket address from a host string and a port.
fn socket(host: &str, port: u16) -> SocketAddrV4 {
    let ip = parse_host(host);
    SocketAddrV4::new(ip, port)
}
// Handshake: one-shot barrier between two distributed processes.
// Call handshake(s0, true) on the listening side and handshake(s0, false) on
// the sending side; both calls return once a single byte has been transferred.
// Seconds the sender waits between connection attempts while the listener is
// not yet up.
const HANDSHAKE_SLEEP: u64 = 2;
pub fn handshake(s0: SocketAddrV4, listener: bool) {
    if listener {
        /* Handshake listener: bind, accept one connection, read one message */
        // println!("[listener] initializing...");
        let listener = TcpListener::bind(s0).unwrap_or_else(|err| {
            panic!("Failed to start TCP connection at {:?}: {}", s0, err);
        });
        // println!("[listener] waiting...");
        // Blocks until the sender connects; `incoming()` never yields None.
        let stream = listener.incoming().next().unwrap_or_else(|| {
            panic!("Failed to get stream using TCP (got None) at {:?}", s0);
        });
        // println!("[listener] reading...");
        let mut data = [0_u8; 50];
        let _msg = stream
            .unwrap_or_else(|err| {
                panic!("Listener failed to get message from stream: {}", err);
            })
            .read(&mut data)
            .unwrap_or_else(|err| {
                panic!("Listener failed to read message from stream: {}", err);
            });
        // println!("[listener] got: {}", msg);
        // println!("[listener] handshake complete");
    } else {
        /* Handshake sender: retry connecting until the listener is up, then
        write one byte */
        loop {
            // println!("[sender] waiting...");
            if let Ok(mut stream) = TcpStream::connect(s0) {
                // println!("[sender] writing...");
                stream.write_all(&[1]).unwrap();
                break;
            } else {
                // println!("[sender] sleeping for {}s", HANDSHAKE_SLEEP);
                sleep_for_secs(HANDSHAKE_SLEEP);
            }
        }
        // println!("[sender] handshake complete")
    }
}
// Barrier between n distributed processes
// uses host 'host' on process 0
// precondition: the range of ports [port + 1, port + 2 * num_nodes) is unique
// for each call
// uses port port + i to communicate between host (index 0) and index i
/// Barrier between `num_nodes` distributed processes.
///
/// Node 0 (running on `host`) acts as the listener and handshakes with every
/// other node; the other nodes act as senders. Two phases of handshakes are
/// performed — presumably a double-barrier so no node can leave before all
/// have arrived (TODO confirm).
///
/// Precondition (per the comment above): the port range
/// [start_port + 1, start_port + 2 * num_nodes) is unique for each call;
/// node `i` in phase `p` uses port start_port + num_nodes * p + i.
pub fn barrier(host: &str, num_nodes: u64, this_node: u64, start_port: u16) {
    assert!(this_node < num_nodes);
    for phase in 0..2 {
        // println!(
        //     "[node {}/{}] barrier phase {}",
        //     this_node, num_nodes, phase + 1
        // );
        if this_node == 0 {
            /* Listener */
            // Handshake with every other node in turn, each on its own port.
            for i in 1..num_nodes {
                // println!(
                //     "[node {}/{}] listening for handshake from {}",
                //     this_node, num_nodes, i
                // );
                let socket0 = socket(
                    host,
                    start_port + (num_nodes as u16) * phase + (i as u16),
                );
                handshake(socket0, true);
            }
        } else {
            /* Sender */
            // println!("[node {}/{}] sending handshake", this_node, num_nodes);
            let socket0 = socket(
                host,
                start_port + (num_nodes as u16) * phase + (this_node as u16),
            );
            handshake(socket0, false);
        }
    }
    // println!("[node {}/{}] barrier complete", this_node, num_nodes);
}
|
pub struct Solution;

impl Solution {
    /// LeetCode 332 "Reconstruct Itinerary".
    ///
    /// Finds the lexically smallest Eulerian path starting at "JFK" that
    /// uses every ticket exactly once, via Hierholzer's algorithm: walk
    /// greedily until stuck, peel dead-end vertices onto `tail`, and stitch
    /// the reversed tail back on at the end.
    pub fn find_itinerary(tickets: Vec<Vec<String>>) -> Vec<String> {
        use std::collections::HashMap;
        // Number of edges (tickets) not yet used.
        let mut count = tickets.len();
        let mut nexts: HashMap<String, Vec<String>> = HashMap::new();
        for mut ticket in tickets {
            let to = ticket.pop().unwrap();
            let from = ticket.pop().unwrap();
            nexts.entry(from).or_default().push(to);
        }
        // Sort destinations in descending order so pop() always yields the
        // lexically smallest remaining destination.
        for v in nexts.values_mut() {
            v.sort_unstable_by(|a, b| b.cmp(a));
        }
        let mut head = vec!["JFK".to_string()];
        let mut tail = Vec::new();
        loop {
            // Greedy walk: follow the smallest unused edge until stuck.
            while let Some(next) = nexts.get_mut(head.last().unwrap()).and_then(|v| v.pop()) {
                count -= 1;
                head.push(next);
            }
            if count == 0 {
                break;
            }
            // Dead end with edges remaining: move exhausted vertices onto
            // the tail, then resume the walk from an earlier vertex.
            while nexts
                .get(head.last().unwrap())
                .map_or(true, |v| v.is_empty())
            {
                tail.push(head.pop().unwrap());
            }
        }
        // The tail holds the end of the itinerary in reverse order.
        while let Some(next) = tail.pop() {
            head.push(next);
        }
        head
    }
}
#[test]
fn test0332() {
    // Table-driven: (tickets as [from, to] pairs, expected itinerary).
    let cases: &[(&[[&str; 2]], &[&str])] = &[
        (
            &[
                ["MUC", "LHR"],
                ["JFK", "MUC"],
                ["SFO", "SJC"],
                ["LHR", "SFO"],
            ],
            &["JFK", "MUC", "LHR", "SFO", "SJC"],
        ),
        (
            &[
                ["JFK", "SFO"],
                ["JFK", "ATL"],
                ["SFO", "ATL"],
                ["ATL", "JFK"],
                ["ATL", "SFO"],
            ],
            &["JFK", "ATL", "JFK", "SFO", "ATL", "SFO"],
        ),
        (
            &[
                ["JFK", "AAA"],
                ["AAA", "CCC"],
                ["CCC", "AAA"],
                ["AAA", "BBB"],
            ],
            &["JFK", "AAA", "CCC", "AAA", "BBB"],
        ),
    ];
    for (tickets, want) in cases {
        let tickets: Vec<Vec<String>> = tickets
            .iter()
            .map(|&[from, to]| vec![from.to_string(), to.to_string()])
            .collect();
        let want: Vec<String> = want.iter().map(|&s| s.to_string()).collect();
        assert_eq!(Solution::find_itinerary(tickets), want);
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_catalog::table_args::TableArgs;
use common_exception::Result;
use common_expression::Scalar;
use databend_query::table_functions::GPT2SQLTable;
#[test]
fn test_ai_to_sql_args() -> Result<()> {
    // Helper: wrap a &str as a positional string scalar argument.
    let arg = |s: &str| Scalar::String(s.to_string().into_bytes());

    // 1 arg (prompt only) — accepted.
    let tbl_args = TableArgs::new_positioned(vec![arg("prompt")]);
    let _ = GPT2SQLTable::create("system", "ai_to_sql", 1, tbl_args)?;

    // 2 args (prompt + api key) — accepted.
    let tbl_args = TableArgs::new_positioned(vec![arg("prompt"), arg("api-key")]);
    let _ = GPT2SQLTable::create("system", "ai_to_sql", 1, tbl_args)?;

    // 3 args — one too many, must be rejected.
    let tbl_args = TableArgs::new_positioned(vec![arg("prompt"), arg("api-key"), arg("3rd")]);
    let result = GPT2SQLTable::create("system", "ai_to_sql", 1, tbl_args);
    assert!(result.is_err());

    Ok(())
}
|
use error::*;
use byteorder::{WriteBytesExt, LittleEndian, BigEndian};
use std::any::Any;
// Implements `Serialize` for `$type` by delegating to `$write`, a
// `byteorder::WriteBytesExt` method given as a path such as
// `WriteBytesExt::write_i32<LittleEndian>`. The generic argument without a
// turbofish is accepted because `$write` is captured as a `path` fragment
// and substituted whole into call position — do not "fix" the call sites.
macro_rules! impl_serialize {
    ($type:path, $write:path) => {
        impl Serialize for $type {
            #[inline]
            fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
                $write(buffer, *self)?;
                Ok(())
            }
        }
    };
}
/// A value that can be appended to a byte buffer in its wire format.
pub trait Serialize {
    /// Serialize to the passed buffer.
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()>;
}
impl Serialize for bool {
    /// Booleans serialize as one of two 32-bit little-endian magic values
    /// (presumably constructor ids for true/false — TODO confirm against
    /// the wire protocol).
    #[inline]
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        let magic: i32 = if *self { -1720552011 } else { -1132882121 };
        buffer.write_i32::<LittleEndian>(magic)?;
        Ok(())
    }
}
impl Serialize for i8 {
    /// A single byte: the value's two's-complement bit pattern.
    #[inline]
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        let byte = *self as u8;
        buffer.push(byte);
        Ok(())
    }
}
impl Serialize for u8 {
    /// A single byte, appended as-is.
    #[inline]
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        buffer.extend_from_slice(&[*self]);
        Ok(())
    }
}
// Fixed-width integers and floats serialize as little-endian.
impl_serialize!(i16, WriteBytesExt::write_i16<LittleEndian>);
impl_serialize!(i32, WriteBytesExt::write_i32<LittleEndian>);
impl_serialize!(i64, WriteBytesExt::write_i64<LittleEndian>);
impl_serialize!(u16, WriteBytesExt::write_u16<LittleEndian>);
impl_serialize!(u32, WriteBytesExt::write_u32<LittleEndian>);
impl_serialize!(u64, WriteBytesExt::write_u64<LittleEndian>);
impl_serialize!(f32, WriteBytesExt::write_f32<LittleEndian>);
impl_serialize!(f64, WriteBytesExt::write_f64<LittleEndian>);
impl Serialize for i128 {
    #[inline]
    // Reinterpret the bits as u128 and reuse that serialization.
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        (*self as u128).serialize_to(buffer)
    }
}
impl Serialize for u128 {
    #[inline]
    // NOTE(review): unlike the smaller integers above, this writes the value
    // BIG-endian (high 64 bits first, each word big-endian). Confirm this
    // asymmetry matches what the deserializer expects.
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        buffer.write_u64::<BigEndian>((*self >> 64) as u64)?;
        buffer.write_u64::<BigEndian>(*self as u64)?;
        Ok(())
    }
}
impl Serialize for String {
    /// Serialize a length-prefixed string: a 1-byte length for L <= 253, or
    /// the marker byte 254 plus a 3-byte little-endian length otherwise,
    /// followed by the string bytes and 0-3 zero bytes of padding so that
    /// the TOTAL serialized length (prefix included) is divisible by 4.
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        let len = self.len();
        // Track how many prefix bytes were written — the padding must make
        // prefix + string a multiple of 4, per the format description below.
        let prefix_len = if len <= 253 {
            // If L <= 253, the serialization contains one byte with the value of L,
            // then L bytes of the string followed by 0 to 3 characters containing 0,
            // such that the overall length of the value be divisible by 4,
            // whereupon all of this is interpreted as a sequence
            // of int(L/4)+1 32-bit little-endian integers.
            buffer.push(len as u8);
            1
        } else {
            // If L >= 254, the serialization contains byte 254, followed by 3
            // bytes with the string length L in little-endian order, followed by L
            // bytes of the string, further followed by 0 to 3 null padding bytes.
            buffer.push(254);
            buffer.write_uint::<LittleEndian>(len as u64, 3)?;
            4
        };
        // Write each character in the string
        buffer.extend(self.as_bytes());
        // Pad so prefix + string + padding is divisible by 4. (The previous
        // code padded on `len % 4` alone, which mis-pads the short form where
        // the single length byte counts toward the overall length.)
        let rem = (prefix_len + len) % 4;
        if rem > 0 {
            for _ in 0..(4 - rem) {
                buffer.push(0);
            }
        }
        Ok(())
    }
}
impl<T: Serialize> Serialize for Vec<T> {
    /// Serialize a vector: the vector constructor id, the element count as a
    /// 32-bit little-endian integer, then each element in order.
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        // Write type identifier (for Vec)
        buffer.write_u32::<LittleEndian>(0x1cb5c415u32)?;
        // Write the NUMBER OF ELEMENTS. (The previous code wrote
        // `buffer.len()` — the byte length of the output accumulated so far —
        // which is never the element count.)
        buffer.write_u32::<LittleEndian>(self.len() as u32)?;
        // Write elements
        for element in self {
            // FIXME: Ensure vector elements are serialized as bare types
            element.serialize_to(buffer)?;
        }
        Ok(())
    }
}
impl Serialize for Box<Any> {
    // Dynamic escape hatch: only succeeds when the boxed `Any` actually
    // holds a `Box<Serialize>` trait object; any other payload panics.
    fn serialize_to(&self, buffer: &mut Vec<u8>) -> Result<()> {
        match self.downcast_ref::<Box<Serialize>>() {
            Some(as_ser) => as_ser.serialize_to(buffer),
            None => {
                // FIXME: Return an error
                panic!("Serialize not implemented")
            }
        }
    }
}
|
mod thread_pool;
use std::net::{TcpListener, TcpStream};
use std::io::{Read, Write};
use std::time::Duration;
use std::sync::{Arc, RwLock};
/// Accept TCP connections on localhost:7878 and hand each one to a
/// four-worker thread pool for handling.
fn main() {
    let tcp_listener = TcpListener::bind("localhost:7878")
        .unwrap_or_else(|err| {
            eprintln!("Error occurred: {}", err);
            std::process::exit(1);
        });
    let thread_pool = thread_pool::ThreadPool::new(4).unwrap();
    for stream in tcp_listener.incoming() {
        if let Ok(stream) = stream {
            stream.set_nodelay(true).expect("Set no-delay failed.");
            let stream = Arc::new(RwLock::new(stream));
            let cloned_stream = Arc::clone(&stream);
            // `move` is required so the worker thread owns its Arc clone; a
            // borrowed capture would not satisfy the 'static bound a pool
            // thread needs.
            thread_pool.execute(move || {
                handle_connection(cloned_stream);
            });
            // NOTE(review): this read() lock can block until the handler
            // releases its write lock (up to 5s for /sleep) — confirm this
            // serialization is intended.
            println!("Connection established: {}", stream.read().unwrap().peer_addr().unwrap());
        } else {
            eprintln!("Connection failed.");
        }
    }
}
/// Read one HTTP request from the stream and write a minimal response.
///
/// Routes: "GET /" serves hello.html, "GET /sleep" serves the same after a
/// 5-second delay, anything else gets the 404 page. Holds the stream's
/// write lock for the whole exchange.
fn handle_connection(stream: Arc<RwLock<TcpStream>>) {
    let mut buffer = [0u8; 512];
    let mut stream = stream.write().unwrap();
    match stream.read(&mut buffer) {
        Ok(_) => {
            let get_root = b"GET / HTTP/1.1\r\n";
            let get_sleep = b"GET /sleep HTTP/1.1\r\n";
            let (status_line, filename) = if buffer.starts_with(get_root) {
                ("HTTP/1.1 200 OK\r\n\r\n", "hello.html")
            } else if buffer.starts_with(get_sleep) {
                std::thread::sleep(Duration::from_secs(5));
                ("HTTP/1.1 200 OK\r\n\r\n", "hello.html")
            } else {
                ("HTTP/1.1 404 NOT FOUND\r\n\r\n", "404.html")
            };
            let contents = std::fs::read_to_string(filename).unwrap();
            let response = format!("{}{}", status_line, contents);
            // write_all guarantees the whole response is sent; a bare
            // write() may perform a partial write and silently drop the rest.
            stream.write_all(response.as_bytes()).unwrap();
            stream.flush().unwrap();
        }
        Err(err) => {
            eprintln!("Error occurred {}", err);
        }
    }
}
extern crate genetic_bf;
extern crate clap;
extern crate serde_yaml;
use std::fs::File;
use std::io::{Read, Error, ErrorKind, stdin, stdout};
use clap::{Arg, App};
use genetic_bf::{Config, VM, VMResult, generate_program};
fn main() {
let args = App::new("genetic-bf")
.version("0.1.0")
.author("Brett Patterson")
.about("An attempt at using genetic algorithms to discover Brainfuck programs")
.arg(Arg::with_name("run")
.short("r")
.long("run")
.help("Run the provided Brainfuck program"))
.arg(Arg::with_name("file")
.help("The input configuration or Brainfuck program")
.required(true))
.get_matches();
let file = args.value_of("file")
.ok_or(Error::new(ErrorKind::InvalidInput, "No file specified"))
.and_then(File::open);
match file {
Ok(file) => {
if args.is_present("run") {
let prog: Vec<u8> = file.bytes().collect::<Result<Vec<u8>, Error>>().unwrap();
let mut vm = VM::new(prog, stdin(), stdout());
match vm.run() {
VMResult::Error(e) => panic!(e),
VMResult::Ok => {}
}
} else {
let config: Config = serde_yaml::from_reader(file).unwrap();
match generate_program(config) {
Ok(prog) => {
println!("{}", prog);
}
Err(e) => {
println!("Unable to generate program: {}", e);
}
}
}
}
Err(e) => panic!("Unable to open file: {}", e),
}
} |
use std::borrow::Cow;
use std::path::Path;
use hashlink::LinkedHashMap;
use url::{ParseError, Url};
use crate::{CreditCard, Login, Record, SecureNote, SoftwareLicence};
// Field keys dropped from notes output ('^' = prefix match, see `matches`).
const SKIP_KEYS: &[&str] = &["^html", "^recaptcha", "commit", "op", "label", "action"];
// Field values treated as noise (checkbox ticks, submit captions).
const SKIP_VALUES: &[&str] = &["✓", "SEND", "Y"];
// Keys that may hold a username ('*' = substring match, '^' = prefix match).
const LOGIN_FIELDS: &[&str] = &[
    "*login",
    "*username",
    "*mail",
    "wpname",
    "membership no",
    "medicarecardnumber",
    "e",
];
// Keys whose value becomes the body of a secure note.
const NOTE_FIELDS: &[&str] = &["comments", "customIcon", "predicate b64"];
// Keys that may hold the login URL.
const WEBSITE_FIELDS: &[&str] = &["location", "url", "website"];
/// An item parsed from a password export file, before classification into a
/// concrete `Record` variant (see `From<RawRecord> for Record`).
#[derive(Debug)]
pub struct RawRecord<'a> {
    // Source file path; used to derive fallback titles/usernames/domains.
    path: &'a Path,
    // Directory depth of the file; depth > 1 marks a nested item.
    depth: usize,
    // Bare password line, when the item had one.
    password: Option<&'a str>,
    // Remaining "key: value" fields, preserved in file order.
    fields: LinkedHashMap<Cow<'a, str>, &'a str>,
}
/// Parse one exported item into a `RawRecord`.
///
/// Handles three shapes, in order:
/// 1. a single line with no ": " separator — a bare password;
/// 2. a single "key: value" pair, or an item starting with "comments: "
///    without a "license key: " field — one field, no password;
/// 3. the general case — line-by-line "key: value" parsing where
///    password-like keys set the password, a bare line becomes the
///    password, and a free-form block after "comments: " becomes the note.
///
/// Panics on a line it cannot classify.
pub(crate) fn raw<'a>(path: &'a Path, depth: usize, item: &'a str) -> RawRecord<'a> {
    // Shape 1: exactly one line and no "key: value" separator.
    if item.lines().count() == 1
        && item
            .lines()
            .next()
            .map_or(false, |line| !line.contains(": "))
    {
        return RawRecord {
            path,
            depth,
            password: Some(item.trim_end()),
            fields: LinkedHashMap::new(),
        };
    }
    // Probably a secure note
    const COMMENTS: &str = "comments: ";
    if item.split(':').count() == 2 || (item.starts_with(COMMENTS) && !item.contains("license key: ")) {
        let (key, value) = item.split_once(':').unwrap();
        // A newline in the "key" means the colon belonged to a later line,
        // so this is not really a single pair — fall through.
        if !key.contains('\n') {
            let mut fields = LinkedHashMap::new();
            fields.insert(Cow::from(key), value.trim_start());
            return RawRecord {
                path,
                depth,
                password: None,
                fields,
            };
        }
    }
    // Shape 3: general line-by-line parse.
    let mut fields = LinkedHashMap::new();
    let mut password = None;
    for line in item.lines() {
        if let Some((key, value)) = line.split_once(": ") {
            let key = key.to_ascii_lowercase();
            if key.contains("pass")
                || key.contains("pwd")
                || key == "p"
                || key.starts_with("reg-pw")
            {
                // Use as password or skip if password is already set
                if password.is_none() {
                    password = Some(value.trim_start())
                }
            } else if value == "✓" {
                // skip
            } else {
                fields.insert(Cow::from(key), value.trim_start());
            }
        } else if password.is_none() {
            password = Some(line)
        } else if password.is_some() && fields.len() == 1 && fields.contains_key("comments") {
            // This is a secure note with a password, such as an ssh key
            let pos = item.find(COMMENTS).unwrap();
            let note = &item[pos + COMMENTS.len()..];
            fields.insert(Cow::from("comments"), note);
            return RawRecord {
                path,
                depth,
                password,
                fields,
            };
        } else {
            panic!(
                "error: malformed item: {:?}, {:?}: {}",
                password, fields, item
            );
        }
    }
    RawRecord {
        path,
        depth,
        password,
        fields,
    }
}
/// True when `key` should be dropped: empty, or matching a SKIP_KEYS pattern.
fn skip_key(key: &str) -> bool {
    if key.is_empty() {
        return true;
    }
    SKIP_KEYS.iter().any(|&skip| matches(skip, key))
}
/// Tiny pattern match: "^p" = value starts with p, "*p" = value contains p,
/// anything else = exact equality.
fn matches(pattern: &str, value: &str) -> bool {
    match pattern.strip_prefix('^') {
        Some(prefix) => value.starts_with(prefix),
        None => match pattern.strip_prefix('*') {
            Some(needle) => value.contains(needle),
            None => value == pattern,
        },
    }
}
/// True when `value` duplicates the password or is a known noise value.
fn skip_value(password: Option<&str>, value: &str) -> bool {
    if password == Some(value) {
        return true;
    }
    SKIP_VALUES.contains(&value)
}
/// Derive a record title from the file path: the file stem for top-level
/// files, or "parent-dir (stem)" for nested items (depth 2). Panics on
/// deeper nesting.
fn title_from_path(depth: usize, path: &Path) -> String {
    let stem = |p: &Path| -> String {
        p.file_stem()
            .and_then(|os| os.to_str())
            .map(|s| s.to_string())
            .unwrap()
    };
    match depth {
        d if d > 2 => panic!("unhandled depth > 2"),
        d if d > 1 => {
            let domain = path
                .parent()
                .and_then(|p| p.file_name())
                .and_then(|os| os.to_str())
                .map(|s| s.to_string())
                .unwrap();
            format!("{} ({})", domain, stem(path))
        }
        _ => stem(path),
    }
}
impl<'a> From<RawRecord<'a>> for Record {
    /// Classify a raw record into a concrete `Record` variant.
    ///
    /// With a password present: credit card (cardholder + number fields),
    /// software licence (license key / licensed to), otherwise a login whose
    /// username and website fall back to the file path for nested items.
    /// Without a password: software licence, credit card (number field),
    /// secure note (any of NOTE_FIELDS), or a password-less login with a
    /// website field. Anything else panics.
    fn from(mut raw: RawRecord) -> Record {
        // Title comes from an explicit field or else the file path.
        let title = raw
            .fields
            .get("title")
            .map(|s| s.to_string())
            .unwrap_or_else(|| title_from_path(raw.depth, raw.path));
        if let Some(password) = raw.password.or_else(|| raw.fields.get("password").map(|&s| s)) {
            if raw.fields.contains_key("cardholder") && raw.fields.contains_key("number") {
                let card = read_credit_card(title, &raw);
                Record::CreditCard(card)
            } else if raw.fields.contains_key("license key") || raw.fields.contains_key("licensed to") {
                let software = read_software_licence(title, &raw);
                Record::SoftwareLicence(software)
            } else {
                // Try to find username
                let username = raw
                    .fields
                    .iter()
                    .find_map(|(key, value)| {
                        for &field in LOGIN_FIELDS.iter() {
                            if matches(field, key) {
                                if field == "*mail" || field == "e" {
                                    // Ensure @ is present if we're matching on an email field
                                    if value.contains('@') {
                                        return Some(value.to_string());
                                    }
                                } else {
                                    return Some(value.to_string());
                                }
                            }
                        }
                        None
                    })
                    .or_else(|| {
                        // Nested item: the file stem is the username.
                        if raw.depth > 1 {
                            raw.path
                                .file_stem()
                                .and_then(|os| os.to_str())
                                .map(|s| s.to_string())
                        } else {
                            None
                        }
                    });
                // Website from an explicit field, else the parent directory
                // name for nested items.
                let website = WEBSITE_FIELDS
                    .iter()
                    .find_map(|&key| raw.fields.get(key).map(|&v| v))
                    .or_else(|| {
                        if raw.depth > 1 {
                            raw.path
                                .parent()
                                .and_then(|p| p.file_name())
                                .and_then(|os| os.to_str())
                        } else {
                            None
                        }
                    })
                    .map(|s| parse_url(s));
                // Remove fields that we don't need to retain now
                raw.fields.retain(|key, _value| {
                    !(WEBSITE_FIELDS.contains(&key.as_ref())
                        || LOGIN_FIELDS.iter().any(|&field| matches(field, key)))
                });
                let login = Login::new(
                    title,
                    website,
                    username,
                    Some(password.to_string()),
                    fields_to_notes(Some(password), raw.fields),
                );
                Record::Login(login)
            }
        } else if raw.fields.contains_key("license key") || raw.fields.contains_key("licensed to") {
            let software = read_software_licence(title, &raw);
            Record::SoftwareLicence(software)
        } else if raw.fields.contains_key("number") {
            let card = read_credit_card(title, &raw);
            Record::CreditCard(card)
        } else {
            if let Some(notes) = NOTE_FIELDS.iter().find_map(|&key| raw.fields.get(key)) {
                let note = SecureNote {
                    title,
                    text: notes.to_string(),
                };
                Record::SecureNote(note)
            } else if raw.fields.contains_key("website") {
                // Password-less login: keep the website, push the rest to notes.
                let login = Login::new(
                    title,
                    Some(parse_url(raw.fields["website"])),
                    None,
                    None,
                    fields_to_notes(None, raw.fields),
                );
                Record::Login(login)
            } else {
                panic!("Unhandled item")
            }
        }
    }
}
/// Render the remaining fields as "key: value" lines for a notes blob,
/// skipping noise keys/values; returns None when nothing is left.
fn fields_to_notes<'a>(
    password: Option<&str>,
    fields: LinkedHashMap<Cow<'a, str>, &'a str>,
) -> Option<String> {
    // TODO: use intersperse if/when stable https://github.com/rust-lang/rust/issues/79524
    let mut lines = Vec::new();
    for (key, value) in fields {
        if skip_key(&key) || skip_value(password, value) {
            // eprintln!("skip: {} → {}", key, value);
            continue;
        }
        lines.push(format!("{}: {}", key, value));
    }
    if lines.is_empty() {
        None
    } else {
        Some(lines.join("\n"))
    }
}
/// Parse `s` as a URL, retrying with an "https://" prefix when the input
/// lacks a scheme. Panics on anything else that fails to parse.
fn parse_url(s: &str) -> Url {
    match s.parse() {
        Ok(url) => url,
        Err(ParseError::RelativeUrlWithoutBase) => {
            let fallback = String::from("https://") + s;
            // unwrap_or_else: avoid eagerly building the panic message on
            // the success path (clippy::expect_fun_call).
            fallback
                .parse()
                .unwrap_or_else(|_| panic!("invalid fallback url: {}", fallback))
        }
        Err(e) => panic!("invalid url: {}", e),
    }
}
/// Build a `CreditCard` from a raw record's fields.
///
/// The PIN falls back to the record's bare password line when there is no
/// explicit "pin" field. Panics when the mandatory card number is missing.
fn read_credit_card(title: String, raw: &RawRecord) -> CreditCard {
    let card_number = raw.fields.get("number").map(|&s| String::from(s));
    // or_else for lazy fallbacks, matching read_software_licence's style.
    let expiry_date = raw
        .fields
        .get("expiry date")
        .or_else(|| raw.fields.get("expiry"))
        .map(|&s| String::from(s));
    let cardholder_name = raw.fields.get("cardholder").map(|&s| String::from(s));
    let pin = raw
        .password
        .or_else(|| raw.fields.get("pin").map(|&v| v))
        .map(String::from);
    let bank_name = raw.fields.get("bank name").map(|&s| String::from(s));
    let cvv = raw
        .fields
        .get("cvc")
        .or_else(|| raw.fields.get("cvv"))
        .map(|&s| String::from(s));
    CreditCard {
        title,
        card_number: card_number.expect("missing card number"),
        expiry_date,
        cardholder_name,
        pin,
        bank_name,
        cvv,
        notes: None,
    }
}
/// Build a `SoftwareLicence` from a raw record's fields, trying each field's
/// common aliases ("version"/"product version", "reg code", etc.), then
/// sanitise the result.
fn read_software_licence(title: String, raw: &RawRecord) -> SoftwareLicence {
    let version = raw
        .fields
        .get("product version")
        .or_else(|| raw.fields.get("version"))
        .map(|&s| String::from(s));
    let license_key = raw
        .fields
        .get("license key")
        .or_else(|| raw.fields.get("reg code"))
        .map(|&s| String::from(s));
    let your_name = raw
        .fields
        .get("licensed to")
        .or_else(|| raw.fields.get("reg name"))
        .map(|&s| String::from(s));
    let your_email = raw
        .fields
        .get("registered email")
        .or_else(|| raw.fields.get("reg email"))
        .map(|&s| String::from(s));
    // let company = raw.fields.get("")
    let download_link = raw
        .fields
        .get("download link")
        .or_else(|| raw.fields.get("download page"))
        .map(|link| link.parse().unwrap());
    let software_publisher = raw
        .fields
        .get("publisher name")
        .or_else(|| raw.fields.get("publisher"))
        .map(|&s| String::from(s));
    let publishers_website = raw
        .fields
        .get("publisher website")
        .or_else(|| raw.fields.get("website"))
        .map(|link| link.parse().unwrap());
    // let retail_price = raw.fields.get("");
    let support_email = raw.fields.get("support email").map(|&s| String::from(s));
    let purchase_date = raw.fields.get("order date").map(|&s| String::from(s));
    let order_number = raw.fields.get("order number").map(|&s| String::from(s));
    SoftwareLicence {
        title,
        version,
        license_key,
        your_name,
        your_email,
        company: None,
        download_link,
        software_publisher,
        publishers_website,
        retail_price: None,
        support_email,
        purchase_date,
        order_number,
        notes: None,
    }
    .sanitise()
}
#[cfg(test)]
mod tests {
    // Fixture-driven tests: each test reads a file under tests/ and
    // compares the parsed result against an expected Record.
    use std::fs;
    use std::path::Path;
    use crate::{CreditCard, Login, Record, SecureNote, SoftwareLicence};
    /// Read a fixture file and convert it to a Record; depth is the number
    /// of path components below the crate root.
    fn parse_path<P: AsRef<Path>>(path: P) -> Record {
        let path = path.as_ref();
        let content = fs::read_to_string(&path).unwrap();
        let depth = path.components().count() - 1;
        let raw = super::raw(&path, depth, &content);
        Record::from(raw)
    }
    #[test]
    fn test_myer() {
        // Tests:
        // * title has domain removed from file stem
        // * email field is only selected if it contains an @ in the value
        let actual = parse_path("tests/m.myer.com.au Myer.txt");
        let notes = r#"firstname: Wesley
lastname: Wesley
country: AU
address1: Level 1, 123 Example St
city: FITZROY
zipcode: 3065
state: VIC
phone1type: CEL
phone1: 0412345678"#;
        let expected = Record::Login(Login {
            title: String::from("Myer"),
            website: Some("https://m.myer.com.au/webapp/wcs/stores/servlet/m20OrderShippingBillingDetailsView?catalogId=10051&langId=-1&storeId=10251".parse().unwrap()),
            username: Some(String::from("test@example.com")),
            password: Some(String::from("this-is-a-test-password")),
            notes: Some(String::from(notes))
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_url_without_scheme() {
        // A schemeless website field gets the https:// fallback.
        let actual = parse_path("tests/bugzilla.mozilla.org Mozilla bugzilla.txt");
        let expected = Record::Login(Login {
            title: String::from("Mozilla bugzilla"),
            website: Some("https://bugzilla.mozilla.org/token.cgi".parse().unwrap()),
            username: Some(String::from("test@example.com")),
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_multiline_secure_note() {
        let actual = parse_path("tests/multiline secure note.txt");
        let text = r"# START OF EXAMPLE KEY FILE
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
DDDDDDDDDDDDDDDDDDDDDDDDDDD/DDDDDDDDD/DDDDDDDDDDDD+XtKG=
# END OF EXAMPLE KEY FILE
";
        let expected = Record::SecureNote(SecureNote {
            title: String::from("multiline secure note"),
            text: String::from(text),
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_multiline_secure_note_with_colons() {
        let actual = parse_path("tests/multiline secure note with colons.txt");
        let text = r"asfd
blargh: thing
";
        let expected = Record::SecureNote(SecureNote {
            title: String::from("multiline secure note with colons"),
            text: String::from(text),
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_multiline_secure_note_with_password() {
        // A note with its own password becomes a Login carrying the note.
        let actual = parse_path("tests/multiline secure note with password.txt");
        let notes = Some(
            r"comments: line 1
line 2
line 3
"
            .to_string(),
        );
        let expected = Record::Login(Login {
            title: String::from("multiline secure note with password"),
            website: None,
            username: None,
            password: Some(String::from("this-is-a-test-password")),
            notes,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_just_password() {
        let actual = parse_path("tests/example.com.txt");
        let expected = Record::Login(Login {
            title: String::from("example.com"),
            website: Some("https://example.com".parse().unwrap()),
            username: None,
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_strip_generated_password() {
        let actual = parse_path("tests/Generated Password for example.com.txt");
        let expected = Record::Login(Login {
            title: String::from("example.com"),
            website: Some("http://example.com".parse().unwrap()),
            username: None,
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_keep_domain_in_title_when_ip_address() {
        let actual = parse_path("tests/192.168.0.8 (Administrator).txt");
        let expected = Record::Login(Login {
            title: String::from("192.168.0.8 (Administrator)"),
            website: Some("http://192.168.0.8".parse().unwrap()),
            username: Some(String::from("Administrator")),
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_keep_domain_in_title_when_second_part_is_username() {
        let actual = parse_path("tests/yousendit.com (test@example.com).txt");
        let expected = Record::Login(Login {
            title: String::from("yousendit.com (test@example.com)"),
            website: Some("http://yousendit.com".parse().unwrap()),
            username: Some(String::from("test@example.com")),
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_keep_domain_in_title_when_paren() {
        let actual = parse_path("tests/typekit.com (example).txt");
        let expected = Record::Login(Login {
            title: String::from("typekit.com (example)"),
            website: Some("https://typekit.com/users/new/trial".parse().unwrap()),
            username: Some(String::from("typekit@example.com")),
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_case_insensitive_pass() {
        let actual = parse_path("tests/wiki.trikeapps.com.txt");
        let expected = Record::Login(Login {
            title: String::from("wiki.trikeapps.com"),
            website: Some(
                "https://wiki.trikeapps.com/index.php/Special:UserLogin"
                    .parse()
                    .unwrap(),
            ),
            username: Some(String::from("Wmoore")),
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_strip_password_confirmations() {
        let actual = parse_path("tests/password confirmation.txt");
        let expected = Record::Login(Login {
            title: String::from("password confirmation"),
            website: Some("https://example.com".parse().unwrap()),
            username: Some(String::from("test@example.com")),
            password: Some(String::from("XXXXXXXXXXXXXXXXXXXX")),
            notes: Some(String::from("firstname: Wesley Moore")),
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_nested_login() {
        // Tests that the username, title, and url are picked from the path
        let actual = parse_path("tests/example.com/wezm.txt");
        let expected = Record::Login(Login {
            title: String::from("example.com (wezm)"),
            website: Some("https://example.com".parse().unwrap()),
            username: Some(String::from("wezm")),
            password: Some(String::from("this-is-a-test-password")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_software_license_incomplete() {
        let actual = parse_path("tests/Divvy.txt");
        let expected = Record::SoftwareLicence(SoftwareLicence {
            title: String::from("Divvy"),
            version: None,
            license_key: Some(String::from(
                "TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-TEST-AAAA",
            )),
            your_name: Some(String::from("Wesley Moore")),
            your_email: Some(String::from("test@example.com")),
            company: None,
            download_link: Some(
                "http://mizage.com/divvy/downloads/Divvy.zip"
                    .parse()
                    .unwrap(),
            ),
            software_publisher: None,
            publishers_website: None,
            retail_price: None,
            support_email: None,
            purchase_date: None,
            order_number: None,
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_software_license_complete() {
        let actual = parse_path("tests/agilebits.com 1Password 5.txt");
        let expected = Record::SoftwareLicence(SoftwareLicence {
            title: String::from("1Password 5"),
            version: Some(String::from("5.0.2")),
            license_key: None,
            your_name: Some(String::from("Wesley Moore")),
            your_email: Some(String::from("test@example.com")),
            company: None,
            download_link: Some("https://agilebits.com/downloads".parse().unwrap()),
            software_publisher: Some(String::from("AgileBits Inc.")),
            publishers_website: Some("https://agilebits.com/onepassword".parse().unwrap()),
            retail_price: None,
            support_email: Some(String::from("support@agilebits.com")),
            purchase_date: Some(String::from("6/10/2013")),
            order_number: Some(String::from("0000000")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_software_license_with_comments() {
        let actual = parse_path("tests/deals.macupdate.com SnapHeal Pro.txt");
        let expected = Record::SoftwareLicence(SoftwareLicence {
            title: String::from("SnapHeal Pro"),
            version: Some(String::from("1.0")),
            license_key: Some(String::from("ZZZZ-ZZZZ-ZZZZ-ZZZZ-ZZZZ")),
            your_name: Some(String::from("Wesley Moore")),
            your_email: Some(String::from("test@example.com")),
            company: None,
            download_link: Some("https://deals.macupdate.com/receipt/1234567890".parse().unwrap()),
            software_publisher: Some(String::from("MacUpdate Promo")),
            publishers_website: Some("https://deals.macupdate.com/".parse().unwrap()),
            retail_price: None,
            support_email: Some(String::from("support@macupdate.com")),
            purchase_date: None,
            order_number: Some(String::from("1234567890")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_credit_card_1() {
        let actual = parse_path("tests/CC 1.txt");
        let expected = Record::CreditCard(CreditCard {
            title: String::from("CC 1"),
            card_number: String::from("376000000000000"),
            expiry_date: Some(String::from("02/20")),
            cardholder_name: Some(String::from("First Last")),
            pin: None,
            bank_name: None,
            cvv: Some(String::from("1234")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_credit_card_2() {
        let actual = parse_path("tests/CC 2.txt");
        let expected = Record::CreditCard(CreditCard {
            title: String::from("CC 2"),
            card_number: String::from("4100000000000000"),
            expiry_date: Some(String::from("02/2026")),
            cardholder_name: Some(String::from("First Last")),
            pin: Some(String::from("1234")),
            bank_name: None,
            cvv: Some(String::from("123")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
    #[test]
    fn test_credit_card_3() {
        let actual = parse_path("tests/CC 3.txt");
        let expected = Record::CreditCard(CreditCard {
            title: String::from("CC 3"),
            card_number: String::from("370000000000000"),
            expiry_date: Some(String::from("0/6/2018")),
            cardholder_name: None,
            pin: Some(String::from("4567")),
            bank_name: None,
            cvv: Some(String::from("1234")),
            notes: None,
        });
        assert_eq!(actual, expected)
    }
}
|
extern crate percent_encoding;
use percent_encoding::{percent_decode, percent_encode, DEFAULT_ENCODE_SET};
use std::slice;
/// Fill the provided output buffer with the quoted string.
///
/// # Parameters
///
/// * input_buf: Non-null pointer to UTF-8-encoded character sequence to be quoted. A terminating
/// zero is not required.
/// * input_len: Number of bytes in input_buf (without terminating zero).
/// * output_buf: Non-null pointer to buffer which will hold the UTF-8-encoded output string. The
/// buffer should be big enough to hold the quoted string. This function is not going
/// to write beyond the bounds specified by `output_len`.
/// * output_len: Length of the output buffer.
///
/// # Return value
///
/// The number of bytes required to hold the quoted string. By comparing `output_len` with the
/// returned value one can determine if the provided output buffer has been sufficient.
#[no_mangle]
pub unsafe extern "C" fn quote(
    input_buf: *const u8,
    input_len: usize,
    output_buf: *mut u8,
    output_len: usize,
) -> usize {
    // SAFETY: relies on the caller passing valid, correctly-sized,
    // non-overlapping pointers as documented in the doc comment above.
    let input = slice::from_raw_parts(input_buf, input_len);
    let output = slice::from_raw_parts_mut(output_buf, output_len);
    let mut index = 0;
    // Lazily percent-encode the input as a stream of bytes.
    let mut quoted_bytes = percent_encode(input, DEFAULT_ENCODE_SET).flat_map(str::bytes);
    // Copy at most `output_len` bytes into the caller's buffer.
    for byte in (&mut quoted_bytes).take(output_len) {
        output[index] = byte;
        index += 1;
    }
    // The number of bytes required to hold the quoted string
    // (bytes written plus bytes that did not fit).
    index + quoted_bytes.count()
}
/// Fill the provided output buffer with the unquoted string.
///
/// # Parameters
///
/// * input_buf: Non-null pointer to UTF-8-encoded character sequence to be unquoted. A terminating
/// zero is not required.
/// * input_len: Number of bytes in input_buf (without terminating zero).
/// * output_buf: Non-null pointer to buffer which will hold the UTF-8-encoded output string. The
/// buffer should be big enough to hold the unquoted string. This function is not
/// going to write beyond the bounds specified by `output_len`.
/// * output_len: Length of the output buffer.
///
/// # Return value
///
/// The number of bytes required to hold the unquoted string. By comparing `output_len` with the
/// returned value one can determine if the provided output buffer has been sufficient.
#[no_mangle]
pub unsafe extern "C" fn unquote(
    input_buf: *const u8,
    input_len: usize,
    output_buf: *mut u8,
    output_len: usize,
) -> usize {
    // SAFETY: relies on the caller passing valid, correctly-sized,
    // non-overlapping pointers as documented in the doc comment above.
    let input = slice::from_raw_parts(input_buf, input_len);
    let output = slice::from_raw_parts_mut(output_buf, output_len);
    let mut index = 0;
    let mut unquoted_bytes = percent_decode(input);
    // Copy at most `output_len` decoded bytes into the caller's buffer.
    for byte in (&mut unquoted_bytes).take(output_len) {
        output[index] = byte;
        index += 1;
    }
    // The number of bytes required to hold the unquoted string: bytes
    // already written plus bytes remaining in the decoder — mirroring
    // `quote` above. (The previous version also added `output_len`,
    // over-reporting the size; the `unquoting_works` test expects exactly
    // `"/El Niño/".len()`.)
    index + unquoted_bytes.count()
}
#[cfg(test)]
mod tests {
    use super::*;
    use percent_encoding::utf8_percent_encode;
    // Round-trips "/El Niño/" through `quote`: first with an undersized buffer
    // to learn the required length, then with an exact-size buffer.
    #[test]
    fn quoting_works() {
        // Sanity-check the expected encoding against the library directly.
        assert_eq!(
            utf8_percent_encode("/El Niño/", DEFAULT_ENCODE_SET).to_string(),
            "/El%20Ni%C3%B1o/"
        );
        let input = "/El Niño/";
        unsafe {
            // Undersized (10-byte) buffer: the call must still report the
            // full required length without writing out of bounds.
            let mut buf = vec![0; 10];
            let buf_len = quote(input.as_ptr(), input.len(), buf.as_mut_ptr(), buf.len());
            assert_eq!(buf_len, "/El%20Ni%C3%B1o/".len());
            // Exact-size buffer: the whole quoted string fits.
            let mut buf = vec![0; buf_len];
            quote(input.as_ptr(), input.len(), buf.as_mut_ptr(), buf.len());
            let quoted = String::from_utf8(buf).unwrap();
            assert_eq!(quoted, "/El%20Ni%C3%B1o/");
        }
    }
    // Same two-phase pattern for `unquote`, starting from a 1-byte buffer.
    #[test]
    fn unquoting_works() {
        assert_eq!(
            utf8_percent_encode("/El Niño/", DEFAULT_ENCODE_SET).to_string(),
            "/El%20Ni%C3%B1o/"
        );
        let input = "/El%20Ni%C3%B1o/";
        let mut buf = vec![0; 1];
        unsafe {
            // 1-byte buffer: return value is still the full required length.
            let buf_len = unquote(input.as_ptr(), input.len(), buf.as_mut_ptr(), buf.len());
            assert_eq!(buf_len, "/El Niño/".len());
            let mut buf = vec![0; buf_len];
            unquote(input.as_ptr(), input.len(), buf.as_mut_ptr(), buf.len());
            let unquoted = String::from_utf8(buf).unwrap();
            assert_eq!(unquoted, "/El Niño/");
        }
    }
}
|
use glam::{Quat, Vec3};
use crate::error::ReadValueError;
use std::convert::TryInto;
/// Byte buffer with a read cursor that advances front-to-back as values are
/// deserialized from it.
pub struct ReadBuffer {
    // Backing bytes being deserialized.
    buffer: Vec<u8>,
    // Index of the next unread byte.
    position: usize,
}
// Note: the previous `impl<'a>` declared a lifetime parameter that no method
// used; it is dropped here.
impl ReadBuffer {
    /// Wraps `buffer`, placing the read cursor at the first byte.
    pub fn new(buffer: Vec<u8>) -> Self {
        Self {
            buffer,
            position: 0,
        }
    }
    /// Reads exactly `COUNT` bytes and returns them as a fixed-size array,
    /// advancing the cursor.
    ///
    /// Returns `BufferToShort` (tagged with `type_name`) when fewer than
    /// `COUNT` bytes remain — previously a short buffer caused a slice-index
    /// panic because the bounds were never checked before slicing.
    pub fn read_array<const COUNT: usize>(
        &mut self,
        type_name: &'static str,
    ) -> Result<[u8; COUNT], ReadValueError> {
        if self.position + COUNT > self.buffer.len() {
            return Err(ReadValueError::BufferToShort(COUNT, type_name));
        }
        self.position += COUNT;
        // The slice is exactly COUNT bytes long, so this conversion cannot fail.
        Ok(self.buffer[self.position - COUNT..self.position]
            .try_into()
            .expect("slice length equals COUNT"))
    }
    /// Reads `bytes` bytes into a freshly allocated `Vec`, advancing the cursor.
    ///
    /// Returns `BufferToShort` (tagged with `type_name`) when fewer than
    /// `bytes` bytes remain.
    pub fn read_vec(
        &mut self,
        bytes: usize,
        type_name: &'static str,
    ) -> Result<Vec<u8>, ReadValueError> {
        if self.position + bytes > self.buffer.len() {
            Err(ReadValueError::BufferToShort(bytes, type_name))
        } else {
            self.position += bytes;
            Ok(self.buffer[self.position - bytes..self.position].to_vec())
        }
    }
    /// Current cursor position, i.e. the number of bytes consumed so far.
    pub fn get_pos(&self) -> usize {
        self.position
    }
}
/// A value that can be written to / read from the project's big-endian wire
/// format (see the numeric impls below).
pub trait Serializable {
    /// Appends this value's encoding to `buf`.
    fn serialize(&self, buf: &mut Vec<u8>);
    /// Reads one value from `buf`, advancing its cursor.
    fn deserialize(buf: &mut ReadBuffer) -> Result<Self, ReadValueError>
    where
        Self: Sized;
}
// Implements `Serializable` for primitive numeric types using their
// big-endian byte representation (`to_be_bytes` / `from_be_bytes`).
macro_rules! impl_num_serializable {
    ($($struct_name:ident),*) => {
        // block to be repeated
        $(
            impl Serializable for $struct_name {
                fn serialize(&self, buf: &mut Vec<u8>) {
                    buf.extend_from_slice(&self.to_be_bytes())
                }
                fn deserialize(buf: &mut ReadBuffer) -> Result<Self, ReadValueError>{
                    // The type's name is used to tag BufferToShort errors.
                    Ok($struct_name::from_be_bytes(buf.read_array(std::any::type_name::<$struct_name>())?))
                }
            }
        )*
    };
}
// All primitive integer and float widths share the same big-endian encoding.
impl_num_serializable! { u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64 }
// Strings are encoded as a u16 big-endian length prefix followed by the raw
// UTF-8 bytes.
impl Serializable for String {
    fn serialize(&self, buf: &mut Vec<u8>) {
        // NOTE(review): `self.len() as u16` silently truncates strings longer
        // than 65535 bytes, which would corrupt the stream — confirm callers
        // never serialize strings that long.
        Serializable::serialize(&(self.len() as u16), buf);
        buf.extend_from_slice(self.as_bytes())
    }
    fn deserialize(buf: &mut ReadBuffer) -> Result<Self, ReadValueError> {
        let len = u16::from_be_bytes(buf.read_array("String len (u16)")?) as usize;
        // Invalid UTF-8 is reported as a parse error rather than a panic.
        String::from_utf8(buf.read_vec(len, "String value")?)
            .map_err(|_| ReadValueError::StringParseError)
    }
}
impl Serializable for Vec3 {
    fn serialize(&self, buf: &mut Vec<u8>) {
        // Emit x, y, z as consecutive big-endian f32 values.
        for component in &[self.x, self.y, self.z] {
            buf.extend_from_slice(&component.to_be_bytes());
        }
    }
    fn deserialize(buf: &mut ReadBuffer) -> Result<Self, ReadValueError> {
        // Read back in the same x, y, z order used by `serialize`.
        let x = f32::deserialize(buf)?;
        let y = f32::deserialize(buf)?;
        let z = f32::deserialize(buf)?;
        Ok(Vec3::new(x, y, z))
    }
}
impl Serializable for Quat {
    fn serialize(&self, buf: &mut Vec<u8>) {
        // Emit x, y, z, w as consecutive big-endian f32 values.
        for component in &[self.x, self.y, self.z, self.w] {
            buf.extend_from_slice(&component.to_be_bytes());
        }
    }
    fn deserialize(buf: &mut ReadBuffer) -> Result<Self, ReadValueError> {
        // Read back in the same x, y, z, w order used by `serialize`.
        let x = f32::deserialize(buf)?;
        let y = f32::deserialize(buf)?;
        let z = f32::deserialize(buf)?;
        let w = f32::deserialize(buf)?;
        Ok(Quat::from_array([x, y, z, w]))
    }
}
|
use std::env;
use diesel::pg::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
use futures::future::{self, Future};
use serde_json;
use warp::{Filter, Reply, Rejection};
use crate::exception::{self, INTERNAL_SERVER_ERROR};
/// r2d2 connection pool over Diesel's PostgreSQL connection manager.
type PgPool = Pool<ConnectionManager<PgConnection>>;
/// A single PostgreSQL connection checked out from the pool.
pub type PgPooled = PooledConnection<ConnectionManager<PgConnection>>;
/// pg_pool handles the PostgreSQL connection thread pool.
///
/// Reads the connection string from the `DATABASE_URL` environment variable.
///
/// # Panics
///
/// Panics when `DATABASE_URL` is unset or the pool cannot be created — both
/// are unrecoverable startup failures.
pub fn pg_pool() -> PgPool {
    let db_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    let manager = ConnectionManager::<PgConnection>::new(db_url);
    let pool = Pool::new(manager)
        .expect("PostgreSQL connection pool could not be created");
    // Fixed typo in the log message ("postgresSQL" -> "PostgreSQL").
    log::info!("initiated PostgreSQL thread connection pool");
    pool
}
/// Run a function on a threadpool, returning a future resolving when the function completes.
pub fn fut_threadpool<F, T>(f: F) -> impl Future<Item = T, Error = tokio_threadpool::BlockingError>
where
    F: FnOnce() -> T,
{
    // `f` is FnOnce but `poll_fn` may invoke its closure several times, so the
    // function is stashed in an Option and taken out exactly once when
    // `blocking` actually runs the body.
    let mut f_only_once = Some(f);
    futures::future::poll_fn(move || {
        tokio_threadpool::blocking(|| {
            let f = f_only_once.take().unwrap();
            f()
        })
    })
}
/// Run a function on a threadpool, returning a future resolving when the
/// function completes. Any (unexpected!) threadpool error is turned into a
/// Warp rejection, wrapping the Internal Server Error problem.
pub fn threadpool<F, T>(f: F) -> impl Future<Item = T, Error = Rejection>
where
    F: FnOnce() -> T,
{
    // The BlockingError detail is dropped deliberately: it never carries
    // user-actionable information.
    fut_threadpool(f).map_err(|_| warp::reject::custom(INTERNAL_SERVER_ERROR))
}
/// Runs a function on a threadpool, ignoring a potential Diesel error inside the threadpool.
/// This error is turned into an internal server error (as Diesel errors are unexpected, and
/// indicative of erroneous queries).
pub fn threadpool_diesel_ok<F, T>(f: F) -> impl Future<Item = T, Error = Rejection>
where
    F: FnOnce() -> Result<T, diesel::result::Error>,
{
    // Map the inner Diesel error to a rejection and lift the Result back into
    // a future in one step.
    threadpool(f).and_then(|result| {
        future::result(result.map_err(|_| warp::reject::custom(INTERNAL_SERVER_ERROR)))
    })
}
/// Create a filter to get a PostgreSQL connection from a PostgreSQL connection pool.
///
/// The pool handle itself is cheap to clone; checking a connection out can
/// fail (e.g. pool exhausted), which is surfaced as an internal-server-error
/// rejection.
pub fn pg(
    pg_pool: crate::utils::PgPool,
) -> impl Filter<Extract = (crate::utils::PgPooled,),
Error = Rejection> + Clone {
    warp::any()
        .map(move || pg_pool.clone())
        .and_then(|pg_pool: crate::utils::PgPool| match pg_pool.get() {
            Ok(pg_pooled) => Ok(pg_pooled),
            Err(_) => Err(warp::reject::custom(INTERNAL_SERVER_ERROR)),
        })
}
/// Convert rejections into replies.
///
/// Every rejection is serialized as a JSON "fault" body with the matching
/// HTTP status code and a `application/fault+json` content type.
pub fn handle_rejection(rejection: Rejection) -> Result<impl Reply, Rejection> {
    use crate::exception::{ExceptionMsg, Fault};
    let reply = if let Some(fault) = rejection.find_cause::<Fault>() {
        // This rejection originated in this implementation.
        let static_exception = ExceptionMsg::from(fault);
        warp::reply::with_status(
            serde_json::to_string(&static_exception).unwrap(),
            fault.to_status_code(),
        )
    } else {
        // This rejection originated in Warp.
        // Map Warp's built-in rejections onto the two faults we can express.
        let fault = if rejection.is_not_found() {
            exception::NOT_FOUND
        } else {
            exception::INTERNAL_SERVER_ERROR
        };
        let static_exception = ExceptionMsg::from(&fault);
        warp::reply::with_status(
            serde_json::to_string(&static_exception).unwrap(),
            fault.to_status_code(),
        )
    };
    // Both branches produce the same reply type, so the header is attached once.
    Ok(warp::reply::with_header(
        reply,
        "Content-Type",
        "application/fault+json",
    )
    )
}
|
// Listening twice on port 0 must produce two distinct bound addresses, each
// different from the unbound (port 0) target.
#[tokio::test]
async fn multiple_consecutive_ephemeral_listening_addresses() {
    let node = ipfs::Node::new("test_node").await;
    let target = libp2p::build_multiaddr!(Ip4([127, 0, 0, 1]), Tcp(0u16));
    let first = node.add_listening_address(target.clone()).await.unwrap();
    assert_ne!(target, first);
    let second = node.add_listening_address(target.clone()).await.unwrap();
    assert_ne!(target, second);
    assert_ne!(first, second);
}
// Two concurrent (joined, not awaited sequentially) ephemeral listens on the
// same IP: at least one must succeed; see the inline notes for why exactly-one
// cannot be asserted.
#[tokio::test]
async fn multiple_concurrent_ephemeral_listening_addresses_on_same_ip() {
    let node = ipfs::Node::new("test_node").await;
    let target = libp2p::build_multiaddr!(Ip4([127, 0, 0, 1]), Tcp(0u16));
    let first = node.add_listening_address(target.clone());
    let second = node.add_listening_address(target);
    let (first, second) = futures::future::join(first, second).await;
    // before we have an Swarm-alike api on the background task to make sure the two futures
    // (first and second) would attempt to modify the background task before a poll to the
    // inner swarm, this will produce one or two successes.
    //
    // with two attempts without polling the swarm in the between:
    // assert_eq!(first.is_ok(), second.is_err());
    //
    // intuitively it could seem that first will always succeed because it must get the first
    // attempt to push messages into the queue but not sure if that should be leaned on.
    assert!(
        first.is_ok() || second.is_ok(),
        "first: {:?}, second: {:?}",
        first,
        second
    );
}
// Concurrent listens on two different loopback IPs must both succeed.
#[tokio::test]
#[cfg(not(target_os = "macos"))]
async fn multiple_concurrent_ephemeral_listening_addresses_on_different_ip() {
    let node = ipfs::Node::new("test_node").await;
    // it doesnt work on mac os x as 127.0.0.2 is not enabled by default.
    let first =
        node.add_listening_address(libp2p::build_multiaddr!(Ip4([127, 0, 0, 1]), Tcp(0u16)));
    let second =
        node.add_listening_address(libp2p::build_multiaddr!(Ip4([127, 0, 0, 2]), Tcp(0u16)));
    let (first, second) = futures::future::join(first, second).await;
    // both should succeed
    first.unwrap();
    second.unwrap();
}
// Listening on 0.0.0.0:0 (fully unspecified) must resolve successfully; only
// completion is asserted, not the concrete resolved address.
#[tokio::test]
async fn adding_unspecified_addr_resolves_with_first() {
    let node = ipfs::Node::new("test_node").await;
    // there is no test in trying to match this with others as ... that would be quite
    // perilous.
    node.add_listening_address(libp2p::build_multiaddr!(Ip4([0, 0, 0, 0]), Tcp(0u16)))
        .await
        .unwrap();
}
// Two concurrent listens on the unspecified address: at least one must
// succeed (same caveats as the same-ip concurrent test above).
#[tokio::test]
async fn listening_for_multiple_unspecified_addresses() {
    let node = ipfs::Node::new("test_node").await;
    // there is no test in trying to match this with others as ... that would be quite
    // perilous.
    let target = libp2p::build_multiaddr!(Ip4([0, 0, 0, 0]), Tcp(0u16));
    let first = node.add_listening_address(target.clone());
    let second = node.add_listening_address(target);
    let (first, second) = futures::future::join(first, second).await;
    // this test is a bad one similar to multiple_concurrent_ephemeral_listening_addresses_on_same_ip
    // see also https://github.com/rs-ipfs/rust-ipfs/issues/194 for more discussion.
    // the other should be denied because there is a pending incomplete when trying to listen
    // on unspecified address
    assert!(
        first.is_ok() || second.is_ok(),
        "first: {:?}, second: {:?}",
        first,
        second
    );
}
// Removal must be keyed by the *bound* address returned from the add call,
// not by the original unbound (port 0) address that was requested.
#[tokio::test]
async fn remove_listening_address() {
    let node = ipfs::Node::new("test_node").await;
    let unbound = libp2p::build_multiaddr!(Ip4([127, 0, 0, 1]), Tcp(0u16));
    let first = node.add_listening_address(unbound.clone()).await.unwrap();
    // the book keeping changes from matching the unbound address to the bound one returned
    // from the future.
    node.remove_listening_address(unbound.clone())
        .await
        .unwrap_err();
    node.remove_listening_address(first).await.unwrap();
}
// Placeholder: intentionally empty and #[ignore]d until the test harness can
// interleave events with the background task (see TODO below).
#[test]
#[ignore]
fn remove_listening_address_before_completing() {
    // TODO: cannot test this before we have a way of getting between the queue used to communicate
    // with the IpfsFuture (or better yet, construct one ourselves here in the test) to make sure
    // we can push a IpfsEvent::AddListenerAddress followed by an IpfsEvent::RemoveListenerAddress
    // "immediatedly".
}
// Addresses configured before startup must show up in the node's identity;
// requires port 4001 to be free on the machine running the test.
#[tokio::test]
async fn pre_configured_listening_addrs() {
    use ipfs::{IpfsOptions, MultiaddrWithPeerId, MultiaddrWithoutPeerId, Node};
    use libp2p::Multiaddr;
    use std::convert::TryFrom;
    let mut opts = IpfsOptions::inmemory_with_generated_keys();
    let addr: Multiaddr = "/ip4/127.0.0.1/tcp/4001".parse().unwrap();
    opts.listening_addrs.push(addr.clone());
    let ipfs = Node::with_options(opts).await;
    let (_id, addrs) = ipfs.identity().await.unwrap();
    // Identity addresses carry the peer id; strip it before comparing.
    let addrs: Vec<MultiaddrWithoutPeerId> = addrs
        .into_iter()
        .map(|addr| MultiaddrWithPeerId::try_from(addr).unwrap().multiaddr)
        .collect();
    let addr = MultiaddrWithoutPeerId::try_from(addr).unwrap();
    assert!(
        addrs.contains(&addr),
        "pre-configured listening addr not found; is port 4001 available to listen on?; listening addrs: {:?}",
        addrs
    );
}
|
use libc::{c_char, size_t};
/// Size in bytes of the error-message buffer expected by `cmdline`.
pub const ERROR_MSG_SIZE_MAX: usize = 256;
/// Maximum string length (size minus the terminating zero).
pub const ERROR_MSG_SLEN_MAX: usize = ERROR_MSG_SIZE_MAX - 1;
#[link(name = "kernaux")]
extern "C" {
    // Parses `cmdline` into `argc`/`argv` backed by `buffer`; on failure the
    // reason is written into `error_msg`. Returns true on success.
    #[link_name = "kernaux_cmdline"]
    // TODO: Rust's "bool" is not guaranteed to be compatible with C's one
    pub fn cmdline(
        cmdline: *const c_char,
        error_msg: *mut c_char,
        argc: *mut size_t,
        argv: *mut *mut c_char,
        buffer: *mut c_char,
        arg_count_max: size_t,
        buffer_size: size_t,
    ) -> bool;
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::ffi::{CStr, CString};
    const ARG_COUNT_MAX: usize = 100;
    const BUFFER_SIZE: usize = 4096;
    // Exercises escaped spaces and quoted arguments:
    // `foo bar\ baz "car cdr"` must split into exactly three arguments.
    #[test]
    fn default() {
        let cmdline = CString::new("foo bar\\ baz \"car cdr\"").unwrap();
        let mut error_msg: [c_char; ERROR_MSG_SIZE_MAX] =
            [0; ERROR_MSG_SIZE_MAX];
        let mut argc: size_t = 0;
        let mut argv: [*mut c_char; ARG_COUNT_MAX] =
            [0 as *mut c_char; ARG_COUNT_MAX];
        let mut buffer: [c_char; BUFFER_SIZE] = [0; BUFFER_SIZE];
        unsafe {
            assert!(super::cmdline(
                cmdline.as_ptr(),
                error_msg.as_mut_ptr(),
                &mut argc,
                argv.as_mut_ptr(),
                buffer.as_mut_ptr(),
                ARG_COUNT_MAX,
                BUFFER_SIZE,
            ));
        }
        // A successful parse leaves the error message empty.
        assert_eq!(
            unsafe { CStr::from_ptr(error_msg.as_ptr()) }
                .to_str()
                .unwrap(),
            "",
        );
        assert_eq!(argc, 3);
        assert_eq!(unsafe { CStr::from_ptr(argv[0]) }.to_str().unwrap(), "foo");
        assert_eq!(
            unsafe { CStr::from_ptr(argv[1]) }.to_str().unwrap(),
            "bar baz",
        );
        assert_eq!(
            unsafe { CStr::from_ptr(argv[2]) }.to_str().unwrap(),
            "car cdr",
        );
    }
}
|
#![feature(ip_constructors)]
extern crate clap;
extern crate reqwest;
extern crate rottenbrit;
extern crate url;
use std::io::Read;
use url::percent_encoding::{percent_encode as pe, DEFAULT_ENCODE_SET};
use rottenbrit::metainfo::{MetaInfo, get_info_hash};
/// Percent-encodes raw bytes into an owned `String` using the default set.
fn percent_encode(input: &[u8]) -> String {
    let mut encoded = String::new();
    // The encoder yields string fragments; append them all.
    for fragment in pe(input, DEFAULT_ENCODE_SET) {
        encoded.push_str(fragment);
    }
    encoded
}
/// Parameters of a BitTorrent tracker announce request.
struct TrackerRequest {
    // 20-byte SHA-1 of the torrent's info dictionary.
    info_hash: Vec<u8>,
    // 20-byte client-chosen peer identifier.
    peer_id: Vec<u8>,
    // Optional client IP override; currently never set (see get_tracker).
    ip: Option<String>,
    port: u16,
    uploaded: u64,
    downloaded: u64,
    left: u64,
    // Announce event, e.g. "started".
    event: Option<String>,
}
/// Announces to the tracker at `url` and returns the raw (bencoded) response
/// body. `size` is reported as the number of bytes still left to download.
fn get_tracker(info_hash: Vec<u8>, url: &str, size: u64) -> Result<Vec<u8>, Box<::std::error::Error>> {
    let tr = TrackerRequest {
        info_hash,
        peer_id: b"rbxxxyyyyyzzzzz00000".to_vec(),
        ip: None,
        port: 6881,
        uploaded: 0,
        downloaded: 0,
        left: size,
        event: Some("started".to_string()),
    };
    // NOTE(review): `tr.ip` is never included in the query string, and
    // `tr.event.unwrap()` is only safe because `event` is set to Some above.
    let url = format!(
        "{}?info_hash={}&peer_id={}&port={}&uploaded={}&downloaded={}&left={}&event={}",
        url,
        percent_encode(&tr.info_hash),
        percent_encode(&tr.peer_id),
        tr.port,
        tr.uploaded,
        tr.downloaded,
        tr.left,
        tr.event.unwrap(),
    );
    println!("Reqwesting URL: {}", url);
    // Perform the GET and slurp the whole response body into memory.
    Ok(reqwest::get(&url).and_then(|mut response| {
        println!("Got a {} response", response.status());
        let mut buf = Vec::new();
        response.read_to_end(&mut buf).expect("Reading response");
        Ok(buf)
    })?)
}
/// Entry point: parses the .torrent path from the CLI, announces to the
/// torrent's tracker and prints the (lossy UTF-8) tracker response.
fn main() {
    let opts = clap::App::new("RottenBrit")
        .version("0.1")
        .author("J. Cliff Dyer <jcd@sdf.org>")
        .about("Rotten Brit Bittorrent client")
        .arg(
            clap::Arg::with_name("torrent")
                .required(true)
                .help("The path to a .torrent file."),
        )
        .get_matches();
    let mut tordata = Vec::new(); // Get file size
    let mut f = std::fs::File::open(opts.value_of("torrent").unwrap())
        .expect("open torrent");
    f.read_to_end(&mut tordata).expect("read torrent");
    // The info hash is computed from the raw bytes; the parsed MetaInfo
    // supplies the announce URL and total length.
    let info_hash = get_info_hash(tordata.clone()).expect("info hash");
    let mi = MetaInfo::from_bytes(&tordata).expect("parsing torrent file");
    println!("Got torrent: {:?}", &mi.announce);
    let bytes = get_tracker(info_hash.digest().bytes().to_vec(), &mi.announce, mi.info.length());
    println!("{}", ::std::string::String::from_utf8_lossy(&bytes.unwrap()));
}
|
mod args;
mod exec;
mod monitor;
use clap::{crate_authors, crate_version, App, AppSettings, Arg, ArgMatches, Result as ClapResult};
use inflector::Inflector;
use n3_builder::{ast, dirs, inflector, ExecRoot, GlobalVars, Result, Vars, QUERY_SPLIT_1};
use crate::args::Command;
/// Commands that run without an `ExecRoot`, mapped to their handlers.
pub const SWITCH_FN_1: &[(&str, FnExec)] = &[("monitor", self::monitor::f)];
/// Commands that require an `ExecRoot` and an `exec` argument.
pub const SWITCH_FN_2: &[&str] = &["train", "eval", "publish"];
/// Handler signature shared by all root-less commands.
pub type FnExec = fn(Command) -> Result<()>;
/// CLI entry point: parses environment variables and the first two positional
/// arguments (command, exec), then dispatches to a root-less handler, a
/// root-based executor, or the help screen.
fn main() -> Result<()> {
    // Step 1. parse envs
    let env = GlobalVars::default();
    let env_vars = env.to_variables();
    // Step 2. parse command & exec
    let cmd_args: Vec<_> = std::env::args().skip(1).collect();
    let command = cmd_args.get(0);
    let exec = cmd_args.get(1);
    // Step 3-1. execute commands that don't need a root
    if let Some((_, f)) = command
        .map(|x| SWITCH_FN_1.iter().find(|(k, _)| k == x))
        .flatten()
    {
        let args_set = [&env_vars];
        // unsafe: clap borrows unguarded interior references from the vars;
        // soundness relies on the drop order noted below.
        match unsafe { parse_args(&args_set) } {
            Ok(_) => {
                f(Command {
                    command: command.unwrap(),
                    env: &env,
                    args: None,
                })?;
            }
            Err(e) => {
                // clap parse failures are printed, not propagated.
                println!("{}", e);
            }
        }
        // drop order: app (=matches) -> args
        drop(env);
        Ok(())
    }
    // Step 3-2. execute commands with a root
    else if let Some(exec) = exec {
        let mut root = ExecRoot::try_new(env.clone(), Default::default())?;
        // Exec names are given in snake/kebab style but stored PascalCase.
        let args = root.get(&exec.to_pascal_case())?;
        let args_set = [&env_vars, &args.to_variables()];
        match unsafe { parse_args(&args_set) } {
            Ok(_) => {
                crate::exec::execute(Command {
                    command: command.unwrap(),
                    env: &env,
                    args: Some(args),
                })?;
            }
            Err(e) => {
                println!("{}", e);
            }
        }
        // drop order: app (=matches) -> args
        drop(root);
        drop(env);
        Ok(())
    }
    // Step 3-3. show help message
    else {
        let app = unsafe { subcommand_args(&env_vars, app()) };
        let matches = app.get_matches_from(&["--help"]);
        // drop order: app (=matches) -> args
        drop(matches);
        drop(env);
        Ok(())
    }
}
/// Builds the clap app from every variable set in `args`, parses the process
/// arguments, and writes matched values back into the variable sets.
///
/// The outer `ClapResult` carries clap parse errors (e.g. `--help`); the inner
/// `Result` carries failures from applying values to the vars.
///
/// # Safety
///
/// Borrows unguarded references out of `Vars` (see `subcommand_args`); the
/// caller must keep `args` alive and unmutated until the returned matches are
/// dropped.
unsafe fn parse_args<'a, 'b, 'c>(args: &[&'a Vars]) -> ClapResult<Result<ArgMatches<'b>>>
where
    'a: 'b,
    'b: 'c,
{
    let mut app = app();
    for args in args {
        app = subcommand_args(args, app);
    }
    let matches = app.get_matches_safe()?;
    for args in args {
        if let Err(e) = apply(&matches, args) {
            return Ok(Err(e));
        }
    }
    Ok(Ok(matches))
}
/// Copies every matched CLI value into the corresponding variable, joining
/// multi-value occurrences with `QUERY_SPLIT_1`.
fn apply(matches: &ArgMatches, args: &Vars) -> Result<()> {
    for name in args.inner.keys() {
        // Variables absent from the command line keep their current value.
        let values = match matches.values_of(name) {
            Some(values) => values,
            None => continue,
        };
        let joined = values.collect::<Vec<_>>().join(QUERY_SPLIT_1);
        args.set(name, &joined)?;
    }
    Ok(())
}
/// Adds one clap `Arg` per variable in `args` to `app`.
///
/// # Safety
///
/// Uses `try_borrow_unguarded` to hand clap references that outlive the
/// RefCell guard; the caller must ensure the app (and its matches) is dropped
/// before `args` (see the "drop order" comments at the call sites).
unsafe fn subcommand_args<'a, 'b, 'c>(args: &'a Vars, mut app: App<'b, 'c>) -> App<'b, 'c>
where
    'a: 'b,
    'b: 'c,
{
    for (name, var) in &args.inner {
        // drop order: app -> args
        let var = var.try_borrow_unguarded().unwrap();
        // hidden
        if var.name == dirs::N3_SOURCE_ROOT {
            continue;
        }
        let mut arg = Arg::with_name(name).long(name).takes_value(true);
        if let Some(shortcut) = &var.shortcut {
            arg = arg.short(shortcut.to_lowercase());
        }
        if let Some(desc) = &var.description {
            arg = arg.help(desc);
        }
        // List-typed variables may be passed multiple times.
        if let Some(ast::LetType::List(_)) = var.ty {
            arg = arg.multiple(true);
        }
        // Variables without a default value must be supplied by the user.
        if var.value.is_none() {
            arg = arg.required(true);
        }
        app = app.arg(arg);
    }
    app
}
/// Builds the base clap application with the global `n3` arguments.
///
/// The previous signature declared an extra, never-used lifetime `'a` plus
/// `where` bounds that constrained nothing; `App<'a, 'b>` needs exactly two.
fn app<'a, 'b>() -> App<'a, 'b> {
    // Commands listed in SWITCH_FN_2 additionally require an `exec` value.
    let exec_commands: Vec<_> = SWITCH_FN_2.iter().map(|x| ("command", *x)).collect();
    App::new("n3")
        .version(crate_version!())
        .author(crate_authors!())
        .about("Neural Network Notation")
        .arg(
            Arg::with_name("root_dir")
                .long("root_dir")
                .help("The n3 program's own local directory")
                .takes_value(true),
        )
        .setting(AppSettings::ArgRequiredElseHelp)
        .setting(AppSettings::ColoredHelp)
        .setting(AppSettings::ColorAuto)
        .setting(AppSettings::GlobalVersion)
        .arg(Arg::with_name("command").required(true))
        .arg(Arg::with_name("exec").required_ifs(&exec_commands))
}
|
use std::{future, sync::Arc};
use futures::{stream, StreamExt};
use observability_deps::tracing::debug;
use parking_lot::Mutex;
use tokio::time::Instant;
use crate::buffer_tree::partition::PartitionData;
use super::queue::PersistQueue;
/// [`PERSIST_ENQUEUE_CONCURRENCY`] defines the parallelism used when acquiring
/// partition locks and marking the partition as persisting.
const PERSIST_ENQUEUE_CONCURRENCY: usize = 5;
// Persist a set of [`PartitionData`], blocking for completion of all enqueued
// persist jobs and returning the number of partitions that were persisted.
//
// This call is not atomic, partitions are marked for persistence incrementally.
// Writes that landed into the partition buffer after this call, but before the
// partition data is read will be included in the persisted data.
pub(crate) async fn persist_partitions<T, P>(iter: T, persist: &P) -> usize
where
    T: Iterator<Item = Arc<Mutex<PartitionData>>> + Send,
    P: PersistQueue + Clone,
{
    let notifications = stream::iter(iter)
        .filter_map(|p| async move {
            let t = Instant::now();
            // Skip this partition if there is no data to persist
            let data = p.lock().mark_persisting()?;
            debug!(
                partition_id=%data.partition_id(),
                lock_wait=?Instant::now().duration_since(t),
                "read data for persistence"
            );
            // Enqueue the partition for persistence.
            //
            // The persist task will call mark_persisted() on the partition
            // once complete.
            // Some(future::ready(persist.queue_persist(p, data).await))
            // Wrap the pair in a ready-future so buffer_unordered (below) can
            // drive the lock acquisitions concurrently.
            Some(future::ready((p, data)))
        })
        // Concurrently attempt to obtain partition locks and mark them as
        // persisting. This will hide the latency of individual lock
        // acquisitions.
        .buffer_unordered(PERSIST_ENQUEUE_CONCURRENCY)
        // Serialise adding partitions to the persist queue (a fast
        // operation that doesn't benefit from contention at all).
        .then(|(p, data)| {
            let persist = persist.clone();
            // Enqueue and retain the notification receiver, which will be
            // awaited later.
            #[allow(clippy::async_yields_async)]
            async move {
                persist.enqueue(p, data).await
            }
        })
        .collect::<Vec<_>>()
        .await;
    debug!(
        n_partitions = notifications.len(),
        "queued all non-empty partitions for persist"
    );
    let count = notifications.len();
    // Wait for all the persist completion notifications.
    for n in notifications {
        n.await.expect("persist worker task panic");
    }
    count
}
|
//! Macros for implementing common functionality and traits for the marked
//! pointer types with identical internal structure (Owned, Shared, Unlinked and
//! Unprotected)
// Expands to the shared associated types and methods of the marked-pointer
// trait. `$self` is the ident the caller wants to use for the receiver in the
// by-value methods (macro hygiene forbids writing a literal `self` here).
macro_rules! impl_trait {
    ($self:ident) => {
        type Pointer = Self;
        type Item = T;
        type MarkBits = N;
        #[inline]
        fn as_marked_ptr(&self) -> crate::pointer::MarkedPtr<T, N> {
            self.inner.into_marked_ptr()
        }
        #[inline]
        fn into_marked_ptr(self) -> crate::pointer::MarkedPtr<Self::Item, Self::MarkBits> {
            self.into_marked_non_null().into_marked_ptr()
        }
        #[inline]
        fn marked($self: Self, tag: usize) -> crate::pointer::Marked<Self::Pointer> {
            let inner = $self.inner.with_tag(tag);
            crate::pointer::Marked::Value(Self { inner, _marker: PhantomData })
        }
        #[inline]
        fn unmarked($self: Self) -> Self {
            let inner = $self.inner.clear_tag();
            Self { inner, _marker: PhantomData }
        }
        #[inline]
        fn decompose($self: Self) -> (Self, usize) {
            let (inner, tag) = $self.inner.decompose();
            // forget: ownership of the allocation moves into the returned Self.
            core::mem::forget($self);
            ( Self { inner: crate::pointer::MarkedNonNull::from(inner), _marker: PhantomData }, tag)
        }
        #[inline]
        unsafe fn from_marked_ptr(
            marked: crate::pointer::MarkedPtr<Self::Item, Self::MarkBits>
        ) -> Self
        {
            debug_assert!(!marked.is_null());
            Self { inner: MarkedNonNull::new_unchecked(marked), _marker: PhantomData}
        }
        #[inline]
        unsafe fn from_marked_non_null(
            marked: crate::pointer::MarkedNonNull<Self::Item, Self::MarkBits>
        ) -> Self
        {
            Self { inner: marked, _marker: PhantomData }
        }
    };
}
// Expands to inherent constructors shared by all marked-pointer types; as in
// `impl_trait`, `$self` names the receiver ident for by-value methods.
macro_rules! impl_inherent {
    ($self:ident) => {
        /// Creates a `None` variant for an [`Option<Self>`][Option].
        ///
        /// This is useful for calls to [`store`][store], [`swap`][swap] or
        /// [`compare_exchange_*`][compare_exchange], when a `null` pointer
        /// needs to be inserted.
        /// These methods accept values of various non-nullable pointer types
        /// ([`Shared`][Shared], [`Owned`][Owned], [`Unlinked`][Unlinked] and
        /// [`Unprotected`][Unprotected]) and [`Option`] types thereof as
        /// argument.
        /// However, the compiler is usually not able to infer the concrete
        /// type, when a [`None`] is inserted, and this function is intended for
        /// these cases.
        ///
        /// [store]: crate::Atomic::store
        /// [swap]: crate::Atomic::swap
        /// [compare_exchange]: crate::Atomic::compare_exchange
        /// [Shared]: crate::Shared
        /// [Owned]: crate::Owned
        /// [Unlinked]: crate::Unlinked
        /// [Unprotected]: crate::Unprotected
        ///
        /// # Example
        ///
        /// ```
        /// use std::sync::atomic::Ordering;
        ///
        /// type Atomic<T> = reclaim::leak::Atomic<T, reclaim::typenum::U0>;
        /// type Owned<T> = reclaim::leak::Owned<T, reclaim::typenum::U0>;
        /// type Unlinked<T> = reclaim::leak::Unlinked<T, reclaim::typenum::U0>;
        ///
        /// let atomic = Atomic::new(1);
        /// let swap = atomic.swap(Owned::none(), Ordering::Relaxed).unwrap();
        ///
        /// assert_eq!(swap.as_ref(), &1);
        /// unsafe { Unlinked::retire(swap) }; // leaks memory
        /// ```
        #[inline]
        pub fn none() -> Option<Self> {
            None
        }
        /// Creates an unmarked [`Null`][crate::pointer::Marked::Null] variant
        /// for a [`Marked<Self>`][crate::pointer::Marked].
        #[inline]
        pub fn null() -> crate::pointer::Marked<Self> {
            Marked::Null(0)
        }
        /// Creates a marked [`Null`][crate::pointer::Marked::Null] variant for
        /// a [`Marked<Self>`][crate::pointer::Marked] with the given `tag`.
        #[inline]
        pub fn null_with_tag(tag: usize) -> crate::pointer::Marked<Self> {
            Marked::Null(tag)
        }
        /// Consumes the given `Self` and returns the same value but with the
        /// specified `tag`.
        ///
        /// Any previous tag is overwritten.
        #[inline]
        pub fn compose($self: Self, tag: usize) -> Self {
            let inner = $self.inner;
            // forget: the allocation's ownership transfers to the new Self.
            core::mem::forget($self);
            Self { inner: inner.with_tag(tag), _marker: PhantomData }
        }
    };
}
|
use winnow::prelude::*;
use winnow::Partial;
mod json;
mod parser;
mod parser_dispatch;
mod parser_partial;
/// Benchmarks the JSON parsers over two inputs (a small literal and the
/// canada.json corpus) across five configurations: three error types on the
/// basic parser, plus dispatch-based and streaming (Partial) variants.
fn json_bench(c: &mut criterion::Criterion) {
    let data = [("small", SMALL), ("canada", CANADA)];
    let mut group = c.benchmark_group("json");
    for (name, sample) in data {
        let len = sample.len();
        // Report throughput in bytes/sec so inputs of different size compare.
        group.throughput(criterion::Throughput::Bytes(len as u64));
        group.bench_with_input(criterion::BenchmarkId::new("basic", name), &len, |b, _| {
            type Error<'i> = winnow::error::InputError<parser::Stream<'i>>;
            b.iter(|| parser::json::<Error>.parse_peek(sample).unwrap());
        });
        // `()` as error: measures the cost of error bookkeeping itself.
        group.bench_with_input(criterion::BenchmarkId::new("unit", name), &len, |b, _| {
            type Error<'i> = ();
            b.iter(|| parser::json::<Error>.parse_peek(sample).unwrap());
        });
        group.bench_with_input(
            criterion::BenchmarkId::new("context", name),
            &len,
            |b, _| {
                type Error<'i> = winnow::error::ContextError<parser::Stream<'i>>;
                b.iter(|| parser::json::<Error>.parse_peek(sample).unwrap());
            },
        );
        group.bench_with_input(
            criterion::BenchmarkId::new("dispatch", name),
            &len,
            |b, _| {
                type Error<'i> = winnow::error::InputError<parser_dispatch::Stream<'i>>;
                b.iter(|| parser_dispatch::json::<Error>.parse_peek(sample).unwrap());
            },
        );
        group.bench_with_input(
            criterion::BenchmarkId::new("streaming", name),
            &len,
            |b, _| {
                type Error<'i> = winnow::error::InputError<parser_partial::Stream<'i>>;
                b.iter(|| {
                    parser_partial::json::<Error>
                        .parse_peek(Partial::new(sample))
                        .unwrap()
                });
            },
        );
    }
    group.finish();
}
const SMALL: &str = " { \"a\"\t: 42,
\"b\": [ \"x\", \"y\", 12 ,\"\\u2014\", \"\\uD83D\\uDE10\"] ,
\"c\": { \"hello\" : \"world\"
}
} ";
const CANADA: &str = include_str!("../../third_party/nativejson-benchmark/data/canada.json");
criterion::criterion_group!(benches, json_bench,);
criterion::criterion_main!(benches);
|
use super::{Expr};
use derive_more::Display;
use std::ops;
/// A numeric literal: either a 32-bit integer or a single-precision float.
/// `Display` is derived via `derive_more` and shows the wrapped value.
#[derive(Display, Clone, Copy)]
pub enum Number {
    Int(i32),
    Float(f32),
}
impl Number {
    /// Wraps this number into an `Expr` leaf; a bare number is its own
    /// simplest form, so no further reduction happens.
    pub const fn simplify(self) -> Expr {
        Expr::Number(self)
    }
}
impl PartialEq<i32> for Number {
    /// An `Int` matches on exact equality; a `Float` matches when it equals
    /// the integer converted to `f32`.
    fn eq(&self, other: &i32) -> bool {
        use Number::{Float, Int};
        match *self {
            Int(i) => i == *other,
            Float(f) => f == *other as f32,
        }
    }
}
impl ops::Add for Number {
    type Output = Self;
    /// Adds two numbers; the result stays `Int` only when both operands are
    /// `Int`, otherwise both sides are promoted to `f32`.
    fn add(self, rhs: Self) -> Self {
        use Number::{Float, Int};
        if let (Int(a), Int(b)) = (self, rhs) {
            return Int(a + b);
        }
        let as_f32 = |n: Self| match n {
            Int(i) => i as f32,
            Float(f) => f,
        };
        Float(as_f32(self) + as_f32(rhs))
    }
}
impl ops::Sub for Number {
    type Output = Self;
    /// Subtracts two numbers; the result stays `Int` only when both operands
    /// are `Int`, otherwise both sides are promoted to `f32`.
    fn sub(self, rhs: Self) -> Self {
        use Number::{Float, Int};
        if let (Int(a), Int(b)) = (self, rhs) {
            return Int(a - b);
        }
        let as_f32 = |n: Self| match n {
            Int(i) => i as f32,
            Float(f) => f,
        };
        Float(as_f32(self) - as_f32(rhs))
    }
}
impl ops::Mul for Number {
    type Output = Self;
    /// Multiplies two numbers; the result stays `Int` only when both operands
    /// are `Int`, otherwise both sides are promoted to `f32`.
    fn mul(self, rhs: Self) -> Self {
        use Number::{Float, Int};
        if let (Int(a), Int(b)) = (self, rhs) {
            return Int(a * b);
        }
        let as_f32 = |n: Self| match n {
            Int(i) => i as f32,
            Float(f) => f,
        };
        Float(as_f32(self) * as_f32(rhs))
    }
}
impl ops::Div for Number {
    type Output = Self;
    /// Divides two numbers. Division always promotes to `Float`, even for two
    /// `Int` operands, so integer truncation never occurs.
    fn div(self, rhs: Self) -> Self {
        use Number::{Float, Int};
        let as_f32 = |n: Self| match n {
            Int(i) => i as f32,
            Float(f) => f,
        };
        Float(as_f32(self) / as_f32(rhs))
    }
}
|
use super::{DiscordHandler, DiscordMsg};
use async_trait::async_trait;
/// Prints events at [`tracing::Level::DEBUG`] and errors at [`tracing::Level::WARN`]
pub struct Printer;
#[async_trait]
impl DiscordHandler for Printer {
    async fn on_message(&self, msg: DiscordMsg) {
        // Route by message kind: events are routine, errors are noteworthy.
        match msg {
            DiscordMsg::Event(eve) => tracing::debug!(event = ?eve),
            DiscordMsg::Error(err) => tracing::warn!(error = ?err),
        }
    }
}
/// Forwards messages to a receiver
///
/// ```no_run
/// use discord_sdk as ds;
/// let (forwarder, mut events) = ds::handlers::Forwarder::new();
/// let discord = ds::Discord::new(ds::DiscordApp::PlainId(1), ds::Subscriptions::ALL, Box::new(forwarder)).unwrap();
/// ```
pub struct Forwarder {
    // Sending half; the matching receiver is handed out by `new`.
    tx: tokio::sync::mpsc::UnboundedSender<DiscordMsg>,
}
impl Forwarder {
    /// Creates the handler together with the receiver that will observe every
    /// forwarded message.
    pub fn new() -> (Self, tokio::sync::mpsc::UnboundedReceiver<DiscordMsg>) {
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
        (Self { tx }, rx)
    }
}
#[async_trait]
impl DiscordHandler for Forwarder {
    async fn on_message(&self, msg: DiscordMsg) {
        // send only fails when the receiver was dropped; log the message we
        // could not deliver (the error returns it as `.0`).
        if let Err(msg) = self.tx.send(msg) {
            tracing::warn!(msg = ?msg.0, "message dropped");
        }
    }
}
|
mod reader;
pub use reader::{ClosedSegmentFileReader, Error as ReaderError, Result as ReaderResult};
mod writer;
pub use writer::{Error as WriterError, OpenSegmentFileWriter, Result as WriterResult};
|
use anyhow::{anyhow, Result};
use audio_device::wasapi;
use audio_generator::{self as gen, Generator as _};
/// Renders an endless three-tone chord (C4 + E4 + A4 sines) to the given
/// WASAPI output client, converting samples to the device's format `T`.
/// Never returns except through an error.
fn run_output<T>(client: wasapi::Client, mut config: wasapi::ClientConfig) -> Result<()>
where
    T: Copy + wasapi::Sample + audio_core::Translate<f32>,
    [T]: rand::Fill,
{
    // NOTE(review): overrides the device's default sample rate with a
    // hard-coded 120 kHz — confirm this is intentional and supported.
    config.sample_rate = 120000;
    let initialized = client.initialize::<T>(config)?;
    let mut render_client = initialized.render_client()?;
    client.start()?;
    // Use the post-initialization config: the device may have adjusted it.
    let config = initialized.config();
    let sample_rate = config.sample_rate as f32;
    dbg!(config);
    let mut a = gen::Sine::new(261.63, sample_rate);
    let mut b = gen::Sine::new(329.63, sample_rate);
    let mut c = gen::Sine::new(440.00, sample_rate);
    loop {
        let mut data = render_client.buffer_mut()?;
        // Frames are interleaved: one sample per channel per frame.
        for n in (0..data.len()).step_by(config.channels as usize) {
            // 0.01 gain keeps the summed sines quiet on purpose.
            let f = T::translate((a.sample() + b.sample() + c.sample()) * 0.01);
            for c in 0..config.channels as usize {
                data[n + c] = f;
            }
        }
        data.release()?;
    }
}
/// Opens the default output device and dispatches to `run_output` with the
/// concrete sample type matching the device's native format.
fn generate_audio() -> Result<()> {
    let output =
        wasapi::default_output_client()?.ok_or_else(|| anyhow!("no default device found"))?;
    let config = output.default_client_config()?;
    match config.sample_format {
        wasapi::SampleFormat::I16 => run_output::<i16>(output, config),
        wasapi::SampleFormat::F32 => run_output::<f32>(output, config),
    }
}
/// Entry point: warns the user, waits for confirmation on stdin, then runs
/// audio generation on a dedicated background thread with the WASAPI prelude.
pub fn main() -> Result<()> {
    println!("WARNING: This program will generate audio and we do our best to avoid them being too loud.");
    println!("Please make sure your volume is turned down!");
    println!();
    println!("Press [enter] to continue...");
    let mut line = String::new();
    std::io::stdin().read_line(&mut line)?;
    // Audio work happens off the main thread; the prelude initializes WASAPI
    // on that thread before any submitted task runs.
    let bg = ste::Builder::new().prelude(wasapi::audio_prelude).build()?;
    bg.submit(generate_audio)?;
    bg.join();
    Ok(())
}
|
use std::any::TypeId;
use ttmap::{TypeMap, ValueBox};
// Exercises the typed API end to end: insert/replace, typed get, defaults,
// removal, and the len/is_empty bookkeeping across three distinct key types
// (&'static str, fn(), usize).
#[test]
fn check_type_map() {
    let mut map = TypeMap::new();
    assert!(map.is_empty());
    assert_eq!(map.len(), 0);
    assert!(map.insert("test").is_none());
    // Inserting the same type again replaces and returns the old value.
    assert_eq!(*map.insert("lolka").unwrap(), "test");
    assert_eq!(*map.get::<&'static str>().unwrap(), "lolka");
    assert!(map.insert::<fn ()>(check_type_map).is_none());
    assert!(*map.get::<fn ()>().unwrap() == check_type_map);
    assert!(!map.has::<usize>());
    // get_or_default inserts Default::default() for a missing key.
    assert_eq!(*map.get_or_default::<usize>(), 0);
    *map.get_or_default::<usize>() = 5;
    assert_eq!(*map.get_or_default::<usize>(), 5);
    assert_eq!(*map.get::<usize>().unwrap(), 5);
    assert!(!map.is_empty());
    assert_eq!(map.len(), 3);
    // remove returns the stored value once, then None.
    assert_eq!(*map.remove::<usize>().unwrap(), 5);
    assert_eq!(map.len(), 2);
    assert_eq!(map.remove::<usize>(), None);
    assert_eq!(*map.remove::<&'static str>().unwrap(), "lolka");
    assert_eq!(map.len(), 1);
    assert_eq!(map.remove::<&'static str>(), None);
    assert!(*map.remove::<fn ()>().unwrap() == check_type_map);
    assert_eq!(map.len(), 0);
    assert_eq!(map.remove::<fn ()>(), None);
    assert!(map.is_empty());
    map.clear();
    assert!(map.is_empty());
}
#[test]
fn check_raw() {
    // Exercises the raw (TypeId-keyed, boxed-value) TypeMap API and checks it
    // stays consistent with the typed API. Assertions are order-dependent.
    let mut map = TypeMap::new();
    assert!(map.is_empty());
    assert_eq!(map.len(), 0);
    assert!(map.insert("test").is_none());
    // insert_raw keys by the boxed value's TypeId; it displaces the typed
    // insert above and hands back the old value as a boxed Any.
    assert_eq!(*(*map.insert_raw(Box::new("lolka") as ValueBox).unwrap()).downcast_ref::<&'static str>().unwrap(), "test");
    assert_eq!(*map.get::<&'static str>().unwrap(), "lolka");
    assert_eq!(*map.get_raw(TypeId::of::<&'static str>()).unwrap().downcast_ref::<&'static str>().unwrap(), "lolka");
    assert!(map.get::<usize>().is_none());
    assert!(map.get_raw(TypeId::of::<usize>()).is_none());
    // Mutation through the raw accessor is visible through the typed one.
    *map.get_mut_raw(TypeId::of::<&'static str>()).unwrap().downcast_mut::<&'static str>().unwrap() = "abc";
    assert_eq!(*map.get::<&'static str>().unwrap(), "abc");
    assert_eq!(*map.get_raw(TypeId::of::<&'static str>()).unwrap().downcast_ref::<&'static str>().unwrap(), "abc");
    assert!(map.get::<usize>().is_none());
    assert!(map.get_raw(TypeId::of::<usize>()).is_none());
    // remove_raw yields the box once; afterwards both APIs see the key gone.
    let str_box = map.remove_raw(TypeId::of::<&'static str>()).unwrap();
    assert!(map.remove_raw(TypeId::of::<&'static str>()).is_none());
    assert!(map.get::<&'static str>().is_none());
    assert!(map.get_raw(TypeId::of::<&'static str>()).is_none());
    assert_eq!(str_box.as_ref().type_id(), TypeId::of::<&'static str>());
    // A failed downcast returns the original box unchanged (Err path).
    let str_box = str_box.downcast::<bool>().unwrap_err();
    assert_eq!(*str_box.downcast::<&'static str>().unwrap(), "abc");
}
#[test]
fn check_dtor_called() {
    // Verifies that a value stored in the map is dropped when the map goes
    // out of scope.
    //
    // Fix: the original transmuted `&mut bool` to `&'static mut bool` to
    // satisfy the map's lifetime requirements — an unsound aliasing hack.
    // A shared atomic flag achieves the same observation safely (and is
    // Send + Sync + 'static, so it satisfies any value bounds the map has).
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::Arc;
    let is_called = Arc::new(AtomicBool::new(false));
    // Wrapper whose destructor flips the shared flag.
    struct CustomData(Arc<AtomicBool>);
    impl Drop for CustomData {
        fn drop(&mut self) {
            self.0.store(true, Ordering::SeqCst);
        }
    }
    {
        let data = CustomData(Arc::clone(&is_called));
        let mut map = TypeMap::new();
        assert!(map.insert(data).is_none());
        // `map` (and the stored CustomData) is dropped here.
    }
    assert!(is_called.load(Ordering::SeqCst));
}
|
pub ( in oo_facade) mod complex_parts {
    //! Drivetrain subsystems hidden behind `VehicleFacade`.
    /// Multiplier used to derive torque from engine rpm.
    const TORQUE_MUL: u32 = 1000;
    pub use std::cell::Cell;
    /// Direction selector for the drivetrain.
    #[derive(Debug, Clone, Copy)]
    pub enum Throttle {
        Fw,
        N
    }
    /// Engine with interior-mutable rpm state.
    #[derive(Debug)]
    pub struct Engine {
        rpm: Cell<u32>
    }
    impl Engine {
        /// Creates an engine at rest (0 rpm).
        pub fn new() -> Engine {
            Engine { rpm: Cell::new(0) }
        }
        /// Updates the current rpm.
        pub fn set_rpm(&self, rpm: u32) {
            self.rpm.set(rpm);
        }
        /// Computes the torque for the current rpm and logs it.
        pub fn get_torque(&self) -> u32 {
            let torque = self.rpm.get() * TORQUE_MUL;
            println!("Engine torque is: {}", torque);
            torque
        }
    }
    /// Axles driven by engine torque.
    #[derive(Debug)]
    pub struct DrivenAxles;
    impl DrivenAxles {
        /// Converts torque into an axle rotation speed and logs it.
        pub fn set_rotations(&self, torque: u32) {
            let axle_rpm = f64::from(torque) / 150.0;
            println!("Driven axles rpm: {}", axle_rpm);
        }
    }
}
use self::complex_parts::*;
/// Facade over the drivetrain subsystems: one object that keeps the throttle,
/// engine, and axles consistent with each other.
#[derive(Debug)]
struct VehicleFacade {
    throttler: Cell<Throttle>, // current throttle position
    engine: Engine,            // torque source
    axles: DrivenAxles         // torque consumer
}
impl VehicleFacade {
    /// Engages forward throttle and spins the drivetrain at `power` rpm.
    fn ride(&self, power: u32) {
        self.throttler.replace(Throttle::Fw);
        self.propel(power);
    }
    /// Returns the throttle to neutral and winds the drivetrain down to 0 rpm.
    fn stop(&self) {
        self.throttler.replace(Throttle::N);
        self.propel(0);
    }
    /// Shared path: set the engine rpm, then feed the resulting torque to the axles.
    fn propel(&self, rpm: u32) {
        self.engine.set_rpm(rpm);
        self.axles.set_rotations(self.engine.get_torque());
    }
    /// Debug-prints the whole facade state followed by a separator line.
    fn dump_state(&self) {
        println!("{:?}", self);
        println!("-------------------- dump_vehicle_state_end --------------------");
    }
}
pub fn run_oo() {
println!("-------------------- {} --------------------", file!());
let vf = VehicleFacade {
throttler: Cell::new(Throttle::N),
engine: Engine::new(),
axles: DrivenAxles
};
vf.ride(234);
vf.dump_state();
vf.ride(235);
vf.dump_state();
vf.stop();
vf.dump_state();
} |
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_catalog::table::Table;
use common_catalog::table_context::TableContext;
use common_exception::Result;
use common_expression::types::number::NumberScalar;
use common_expression::types::DataType;
use common_expression::types::NumberDataType;
use common_expression::ColumnBuilder;
use common_expression::DataBlock;
use common_expression::Scalar;
use common_expression::TableDataType;
use common_expression::TableField;
use common_expression::TableSchemaRefExt;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_meta_app::schema::TableMeta;
use crate::SyncOneBlockSystemTable;
use crate::SyncSystemTable;
/// System table listing the nodes of the current cluster
/// (registered as `'system'.'clusters'` — see `create`).
pub struct ClustersTable {
    table_info: TableInfo,
}
impl SyncSystemTable for ClustersTable {
    // NOTE(review): the table itself is registered as "clusters" with desc
    // "'system'.'clusters'" (see `create`), but this constant says
    // "system.cluster" (singular) — confirm which spelling is intended.
    const NAME: &'static str = "system.cluster";
    fn get_table_info(&self) -> &TableInfo {
        &self.table_info
    }
    /// Builds a single block with one row per cluster node:
    /// (name, host, port, version).
    fn get_full_data(&self, ctx: Arc<dyn TableContext>) -> Result<DataBlock> {
        let nodes = ctx.get_cluster().nodes.clone();
        let node_count = nodes.len();
        let mut name_col = ColumnBuilder::with_capacity(&DataType::String, node_count);
        let mut host_col = ColumnBuilder::with_capacity(&DataType::String, node_count);
        let mut port_col = ColumnBuilder::with_capacity(
            &DataType::Number(NumberDataType::UInt16),
            node_count,
        );
        let mut version_col = ColumnBuilder::with_capacity(&DataType::String, node_count);
        for node in &nodes {
            let (host, port) = node.ip_port()?;
            name_col.push(Scalar::String(node.id.as_bytes().to_vec()).as_ref());
            host_col.push(Scalar::String(host.as_bytes().to_vec()).as_ref());
            port_col.push(Scalar::Number(NumberScalar::UInt16(port)).as_ref());
            version_col.push(Scalar::String(node.binary_version.as_bytes().to_vec()).as_ref());
        }
        Ok(DataBlock::new_from_columns(vec![
            name_col.build(),
            host_col.build(),
            port_col.build(),
            version_col.build(),
        ]))
    }
}
impl ClustersTable {
    /// Builds the clusters system table with its fixed four-column schema
    /// and wraps it in a `SyncOneBlockSystemTable`.
    pub fn create(table_id: u64) -> Arc<dyn Table> {
        // Fixed schema: one row per node with name, host, port and version.
        let fields = vec![
            TableField::new("name", TableDataType::String),
            TableField::new("host", TableDataType::String),
            TableField::new("port", TableDataType::Number(NumberDataType::UInt16)),
            TableField::new("version", TableDataType::String),
        ];
        let table_info = TableInfo {
            desc: "'system'.'clusters'".to_string(),
            name: "clusters".to_string(),
            ident: TableIdent::new(table_id, 0),
            meta: TableMeta {
                schema: TableSchemaRefExt::create(fields),
                engine: "SystemClusters".to_string(),
                ..Default::default()
            },
            ..Default::default()
        };
        SyncOneBlockSystemTable::create(ClustersTable { table_info })
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::sync::Arc;
use ce::types::decimal::DecimalSize;
use ce::types::DecimalDataType;
use ce::types::NumberDataType;
use chrono::TimeZone;
use chrono::Utc;
use common_expression as ce;
use common_expression::TableDataType;
use common_expression::TableField;
use common_expression::TableSchema;
use common_meta_app::schema as mt;
use common_meta_app::share;
use common_proto_conv::FromToProto;
use common_proto_conv::Incompatible;
use common_proto_conv::VER;
use maplit::btreemap;
use pretty_assertions::assert_eq;
/// Shorthand used throughout the fixtures: convert any `ToString`
/// value into an owned `String`.
fn s(ss: impl ToString) -> String {
    let owned: String = ss.to_string();
    owned
}
fn new_db_meta_share() -> mt::DatabaseMeta {
mt::DatabaseMeta {
engine: "44".to_string(),
engine_options: btreemap! {s("abc") => s("def")},
options: btreemap! {s("xyz") => s("foo")},
created_on: Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap(),
updated_on: Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 9).unwrap(),
comment: "foo bar".to_string(),
drop_on: None,
shared_by: BTreeSet::new(),
from_share: Some(share::ShareNameIdent {
tenant: "tenant".to_string(),
share_name: "share".to_string(),
}),
}
}
fn new_db_meta() -> mt::DatabaseMeta {
mt::DatabaseMeta {
engine: "44".to_string(),
engine_options: btreemap! {s("abc") => s("def")},
options: btreemap! {s("xyz") => s("foo")},
created_on: Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap(),
updated_on: Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 9).unwrap(),
comment: "foo bar".to_string(),
drop_on: None,
shared_by: BTreeSet::from_iter(vec![1].into_iter()),
from_share: None,
}
}
/// Fixture: a `ShareMeta` whose `share_from_db_ids` set is populated.
fn new_share_meta_share_from_db_ids() -> share::ShareMeta {
    let now = Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap();
    let later = Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 9).unwrap();
    let db_entry = share::ShareGrantEntry::new(
        share::ShareGrantObject::Database(1),
        share::ShareGrantObjectPrivilege::Usage,
        now,
    );
    let table_entry = share::ShareGrantEntry::new(
        share::ShareGrantObject::Table(19),
        share::ShareGrantObjectPrivilege::Select,
        now,
    );
    let mut entries = BTreeMap::new();
    entries.insert(table_entry.to_string(), table_entry);
    share::ShareMeta {
        database: Some(db_entry),
        entries,
        accounts: BTreeSet::from_iter(vec![s("a"), s("b")]),
        share_from_db_ids: BTreeSet::from_iter(vec![1, 2]),
        comment: Some(s("comment")),
        share_on: now,
        update_on: Some(later),
    }
}
/// Fixture: a `ShareMeta` with an empty `share_from_db_ids` set.
fn new_share_meta() -> share::ShareMeta {
    let now = Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap();
    let later = Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 9).unwrap();
    let db_entry = share::ShareGrantEntry::new(
        share::ShareGrantObject::Database(1),
        share::ShareGrantObjectPrivilege::Usage,
        now,
    );
    let table_entry = share::ShareGrantEntry::new(
        share::ShareGrantObject::Table(19),
        share::ShareGrantObjectPrivilege::Select,
        now,
    );
    let mut entries = BTreeMap::new();
    entries.insert(table_entry.to_string(), table_entry);
    share::ShareMeta {
        database: Some(db_entry),
        entries,
        accounts: BTreeSet::from_iter(vec![s("a"), s("b")]),
        share_from_db_ids: BTreeSet::new(),
        comment: Some(s("comment")),
        share_on: now,
        update_on: Some(later),
    }
}
/// Fixture: a `ShareAccountMeta` for an accepted share.
fn new_share_account_meta() -> share::ShareAccountMeta {
    let shared_on = Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap();
    let accepted_on = Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 9).unwrap();
    share::ShareAccountMeta {
        account: s("account"),
        share_id: 4,
        share_on: shared_on,
        accept_on: Some(accepted_on),
    }
}
/// Fixture: a `TableMeta` exercising every supported column type, used by the
/// protobuf round-trip and backward-compatibility tests. Do not change the
/// literal values — the serialized bytes are compared across versions.
fn new_table_meta() -> mt::TableMeta {
    mt::TableMeta {
        schema: Arc::new(ce::TableSchema::new_from(
            vec![
                // Nullable column that also carries a default expression.
                ce::TableField::new(
                    "nullable",
                    ce::TableDataType::Nullable(Box::new(ce::TableDataType::Number(
                        NumberDataType::Int8,
                    ))),
                )
                .with_default_expr(Some("a + 3".to_string())),
                ce::TableField::new("bool", ce::TableDataType::Boolean),
                ce::TableField::new("int8", ce::TableDataType::Number(NumberDataType::Int8)),
                ce::TableField::new("int16", ce::TableDataType::Number(NumberDataType::Int16)),
                ce::TableField::new("int32", ce::TableDataType::Number(NumberDataType::Int32)),
                ce::TableField::new("int64", ce::TableDataType::Number(NumberDataType::Int64)),
                ce::TableField::new("uint8", ce::TableDataType::Number(NumberDataType::UInt8)),
                ce::TableField::new("uint16", ce::TableDataType::Number(NumberDataType::UInt16)),
                ce::TableField::new("uint32", ce::TableDataType::Number(NumberDataType::UInt32)),
                ce::TableField::new("uint64", ce::TableDataType::Number(NumberDataType::UInt64)),
                ce::TableField::new(
                    "float32",
                    ce::TableDataType::Number(NumberDataType::Float32),
                ),
                ce::TableField::new(
                    "float64",
                    ce::TableDataType::Number(NumberDataType::Float64),
                ),
                ce::TableField::new("date", ce::TableDataType::Date),
                ce::TableField::new("timestamp", ce::TableDataType::Timestamp),
                ce::TableField::new("string", ce::TableDataType::String),
                ce::TableField::new("struct", ce::TableDataType::Tuple {
                    fields_name: vec![s("foo"), s("bar")],
                    fields_type: vec![ce::TableDataType::Boolean, ce::TableDataType::String],
                }),
                ce::TableField::new(
                    "array",
                    ce::TableDataType::Array(Box::new(ce::TableDataType::Boolean)),
                ),
                ce::TableField::new("variant", ce::TableDataType::Variant),
                ce::TableField::new("variant_array", ce::TableDataType::Variant),
                ce::TableField::new("variant_object", ce::TableDataType::Variant),
                // NOTE: It is safe to convert Interval to NULL, because `Interval` is never really used.
                ce::TableField::new("interval", ce::TableDataType::Null),
            ],
            btreemap! {s("a") => s("b")},
        )),
        catalog: "default".to_string(),
        engine: "44".to_string(),
        storage_params: None,
        part_prefix: "".to_string(),
        engine_options: btreemap! {s("abc") => s("def")},
        options: btreemap! {s("xyz") => s("foo")},
        default_cluster_key: Some("(a + 2, b)".to_string()),
        cluster_keys: vec!["(a + 2, b)".to_string()],
        default_cluster_key_id: Some(0),
        created_on: Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap(),
        updated_on: Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 10).unwrap(),
        comment: s("table_comment"),
        // One comment per column — 21 fields are defined above.
        field_comments: vec!["c".to_string(); 21],
        drop_on: None,
        statistics: Default::default(),
    }
}
pub(crate) fn new_latest_schema() -> TableSchema {
let b1 = TableDataType::Tuple {
fields_name: vec!["b11".to_string(), "b12".to_string()],
fields_type: vec![TableDataType::Boolean, TableDataType::String],
};
let b = TableDataType::Tuple {
fields_name: vec!["b1".to_string(), "b2".to_string()],
fields_type: vec![b1, TableDataType::Number(NumberDataType::Int64)],
};
let fields = vec![
TableField::new("a", TableDataType::Number(NumberDataType::UInt64)),
TableField::new("b", b),
TableField::new("c", TableDataType::Number(NumberDataType::UInt64)),
TableField::new(
"decimal128",
TableDataType::Decimal(DecimalDataType::Decimal128(DecimalSize {
precision: 18,
scale: 3,
})),
),
TableField::new(
"decimal256",
TableDataType::Decimal(DecimalDataType::Decimal256(DecimalSize {
precision: 46,
scale: 6,
})),
),
TableField::new("empty_map", TableDataType::EmptyMap),
];
TableSchema::new(fields)
}
pub(crate) fn new_table_copied_file_info_v6() -> mt::TableCopiedFileInfo {
mt::TableCopiedFileInfo {
etag: Some("etag".to_string()),
content_length: 1024,
last_modified: Some(Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 9).unwrap()),
}
}
/// Fixture: a v7 `TableCopiedFileLock` (an empty marker struct).
pub(crate) fn new_table_copied_file_lock_v7() -> mt::TableCopiedFileLock {
    mt::TableCopiedFileLock {}
}
#[test]
fn test_pb_from_to() -> anyhow::Result<()> {
    // Each fixture must survive a protobuf encode/decode round trip unchanged.
    let db = new_db_meta();
    let decoded = mt::DatabaseMeta::from_pb(db.to_pb()?)?;
    assert_eq!(db, decoded);
    let tbl = new_table_meta();
    let decoded = mt::TableMeta::from_pb(tbl.to_pb()?)?;
    assert_eq!(tbl, decoded);
    let share = new_share_meta();
    let decoded = share::ShareMeta::from_pb(share.to_pb()?)?;
    assert_eq!(share, decoded);
    let share_account_meta = new_share_account_meta();
    let decoded = share::ShareAccountMeta::from_pb(share_account_meta.to_pb()?)?;
    assert_eq!(share_account_meta, decoded);
    Ok(())
}
#[test]
fn test_incompatible() -> anyhow::Result<()> {
    // Case 1: the message claims it needs a reader NEWER than this binary
    // (min_reader_ver > VER) — decoding must fail with the "executable ver"
    // message. The error strings are asserted verbatim; keep them in sync
    // with the FromToProto implementation.
    let db_meta = new_db_meta();
    let mut p = db_meta.to_pb()?;
    p.ver = VER + 1;
    p.min_reader_ver = VER + 1;
    let res = mt::DatabaseMeta::from_pb(p);
    assert_eq!(
        Incompatible {
            reason: format!(
                "executable ver={} is smaller than the min reader version({}) that can read this message",
                VER,
                VER + 1
            )
        },
        res.unwrap_err()
    );
    // Case 2: the message is OLDER than the minimum this binary still
    // understands (ver = 0 < MIN_MSG_VER) — decoding must also fail.
    let db_meta = new_db_meta();
    let mut p = db_meta.to_pb()?;
    p.ver = 0;
    p.min_reader_ver = 0;
    let res = mt::DatabaseMeta::from_pb(p);
    assert_eq!(
        Incompatible {
            reason: s(
                "message ver=0 is smaller than executable MIN_MSG_VER(1) that this program can read"
            )
        },
        res.unwrap_err()
    );
    Ok(())
}
#[test]
fn test_build_pb_buf() -> anyhow::Result<()> {
    // build serialized buf of protobuf data, for backward compatibility test with a new version binary.
    /// Encode one protobuf message and print its raw bytes under `label`.
    fn dump<M: common_protos::prost::Message>(label: &str, msg: &M) -> anyhow::Result<()> {
        let mut buf = vec![];
        msg.encode(&mut buf)?;
        println!("{}:{:?}", label, buf);
        Ok(())
    }
    dump("db", &new_db_meta_share().to_pb()?)?;
    dump("table", &new_table_meta().to_pb()?)?;
    dump("share", &new_share_meta_share_from_db_ids().to_pb()?)?;
    dump("share account", &new_share_account_meta().to_pb()?)?;
    dump("copied_file", &new_table_copied_file_info_v6().to_pb()?)?;
    dump("copied_file_lock", &new_table_copied_file_lock_v7().to_pb()?)?;
    dump("schema", &new_latest_schema().to_pb()?)?;
    Ok(())
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
// Raw FFI bindings to Windows system-shutdown/lock functions (machine-generated,
// windows-sys style — do not hand-edit signatures). Each symbol is feature-gated
// on "Win32_Foundation" because its signature uses Foundation types.
#[link(name = "windows")]
extern "system" {
    #[cfg(feature = "Win32_Foundation")]
    pub fn AbortSystemShutdownA(lpmachinename: super::super::Foundation::PSTR) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn AbortSystemShutdownW(lpmachinename: super::super::Foundation::PWSTR) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn CheckForHiberboot(phiberboot: *mut super::super::Foundation::BOOLEAN, bclearflag: super::super::Foundation::BOOLEAN) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn ExitWindowsEx(uflags: EXIT_WINDOWS_FLAGS, dwreason: u32) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn InitiateShutdownA(lpmachinename: super::super::Foundation::PSTR, lpmessage: super::super::Foundation::PSTR, dwgraceperiod: u32, dwshutdownflags: SHUTDOWN_FLAGS, dwreason: SHUTDOWN_REASON) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn InitiateShutdownW(lpmachinename: super::super::Foundation::PWSTR, lpmessage: super::super::Foundation::PWSTR, dwgraceperiod: u32, dwshutdownflags: SHUTDOWN_FLAGS, dwreason: SHUTDOWN_REASON) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn InitiateSystemShutdownA(lpmachinename: super::super::Foundation::PSTR, lpmessage: super::super::Foundation::PSTR, dwtimeout: u32, bforceappsclosed: super::super::Foundation::BOOL, brebootaftershutdown: super::super::Foundation::BOOL) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn InitiateSystemShutdownExA(lpmachinename: super::super::Foundation::PSTR, lpmessage: super::super::Foundation::PSTR, dwtimeout: u32, bforceappsclosed: super::super::Foundation::BOOL, brebootaftershutdown: super::super::Foundation::BOOL, dwreason: SHUTDOWN_REASON) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn InitiateSystemShutdownExW(lpmachinename: super::super::Foundation::PWSTR, lpmessage: super::super::Foundation::PWSTR, dwtimeout: u32, bforceappsclosed: super::super::Foundation::BOOL, brebootaftershutdown: super::super::Foundation::BOOL, dwreason: SHUTDOWN_REASON) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn InitiateSystemShutdownW(lpmachinename: super::super::Foundation::PWSTR, lpmessage: super::super::Foundation::PWSTR, dwtimeout: u32, bforceappsclosed: super::super::Foundation::BOOL, brebootaftershutdown: super::super::Foundation::BOOL) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn LockWorkStation() -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn ShutdownBlockReasonCreate(hwnd: super::super::Foundation::HWND, pwszreason: super::super::Foundation::PWSTR) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn ShutdownBlockReasonDestroy(hwnd: super::super::Foundation::HWND) -> super::super::Foundation::BOOL;
    #[cfg(feature = "Win32_Foundation")]
    pub fn ShutdownBlockReasonQuery(hwnd: super::super::Foundation::HWND, pwszbuff: super::super::Foundation::PWSTR, pcchbuff: *mut u32) -> super::super::Foundation::BOOL;
}
// Flag bits accepted by `ExitWindowsEx` (generated constants; values mirror winuser.h).
pub type EXIT_WINDOWS_FLAGS = u32;
pub const EWX_HYBRID_SHUTDOWN: EXIT_WINDOWS_FLAGS = 4194304u32;
pub const EWX_LOGOFF: EXIT_WINDOWS_FLAGS = 0u32;
pub const EWX_POWEROFF: EXIT_WINDOWS_FLAGS = 8u32;
pub const EWX_REBOOT: EXIT_WINDOWS_FLAGS = 2u32;
pub const EWX_RESTARTAPPS: EXIT_WINDOWS_FLAGS = 64u32;
pub const EWX_SHUTDOWN: EXIT_WINDOWS_FLAGS = 1u32;
// Size limits for shutdown-reason tables and strings.
pub const MAX_NUM_REASONS: u32 = 256u32;
pub const MAX_REASON_BUGID_LEN: u32 = 32u32;
pub const MAX_REASON_COMMENT_LEN: u32 = 512u32;
pub const MAX_REASON_DESC_LEN: u32 = 256u32;
pub const MAX_REASON_NAME_LEN: u32 = 64u32;
// Policy values controlling when the shutdown-reason UI is shown.
pub const POLICY_SHOWREASONUI_ALWAYS: u32 = 1u32;
pub const POLICY_SHOWREASONUI_NEVER: u32 = 0u32;
pub const POLICY_SHOWREASONUI_SERVERONLY: u32 = 3u32;
pub const POLICY_SHOWREASONUI_WORKSTATIONONLY: u32 = 2u32;
// Flag bits accepted by `InitiateShutdownA`/`InitiateShutdownW`.
pub type SHUTDOWN_FLAGS = u32;
pub const SHUTDOWN_FORCE_OTHERS: SHUTDOWN_FLAGS = 1u32;
pub const SHUTDOWN_FORCE_SELF: SHUTDOWN_FLAGS = 2u32;
pub const SHUTDOWN_RESTART: SHUTDOWN_FLAGS = 4u32;
pub const SHUTDOWN_POWEROFF: SHUTDOWN_FLAGS = 8u32;
pub const SHUTDOWN_NOREBOOT: SHUTDOWN_FLAGS = 16u32;
pub const SHUTDOWN_GRACE_OVERRIDE: SHUTDOWN_FLAGS = 32u32;
pub const SHUTDOWN_INSTALL_UPDATES: SHUTDOWN_FLAGS = 64u32;
pub const SHUTDOWN_RESTARTAPPS: SHUTDOWN_FLAGS = 128u32;
pub const SHUTDOWN_SKIP_SVC_PRESHUTDOWN: SHUTDOWN_FLAGS = 256u32;
pub const SHUTDOWN_HYBRID: SHUTDOWN_FLAGS = 512u32;
pub const SHUTDOWN_RESTART_BOOTOPTIONS: SHUTDOWN_FLAGS = 1024u32;
pub const SHUTDOWN_SOFT_REBOOT: SHUTDOWN_FLAGS = 2048u32;
pub const SHUTDOWN_MOBILE_UI: SHUTDOWN_FLAGS = 4096u32;
pub const SHUTDOWN_ARSO: SHUTDOWN_FLAGS = 8192u32;
pub const SHUTDOWN_CHECK_SAFE_FOR_SERVER: SHUTDOWN_FLAGS = 16384u32;
pub const SHUTDOWN_VAIL_CONTAINER: SHUTDOWN_FLAGS = 32768u32;
pub const SHUTDOWN_SYSTEM_INITIATED: SHUTDOWN_FLAGS = 65536u32;
// Shutdown reason codes: FLAG_, MAJOR_ and MINOR_ values are OR'ed together
// into a single u32 (see SHTDN_REASON_VALID_BIT_MASK).
pub type SHUTDOWN_REASON = u32;
pub const SHTDN_REASON_NONE: SHUTDOWN_REASON = 0u32;
pub const SHTDN_REASON_FLAG_COMMENT_REQUIRED: SHUTDOWN_REASON = 16777216u32;
pub const SHTDN_REASON_FLAG_DIRTY_PROBLEM_ID_REQUIRED: SHUTDOWN_REASON = 33554432u32;
pub const SHTDN_REASON_FLAG_CLEAN_UI: SHUTDOWN_REASON = 67108864u32;
pub const SHTDN_REASON_FLAG_DIRTY_UI: SHUTDOWN_REASON = 134217728u32;
pub const SHTDN_REASON_FLAG_MOBILE_UI_RESERVED: SHUTDOWN_REASON = 268435456u32;
pub const SHTDN_REASON_FLAG_USER_DEFINED: SHUTDOWN_REASON = 1073741824u32;
pub const SHTDN_REASON_FLAG_PLANNED: SHUTDOWN_REASON = 2147483648u32;
pub const SHTDN_REASON_MAJOR_OTHER: SHUTDOWN_REASON = 0u32;
pub const SHTDN_REASON_MAJOR_NONE: SHUTDOWN_REASON = 0u32;
pub const SHTDN_REASON_MAJOR_HARDWARE: SHUTDOWN_REASON = 65536u32;
pub const SHTDN_REASON_MAJOR_OPERATINGSYSTEM: SHUTDOWN_REASON = 131072u32;
pub const SHTDN_REASON_MAJOR_SOFTWARE: SHUTDOWN_REASON = 196608u32;
pub const SHTDN_REASON_MAJOR_APPLICATION: SHUTDOWN_REASON = 262144u32;
pub const SHTDN_REASON_MAJOR_SYSTEM: SHUTDOWN_REASON = 327680u32;
pub const SHTDN_REASON_MAJOR_POWER: SHUTDOWN_REASON = 393216u32;
pub const SHTDN_REASON_MAJOR_LEGACY_API: SHUTDOWN_REASON = 458752u32;
pub const SHTDN_REASON_MINOR_OTHER: SHUTDOWN_REASON = 0u32;
pub const SHTDN_REASON_MINOR_NONE: SHUTDOWN_REASON = 255u32;
pub const SHTDN_REASON_MINOR_MAINTENANCE: SHUTDOWN_REASON = 1u32;
pub const SHTDN_REASON_MINOR_INSTALLATION: SHUTDOWN_REASON = 2u32;
pub const SHTDN_REASON_MINOR_UPGRADE: SHUTDOWN_REASON = 3u32;
pub const SHTDN_REASON_MINOR_RECONFIG: SHUTDOWN_REASON = 4u32;
pub const SHTDN_REASON_MINOR_HUNG: SHUTDOWN_REASON = 5u32;
pub const SHTDN_REASON_MINOR_UNSTABLE: SHUTDOWN_REASON = 6u32;
pub const SHTDN_REASON_MINOR_DISK: SHUTDOWN_REASON = 7u32;
pub const SHTDN_REASON_MINOR_PROCESSOR: SHUTDOWN_REASON = 8u32;
pub const SHTDN_REASON_MINOR_NETWORKCARD: SHUTDOWN_REASON = 9u32;
pub const SHTDN_REASON_MINOR_POWER_SUPPLY: SHUTDOWN_REASON = 10u32;
pub const SHTDN_REASON_MINOR_CORDUNPLUGGED: SHUTDOWN_REASON = 11u32;
pub const SHTDN_REASON_MINOR_ENVIRONMENT: SHUTDOWN_REASON = 12u32;
pub const SHTDN_REASON_MINOR_HARDWARE_DRIVER: SHUTDOWN_REASON = 13u32;
pub const SHTDN_REASON_MINOR_OTHERDRIVER: SHUTDOWN_REASON = 14u32;
pub const SHTDN_REASON_MINOR_BLUESCREEN: SHUTDOWN_REASON = 15u32;
pub const SHTDN_REASON_MINOR_SERVICEPACK: SHUTDOWN_REASON = 16u32;
pub const SHTDN_REASON_MINOR_HOTFIX: SHUTDOWN_REASON = 17u32;
pub const SHTDN_REASON_MINOR_SECURITYFIX: SHUTDOWN_REASON = 18u32;
pub const SHTDN_REASON_MINOR_SECURITY: SHUTDOWN_REASON = 19u32;
pub const SHTDN_REASON_MINOR_NETWORK_CONNECTIVITY: SHUTDOWN_REASON = 20u32;
pub const SHTDN_REASON_MINOR_WMI: SHUTDOWN_REASON = 21u32;
pub const SHTDN_REASON_MINOR_SERVICEPACK_UNINSTALL: SHUTDOWN_REASON = 22u32;
pub const SHTDN_REASON_MINOR_HOTFIX_UNINSTALL: SHUTDOWN_REASON = 23u32;
pub const SHTDN_REASON_MINOR_SECURITYFIX_UNINSTALL: SHUTDOWN_REASON = 24u32;
pub const SHTDN_REASON_MINOR_MMC: SHUTDOWN_REASON = 25u32;
pub const SHTDN_REASON_MINOR_SYSTEMRESTORE: SHUTDOWN_REASON = 26u32;
pub const SHTDN_REASON_MINOR_TERMSRV: SHUTDOWN_REASON = 32u32;
pub const SHTDN_REASON_MINOR_DC_PROMOTION: SHUTDOWN_REASON = 33u32;
pub const SHTDN_REASON_MINOR_DC_DEMOTION: SHUTDOWN_REASON = 34u32;
pub const SHTDN_REASON_UNKNOWN: SHUTDOWN_REASON = 255u32;
pub const SHTDN_REASON_LEGACY_API: SHUTDOWN_REASON = 2147942400u32;
pub const SHTDN_REASON_VALID_BIT_MASK: SHUTDOWN_REASON = 3238002687u32;
pub const SHUTDOWN_TYPE_LEN: u32 = 32u32;
// Snapshot policy values.
pub const SNAPSHOT_POLICY_ALWAYS: u32 = 1u32;
pub const SNAPSHOT_POLICY_NEVER: u32 = 0u32;
pub const SNAPSHOT_POLICY_UNPLANNED: u32 = 2u32;
|
use ::std::ptr;
use std::alloc::{alloc, dealloc, Layout};
use std::io::{Read, Result, Write};
use std::mem::{align_of, size_of};
// -----------------------------------------------------------------------------
// - BadRingBuffer struct -
// -----------------------------------------------------------------------------
/// A fixed-capacity FIFO ring buffer backed by a raw heap allocation.
///
/// When the buffer is full, `push` drops and overwrites the oldest element.
/// Elements are consumed oldest-first through the `Iterator` impl.
pub struct BadRingBuffer<T> {
    head: usize,       // index of the next slot to write
    tail: usize,       // index of the oldest live element (next to read)
    start_ptr: *mut T, // start of the heap allocation holding `capacity` slots
    capacity: usize,
    count: usize, // number of live (initialized, unconsumed) elements
}
impl<T> BadRingBuffer<T> {
    /// Allocates an uninitialized buffer able to hold `capacity` elements.
    ///
    /// # Panics
    /// Panics if `capacity` is zero, `T` is zero-sized, the layout overflows,
    /// or allocation fails (via `handle_alloc_error`).
    pub fn with_capacity(capacity: usize) -> Self {
        // A zero-capacity buffer would divide by zero in the `%` wrapping math,
        // and a zero-size allocation is undefined behaviour for `alloc`.
        assert!(capacity > 0, "capacity must be non-zero");
        assert!(size_of::<T>() != 0, "zero-sized types are not supported");
        // `Layout::array` also checks for size overflow, unlike a bare multiply.
        let layout = Layout::array::<T>(capacity).expect("could not create memory layout");
        // SAFETY: the layout has non-zero size (both asserts above).
        let mem = unsafe { alloc(layout) };
        if mem.is_null() {
            // Fix: the original ignored allocation failure and kept a null pointer.
            std::alloc::handle_alloc_error(layout);
        }
        // `alloc` hands back bytes; cast to our element type.
        let start_ptr = mem.cast::<T>();
        Self {
            head: 0,
            tail: 0,
            start_ptr,
            capacity,
            count: 0,
        }
    }
    /// Returns `true` when no live elements remain.
    pub fn empty(&self) -> bool {
        self.count == 0
    }
    /// Returns `true` when the buffer holds `capacity` elements.
    pub fn full(&self) -> bool {
        self.count == self.capacity
    }
    /// Maximum number of elements the buffer can hold.
    pub fn capacity(&self) -> usize {
        self.capacity
    }
    /// Appends `value`; when full, the oldest element is dropped and replaced.
    ///
    /// Fix: the original only advanced `tail` while `head > tail`, so once
    /// `head` wrapped around to or below `tail` the oldest element was no
    /// longer at `tail` and iteration yielded elements out of order
    /// (e.g. capacity 3, push 0..6 yielded 5,3,4). It also never dropped the
    /// element it overwrote, leaking any `T` with a destructor.
    pub fn push(&mut self, value: T) {
        let overwriting = self.full();
        unsafe {
            let slot = self.start_ptr.add(self.head);
            if overwriting {
                // When full, head == tail, so this slot holds the oldest live
                // element; drop it before overwriting so it is not leaked.
                ptr::drop_in_place(slot);
            }
            ptr::write(slot, value);
        }
        // Advance the write cursor, wrapping at capacity.
        self.head = (self.head + 1) % self.capacity;
        if overwriting {
            // The oldest element was just replaced: the read cursor follows
            // the write cursor so iteration still starts at the oldest element.
            self.tail = (self.tail + 1) % self.capacity;
        } else {
            self.count += 1;
        }
    }
    /// Drops all live elements and resets the cursors; keeps the allocation.
    ///
    /// Fix: the original only reset the counters, leaking every live element.
    pub fn clear(&mut self) {
        while self.count > 0 {
            // SAFETY: while count > 0, `tail` indexes an initialized element.
            unsafe { ptr::drop_in_place(self.start_ptr.add(self.tail)) };
            self.tail = (self.tail + 1) % self.capacity;
            self.count -= 1;
        }
        self.head = 0;
        self.tail = 0;
    }
    /// Moves all live elements out into a `Vec`, oldest first.
    pub fn drain(&mut self) -> Vec<T> {
        let values = self.collect::<Vec<_>>();
        self.clear();
        values
    }
}
// -----------------------------------------------------------------------------
// - Iterator impl -
// -----------------------------------------------------------------------------
impl<T> Iterator for BadRingBuffer<T> {
    type Item = T;
    /// Pops the oldest element, or `None` when the buffer is empty.
    fn next(&mut self) -> Option<Self::Item> {
        if self.empty() {
            return None;
        }
        // SAFETY: count > 0, so `tail` indexes an initialized element. Reading
        // moves it out, and tail/count are updated so it is never read twice.
        let value = unsafe { self.start_ptr.add(self.tail).read() };
        self.tail = (self.tail + 1) % self.capacity;
        self.count -= 1;
        Some(value)
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exactly `count` elements remain.
        (self.count, Some(self.count))
    }
}
// -----------------------------------------------------------------------------
// - Read imp -
// -----------------------------------------------------------------------------
impl Read for BadRingBuffer<u8> {
    /// Fills `buf` with buffered bytes (oldest first) and returns how many
    /// were written; returns `Ok(0)` when `buf` is empty or the ring is empty.
    ///
    /// Fix: the original popped a byte *before* checking `buf`'s length, so a
    /// zero-length `buf` lost one byte and then panicked indexing `buf[0]`.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let mut filled = 0;
        for slot in buf.iter_mut() {
            match self.next() {
                Some(byte) => {
                    *slot = byte;
                    filled += 1;
                }
                // Ring exhausted before `buf` was full.
                None => break,
            }
        }
        Ok(filled)
    }
}
// -----------------------------------------------------------------------------
// - Write impl -
// -----------------------------------------------------------------------------
impl Write for BadRingBuffer<u8> {
    /// Pushes every byte of `buf` into the ring (oldest bytes are overwritten
    /// once the ring is full) and reports the whole slice as written.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        for &byte in buf {
            self.push(byte);
        }
        Ok(buf.len())
    }
    /// NOTE: flush discards everything currently buffered (delegates to `clear`).
    fn flush(&mut self) -> Result<()> {
        self.clear();
        Ok(())
    }
}
// -----------------------------------------------------------------------------
// - Drop impl -
// -----------------------------------------------------------------------------
impl<T> Drop for BadRingBuffer<T> {
fn drop(&mut self) {
unsafe {
let layout = Layout::from_size_align(self.capacity * size_of::<T>(), align_of::<T>())
.expect("could not create layout");
dealloc(self.start_ptr.cast::<u8>(), layout);
}
}
}
#[cfg(test)]
mod test {
    use super::*;
    // An empty buffer yields nothing.
    #[test]
    fn test_read_empty() {
        let mut rb = BadRingBuffer::<u8>::with_capacity(8);
        assert!(rb.next().is_none());
    }
    #[test]
    fn test_is_empty() {
        let rb = BadRingBuffer::<u8>::with_capacity(8);
        assert!(rb.empty());
    }
    #[test]
    fn test_read_capacity() {
        let capacity = 8;
        let rb = BadRingBuffer::<u8>::with_capacity(capacity);
        assert_eq!(rb.capacity(), capacity);
    }
    #[test]
    fn test_is_full() {
        let mut rb = BadRingBuffer::with_capacity(2);
        rb.push(1);
        rb.push(2);
        assert!(rb.full());
    }
    // Within capacity, values come back in insertion order.
    #[test]
    fn test_non_wrapping_write() {
        let mut rb = BadRingBuffer::with_capacity(3);
        rb.push(0);
        rb.push(1);
        rb.push(2);
        assert_eq!(rb.next(), Some(0));
        assert_eq!(rb.next(), Some(1));
        assert_eq!(rb.next(), Some(2));
    }
    // Capacity 2: the third push overwrites the oldest value (0).
    #[test]
    fn test_wrapping_write() {
        let mut rb = BadRingBuffer::with_capacity(2);
        rb.push(0);
        rb.push(1);
        rb.push(2);
        assert_eq!(rb.next(), Some(1));
        assert_eq!(rb.next(), Some(2));
    }
    // `Read` copies buffered bytes into the caller's slice, oldest first.
    #[test]
    fn test_read() {
        let mut buf = [0; 1024];
        let mut rb = BadRingBuffer::with_capacity(4);
        rb.push(1);
        rb.push(2);
        rb.push(3);
        rb.push(4);
        let read_bytes = rb.read(&mut buf).unwrap();
        assert_eq!(&buf[0..read_bytes], &[1, 2, 3, 4])
    }
    // `drain` moves everything out and leaves the buffer empty.
    #[test]
    fn test_should_drain() {
        let mut rb = BadRingBuffer::with_capacity(3);
        rb.push(1);
        rb.push(2);
        rb.push(3);
        let values = rb.drain();
        assert_eq!(values, &[1, 2, 3]);
        assert!(rb.empty());
    }
    // `Write` accepts the whole slice and reports its full length.
    #[test]
    fn test_write() {
        let buf = [1, 2, 3, 4];
        let mut rb = BadRingBuffer::with_capacity(4);
        let bytes_written = rb.write(&buf).unwrap();
        assert_eq!(4, bytes_written);
        assert_eq!(rb.drain(), vec![1, 2, 3, 4]);
    }
    // `clear` discards pending values.
    #[test]
    fn test_clearing() {
        let mut rb = BadRingBuffer::with_capacity(4);
        rb.push(1);
        rb.clear();
        assert!(rb.next().is_none());
    }
}
|
use git2::{ObjectType, Oid, Repository};
use regex::Regex;
use std::collections::HashMap;
use std::str::from_utf8;
use termion::color::{self, Fg};
// Macros for logging
// Expands to a green "[INFO]" tag string (termion terminal colors).
macro_rules! info {
    () => { format!("{}[INFO]{}", Fg(color::Green), Fg(color::Reset)) };
}
// Expands to a red "[CRITICAL]" tag string, used when a secret is found.
macro_rules! critical {
    () => { format!("{}[CRITICAL]{}", Fg(color::Red), Fg(color::Reset)) };
}
/// Scans every object in a git repository's object database for blobs that
/// match any of a hard-coded set of secret-detection regexes, spawning one
/// thread per object.
fn main() {
    // Get config string
    // let conf_str = fs::read_to_string(CONFIG_FILE).unwrap();
    // Make a hashmap of uncompiled regex expressions
    // let conf: HashMap<String, String> = serde_json::from_str(&conf_str).unwrap();
    // Rule name -> (uncompiled) regex pattern.
    let rules: HashMap<&str, &str> = [
        ("Slack Token", "(xox[p|b|o|a]-[0-9]{12}-[0-9]{12}-[0-9]{12}-[a-z0-9]{32})"),
        ("RSA private key", "-----BEGIN RSA PRIVATE KEY-----"),
        ("SSH (OPENSSH) private key", "-----BEGIN OPENSSH PRIVATE KEY-----"),
        ("SSH (DSA) private key", "-----BEGIN DSA PRIVATE KEY-----"),
        ("SSH (EC) private key", "-----BEGIN EC PRIVATE KEY-----"),
        ("PGP private key block", "-----BEGIN PGP PRIVATE KEY BLOCK-----"),
        ("Facebook Oauth", "[f|F][a|A][c|C][e|E][b|B][o|O][o|O][k|K].{0,30}['\"\\s][0-9a-f]{32}['\"\\s]"),
        ("Twitter Oauth", "[t|T][w|W][i|I][t|T][t|T][e|E][r|R].{0,30}['\"\\s][0-9a-zA-Z]{35,44}['\"\\s]"),
        ("GitHub", "[g|G][i|I][t|T][h|H][u|U][b|B].{0,30}['\"\\s][0-9a-zA-Z]{35,40}['\"\\s]"),
        ("Google Oauth", "(\"client_secret\":\"[a-zA-Z0-9-_]{24}\")"),
        ("AWS API Key", "AKIA[0-9A-Z]{16}"),
        ("Heroku API Key", "[h|H][e|E][r|R][o|O][k|K][u|U].{0,30}[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}"),
        ("Generic Secret", "[s|S][e|E][c|C][r|R][e|E][t|T].{0,30}['\"\\s][0-9a-zA-Z]{32,45}['\"\\s]"),
        ("Generic API Key", "[a|A][p|P][i|I][_]?[k|K][e|E][y|Y].{0,30}['\"\\s][0-9a-zA-Z]{32,45}['\"\\s]"),
        ("Slack Webhook", "https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}"),
        ("Google (GCP) Service-account", "\"type\": \"service_account\""),
        ("Twilio API Key", "SK[a-z0-9]{32}"),
        ("Password in URL", "[a-zA-Z]{3,10}://[^/\\s:@]{3,20}:[^/\\s:@]{3,20}@.{1,100}[\"'\\s]"),
    ].iter().cloned().collect();
    // Path to the git repo from argv[1], defaulting to the current directory.
    // `unwrap_or_else` avoids allocating the default String when an argument
    // is present (clippy::or_fun_call).
    let repo_root: String = std::env::args().nth(1).unwrap_or_else(|| ".".to_string());
    // Open git repo
    let repo = Repository::open(repo_root.as_str()).expect("Couldn't open repository");
    println!(
        "{} {} state={:?}",
        info!(),
        repo.path().display(),
        repo.state()
    );
    println!("{} checking {} key templates", info!(), rules.len());
    println!("--------------------------------------------------------------------------");
    // Get object database from the repo
    let odb = repo.odb().unwrap();
    let mut children = vec![];
    // Loop through objects in db
    odb.foreach(|oid| {
        let object_id = oid.clone();
        let config = rules.clone();
        // Each thread needs its own Repository handle: git2's Repository is
        // not Sync, so it cannot be shared across the spawned threads.
        let repository = Repository::open(repo_root.as_str()).expect("Couldn't open repository");
        // Spawn a thread to look for secrets in the object
        children.push(std::thread::spawn(move || scan_object(repository, &object_id, config)));
        // Return true because the closure has to return a boolean
        true
    })
    .unwrap();
    // Plain value instead of the original `&children.len()` (a reference to a
    // temporary); saved before the loop below consumes `children`.
    let num_children = children.len();
    for child in children {
        let _ = child.join();
    }
    println!("{} Spawned {} threads", info!(), num_children);
}
/// Resolves `oid` in `repo` and, when it names a UTF-8 blob, reports every
/// secret rule from `conf` that matches its contents.
fn scan_object(repo: Repository, oid: &Oid, conf: HashMap<&str, &str>) {
    // Look up the object behind this oid.
    let obj = repo.revparse_single(&oid.to_string()).unwrap();
    // Only blob objects carry file contents worth scanning.
    if obj.kind() != Some(ObjectType::Blob) {
        return;
    }
    // Non-UTF-8 blobs (e.g. binaries) are silently skipped.
    let blob_str = match from_utf8(obj.as_blob().unwrap().content()) {
        Ok(text) => text,
        Err(_) => return,
    };
    // Report every rule that matched this blob.
    if let Some(bad_commits) = is_bad(blob_str, &conf) {
        for bad in bad_commits {
            println!("{} commit {} has a secret of type `{}`", critical!(), oid, bad);
        }
    }
}
// is_bad : if secrets are found in blob then they are returned as a vector, otherwise return None
/// Matches `maybe` against every rule pattern in `bads`; returns the names of
/// all rules that matched, or `None` when the text is clean.
///
/// NOTE(review): every call recompiles every regex. Compiling the `Regex`
/// values once (outside the per-object scan) would be a large speedup, but
/// requires changing the shared rules map type, so it is left as-is here.
fn is_bad(maybe: &str, bads: &HashMap<&str, &str>) -> Option<Vec<String>> {
    let bad_commits: Vec<String> = bads
        .iter()
        .filter_map(|(name, pattern)| {
            // Use regex from rules file to match against blob
            let re = Regex::new(pattern).unwrap();
            if re.is_match(maybe) {
                Some(name.to_string())
            } else {
                None
            }
        })
        .collect();
    // `!is_empty()` instead of `len() > 0` (clippy::len_zero).
    if bad_commits.is_empty() {
        None
    } else {
        Some(bad_commits)
    }
}
|
pub mod obscurificator;
|
use std::str::FromStr;
use std::path::PathBuf;
use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr};
use super::FromCommandLine;
impl FromCommandLine for PathBuf {
fn from_argument(s: &str) -> Result<Self, String> {
Ok(From::from(s))
}
}
impl FromCommandLine for f32 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for f64 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
// TODO(tailhook) implement various radices for integer values
impl FromCommandLine for isize {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for i8 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for i16 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for i32 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for i64 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for usize {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for u8 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for u16 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for u32 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for u64 {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for bool {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for String {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|_| unreachable!())
}
}
impl FromCommandLine for Ipv4Addr {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for Ipv6Addr {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
impl FromCommandLine for SocketAddr {
fn from_argument(s: &str) -> Result<Self, String> {
FromStr::from_str(s).map_err(|e| format!("{:?}", e))
}
}
|
use bytes::Buf;
use futures::{Async, Future, Poll};
use h2::{Reason, SendStream};
use http::header::{
HeaderName, CONNECTION, PROXY_AUTHENTICATE, PROXY_AUTHORIZATION, TE, TRAILER,
TRANSFER_ENCODING, UPGRADE,
};
use http::HeaderMap;
use body::Payload;
mod client;
pub(crate) mod server;
pub(crate) use self::client::Client;
pub(crate) use self::server::Server;
/// Removes hop-by-hop (connection-specific) headers, which are forbidden in
/// HTTP/2; each removal is logged as a protocol violation by the peer.
fn strip_connection_headers(headers: &mut HeaderMap, is_request: bool) {
    // List of connection headers from:
    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Connection
    //
    // TE headers are allowed in HTTP/2 requests as long as the value is "trailers", so they're
    // tested separately.
    let connection_headers = [
        HeaderName::from_lowercase(b"keep-alive").unwrap(),
        HeaderName::from_lowercase(b"proxy-connection").unwrap(),
        PROXY_AUTHENTICATE,
        PROXY_AUTHORIZATION,
        TRAILER,
        TRANSFER_ENCODING,
        UPGRADE,
    ];
    for name in &connection_headers {
        if headers.remove(name).is_some() {
            warn!("Connection header illegal in HTTP/2: {}", name.as_str());
        }
    }
    if is_request {
        // TE is only legal on requests, and only with the value "trailers".
        let te_is_trailers = headers.get(TE).map_or(true, |te_header| te_header == "trailers");
        if !te_is_trailers {
            warn!("TE headers not set to \"trailers\" are illegal in HTTP/2 requests");
            headers.remove(TE);
        }
    } else if headers.remove(TE).is_some() {
        warn!("TE headers illegal in HTTP/2 responses");
    }
    if let Some(header) = headers.remove(CONNECTION) {
        warn!(
            "Connection header illegal in HTTP/2: {}",
            CONNECTION.as_str()
        );
        // A `Connection` header may have a comma-separated list of names of other headers that
        // are meant for only this specific connection.
        //
        // Iterate these names and remove them as headers. Connection-specific headers are
        // forbidden in HTTP2, as that information has been moved into frame types of the h2
        // protocol.
        let header_contents = header.to_str().unwrap();
        for name in header_contents.split(',') {
            headers.remove(name.trim());
        }
    }
}
// body adapters used by both Client and Server
// Pumps a `Payload` body into an h2 `SendStream`, driving flow control.
struct PipeToSendStream<S>
where
    S: Payload,
{
    // h2 send half for this stream; receives DATA frames and trailers.
    body_tx: SendStream<SendBuf<S::Data>>,
    // True once all DATA frames are sent and only trailers remain.
    data_done: bool,
    // The user-provided body being drained.
    stream: S,
}
impl<S> PipeToSendStream<S>
where
    S: Payload,
{
    /// Wraps `stream` so its chunks can be pumped into the h2 send half `tx`.
    fn new(stream: S, tx: SendStream<SendBuf<S::Data>>) -> PipeToSendStream<S> {
        PipeToSendStream {
            body_tx: tx,
            data_done: false,
            stream,
        }
    }

    /// Wraps a user-stream error, resets the h2 stream, and returns the error
    /// for propagation.
    fn on_err(&mut self, err: S::Error) -> ::Error {
        let err = ::Error::new_user_body(err);
        trace!("send body user stream error: {}", err);
        self.body_tx.send_reset(Reason::INTERNAL_ERROR);
        err
    }

    /// Sends an empty DATA frame carrying the end-of-stream flag.
    fn send_eos_frame(&mut self) -> ::Result<()> {
        trace!("send body eos");
        self.body_tx
            .send_data(SendBuf(None), true)
            .map_err(::Error::new_body_write)
    }
}
// Drives the body pump to completion: alternates between reserving send
// capacity, forwarding DATA chunks, and finally sending trailers or an
// empty EOS frame. The statement order here mirrors h2's flow-control
// contract, so the logic is documented rather than restructured.
impl<S> Future for PipeToSendStream<S>
where
    S: Payload,
{
    type Item = ();
    type Error = ::Error;

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        loop {
            if !self.data_done {
                // we don't have the next chunk of data yet, so just reserve 1 byte to make
                // sure there's some capacity available. h2 will handle the capacity management
                // for the actual body chunk.
                self.body_tx.reserve_capacity(1);
                if self.body_tx.capacity() == 0 {
                    // No capacity yet: park until h2 grants some. Zero-size
                    // grants are skipped; a closed capacity stream means the
                    // peer canceled.
                    loop {
                        match try_ready!(self.body_tx.poll_capacity().map_err(::Error::new_body_write)) {
                            Some(0) => {}
                            Some(_) => break,
                            None => return Err(::Error::new_canceled(None::<::Error>)),
                        }
                    }
                } else {
                    // We already have capacity; just check the peer hasn't
                    // reset the stream before polling the user body.
                    if let Async::Ready(reason) =
                        self.body_tx.poll_reset().map_err(::Error::new_body_write)?
                    {
                        debug!("stream received RST_STREAM: {:?}", reason);
                        return Err(::Error::new_body_write(::h2::Error::from(reason)));
                    }
                }
                match try_ready!(self.stream.poll_data().map_err(|e| self.on_err(e))) {
                    Some(chunk) => {
                        // Forward the chunk; setting eos here avoids a
                        // separate empty end-of-stream frame when the body
                        // already knows it is finished.
                        let is_eos = self.stream.is_end_stream();
                        trace!(
                            "send body chunk: {} bytes, eos={}",
                            chunk.remaining(),
                            is_eos,
                        );
                        let buf = SendBuf(Some(chunk));
                        self.body_tx
                            .send_data(buf, is_eos)
                            .map_err(::Error::new_body_write)?;
                        if is_eos {
                            return Ok(Async::Ready(()));
                        }
                    }
                    None => {
                        // Body exhausted: release the reserved byte, then
                        // either finish with an EOS frame or move on to the
                        // trailer phase.
                        self.body_tx.reserve_capacity(0);
                        let is_eos = self.stream.is_end_stream();
                        if is_eos {
                            return self.send_eos_frame().map(Async::Ready);
                        } else {
                            self.data_done = true;
                            // loop again to poll_trailers
                        }
                    }
                }
            } else {
                // Trailer phase: again watch for a peer reset first.
                if let Async::Ready(reason) =
                    self.body_tx.poll_reset().map_err(|e| ::Error::new_body_write(e))?
                {
                    debug!("stream received RST_STREAM: {:?}", reason);
                    return Err(::Error::new_body_write(::h2::Error::from(reason)));
                }
                match try_ready!(self.stream.poll_trailers().map_err(|e| self.on_err(e))) {
                    Some(trailers) => {
                        self.body_tx
                            .send_trailers(trailers)
                            .map_err(::Error::new_body_write)?;
                        return Ok(Async::Ready(()));
                    }
                    None => {
                        // There were no trailers, so send an empty DATA frame...
                        return self.send_eos_frame().map(Async::Ready);
                    }
                }
            }
        }
    }
}
/// Buffer wrapper whose `None` state acts as an empty buffer, letting an
/// end-of-stream frame be sent as `SendBuf(None)` without a real chunk.
struct SendBuf<B>(Option<B>);

impl<B: Buf> Buf for SendBuf<B> {
    #[inline]
    fn remaining(&self) -> usize {
        self.0.as_ref().map(|b| b.remaining()).unwrap_or(0)
    }

    #[inline]
    fn bytes(&self) -> &[u8] {
        self.0.as_ref().map(|b| b.bytes()).unwrap_or(&[])
    }

    #[inline]
    fn advance(&mut self, cnt: usize) {
        // `if let` instead of `Option::map` used for its side effect
        // (clippy::option_map_unit_fn); no-op when the buffer is empty.
        if let Some(b) = self.0.as_mut() {
            b.advance(cnt);
        }
    }
}
|
mod get_proof;
mod get_transaction_status;
pub(crate) use get_proof::get_proof;
pub(crate) use get_transaction_status::get_transaction_status;
|
use crate::wallet::wallet::WalletType;
use crate::address::traits::address::AddressI;
use crate::address::traits::address::AddressCheckI;
use crate::address::types::sm2p256v1::AddressSM2P256V1;
// Facade that owns a concrete address implementation selected by wallet
// type; all AddressI calls delegate to the boxed value.
pub struct AddressBuilder {
    address : Box<dyn AddressI>,
}
impl AddressBuilder {
    /// Builds an address of the kind implied by `seed_type` from `seed` and
    /// wraps it for delegation.
    pub fn new(seed_type: WalletType, seed: &Vec<u8>) -> Self {
        // `seed` is already a reference; the original passed `&seed`
        // (a `&&Vec<u8>`) and relied on deref coercion.
        let address = AddressBuilder::build_address(seed_type, seed);
        AddressBuilder { address }
    }

    /// Maps a wallet type to its concrete boxed address implementation.
    fn build_address(seed_type: WalletType, seed: &Vec<u8>) -> Box<dyn AddressI> {
        // NOTE(review): all three variants currently construct
        // `AddressSM2P256V1` — confirm ED25519/SECP256K1 are intentionally
        // routed here rather than copy-paste placeholders.
        match seed_type {
            WalletType::ED25519 => Box::new(AddressSM2P256V1::new(seed)),
            WalletType::SECP256K1 => Box::new(AddressSM2P256V1::new(seed)),
            WalletType::SM2P256V1 => Box::new(AddressSM2P256V1::new(seed)),
        }
    }
}
// ----------------------------------------------------------------------------------------------------------
// Implementation of trait AddressI for AddressBuilder.
// ----------------------------------------------------------------------------------------------------------
// Pure delegation: every method forwards to the boxed concrete address.
impl AddressI for AddressBuilder {
    fn human_account_id(&self) -> String {
        self.address.human_account_id()
    }
    fn public_key(&self) -> String {
        self.address.public_key()
    }
    fn public_key_hex(&self) -> String {
        self.address.public_key_hex()
    }
    fn private_key(&self) -> String {
        self.address.private_key()
    }
}
// ----------------------------------------------------------------------------------------------------------
// Implementation of trait AddressCheckI for AddressBuilder.
// ----------------------------------------------------------------------------------------------------------
impl AddressCheckI for AddressBuilder {
    // NOTE(review): always returns true and ignores `_address` — presumably a
    // stub; confirm whether real address validation is still pending.
    fn check(&self, _address: &String) -> bool {
        true
    }
}
// `with_empty_list_options` in integration tests
// `with_link_and_monitor_in_options_list` in integration tests
// `with_link_in_options_list` in integration tests
// `with_monitor_in_options_list` in integration tests
use super::*;
// Property test: spawning with an improper (non-nil-terminated) options list
// must error with badarg mentioning "improper list".
#[test]
fn without_proper_list_options_errors_badarg() {
    run!(
        |arc_process| {
            // Strategy: any function term plus any non-list term to use as
            // the improper tail of the options list.
            (
                Just(arc_process.clone()),
                strategy::term::is_function(arc_process.clone()),
                strategy::term::is_not_list(arc_process.clone()),
            )
        },
        |(arc_process, function, tail)| {
            // [link | tail] where tail is not a list -> improper list.
            let options = arc_process.improper_list_from_slice(&[atom!("link")], tail);
            prop_assert_badarg!(result(&arc_process, function, options), "improper list");
            Ok(())
        },
    );
}
|
extern crate tiny_http;
extern crate multipart;
use std::io::{self, Cursor, Write};
use multipart::server::{Multipart, Entries, SaveResult};
use multipart::mock::StdoutTee;
use tiny_http::{Response, StatusCode, Request};
/// Runs a tiny_http server on localhost:80 that echoes a debug dump of every
/// multipart request it receives.
fn main() {
    // Starting a server on `localhost:80`
    let server = tiny_http::Server::http("localhost:80").expect("Could not bind localhost:80");
    loop {
        // This blocks until the next request is received
        let mut request = server.recv().unwrap();
        // Processes a request and returns response or an occurred error
        let result = process_request(&mut request);
        let resp = match result {
            Ok(resp) => resp,
            Err(e) => {
                // Typos fixed in the runtime messages:
                // "occured" -> "occurred", "proccess…" -> "process…".
                println!("An error has occurred during request processing: {:?}", e);
                build_response(500, "The received data was not correctly processed on the server")
            }
        };
        // Answers with a response to a client
        request.respond(resp).unwrap();
    }
}
type RespBody = Cursor<Vec<u8>>;
/// Processes a request and returns response or an occurred error.
fn process_request(request: &mut Request) -> io::Result<Response<RespBody>> {
    // Getting a multipart reader wrapper
    match Multipart::from_request(request) {
        Ok(mut multipart) => {
            // Fetching all data and processing it.
            // save().temp() reads the request fully, parsing all fields and saving all files
            // in a new temporary directory under the OS temporary directory.
            match multipart.save().temp() {
                SaveResult::Full(entries) => process_entries(entries),
                SaveResult::Partial(entries, reason) => {
                    // Process what did arrive, then surface the save failure.
                    process_entries(entries.keep_partial())?;
                    // We don't set limits
                    Err(reason.unwrap_err())
                }
                SaveResult::Error(error) => Err(error),
            }
        }
        // Not a multipart request at all: reject with 400.
        Err(_) => Ok(build_response(400, "The request is not multipart")),
    }
}
/// Processes saved entries from multipart request.
/// Returns an OK response or an error.
fn process_entries(entries: Entries) -> io::Result<Response<RespBody>> {
    let mut data = Vec::new();
    {
        // Tee the debug dump to both stdout and the response body buffer.
        let stdout = io::stdout();
        let tee = StdoutTee::new(&mut data, &stdout);
        entries.write_debug(tee)?;
    }
    writeln!(data, "Entries processed")?;
    Ok(build_response(200, data))
}
/// Builds a response with the given status code and body, setting the
/// Content-Length from the body size.
fn build_response<D: Into<Vec<u8>>>(status_code: u16, data: D) -> Response<RespBody> {
    let body = data.into();
    let length = body.len();
    Response::new(
        StatusCode(status_code),
        vec![],
        Cursor::new(body),
        Some(length),
        None,
    )
}
|
use byteorder::LittleEndian;
use crate::io::Buf;
use crate::mysql::protocol::Status;
// https://dev.mysql.com/doc/dev/mysql-server/8.0.12/page_protocol_basic_eof_packet.html
// https://mariadb.com/kb/en/eof_packet/
/// MySQL/MariaDB EOF packet (header byte 0xFE) marking the end of a result
/// set section; see the protocol links above.
#[derive(Debug)]
pub struct EofPacket {
    // Number of warnings the server raised for the statement.
    pub warnings: u16,
    // Server status flags decoded from the trailing u16.
    pub status: Status,
}
impl EofPacket {
    /// Parses an EOF packet from `buf`: a 0xFE header byte followed by a
    /// little-endian warning count and status flags.
    ///
    /// The original `where Self: Sized` bound was removed — this is an
    /// inherent method, so `Self` is always `EofPacket` and trivially sized.
    ///
    /// # Errors
    /// Returns a protocol error when the header byte is not 0xFE, or a read
    /// error when the buffer is too short.
    pub(crate) fn read(mut buf: &[u8]) -> crate::Result<Self> {
        let header = buf.get_u8()?;
        if header != 0xFE {
            return Err(protocol_err!(
                "expected EOF (0xFE); received 0x{:X}",
                header
            ))?;
        }
        let warnings = buf.get_u16::<LittleEndian>()?;
        let status = buf.get_u16::<LittleEndian>()?;
        Ok(Self {
            warnings,
            // Unknown status bits are dropped rather than rejected.
            status: Status::from_bits_truncate(status),
        })
    }
}
|
use std::fs;
use std::time::Instant;
use std::hash::Hash;
use std::collections::HashMap;
/// One field rule: a name plus two inclusive (low, high) value ranges.
///
/// NOTE: the misspelled name "Contraint" (sic) is kept — other functions in
/// this file refer to it.
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, Hash)]
struct Contraint {
    first_range: (u16, u16),
    second_range: (u16, u16),
    name: String,
}

/// Parses the leading constraint section of the puzzle input — lines of the
/// form `"name: a-b or c-d"` — stopping at the first empty line.
///
/// Cleanups vs. the original: the loop variable no longer shadows the `str`
/// type name, the used `_name` binding lost its misleading underscore, and
/// `split('-')` uses a char pattern (clippy::single_char_pattern).
fn parse_constraint(puzzle: Vec<String>) -> Vec<Contraint> {
    let mut constraints = vec![];
    for line in puzzle {
        if line.is_empty() {
            break;
        }
        // "name: a-b or c-d"
        let mut parts = line.split(": ");
        let name = parts.next().unwrap();
        let fields = parts.next().unwrap();
        assert!(parts.next().is_none());
        let mut ranges = fields.split(" or ");
        let field1 = ranges.next().unwrap();
        let field2 = ranges.next().unwrap();
        assert!(ranges.next().is_none());
        let mut bounds1 = field1.split('-');
        let mut bounds2 = field2.split('-');
        constraints.push(Contraint {
            first_range: (
                bounds1.next().unwrap().parse().unwrap(),
                bounds1.next().unwrap().parse().unwrap(),
            ),
            second_range: (
                bounds2.next().unwrap().parse().unwrap(),
                bounds2.next().unwrap().parse().unwrap(),
            ),
            name: name.to_string(),
        });
    }
    constraints
}
/// Extracts one block of comma-separated tickets from the puzzle input.
///
/// Advances past `skip` empty lines, discards the section header line that
/// follows ("your ticket:" / "nearby tickets:"), then parses every line until
/// the next empty line (or end of input) as a `Vec<u16>`.
fn parse_ticket(puzzle: Vec<String>, mut skip: u32) -> Vec<Vec<u16>> {
    let mut lines = puzzle.iter();
    // Advance past `skip` blank separator lines.
    while skip > 0 {
        match lines.next() {
            None => break,
            Some(line) if line.is_empty() => skip -= 1,
            Some(_) => {}
        }
    }
    // Discard the section header line.
    lines.next();
    let mut tickets = vec![];
    for line in lines {
        if line.is_empty() {
            break;
        }
        tickets.push(
            line.split(',')
                .map(|value| value.parse::<u16>().unwrap())
                .collect::<Vec<u16>>(),
        );
    }
    tickets
}
/// Part 1: the ticket scanning error rate — the sum of every value on the
/// nearby tickets that satisfies no constraint at all.
fn part1(puzzle: Vec<String>) -> usize {
    let constraint = parse_constraint(puzzle.clone());
    // Parsed but unused here; kept so malformed input fails the same way.
    let _mine = parse_ticket(puzzle.clone(), 1);
    let others = parse_ticket(puzzle.clone(), 2);
    let mut error_rate = 0usize;
    for ticket in others {
        for field in ticket {
            // Valid when the value sits inside either range of some rule.
            let matches_any = constraint.iter().any(|c| {
                (c.first_range.0..=c.first_range.1).contains(&field)
                    || (c.second_range.0..=c.second_range.1).contains(&field)
            });
            if !matches_any {
                error_rate += field as usize;
            }
        }
    }
    error_rate
}
/// Splits tickets into (invalid, valid): a ticket is invalid when at least
/// one of its fields satisfies no constraint.
fn filter(others: Vec<Vec<u16>>, constraint: Vec<Contraint>) -> (Vec<Vec<u16>>, Vec<Vec<u16>>) {
    let ticket_is_valid = |ticket: &Vec<u16>| {
        ticket.iter().all(|field| {
            constraint.iter().any(|c| {
                (c.first_range.0..=c.first_range.1).contains(field)
                    || (c.second_range.0..=c.second_range.1).contains(field)
            })
        })
    };
    // Partition by validity, then flip the pair to match the documented
    // (bad, good) return order.
    let (good, bad): (Vec<Vec<u16>>, Vec<Vec<u16>>) =
        others.into_iter().partition(|ticket| ticket_is_valid(ticket));
    (bad, good)
}
/// Part 2: drop invalid tickets, deduce by elimination which column belongs
/// to each constraint, then multiply our ticket's values in every
/// "departure" column. The elimination assumes the puzzle input always
/// leaves exactly one candidate per step (asserted below).
fn part2(puzzle: Vec<String>) -> usize {
    let constraint=parse_constraint(puzzle.clone());
    let mine=parse_ticket(puzzle.clone(),1);
    let others=parse_ticket(puzzle.clone(),2);
    // Keep only tickets where every field satisfies at least one constraint.
    let (_,good_tickets)=filter(others,constraint.clone());
    assert!(good_tickets.len()>0);
    // For each column i, collect every constraint that all good tickets
    // satisfy in that column.
    let mut attributed_constraints =vec![];
    for i in 0..good_tickets.get(0).unwrap().len(){
        let mut good_contraints =vec![];
        for c in &constraint{
            let mut good =true;
            for el in &good_tickets{
                let field=el.get(i).unwrap();
                // Outside both ranges -> this rule cannot govern column i.
                if (field> &c.first_range.1 || field< &c.first_range.0) &&(field> &c.second_range.1 || field< &c.second_range.0){
                    good=false;
                    break;
                }
            }
            if good{
                good_contraints.push((c,i));
            }
        }
        attributed_constraints.push(good_contraints);
    }
    // Elimination: process columns by ascending candidate count; at each step
    // exactly one candidate is not yet assigned, and it is fixed to that
    // column (recorded in `map`, excluded via `blacklist`).
    attributed_constraints.sort_by(|a,b| a.len().cmp(&b.len()));
    let mut blacklist:Vec<&Contraint>=vec![];
    let mut map:HashMap<&Contraint,usize>=HashMap::new();
    for attributed_constraint in attributed_constraints {
        let rest=attributed_constraint.iter().filter(|&x| !blacklist.contains(&x.0)).collect::<Vec<&(&Contraint, usize)>>();
        assert_eq!(rest.len(), 1);
        let t=rest.first().unwrap();
        blacklist.push(t.0);
        map.insert(t.0, t.1);
    }
    // Multiply our ticket's values across every "departure" column.
    let mut s =1;
    let my_ticket=mine.first().unwrap();
    for v in map.keys(){
        if v.name.starts_with("departure"){
            let pos=map.get(v).unwrap();
            let nbr=my_ticket.get(*pos).unwrap();
            s*= *nbr as usize;
        }
    }
    s
}
fn main() {
let input = fs::read_to_string("input/input.txt")
.expect("Something went wrong reading the file");
let lines = input.lines();
let mut puzzle: Vec<String> = vec![];
for line in lines {
puzzle.push(line.parse::<String>().expect("Ouf that's not a string !"));
}
println!("Running part1");
let now = Instant::now();
println!("Found {}", part1(puzzle.clone()));
println!("Took {}us", now.elapsed().as_micros());
println!("Running part2");
let now = Instant::now();
println!("Found {}", part2(puzzle.clone()));
println!("Took {}us", now.elapsed().as_micros());
} |
use gstreamer::glib;
use gstreamer::glib::translate::{from_glib, FromGlib, IntoGlib, ToGlibPtr, ToGlibPtrMut};
use gstreamer::glib::value::FromValue;
use gstreamer::glib::StaticType;
use std::ffi::{c_char, c_int};
// Rust-side mirror of GStreamer's GstAudioVisualizerShader enum; the
// `__Unknown` variant preserves values not known to these bindings.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy)]
#[non_exhaustive]
pub enum AudioVisualizerShader {
    None,
    Fade,
    FadeAndMoveUp,
    FadeAndMoveDown,
    FadeAndMoveLeft,
    FadeAndMoveRight,
    FadeAndMoveHorizOut,
    FadeAndMoveHorizIn,
    FadeAndMoveVertOut,
    FadeAndMoveVertIn,
    __Unknown(i32),
}
// Raw C-level values mirroring GstAudioVisualizerShader from the GStreamer
// headers; consumed by the IntoGlib/FromGlib conversions below.
pub type GstAudioVisualizerShader = c_int;
pub const GST_AUDIO_VISUALIZER_SHADER_NONE: GstAudioVisualizerShader = 0;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE: GstAudioVisualizerShader = 1;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_UP: GstAudioVisualizerShader = 2;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_DOWN: GstAudioVisualizerShader = 3;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_LEFT: GstAudioVisualizerShader = 4;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_RIGHT: GstAudioVisualizerShader = 5;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_OUT: GstAudioVisualizerShader = 6;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_IN: GstAudioVisualizerShader = 7;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_OUT: GstAudioVisualizerShader = 8;
pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_IN: GstAudioVisualizerShader = 9;
// Rust enum -> raw C value; `__Unknown` passes its payload through verbatim.
impl IntoGlib for AudioVisualizerShader {
    type GlibType = GstAudioVisualizerShader;
    #[inline]
    fn into_glib(self) -> Self::GlibType {
        match self {
            Self::None => GST_AUDIO_VISUALIZER_SHADER_NONE,
            Self::Fade => GST_AUDIO_VISUALIZER_SHADER_FADE,
            Self::FadeAndMoveUp => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_UP,
            Self::FadeAndMoveDown => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_DOWN,
            Self::FadeAndMoveLeft => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_LEFT,
            Self::FadeAndMoveRight => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_RIGHT,
            Self::FadeAndMoveHorizOut => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_OUT,
            Self::FadeAndMoveHorizIn => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_IN,
            Self::FadeAndMoveVertOut => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_OUT,
            Self::FadeAndMoveVertIn => GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_IN,
            Self::__Unknown(value) => value,
        }
    }
}
// Raw C value -> Rust enum; unrecognized values become `__Unknown` so no
// information is lost round-tripping through the bindings.
impl FromGlib<GstAudioVisualizerShader> for AudioVisualizerShader {
    #[inline]
    unsafe fn from_glib(value: GstAudioVisualizerShader) -> Self {
        match value {
            GST_AUDIO_VISUALIZER_SHADER_NONE => Self::None,
            GST_AUDIO_VISUALIZER_SHADER_FADE => Self::Fade,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_UP => Self::FadeAndMoveUp,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_DOWN => Self::FadeAndMoveDown,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_LEFT => Self::FadeAndMoveLeft,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_RIGHT => Self::FadeAndMoveRight,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_OUT => Self::FadeAndMoveHorizOut,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_IN => Self::FadeAndMoveHorizIn,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_OUT => Self::FadeAndMoveVertOut,
            GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_IN => Self::FadeAndMoveVertIn,
            _ => Self::__Unknown(value),
        }
    }
}
impl StaticType for AudioVisualizerShader {
    /// Looks up the registered GType by name.
    ///
    /// BUG FIX: `g_type_from_name` takes a NUL-terminated C string, but the
    /// original passed `"GstAudioVisualizerShader".as_ptr()` — Rust string
    /// literals are NOT NUL-terminated, so glib would read past the end of
    /// the literal. The byte-string literal below embeds the terminator.
    ///
    /// # Panics
    /// Panics if the type has not been registered (lookup returns 0), e.g.
    /// when the relevant GStreamer element library has not been loaded.
    #[inline]
    fn static_type() -> glib::Type {
        unsafe {
            // SAFETY: the argument is a valid, NUL-terminated C string.
            let value = glib::gobject_ffi::g_type_from_name(
                b"GstAudioVisualizerShader\0".as_ptr() as *const c_char,
            );
            assert_ne!(0, value);
            from_glib(value)
        }
    }
}
// Boxes the enum into a GValue of its registered enum type.
impl glib::ToValue for AudioVisualizerShader {
    #[inline]
    fn to_value(&self) -> glib::Value {
        let mut value = glib::Value::for_value_type::<Self>();
        unsafe {
            // SAFETY: `value` was initialized for this enum's GType above.
            glib::gobject_ffi::g_value_set_enum(value.to_glib_none_mut().0, self.into_glib());
        }
        value
    }
    #[inline]
    fn value_type(&self) -> glib::Type {
        Self::static_type()
    }
}
// Extracts the enum from a GValue; the checker verifies the GValue holds
// this type before `from_value` runs.
unsafe impl<'a> FromValue<'a> for AudioVisualizerShader {
    type Checker = glib::value::GenericValueTypeChecker<Self>;
    #[inline]
    unsafe fn from_value(value: &'a glib::Value) -> Self {
        from_glib(glib::gobject_ffi::g_value_get_enum(value.to_glib_none().0))
    }
}
// Marker enabling typed get/set of this enum on glib Values.
impl glib::value::ValueType for AudioVisualizerShader {
    type Type = Self;
}
|
extern crate libc as c;
use std::net::{Ipv4Addr,SocketAddrV4};
use std;
/// Converts a Rust `SocketAddrV4` into a C `sockaddr_in`.
///
/// `sin_addr` and `sin_port` are stored in network (big-endian) byte order.
/// The original converted the port with `u16::from_be`, which performs the
/// same byte swap as `to_be` on u16 (so behavior is unchanged) but states the
/// wrong direction; `to_be` is the semantically correct host->network
/// conversion.
pub fn into_c_sockaddr(addr: &SocketAddrV4) -> c::sockaddr_in {
    let ip = addr.ip();
    let octet = ip.octets();
    // Assemble the IPv4 address in host order, then convert to big-endian.
    let inaddr = c::in_addr {
        s_addr: (((octet[0] as u32) << 24)
            | ((octet[1] as u32) << 16)
            | ((octet[2] as u32) << 8)
            | (octet[3] as u32))
            .to_be(),
    };
    c::sockaddr_in {
        sin_family: c::AF_INET as u16,
        sin_port: addr.port().to_be(),
        sin_addr: inaddr,
        sin_zero: [0u8; 8],
    }
}
/// Converts a C `sockaddr_in` back into a Rust `SocketAddrV4`.
///
/// Both `sin_port` and `s_addr` arrive in network (big-endian) order. The
/// original applied `to_be` on the way out, which byte-swaps identically to
/// `from_be` (so behavior is unchanged) but misstates the direction;
/// `from_be` is the semantically correct network->host conversion.
pub fn from_c_sockaddr(addr: &c::sockaddr_in) -> SocketAddrV4 {
    let port = u16::from_be(addr.sin_port);
    let bits = u32::from_be(addr.sin_addr.s_addr);
    let octet = [(bits >> 24) as u8, (bits >> 16) as u8, (bits >> 8) as u8, bits as u8];
    let ip = Ipv4Addr::new(octet[0], octet[1], octet[2], octet[3]);
    SocketAddrV4::new(ip, port)
}
/// Returns the length and a const `sockaddr` pointer view of `addr`, as
/// expected by C socket calls.
pub fn to_ptr(addr: &c::sockaddr_in) -> (c::socklen_t, *const c::sockaddr) {
    let len = std::mem::size_of::<c::sockaddr_in>() as c::socklen_t;
    (len, addr as *const c::sockaddr_in as *const c::sockaddr)
}
/// Returns the length and a mutable `sockaddr` pointer view of `addr`, for C
/// calls that write the address out (e.g. `accept`, `recvfrom`).
pub fn to_mut_ptr(addr: &mut c::sockaddr_in) -> (c::socklen_t, *mut c::sockaddr) {
    let len = std::mem::size_of::<c::sockaddr_in>() as c::socklen_t;
    (len, addr as *mut c::sockaddr_in as *mut c::sockaddr)
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[cfg(feature = "Devices_PointOfService_Provider")]
pub mod Provider;
#[link(name = "windows")]
extern "system" {}
// Generated WinRT bindings (Windows.Devices.PointOfService). Runtime classes
// are opaque COM interface pointers; enum-like types are transparent i32
// newtypes with associated constants, matching the metadata values.
pub type BarcodeScanner = *mut ::core::ffi::c_void;
pub type BarcodeScannerCapabilities = *mut ::core::ffi::c_void;
pub type BarcodeScannerDataReceivedEventArgs = *mut ::core::ffi::c_void;
pub type BarcodeScannerErrorOccurredEventArgs = *mut ::core::ffi::c_void;
pub type BarcodeScannerImagePreviewReceivedEventArgs = *mut ::core::ffi::c_void;
pub type BarcodeScannerReport = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct BarcodeScannerStatus(pub i32);
impl BarcodeScannerStatus {
    pub const Online: Self = Self(0i32);
    pub const Off: Self = Self(1i32);
    pub const Offline: Self = Self(2i32);
    pub const OffOrOffline: Self = Self(3i32);
    pub const Extended: Self = Self(4i32);
}
impl ::core::marker::Copy for BarcodeScannerStatus {}
impl ::core::clone::Clone for BarcodeScannerStatus {
    fn clone(&self) -> Self {
        *self
    }
}
pub type BarcodeScannerStatusUpdatedEventArgs = *mut ::core::ffi::c_void;
pub type BarcodeSymbologyAttributes = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct BarcodeSymbologyDecodeLengthKind(pub i32);
impl BarcodeSymbologyDecodeLengthKind {
    pub const AnyLength: Self = Self(0i32);
    pub const Discrete: Self = Self(1i32);
    pub const Range: Self = Self(2i32);
}
impl ::core::marker::Copy for BarcodeSymbologyDecodeLengthKind {}
impl ::core::clone::Clone for BarcodeSymbologyDecodeLengthKind {
    fn clone(&self) -> Self {
        *self
    }
}
// Cash drawer, claimed-device, and printer-related WinRT handle aliases.
pub type CashDrawer = *mut ::core::ffi::c_void;
pub type CashDrawerCapabilities = *mut ::core::ffi::c_void;
pub type CashDrawerCloseAlarm = *mut ::core::ffi::c_void;
pub type CashDrawerClosedEventArgs = *mut ::core::ffi::c_void;
pub type CashDrawerEventSource = *mut ::core::ffi::c_void;
pub type CashDrawerOpenedEventArgs = *mut ::core::ffi::c_void;
pub type CashDrawerStatus = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct CashDrawerStatusKind(pub i32);
impl CashDrawerStatusKind {
    pub const Online: Self = Self(0i32);
    pub const Off: Self = Self(1i32);
    pub const Offline: Self = Self(2i32);
    pub const OffOrOffline: Self = Self(3i32);
    pub const Extended: Self = Self(4i32);
}
impl ::core::marker::Copy for CashDrawerStatusKind {}
impl ::core::clone::Clone for CashDrawerStatusKind {
    fn clone(&self) -> Self {
        *self
    }
}
pub type CashDrawerStatusUpdatedEventArgs = *mut ::core::ffi::c_void;
pub type ClaimedBarcodeScanner = *mut ::core::ffi::c_void;
pub type ClaimedBarcodeScannerClosedEventArgs = *mut ::core::ffi::c_void;
pub type ClaimedCashDrawer = *mut ::core::ffi::c_void;
pub type ClaimedCashDrawerClosedEventArgs = *mut ::core::ffi::c_void;
pub type ClaimedJournalPrinter = *mut ::core::ffi::c_void;
pub type ClaimedLineDisplay = *mut ::core::ffi::c_void;
pub type ClaimedLineDisplayClosedEventArgs = *mut ::core::ffi::c_void;
pub type ClaimedMagneticStripeReader = *mut ::core::ffi::c_void;
pub type ClaimedMagneticStripeReaderClosedEventArgs = *mut ::core::ffi::c_void;
pub type ClaimedPosPrinter = *mut ::core::ffi::c_void;
pub type ClaimedPosPrinterClosedEventArgs = *mut ::core::ffi::c_void;
pub type ClaimedReceiptPrinter = *mut ::core::ffi::c_void;
pub type ClaimedSlipPrinter = *mut ::core::ffi::c_void;
pub type ICashDrawerEventSourceEventArgs = *mut ::core::ffi::c_void;
pub type ICommonClaimedPosPrinterStation = *mut ::core::ffi::c_void;
pub type ICommonPosPrintStationCapabilities = *mut ::core::ffi::c_void;
pub type ICommonReceiptSlipCapabilities = *mut ::core::ffi::c_void;
pub type IPosPrinterJob = *mut ::core::ffi::c_void;
pub type IReceiptOrSlipJob = *mut ::core::ffi::c_void;
pub type JournalPrintJob = *mut ::core::ffi::c_void;
pub type JournalPrinterCapabilities = *mut ::core::ffi::c_void;
pub type LineDisplay = *mut ::core::ffi::c_void;
pub type LineDisplayAttributes = *mut ::core::ffi::c_void;
pub type LineDisplayCapabilities = *mut ::core::ffi::c_void;
pub type LineDisplayCursor = *mut ::core::ffi::c_void;
pub type LineDisplayCursorAttributes = *mut ::core::ffi::c_void;
// Line-display enum-like types (cursor shape, descriptor state, alignment,
// marquee format, power status, scroll direction) plus remaining handles.
#[repr(transparent)]
pub struct LineDisplayCursorType(pub i32);
impl LineDisplayCursorType {
    pub const None: Self = Self(0i32);
    pub const Block: Self = Self(1i32);
    pub const HalfBlock: Self = Self(2i32);
    pub const Underline: Self = Self(3i32);
    pub const Reverse: Self = Self(4i32);
    pub const Other: Self = Self(5i32);
}
impl ::core::marker::Copy for LineDisplayCursorType {}
impl ::core::clone::Clone for LineDisplayCursorType {
    fn clone(&self) -> Self {
        *self
    }
}
pub type LineDisplayCustomGlyphs = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct LineDisplayDescriptorState(pub i32);
impl LineDisplayDescriptorState {
    pub const Off: Self = Self(0i32);
    pub const On: Self = Self(1i32);
    pub const Blink: Self = Self(2i32);
}
impl ::core::marker::Copy for LineDisplayDescriptorState {}
impl ::core::clone::Clone for LineDisplayDescriptorState {
    fn clone(&self) -> Self {
        *self
    }
}
#[repr(transparent)]
pub struct LineDisplayHorizontalAlignment(pub i32);
impl LineDisplayHorizontalAlignment {
    pub const Left: Self = Self(0i32);
    pub const Center: Self = Self(1i32);
    pub const Right: Self = Self(2i32);
}
impl ::core::marker::Copy for LineDisplayHorizontalAlignment {}
impl ::core::clone::Clone for LineDisplayHorizontalAlignment {
    fn clone(&self) -> Self {
        *self
    }
}
pub type LineDisplayMarquee = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct LineDisplayMarqueeFormat(pub i32);
impl LineDisplayMarqueeFormat {
    pub const None: Self = Self(0i32);
    pub const Walk: Self = Self(1i32);
    pub const Place: Self = Self(2i32);
}
impl ::core::marker::Copy for LineDisplayMarqueeFormat {}
impl ::core::clone::Clone for LineDisplayMarqueeFormat {
    fn clone(&self) -> Self {
        *self
    }
}
#[repr(transparent)]
pub struct LineDisplayPowerStatus(pub i32);
impl LineDisplayPowerStatus {
    pub const Unknown: Self = Self(0i32);
    pub const Online: Self = Self(1i32);
    pub const Off: Self = Self(2i32);
    pub const Offline: Self = Self(3i32);
    pub const OffOrOffline: Self = Self(4i32);
}
impl ::core::marker::Copy for LineDisplayPowerStatus {}
impl ::core::clone::Clone for LineDisplayPowerStatus {
    fn clone(&self) -> Self {
        *self
    }
}
#[repr(transparent)]
pub struct LineDisplayScrollDirection(pub i32);
impl LineDisplayScrollDirection {
    pub const Up: Self = Self(0i32);
    pub const Down: Self = Self(1i32);
    pub const Left: Self = Self(2i32);
    pub const Right: Self = Self(3i32);
}
impl ::core::marker::Copy for LineDisplayScrollDirection {}
impl ::core::clone::Clone for LineDisplayScrollDirection {
    fn clone(&self) -> Self {
        *self
    }
}
pub type LineDisplayStatisticsCategorySelector = *mut ::core::ffi::c_void;
pub type LineDisplayStatusUpdatedEventArgs = *mut ::core::ffi::c_void;
pub type LineDisplayStoredBitmap = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct LineDisplayTextAttribute(pub i32);
impl LineDisplayTextAttribute {
pub const Normal: Self = Self(0i32);
pub const Blink: Self = Self(1i32);
pub const Reverse: Self = Self(2i32);
pub const ReverseBlink: Self = Self(3i32);
}
impl ::core::marker::Copy for LineDisplayTextAttribute {}
impl ::core::clone::Clone for LineDisplayTextAttribute {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct LineDisplayTextAttributeGranularity(pub i32);
impl LineDisplayTextAttributeGranularity {
pub const NotSupported: Self = Self(0i32);
pub const EntireDisplay: Self = Self(1i32);
pub const PerCharacter: Self = Self(2i32);
}
impl ::core::marker::Copy for LineDisplayTextAttributeGranularity {}
impl ::core::clone::Clone for LineDisplayTextAttributeGranularity {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct LineDisplayVerticalAlignment(pub i32);
impl LineDisplayVerticalAlignment {
pub const Top: Self = Self(0i32);
pub const Center: Self = Self(1i32);
pub const Bottom: Self = Self(2i32);
}
impl ::core::marker::Copy for LineDisplayVerticalAlignment {}
impl ::core::clone::Clone for LineDisplayVerticalAlignment {
fn clone(&self) -> Self {
*self
}
}
pub type LineDisplayWindow = *mut ::core::ffi::c_void;
pub type MagneticStripeReader = *mut ::core::ffi::c_void;
pub type MagneticStripeReaderAamvaCardDataReceivedEventArgs = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct MagneticStripeReaderAuthenticationLevel(pub i32);
impl MagneticStripeReaderAuthenticationLevel {
pub const NotSupported: Self = Self(0i32);
pub const Optional: Self = Self(1i32);
pub const Required: Self = Self(2i32);
}
impl ::core::marker::Copy for MagneticStripeReaderAuthenticationLevel {}
impl ::core::clone::Clone for MagneticStripeReaderAuthenticationLevel {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct MagneticStripeReaderAuthenticationProtocol(pub i32);
impl MagneticStripeReaderAuthenticationProtocol {
pub const None: Self = Self(0i32);
pub const ChallengeResponse: Self = Self(1i32);
}
impl ::core::marker::Copy for MagneticStripeReaderAuthenticationProtocol {}
impl ::core::clone::Clone for MagneticStripeReaderAuthenticationProtocol {
fn clone(&self) -> Self {
*self
}
}
pub type MagneticStripeReaderBankCardDataReceivedEventArgs = *mut ::core::ffi::c_void;
pub type MagneticStripeReaderCapabilities = *mut ::core::ffi::c_void;
pub type MagneticStripeReaderErrorOccurredEventArgs = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct MagneticStripeReaderErrorReportingType(pub i32);
impl MagneticStripeReaderErrorReportingType {
pub const CardLevel: Self = Self(0i32);
pub const TrackLevel: Self = Self(1i32);
}
impl ::core::marker::Copy for MagneticStripeReaderErrorReportingType {}
impl ::core::clone::Clone for MagneticStripeReaderErrorReportingType {
fn clone(&self) -> Self {
*self
}
}
pub type MagneticStripeReaderReport = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct MagneticStripeReaderStatus(pub i32);
impl MagneticStripeReaderStatus {
pub const Unauthenticated: Self = Self(0i32);
pub const Authenticated: Self = Self(1i32);
pub const Extended: Self = Self(2i32);
}
impl ::core::marker::Copy for MagneticStripeReaderStatus {}
impl ::core::clone::Clone for MagneticStripeReaderStatus {
fn clone(&self) -> Self {
*self
}
}
pub type MagneticStripeReaderStatusUpdatedEventArgs = *mut ::core::ffi::c_void;
pub type MagneticStripeReaderTrackData = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct MagneticStripeReaderTrackErrorType(pub i32);
impl MagneticStripeReaderTrackErrorType {
pub const None: Self = Self(0i32);
pub const StartSentinelError: Self = Self(1i32);
pub const EndSentinelError: Self = Self(2i32);
pub const ParityError: Self = Self(3i32);
pub const LrcError: Self = Self(4i32);
pub const Unknown: Self = Self(-1i32);
}
impl ::core::marker::Copy for MagneticStripeReaderTrackErrorType {}
impl ::core::clone::Clone for MagneticStripeReaderTrackErrorType {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct MagneticStripeReaderTrackIds(pub i32);
impl MagneticStripeReaderTrackIds {
pub const None: Self = Self(0i32);
pub const Track1: Self = Self(1i32);
pub const Track2: Self = Self(2i32);
pub const Track3: Self = Self(4i32);
pub const Track4: Self = Self(8i32);
}
impl ::core::marker::Copy for MagneticStripeReaderTrackIds {}
impl ::core::clone::Clone for MagneticStripeReaderTrackIds {
fn clone(&self) -> Self {
*self
}
}
pub type MagneticStripeReaderVendorSpecificCardDataReceivedEventArgs = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosConnectionTypes(pub u32);
impl PosConnectionTypes {
pub const Local: Self = Self(1u32);
pub const IP: Self = Self(2u32);
pub const Bluetooth: Self = Self(4u32);
pub const All: Self = Self(4294967295u32);
}
impl ::core::marker::Copy for PosConnectionTypes {}
impl ::core::clone::Clone for PosConnectionTypes {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinter = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosPrinterAlignment(pub i32);
impl PosPrinterAlignment {
pub const Left: Self = Self(0i32);
pub const Center: Self = Self(1i32);
pub const Right: Self = Self(2i32);
}
impl ::core::marker::Copy for PosPrinterAlignment {}
impl ::core::clone::Clone for PosPrinterAlignment {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterBarcodeTextPosition(pub i32);
impl PosPrinterBarcodeTextPosition {
pub const None: Self = Self(0i32);
pub const Above: Self = Self(1i32);
pub const Below: Self = Self(2i32);
}
impl ::core::marker::Copy for PosPrinterBarcodeTextPosition {}
impl ::core::clone::Clone for PosPrinterBarcodeTextPosition {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinterCapabilities = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosPrinterCartridgeSensors(pub u32);
impl PosPrinterCartridgeSensors {
pub const None: Self = Self(0u32);
pub const Removed: Self = Self(1u32);
pub const Empty: Self = Self(2u32);
pub const HeadCleaning: Self = Self(4u32);
pub const NearEnd: Self = Self(8u32);
}
impl ::core::marker::Copy for PosPrinterCartridgeSensors {}
impl ::core::clone::Clone for PosPrinterCartridgeSensors {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterColorCapabilities(pub u32);
impl PosPrinterColorCapabilities {
pub const None: Self = Self(0u32);
pub const Primary: Self = Self(1u32);
pub const Custom1: Self = Self(2u32);
pub const Custom2: Self = Self(4u32);
pub const Custom3: Self = Self(8u32);
pub const Custom4: Self = Self(16u32);
pub const Custom5: Self = Self(32u32);
pub const Custom6: Self = Self(64u32);
pub const Cyan: Self = Self(128u32);
pub const Magenta: Self = Self(256u32);
pub const Yellow: Self = Self(512u32);
pub const Full: Self = Self(1024u32);
}
impl ::core::marker::Copy for PosPrinterColorCapabilities {}
impl ::core::clone::Clone for PosPrinterColorCapabilities {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterColorCartridge(pub i32);
impl PosPrinterColorCartridge {
pub const Unknown: Self = Self(0i32);
pub const Primary: Self = Self(1i32);
pub const Custom1: Self = Self(2i32);
pub const Custom2: Self = Self(3i32);
pub const Custom3: Self = Self(4i32);
pub const Custom4: Self = Self(5i32);
pub const Custom5: Self = Self(6i32);
pub const Custom6: Self = Self(7i32);
pub const Cyan: Self = Self(8i32);
pub const Magenta: Self = Self(9i32);
pub const Yellow: Self = Self(10i32);
}
impl ::core::marker::Copy for PosPrinterColorCartridge {}
impl ::core::clone::Clone for PosPrinterColorCartridge {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinterFontProperty = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosPrinterLineDirection(pub i32);
impl PosPrinterLineDirection {
pub const Horizontal: Self = Self(0i32);
pub const Vertical: Self = Self(1i32);
}
impl ::core::marker::Copy for PosPrinterLineDirection {}
impl ::core::clone::Clone for PosPrinterLineDirection {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterLineStyle(pub i32);
impl PosPrinterLineStyle {
pub const SingleSolid: Self = Self(0i32);
pub const DoubleSolid: Self = Self(1i32);
pub const Broken: Self = Self(2i32);
pub const Chain: Self = Self(3i32);
}
impl ::core::marker::Copy for PosPrinterLineStyle {}
impl ::core::clone::Clone for PosPrinterLineStyle {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterMapMode(pub i32);
impl PosPrinterMapMode {
pub const Dots: Self = Self(0i32);
pub const Twips: Self = Self(1i32);
pub const English: Self = Self(2i32);
pub const Metric: Self = Self(3i32);
}
impl ::core::marker::Copy for PosPrinterMapMode {}
impl ::core::clone::Clone for PosPrinterMapMode {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterMarkFeedCapabilities(pub u32);
impl PosPrinterMarkFeedCapabilities {
pub const None: Self = Self(0u32);
pub const ToTakeUp: Self = Self(1u32);
pub const ToCutter: Self = Self(2u32);
pub const ToCurrentTopOfForm: Self = Self(4u32);
pub const ToNextTopOfForm: Self = Self(8u32);
}
impl ::core::marker::Copy for PosPrinterMarkFeedCapabilities {}
impl ::core::clone::Clone for PosPrinterMarkFeedCapabilities {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterMarkFeedKind(pub i32);
impl PosPrinterMarkFeedKind {
pub const ToTakeUp: Self = Self(0i32);
pub const ToCutter: Self = Self(1i32);
pub const ToCurrentTopOfForm: Self = Self(2i32);
pub const ToNextTopOfForm: Self = Self(3i32);
}
impl ::core::marker::Copy for PosPrinterMarkFeedKind {}
impl ::core::clone::Clone for PosPrinterMarkFeedKind {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinterPrintOptions = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosPrinterPrintSide(pub i32);
impl PosPrinterPrintSide {
pub const Unknown: Self = Self(0i32);
pub const Side1: Self = Self(1i32);
pub const Side2: Self = Self(2i32);
}
impl ::core::marker::Copy for PosPrinterPrintSide {}
impl ::core::clone::Clone for PosPrinterPrintSide {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinterReleaseDeviceRequestedEventArgs = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosPrinterRotation(pub i32);
impl PosPrinterRotation {
pub const Normal: Self = Self(0i32);
pub const Right90: Self = Self(1i32);
pub const Left90: Self = Self(2i32);
pub const Rotate180: Self = Self(3i32);
}
impl ::core::marker::Copy for PosPrinterRotation {}
impl ::core::clone::Clone for PosPrinterRotation {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct PosPrinterRuledLineCapabilities(pub u32);
impl PosPrinterRuledLineCapabilities {
pub const None: Self = Self(0u32);
pub const Horizontal: Self = Self(1u32);
pub const Vertical: Self = Self(2u32);
}
impl ::core::marker::Copy for PosPrinterRuledLineCapabilities {}
impl ::core::clone::Clone for PosPrinterRuledLineCapabilities {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinterStatus = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct PosPrinterStatusKind(pub i32);
impl PosPrinterStatusKind {
pub const Online: Self = Self(0i32);
pub const Off: Self = Self(1i32);
pub const Offline: Self = Self(2i32);
pub const OffOrOffline: Self = Self(3i32);
pub const Extended: Self = Self(4i32);
}
impl ::core::marker::Copy for PosPrinterStatusKind {}
impl ::core::clone::Clone for PosPrinterStatusKind {
fn clone(&self) -> Self {
*self
}
}
pub type PosPrinterStatusUpdatedEventArgs = *mut ::core::ffi::c_void;
pub type ReceiptPrintJob = *mut ::core::ffi::c_void;
pub type ReceiptPrinterCapabilities = *mut ::core::ffi::c_void;
#[repr(C)]
pub struct SizeUInt32 {
pub Width: u32,
pub Height: u32,
}
impl ::core::marker::Copy for SizeUInt32 {}
impl ::core::clone::Clone for SizeUInt32 {
fn clone(&self) -> Self {
*self
}
}
pub type SlipPrintJob = *mut ::core::ffi::c_void;
pub type SlipPrinterCapabilities = *mut ::core::ffi::c_void;
pub type UnifiedPosErrorData = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct UnifiedPosErrorReason(pub i32);
impl UnifiedPosErrorReason {
pub const UnknownErrorReason: Self = Self(0i32);
pub const NoService: Self = Self(1i32);
pub const Disabled: Self = Self(2i32);
pub const Illegal: Self = Self(3i32);
pub const NoHardware: Self = Self(4i32);
pub const Closed: Self = Self(5i32);
pub const Offline: Self = Self(6i32);
pub const Failure: Self = Self(7i32);
pub const Timeout: Self = Self(8i32);
pub const Busy: Self = Self(9i32);
pub const Extended: Self = Self(10i32);
}
impl ::core::marker::Copy for UnifiedPosErrorReason {}
impl ::core::clone::Clone for UnifiedPosErrorReason {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct UnifiedPosErrorSeverity(pub i32);
impl UnifiedPosErrorSeverity {
pub const UnknownErrorSeverity: Self = Self(0i32);
pub const Warning: Self = Self(1i32);
pub const Recoverable: Self = Self(2i32);
pub const Unrecoverable: Self = Self(3i32);
pub const AssistanceRequired: Self = Self(4i32);
pub const Fatal: Self = Self(5i32);
}
impl ::core::marker::Copy for UnifiedPosErrorSeverity {}
impl ::core::clone::Clone for UnifiedPosErrorSeverity {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct UnifiedPosHealthCheckLevel(pub i32);
impl UnifiedPosHealthCheckLevel {
pub const UnknownHealthCheckLevel: Self = Self(0i32);
pub const POSInternal: Self = Self(1i32);
pub const External: Self = Self(2i32);
pub const Interactive: Self = Self(3i32);
}
impl ::core::marker::Copy for UnifiedPosHealthCheckLevel {}
impl ::core::clone::Clone for UnifiedPosHealthCheckLevel {
fn clone(&self) -> Self {
*self
}
}
#[repr(transparent)]
pub struct UnifiedPosPowerReportingType(pub i32);
impl UnifiedPosPowerReportingType {
pub const UnknownPowerReportingType: Self = Self(0i32);
pub const Standard: Self = Self(1i32);
pub const Advanced: Self = Self(2i32);
}
impl ::core::marker::Copy for UnifiedPosPowerReportingType {}
impl ::core::clone::Clone for UnifiedPosPowerReportingType {
fn clone(&self) -> Self {
*self
}
}
|
// Byte-buffer extension traits used by the MSSQL wire-protocol codec.
mod buf;
mod buf_mut;
// Re-export crate-wide so decoding/encoding code can import both extension
// traits from this module with a single `use`.
pub(crate) use buf::MssqlBufExt;
pub(crate) use buf_mut::MssqlBufMutExt;
|
// Public crate modules exposed to downstream users.
pub mod canvas;
pub mod ray_tracer;
pub mod material;
|
#![allow(dead_code)]
//use rmp;
//use rmp_serialize;
use std::collections::HashMap;
use std::cell::{RefCell, RefMut};
use std::rc::Rc;
use std::fmt;
use woot::{IncrementalStamper};
use document::{Node, NodeP};
use layout::Writer;
use environment::{LocalEnv, LayoutChain, prepare_graph};
use futures::Future;
use wheel::prelude::*;
use config::Config;
use super::LoomError;
use nodes::Module;
/// Identifies a registered node type in `typelist`.
pub type TypeId = u16;
/// Size, in bytes, of a serialized payload.
pub type DataSize = u32;
/// Unique two-part ID issued by the `IncrementalStamper` (see `IoMachine`).
pub type Stamp = (u32, u32);
/// Buffer type produced by asynchronous file reads.
pub type Data = <File as AsyncRead>::Buffer;
/// A single-use token that pairs a pre-allocated `Stamp` with a handle to the
/// shared `IoMachine`, allowing data to be submitted for that stamp exactly once.
pub struct IoCreate {
    stamp: Stamp,
    io_ref: Io
}
impl IoCreate {
    /// Submits `data` under this token's stamp, consuming the token.
    pub fn submit(self, data: &[u8]) {
        // This is not Send -> submit can't be called twice at the same time
        self.io_ref.borrow_mut().add_data(self.stamp, data);
    }
    /// Returns the stamp reserved for this token.
    pub fn stamp(&self) -> Stamp {
        self.stamp
    }
}
/// Cheap-to-clone handle to the shared `IoMachine` plus a logger.
/// All clones refer to the same underlying state (`Rc<RefCell<_>>`).
#[derive(Clone)]
pub struct Io {
    io: Rc<RefCell<IoMachine>>,
    pub log: Log
}
impl Io {
    /// Mutably borrows the shared `IoMachine`.
    ///
    /// Panics (via `RefCell`) if another borrow is outstanding.
    fn borrow_mut(&self) -> RefMut<IoMachine> {
        self.io.borrow_mut()
    }
    /// Parses `yarn` source text into a `Yarn` document and registers its
    /// node graph with the shared `IoMachine`.
    pub fn yarn(&self, yarn: String) -> Box<Future<Item=Yarn, Error=LoomError>> {
        let env = prepare_graph(self);
        let io = self.clone();
        // The clone moved into the future keeps the machine alive, and no
        // RefCell borrow is held when this function returns from this call...
        box Module::parse(io.clone(), env.clone(), yarn)
        .and_then(move |root: NodeP| {
            let io = io;
            // ...thus borrowing again inside the continuation cannot panic.
            io.borrow_mut().insert_node(root.clone());
            Ok(Yarn {
                root: root,
                env: env.take()
            })
        })
    }
    /// Reads a yarn file asynchronously, decodes it as UTF-8, and parses it
    /// via `Io::yarn`. Panics if the file is not valid UTF-8.
    pub fn load_yarn(&self, yarn: File) -> Box<Future<Item=Yarn, Error=LoomError>>
    {
        let io = self.clone();
        trace!(self.log, "load_yarn");
        box read(yarn)
        .and_then(move |data| {
            let io = io;
            let string = String::from_utf8(data.to_vec()).expect("invalid utf8");
            io.yarn(string)
        })
    }
    /// Reserves a fresh stamp and returns a single-use submission token.
    pub fn create(&self) -> IoCreate {
        IoCreate {
            stamp: self.borrow_mut().stamp(),
            io_ref: self.clone()
        }
    }
    /// Runs `f` with read access to the machine's `Config` and returns its result.
    pub fn config<F, O>(&self, f: F) -> O where F: FnOnce(&Config) -> O {
        f(&self.io.borrow().config)
    }
}
/// A parsed document: the root node of the AST plus its local environment.
pub struct Yarn {
    root: NodeP,
    env: LocalEnv
}
impl Yarn {
    /// Lays out the document into `w`, starting from a root layout chain
    /// built over this yarn's environment.
    pub fn layout<W: Writer>(&self, w: &mut W) {
        self.root.layout(LayoutChain::root(&self.env), w)
    }
}
impl fmt::Debug for Yarn {
    // Opaque debug representation; the node graph itself is not printed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Yarn")
    }
}
/// Metadata about a registered node type. Currently only the name is kept;
/// the decode hook is not implemented yet.
struct NodeType {
    _name: String,
    //decode: Box<Fn(&mut Decoder<Vec<u8>>) -> Result<Decoder::Error>>,
}
/// Items of the (future) persisted operation stream.
enum StreamItem {
    /// Add a new node type to the known node types.
    /// If the name is known, the appropriate entry is inserted into the
    /// IoMachine and the environment.
    /// Optionally it can be tried to load the name from a dynamic library.
    NodeDecl(String),
    /// Instantiate a node of the given type from `DataSize` bytes of payload.
    CreateNode(TypeId, DataSize, Stamp),
    /// Remove the node identified by the stamp.
    DeleteNode(Stamp),
    /// An operation applied to an existing node.
    Op(Stamp, DataSize)
}
/// Owns the document's node graph, the stamp generator, the registered node
/// types, and the configuration.
pub struct IoMachine {
    // we write everything to disk as well
    // uncompressed data to go into compressor
    nodes: HashMap<Stamp, NodeP>,
    // Source of unique `Stamp`s.
    stamper: IncrementalStamper<u32, u32>,
    // Registered node types (see `StreamItem::NodeDecl`).
    typelist: Vec<NodeType>,
    config: Config,
}
impl IoMachine {
    /// Accepts serialized data for a stamp. Currently a no-op stub;
    /// persistence is not implemented yet.
    fn add_data(&self, _stamp: Stamp, _data: &[u8]) {
    }
    /// Issues a fresh unique stamp.
    fn stamp(&mut self) -> Stamp {
        self.stamper.stamp()
    }
    /// Flushes pending output. Currently a no-op (no encoder wired up yet).
    pub fn flush(&mut self) {
        //self.encoder.flush();
    }
    /// storage: Some(path) to store the document
    /// None to throw it away.
    pub fn new(config: Config) -> IoMachine {
        IoMachine {
            nodes: HashMap::new(),
            stamper: IncrementalStamper::init_random(),
            typelist: vec![],
            config: config
        }
    }
    /// Registers `node` and every node reachable from it, assigning each a
    /// fresh stamp.
    pub fn insert_node(&mut self, node: NodeP) {
        // Nodes may have strange links. Avoid recursion!
        let mut queue = vec![node];
        while let Some(n) = queue.pop() {
            // add children to the queue
            n.childs(&mut queue);
            // make up an ID
            let id = self.stamper.stamp();
            // push into queue
            //self.queue_out.push(Shared{ id: id, node: n });
            // store object (consumes it)
            self.nodes.insert(id, n);
        }
    }
    /// Wraps the machine in a shared, clonable `Io` handle, attaching a
    /// fresh logger branch.
    pub fn to_ref(self) -> Io {
        Io {
            io: Rc::new(RefCell::new(self)),
            log: Log::root().branch()
        }
    }
}
/// Opens the directory `name`, mapping failures to `LoomError::DirectoryOpen`.
pub fn open_dir(name: &str) -> Box<Future<Item=Directory, Error=LoomError>>
{
    box Directory::open(name)
    .map_err(|e| LoomError::DirectoryOpen(e))
}
/// Opens the file `name` within `dir`, mapping failures to
/// `LoomError::DirectoryGetFile`.
pub fn open(dir: &Directory, name: &str) -> Box<Future<Item=File, Error=LoomError>>
{
    box dir.get_file(name)
    .map_err(|e| LoomError::DirectoryGetFile(e))
}
/// Reads the entire file, mapping failures to `LoomError::FileRead`.
pub fn read(file: File) -> Box<Future<Item=Data, Error=LoomError>> {
    box file.read().map_err(|e| LoomError::FileRead(e))
}
/// Convenience combinator: `open` followed by `read`.
pub fn open_read(dir: &Directory, name: &str) -> Box<Future<Item=Data, Error=LoomError>> {
    box dir.get_file(name)
    .map_err(|e| LoomError::DirectoryGetFile(e))
    .and_then(|file| file.read().map_err(|e| LoomError::FileRead(e)))
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Defines `CobaltSender` to encapsulate encoding `CobaltEvent` and sending them to the Cobalt
//! FIDL service.
use {
crate::traits::AsEventCodes,
fidl_fuchsia_cobalt::{CobaltEvent, CountEvent, EventPayload, HistogramBucket},
futures::channel::mpsc,
log::{error, info},
std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
/// Attaches a `#[doc = ...]` attribute built from an expression (typically a
/// `concat!` of string literals) to the item that follows it.
///
/// The single-argument form attaches the doc string to an empty `extern`
/// block, which gives the expression an item to hang off of.
macro_rules! gen_comment {
    ($x:expr) => {
        #[doc = $x]
        extern {}
    };
    ($x:expr, $($tt:tt)*) => {
        #[doc = $x]
        $($tt)*
    };
}
/// Wraps around an `mpsc::Sender` to allow sending CobaltEvents asynchronously to the Cobalt
/// FIDL service.
#[derive(Clone, Debug)]
pub struct CobaltSender {
    // Sending half of the channel; the receiver forwards events to Cobalt.
    sender: mpsc::Sender<CobaltEvent>,
    // Set while `try_send` is failing so the "dropping events" error is logged
    // once per blocked period; shared across clones via `Arc`.
    is_blocked: Arc<AtomicBool>,
}
/// Generates componentless convenience methods on `CobaltSender`.
///
/// Each `#[Variant] name(args...)` entry expands to a public method that
/// forwards to the identically named method on `CobaltSenderWithComponent`,
/// passing `None` for the component string. A doc comment naming the wrapped
/// `EventPayload` variant is generated via `gen_comment!`.
macro_rules! impl_wrapped_methods {
    ($(#[$variant:ident] $name:ident($($arg:ident: $ty:ty),*))*) => {
        $(
            gen_comment!{
                concat! {
                    "Logs a CobaltEvent of type `EventPayload::",
                    stringify!($variant),
                    "` by wrapping a call to `CobaltSenderWithComponent::",
                    stringify!($name), "`."
                },
                pub fn $name<Codes: AsEventCodes>(
                    &mut self,
                    metric_id: u32,
                    event_codes: Codes,
                    $($arg: $ty),*
                ) {
                    self.with_component().$name::<_, String, _>(
                        metric_id,
                        event_codes,
                        None,
                        $($arg),*
                    );
                }
            }
        )*
    }
}
impl CobaltSender {
    /// Constructs a new CobaltSender object.
    ///
    /// # Arguments
    ///
    /// * `sender` - The sending end of a `mpsc::channel`
    pub fn new(sender: mpsc::Sender<CobaltEvent>) -> CobaltSender {
        CobaltSender { sender, is_blocked: Arc::new(AtomicBool::new(false)) }
    }
    /// Accesses the sidecar struct `CobaltSenderWithComponent` to allow logging to cobalt with
    /// component strings.
    pub fn with_component(&mut self) -> CobaltSenderWithComponent {
        CobaltSenderWithComponent(self)
    }
    /// Logs a CobaltEvent of type `EventPayload::Event`.
    pub fn log_event<Codes: AsEventCodes>(&mut self, metric_id: u32, event_codes: Codes) {
        self.log_event_value(CobaltEvent {
            metric_id,
            event_codes: event_codes.as_event_codes(),
            component: None,
            payload: EventPayload::Event(fidl_fuchsia_cobalt::Event {}),
        });
    }
    /// Logs a CobaltEvent of type `EventPayload::StringEvent`.
    pub fn log_string<S: Into<String>>(&mut self, metric_id: u32, string: S) {
        self.log_event_value(CobaltEvent {
            metric_id,
            event_codes: vec![],
            component: None,
            payload: EventPayload::StringEvent(string.into()),
        });
    }
    /// Logs a plain CobaltEvent.
    pub fn log_cobalt_event(&mut self, event: CobaltEvent) {
        self.log_event_value(event);
    }
    /// Sends `event` on the channel, tracking blocked/unblocked transitions so
    /// that the error (and the recovery notice) are each logged only once per
    /// blocked period rather than once per dropped event.
    fn log_event_value(&mut self, event: CobaltEvent) {
        if self.sender.try_send(event).is_err() {
            // `swap` returns the previous value. This replaces the deprecated
            // `compare_and_swap(false, true, _)`, which is equivalent here:
            // either way the stored value ends up `true`.
            let was_blocked = self.is_blocked.swap(true, Ordering::SeqCst);
            if !was_blocked {
                error!("cobalt sender drops a event/events: either buffer is full or no receiver is waiting");
            }
        } else {
            // Equivalent replacement for `compare_and_swap(true, false, _)`.
            let was_blocked = self.is_blocked.swap(false, Ordering::SeqCst);
            if was_blocked {
                info!("cobalt sender recovers and resumes sending")
            }
        }
    }
    impl_wrapped_methods! {
        #[CountEvent]
        log_event_count(period_duration_micros: i64, count: i64)
        #[ElapsedMicros]
        log_elapsed_time(elapsed_micros: i64)
        #[Fps]
        log_frame_rate(fps: f32)
        #[MemoryBytesUsed]
        log_memory_usage(bytes: i64)
        #[IntHistogram]
        log_int_histogram(values: Vec<HistogramBucket>)
    }
}
/// Allows logging to cobalt with component strings
///
/// Component strings are relatively uncommon, so this is a sidecar struct that contains
/// methods with component string arguments. To access it, you call
/// `CobaltSender::with_component()` followed by the method. Only event types that accept
/// component strings are present on this struct.
pub struct CobaltSenderWithComponent<'a>(&'a mut CobaltSender);
/// Generates the component-accepting logging methods on
/// `CobaltSenderWithComponent`.
///
/// Each `#[Variant] name(args...) => payload` entry expands to a public method
/// that builds a `CobaltEvent` with the given payload expression and forwards
/// it to `CobaltSender::log_event_value`. A doc comment naming the
/// `EventPayload` variant is generated via `gen_comment!`.
macro_rules! impl_log_methods{
    ($(#[$variant:ident] $name:ident($($arg:ident: $ty:ty),*) => $payload:expr)*) => {
        $(
            gen_comment!{
                concat! {
                    "Logs a CobaltEvent of type `EventPayload::",
                    stringify!($variant),
                    "` with a component string"
                },
                pub fn $name<Codes, S, Component>(
                    &mut self,
                    metric_id: u32,
                    event_codes: Codes,
                    component: Component,
                    $($arg: $ty),*
                ) where
                    Codes: AsEventCodes,
                    S: Into<String>,
                    Component: Into<Option<S>>
                {
                    self.0.log_event_value(CobaltEvent {
                        metric_id,
                        component: component.into().map(|s| s.into()),
                        event_codes: event_codes.as_event_codes(),
                        payload: $payload
                    })
                }
            }
        )*
    }
}
impl<'a> CobaltSenderWithComponent<'a> {
    // All methods here are generated by `impl_log_methods!`; each entry maps
    // the method's extra arguments to the corresponding `EventPayload` variant.
    impl_log_methods! {
        #[CountEvent]
        log_event_count(period_duration_micros: i64, count: i64) => {
            EventPayload::EventCount(CountEvent {
                period_duration_micros,
                count,
            })
        }
        #[ElapsedMicros]
        log_elapsed_time(elapsed_micros: i64) => {
            EventPayload::ElapsedMicros(elapsed_micros)
        }
        #[Fps]
        log_frame_rate(fps: f32) => {
            EventPayload::Fps(fps)
        }
        #[MemoryBytesUsed]
        log_memory_usage(bytes: i64) => {
            EventPayload::MemoryBytesUsed(bytes)
        }
        #[IntHistogram]
        log_int_histogram(values: Vec<HistogramBucket>) => {
            EventPayload::IntHistogram(values)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Round-trips one event of every payload variant through the channel,
    // exercising both the componentless wrappers and the `with_component()`
    // sidecar methods, and checks the exact `CobaltEvent` received.
    #[test]
    fn test_cobalt_sender() {
        let (sender, mut receiver) = mpsc::channel(1);
        let mut sender = CobaltSender::new(sender);
        sender.log_event(1, 1);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 1,
                event_codes: vec![1],
                component: None,
                payload: EventPayload::Event(fidl_fuchsia_cobalt::Event {}),
            }
        );
        // `()` as event codes produces an empty event-code vector.
        sender.log_event_count(2, (), 10, 1);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 2,
                event_codes: vec![],
                component: None,
                payload: EventPayload::EventCount(CountEvent {
                    period_duration_micros: 10,
                    count: 1
                })
            }
        );
        sender.with_component().log_event_count(2, (), "test", 11, 2);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 2,
                event_codes: vec![],
                component: Some("test".to_owned()),
                payload: EventPayload::EventCount(CountEvent {
                    period_duration_micros: 11,
                    count: 2
                })
            }
        );
        sender.log_elapsed_time(3, [1, 2], 30);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 3,
                event_codes: vec![1, 2],
                component: None,
                payload: EventPayload::ElapsedMicros(30),
            }
        );
        sender.with_component().log_elapsed_time(3, [1, 2], "test".to_owned(), 30);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 3,
                event_codes: vec![1, 2],
                component: Some("test".to_owned()),
                payload: EventPayload::ElapsedMicros(30),
            }
        );
        sender.log_frame_rate(4, [1, 2, 3, 4], 10.0);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 4,
                event_codes: vec![1, 2, 3, 4],
                component: None,
                payload: EventPayload::Fps(10.0),
            }
        );
        sender.with_component().log_frame_rate(4, (), "testing", 100.0);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 4,
                event_codes: vec![],
                component: Some("testing".to_owned()),
                payload: EventPayload::Fps(100.0),
            }
        );
        sender.log_memory_usage(5, [1, 2, 3, 4, 5], 100);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 5,
                event_codes: vec![1, 2, 3, 4, 5],
                component: None,
                payload: EventPayload::MemoryBytesUsed(100),
            }
        );
        sender.with_component().log_memory_usage(5, [1, 2, 3, 4, 5], "a test", 100);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 5,
                event_codes: vec![1, 2, 3, 4, 5],
                component: Some("a test".to_owned()),
                payload: EventPayload::MemoryBytesUsed(100),
            }
        );
        sender.log_string(6, "A STRING EVENT");
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 6,
                event_codes: vec![],
                component: None,
                payload: EventPayload::StringEvent("A STRING EVENT".to_owned()),
            }
        );
        sender.log_int_histogram(4, [1, 2, 3], vec![HistogramBucket { index: 2, count: 2 }]);
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 4,
                event_codes: vec![1, 2, 3],
                component: None,
                payload: EventPayload::IntHistogram(vec![HistogramBucket { index: 2, count: 2 }]),
            }
        );
        sender.with_component().log_int_histogram(
            4,
            [1, 2, 3],
            "Component",
            vec![HistogramBucket { index: 2, count: 2 }],
        );
        assert_eq!(
            receiver.try_next().unwrap().unwrap(),
            CobaltEvent {
                metric_id: 4,
                event_codes: vec![1, 2, 3],
                component: Some("Component".to_owned()),
                payload: EventPayload::IntHistogram(vec![HistogramBucket { index: 2, count: 2 }]),
            }
        );
    }
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
// Machine-generated WinRT bindings for 3D-scene types (windows-rs style
// output; do not edit by hand). Runtime classes are opaque COM pointers;
// enums are `#[repr(transparent)]` i32 newtypes with associated constants.
#[link(name = "windows")]
extern "system" {}
#[repr(transparent)]
pub struct SceneAlphaMode(pub i32);
impl SceneAlphaMode {
    pub const Opaque: Self = Self(0i32);
    pub const AlphaTest: Self = Self(1i32);
    pub const Blend: Self = Self(2i32);
}
impl ::core::marker::Copy for SceneAlphaMode {}
impl ::core::clone::Clone for SceneAlphaMode {
    fn clone(&self) -> Self {
        *self
    }
}
#[repr(transparent)]
pub struct SceneAttributeSemantic(pub i32);
impl SceneAttributeSemantic {
    pub const Index: Self = Self(0i32);
    pub const Vertex: Self = Self(1i32);
    pub const Normal: Self = Self(2i32);
    pub const TexCoord0: Self = Self(3i32);
    pub const TexCoord1: Self = Self(4i32);
    pub const Color: Self = Self(5i32);
    pub const Tangent: Self = Self(6i32);
}
impl ::core::marker::Copy for SceneAttributeSemantic {}
impl ::core::clone::Clone for SceneAttributeSemantic {
    fn clone(&self) -> Self {
        *self
    }
}
pub type SceneBoundingBox = *mut ::core::ffi::c_void;
pub type SceneComponent = *mut ::core::ffi::c_void;
pub type SceneComponentCollection = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct SceneComponentType(pub i32);
impl SceneComponentType {
    pub const MeshRendererComponent: Self = Self(0i32);
}
impl ::core::marker::Copy for SceneComponentType {}
impl ::core::clone::Clone for SceneComponentType {
    fn clone(&self) -> Self {
        *self
    }
}
pub type SceneMaterial = *mut ::core::ffi::c_void;
pub type SceneMaterialInput = *mut ::core::ffi::c_void;
pub type SceneMesh = *mut ::core::ffi::c_void;
pub type SceneMeshMaterialAttributeMap = *mut ::core::ffi::c_void;
pub type SceneMeshRendererComponent = *mut ::core::ffi::c_void;
pub type SceneMetallicRoughnessMaterial = *mut ::core::ffi::c_void;
pub type SceneModelTransform = *mut ::core::ffi::c_void;
pub type SceneNode = *mut ::core::ffi::c_void;
pub type SceneNodeCollection = *mut ::core::ffi::c_void;
pub type SceneObject = *mut ::core::ffi::c_void;
pub type ScenePbrMaterial = *mut ::core::ffi::c_void;
pub type SceneRendererComponent = *mut ::core::ffi::c_void;
pub type SceneSurfaceMaterialInput = *mut ::core::ffi::c_void;
pub type SceneVisual = *mut ::core::ffi::c_void;
#[repr(transparent)]
pub struct SceneWrappingMode(pub i32);
impl SceneWrappingMode {
    pub const ClampToEdge: Self = Self(0i32);
    pub const MirroredRepeat: Self = Self(1i32);
    pub const Repeat: Self = Self(2i32);
}
impl ::core::marker::Copy for SceneWrappingMode {}
impl ::core::clone::Clone for SceneWrappingMode {
    fn clone(&self) -> Self {
        *self
    }
}
|
use std::collections::HashSet;
/// Returns the first running total (frequency) that is reached twice while
/// repeatedly cycling through the newline-separated list of signed changes.
///
/// The starting frequency `0` counts as already seen, so an input like
/// `"+1\n-1"` yields `0`. Panics if any line is not a valid `i32`.
/// Note: like the original, this loops forever if no frequency ever repeats.
fn first_frequency_reached_twice(input: String) -> i32 {
    let changes = input
        .split('\n')
        .map(|x| x.parse::<i32>().expect("each line must be a signed integer"))
        .cycle();
    let mut seen = std::collections::HashSet::new();
    let mut freq: i32 = 0;
    seen.insert(freq);
    for change in changes {
        freq += change;
        // `insert` returns `false` when the value was already present,
        // avoiding the separate `contains` + `insert` double lookup.
        if !seen.insert(freq) {
            break;
        }
    }
    freq
}
/// Reads the puzzle input embedded at compile time and prints the first
/// frequency that is reached twice.
fn main() {
    let input = include_str!("input.txt").into();
    println!("{}", first_frequency_reached_twice(input));
}
#[test]
fn test_01b() {
    // (puzzle input, expected first repeated frequency) — the worked examples
    // from the Advent of Code day 1, part 2 problem statement.
    let cases = [
        ("+1\n-1", 0),
        ("+3\n+3\n+4\n-2\n-4", 10),
        ("-6\n+3\n+8\n+5\n-6", 5),
        ("+7\n+7\n-2\n-7\n-4", 14),
    ];
    for &(input, expected) in cases.iter() {
        assert_eq!(first_frequency_reached_twice(input.into()), expected);
    }
}
|
//! Implementation for creating instances for deployed contracts.
use crate::errors::DeployError;
use crate::future::CompatCallFuture;
use futures::compat::Future01CompatExt;
use pin_project::pin_project;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use web3::api::Web3;
use web3::Transport;
/// A factory trait for deployable contract instances. This trait provides
/// functionality for creating instances of a contract type for a given network
/// ID.
///
/// This allows generated contracts to be deployable without having to create
/// new builder and future types.
pub trait FromNetwork<T: Transport>: Sized {
    /// Context passed to the `Deployments`.
    type Context;
    /// Create a contract instance for the specified network. This method should
    /// return `None` when no deployment can be found for the specified network
    /// ID.
    fn from_network(web3: Web3<T>, network_id: &str, cx: Self::Context) -> Option<Self>;
}
/// Future for creating a deployed contract instance.
#[must_use = "futures do nothing unless you `.await` or poll them"]
#[pin_project]
pub struct DeployedFuture<T, I>
where
    T: Transport,
    I: FromNetwork<T>,
{
    /// The deployment arguments (`web3` handle plus factory context), used to
    /// locate the contract address from a network ID; consumed exactly once
    /// when the network ID resolves.
    args: Option<(Web3<T>, I::Context)>,
    /// Underlying future for retrieving the network ID.
    #[pin]
    network_id: CompatCallFuture<T, String>,
    /// Marker tying this future to the instance type it produces; no `I` is
    /// actually stored.
    _instance: PhantomData<Box<I>>,
}
impl<T, I> DeployedFuture<T, I>
where
    T: Transport,
    I: FromNetwork<T>,
{
    /// Construct a new future that resolves when a deployed contract is located
    /// from a `web3` provider and artifact data.
    pub fn new(web3: Web3<T>, context: I::Context) -> Self {
        // Start the `net_version` call right away; the provider and context
        // are stashed until the network ID comes back.
        let network_id = web3.net().version().compat();
        Self {
            args: Some((web3, context)),
            network_id,
            _instance: PhantomData,
        }
    }
}
impl<T, I> Future for DeployedFuture<T, I>
where
    T: Transport,
    I: FromNetwork<T>,
{
    type Output = Result<I, DeployError>;
    /// Polls the inner `net_version` call; once the network ID arrives, the
    /// stored `(web3, context)` pair is consumed to build the instance.
    ///
    /// Panics if polled again after completion, because `args` has already
    /// been taken.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        self.as_mut()
            .project()
            .network_id
            .poll(cx)
            .map(|network_id| {
                let network_id = network_id?;
                // Taking `args` makes completion one-shot.
                let (web3, context) = self.args.take().expect("called more than once");
                I::from_network(web3, &network_id, context).ok_or(DeployError::NotFound(network_id))
            })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::contract::{Deployments, Instance};
    use crate::test::prelude::*;
    use ethcontract_common::truffle::Network;
    use ethcontract_common::Artifact;
    use web3::types::H256;
    // Convenience alias: a deployed future resolving to a generic `Instance`.
    type InstanceDeployedFuture<T> = DeployedFuture<T, Instance<T>>;
    // Happy path: the artifact knows network "42", so resolving the network
    // ID yields an instance carrying that network's address and tx hash.
    #[test]
    fn deployed() {
        let mut transport = TestTransport::new();
        let web3 = Web3::new(transport.clone());
        let network_id = "42";
        let address = addr!("0x0102030405060708091011121314151617181920");
        let transaction_hash = Some(H256::repeat_byte(0x42));
        let artifact = {
            let mut artifact = Artifact::empty();
            artifact.networks.insert(
                network_id.to_string(),
                Network {
                    address,
                    transaction_hash,
                },
            );
            artifact
        };
        transport.add_response(json!(network_id)); // get network ID response
        let networks = Deployments::new(artifact);
        let instance = InstanceDeployedFuture::new(web3, networks)
            .immediate()
            .expect("successful deployment");
        // Exactly one RPC call (`net_version`) should have been issued.
        transport.assert_request("net_version", &[]);
        transport.assert_no_more_requests();
        assert_eq!(instance.address(), address);
        assert_eq!(instance.transaction_hash(), transaction_hash);
    }
    // Error path: an empty artifact has no deployment for the resolved
    // network, so the future must fail with `NotFound("42")`.
    #[test]
    fn deployed_not_found() {
        let mut transport = TestTransport::new();
        let web3 = Web3::new(transport.clone());
        let network_id = "42";
        transport.add_response(json!(network_id)); // get network ID response
        let networks = Deployments::new(Artifact::empty());
        let err = InstanceDeployedFuture::new(web3, networks)
            .immediate()
            .expect_err("unexpected success getting deployed contract");
        transport.assert_request("net_version", &[]);
        transport.assert_no_more_requests();
        assert!(
            match &err {
                DeployError::NotFound(id) => id == network_id,
                _ => false,
            },
            "expected network {} not found error but got '{:?}'",
            network_id,
            err
        );
    }
}
|
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use serde::de::{self, Deserialize, Deserializer};
use serde::ser::{Serialize, Serializer};
use super::{FilesystemId, OutputId};
use crate::hash::Hash;
use crate::name::Name;
/// Unique identifier of a package manifest, rendered as `name@version-hash`.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct ManifestId {
    // Package name (validated by the `Name` type's parser).
    name: Name,
    // Version string; stored verbatim, no validation is applied here.
    version: String,
    // Content hash of the manifest.
    hash: Hash,
}
impl ManifestId {
    /// Assembles an ID from already-validated parts.
    pub const fn new(name: Name, version: String, hash: Hash) -> Self {
        ManifestId { name, version, hash }
    }

    /// Parses each component from its string form. `name` and `hash` go
    /// through their `FromStr` impls; the version is taken verbatim.
    pub fn parse<S: AsRef<str>>(name: S, version: S, hash: S) -> Result<Self, ()> {
        let name = name.as_ref().parse()?;
        let hash = hash.as_ref().parse()?;
        let version = version.as_ref().into();
        Ok(ManifestId { name, version, hash })
    }

    /// Package name as a string slice.
    #[inline]
    pub fn name(&self) -> &str {
        self.name.as_str()
    }

    /// Version component as a string slice.
    #[inline]
    pub fn version(&self) -> &str {
        self.version.as_str()
    }

    /// Content hash of the manifest.
    #[inline]
    pub const fn hash(&self) -> &Hash {
        &self.hash
    }

    /// True when `output_id` refers to the same package name and version.
    pub fn is_same_package(&self, output_id: &OutputId) -> bool {
        self.name.as_str() == output_id.name() && self.version.as_str() == output_id.version()
    }
}
impl Display for ManifestId {
    /// Renders the canonical `name@version-hash` form (the inverse of `FromStr`).
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        fmt.write_fmt(format_args!("{}@{}-{}", self.name, self.version, self.hash))
    }
}
impl FilesystemId for ManifestId {
    /// Recovers an ID from a manifest file path by parsing the file stem;
    /// the `.toml` extension is ignored.
    fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, ()> {
        let stem = path
            .as_ref()
            .file_stem()
            .and_then(|raw| raw.to_str())
            .ok_or(())?;
        stem.parse()
    }

    /// Builds the on-disk file name: the display form plus a `.toml` extension.
    fn to_path(&self) -> PathBuf {
        PathBuf::from(format!("{}.toml", self))
    }
}
impl FromStr for ManifestId {
    type Err = ();

    /// Parses the canonical `name@version-hash` form. Both separators are
    /// searched from the right so that earlier `-` or `@` characters in the
    /// name/version do not confuse the split.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The last '-' separates the hash from the rest.
        let mut parts = s.rsplitn(2, '-');
        let hash = parts.next().ok_or(())?;
        let name_and_version = parts.next().ok_or(())?;
        // The last '@' separates the version from the name.
        let mut parts = name_and_version.rsplitn(2, '@');
        let version = parts.next().ok_or(())?;
        let name = parts.next().ok_or(())?;
        ManifestId::parse(name, version, hash)
    }
}
impl PartialEq<str> for ManifestId {
fn eq(&self, other: &str) -> bool {
let s = self.to_string();
s.as_str() == other
}
}
impl PartialEq<&'_ str> for ManifestId {
fn eq(&self, other: &&str) -> bool {
self == *other
}
}
impl PartialEq<ManifestId> for str {
fn eq(&self, other: &ManifestId) -> bool {
other.to_string().as_str() == self
}
}
impl PartialEq<ManifestId> for &'_ str {
fn eq(&self, other: &ManifestId) -> bool {
other == self
}
}
impl<'de> Deserialize<'de> for ManifestId {
    /// Deserializes an ID from its canonical string form.
    ///
    /// Deserializes into an owned `String` rather than `&str`: borrowed-str
    /// deserialization fails with an invalid-type error for sources that
    /// cannot hand out borrows (e.g. `serde_json::from_reader`, or strings
    /// containing escapes).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s: String = Deserialize::deserialize(deserializer)?;
        ManifestId::from_str(&s).map_err(|_err| de::Error::custom("failed to deserialize"))
    }
}
impl Serialize for ManifestId {
    /// Serializes the canonical `name@version-hash` string form.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `String::serialize` forwards to `serialize_str`; call it directly.
        serializer.serialize_str(&self.to_string())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    const HASH: &str = "fc3j3vub6kodu4jtfoakfs5xhumqi62m";
    const EXAMPLE_ID: &str = "foobar@1.0.0-fc3j3vub6kodu4jtfoakfs5xhumqi62m";

    // Compile-time check that the type can be shared across threads.
    #[test]
    fn is_send_and_sync() {
        fn require_send_sync<T: Send + Sync>() {}
        require_send_sync::<ManifestId>();
    }

    #[test]
    fn path_ends_with_toml() {
        let id = EXAMPLE_ID.parse::<ManifestId>().expect("Failed to parse ID");
        let path = id.to_path();
        assert_eq!(path.extension().and_then(|s| s.to_str()), Some("toml"));
    }

    #[test]
    fn parse_from_string() {
        let expected = ManifestId::parse("foobar", "1.0.0", HASH).expect("Failed to init ID");
        let actual = EXAMPLE_ID.parse::<ManifestId>().expect("Failed to parse ID");
        assert_eq!(expected, actual);
        assert_eq!(expected.name(), actual.name());
        assert_eq!(expected.version(), actual.version());
        assert_eq!(expected.hash(), actual.hash());
    }

    #[test]
    fn parse_roundtrip() {
        let original = EXAMPLE_ID.parse::<ManifestId>().expect("Failed to parse ID");
        let reparsed = original
            .to_string()
            .parse::<ManifestId>()
            .expect("Failed to parse ID from text");
        assert_eq!(original, reparsed);
    }
}
|
use crate::bing_maps::MapItem;
use std::{collections::HashMap, path::PathBuf};
use clap::Parser;
use tokio::{
fs::{create_dir, read_dir, File},
io::{AsyncReadExt, AsyncWriteExt},
};
/// Command-line arguments for the `clean` subcommand.
#[derive(Clone, Parser)]
pub struct CleanArgs {
    // Directory containing the raw `.json` input files.
    #[clap(value_parser)]
    input_dir: String,
    // Directory the cleaned files are written to; must not already exist
    // (`create_dir` fails otherwise).
    #[clap(value_parser)]
    output_dir: String,
}
/// Cleans every `.json` file in the input directory into the (newly created)
/// output directory, keeping file names unchanged.
pub async fn clean(cli: CleanArgs) -> anyhow::Result<()> {
    let src_dir = PathBuf::from(cli.input_dir);
    let dst_dir = PathBuf::from(cli.output_dir);
    // Fails if the destination already exists, on purpose.
    create_dir(&dst_dir).await?;
    let mut entries = read_dir(&src_dir).await?;
    while let Some(entry) = entries.next_entry().await? {
        let file_name = entry
            .file_name()
            .into_string()
            .map_err(|_| anyhow::Error::msg("failed to convert strings"))?;
        // Skip hidden files and anything that is not a .json file.
        let is_hidden = file_name.starts_with('.');
        let is_json = file_name.to_lowercase().ends_with(".json");
        if is_hidden || !is_json {
            continue;
        }
        clean_file(entry.path(), dst_dir.join(entry.file_name())).await?;
    }
    Ok(())
}
/// Rewrites one file, keeping only the items belonging to the most frequent
/// chain ID; items are written through unchanged when no item has a chain ID.
pub async fn clean_file(src: PathBuf, dst: PathBuf) -> anyhow::Result<()> {
    let items = read_map_items(src).await?;
    // Tally how many items carry each chain ID (items without one are
    // ignored). Count using the entry() API, as presented in the docs:
    // https://doc.rust-lang.org/std/collections/index.html#counting-the-number-of-times-each-character-in-a-string-occurs
    let mut chain_counts = HashMap::new();
    for item in &items {
        if let Some(id) = &item.chain_id {
            *chain_counts.entry(id).or_insert(0) += 1;
        }
    }
    // Pick the most frequent chain ID; earlier entries win ties, exactly as
    // the original reduce did.
    let dominant_chain = chain_counts
        .into_iter()
        .reduce(|best, candidate| if candidate.1 > best.1 { candidate } else { best })
        .map(|(chain_id, _)| chain_id);
    // Only filter by chain ID if some chain was present.
    match dominant_chain {
        Some(chain_id) => {
            let kept = items
                .iter()
                .filter(|x| x.chain_id.as_deref() == Some(chain_id));
            write_map_items(dst, kept).await
        }
        None => write_map_items(dst, items.iter()).await,
    }
}
/// Reads a file of newline-delimited JSON `MapItem`s.
///
/// Empty lines (e.g. from a trailing newline) are skipped; the first
/// malformed line aborts with the serde error.
pub async fn read_map_items(src: PathBuf) -> anyhow::Result<Vec<MapItem>> {
    let mut reader = File::open(src).await?;
    let mut data = String::new();
    reader.read_to_string(&mut data).await?;
    // The previous version chained a no-op `.into_iter()` on an iterator,
    // tested `x.len() > 0` instead of `!x.is_empty()`, and re-borrowed `&x`;
    // behavior is unchanged here.
    let items = data
        .split('\n')
        .filter(|line| !line.is_empty())
        .map(|line| serde_json::from_str(line))
        .collect::<Result<Vec<_>, _>>()?;
    Ok(items)
}
/// Writes `items` to `path` as newline-delimited JSON, one item per line,
/// flushing the file before returning.
pub async fn write_map_items(
    path: PathBuf,
    items: impl Iterator<Item = &MapItem>,
) -> anyhow::Result<()> {
    let mut out = File::create(path).await?;
    for item in items {
        let mut line = serde_json::to_string(item)?;
        line.push('\n');
        out.write_all(line.as_bytes()).await?;
    }
    // Flush explicitly; dropping the handle does not guarantee delivery.
    out.flush().await?;
    Ok(())
}
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 27282: Example 2: This sidesteps the AST checks disallowing
// mutable borrows in match guards by hiding the mutable borrow in a
// guard behind a move (of the mutably borrowed match input) within a
// closure.
//
// This example is not rejected by AST borrowck (and then reliably
// reaches the panic code when executed), despite the compiler warning
// about that match arm being unreachable.
#![feature(nll)]
fn main() {
    // NOTE(review): this body is deliberately ill-formed — it exercises the
    // issue described above, and the `//~ ERROR` lines are compiletest
    // expectations. Do not "fix" the code.
    let b = &mut true;
    match b {
        &mut false => {},
        _ if { (|| { let bar = b; *bar = false; })();
        //~^ ERROR cannot move out of `b` because it is borrowed [E0505]
               false } => { },
        &mut true => { println!("You might think we should get here"); },
        //~^ ERROR use of moved value: `*b` [E0382]
        _ => panic!("surely we could never get here, since rustc warns it is unreachable."),
    }
}
|
//! Gestionnaire des parties virtuelles de puissance 4
//!
//! # Fonctionnalités
//!
//! Fournit un ensemble d’outils afin de gérer une partie de puissance 4.
//!
//! Ce dernier fournit trois éléments conçus pour cela :
//! * L’objet [`Engine`] permettant de gérer une partie de puissance 4 ainsi que les interactions entre les joueurs et le jeu.
//! * L’objet [`Area`] correspondant à l’aire de jeu (ici le plateau virtuel de puissance 4)
//! * Le trait [`Interface`] permettant d’implémenter les fonctions essentielles entre l’interface utilisateur et le jeu.
//!
//! # Exemple
//!
//! Voici un exemple faisant jouer deux intelligences artificielles de manière aléatoire:
//!
//! ```rust
//! extern crate getrandom;
//!
//! mod connect_four;
//!
//! use crate::connect_four::{Interface, Area, State};
//!
//! pub struct RandomBot {
//! name : String
//! }
//!
//! impl RandomBot {
//! pub fn new(name : &str) -> Self {
//! Self {
//! name: String::from(name)
//! }
//! }
//! }
//!
//! impl Interface for RandomBot {
//!     fn play(&mut self, area: &Area, _token: State) -> usize {
//! let available = area.get_available_columns();
//! const SZ : usize = std::mem::size_of::<usize>();
//!
//! let mut idx_a : [u8; SZ] = [0; SZ];
//!
//! getrandom::getrandom(&mut idx_a).unwrap();
//!
//! let idx : usize = unsafe { std::mem::transmute::<[u8; SZ], usize>(idx_a) } % available.len();
//!
//! return available[idx];
//! }
//!
//! fn name(&self) -> String {
//! return self.name.clone();
//! }
//! }
//!
//! fn main() {
//!     let p1 = std::cell::RefCell::new(RandomBot::new("Joueur 1"));
//!     let p2 = std::cell::RefCell::new(RandomBot::new("Joueur 2"));
//!
//!     let mut game = crate::connect_four::Engine::new(&p1, &p2);
//!
//! if let Ok(v) = game.play() {
//! println!("{}", game.get_disposition());
//!
//! if let Some(p) = v {
//! println!("{} a gagné", p);
//! } else {
//! println!("Match nul");
//! }
//! }
//! }
//! ```
//!
//! [`Engine`]: struct.Engine.html
//! [`Area`]: struct.Area.html
//! [`Interface`]: trait.Interface.html
use std::fmt::{Debug, Formatter, Display};
use std::cell::RefCell;
/// Number of rows on the board.
///
/// Number of tokens that can be stacked vertically.
///
/// Directly defines the play area; defaults to 6.
pub const AREA_ROWS : usize = 6;
/// Number of columns on the board.
///
/// Number of tokens that can sit side by side horizontally.
///
/// Directly defines the play area; defaults to 7.
pub const AREA_COLS : usize = 7;
/// Number of tokens to align in order to win.
///
/// This is how many tokens must be aligned horizontally, vertically or
/// diagonally for one of the two players to win.
pub const VICTORY_NUMBER : usize = 4;
/// A [`Result`] type specialized for game-engine operations.
///
/// This type is used throughout the [`connect_four`] module for every
/// operation that can raise an error.
///
/// The alias exists to avoid spelling out [`connect_four::Error`] everywhere.
///
/// [`Result`]: std::result::Result
/// [`connect_four`]: self
/// [`connect_four::Error`]: Error
pub type Result<T> = std::result::Result<T, Error>;
/// State of a single cell in the play area.
#[derive(Eq, PartialEq, Copy, Clone)]
pub enum State {
    /// The cell holds no token. This value is internal to the `Area` object.
    NoToken,
    /// The cell holds a red token.
    RedToken,
    /// The cell holds a yellow token.
    YellowToken,
}
/// Errors that the objects of this module can raise.
pub enum Error {
    /// The column identifier is invalid. It must be between 0 and `AREA_COLS-1`.
    InvalidColumn,
    /// The requested state does not correspond to a token.
    NotAToken,
    /// The column whose modification was requested is already full.
    FilledColumn,
    /// The player identifier is invalid (must be 1 or 2, as enforced by
    /// `Engine::set_player`).
    InvalidPlayerId(usize),
}
/// Manages Connect Four games.
///
/// It provides the following features:
/// * Generate a play area,
/// * Reset it,
/// * Let players or artificial intelligences interact through a single
/// interface.
pub struct Engine<'a> {
    #[doc(hidden)]
    area : Area,
    #[doc(hidden)]
    player_one_interface : &'a RefCell<dyn Interface>,
    #[doc(hidden)]
    player_two_interface : &'a RefCell<dyn Interface>,
}
/// Play area.
///
/// It provides the following features:
/// * Manage token insertion,
/// * Check the alignment of four identical tokens.
#[derive(Clone)]
pub struct Area {
    // Indexed [column][row]; row 0 is where tokens land first.
    #[doc(hidden)]
    area : [[State; AREA_ROWS]; AREA_COLS]
}
/// Interface between the players and the game.
///
/// Declares the functions required for a game to run properly.
pub trait Interface {
    /// Asks the player to play.
    ///
    /// # Arguments
    ///
    /// * area : A reference to the current play area. It can be cloned to try
    /// out moves, or displayed by the player
    /// * token : The token the player is playing
    ///
    /// # Returns
    ///
    /// The index of the column to drop the token into. It must be between
    /// 0 and `AREA_COLS-1`
    fn play(&mut self, area : &Area, token : State) -> usize;
    /// Gives the name assigned to the player.
    ///
    /// # Returns
    ///
    /// The player's name as a string.
    fn name(&self) -> String;
}
impl Default for State {
fn default() -> Self {
State::NoToken
}
}
impl Display for Area {
    /// Renders the board as a text grid: a header of column indices, a
    /// separator line, then one line per row from top to bottom (tokens land
    /// on internal row 0, so the row index is mirrored for display).
    ///
    /// The repeated `if let Err(e) = … { return Err(e) }` blocks were
    /// replaced by the `?` operator; output is unchanged.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Horizontal separator: '-' (0x2D) repeated, 5 chars per column + 1.
        let sep = String::from_utf8(vec![0x2D;5*AREA_COLS+1]).unwrap();
        for col in 0..AREA_COLS {
            write!(f, "|{: ^4}", col)?;
        }
        write!(f, "|\n{}\n", sep)?;
        for row in 0..AREA_ROWS {
            for col in 0..AREA_COLS {
                write!(f, "|{}", match self.area[col][AREA_ROWS - 1 - row] {
                    State::NoToken => "    ",
                    State::RedToken => " 🔴 ",
                    State::YellowToken => " 🟡 ",
                })?;
            }
            f.write_str("|\n")?;
        }
        f.write_str(&sep)
    }
}
impl Debug for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Error::FilledColumn => f.write_str("La colonne choisie est déjà pleine"),
Error::InvalidColumn => f.write_str("La colonne choisie est invalide"),
Error::NotAToken => f.write_str("L’élément fourni n’est pas un jeton"),
Error::InvalidPlayerId(id) => f.write_str(format!("Le joueur {} n’existe pas. Seul 1 et 2 sont acceptés", id).as_str())
}
}
}
impl std::ops::Index<(usize, usize)> for Area {
    type Output = State;

    /// Read access by `(column, row)`; the row coordinate is mirrored
    /// relative to the internal storage layout.
    fn index(&self, index: (usize, usize)) -> &Self::Output {
        let (col, row) = index;
        &self.area[col][AREA_ROWS - 1 - row]
    }
}
impl std::ops::IndexMut<(usize, usize)> for Area {
    /// Mutable access by `(column, row)`, mirroring the row like `Index`.
    fn index_mut(&mut self, index: (usize, usize)) -> &mut Self::Output {
        let (col, row) = index;
        &mut self.area[col][AREA_ROWS - 1 - row]
    }
}
impl<'a> Engine<'a> {
    /// Creates a new game manager.
    ///
    /// # Arguments
    ///
    /// * `player_one_interface`: Interface to the first player; must implement the `Interface` trait
    /// * `player_two_interface`: Interface to the second player; must implement the `Interface` trait
    ///
    /// # Returns
    ///
    /// A new instance of the `Engine` object
    pub fn new(player_one_interface : &'a RefCell<dyn Interface>, player_two_interface : &'a RefCell<dyn Interface>) -> Self {
        Self {
            area: Area { area: Default::default() },
            player_one_interface, player_two_interface
        }
    }
    /// Replaces the player identified by `player_id` with a new interface.
    ///
    /// # Arguments
    ///
    /// * `player_id`: The player identifier (must be 1 for player one or 2 for
    /// player two)
    /// * `interface`: The interface for the player identified by `player_id`
    ///
    /// # Returns
    ///
    /// An error if the player identifier is wrong
    ///
    /// # Possible errors
    ///
    /// * `InvalidPlayerId`: The player identifier is invalid (must be 1 or 2)
    pub fn set_player(&mut self, player_id : usize, interface : &'a RefCell<dyn Interface>) -> Result<()> {
        if player_id != 1 && player_id != 2 {
            return Err(Error::InvalidPlayerId(player_id));
        }
        if player_id == 1 {
            self.player_one_interface = interface;
        } else {
            self.player_two_interface = interface;
        }
        return Ok(());
    }
    /// Provides a reference to the play area.
    ///
    /// # Returns
    ///
    /// A reference to the play area
    pub fn get_disposition(&self) -> &Area {
        return &self.area;
    }
    /// Resets the play area.
    ///
    /// Once called, every cell is back in the no-token state.
    pub fn reset(&mut self) {
        self.area.area = Default::default();
    }
    /// Plays one game of Connect Four.
    ///
    /// # Returns
    ///
    /// Once the game is over, returns an option holding `None` on a draw or
    /// the winner's name.
    ///
    /// If an error occurs during the game, it is returned through the `Error`
    /// object. These errors are raised when something is wrong with the
    /// players' moves.
    ///
    /// # Possible errors
    ///
    /// * `InvalidColumn` - The column identifier is invalid. It must be between 0 and `AREA_COLS-1`.
    /// * `FilledColumn` - The column whose modification was requested is already full.
    pub fn play(&mut self) -> Result<Option<String>> {
        // `true` means it is player one's turn (yellow); player two is red.
        let mut player = true;
        loop {
            let (token, interface) = if player {
                (State::YellowToken, self.player_one_interface)
            } else {
                (State::RedToken, self.player_two_interface)
            };
            let col = interface.borrow_mut().play(&self.area, token);
            match self.area.set_token(token, col) {
                // `set_token` returns true when the move wins the game.
                Ok(t) => if t { return Ok(Some(interface.borrow().name())) },
                Err(e) => return Err(e),
            }
            // No playable column left: the game is a draw.
            if self.area.get_available_columns().is_empty() {
                return Ok(None);
            }
            player = !player;
        }
    }
}
impl Area {
    /// Collects the list of columns a token can still be dropped into.
    ///
    /// Checks the top row of each column and keeps only the indices of
    /// columns that can still be played.
    ///
    /// # Returns
    ///
    /// The list of non-full columns
    pub fn get_available_columns(&self) -> Vec<usize> {
        let mut ret = Vec::with_capacity(AREA_COLS);
        for i in 0..AREA_COLS {
            // unwrap is safe: `i` is always < AREA_COLS here.
            if !self.is_filled_column(i).unwrap() {
                ret.push(i);
            }
        }
        return ret;
    }
    /// Checks whether the requested column is full.
    ///
    /// # Arguments
    ///
    /// * `column` : The column to check, must be between 0 and `AREA_COLS-1`
    ///
    /// # Returns
    ///
    /// A boolean holding `true` if the column is full and `false` otherwise,
    /// or an error if the value given in `column` is wrong.
    ///
    /// # Possible errors
    /// * `InvalidColumn` - The column identifier is invalid. It must be
    /// between 0 and `AREA_COLS-1`.
    pub fn is_filled_column(&self, column : usize) -> Result<bool> {
        if column >= AREA_COLS {
            return Err(Error::InvalidColumn);
        }
        Ok(State::NoToken != self.area[column][AREA_ROWS-1])
    }
    /// Adds a token to the play area, when possible.
    ///
    /// # Arguments
    ///
    /// * `token` : The token. Must be `State::RedToken` or `State::YellowToken`
    /// * `column` : The column to drop the token into, must be between 0 and `AREA_COLS-1`.
    ///
    /// # Returns
    ///
    /// A boolean holding `true` if the added token wins the game and `false`
    /// otherwise, or an error if the value given by `token` is not a token or
    /// if the value given by `column` is wrong.
    ///
    /// # Possible errors
    ///
    /// * `InvalidColumn` - The column identifier is invalid. It must be
    /// between 0 and `AREA_COLS-1`.
    /// * `NotAToken` - The requested state does not correspond to a token.
    /// * `FilledColumn` - The column whose modification was requested is already full.
    pub fn set_token(&mut self, token : State, column : usize) -> Result<bool> {
        if let State::NoToken = token {
            return Err(Error::NotAToken);
        }
        if column >= AREA_COLS {
            return Err(Error::InvalidColumn);
        }
        if let Some(row) = self.find_available_row(column) {
            self.area[column][row] = token;
            Ok(self.check_victory_from(column, row))
        } else {
            Err(Error::FilledColumn)
        }
    }
    /// Checks whether the play area is empty (first move).
    ///
    /// # Returns
    ///
    /// Returns `true` if the play area is empty and `false` otherwise
    pub fn is_empty(&self) -> bool {
        for col in 0..AREA_COLS {
            // Tokens land on row 0 first, so an empty row 0 means an empty column.
            if self.area[col][0] != State::NoToken {
                return false;
            }
        }
        return true;
    }
    // Checks every direction (column, row, both diagonals) passing through
    // the cell that was just played.
    fn check_victory_from(&self, col : usize, row : usize) -> bool {
        let token = self.area[col][row];
        let a = self.check_linear(token, col, true);
        let b = self.check_linear(token, row, false);
        let c = self.check_diagonal(token, col, row, true);
        let d = self.check_diagonal(token, col, row, false);
        return a || b || c || d
    }
    // Lowest free row of `column`, or `None` when the column is full.
    fn find_available_row(&self, column : usize) -> Option<usize> {
        for i in 0..AREA_ROWS {
            if let State::NoToken = self.area[column][i] {
                return Some(i);
            }
        }
        return None;
    }
    // Scans one whole line for VICTORY_NUMBER consecutive `token`s: the
    // column `pos` when `col` is true, the row `pos` otherwise.
    fn check_linear(&self, token : State, pos : usize, col : bool) -> bool {
        // Cell accessor abstracting over column-wise vs row-wise traversal.
        fn get(area : &[[State; AREA_ROWS]; AREA_COLS], pos : usize, i : usize, col : bool) -> State {
            if col {
                area[pos][i]
            } else {
                area[i][pos]
            }
        }
        // `n` counts the current run of matching tokens; it resets on a miss.
        let mut n = 0;
        let max = if col { AREA_ROWS } else { AREA_COLS };
        for i in 0..max {
            if token == get(&self.area, pos, i, col) {
                n += 1;
            } else {
                n = 0;
            }
            if n >= VICTORY_NUMBER {
                return true;
            }
        }
        return false;
    }
    // Scans the full diagonal passing through (col, row); `decr` selects
    // which of the two diagonal orientations is walked.
    fn check_diagonal(&self, token : State, col : usize, row : usize, decr : bool) -> bool {
        // Cell accessor: row index grows with `i` when `decr`, shrinks otherwise.
        fn get(area : &[[State; AREA_ROWS]; AREA_COLS], origin : (usize, usize), i : usize, decr : bool) -> State {
            if decr {
                area[origin.0 + i][origin.1 + i]
            } else {
                area[origin.0 + i][origin.1 - i]
            }
        }
        // Walk back to the board edge to find the diagonal's starting cell
        // and its total length (diff1 + diff2 + 1 cells).
        let (origin, n_max) = if decr {
            let diff1 = usize::min(col, row);
            let diff2 = usize::min(AREA_COLS - col - 1, AREA_ROWS - row - 1);
            ((col - diff1, row - diff1), diff1 + diff2 + 1)
        } else {
            let diff1 = usize::min(col, AREA_ROWS - row - 1);
            let diff2 = usize::min(AREA_COLS - col - 1, row);
            ((col - diff1, row + diff1), diff1 + diff2 + 1)
        };
        // A diagonal shorter than the winning length can never win.
        if n_max < VICTORY_NUMBER {
            return false;
        }
        let mut n = 0;
        for i in 0..n_max {
            if token == get(&self.area, origin, i, decr) {
                n += 1;
            } else {
                n = 0;
            }
            if n >= VICTORY_NUMBER {
                return true;
            }
        }
        return false;
    }
}
use std::cmp::{min, Ordering};
use std::fmt;
use std::iter::empty;
use itertools::{EitherOrBoth, Itertools};
use crate::{BitPage, BitPageVec};
// @author shailendra.sharma
use crate::bit_page::BitPageWithPosition;
use crate::bit_page_vec::BitPageVecKind;
// use std::time::Instant;
// One page of the bit vector: (page index, 64-bit page contents).
pub type PageItem = (usize, u64);
// Boxed lazy stream of pages.
pub type PageIterator<'a> = Box<dyn Iterator<Item = PageItem> + 'a>;
/// Lazy iterator over the pages of a `BitPageVec`, tagged with the vector
/// kind so that hole pages can be interpreted correctly.
pub struct BitPageVecIter<'a> {
    // Representation kind of the source vector (see `BitPageVecKind`).
    kind: BitPageVecKind,
    // The underlying page stream.
    iter: PageIterator<'a>,
    // NOTE(review): appears to be the (page, bit) position of the final
    // valid bit — confirm against `BitPageVec`.
    last_bit_index: (usize, usize),
}
impl<'a> fmt::Debug for BitPageVecIter<'a> {
    /// Only the kind is informative; the boxed iterator cannot be displayed.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_fmt(format_args!("BitPageVecIter(kind={:?})", self.kind))
    }
}
impl<'a> BitPageVecIter<'a> {
    /// Wraps a page stream together with its kind and last-bit position.
    pub fn new(kind: BitPageVecKind, iter: PageIterator, last_bit_index: (usize, usize)) -> BitPageVecIter {
        BitPageVecIter {
            kind,
            iter,
            last_bit_index,
        }
    }
    /// Returns the representation kind of this iterator.
    pub fn kind(&self) -> &BitPageVecKind {
        &self.kind
    }
pub fn into_bit_page_vec(self) -> BitPageVec {
// let instant = Instant::now();
// let kind = self.kind;
// if log_enabled!(target: "bit_page_vec_log", Level::Debug) {
// debug!(target: "bit_page_vec_log", "into_bit_page_vec(kind={:?})", self.kind);
// }
let result = match self.kind {
BitPageVecKind::AllZeroes => BitPageVec::all_zeros(self.last_bit_index),
BitPageVecKind::SparseWithZeroesHole => {
let pages = self
.iter
.filter_map(|(page_idx, bit_page)| {
if BitPage::is_zeroes(&bit_page) {
None
} else {
Some(BitPageWithPosition { page_idx, bit_page })
}
})
.collect_vec();
Self::compact_sparse_with_zeroes_hole(pages, self.last_bit_index)
}
BitPageVecKind::AllOnes => BitPageVec::all_ones(self.last_bit_index),
BitPageVecKind::SparseWithOnesHole => {
let pages = self
.iter
.filter_map(|(page_idx, bit_page)| {
if BitPage::is_ones(&bit_page) {
None
} else {
Some(BitPageWithPosition { page_idx, bit_page })
}
})
.collect_vec();
Self::compact_sparse_with_ones_hole(pages, self.last_bit_index)
}
};
// if log_enabled!(target: "bit_page_vec_log", Level::Debug) {
// debug!(target: "bit_page_vec_log", "into_bit_page_vec(kind={:?}):: time taken={:?} and result={:?}", kind, instant.elapsed(), result);
// }
result
}
pub fn not(self) -> BitPageVecIter<'a> {
match self.kind {
BitPageVecKind::AllZeroes => BitPageVec::all_ones(self.last_bit_index).into_iter(),
BitPageVecKind::SparseWithZeroesHole => BitPageVecIter::new(
BitPageVecKind::SparseWithOnesHole,
Box::new(self.iter.map(|(page_idx, bit_page)| (page_idx, !bit_page))),
self.last_bit_index,
),
BitPageVecKind::AllOnes => BitPageVec::all_zeros(self.last_bit_index).into_iter(),
BitPageVecKind::SparseWithOnesHole => BitPageVecIter::new(
BitPageVecKind::SparseWithZeroesHole,
Box::new(self.iter.map(|(page_idx, bit_page)| (page_idx, !bit_page))),
self.last_bit_index,
),
}
}
    /// Lazily ORs two iterators.
    ///
    /// The result kind follows OR semantics: an all-ones or ones-hole operand
    /// dominates, an all-zeroes operand is neutral, and cross-kind merges
    /// produce a ones-hole result.
    pub fn or(first: BitPageVecIter<'a>, second: BitPageVecIter<'a>) -> BitPageVecIter<'a> {
        let result = match first.kind {
            BitPageVecKind::AllZeroes => second,
            BitPageVecKind::SparseWithZeroesHole => match second.kind {
                BitPageVecKind::AllZeroes => first,
                BitPageVecKind::SparseWithZeroesHole => {
                    // merge here... same type with zeroes hole
                    // 0 | 0 => 0
                    // some | 0 => some
                    // 0 | some => some
                    // some | some => or(some)
                    let iter = first.iter.merge_join_by(second.iter, merge_cmp).map(|either| match either {
                        EitherOrBoth::Both((idx_1, mut page_one), (_idx_2, page_two)) => {
                            page_one |= page_two;
                            (idx_1, page_one)
                        }
                        EitherOrBoth::Left((idx, page)) | EitherOrBoth::Right((idx, page)) => (idx, page),
                    });
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithZeroesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
                BitPageVecKind::AllOnes => second,
                BitPageVecKind::SparseWithOnesHole => {
                    // merge here... cross type
                    let iter = first.iter.merge_join_by(second.iter, merge_cmp).filter_map(or_merge_cross_types);
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithOnesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
            },
            BitPageVecKind::AllOnes => first,
            BitPageVecKind::SparseWithOnesHole => match second.kind {
                BitPageVecKind::AllZeroes => first,
                BitPageVecKind::SparseWithZeroesHole => {
                    // merge here... cross type
                    // reversed so the zeroes-hole operand always comes first
                    let iter = second.iter.merge_join_by(first.iter, merge_cmp).filter_map(or_merge_cross_types);
                    // return type would be SparseWithOnesHole
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithOnesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
                BitPageVecKind::AllOnes => second,
                BitPageVecKind::SparseWithOnesHole => {
                    // merge here... same type with ones hole
                    // 1 | 1 => 1
                    // some | 1 => 1
                    // 1 | some => 1
                    // some | some => or(some)
                    // where 1 is hole
                    let iter = first.iter.merge_join_by(second.iter, merge_cmp).filter_map(|either| match either {
                        EitherOrBoth::Both((idx_1, mut page_one), (_idx_2, page_two)) => {
                            page_one |= page_two;
                            Some((idx_1, page_one))
                        }
                        EitherOrBoth::Left(_) | EitherOrBoth::Right(_) => None,
                    });
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithOnesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
            },
        };
        result
    }
    /// Lazily ANDs two iterators.
    ///
    /// The result kind follows AND semantics: an all-zeroes or zeroes-hole
    /// operand dominates, an all-ones operand is neutral, and cross-kind
    /// merges produce a zeroes-hole result.
    pub fn and(first: BitPageVecIter<'a>, second: BitPageVecIter<'a>) -> BitPageVecIter<'a> {
        let result = match first.kind {
            BitPageVecKind::AllZeroes => first, // essentially AllZeroes
            BitPageVecKind::SparseWithZeroesHole => match second.kind {
                BitPageVecKind::AllZeroes => second,
                BitPageVecKind::SparseWithZeroesHole => {
                    // merge here... same type (with zeroes hole)
                    let iter = first.iter.merge_join_by(second.iter, merge_cmp).filter_map(|either| match either {
                        EitherOrBoth::Both((idx_1, mut page_one), (_idx_2, page_two)) => {
                            page_one &= page_two;
                            if BitPage::is_zeroes(&page_one) {
                                None
                            } else {
                                Some((idx_1, page_one))
                            }
                        }
                        EitherOrBoth::Left(_) | EitherOrBoth::Right(_) => None,
                    });
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithZeroesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
                BitPageVecKind::AllOnes => first,
                BitPageVecKind::SparseWithOnesHole => {
                    // merge here... cross type
                    let iter = first.iter.merge_join_by(second.iter, merge_cmp).filter_map(and_merge_cross_types);
                    // return type would be SparseWithZeroesHole
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithZeroesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
            },
            BitPageVecKind::AllOnes => second,
            BitPageVecKind::SparseWithOnesHole => match second.kind {
                BitPageVecKind::AllZeroes => second, // essentially AllZeroes
                BitPageVecKind::SparseWithZeroesHole => {
                    // merge here... cross type
                    // reverse the merge join... so first is always sparse with zeroes and second is always sparse with ones
                    let iter = second.iter.merge_join_by(first.iter, merge_cmp).filter_map(and_merge_cross_types);
                    // return type would be SparseWithZeroesHole
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithZeroesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
                BitPageVecKind::AllOnes => first,
                BitPageVecKind::SparseWithOnesHole => {
                    // merge here... same type (with ones hole)
                    let iter = first.iter.merge_join_by(second.iter, merge_cmp).map(|either| match either {
                        EitherOrBoth::Both((idx_1, mut page_one), (_idx_2, page_two)) => {
                            page_one &= page_two;
                            (idx_1, page_one)
                        }
                        EitherOrBoth::Left((idx, page)) | EitherOrBoth::Right((idx, page)) => (idx, page),
                    });
                    BitPageVecIter::new(
                        BitPageVecKind::SparseWithOnesHole,
                        Box::new(iter),
                        min_last_bit_index(first.last_bit_index, second.last_bit_index),
                    )
                }
            },
        };
        result
    }
/// Packs a `SparseWithZeroesHole` page list into the most economical
/// `BitPageVec` representation.
///
/// * empty list       -> `AllZeroes`
/// * <= 10_000 pages  -> kept as-is (a compaction scan isn't worth it)
/// * dense, mostly-1s -> flipped to the `SparseWithOnesHole` representation
pub(crate) fn compact_sparse_with_zeroes_hole(pages: Vec<BitPageWithPosition>, last_bit_index: (usize, usize)) -> BitPageVec {
    // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
    //     trace!(target: "bit_page_vec_log", "compact_sparse_with_zeroes_hole - pages len={}", pages.len());
    // }
    let result = if pages.is_empty() {
        BitPageVec::all_zeros(last_bit_index)
    } else if pages.len() <= 10_000 {
        BitPageVec::new(BitPageVecKind::SparseWithZeroesHole, Some(pages), last_bit_index)
    } else {
        let start_page = pages[0].page_idx;
        let end_page = pages[pages.len() - 1].page_idx;
        let max_possible_length = (end_page - start_page + 1) as f64;
        let actual_length = pages.len() as f64;
        // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
        //     trace!(target: "bit_page_vec_log", "compact_sparse_with_zeroes_hole - start_page={} end_page={} max_possible_length={} actual_length={}", start_page, end_page, max_possible_length, actual_length);
        // }
        // Flip representations only when the page range is >= 75% populated
        // AND the set bits reach >= 75% of a fully-packed range (64 bits/page);
        // otherwise the ones-hole form would not actually be smaller.
        if actual_length >= 0.75 * max_possible_length
            && BitPageVec::count_ones(Some(&pages)) as f64 >= 0.75 * max_possible_length * 64.0
        {
            // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
            //     trace!(target: "bit_page_vec_log", "compact_sparse_with_zeroes_hole::compacting - ones={}", BitPageVec::count_ones(Some(&pages)));
            // }
            // Walk every page index up to the last page: drop all-ones pages
            // (they become the implicit ones hole) and materialize explicit
            // zero pages for indices absent from the sparse list.
            let pages = (0..=last_bit_index.0)
                .merge_join_by(pages.into_iter(), |page_1_idx, BitPageWithPosition { page_idx: page_2_idx, .. }| {
                    page_1_idx.cmp(page_2_idx)
                })
                .filter_map(|either| {
                    match either {
                        EitherOrBoth::Both(_, BitPageWithPosition { page_idx, bit_page }) => {
                            if BitPage::is_ones(&bit_page) {
                                None
                            } else {
                                Some(BitPageWithPosition { page_idx, bit_page })
                            }
                        }
                        EitherOrBoth::Left(page_idx) => Some(BitPageWithPosition {
                            page_idx,
                            bit_page: BitPage::zeroes(),
                        }),
                        EitherOrBoth::Right(_) => {
                            // this case should not arise: every sparse page index
                            // lies within 0..=last page index
                            None
                        }
                    }
                })
                .collect_vec();
            BitPageVec::new(BitPageVecKind::SparseWithOnesHole, Some(pages), last_bit_index)
        } else {
            BitPageVec::new(BitPageVecKind::SparseWithZeroesHole, Some(pages), last_bit_index)
        }
    };
    // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
    //     trace!(target: "bit_page_vec_log", "compact_sparse_with_zeroes_hole::result={:?}", result);
    // }
    result
}
/// Packs a `SparseWithOnesHole` page list into the most economical
/// `BitPageVec` representation (mirror image of
/// `compact_sparse_with_zeroes_hole`).
///
/// * empty list       -> `AllOnes`
/// * <= 10_000 pages  -> kept as-is (a compaction scan isn't worth it)
/// * dense, mostly-0s -> flipped to the `SparseWithZeroesHole` representation
pub(crate) fn compact_sparse_with_ones_hole(pages: Vec<BitPageWithPosition>, last_bit_index: (usize, usize)) -> BitPageVec {
    // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
    //     trace!(target: "bit_page_vec_log", "compact_sparse_with_ones_hole - pages len={}", pages.len());
    // }
    let result = if pages.is_empty() {
        BitPageVec::all_ones(last_bit_index)
    } else if pages.len() <= 10_000 {
        BitPageVec::new(BitPageVecKind::SparseWithOnesHole, Some(pages), last_bit_index)
    } else {
        let start_page = pages[0].page_idx;
        let end_page = pages[pages.len() - 1].page_idx;
        let max_possible_length = (end_page - start_page + 1) as f64;
        let actual_length = pages.len() as f64;
        // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
        //     debug!(target: "bit_page_vec_log", "compact_sparse_with_ones_hole - start_page={} end_page={} max_possible_length={} actual_length={}", start_page, end_page, max_possible_length, actual_length);
        // }
        // Flip representations only when the page range is >= 75% populated
        // AND the set bits are <= 25% of a fully-packed range (64 bits/page);
        // otherwise the zeroes-hole form would not actually be smaller.
        if actual_length >= 0.75 * max_possible_length
            && BitPageVec::count_ones(Some(&pages)) as f64 <= 0.25 * max_possible_length * 64.0
        {
            // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
            //     debug!(target: "bit_page_vec_log", "compact_sparse_with_ones_hole::compacting - ones={}", BitPageVec::count_ones(Some(&pages)));
            // }
            // Walk every page index up to the last page: drop all-zero pages
            // (they become the implicit zeroes hole) and materialize explicit
            // all-ones pages for indices absent from the sparse list (a missing
            // index in the ones-hole form means "all ones").
            let pages = (0..=last_bit_index.0)
                .merge_join_by(pages.into_iter(), |page_1_idx, BitPageWithPosition { page_idx: page_2_idx, .. }| {
                    page_1_idx.cmp(page_2_idx)
                })
                .filter_map(|either| {
                    match either {
                        EitherOrBoth::Both(_, BitPageWithPosition { page_idx, bit_page }) => {
                            if BitPage::is_zeroes(&bit_page) {
                                None
                            } else {
                                Some(BitPageWithPosition { page_idx, bit_page })
                            }
                        }
                        EitherOrBoth::Left(page_idx) => Some(BitPageWithPosition {
                            page_idx,
                            bit_page: BitPage::ones(),
                        }),
                        EitherOrBoth::Right(_) => {
                            // this case should not arise: every sparse page index
                            // lies within 0..=last page index
                            None
                        }
                    }
                })
                .collect_vec();
            BitPageVec::new(BitPageVecKind::SparseWithZeroesHole, Some(pages), last_bit_index)
        } else {
            BitPageVec::new(BitPageVecKind::SparseWithOnesHole, Some(pages), last_bit_index)
        }
    };
    // if log_enabled!(target: "bit_page_vec_log", Level::Trace) {
    //     trace!(target: "bit_page_vec_log", "compact_sparse_with_ones_hole::result={:?}", result);
    // }
    result
}
}
impl BitPageVec {
    /// Returns a borrowing page iterator over this vector.
    ///
    /// A sparse vector whose `pages` field is `None` degrades to the
    /// corresponding hole-only kind (`AllZeroes` / `AllOnes`).
    pub fn iter(&self) -> BitPageVecIter {
        let last = self.last_bit_index;
        match self.kind {
            BitPageVecKind::AllZeroes => {
                BitPageVecIter::new(BitPageVecKind::AllZeroes, Box::new(empty::<PageItem>()), last)
            }
            BitPageVecKind::AllOnes => {
                BitPageVecIter::new(BitPageVecKind::AllOnes, Box::new(empty::<PageItem>()), last)
            }
            BitPageVecKind::SparseWithZeroesHole => match &self.pages {
                Some(pages) => {
                    let items = pages.iter().map(|p| (p.page_idx, p.bit_page));
                    BitPageVecIter::new(BitPageVecKind::SparseWithZeroesHole, Box::new(items), last)
                }
                // no explicit pages stored -> everything is the zero hole
                None => BitPageVecIter::new(BitPageVecKind::AllZeroes, Box::new(empty::<PageItem>()), last),
            },
            BitPageVecKind::SparseWithOnesHole => match &self.pages {
                Some(pages) => {
                    let items = pages.iter().map(|p| (p.page_idx, p.bit_page));
                    BitPageVecIter::new(BitPageVecKind::SparseWithOnesHole, Box::new(items), last)
                }
                // no explicit pages stored -> everything is the ones hole
                None => BitPageVecIter::new(BitPageVecKind::AllOnes, Box::new(empty::<PageItem>()), last),
            },
        }
    }
    /// Consumes the vector and returns an owning page iterator.
    ///
    /// Same kind-degradation rules as [`BitPageVec::iter`].
    pub fn into_iter<'a>(self) -> BitPageVecIter<'a> {
        let last = self.last_bit_index;
        match self.kind {
            BitPageVecKind::AllZeroes => {
                BitPageVecIter::new(BitPageVecKind::AllZeroes, Box::new(empty::<PageItem>()), last)
            }
            BitPageVecKind::AllOnes => {
                BitPageVecIter::new(BitPageVecKind::AllOnes, Box::new(empty::<PageItem>()), last)
            }
            BitPageVecKind::SparseWithZeroesHole => match self.pages {
                Some(pages) => {
                    let items = pages.into_iter().map(|p| (p.page_idx, p.bit_page));
                    BitPageVecIter::new(BitPageVecKind::SparseWithZeroesHole, Box::new(items), last)
                }
                None => BitPageVecIter::new(BitPageVecKind::AllZeroes, Box::new(empty::<PageItem>()), last),
            },
            BitPageVecKind::SparseWithOnesHole => match self.pages {
                Some(pages) => {
                    let items = pages.into_iter().map(|p| (p.page_idx, p.bit_page));
                    BitPageVecIter::new(BitPageVecKind::SparseWithOnesHole, Box::new(items), last)
                }
                None => BitPageVecIter::new(BitPageVecKind::AllOnes, Box::new(empty::<PageItem>()), last),
            },
        }
    }
}
/// Orders page items by their page index; used as the key for merge joins.
pub(crate) fn merge_cmp(lhs: &PageItem, rhs: &PageItem) -> Ordering {
    let (lhs_idx, _) = lhs;
    let (rhs_idx, _) = rhs;
    lhs_idx.cmp(rhs_idx)
}
#[inline]
// Merge step for OR-ing a zeroes-hole iterator (left) with a ones-hole
// iterator (right). With left hole = 0 and right hole = 1:
//   both absent  -> 1 (the implicit ones hole; result kind is sparse-with-ones)
//   left only    -> page | 1 == 1 -> folded into the hole, so dropped
//   right only   -> 0 | page == page -> kept
//   both present -> OR of the two pages
pub(crate) fn or_merge_cross_types(either: EitherOrBoth<PageItem, PageItem>) -> Option<PageItem> {
    match either {
        EitherOrBoth::Both((idx, mut merged), (_, rhs)) => {
            merged |= rhs;
            Some((idx, merged))
        }
        EitherOrBoth::Right(item) => Some(item),
        EitherOrBoth::Left(_) => None,
    }
}
#[inline]
// Merge step for AND-ing a zeroes-hole iterator (left) with a ones-hole
// iterator (right). With left hole = 0 and right hole = 1:
//   both absent  -> 0 (the implicit zeroes hole; result kind is sparse-with-zeroes)
//   right only   -> 0 & page == 0 -> folded into the hole, so dropped
//   left only    -> page & 1 == page -> kept
//   both present -> AND of the two pages
pub(crate) fn and_merge_cross_types(either: EitherOrBoth<PageItem, PageItem>) -> Option<PageItem> {
    match either {
        EitherOrBoth::Both((idx, mut merged), (_, rhs)) => {
            merged &= rhs;
            Some((idx, merged))
        }
        EitherOrBoth::Left(item) => Some(item),
        EitherOrBoth::Right(_) => None,
    }
}
/// Returns the smaller of two `(page_index, bit_index)` positions,
/// compared lexicographically; ties return `first`.
pub(crate) fn min_last_bit_index(first: (usize, usize), second: (usize, usize)) -> (usize, usize) {
    if second < first {
        second
    } else {
        first
    }
}
|
use std::io;
use std::fs;
use std::path::{Path, PathBuf};
use std::ffi::OsStr;
/// Options controlling how the directory tree is built.
#[derive(Debug, PartialEq)]
pub struct Options{
    pub dir: PathBuf, // root directory the tree is built from
    pub all : bool, // traverse all nodes, including hidden (dot-prefixed) entries
    pub count : bool, // count the number of files and subdirs in dir
    pub files : bool, // show the files in each subdirectory
}
/// Result returned from tree building: the rendered tree text plus
/// the number of subdirectories and files encountered.
pub struct DirData{
    pub tree : String, // rendered tree, one "|---..." line per entry
    pub subdirs : u32, // number of directories visited (excluding the root)
    pub files : u32 // number of files included in the tree
}
impl DirData{
fn new(tree : String) -> Self{
DirData{
tree,
subdirs : 0,
files : 0,
}
}
}
//Recursively renders the entries of `dir` into `data.tree`, updating the
//subdir/file counters as it goes. `depth_str` carries the accumulated "---"
//prefix for the current depth; `depth` is bookkeeping only (not read).
//Entry order follows `fs::read_dir`, which is platform-dependent.
fn treeify_path(
    data : &mut DirData,
    dir : &Path,
    depth : u64,
    depth_str: String,
    all : bool,
    files : bool,
) -> io::Result<()>{
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path : PathBuf = entry.path();
        // Lossy conversion: non-UTF-8 file names are rendered with U+FFFD
        // replacement characters instead of panicking (was `.to_str().unwrap()`).
        let mut path_str = path
            .file_name()
            .unwrap_or_else(|| OsStr::new("")) // in case a path ends in ''
            .to_string_lossy()
            .into_owned();
        // Skip hidden (dot-prefixed) entries unless `all` was requested.
        if path_str.starts_with('.') && !all{
            continue;
        }
        if path.is_dir(){
            path_str.push('/'); //Add slash to directories
            data.tree.push_str(&format!("|{}{}\n", depth_str, path_str));
            data.subdirs += 1;
            treeify_path(
                data,
                path.as_path(),
                depth + 1,
                format!("{}---", depth_str), all, files
            )?;
        }
        else{
            //TODO: Add logic for symlinks
            if !files{
                continue;
            }
            data.files += 1;
            data.tree.push_str(&format!("|{}{}\n", depth_str, path_str));
        }
    }
    Ok(())
}
/// Builds the directory tree for `options.dir` and returns the rendered
/// tree together with subdirectory/file counts.
///
/// # Errors
/// Propagates any `io::Error` raised while reading directories.
pub fn run(options : &Options) -> io::Result<DirData> {
    //Root line: "|<dirname>/". Lossy conversion avoids panicking on
    //non-UTF-8 directory names (was `.to_str().unwrap()`).
    let mut tree_str = String::from("|");
    tree_str.push_str(
        &options.dir.as_path()
            .file_name()
            .unwrap_or_else(|| OsStr::new("")) // In case a path ends in ''
            .to_string_lossy()
    );
    tree_str.push_str("/\n");
    let mut dir_data = DirData::new(tree_str);
    treeify_path(
        &mut dir_data,
        options.dir.as_path(),
        1,
        String::from("---"),
        options.all,
        options.files,
    )?;
    Ok(dir_data)
}
#[cfg(test)]
mod tests{
    use std::fs::{DirBuilder, File};
    use std::fs;
    use super::{Options, Path};
    // Each test gets its OWN fixture root: cargo runs tests in parallel by
    // default, and a shared "./_test" directory made the tests clobber each
    // other's setup/teardown.
    // NOTE(review): expected strings also assume a particular fs::read_dir
    // order, which is platform-dependent — confirm on the target platforms.
    fn setup(root : &Path){
        //Create test data
        let builder = DirBuilder::new();
        builder.create(root).unwrap();
        builder.create(root.join("foo")).unwrap();
        builder.create(root.join("foo1")).unwrap();
        builder.create(root.join(".foo2")).unwrap();
        File::create(root.join("foo.txt")).unwrap();
    }
    fn teardown(root: &Path){
        //Clean up test data
        fs::remove_dir(root.join("foo")).unwrap();
        fs::remove_dir(root.join("foo1")).unwrap();
        fs::remove_dir(root.join(".foo2")).unwrap();
        fs::remove_file(root.join("foo.txt")).unwrap();
        fs::remove_dir(root).unwrap();
    }
    #[test]
    fn test_run(){
        let root = Path::new("./_test_run");
        setup(root);
        let options = Options{
            dir : root.to_path_buf(),
            all : false,
            count : false,
            files : false
        };
        let result = super::run(&options).unwrap();
        let expected = "|_test_run/\n|---foo/\n|---foo1/\n";
        assert_eq!(result.tree, expected);
        teardown(root);
    }
    #[test]
    fn test_run_with_files(){
        let root = Path::new("./_test_files");
        setup(root);
        let options = Options{
            dir : root.to_path_buf(),
            all : false,
            count : false,
            files : true
        };
        let result = super::run(&options).unwrap();
        let expected = "|_test_files/\n|---foo/\n|---foo1/\n|---foo.txt\n";
        assert_eq!(result.tree, expected);
        teardown(root);
    }
    #[test]
    fn test_run_with_hidden_true(){
        let root = Path::new("./_test_hidden");
        setup(root);
        let options = Options{
            dir : root.to_path_buf(),
            all : true,
            count : false,
            files : true
        };
        let result = super::run(&options).unwrap();
        let expected = "|_test_hidden/\n|---.foo2/\n|---foo/\n|---foo1/\n|---foo.txt\n";
        assert_eq!(result.tree, expected);
        teardown(root);
    }
    #[test]
    fn test_run_with_count_true(){
        let root = Path::new("./_test_count");
        setup(root);
        let options = Options{
            dir : root.to_path_buf(),
            all : true,
            count: true,
            files : true
        };
        let result = super::run(&options).unwrap();
        let expected = "|_test_count/\n|---.foo2/\n|---foo/\n|---foo1/\n|---foo.txt\n";
        assert_eq!(result.tree, expected);
        assert_eq!(result.subdirs, 3);
        assert_eq!(result.files, 1);
        teardown(root);
    }
}
|
use std::backtrace::Backtrace;
///! This module exposes 32-bit architecture specific values and functions
///!
///! See the module doc in arch_64.rs for more information
use std::cmp;
use std::fmt;
use std::sync::Arc;
use crate::erts::exception::InternalResult;
use liblumen_core::sys::sysconf::MIN_ALIGN;
use liblumen_term::{Encoding as TermEncoding, Encoding32, Tag};
const_assert!(MIN_ALIGN >= 4);
use crate::erts::term::prelude::*;
use super::Repr;
/// The term encoding scheme used on 32-bit targets.
#[cfg_attr(not(target_pointer_width = "32"), allow(unused))]
pub type Encoding = Encoding32;
/// An opaque 32-bit term: a single tagged word that holds either an
/// immediate value, a (possibly literal) pointer, or a header word for a
/// heap-allocated value.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct RawTerm(u32);
impl RawTerm {
    /// The canonical "no value" term.
    pub const NONE: Self = Self(Encoding::NONE);
    /// The empty list.
    pub const NIL: Self = Self(Encoding::TAG_NIL);
    // Header tags for boxed/heap-allocated values, re-exported from the
    // 32-bit encoding scheme.
    pub const HEADER_TUPLE: u32 = Encoding::TAG_TUPLE;
    pub const HEADER_BIG_INTEGER: u32 = Encoding::TAG_BIG_INTEGER;
    pub const HEADER_REFERENCE: u32 = Encoding::TAG_REFERENCE;
    pub const HEADER_CLOSURE: u32 = Encoding::TAG_CLOSURE;
    pub const HEADER_FLOAT: u32 = Encoding::TAG_FLOAT;
    pub const HEADER_RESOURCE_REFERENCE: u32 = Encoding::TAG_RESOURCE_REFERENCE;
    pub const HEADER_PROCBIN: u32 = Encoding::TAG_PROCBIN;
    // NOTE(review): binary literals share the procbin tag — presumably they
    // are distinguished elsewhere (e.g. by the literal bit); confirm.
    pub const HEADER_BINARY_LITERAL: u32 = Encoding::TAG_PROCBIN;
    pub const HEADER_HEAPBIN: u32 = Encoding::TAG_HEAPBIN;
    pub const HEADER_SUBBINARY: u32 = Encoding::TAG_SUBBINARY;
    pub const HEADER_MATCH_CTX: u32 = Encoding::TAG_MATCH_CTX;
    pub const HEADER_EXTERN_PID: u32 = Encoding::TAG_EXTERN_PID;
    pub const HEADER_EXTERN_PORT: u32 = Encoding::TAG_EXTERN_PORT;
    pub const HEADER_EXTERN_REF: u32 = Encoding::TAG_EXTERN_REF;
    pub const HEADER_MAP: u32 = Encoding::TAG_MAP;
    /// Returns true if this term is the NONE value.
    #[inline(always)]
    pub fn is_none(&self) -> bool {
        self.0 == Encoding::NONE
    }
    /// Extracts the tag describing this term's type.
    #[inline]
    fn type_of(&self) -> Tag<u32> {
        Encoding::type_of(self.0)
    }
    /// Packs `value` into an immediate term with the given tag.
    #[inline]
    fn encode_immediate(value: u32, tag: u32) -> Self {
        Self(Encoding::encode_immediate(value, tag))
    }
    /// Encodes a cons-cell pointer as a list term.
    #[inline]
    fn encode_list(value: *const Cons) -> Self {
        Self(Encoding::encode_list(value))
    }
    /// Encodes a heap pointer as a boxed term.
    #[inline]
    fn encode_box<T: ?Sized>(value: *const T) -> Self {
        Self(Encoding::encode_box(value))
    }
    /// Encodes a pointer to constant/literal data as a literal boxed term.
    #[inline]
    fn encode_literal<T: ?Sized>(value: *const T) -> Self {
        Self(Encoding::encode_literal(value))
    }
    /// Packs `value` into a header word with the given header tag.
    #[cfg_attr(not(target_pointer_width = "32"), allow(unused))]
    #[inline]
    pub(crate) fn encode_header(value: u32, tag: u32) -> Self {
        Self(Encoding::encode_header(value, tag))
    }
    /// Strips the box tag, yielding the raw heap pointer.
    #[inline]
    unsafe fn decode_box(self) -> *mut Self {
        Encoding::decode_box(self.0)
    }
    /// Strips the list tag, yielding a non-null cons-cell pointer.
    #[inline]
    unsafe fn decode_list(self) -> Boxed<Cons> {
        let ptr: *mut Cons = Encoding::decode_list(self.0);
        Boxed::new_unchecked(ptr)
    }
    /// Decodes an immediate small-integer term.
    #[inline]
    fn decode_smallint(self) -> SmallInteger {
        let i = Encoding::decode_smallint(self.0);
        unsafe { SmallInteger::new_unchecked(i as isize) }
    }
    /// Decodes an immediate atom term into its interned `Atom`.
    #[inline]
    fn decode_atom(self) -> Atom {
        unsafe { Atom::from_id(Encoding::decode_immediate(self.0) as usize) }
    }
    /// Decodes an immediate local pid term.
    #[inline]
    fn decode_pid(self) -> Pid {
        unsafe { Pid::from_raw(Encoding::decode_immediate(self.0) as usize) }
    }
    /// Decodes an immediate local port term.
    #[inline]
    fn decode_port(self) -> Port {
        unsafe { Port::from_raw(Encoding::decode_immediate(self.0) as usize) }
    }
}
impl fmt::Binary for RawTerm {
    /// Renders the raw 32-bit word in binary (alternate form, `0b` prefix).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let bits = self.value();
        write!(f, "{:#b}", bits)
    }
}
impl Repr for RawTerm {
    type Encoding = Encoding32;

    /// Widens the raw word to `usize` (lossless on 32-bit targets).
    #[inline]
    fn as_usize(&self) -> usize {
        self.value() as usize
    }

    /// Returns the underlying 32-bit word unchanged.
    #[inline(always)]
    fn value(&self) -> u32 {
        self.0
    }
}
// SAFETY: `RawTerm` is a plain `u32` word with no interior mutability, so the
// value itself can move between threads; any pointer it encodes is only
// dereferenced through the `unsafe` decode paths — TODO(review) confirm the
// wider ownership model guarantees those pointers remain valid cross-thread.
unsafe impl Send for RawTerm {}
impl Encode<RawTerm> for u8 {
    /// A `u8` always fits in the small-integer immediate range, so this
    /// encoding cannot fail.
    fn encode(&self) -> InternalResult<RawTerm> {
        let value = u32::from(*self);
        Ok(RawTerm::encode_immediate(value, Encoding::TAG_SMALL_INTEGER))
    }
}
impl Encode<RawTerm> for SmallInteger {
    /// Encodes a small integer as an immediate term; values that do not fit
    /// in an `i32` are rejected with `ValueOutOfRange` rather than truncated.
    fn encode(&self) -> InternalResult<RawTerm> {
        let narrowed: i32 = (*self)
            .try_into()
            .map_err(|_| TermEncodingError::ValueOutOfRange)?;
        Ok(RawTerm::encode_immediate(
            narrowed as u32,
            Encoding::TAG_SMALL_INTEGER,
        ))
    }
}
impl Encode<RawTerm> for bool {
    /// Encodes a boolean as the atom `true` or `false`.
    fn encode(&self) -> InternalResult<RawTerm> {
        // Use the static atom names directly instead of `self.to_string()`,
        // which heap-allocated a String on every encode. Behavior is
        // identical: `bool`'s Display form is exactly "true"/"false".
        let name = if *self { "true" } else { "false" };
        let atom = Atom::try_from_str(name).unwrap();
        Ok(RawTerm::encode_immediate(
            atom.id() as u32,
            Encoding::TAG_ATOM,
        ))
    }
}
impl Encode<RawTerm> for Atom {
    /// Packs the atom's interned id into an atom-tagged immediate.
    fn encode(&self) -> InternalResult<RawTerm> {
        let id = self.id() as u32;
        Ok(RawTerm::encode_immediate(id, Encoding::TAG_ATOM))
    }
}
impl Encode<RawTerm> for Pid {
    /// Packs the local pid's raw value into a pid-tagged immediate.
    fn encode(&self) -> InternalResult<RawTerm> {
        let raw = self.as_usize() as u32;
        Ok(RawTerm::encode_immediate(raw, Encoding::TAG_PID))
    }
}
impl Encode<RawTerm> for Port {
    /// Packs the local port's raw value into a port-tagged immediate.
    fn encode(&self) -> InternalResult<RawTerm> {
        let raw = self.as_usize() as u32;
        Ok(RawTerm::encode_immediate(raw, Encoding::TAG_PORT))
    }
}
impl From<*mut RawTerm> for RawTerm {
fn from(ptr: *mut RawTerm) -> Self {
RawTerm::encode_box(ptr)
}
}
// Wire up list construction plus boxed / unsized-boxed / literal support for
// each heap-allocated term type via the shared impl macros.
impl_list!(RawTerm);
impl_boxable!(Float, RawTerm);
impl_boxable!(BigInteger, RawTerm);
impl_boxable!(Reference, RawTerm);
impl_boxable!(ExternalPid, RawTerm);
impl_boxable!(ExternalPort, RawTerm);
impl_boxable!(ExternalReference, RawTerm);
impl_boxable!(Resource, RawTerm);
impl_boxable!(Map, RawTerm);
impl_boxable!(ProcBin, RawTerm);
impl_boxable!(SubBinary, RawTerm);
impl_boxable!(MatchContext, RawTerm);
// Dynamically-sized term types need the unsized variant.
impl_unsized_boxable!(Tuple, RawTerm);
impl_unsized_boxable!(Closure, RawTerm);
impl_unsized_boxable!(HeapBin, RawTerm);
impl_literal!(BinaryLiteral, RawTerm);
// Pointer casts out of a tagged term. The generic impls use `default fn`
// (specialization) so that the concrete `Cons` impls below can override them
// with a stricter assertion: cons cells are reached only via list terms,
// never via plain box/literal terms.
impl Cast<*mut RawTerm> for RawTerm {
    /// Extracts the raw pointer from a boxed, literal, or list term.
    #[inline]
    default fn dyn_cast(self) -> *mut RawTerm {
        assert!(self.is_boxed() || self.is_literal() || self.is_non_empty_list());
        unsafe { self.decode_box() }
    }
}
impl<T> Cast<Boxed<T>> for RawTerm
where
    T: Boxable<RawTerm>,
{
    /// Extracts a non-null typed pointer; panics if the pointer is null.
    #[inline]
    default fn dyn_cast(self) -> Boxed<T> {
        assert!(self.is_boxed() || self.is_literal() || self.is_non_empty_list());
        Boxed::new(unsafe { self.decode_box() as *mut T }).unwrap()
    }
}
impl<T> Cast<*mut T> for RawTerm
where
    T: Boxable<RawTerm>,
{
    /// Extracts a typed mutable pointer from a boxed or literal term.
    #[inline]
    default fn dyn_cast(self) -> *mut T {
        assert!(self.is_boxed() || self.is_literal());
        unsafe { self.decode_box() as *mut T }
    }
}
impl Cast<*mut Cons> for RawTerm {
    /// Specialized override: cons pointers come only from list terms.
    #[inline]
    fn dyn_cast(self) -> *mut Cons {
        assert!(self.is_non_empty_list());
        unsafe { self.decode_box() as *mut Cons }
    }
}
impl Cast<*const RawTerm> for RawTerm {
    /// Const-pointer variant of the box/literal/list extraction.
    #[inline]
    default fn dyn_cast(self) -> *const RawTerm {
        assert!(self.is_boxed() || self.is_literal() || self.is_non_empty_list());
        unsafe { self.decode_box() as *const RawTerm }
    }
}
impl<T> Cast<*const T> for RawTerm
where
    T: Boxable<RawTerm>,
{
    /// Extracts a typed const pointer from a boxed or literal term.
    #[inline]
    default fn dyn_cast(self) -> *const T {
        assert!(self.is_boxed() || self.is_literal());
        unsafe { self.decode_box() as *const T }
    }
}
impl Cast<*const Cons> for RawTerm {
    /// Specialized override: cons pointers come only from list terms.
    #[inline]
    fn dyn_cast(self) -> *const Cons {
        assert!(self.is_non_empty_list());
        unsafe { self.decode_box() as *const Cons }
    }
}
impl Encoded for RawTerm {
    /// Decodes this raw word into a `TypedTerm`.
    ///
    /// Immediates decode directly. Boxed/literal terms are dereferenced once
    /// and decoded; a box pointing at another box is treated as a GC move
    /// marker and reported as an error rather than followed.
    #[inline]
    fn decode(&self) -> Result<TypedTerm, TermDecodingError> {
        let tag = self.type_of();
        match tag {
            Tag::Nil => Ok(TypedTerm::Nil),
            Tag::List => Ok(TypedTerm::List(unsafe { self.decode_list() })),
            Tag::SmallInteger => Ok(TypedTerm::SmallInteger(self.decode_smallint())),
            Tag::Atom => Ok(TypedTerm::Atom(self.decode_atom())),
            Tag::Pid => Ok(TypedTerm::Pid(self.decode_pid())),
            Tag::Port => Ok(TypedTerm::Port(self.decode_port())),
            Tag::Box | Tag::Literal => {
                // NOTE: If the pointer we extract here is bogus or unmapped memory, we'll segfault,
                // but that is reflective of a bug where a term is being created or dereferenced
                // incorrectly, to find the source, you'll need to examine the trace
                // to see where the input term is defined
                let ptr = unsafe { self.decode_box() };
                let unboxed = unsafe { &*ptr };
                match unboxed.type_of() {
                    Tag::Nil => Ok(TypedTerm::Nil),
                    Tag::List => Ok(TypedTerm::List(unsafe { unboxed.decode_list() })),
                    Tag::SmallInteger => Ok(TypedTerm::SmallInteger(unboxed.decode_smallint())),
                    Tag::Atom => Ok(TypedTerm::Atom(unboxed.decode_atom())),
                    Tag::Pid => Ok(TypedTerm::Pid(unboxed.decode_pid())),
                    Tag::Port => Ok(TypedTerm::Port(unboxed.decode_port())),
                    // A box pointing at a box is a move marker — not a valid term.
                    Tag::Box | Tag::Literal => Err(TermDecodingError::MoveMarker {
                        backtrace: Arc::new(Backtrace::capture()),
                    }),
                    Tag::Unknown(_) => Err(TermDecodingError::InvalidTag {
                        backtrace: Arc::new(Backtrace::capture()),
                    }),
                    Tag::None => Err(TermDecodingError::NoneValue {
                        backtrace: Arc::new(Backtrace::capture()),
                    }),
                    // Remaining tags are headers for heap-allocated values;
                    // the literal flag is forwarded from the outer tag.
                    header => unboxed.decode_header(header, Some(tag == Tag::Literal)),
                }
            }
            Tag::Unknown(_) => Err(TermDecodingError::InvalidTag {
                backtrace: Arc::new(Backtrace::capture()),
            }),
            Tag::None => Err(TermDecodingError::NoneValue {
                backtrace: Arc::new(Backtrace::capture()),
            }),
            // A bare header word (not reached through a box).
            header => self.decode_header(header, None),
        }
    }
}
impl fmt::Debug for RawTerm {
    /// Diagnostic rendering that tolerates partially-invalid terms: it
    /// recognizes GC move markers in list cells and falls back to tag dumps
    /// for undecodable headers instead of panicking.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.type_of() {
            Tag::None => write!(f, "Term(None)"),
            Tag::Nil => write!(f, "Term(Nil)"),
            Tag::List => {
                let ptr = unsafe { self.decode_box() };
                let unboxed = unsafe { *ptr };
                if unboxed.is_none() {
                    // A NONE head marks a moved cons cell; the following word
                    // holds the forwarding address.
                    let forwarding_addr_ptr = unsafe { ptr.offset(1) };
                    let forwarding_addr = unsafe { *forwarding_addr_ptr };
                    write!(f, "MoveMarker({:?} => {:?})", ptr, forwarding_addr)
                } else {
                    let value = unsafe { self.decode_list() };
                    write!(f, "Term({:?})", value)
                }
            }
            Tag::SmallInteger => {
                let value = self.decode_smallint();
                write!(f, "Term({})", value)
            }
            Tag::Atom => {
                let value = self.decode_atom();
                write!(f, "Term({})", value)
            }
            Tag::Pid => {
                let value = self.decode_pid();
                write!(f, "Term({})", value)
            }
            Tag::Port => {
                let value = self.decode_port();
                write!(f, "Term({})", value)
            }
            Tag::Box | Tag::Literal => {
                // Recover the literal flag straight from the raw bits.
                let is_literal = self.0 & Encoding::TAG_LITERAL == Encoding::TAG_LITERAL;
                let ptr = unsafe { self.decode_box() };
                let unboxed = unsafe { &*ptr };
                write!(
                    f,
                    "Box({:p}, literal = {}, value={:?})",
                    ptr, is_literal, unboxed
                )
            }
            Tag::Unknown(invalid_tag) => write!(f, "InvalidTerm(tag: {:032b})", invalid_tag),
            // Header words: decode if possible, otherwise dump the tag.
            header => match self.decode_header(header, None) {
                Ok(term) => write!(f, "Term({:?})", term),
                Err(_) => write!(f, "InvalidHeader(tag: {:?})", header),
            },
        }
    }
}
impl fmt::Display for RawTerm {
    /// Displays the decoded term, or the decode error's debug form when the
    /// raw word cannot be decoded.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.decode()
            .map_or_else(|err| write!(f, "{:?}", err), |term| write!(f, "{}", term))
    }
}
impl PartialEq<RawTerm> for RawTerm {
fn eq(&self, other: &RawTerm) -> bool {
match (self.decode(), other.decode()) {
(Ok(ref lhs), Ok(ref rhs)) => lhs.eq(rhs),
(Err(_), Err(_)) => true,
_ => false,
}
}
}
impl Eq for RawTerm {}
impl PartialOrd<RawTerm> for RawTerm {
    /// Orders by decoded typed terms; undefined (None) when either side
    /// fails to decode.
    fn partial_cmp(&self, other: &RawTerm) -> Option<core::cmp::Ordering> {
        match (self.decode(), other.decode()) {
            (Ok(ref lhs), Ok(ref rhs)) => lhs.partial_cmp(rhs),
            _ => None,
        }
    }
}
impl Ord for RawTerm {
    // NOTE(review): delegates to `partial_cmp` and unwraps, so `cmp` PANICS
    // when either term fails to decode (partial_cmp returns None). Callers
    // must only order valid, decodable terms.
    fn cmp(&self, other: &RawTerm) -> cmp::Ordering {
        self.partial_cmp(other).unwrap()
    }
}
impl core::hash::Hash for RawTerm {
    // Hashes the DECODED term so equal terms hash equally (consistent with
    // `PartialEq`). NOTE(review): panics on undecodable terms via `unwrap`.
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.decode().unwrap().hash(state)
    }
}
#[cfg(all(test, target_pointer_width = "32"))]
pub mod tests {
#[cfg(target_arch = "wasm32")]
use wasm_bindgen_test::*;
use core::convert::TryInto;
use crate::borrow::CloneToProcess;
use crate::erts::process::alloc::TermAlloc;
use crate::erts::testing::RegionHeap;
use super::*;
const MAX_IMMEDIATE_VALUE: u32 = Encoding32::MAX_IMMEDIATE_VALUE;
const MAX_ATOM_ID: u32 = Encoding32::MAX_ATOM_ID;
const MIN_SMALLINT_VALUE: i32 = Encoding32::MIN_SMALLINT_VALUE;
const MAX_SMALLINT_VALUE: i32 = Encoding32::MAX_SMALLINT_VALUE;
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn none_encoding_arch32() {
assert_eq!(RawTerm::NONE, RawTerm::NONE);
assert!(RawTerm::NONE.is_none());
assert_eq!(RawTerm::NONE.type_of(), Tag::None);
assert!(!RawTerm::NONE.is_boxed());
assert!(!RawTerm::NONE.is_header());
assert!(!RawTerm::NONE.is_immediate());
let none: *const BigInteger = core::ptr::null();
let none_boxed: RawTerm = none.into();
assert!(none_boxed.is_none());
assert_eq!(none_boxed.type_of(), Tag::None);
assert!(!none_boxed.is_boxed());
assert!(!none_boxed.is_bigint());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn literal_encoding_arch32() {
let literal: *const BigInteger = core::ptr::null();
let literal_boxed = RawTerm::encode_literal(literal);
assert!(!literal_boxed.is_boxed());
assert!(!literal_boxed.is_literal());
assert_eq!(literal_boxed.type_of(), Tag::None);
assert!(!literal_boxed.is_header());
assert!(!literal_boxed.is_immediate());
let literal: *const BigInteger = 0xABCD00usize as *const usize as *const BigInteger;
let literal_boxed = RawTerm::encode_literal(literal);
assert!(literal_boxed.is_boxed());
assert!(literal_boxed.is_literal());
assert_eq!(literal_boxed.type_of(), Tag::Literal);
assert!(!literal_boxed.is_header());
assert!(!literal_boxed.is_immediate());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn float_encoding_arch32() {
let mut heap = RegionHeap::default();
let float = heap.float(std::f64::MAX).unwrap();
let float_term: RawTerm = float.encode().unwrap();
assert!(float_term.is_boxed());
assert_eq!(float_term.type_of(), Tag::Box);
assert!(!float_term.is_float());
let unboxed: *const RawTerm = float_term.dyn_cast();
let float_header = unsafe { *unboxed };
assert!(float_header.is_header());
assert!(float_header.is_float());
assert_eq!(float_header.type_of(), Tag::Float);
let float_decoded: Result<Boxed<Float>, _> = float_term.decode().unwrap().try_into();
assert!(float_decoded.is_ok());
let float_box = float_decoded.unwrap();
assert_eq!(&float, float_box.as_ref());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn fixnum_encoding_arch32() {
let max: SmallInteger = MAX_SMALLINT_VALUE.try_into().unwrap();
let min: SmallInteger = MIN_SMALLINT_VALUE.try_into().unwrap();
let max_term: RawTerm = max.encode().unwrap();
let min_term: RawTerm = min.encode().unwrap();
assert!(max_term.is_integer());
assert!(min_term.is_integer());
assert_eq!(max_term.type_of(), Tag::SmallInteger);
assert_eq!(min_term.type_of(), Tag::SmallInteger);
assert!(max_term.is_smallint());
assert!(min_term.is_smallint());
assert!(max_term.is_immediate());
assert!(min_term.is_immediate());
assert!(!max_term.is_header());
assert!(!min_term.is_header());
assert!(!max_term.is_boxed());
assert!(!min_term.is_boxed());
let max_decoded: Result<SmallInteger, _> = max_term.decode().unwrap().try_into();
assert!(max_decoded.is_ok());
assert_eq!(max, max_decoded.unwrap());
let min_decoded: Result<SmallInteger, _> = min_term.decode().unwrap().try_into();
assert!(min_decoded.is_ok());
assert_eq!(min, min_decoded.unwrap());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn atom_encoding_arch32() {
let atom = unsafe { Atom::from_id(MAX_ATOM_ID as usize) };
let atom_term: RawTerm = atom.encode().unwrap();
assert_eq!(atom_term.type_of(), Tag::Atom);
assert!(atom_term.is_atom());
assert!(atom_term.is_immediate());
assert!(!atom_term.is_integer());
assert!(!atom_term.is_header());
assert!(!atom_term.is_boxed());
let atom_decoded: Result<Atom, _> = atom_term.decode().unwrap().try_into();
assert!(atom_decoded.is_ok());
assert_eq!(atom, atom_decoded.unwrap());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn pid_encoding_arch32() {
let pid = unsafe { Pid::from_raw(MAX_IMMEDIATE_VALUE as usize) };
let pid_term: RawTerm = pid.encode().unwrap();
assert!(pid_term.is_local_pid());
assert!(!pid_term.is_remote_pid());
assert_eq!(pid_term.type_of(), Tag::Pid);
assert!(pid_term.is_immediate());
assert!(!pid_term.is_integer());
assert!(!pid_term.is_header());
assert!(!pid_term.is_boxed());
let pid_decoded: Result<Pid, _> = pid_term.decode().unwrap().try_into();
assert!(pid_decoded.is_ok());
assert_eq!(pid, pid_decoded.unwrap());
// This function pierces boxes
assert!(pid_term.is_pid());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn port_encoding_arch32() {
let port = unsafe { Port::from_raw(MAX_IMMEDIATE_VALUE as usize) };
let port_term: RawTerm = port.encode().unwrap();
assert!(port_term.is_local_port());
assert!(!port_term.is_remote_port());
assert_eq!(port_term.type_of(), Tag::Port);
assert!(port_term.is_immediate());
assert!(!port_term.is_integer());
assert!(!port_term.is_header());
assert!(!port_term.is_boxed());
let port_decoded: Result<Port, _> = port_term.decode().unwrap().try_into();
assert!(port_decoded.is_ok());
assert_eq!(port, port_decoded.unwrap());
// This function pierces boxes
assert!(port_term.is_port());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn bigint_encoding_arch32() {
let big: BigInteger = (MAX_SMALLINT_VALUE + 1).try_into().unwrap();
let boxed = Boxed::new(&big as *const _ as *mut BigInteger).unwrap();
let big_term: RawTerm = boxed.encode().unwrap();
assert!(big_term.is_boxed());
assert_eq!(big_term.type_of(), Tag::Box);
assert!(!big_term.is_bigint());
let unboxed: *const RawTerm = big_term.dyn_cast();
let big_header = unsafe { *unboxed };
assert!(big_header.is_header());
assert!(big_header.is_bigint());
assert_eq!(big_header.type_of(), Tag::BigInteger);
let big_decoded: Result<Boxed<BigInteger>, _> = big_term.decode().unwrap().try_into();
assert!(big_decoded.is_ok());
assert_eq!(&big, big_decoded.unwrap().as_ref());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn tuple_encoding_arch32() {
    let mut heap = RegionHeap::default();

    // Zero-arity tuple: still a boxed term whose header says Tuple, length 0.
    let empty = Tuple::new(&mut heap, 0).unwrap();
    let empty_term: RawTerm = empty.encode().unwrap();
    assert!(empty_term.is_boxed());
    assert!(!empty_term.is_tuple());
    assert_eq!(empty_term.type_of(), Tag::Box);
    let empty_ptr: *const RawTerm = empty_term.dyn_cast();
    let empty_header = unsafe { *empty_ptr };
    assert!(empty_header.is_header());
    assert!(empty_header.is_tuple());
    assert_eq!(empty_header.type_of(), Tag::Tuple);
    let empty_decoded: Result<Boxed<Tuple>, _> = empty_term.decode().unwrap().try_into();
    assert!(empty_decoded.is_ok());
    let empty_box = empty_decoded.unwrap();
    assert_eq!(&empty, empty_box.as_ref());
    assert_eq!(empty_box.len(), 0);

    // Four-element tuple built from a slice of small integers.
    let elems = vec![fixnum!(1), fixnum!(2), fixnum!(3), fixnum!(4)];
    let filled = Tuple::from_slice(&mut heap, elems.as_slice()).unwrap();
    let filled_term: RawTerm = filled.encode().unwrap();
    assert!(filled_term.is_boxed());
    assert!(!filled_term.is_tuple());
    assert_eq!(filled_term.type_of(), Tag::Box);
    let filled_ptr: *const RawTerm = filled_term.dyn_cast();
    let filled_header = unsafe { *filled_ptr };
    assert!(filled_header.is_header());
    assert!(filled_header.is_tuple());
    assert_eq!(filled_header.type_of(), Tag::Tuple);
    let filled_decoded: Result<Boxed<Tuple>, _> = filled_term.decode().unwrap().try_into();
    assert!(filled_decoded.is_ok());
    let filled_box = filled_decoded.unwrap();
    assert_eq!(&filled, filled_box.as_ref());
    assert_eq!(filled_box.len(), 4);
    // First and last elements round-trip through encode/decode.
    assert_eq!(filled_box.get_element(0), Ok(fixnum!(1)));
    assert_eq!(filled_box.get_element(3), Ok(fixnum!(4)));
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn list_encoding_arch32() {
    let mut heap = RegionHeap::default();

    // NIL is an immediate, not a boxed cons cell.
    assert!(RawTerm::NIL.is_list());
    assert!(RawTerm::NIL.is_nil());
    assert!(RawTerm::NIL.is_immediate());
    assert!(!RawTerm::NIL.is_non_empty_list());
    assert_eq!(RawTerm::NIL.type_of(), Tag::Nil);

    // A cons cell encodes as a list-tagged pointer, not a box.
    let pair = cons!(heap, fixnum!(1), fixnum!(2));
    let pair_term: RawTerm = pair.encode().unwrap();
    assert!(!pair_term.is_boxed());
    assert!(pair_term.is_non_empty_list());
    assert_eq!(pair_term.type_of(), Tag::List);

    // The first word behind the pointer is the head element, not a header.
    let head_ptr: *const RawTerm = pair_term.dyn_cast();
    let head = unsafe { *head_ptr };
    assert!(!head.is_header());
    assert!(head.is_smallint());
    assert_eq!(head.type_of(), Tag::SmallInteger);

    // Decoding yields an equal cons cell with two elements.
    let pair_decoded: Result<Boxed<Cons>, _> = pair_term.decode().unwrap().try_into();
    assert!(pair_decoded.is_ok());
    let pair_box = pair_decoded.unwrap();
    assert_eq!(&pair, pair_box.as_ref());
    assert_eq!(pair_box.count(), Some(2));
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn map_encoding_arch32() {
    let mut heap = RegionHeap::default();
    let pairs = vec![(atom!("foo"), fixnum!(1)), (atom!("bar"), fixnum!(2))];
    let map = Map::from_slice(pairs.as_slice());
    let map_term = map.clone_to_heap(&mut heap).unwrap();
    // The encoded term is a box pointer; the map predicate does not pierce it.
    assert!(map_term.is_boxed());
    assert_eq!(map_term.type_of(), Tag::Box);
    assert!(!map_term.is_map());
    // Behind the box sits the Map header word.
    let unboxed: *const RawTerm = map_term.dyn_cast();
    let map_header = unsafe { *unboxed };
    assert!(map_header.is_header());
    assert!(map_header.is_map());
    assert_eq!(map_header.type_of(), Tag::Map);
    // Decoding round-trips to an equal map.
    let map_decoded: Result<Boxed<Map>, _> = map_term.decode().unwrap().try_into();
    assert!(map_decoded.is_ok());
    let map_box = map_decoded.unwrap();
    assert_eq!(&map, map_box.as_ref());
    // Assert on the DECODED map (the original asserted `map.len()`/`map.get()`
    // on the source value, which left the decoded contents unverified beyond
    // the equality check above). Matches the sibling tests' pattern of
    // exercising the decoded box.
    assert_eq!(map_box.len(), 2);
    assert_eq!(map_box.get(atom!("bar")), Some(fixnum!(2)));
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn closure_encoding_arch32() {
    use crate::erts::process::Process;
    use crate::erts::term::closure::*;
    use std::sync::Arc;
    let mut heap = RegionHeap::default();
    // Identity pieces of an anonymous closure: creator pid, module, index,
    // old-style uniq, and a 16-byte unique fingerprint.
    let creator = Pid::new(1, 0).unwrap();
    let module = Atom::try_from_str("module").unwrap();
    let arity = 0;
    // Native code stub; never actually invoked by this test.
    let code = |_arc_process: &Arc<Process>| Ok(());
    let one = fixnum!(1);
    let two = fixnum!(2);
    let index = 1 as Index;
    let old_unique = 2 as OldUnique;
    let unique = [0u8; 16];
    // Allocate the closure on the heap with a two-element captured environment.
    let closure = heap
        .anonymous_closure_with_env_from_slices(
            module,
            index,
            old_unique,
            unique,
            arity,
            Some(code),
            Creator::Local(creator),
            &[&[one, two]],
        )
        .unwrap();
    // Captured before encoding so it can be compared against the decoded box.
    let mfa = closure.module_function_arity();
    assert_eq!(closure.env_len(), 2);
    // Encodes as a box pointer; the function predicate does not pierce the box.
    let closure_term: RawTerm = closure.into();
    assert!(closure_term.is_boxed());
    assert_eq!(closure_term.type_of(), Tag::Box);
    assert!(!closure_term.is_function());
    // Behind the box sits the Closure header word.
    let unboxed: *const RawTerm = closure_term.dyn_cast();
    let closure_header = unsafe { *unboxed };
    assert!(closure_header.is_header());
    assert!(closure_header.is_function());
    assert_eq!(closure_header.type_of(), Tag::Closure);
    // Decoding round-trips: same closure, same MFA, same captured environment.
    let closure_decoded: Result<Boxed<Closure>, _> = closure_term.decode().unwrap().try_into();
    assert!(closure_decoded.is_ok());
    let closure_box = closure_decoded.unwrap();
    assert_eq!(&closure, closure_box.as_ref());
    assert_eq!(closure_box.env_len(), 2);
    assert_eq!(closure_box.module_function_arity(), mfa);
    assert_eq!(closure_box.get_env_element(0), one);
    assert_eq!(closure_box.get_env_element(1), two);
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn procbin_encoding_arch32() {
    let mut heap = RegionHeap::default();

    // Reference-counted binary.
    let procbin = heap.procbin_from_str("hello world!").unwrap();
    assert_eq!(procbin.as_str(), "hello world!");

    // Encodes as a box pointer; the procbin predicate does not pierce the box.
    let encoded: RawTerm = procbin.into();
    assert!(encoded.is_boxed());
    assert!(!encoded.is_procbin());
    assert_eq!(encoded.type_of(), Tag::Box);

    // The pointed-to word is the ProcBin header.
    let header_ptr: *const RawTerm = encoded.dyn_cast();
    let header = unsafe { *header_ptr };
    assert!(header.is_header());
    assert!(header.is_procbin());
    assert_eq!(header.type_of(), Tag::ProcBin);

    // Decoding round-trips to the same binary contents.
    let decoded: Result<Boxed<ProcBin>, _> = encoded.decode().unwrap().try_into();
    assert!(decoded.is_ok());
    let decoded_box = decoded.unwrap();
    assert_eq!(&procbin, decoded_box.as_ref());
    assert_eq!(decoded_box.as_str(), "hello world!");

    // Unlike is_procbin, these predicates DO pierce the box.
    assert!(encoded.is_binary());
    assert!(encoded.is_bitstring());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn heapbin_encoding_arch32() {
    let mut heap = RegionHeap::default();
    // Binary stored inline on the heap (as opposed to a reference-counted ProcBin).
    let bin = heap.heapbin_from_str("hello world!").unwrap();
    assert_eq!(bin.as_str(), "hello world!");
    // Encodes as a box pointer.
    let bin_term: RawTerm = bin.into();
    assert!(bin_term.is_boxed());
    assert_eq!(bin_term.type_of(), Tag::Box);
    // NOTE(review): sibling tests negate the predicate for their OWN type on
    // the boxed term; this line looks copy-pasted from procbin_encoding_arch32.
    // Should it be `!bin_term.is_heapbin()`? Confirm whether is_heapbin
    // pierces the box before changing it.
    assert!(!bin_term.is_procbin());
    // Behind the box sits the HeapBinary header word.
    let unboxed: *const RawTerm = bin_term.dyn_cast();
    let bin_header = unsafe { *unboxed };
    assert!(bin_header.is_header());
    assert!(bin_header.is_heapbin());
    assert_eq!(bin_header.type_of(), Tag::HeapBinary);
    // Decoding round-trips to the same binary contents.
    let bin_decoded: Result<Boxed<HeapBin>, _> = bin_term.decode().unwrap().try_into();
    assert!(bin_decoded.is_ok());
    let bin_box = bin_decoded.unwrap();
    assert_eq!(&bin, bin_box.as_ref());
    assert_eq!(bin_box.as_str(), "hello world!");
    // These functions pierce the box
    assert!(bin_term.is_binary());
    assert!(bin_term.is_bitstring());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn subbinary_encoding_arch32() {
    let mut heap = RegionHeap::default();
    let original = heap.heapbin_from_str("hello world!").unwrap();
    let original_term: RawTerm = original.into();

    // Slice out 'world!': byte offset 6, bit offset 0, 6 bytes, 0 trailing bits.
    let byte_offset = 6;
    let len = 6;
    let sub = heap
        .subbinary_from_original(original_term, byte_offset, 0, len, 0)
        .unwrap();
    let sub_term: RawTerm = sub.into();

    // Encodes as a box pointer; the subbinary predicate does not pierce it.
    assert!(sub_term.is_boxed());
    assert!(!sub_term.is_subbinary());
    assert_eq!(sub_term.type_of(), Tag::Box);

    // Behind the box sits the SubBinary header word.
    let header_ptr: *const RawTerm = sub_term.dyn_cast();
    let header = unsafe { *header_ptr };
    assert!(header.is_header());
    assert!(header.is_subbinary());
    assert_eq!(header.type_of(), Tag::SubBinary);

    // Decoding round-trips; the slice is byte-aligned binary data.
    let decoded: Result<Boxed<SubBinary>, _> = sub_term.decode().unwrap().try_into();
    assert!(decoded.is_ok());
    let sub_box = decoded.unwrap();
    assert_eq!(&sub, sub_box.as_ref());
    assert!(sub_box.is_aligned());
    assert!(sub_box.is_binary());
    assert_eq!(sub_box.try_into(), Ok("world!".to_owned()));
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn match_context_encoding_arch32() {
    let mut heap = RegionHeap::default();
    let bin = heap.heapbin_from_str("hello world!").unwrap();
    let ctx = heap.match_context_from_binary(bin).unwrap();
    let ctx_term: RawTerm = ctx.into();

    // Encodes as a box pointer; the match-context predicate does not pierce it.
    assert!(ctx_term.is_boxed());
    assert!(!ctx_term.is_match_context());
    assert_eq!(ctx_term.type_of(), Tag::Box);

    // Behind the box sits the MatchContext header word.
    let header_ptr: *const RawTerm = ctx_term.dyn_cast();
    let header = unsafe { *header_ptr };
    assert!(header.is_header());
    assert!(header.is_match_context());
    assert_eq!(header.type_of(), Tag::MatchContext);

    // Decoding round-trips; the fresh context still covers the whole binary.
    let decoded: Result<Boxed<MatchContext>, _> = ctx_term.decode().unwrap().try_into();
    assert!(decoded.is_ok());
    let ctx_box = decoded.unwrap();
    assert_eq!(&ctx, ctx_box.as_ref());
    assert!(ctx_box.is_aligned());
    assert!(ctx_box.is_binary());
    assert_eq!(ctx_box.try_into(), Ok("hello world!".to_owned()));
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn resource_encoding_arch32() {
    use core::any::Any;
    let mut heap = RegionHeap::default();
    // Need a concrete type for casting
    let code: Box<dyn Any> = Box::new(Predicate::new(|input: bool| Some(input)));
    let resource = Resource::from_value(&mut heap, code).unwrap();
    // Encodes as a box pointer; `resource` is used again below after this
    // conversion, so the handle is presumably Copy — TODO confirm.
    let resource_term: RawTerm = resource.into();
    assert!(resource_term.is_boxed());
    assert_eq!(resource_term.type_of(), Tag::Box);
    assert!(!resource_term.is_resource_reference());
    // Behind the box sits the ResourceReference header word.
    let unboxed: *const RawTerm = resource_term.dyn_cast();
    let resource_header = unsafe { *unboxed };
    assert!(resource_header.is_header());
    assert!(resource_header.is_resource_reference());
    assert_eq!(resource_header.type_of(), Tag::ResourceReference);
    // Decoding round-trips to an equal resource handle.
    let resource_decoded: Result<Boxed<Resource>, _> =
        resource_term.decode().unwrap().try_into();
    assert!(resource_decoded.is_ok());
    let resource_box = resource_decoded.unwrap();
    assert_eq!(&resource, resource_box.as_ref());
    // NOTE(review): this downcasts the ORIGINAL handle, not the decoded box;
    // consider `resource_box.downcast_ref` so the round-trip itself is exercised.
    let resource_code = resource.downcast_ref::<Predicate>().unwrap();
    assert_eq!(resource_code.invoke(true), Some(true));
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn reference_encoding_arch32() {
    use crate::erts::scheduler;

    let mut heap = RegionHeap::default();
    // Local reference tied to the next scheduler id.
    let reference = heap.reference(scheduler::id::next(), 0).unwrap();
    let encoded: RawTerm = reference.into();

    // Encodes as a box pointer; the local-reference predicate does not pierce it.
    assert!(encoded.is_boxed());
    assert!(!encoded.is_local_reference());
    assert_eq!(encoded.type_of(), Tag::Box);

    // Behind the box sits the Reference header word.
    let header_ptr: *const RawTerm = encoded.dyn_cast();
    let header = unsafe { *header_ptr };
    assert!(header.is_header());
    assert!(header.is_local_reference());
    assert_eq!(header.type_of(), Tag::Reference);

    // Decoding round-trips to an equal reference.
    let decoded: Result<Boxed<Reference>, _> = encoded.decode().unwrap().try_into();
    assert!(decoded.is_ok());
    assert_eq!(&reference, decoded.unwrap().as_ref());

    // This predicate pierces the box.
    assert!(encoded.is_reference());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
fn external_pid_encoding_arch32() {
    use crate::erts::Node;
    use std::sync::Arc;

    let mut heap = RegionHeap::default();
    // A pid is "external" when it carries a non-local node.
    let arc_node = Arc::new(Node::new(
        1,
        Atom::try_from_str("node@external").unwrap(),
        0,
    ));
    let pid = ExternalPid::new(arc_node, 1, 0).unwrap();
    let encoded = pid.clone_to_heap(&mut heap).unwrap();

    // Encodes as a box pointer; the remote-pid predicate does not pierce it.
    assert!(encoded.is_boxed());
    assert!(!encoded.is_remote_pid());
    assert_eq!(encoded.type_of(), Tag::Box);

    // Behind the box: an ExternalPid header that is remote, never local.
    let header_ptr: *const RawTerm = encoded.dyn_cast();
    let header = unsafe { *header_ptr };
    assert!(header.is_header());
    assert!(header.is_remote_pid());
    assert!(!header.is_local_pid());
    assert_eq!(header.type_of(), Tag::ExternalPid);

    // Decoding round-trips to an equal pid.
    let decoded: Result<Boxed<ExternalPid>, _> = encoded.decode().unwrap().try_into();
    assert!(decoded.is_ok());
    assert_eq!(&pid, decoded.unwrap().as_ref());

    // This predicate pierces the box.
    assert!(encoded.is_pid());
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
#[ignore]
fn external_port_encoding_arch32() {
    // Placeholder: un-ignore and fill in once ExternalPort exists
    // (mirror the structure of external_pid_encoding_arch32).
    // TODO: let mut heap = RegionHeap::default();
    // Waiting on implementation of this type
}
#[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
#[test]
#[ignore]
fn external_reference_encoding_arch32() {
    // Placeholder: un-ignore and fill in once ExternalReference exists
    // (mirror the structure of reference_encoding_arch32).
    // TODO: let mut heap = RegionHeap::default();
    // Waiting on implementation of this type
}
/// Test fixture: a boxed boolean predicate used as the payload in
/// `resource_encoding_arch32` (a `Resource` needs a concrete type to
/// downcast back to).
struct Predicate {
    // Boxed so closures of differing concrete types unify under one field type.
    pred: Box<dyn Fn(bool) -> Option<bool>>,
}
impl Predicate {
    /// Wraps any `'static` `bool -> Option<bool>` closure.
    pub(super) fn new(pred: impl Fn(bool) -> Option<bool> + 'static) -> Self {
        Self {
            pred: Box::new(pred),
        }
    }
    /// Applies the wrapped predicate to `input`.
    pub(super) fn invoke(&self, input: bool) -> Option<bool> {
        (self.pred)(input)
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.