lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
fall/gen/src/generate/codegen.rs
zayenz/fall
71cd05c6eaa026f692db87d4a8ffe1326706efd4
use serde_json; use tera::Context; use fall_tree::{Text, AstNode}; use lang_fall::syntax::{FallFile, SynRule, LexRule, Expr, BlockExpr, MethodDef, Parameter}; use lang_fall::{RefKind, CallKind, MethodKind, Analysis, PratVariant, PrattOp, Arity, ChildKind}; use fall_parse as dst; use crate::util::{scream, camel}; pub type Result<T> = std::result::Result<T, ::failure::Error>; pub(super) struct Codegen<'a, 'f: 'a> { analysis: &'a Analysis<'f>, node_types: Vec<(Text<'f>, bool)>, expressions: Vec<dst::Expr>, } impl<'a, 'f> Codegen<'a, 'f> { pub fn new(analysis: &'a Analysis<'f>) -> Codegen<'a, 'f> { let node_types = { let mut result = Vec::new(); if let Some(tokenizer) = analysis.ast().tokenizer_def() { result.extend( tokenizer.lex_rules() .map(|r| (r.node_type(), r.is_skip())) ) } result.extend( analysis.ast() .syn_rules() .filter(|r| r.is_pub() && r.type_attr().is_none()) .filter_map(|r| r.name()) .map(|n| (n, false)) ); result }; Codegen { analysis, node_types, expressions: Vec::new(), } } pub fn generate(&mut self) -> Result<Context> { let mut context = Context::new(); context.insert("node_types", &self.node_types); for _ in self.file().syn_rules() { self.expressions.push(dst::Expr::Any) } for (i, r) in self.file().syn_rules().enumerate() { let expr = self.gen_rule(r)?; self.expressions[i] = expr; } let parser = serde_json::to_string(&self.expressions).unwrap(); context.insert("parser_json", &parser); let lex_rules = self.file().tokenizer_def() .ok_or(format_err!("no tokens defined"))? 
.lex_rules() .filter(|r| !r.is_contextual()) .map(|r| { let re = r.token_re().ok_or(format_err!("Bad token"))?; Ok(CtxLexRule { ty: r.node_type(), re: format!("{:?}", re), f: r.extern_fn() }) }).collect::<Result<Vec<_>>>()?; context.insert("lex_rules", &lex_rules); let verbatim = self.file().verbatim_def().map(|v| v.contents()); context.insert("verbatim", &verbatim); context.insert("has_whitespace_binder", &verbatim.map(|t| t.contains("whitespace_binder")).unwrap_or(false)); if let Some(ast) = self.file().ast_def() { context.insert("ast_nodes", &ast.ast_nodes().map(|node| { Ok(CtxAstNode { struct_name: camel(node.name()), node_type_name: scream(node.name()), methods: node.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, }) }).collect::<Result<Vec<_>>>()?); context.insert("ast_classes", &ast.ast_classes().map(|class| { CtxAstClass { enum_name: camel(class.name()), variants: class.variants().map(|variant| (scream(variant), camel(variant))).collect(), } }).collect::<Vec<_>>()); context.insert("ast_traits", &ast.ast_traits().map(|trait_| { Ok(CtxAstTrait { trait_name: camel(trait_.name()), methods: trait_.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, impl_for: ast.ast_nodes() .filter(|&node| { self.analysis.ast_node_traits(node).contains(&trait_) }) .map(|node| camel(node.name())) .collect(), }) }).collect::<Result<Vec<_>>>()?); } Ok(context) } fn file(&self) -> FallFile<'f> { self.analysis.ast() } fn syn_rule_ty(&self, rule: SynRule<'f>) -> Option<dst::NodeTypeRef> { let name = rule.ty_name()?; self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .map(|i| dst::NodeTypeRef((i + 1) as u32)) } fn syn_rule_ref(&self, rule: SynRule<'f>) -> dst::ExprRef { let idx = self.file().syn_rules().position(|r| r.node() == rule.node()).unwrap(); dst::ExprRef(idx as u32) } fn lex_rule_ty(&self, rule: LexRule<'f>) -> dst::NodeTypeRef { let name = rule.node_type(); let i = 
self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .unwrap(); dst::NodeTypeRef((i + 1) as u32) } fn param_ref(&self, param: Parameter<'f>) -> dst::Arg { let idx = self.file().syn_rules() .filter_map(|rule| rule.parameters()) .flat_map(|p| p.parameters()) .position(|p| p.node() == param.node()) .unwrap(); dst::Arg(idx as u32) } fn gen_rule(&mut self, rule: SynRule<'f>) -> Result<dst::Expr> { let body = match (rule.is_pratt(), rule.body()) { (true, Expr::BlockExpr(block)) => { let pratt = dst::Expr::Pratt(Box::new(self.gen_pratt(block)?)); self.push_expr(pratt) } (true, _) => unreachable!(), (false, body) => self.gen_expr(body)? }; let body = match (self.syn_rule_ty(rule), rule.is_replaces(), rule.is_cached()) { (Some(ty), true, _) => dst::Expr::PubReplace { ty, body, }, (Some(ty), false, false) => dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), }, (Some(ty), false, true) => { let body = self.push_expr(dst::Expr::Cached(body)); dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), } } (None, false, true) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); dst::Expr::Cached(body) } (None, false, false) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); self.expressions.pop().unwrap() } _ => unreachable!(), }; Ok(body) } fn push_expr(&mut self, expr: dst::Expr) -> dst::ExprRef { let idx = self.expressions.len(); self.expressions.push(expr); dst::ExprRef(idx as u32) } fn gen_expr(&mut self, expr: Expr<'f>) -> Result<dst::ExprRef> { let result = match expr { Expr::BlockExpr(block) => dst::Expr::Or(block.alts().map(|e| self.gen_expr(e)).collect::<Result<Vec<_>>>()?), Expr::SeqExpr(seq) => { fn is_commit(part: Expr) -> bool { part.node().text() == "<commit>" } let commit = seq.parts().position(is_commit); let parts = seq.parts() .filter(|&p| !is_commit(p)) .map(|e| self.gen_expr(e)) .collect::<Result<Vec<_>>>()?; dst::Expr::And(parts, commit) } Expr::RefExpr(ref_) => { let ref_ = 
self.analysis.resolve_reference(ref_) .ok_or(format_err!("Unresolved references: {}", ref_.node().text()))?; match ref_ { RefKind::Token(rule) => { let ty_ref = self.lex_rule_ty(rule); if rule.is_contextual() { dst::Expr::ContextualToken( ty_ref, rule.token_text() .ok_or(format_err!("Missing contextual token text"))? .to_string(), ) } else { dst::Expr::Token(ty_ref) } } RefKind::RuleReference(rule) => return Ok(self.syn_rule_ref(rule)), RefKind::Param(p) => dst::Expr::Var(self.param_ref(p)), } } Expr::CallExpr(call) => { let call = self.analysis.resolve_call(call) .ok_or(format_err!("Failed to compile {}", call.node().text()))?; match call { CallKind::Eof => dst::Expr::Eof, CallKind::Any => dst::Expr::Any, CallKind::Enter(idx, expr) => dst::Expr::Enter( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::Exit(idx, expr) => dst::Expr::Exit( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::IsIn(idx) => dst::Expr::IsIn( dst::Context(idx as u32) ), CallKind::Not(expr) => dst::Expr::Not(self.gen_expr(expr)?), CallKind::Layer(e1, e2) => dst::Expr::Layer( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::WithSkip(e1, e2) => dst::Expr::WithSkip( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::Inject(e1, e2) => dst::Expr::Inject( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::RuleCall(rule, args) => dst::Expr::Call( self.syn_rule_ref(rule), args.iter() .map(|&(p, e)| Ok((self.param_ref(p), self.gen_expr(e)?))) .collect::<Result<Vec<_>>>()?, ), CallKind::PrevIs(tokens) => dst::Expr::PrevIs( tokens.iter().map(|&r| self.syn_rule_ty(r).unwrap()).collect() ), CallKind::Commit => panic!("Should be handled specially"), } } Expr::OptExpr(opt_expr) => dst::Expr::Opt(self.gen_expr(opt_expr.expr())?), Expr::RepExpr(rep_expr) => dst::Expr::Rep(self.gen_expr(rep_expr.expr())?), }; Ok(self.push_expr(result)) } fn gen_pratt(&mut self, ast: BlockExpr<'f>) -> Result<dst::PrattTable> { fn alt_to_rule<'f>(analysis: &Analysis<'f>, alt: Expr<'f>) -> 
Result<SynRule<'f>> { match alt { Expr::SeqExpr(expr) => match expr.parts().next() { Some(Expr::RefExpr(ref_)) => match analysis.resolve_reference(ref_) { Some(RefKind::RuleReference(rule)) => Ok(rule), _ => return Err(format_err!("Bad pratt spec")), }, _ => return Err(format_err!("Bad pratt spec")) }, _ => return Err(format_err!("Bad pratt spec")) } } let mut result = dst::PrattTable { atoms: Vec::new(), prefixes: Vec::new(), infixes: Vec::new(), }; for alt in ast.alts() { let rule = alt_to_rule(&self.analysis, alt)?; let ty = self.syn_rule_ty(rule) .ok_or(format_err!("non public pratt rule"))?; let prat_kind = self.analysis.resolve_pratt_variant(rule) .ok_or(format_err!("pratt rule without attributes"))?; match prat_kind { PratVariant::Atom(_) => result.atoms.push(self.syn_rule_ref(rule)), PratVariant::Postfix(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: false, }); } PratVariant::Prefix(PrattOp { op, priority }) => { result.prefixes.push(dst::Prefix { ty, op: self.gen_expr(op)?, priority, }) } PratVariant::Bin(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: true, }); } }; } Ok(result) } fn gen_method(&self, method: MethodDef<'f>) -> Result<CtxMethod<'f>> { let description = self.analysis.resolve_method(method) .ok_or(format_err!("Bad method `{}`", method.node().text()))?; let (ret_type, body) = match description { MethodKind::TextAccessor(lex_rule, arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Text<'f>".to_owned(), format!("rt::child_of_type_exn(self.node(), {}).text()", node_type)), Arity::Optional => ("Option<rt::Text<'f>>".to_owned(), format!("rt::child_of_type(self.node(), {}).map(|n| n.text())", node_type)), Arity::Many => unimplemented!(), } } MethodKind::NodeAccessor(kind, arity) => { match (kind, arity) { (ChildKind::AstNode(n), Arity::Single) => (format!("{}<'f>", 
camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstNode(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstNode(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::AstClass(n), Arity::Single) => (format!("{}<'f>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstClass(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstClass(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::Token(lex_rule), arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Node<'f>".to_owned(), format!("self.node().children().find(|n| n.ty() == {}).unwrap()", node_type)), Arity::Optional => ("Option<rt::Node<'f>>".to_owned(), format!("self.node().children().find(|n| n.ty() == {})", node_type)), Arity::Many => unimplemented!(), } } } } }; Ok(CtxMethod { name: method.name(), ret_type, body }) } } #[derive(Serialize)] struct CtxLexRule<'f> { ty: Text<'f>, re: String, f: Option<Text<'f>>, } #[derive(Serialize)] struct CtxAstNode<'f> { struct_name: String, node_type_name: String, methods: Vec<CtxMethod<'f>>, } #[derive(Serialize)] struct CtxAstClass { enum_name: String, variants: Vec<(String, String)>, } #[derive(Serialize)] struct CtxAstTrait<'f> { trait_name: String, methods: Vec<CtxMethod<'f>>, impl_for: Vec<String>, } #[derive(Serialize)] struct CtxMethod<'f> { name: Text<'f>, ret_type: String, body: String, }
use serde_json; use tera::Context; use fall_tree::{Text, AstNode}; use lang_fall::syntax::{FallFile, SynRule, LexRule, Expr, BlockExpr, MethodDef, Parameter}; use lang_fall::{RefKind, CallKind, MethodKind, Analysis, PratVariant, PrattOp, Arity, ChildKind}; use fall_parse as dst; use crate::util::{scream, camel}; pub type Result<T> = std::result::Result<T, ::failure::Error>; pub(super) struct Codegen<'a, 'f: 'a> { analysis: &'a Analysis<'f>, node_types: Vec<(Text<'f>, bool)>, expressions: Vec<dst::Expr>, } impl<'a, 'f> Codegen<'a, 'f> { pub fn new(analysis: &'a Analysis<'f>) -> Codegen<'a, 'f> { let node_types = { let mut result = Vec::new(); if let Some(tokenizer) = analysis.ast().tokenizer_def() { result.extend( tokenizer.lex_rules() .map(|r| (r.node_type(), r.is_skip())) ) } result.extend( analysis.ast() .syn_rules() .filter(|r| r.is_pub() && r.type_attr().is_none()) .filter_map(|r| r.name()) .map(|n| (n, false)) ); result }; Codegen { analysis, node_types, expressions: Vec::new(), } } pub fn generate(&mut self) -> Result<Context> { let mut context = Context::new(); context.insert("node_types", &self.node_types); for _ in self.file().syn_rules() { self.expressions.push(dst::Expr::Any) } for (i, r) in self.file().syn_rules().enumerate() { let expr = self.gen_rule(r)?; self.expressions[i] = expr; } let parser = serde_json::to_string(&self.expressions).unwrap(); context.insert("parser_json", &parser); let lex_rules = self.file().tokenizer_def() .ok_or(format_err!("no tokens defined"))? 
.lex_rules() .filter(|r| !r.is_contextual()) .map(|r| { let re = r.token_re().ok_or(format_err!("Bad token"))?; Ok(CtxLexRule { ty: r.node_type(), re: format!("{:?}", re), f: r.extern_fn() }) }).collect::<Result<Vec<_>>>()?; context.insert("lex_rules", &lex_rules); let verbatim = self.file().verbatim_def().map(|v| v.contents()); context.insert("verbatim", &verbatim); context.insert("has_whitespace_binder", &verbatim.map(|t| t.contains("whitespace_binder")).unwrap_or(false)); if let Some(ast) = self.file().ast_def() { context.insert("ast_nodes", &ast.ast_nodes().map(|node| { Ok(CtxAstNode { struct_name: camel(node.name()), node_type_name: scream(node.name()), methods: node.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, }) }).collect::<Result<Vec<_>>>()?); context.insert("ast_classes", &ast.ast_classes().map(|class| { CtxAstClass { enum_name: camel(class.name()), variants: class.variants().map(|variant| (scream(variant), camel(variant))).collect(), } }).collect::<Vec<_>>()); context.insert("ast_traits", &ast.ast_traits().map(|trait_| { Ok(CtxAstTrait { trait_name: camel(trait_.name()), methods: trait_.methods() .map(|method| self.gen_method(method)) .collect::<Result<Vec<CtxMethod>>>()?, impl_for: ast.ast_nodes() .filter(|&node| { self.analysis.ast_node_traits(node).contains(&trait_) }) .map(|node| camel(node.name())) .collect(), }) }).collect::<Result<Vec<_>>>()?); } Ok(context) } fn file(&self) -> FallFile<'f> { self.analysis.ast() } fn syn_rule_ty(&self, rule: SynRule<'f>) -> Option<dst::NodeTypeRef> { let name = rule.ty_name()?; self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .map(|i| dst::NodeTypeRef((i + 1) as u32)) } fn syn_rule_ref(&self, rule: SynRule<'f>) -> dst::ExprRef { let idx = self.file().syn_rules().position(|r| r.node() == rule.node()).unwrap(); dst::ExprRef(idx as u32) } fn lex_rule_ty(&self, rule: LexRule<'f>) -> dst::NodeTypeRef { let name = rule.node_type(); let i = 
self.node_types.iter() .position(|&(ty_name, _)| ty_name == name) .unwrap(); dst::NodeTypeRef((i + 1) as u32) } fn param_ref(&self, param: Parameter<'f>) -> dst::Arg { let idx = self.file().syn_rules() .filter_map(|rule| rule.parameters()) .flat_map(|p| p.parameters()) .position(|p| p.node() == param.node()) .unwrap(); dst::Arg(idx as u32) } fn gen_rule(&mut self, rule: SynRule<'f>) -> Result<dst::Expr> { let body = match (rule.is_pratt(), rule.body()) { (true, Expr::BlockExpr(block)) => { let pratt = dst::Expr::Pratt(Box::new(self.gen_pratt(block)?)); self.push_expr(pratt) } (true, _) => unreachable!(), (false, body) => self.gen_expr(body)? }; let body = match (self.syn_rule_ty(rule), rule.is_replaces(), rule.is_cached()) { (Some(ty), true, _) => dst::Expr::PubReplace { ty, body, }, (Some(ty), false, false) => dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), }, (Some(ty), false, true) => { let body = self.push_expr(dst::Expr::Cached(body)); dst::Expr::Pub { ty, body, replaceable: rule.is_replaceable(), } } (None, false, true) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); dst::Expr::Cached(body) } (None, false, false) => { assert_eq!(self.expressions.len() - 1, body.0 as usize); self.expressions.pop().unwrap() } _ => unreachable!(), }; Ok(body) } fn push_expr(&mut self, expr: dst::Expr) -> dst::ExprRef { let idx = self.expressions.len(); self.expressions.push(expr); dst::ExprRef(idx as u32) } fn gen_expr(&mut self, expr: Expr<'f>) -> Result<dst::ExprRef> { let result = match expr { Expr::BlockExpr(block) => dst::Expr::Or(block.alts().map(|e| self.gen_expr(e)).collect::<Result<Vec<_>>>()?), Expr::SeqExpr(seq) => { fn is_commit(part: Expr) -> bool { part.node().text() == "<commit>" } let commit = seq.parts().position(is_commit); let parts = seq.parts() .filter(|&p| !is_commit(p)) .map(|e| self.gen_expr(e)) .collect::<Result<Vec<_>>>()?; dst::Expr::And(parts, commit) } Expr::RefExpr(ref_) => { let ref_ = 
self.analysis.resolve_reference(ref_) .ok_or(format_err!("Unresolved references: {}", ref_.node().text()))?; match ref_ { RefKind::Token(rule) => { let ty_ref = self.lex_rule_ty(rule);
} RefKind::RuleReference(rule) => return Ok(self.syn_rule_ref(rule)), RefKind::Param(p) => dst::Expr::Var(self.param_ref(p)), } } Expr::CallExpr(call) => { let call = self.analysis.resolve_call(call) .ok_or(format_err!("Failed to compile {}", call.node().text()))?; match call { CallKind::Eof => dst::Expr::Eof, CallKind::Any => dst::Expr::Any, CallKind::Enter(idx, expr) => dst::Expr::Enter( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::Exit(idx, expr) => dst::Expr::Exit( dst::Context(idx as u32), self.gen_expr(expr)?, ), CallKind::IsIn(idx) => dst::Expr::IsIn( dst::Context(idx as u32) ), CallKind::Not(expr) => dst::Expr::Not(self.gen_expr(expr)?), CallKind::Layer(e1, e2) => dst::Expr::Layer( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::WithSkip(e1, e2) => dst::Expr::WithSkip( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::Inject(e1, e2) => dst::Expr::Inject( self.gen_expr(e1)?, self.gen_expr(e2)?, ), CallKind::RuleCall(rule, args) => dst::Expr::Call( self.syn_rule_ref(rule), args.iter() .map(|&(p, e)| Ok((self.param_ref(p), self.gen_expr(e)?))) .collect::<Result<Vec<_>>>()?, ), CallKind::PrevIs(tokens) => dst::Expr::PrevIs( tokens.iter().map(|&r| self.syn_rule_ty(r).unwrap()).collect() ), CallKind::Commit => panic!("Should be handled specially"), } } Expr::OptExpr(opt_expr) => dst::Expr::Opt(self.gen_expr(opt_expr.expr())?), Expr::RepExpr(rep_expr) => dst::Expr::Rep(self.gen_expr(rep_expr.expr())?), }; Ok(self.push_expr(result)) } fn gen_pratt(&mut self, ast: BlockExpr<'f>) -> Result<dst::PrattTable> { fn alt_to_rule<'f>(analysis: &Analysis<'f>, alt: Expr<'f>) -> Result<SynRule<'f>> { match alt { Expr::SeqExpr(expr) => match expr.parts().next() { Some(Expr::RefExpr(ref_)) => match analysis.resolve_reference(ref_) { Some(RefKind::RuleReference(rule)) => Ok(rule), _ => return Err(format_err!("Bad pratt spec")), }, _ => return Err(format_err!("Bad pratt spec")) }, _ => return Err(format_err!("Bad pratt spec")) } } let mut result = 
dst::PrattTable { atoms: Vec::new(), prefixes: Vec::new(), infixes: Vec::new(), }; for alt in ast.alts() { let rule = alt_to_rule(&self.analysis, alt)?; let ty = self.syn_rule_ty(rule) .ok_or(format_err!("non public pratt rule"))?; let prat_kind = self.analysis.resolve_pratt_variant(rule) .ok_or(format_err!("pratt rule without attributes"))?; match prat_kind { PratVariant::Atom(_) => result.atoms.push(self.syn_rule_ref(rule)), PratVariant::Postfix(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: false, }); } PratVariant::Prefix(PrattOp { op, priority }) => { result.prefixes.push(dst::Prefix { ty, op: self.gen_expr(op)?, priority, }) } PratVariant::Bin(PrattOp { op, priority }) => { result.infixes.push(dst::Infix { ty, op: self.gen_expr(op)?, priority, has_rhs: true, }); } }; } Ok(result) } fn gen_method(&self, method: MethodDef<'f>) -> Result<CtxMethod<'f>> { let description = self.analysis.resolve_method(method) .ok_or(format_err!("Bad method `{}`", method.node().text()))?; let (ret_type, body) = match description { MethodKind::TextAccessor(lex_rule, arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Text<'f>".to_owned(), format!("rt::child_of_type_exn(self.node(), {}).text()", node_type)), Arity::Optional => ("Option<rt::Text<'f>>".to_owned(), format!("rt::child_of_type(self.node(), {}).map(|n| n.text())", node_type)), Arity::Many => unimplemented!(), } } MethodKind::NodeAccessor(kind, arity) => { match (kind, arity) { (ChildKind::AstNode(n), Arity::Single) => (format!("{}<'f>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstNode(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstNode(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), 
"rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::AstClass(n), Arity::Single) => (format!("{}<'f>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next().unwrap()".to_owned()), (ChildKind::AstClass(n), Arity::Optional) => (format!("Option<{}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children()).next()".to_owned()), (ChildKind::AstClass(n), Arity::Many) => (format!("rt::AstChildren<'f, {}<'f>>", camel(n.name())), "rt::AstChildren::new(self.node().children())".to_owned()), (ChildKind::Token(lex_rule), arity) => { let node_type = scream(lex_rule.node_type()); match arity { Arity::Single => ("rt::Node<'f>".to_owned(), format!("self.node().children().find(|n| n.ty() == {}).unwrap()", node_type)), Arity::Optional => ("Option<rt::Node<'f>>".to_owned(), format!("self.node().children().find(|n| n.ty() == {})", node_type)), Arity::Many => unimplemented!(), } } } } }; Ok(CtxMethod { name: method.name(), ret_type, body }) } } #[derive(Serialize)] struct CtxLexRule<'f> { ty: Text<'f>, re: String, f: Option<Text<'f>>, } #[derive(Serialize)] struct CtxAstNode<'f> { struct_name: String, node_type_name: String, methods: Vec<CtxMethod<'f>>, } #[derive(Serialize)] struct CtxAstClass { enum_name: String, variants: Vec<(String, String)>, } #[derive(Serialize)] struct CtxAstTrait<'f> { trait_name: String, methods: Vec<CtxMethod<'f>>, impl_for: Vec<String>, } #[derive(Serialize)] struct CtxMethod<'f> { name: Text<'f>, ret_type: String, body: String, }
if rule.is_contextual() { dst::Expr::ContextualToken( ty_ref, rule.token_text() .ok_or(format_err!("Missing contextual token text"))? .to_string(), ) } else { dst::Expr::Token(ty_ref) }
if_condition
[ { "content": "fn parse_expr_pred(p: &mut Parser, expr: ExprRef, tokens: Pos) -> Option<Pos> {\n\n let old_mode = p.predicate_mode;\n\n p.predicate_mode = true;\n\n let result = parse_expr(p, expr, tokens);\n\n p.predicate_mode = old_mode;\n\n result\n\n}\n\n\n", "file_path": "fall/parse/src/s...
Rust
truck-rendimpl/src/shaperend.rs
mattiasgronlund/truck
df78ff348b448d41743c2f7db1f93ebb3c0f41ca
use crate::*; use truck_meshalgo::tessellation::*; use truck_topology::*; impl Default for ShapeInstanceDescriptor { #[inline(always)] fn default() -> Self { ShapeInstanceDescriptor { instance_state: Default::default(), mesh_precision: 0.005, } } } impl<Shape: MeshableShape> TryIntoInstance<PolygonInstance> for Shape { type Descriptor = ShapeInstanceDescriptor; fn try_into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> Option<PolygonInstance> { let polygon = self.triangulation(desc.mesh_precision)?.into_polygon(); Some(polygon.into_instance( handler, shaders, &PolygonInstanceDescriptor { instance_state: desc.instance_state.clone(), }, )) } } impl<P, C, S> IntoInstance<PolygonInstance> for Shell<P, C, S> where Shell<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<P, C, S> IntoInstance<PolygonInstance> for Solid<P, C, S> where Solid<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<C, S> IntoInstance<WireFrameInstance> for Shell<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor; fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .face_iter() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), 
desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } } } impl<C, S> IntoInstance<WireFrameInstance> for Solid<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor; fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .boundaries() .iter() .flatten() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } } }
use crate::*; use truck_meshalgo::tessellation::*; use truck_topology::*; impl Default for ShapeInstanceDescriptor { #[inline(always)] fn default() -> Self { ShapeInstanceDescriptor { instance_state: Default::default(), mesh_precision: 0.005, } } } impl<Shape: MeshableShape> TryIntoInstance<PolygonInstance> for Shape { type Descriptor = ShapeInstanceDescriptor; fn try_into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> Option<PolygonInstance> { let polygon = self.triangulation(desc.mesh_precision)?.into_polygon(); Some(polygon.into_instance( handler, shaders, &PolygonInstanceDescriptor { instance_state: desc.instance_state.clone(), }, )) } } impl<P, C, S> IntoInstance<PolygonInstance> for Shell<P, C, S> where Shell<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<P, C, S> IntoInstance<PolygonInstance> for Solid<P, C, S> where Solid<P, C, S>: MeshableShape, { type Descriptor = ShapeInstanceDescriptor; #[inline(always)] fn into_instance( &self, handler: &DeviceHandler, shaders: &PolygonShaders, desc: &ShapeInstanceDescriptor, ) -> PolygonInstance { self.try_into_instance(handler, shaders, desc) .expect("failed to create instance") } } impl<C, S> IntoInstance<WireFrameInstance> for Shell<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor;
} impl<C, S> IntoInstance<WireFrameInstance> for Solid<Point3, C, S> where C: PolylineableCurve, { type Descriptor = ShapeWireFrameDescriptor; fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .boundaries() .iter() .flatten() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } } }
fn into_instance( &self, handler: &DeviceHandler, shaders: &WireShaders, desc: &ShapeWireFrameDescriptor, ) -> WireFrameInstance { let mut lengths = Vec::new(); let points: Vec<[f32; 3]> = self .face_iter() .flat_map(|face| face.boundary_iters()) .flatten() .flat_map(|edge| { let curve = edge.oriented_curve(); let division = curve.parameter_division(curve.parameter_range(), desc.polyline_precision); lengths.push(division.len() as u32); division .into_iter() .map(move |t| curve.subs(t).cast().unwrap().into()) }) .collect(); let mut strips = Vec::<u32>::new(); let mut counter = 0_u32; for len in lengths { for i in 1..len { strips.push(counter + i - 1); strips.push(counter + i); } counter += len; } let vertices = BufferHandler::from_slice(&points, handler.device(), BufferUsages::VERTEX); let strips = BufferHandler::from_slice(&strips, handler.device(), BufferUsages::INDEX); WireFrameInstance { vertices: Arc::new(vertices), strips: Arc::new(strips), state: desc.wireframe_state.clone(), shaders: shaders.clone(), id: RenderID::gen(), } }
function_block-full_function
[ { "content": "fn nontex_inst_desc() -> PolygonInstanceDescriptor {\n\n PolygonInstanceDescriptor {\n\n instance_state: InstanceState {\n\n matrix: Matrix4::from_cols(\n\n [1.0, 2.0, 3.0, 4.0].into(),\n\n [5.0, 6.0, 7.0, 8.0].into(),\n\n [9.0, 10....
Rust
src/units/units.rs
zboldyga/rustysd
ecc35896385d5909686f275dc6356226c9e5113b
use crate::fd_store::FDStore; use crate::platform::EventFd; use crate::services::Service; use crate::sockets::{Socket, SocketKind, SpecializedSocketConfig}; use crate::units::*; use nix::unistd::Pid; use std::collections::HashMap; use std::sync::{Arc, Mutex, RwLock}; use std::{fmt, path::PathBuf}; #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum UnitIdKind { Target, Socket, Service, } #[derive(Clone, Copy, Eq, PartialEq, Hash)] pub struct UnitId(pub UnitIdKind, pub u64); impl fmt::Debug for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{}", self.1).as_str()) } } impl fmt::Display for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{:?}", self).as_str()) } } impl std::cmp::PartialOrd for UnitId { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.1.partial_cmp(&other.1) } } impl std::cmp::Ord for UnitId { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.1.cmp(&other.1) } } pub type UnitTable = HashMap<UnitId, Arc<Mutex<Unit>>>; pub type ArcMutUnitTable = Arc<RwLock<UnitTable>>; pub type StatusTable = HashMap<UnitId, Arc<Mutex<UnitStatus>>>; pub type ArcMutStatusTable = Arc<RwLock<StatusTable>>; pub type PidTable = HashMap<Pid, PidEntry>; pub type ArcMutPidTable = Arc<Mutex<PidTable>>; pub type ArcMutFDStore = Arc<RwLock<FDStore>>; pub struct RuntimeInfo { pub unit_table: ArcMutUnitTable, pub status_table: ArcMutStatusTable, pub pid_table: ArcMutPidTable, pub fd_store: ArcMutFDStore, pub config: crate::config::Config, pub last_id: Arc<Mutex<u64>>, } pub type ArcRuntimeInfo = Arc<RuntimeInfo>; pub fn lock_all( units: &mut Vec<(UnitId, Arc<Mutex<Unit>>)>, ) -> HashMap<UnitId, std::sync::MutexGuard<'_, Unit>> { let mut units_locked = HashMap::new(); units.sort_by(|(lid, _), (rid, _)| lid.cmp(rid)); for (id, unit) in units { trace!("Lock unit: {:?}", id); let other_unit_locked = unit.lock().unwrap(); trace!("Locked unit: {:?}", id); 
units_locked.insert(id.clone(), other_unit_locked); } units_locked } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum PidEntry { Service(UnitId, ServiceType), OneshotExited(crate::signal_handler::ChildTermination), Helper(UnitId, String), HelperExited(crate::signal_handler::ChildTermination), } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum UnitStatus { NeverStarted, Starting, Started, StartedWaitingForSocket, Stopping, Stopped, StoppedFinal(String), } #[derive(Debug)] pub enum UnitSpecialized { Socket(Socket), Service(Service), Target, } #[derive(Debug, Default)] pub struct Install { pub wants: Vec<UnitId>, pub requires: Vec<UnitId>, pub wanted_by: Vec<UnitId>, pub required_by: Vec<UnitId>, pub before: Vec<UnitId>, pub after: Vec<UnitId>, pub install_config: Option<InstallConfig>, } pub struct Unit { pub id: UnitId, pub conf: UnitConfig, pub specialized: UnitSpecialized, pub install: Install, } impl Unit { pub fn is_service(&self) -> bool { if let UnitSpecialized::Service(_) = self.specialized { true } else { false } } pub fn is_socket(&self) -> bool { if let UnitSpecialized::Socket(_) = self.specialized { true } else { false } } pub fn is_target(&self) -> bool { if let UnitSpecialized::Target = self.specialized { true } else { false } } pub fn dedup_dependencies(&mut self) { self.install.wants.sort(); self.install.wanted_by.sort(); self.install.required_by.sort(); self.install.before.sort(); self.install.after.sort(); self.install.requires.sort(); self.install.wants.dedup(); self.install.requires.dedup(); self.install.wanted_by.dedup(); self.install.required_by.dedup(); self.install.before.dedup(); self.install.after.dedup(); } pub fn activate( &mut self, run_info: ArcRuntimeInfo, notification_socket_path: std::path::PathBuf, eventfds: &[EventFd], allow_ignore: bool, ) -> Result<UnitStatus, UnitOperationError> { match &mut self.specialized { UnitSpecialized::Target => trace!("Reached target {}", self.conf.name()), UnitSpecialized::Socket(sock) => { 
sock.open_all( self.conf.name(), self.id, &mut *run_info.fd_store.write().unwrap(), ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketOpenError(format!("{}", e)), })?; } UnitSpecialized::Service(srvc) => { match srvc .start( self.id, &self.conf.name(), run_info, notification_socket_path, eventfds, allow_ignore, ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStartError(e), })? { crate::services::StartResult::Started => return Ok(UnitStatus::Started), crate::services::StartResult::WaitingForSocket => { return Ok(UnitStatus::StartedWaitingForSocket) } } } } Ok(UnitStatus::Started) } pub fn deactivate(&mut self, run_info: ArcRuntimeInfo) -> Result<(), UnitOperationError> { trace!("Deactivate unit: {}", self.conf.name()); match &mut self.specialized { UnitSpecialized::Target => { /* nothing to do */ } UnitSpecialized::Socket(sock) => { sock.close_all(self.conf.name(), &mut *run_info.fd_store.write().unwrap()) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketCloseError(e), })?; } UnitSpecialized::Service(srvc) => { srvc.kill(self.id, &self.conf.name(), run_info) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStopError(e), })?; } } Ok(()) } } #[derive(Debug)] pub struct UnitConfig { pub filepath: PathBuf, pub description: String, pub wants: Vec<String>, pub requires: Vec<String>, pub before: Vec<String>, pub after: Vec<String>, } impl UnitConfig { pub fn name(&self) -> String { let name = self .filepath .file_name() .unwrap() .to_str() .unwrap() .to_owned(); name } pub fn name_without_suffix(&self) -> String { let name = self.name(); let split: Vec<_> = name.split('.').collect(); split[0..split.len() - 1].join(".") } } #[derive(Clone)] pub struct SocketConfig { pub kind: 
SocketKind, pub specialized: SpecializedSocketConfig, } impl fmt::Debug for SocketConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { write!( f, "SocketConfig {{ kind: {:?}, specialized: {:?} }}", self.kind, self.specialized )?; Ok(()) } } unsafe impl Send for SocketConfig {} #[derive(Debug)] pub struct InstallConfig { pub wanted_by: Vec<String>, pub required_by: Vec<String>, } #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum ServiceType { Simple, Notify, Dbus, OneShot, } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum NotifyKind { Main, Exec, All, None, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum ServiceRestart { Always, No, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum Timeout { Duration(std::time::Duration), Infinity, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ExecConfig { pub user: Option<String>, pub group: Option<String>, pub supplementary_groups: Vec<String>, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum CommandlinePrefix { AtSign, Minus, Colon, Plus, Exclamation, DoubleExclamation, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct Commandline { pub cmd: String, pub args: Vec<String>, pub prefixes: Vec<CommandlinePrefix>, } impl ToString for Commandline { fn to_string(&self) -> String { format!("{:?}", self) } } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ServiceConfig { pub restart: ServiceRestart, pub accept: bool, pub notifyaccess: NotifyKind, pub exec: Commandline, pub stop: Vec<Commandline>, pub stoppost: Vec<Commandline>, pub startpre: Vec<Commandline>, pub startpost: Vec<Commandline>, pub srcv_type: ServiceType, pub starttimeout: Option<Timeout>, pub stoptimeout: Option<Timeout>, pub generaltimeout: Option<Timeout>, pub exec_config: ExecConfig, pub dbus_name: Option<String>, pub sockets: Vec<String>, }
use crate::fd_store::FDStore; use crate::platform::EventFd; use crate::services::Service; use crate::sockets::{Socket, SocketKind, SpecializedSocketConfig}; use crate::units::*; use nix::unistd::Pid; use std::collections::HashMap; use std::sync::{Arc, Mutex, RwLock}; use std::{fmt, path::PathBuf}; #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum UnitIdKind { Target, Socket, Service, } #[derive(Clone, Copy, Eq, PartialEq, Hash)] pub struct UnitId(pub UnitIdKind, pub u64); impl fmt::Debug for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{}", self.1).as_str()) } } impl fmt::Display for UnitId { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.write_str(format!("{:?}", self).as_str()) } } impl std::cmp::PartialOrd for UnitId { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.1.partial_cmp(&other.1) } } impl std::cmp::Ord for UnitId { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.1.cmp(&other.1) } } pub type UnitTable = HashMap<UnitId, Arc<Mutex<Unit>>>; pub type ArcMutUnitTable = Arc<RwLock<UnitTable>>; pub type StatusTable = HashMap<UnitId, Arc<Mutex<UnitStatus>>>; pub type ArcMutStatusTable = Arc<RwLock<StatusTable>>; pub type PidTable = HashMap<Pid, PidEntry>; pub type ArcMutPidTable = Arc<Mutex<PidTable>>; pub type ArcMutFDStore = Arc<RwLock<FDStore>>; pub struct RuntimeInfo { pub unit_table: ArcMutUnitTable, pub status_table: ArcMutStatusTable, pub pid_table: ArcMutPidTable, pub fd_store: ArcMutFDStore, pub config: crate::config::Config, pub last_id: Arc<Mutex<u64>>, } pub type ArcRuntimeInfo = Arc<RuntimeInfo>; pub fn lock_all( units: &mut Vec<(UnitId, Arc<Mutex<Unit>>)>, ) -> HashMap<UnitId, std::sync::MutexGuard<'_, Unit>> { let mut units_locked = HashMap::new(); units.sort_by(|(lid, _), (rid, _)| lid.cmp(rid)); for (id, unit) in units { trace!("Lock unit: {:?}", id); let other_unit_locked = unit.lock().unwrap(); trace!("Locked unit: {:?}", id); 
units_locked.insert(id.clone(), other_unit_locked); } units_locked } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum PidEntry { Service(UnitId, ServiceType), OneshotExited(crate::signal_handler::ChildTermination), Helper(UnitId, String), HelperExited(crate::signal_handler::ChildTermination), } #[derive(Clone, Eq, PartialEq, Hash, Debug)] pub enum UnitStatus { NeverStarted, Starting, Started, StartedWaitingForSocket, Stopping, Stopped, StoppedFinal(String), } #[derive(Debug)] pub enum UnitSpecialized { Socket(Socket), Service(Service), Target, } #[derive(Debug, Default)] pub struct Install { pub wants: Vec<UnitId>, pub requires: Vec<UnitId>, pub wanted_by: Vec<UnitId>, pub required_by: Vec<UnitId>, pub before: Vec<UnitId>, pub after: Vec<UnitId>, pub install_config: Option<InstallConfig>, } pub struct Unit { pub id: UnitId, pub conf: UnitConfig, pub specialized: UnitSpecialized, pub install: Install, } impl Unit { pub fn is_service(&self) -> bool { if let UnitSpecialized::Service(_) = self.specialized { true } else { false } } pub fn is_socket(&self) -> bool { if let UnitSpecialized::Socket(_) = self.specialized { true } else { false } } pub fn is_target(&self) -> bool { if let UnitSpecialized::Target = self.specialized { true } else { false } } pub fn dedup_dependencies(&mut self) { self.install.wants.sort(); self.install.wanted_by.sort(); self.install.required_by.sort(); self.install.before.sort(); self.install.after.sort(); self.install.requires.sort(); self.install.wants.dedup(); self.install.requires.dedup(); self.install.wanted_by.dedup(); self.install.required_by.dedup(); self.install.before.dedup(); self.install.after.dedup(); } pub fn activate( &mut self, run_info: ArcRuntimeInfo, notification_socket_path: std::path::PathBuf, eventfds: &[EventFd], allow_ignore: bool, ) -> Result<UnitStatus, UnitOperationError> { match &mut self.specialized { UnitSpecialized::Target => trace!("Reached target {}", self.conf.name()), UnitSpecialized::Socket(sock) => { 
sock.open_all( self.conf.name(), self.id, &mut *run_info.fd_store.write().unwrap(), ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketOpenError(format!("{}", e)), })?; } UnitSpecialized::Service(srvc) => { match srvc .start( self.id, &self.conf.name(), run_info, notification_socket_path, eventfds, allow_ignore, ) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStartError(e), })? { crate::services::StartResult::Started => return Ok(UnitStatus::Started), crate::services::StartResult::WaitingForSocket => { return Ok(UnitStatus::StartedWaitingForSocket) } } } } Ok(UnitStatus::Started) }
} #[derive(Debug)] pub struct UnitConfig { pub filepath: PathBuf, pub description: String, pub wants: Vec<String>, pub requires: Vec<String>, pub before: Vec<String>, pub after: Vec<String>, } impl UnitConfig { pub fn name(&self) -> String { let name = self .filepath .file_name() .unwrap() .to_str() .unwrap() .to_owned(); name } pub fn name_without_suffix(&self) -> String { let name = self.name(); let split: Vec<_> = name.split('.').collect(); split[0..split.len() - 1].join(".") } } #[derive(Clone)] pub struct SocketConfig { pub kind: SocketKind, pub specialized: SpecializedSocketConfig, } impl fmt::Debug for SocketConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { write!( f, "SocketConfig {{ kind: {:?}, specialized: {:?} }}", self.kind, self.specialized )?; Ok(()) } } unsafe impl Send for SocketConfig {} #[derive(Debug)] pub struct InstallConfig { pub wanted_by: Vec<String>, pub required_by: Vec<String>, } #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub enum ServiceType { Simple, Notify, Dbus, OneShot, } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum NotifyKind { Main, Exec, All, None, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum ServiceRestart { Always, No, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum Timeout { Duration(std::time::Duration), Infinity, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ExecConfig { pub user: Option<String>, pub group: Option<String>, pub supplementary_groups: Vec<String>, } #[derive(Clone, Eq, PartialEq, Debug)] pub enum CommandlinePrefix { AtSign, Minus, Colon, Plus, Exclamation, DoubleExclamation, } #[derive(Clone, Eq, PartialEq, Debug)] pub struct Commandline { pub cmd: String, pub args: Vec<String>, pub prefixes: Vec<CommandlinePrefix>, } impl ToString for Commandline { fn to_string(&self) -> String { format!("{:?}", self) } } #[derive(Clone, Eq, PartialEq, Debug)] pub struct ServiceConfig { pub restart: ServiceRestart, pub accept: bool, pub notifyaccess: NotifyKind, pub 
exec: Commandline, pub stop: Vec<Commandline>, pub stoppost: Vec<Commandline>, pub startpre: Vec<Commandline>, pub startpost: Vec<Commandline>, pub srcv_type: ServiceType, pub starttimeout: Option<Timeout>, pub stoptimeout: Option<Timeout>, pub generaltimeout: Option<Timeout>, pub exec_config: ExecConfig, pub dbus_name: Option<String>, pub sockets: Vec<String>, }
pub fn deactivate(&mut self, run_info: ArcRuntimeInfo) -> Result<(), UnitOperationError> { trace!("Deactivate unit: {}", self.conf.name()); match &mut self.specialized { UnitSpecialized::Target => { /* nothing to do */ } UnitSpecialized::Socket(sock) => { sock.close_all(self.conf.name(), &mut *run_info.fd_store.write().unwrap()) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::SocketCloseError(e), })?; } UnitSpecialized::Service(srvc) => { srvc.kill(self.id, &self.conf.name(), run_info) .map_err(|e| UnitOperationError { unit_name: self.conf.name(), unit_id: self.id, reason: UnitOperationErrorReason::ServiceStopError(e), })?; } } Ok(()) }
function_block-full_function
[ { "content": "// make edges between units visible on bot sides: required <-> required_by after <-> before\n\npub fn fill_dependencies(units: &mut HashMap<UnitId, Unit>) {\n\n let mut name_to_id = HashMap::new();\n\n\n\n for (id, unit) in &*units {\n\n let name = unit.conf.name();\n\n name_t...
Rust
tests/roundtrip.rs
rrbutani/tower-web-protobuf
6787af73a44f5d4873d58dced0cbe11805cf7767
mod common; use common::*; use reqwest::{Client, StatusCode}; use std::io::Read; use std::net::SocketAddr; use tower_web_protobuf::MessagePlus; #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Format { Protobuf, Json, } impl Format { fn get_header(self) -> &'static str { match self { Format::Protobuf => "application/protobuf", Format::Json => "application/json", } } fn encode<M: MessagePlus>(self, data: &M) -> Vec<u8> { match self { Format::Protobuf => { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); buf } Format::Json => serde_json::to_vec_pretty(&data).unwrap(), } } fn decode<M: MessagePlus>(self, data: &[u8]) -> M { match self { Format::Protobuf => M::decode(data).unwrap(), Format::Json => serde_json::from_slice(data).unwrap(), } } } fn identity_test<T: MessagePlus + PartialEq + Clone>( uri: String, send: Format, receive: Format, socket: SocketAddr, ) -> impl Fn(T) { move |data: T| { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); let mut resp = Client::new() .get(format!("http://{}:{}{}", socket.ip(), socket.port(), uri).as_str()) .header("Content-Type", send.get_header()) .header("Accept", receive.get_header()) .body(send.encode(&data)) .send() .unwrap(); assert_eq!(resp.status(), StatusCode::OK); assert_eq!( resp.headers() .get("Content-Type") .unwrap() .to_str() .unwrap(), receive.get_header() ); let mut buf = Vec::new(); assert!(resp.read_to_end(&mut buf).is_ok()); assert_eq!(data, receive.decode(&mut buf)); } } #[test] fn identity_tests() { run_service_test((true, true), |socket| { use Format::*; const FORMATS: [Format; 2] = [Json, Protobuf]; fn endpoint_test<T: MessagePlus + PartialEq + Clone>( endpoint: &'static str, socket: SocketAddr, val: T, ) { FORMATS.iter().for_each(|inp| { FORMATS .iter() .for_each(|out| identity_test(endpoint.into(), *inp, *out, socket)(val.clone())) }); } endpoint_test( "/identity/track/", *socket, Track { name: "4′33″".into(), length: (4.0 * 60.0 + 
33.333), number: 1, id: 0, }, ); endpoint_test( "/identity/album/", *socket, Album { name: "In Colour".into(), id: 2015, album_type: 2, tracks: vec![ Track { name: "Sleep Sound".into(), length: (3 * 60 + 52) as f32, number: 2, id: 947, }, Track { name: "Loud Places".into(), length: (4 * 60 + 43) as f32, number: 8, id: 1056, }, ], }, ); }); }
mod common; use common::*; use reqwest::{Client, StatusCode}; use std::io::Read; use std::net::SocketAddr; use tower_web_protobuf::MessagePlus; #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Format { Protobuf, Json, } impl Format { fn get_header(self) -> &'static str { match self { Format::Protobuf => "application/protobuf", Format::Json => "application/json", } } fn encode<M: MessagePlus>(self, data: &M) -> Vec<u8> { match self { Format::Protobuf => { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); buf } Format::Json => serde_json::to_vec_pretty(&data).unwrap(), } } fn decode<M: MessagePlus>(self, data: &[u8]) -> M { match self { Format::Protobuf => M::decode(data).unwrap(), Format::Json => serde_json::from_slice(data).unwrap(), } } } fn identity_test<T: MessagePlus + PartialEq + Clone>( uri: String, send: Format, receive: Format, socket: SocketAddr, ) -> impl Fn(T) { move |data: T| { let mut buf = Vec::with_capacity(data.encoded_len()); data.encode(&mut buf).unwrap(); let mut resp = Client::new() .get(format!("http://{}:{}{}", socket.ip(), socket.port(), uri).as_str()) .header("Content-Type", send.get_header()) .header("Accept", receive.get_header()) .body(send.encode(&data)) .send() .unwrap(); assert_eq!(resp.status(), StatusCode::OK); assert_eq!( resp.headers() .get("Content-Type") .unwrap() .to_str() .unwrap(), receive.get_header() ); let mut buf = Vec::new(); assert!(resp.read_to_end(&mut buf).is_ok()); assert_eq!(data, receive.decode(&mut buf)); } } #[test] fn identity_tests() { run_service_test((true, true),
id: 0, }, ); endpoint_test( "/identity/album/", *socket, Album { name: "In Colour".into(), id: 2015, album_type: 2, tracks: vec![ Track { name: "Sleep Sound".into(), length: (3 * 60 + 52) as f32, number: 2, id: 947, }, Track { name: "Loud Places".into(), length: (4 * 60 + 43) as f32, number: 8, id: 1056, }, ], }, ); }); }
|socket| { use Format::*; const FORMATS: [Format; 2] = [Json, Protobuf]; fn endpoint_test<T: MessagePlus + PartialEq + Clone>( endpoint: &'static str, socket: SocketAddr, val: T, ) { FORMATS.iter().for_each(|inp| { FORMATS .iter() .for_each(|out| identity_test(endpoint.into(), *inp, *out, socket)(val.clone())) }); } endpoint_test( "/identity/track/", *socket, Track { name: "4′33″".into(), length: (4.0 * 60.0 + 33.333), number: 1,
function_block-random_span
[ { "content": "fn setup(options: (bool, bool), socket: &SocketAddr) {\n\n ServiceBuilder::new()\n\n .resource(MusicService::new())\n\n .middleware(ProtobufMiddleware::new(options.0, options.1))\n\n .run(&socket)\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/common.rs", "r...
Rust
oot-explorer-demo/src/reflect_text.rs
mvanbem/oot-explorer
a2574ac5d2c3b2eb8bc6229e887b0ad7ff0fe732
use oot_explorer_read::{FromVrom, ReadError}; use oot_explorer_reflect::{ BitfieldDescriptor, EnumDescriptor, FieldDescriptor, PointerDescriptor, PrimitiveType, StructDescriptor, StructFieldLocation, TypeDescriptor, UnionDescriptor, }; use oot_explorer_segment::{SegmentAddr, SegmentTable}; use oot_explorer_vrom::{Vrom, VromAddr}; pub fn dump( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: TypeDescriptor, addr: VromAddr, indent_level: usize, ) { match desc { TypeDescriptor::Struct(desc) => { dump_struct(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Union(desc) => { dump_union(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Enum(desc) => dump_enum(vrom, desc, addr), TypeDescriptor::Bitfield(desc) => dump_bitfield(vrom, desc, addr), TypeDescriptor::Primitive(desc) => dump_primitive(vrom, desc, addr), TypeDescriptor::Pointer(desc) => { dump_pointer(vrom, segment_table, desc, addr, indent_level) } } } fn dump_bitfield(vrom: Vrom<'_>, desc: &'static BitfieldDescriptor, addr: VromAddr) -> () { let value = match desc.underlying.read_as_u32(vrom, addr) { Ok(value) => value, Err(e) => { print!("{}", e); return; } }; let mut first = true; for field in desc.fields { if first { first = false; } else { print!(" | "); } let value = (value >> field.shift) & field.mask; print!("{}", value); } } fn dump_enum(vrom: Vrom<'_>, desc: &'static EnumDescriptor, addr: VromAddr) { match desc.underlying.read_as_u32(vrom, addr) { Ok(value) => match desc.values.binary_search_by_key(&value, |&(x, _)| x) { Ok(index) => print!("{}", desc.values[index].1), Err(_) => print!("(unknown value 0x{:x}", value), }, Err(e) => print!("{}", e), } } fn dump_primitive(vrom: Vrom<'_>, desc: PrimitiveType, addr: VromAddr) -> () { let try_print = || { match desc { PrimitiveType::Bool => print!("{}", bool::from_vrom(vrom, addr)?), PrimitiveType::U8 => print!("{}", u8::from_vrom(vrom, addr)?), PrimitiveType::I8 => print!("{}", i8::from_vrom(vrom, addr)?), 
PrimitiveType::U16 => { print!("{}", u16::from_vrom(vrom, addr)?) } PrimitiveType::I16 => { print!("{}", i16::from_vrom(vrom, addr)?) } PrimitiveType::U32 => { print!("{}", u32::from_vrom(vrom, addr)?) } PrimitiveType::I32 => { print!("{}", i32::from_vrom(vrom, addr)?) } PrimitiveType::VromAddr => { print!("{:?}", VromAddr::from_vrom(vrom, addr)?) } PrimitiveType::SegmentAddr => { print!("{:?}", SegmentAddr::from_vrom(vrom, addr)?) } } Result::<(), ReadError>::Ok(()) }; if let Err(e) = try_print() { print!("{}", e); } } fn dump_pointer( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static PointerDescriptor, addr: VromAddr, indent_level: usize, ) { let segment_addr = match SegmentAddr::from_vrom(vrom, addr) { Ok(segment_addr) => segment_addr, Err(e) => { print!("{}", e); return; } }; let vrom_addr = match segment_table.resolve(segment_addr) { Ok(vrom_addr) => vrom_addr, Err(e) => { print!("{}", e); return; } }; print!("&"); dump(vrom, segment_table, desc.target, vrom_addr, indent_level) } fn dump_struct( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static StructDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level + 1); } print!("{}}}", indent); } fn dump_union( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); dump_union_body(vrom, segment_table, desc, addr, indent_level + 1); print!("{}}}", indent); } fn dump_union_body( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); let discriminant_addr = addr + desc.discriminant_offset; 
match desc .discriminant_desc .read_as_u32(vrom, discriminant_addr) .expect("enum discriminant must be readable as u32") { Ok(discriminant) => { print!( "{}(0x{:08x}) discriminant: {} = ", indent, discriminant_addr.0, desc.discriminant_desc.name(), ); dump( vrom, segment_table, desc.discriminant_desc, discriminant_addr, indent_level, ); println!(" (0x{:x})", discriminant); match desc .variants .binary_search_by_key(&discriminant, |&(x, _)| x) { Ok(index) => match desc.variants[index].1 { TypeDescriptor::Struct(desc) => { for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level); } } TypeDescriptor::Union(desc) => { dump_union_body(vrom, segment_table, desc, addr, indent_level); } _ => unimplemented!( "variant `{}` of union `{}` is not a struct or union", desc.variants[index].1.name(), desc.name, ), }, Err(_) => { println!("{}(unknown variant)", indent); } } } Err(e) => { println!("{}{}", indent, e); } } } fn dump_field( vrom: Vrom<'_>, segment_table: &SegmentTable, field: &'static FieldDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); match field.location { StructFieldLocation::Simple { offset } => { let addr = addr + offset; print!( "{}(0x{:08x}) {}: {} = ", indent, addr.0, field.name, field.desc.name(), ); dump(vrom, segment_table, field.desc, addr, indent_level); println!(); } StructFieldLocation::Slice { count_offset, count_desc, ptr_offset, } => { let count_addr = addr + count_offset; print!( "{}(0x{:08x}) {}_count: {} = ", indent, count_addr.0, field.name, count_desc.name(), ); let count = match count_desc.read_as_u32(vrom, count_addr) { Ok(count) => { println!("{}", count); Some(count) } Err(e) => { println!("{}", e); None } }; let ptr_addr = addr + ptr_offset; print!( "{}(0x{:08x}) {}_ptr: &{} = ", indent, ptr_addr.0, field.name, field.desc.name(), ); let segment_ptr = match SegmentAddr::from_vrom(vrom, ptr_addr) { Ok(segment_ptr) => { 
println!("{:?}", segment_ptr); Some(segment_ptr) } Err(e) => { println!("{}", e); None } }; if let (Some(count), Some(segment_ptr)) = (count, segment_ptr) { match segment_table.resolve(segment_ptr) { Ok(mut vrom_addr) => { println!( "{}{}: &[{}; {}] = &[", indent, field.name, field.desc.name(), count, ); for _ in 0..count { print!("{} (0x{:08x}) ", indent, vrom_addr.0); dump(vrom, segment_table, field.desc, vrom_addr, indent_level + 1); println!(); vrom_addr += match field.desc.size() { Some(size) => size, None => panic!( "slice element {} has no size, referenced from field {}", field.desc.name(), field.name, ), }; } println!("{}]", indent) } Err(e) => { print!( "{}{}: &[{}; {}] = {}", indent, field.name, field.desc.name(), count, e, ); } } } } StructFieldLocation::InlineDelimitedList { offset } => { let mut addr = addr + offset; println!( "{}(0x{:08x}) {}: [{}; N] = [", indent, addr.0, field.name, field.desc.name(), ); loop { print!("{} (0x{:08x}) ", indent, addr.0); dump(vrom, segment_table, field.desc, addr, indent_level + 1); println!(); if (field .desc .is_end() .expect("inline delimited list element has no is_end"))( vrom, addr ) { break; } addr += field .desc .size() .expect("inline delimited list element has no size"); } println!("{}]", indent) } } }
use oot_explorer_read::{FromVrom, ReadError}; use oot_explorer_reflect::{ BitfieldDescriptor, EnumDescriptor, FieldDescriptor, PointerDescriptor, PrimitiveType, StructDescriptor, StructFieldLocation, TypeDescriptor, UnionDescriptor, }; use oot_explorer_segment::{SegmentAddr, SegmentTable}; use oot_explorer_vrom::{Vrom, VromAddr}; pub fn dump( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: TypeDescriptor, addr: VromAddr, indent_level: usize, ) { match desc { TypeDescriptor::Struct(desc) => { dump_struct(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Union(desc) => { dump_union(vrom, segment_table, desc, addr, indent_level); } TypeDescriptor::Enum(desc) => dump_enum(vrom, desc, addr), TypeDescriptor::Bitfield(desc) => dump_bitfield(vrom, desc, addr), TypeDescriptor::Primitive(desc) => dump_primitive(vrom, desc, addr), TypeDescriptor::Pointer(desc) => { dump_pointer(vrom, segment_table, desc, addr, indent_level) } } } fn dump_bitfield(vrom: Vrom<'_>, desc: &'static BitfieldDescriptor, addr: VromAddr) -> () { let value = match desc.underlying.read_as_u32(vrom, addr) { Ok(value) => value, Err(e) => { print!("{}", e); return; } }; let mut first = true; for field in desc.fields { if first { first = false; } else { print!(" | "); } let value = (value >> field.shift) & field.mask; print!("{}", value); } } fn dump_enum(vrom: Vrom<'_>, desc: &'static EnumDescriptor, addr: VromAddr) { match desc.underlying.read_as_u32(vrom, addr) { Ok(value) =>
, Err(e) => print!("{}", e), } } fn dump_primitive(vrom: Vrom<'_>, desc: PrimitiveType, addr: VromAddr) -> () { let try_print = || { match desc { PrimitiveType::Bool => print!("{}", bool::from_vrom(vrom, addr)?), PrimitiveType::U8 => print!("{}", u8::from_vrom(vrom, addr)?), PrimitiveType::I8 => print!("{}", i8::from_vrom(vrom, addr)?), PrimitiveType::U16 => { print!("{}", u16::from_vrom(vrom, addr)?) } PrimitiveType::I16 => { print!("{}", i16::from_vrom(vrom, addr)?) } PrimitiveType::U32 => { print!("{}", u32::from_vrom(vrom, addr)?) } PrimitiveType::I32 => { print!("{}", i32::from_vrom(vrom, addr)?) } PrimitiveType::VromAddr => { print!("{:?}", VromAddr::from_vrom(vrom, addr)?) } PrimitiveType::SegmentAddr => { print!("{:?}", SegmentAddr::from_vrom(vrom, addr)?) } } Result::<(), ReadError>::Ok(()) }; if let Err(e) = try_print() { print!("{}", e); } } fn dump_pointer( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static PointerDescriptor, addr: VromAddr, indent_level: usize, ) { let segment_addr = match SegmentAddr::from_vrom(vrom, addr) { Ok(segment_addr) => segment_addr, Err(e) => { print!("{}", e); return; } }; let vrom_addr = match segment_table.resolve(segment_addr) { Ok(vrom_addr) => vrom_addr, Err(e) => { print!("{}", e); return; } }; print!("&"); dump(vrom, segment_table, desc.target, vrom_addr, indent_level) } fn dump_struct( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static StructDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level + 1); } print!("{}}}", indent); } fn dump_union( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); println!("{} {{", desc.name); dump_union_body(vrom, segment_table, 
desc, addr, indent_level + 1); print!("{}}}", indent); } fn dump_union_body( vrom: Vrom<'_>, segment_table: &SegmentTable, desc: &'static UnionDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); let discriminant_addr = addr + desc.discriminant_offset; match desc .discriminant_desc .read_as_u32(vrom, discriminant_addr) .expect("enum discriminant must be readable as u32") { Ok(discriminant) => { print!( "{}(0x{:08x}) discriminant: {} = ", indent, discriminant_addr.0, desc.discriminant_desc.name(), ); dump( vrom, segment_table, desc.discriminant_desc, discriminant_addr, indent_level, ); println!(" (0x{:x})", discriminant); match desc .variants .binary_search_by_key(&discriminant, |&(x, _)| x) { Ok(index) => match desc.variants[index].1 { TypeDescriptor::Struct(desc) => { for field in desc.fields { dump_field(vrom, segment_table, field, addr, indent_level); } } TypeDescriptor::Union(desc) => { dump_union_body(vrom, segment_table, desc, addr, indent_level); } _ => unimplemented!( "variant `{}` of union `{}` is not a struct or union", desc.variants[index].1.name(), desc.name, ), }, Err(_) => { println!("{}(unknown variant)", indent); } } } Err(e) => { println!("{}{}", indent, e); } } } fn dump_field( vrom: Vrom<'_>, segment_table: &SegmentTable, field: &'static FieldDescriptor, addr: VromAddr, indent_level: usize, ) { let indent = std::iter::repeat(' ') .take(4 * indent_level) .collect::<String>(); match field.location { StructFieldLocation::Simple { offset } => { let addr = addr + offset; print!( "{}(0x{:08x}) {}: {} = ", indent, addr.0, field.name, field.desc.name(), ); dump(vrom, segment_table, field.desc, addr, indent_level); println!(); } StructFieldLocation::Slice { count_offset, count_desc, ptr_offset, } => { let count_addr = addr + count_offset; print!( "{}(0x{:08x}) {}_count: {} = ", indent, count_addr.0, field.name, count_desc.name(), ); let count = match 
count_desc.read_as_u32(vrom, count_addr) { Ok(count) => { println!("{}", count); Some(count) } Err(e) => { println!("{}", e); None } }; let ptr_addr = addr + ptr_offset; print!( "{}(0x{:08x}) {}_ptr: &{} = ", indent, ptr_addr.0, field.name, field.desc.name(), ); let segment_ptr = match SegmentAddr::from_vrom(vrom, ptr_addr) { Ok(segment_ptr) => { println!("{:?}", segment_ptr); Some(segment_ptr) } Err(e) => { println!("{}", e); None } }; if let (Some(count), Some(segment_ptr)) = (count, segment_ptr) { match segment_table.resolve(segment_ptr) { Ok(mut vrom_addr) => { println!( "{}{}: &[{}; {}] = &[", indent, field.name, field.desc.name(), count, ); for _ in 0..count { print!("{} (0x{:08x}) ", indent, vrom_addr.0); dump(vrom, segment_table, field.desc, vrom_addr, indent_level + 1); println!(); vrom_addr += match field.desc.size() { Some(size) => size, None => panic!( "slice element {} has no size, referenced from field {}", field.desc.name(), field.name, ), }; } println!("{}]", indent) } Err(e) => { print!( "{}{}: &[{}; {}] = {}", indent, field.name, field.desc.name(), count, e, ); } } } } StructFieldLocation::InlineDelimitedList { offset } => { let mut addr = addr + offset; println!( "{}(0x{:08x}) {}: [{}; N] = [", indent, addr.0, field.name, field.desc.name(), ); loop { print!("{} (0x{:08x}) ", indent, addr.0); dump(vrom, segment_table, field.desc, addr, indent_level + 1); println!(); if (field .desc .is_end() .expect("inline delimited list element has no is_end"))( vrom, addr ) { break; } addr += field .desc .size() .expect("inline delimited list element has no size"); } println!("{}]", indent) } } }
match desc.values.binary_search_by_key(&value, |&(x, _)| x) { Ok(index) => print!("{}", desc.values[index].1), Err(_) => print!("(unknown value 0x{:x}", value), }
if_condition
[ { "content": "pub fn is_end<T>(vrom: Vrom<'_>, addr: VromAddr) -> bool\n\nwhere\n\n T: FromVrom + Layout + Sentinel,\n\n{\n\n match T::from_vrom(vrom, addr) {\n\n Ok(value) => value.is_end(vrom),\n\n Err(_) => true,\n\n }\n\n}\n", "file_path": "oot-explorer-read/src/sentinel.rs", ...
Rust
src/trace/collector.rs
piercetrey-figure/minitrace-rust
1ab75c3399b07b86d8e142a8e5a81f1f0d1d9f17
use crossbeam::channel::Receiver; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; use crate::span::Span; use crate::span::{Anchor, DefaultClock}; use crate::trace::acquirer::SpanCollection; pub struct Collector { receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>, } impl Collector { pub(crate) fn new(receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>) -> Self { Collector { receiver, closed } } pub fn collect(self) -> Vec<Span> { self.collect_with_args(CollectArgs { sync: false, duration_threshold: None, }) } pub fn collect_with_args( self, CollectArgs { sync, duration_threshold, }: CollectArgs, ) -> Vec<Span> { let span_collections: Vec<_> = if sync { self.receiver.iter().collect() } else { self.receiver.try_iter().collect() }; self.closed.store(true, Ordering::SeqCst); let anchor = DefaultClock::anchor(); if let Some(duration) = duration_threshold { if let Some(root_span) = span_collections.iter().find_map(|s| match s { SpanCollection::Span(s) if s.parent_id.0 == 0 => Some(s), _ => None, }) { let root_span = root_span.clone().into_span(anchor); if root_span.duration_ns < duration.as_nanos() as _ { return vec![root_span]; } } } Self::amend(span_collections, anchor) } } impl Collector { #[inline] fn amend(span_collections: Vec<SpanCollection>, anchor: Anchor) -> Vec<Span> { let capacity = span_collections .iter() .map(|sc| match sc { SpanCollection::LocalSpans { local_spans: raw_spans, .. 
} => raw_spans.spans.len(), SpanCollection::Span(_) => 1, }) .sum(); let mut spans = Vec::with_capacity(capacity); for span_collection in span_collections { match span_collection { SpanCollection::LocalSpans { local_spans: raw_spans, parent_id_of_root: span_id, } => { for span in &raw_spans.spans { let begin_unix_time_ns = DefaultClock::cycle_to_unix_time_ns(span.begin_cycle, anchor); let end_unix_time_ns = if span.end_cycle.is_zero() { DefaultClock::cycle_to_unix_time_ns(raw_spans.end_time, anchor) } else { DefaultClock::cycle_to_unix_time_ns(span.end_cycle, anchor) }; let parent_id = if span.parent_id.0 == 0 { span_id.0 } else { span.parent_id.0 }; spans.push(Span { id: span.id.0, parent_id, begin_unix_time_ns, duration_ns: end_unix_time_ns - begin_unix_time_ns, event: span.event, properties: span.properties.clone(), }); } } SpanCollection::Span(span) => spans.push(span.into_span(anchor)), } } spans } } #[derive(Default, Debug)] pub struct CollectArgs { sync: bool, duration_threshold: Option<Duration>, } impl CollectArgs { pub fn sync(self, sync: bool) -> Self { Self { sync, ..self } } pub fn duration_threshold(self, duration_threshold: Duration) -> Self { Self { duration_threshold: Some(duration_threshold), ..self } } }
use crossbeam::channel::Receiver; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; use crate::span::Span; use crate::span::{Anchor, DefaultClock}; use crate::trace::acquirer::SpanCollection; pub struct Collector { receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>, } impl Collector { pub(crate) fn new(receiver: Receiver<SpanCollection>, closed: Arc<AtomicBool>) -> Self { Collector { receiver, closed } } pub fn collect(self) -> Vec<Span> { self.collect_with_args(CollectArgs { sync: false, duration_threshold: None, }) } pub fn collect_with_args( self, CollectArgs { sync, duration_threshold, }: CollectArgs, ) -> Vec<Span> { let span_collections: Vec<_> = if sync { self.receiver.iter().collect() } else { self.receiver.try_it
} impl Collector { #[inline] fn amend(span_collections: Vec<SpanCollection>, anchor: Anchor) -> Vec<Span> { let capacity = span_collections .iter() .map(|sc| match sc { SpanCollection::LocalSpans { local_spans: raw_spans, .. } => raw_spans.spans.len(), SpanCollection::Span(_) => 1, }) .sum(); let mut spans = Vec::with_capacity(capacity); for span_collection in span_collections { match span_collection { SpanCollection::LocalSpans { local_spans: raw_spans, parent_id_of_root: span_id, } => { for span in &raw_spans.spans { let begin_unix_time_ns = DefaultClock::cycle_to_unix_time_ns(span.begin_cycle, anchor); let end_unix_time_ns = if span.end_cycle.is_zero() { DefaultClock::cycle_to_unix_time_ns(raw_spans.end_time, anchor) } else { DefaultClock::cycle_to_unix_time_ns(span.end_cycle, anchor) }; let parent_id = if span.parent_id.0 == 0 { span_id.0 } else { span.parent_id.0 }; spans.push(Span { id: span.id.0, parent_id, begin_unix_time_ns, duration_ns: end_unix_time_ns - begin_unix_time_ns, event: span.event, properties: span.properties.clone(), }); } } SpanCollection::Span(span) => spans.push(span.into_span(anchor)), } } spans } } #[derive(Default, Debug)] pub struct CollectArgs { sync: bool, duration_threshold: Option<Duration>, } impl CollectArgs { pub fn sync(self, sync: bool) -> Self { Self { sync, ..self } } pub fn duration_threshold(self, duration_threshold: Duration) -> Self { Self { duration_threshold: Some(duration_threshold), ..self } } }
er().collect() }; self.closed.store(true, Ordering::SeqCst); let anchor = DefaultClock::anchor(); if let Some(duration) = duration_threshold { if let Some(root_span) = span_collections.iter().find_map(|s| match s { SpanCollection::Span(s) if s.parent_id.0 == 0 => Some(s), _ => None, }) { let root_span = root_span.clone().into_span(anchor); if root_span.duration_ns < duration.as_nanos() as _ { return vec![root_span]; } } } Self::amend(span_collections, anchor) }
function_block-function_prefixed
[ { "content": "#[proc_macro_attribute]\n\n#[proc_macro_error]\n\npub fn trace(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n let event = syn::parse_macro_input!(args as syn::Expr);\n\n\n\n let syn::ItemFn {\n\n attrs,\n\n ...
Rust
src/bin/main.rs
yumcyaWiz/rusmallpt
aa7ce93096a6b5735a711cf6585dd969c4456389
use std::f32::consts::{FRAC_PI_2, FRAC_PI_4}; use std::sync::{Arc, Mutex}; use rusmallpt::camera::{Camera, PinholeCamera}; use rusmallpt::core::IntersectableLocal; use rusmallpt::image::Image; use rusmallpt::integrator::{Integrator, NormalIntegrator, PathTracingIntegrator}; use rusmallpt::sampler::Sampler; use rusmallpt::scene::{Material, Scene}; use rusmallpt::shape::{Plane, Sphere}; use rusmallpt::types::Real; use rusmallpt::vec2::Vec2; use rusmallpt::vec3::Vec3; fn simple_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(0.0, 0.0, 6.0), Vec3::new(0.0, 0.0, -1.0), FRAC_PI_2, ); let sphere1 = Box::new(Sphere::new(Vec3::new(0.0, 0.0, 0.0), 1.0)); let sphere2 = Box::new(Sphere::new(Vec3::new(-1.5, 0.0, -1.5), 1.0)); let sphere3 = Box::new(Sphere::new(Vec3::new(1.5, 0.0, 1.5), 1.0)); let floor = Box::new(Plane::new( Vec3::new(-3.0, -1.0, 3.0), Vec3::new(6.0, 0.0, 0.0), Vec3::new(0.0, 0.0, -6.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![sphere1, sphere2, sphere3, floor]; let materials: Vec<Material> = vec![ Material::new( Vec3::new(0.8, 0.2, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.2, 0.8, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.2, 0.2, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), ]; (camera, Scene::new(primitives, materials)) } fn cornellbox_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(278.0, 273.0, -900.0), Vec3::new(0.0, 0.0, 1.0), FRAC_PI_4, ); let floor = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), Vec3::new(556.0, 0.0, 0.0), )); let right_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 548.8, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let left_wall = Box::new(Plane::new( Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 
559.2), Vec3::new(0.0, 548.8, 0.0), )); let ceil = Box::new(Plane::new( Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let back_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 559.2), Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), )); let short_box1 = Box::new(Plane::new( Vec3::new(130.0, 165.0, 65.0), Vec3::new(-48.0, 0.0, 160.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box2 = Box::new(Plane::new( Vec3::new(290.0, 0.0, 114.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-50.0, 0.0, 158.0), )); let short_box3 = Box::new(Plane::new( Vec3::new(130.0, 0.0, 65.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box4 = Box::new(Plane::new( Vec3::new(82.0, 0.0, 225.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(48.0, 0.0, -160.0), )); let short_box5 = Box::new(Plane::new( Vec3::new(240.0, 0.0, 272.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-158.0, 0.0, -47.0), )); let tall_box1 = Box::new(Plane::new( Vec3::new(423.0, 330.0, 247.0), Vec3::new(-158.0, 0.0, 49.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box2 = Box::new(Plane::new( Vec3::new(423.0, 0.0, 247.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box3 = Box::new(Plane::new( Vec3::new(472.0, 0.0, 406.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-158.0, 0.0, 50.0), )); let tall_box4 = Box::new(Plane::new( Vec3::new(314.0, 0.0, 456.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-49.0, 0.0, -160.0), )); let tall_box5 = Box::new(Plane::new( Vec3::new(265.0, 0.0, 296.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(158.0, 0.0, -49.0), )); let light = Box::new(Plane::new( Vec3::new(343.0, 548.6, 227.0), Vec3::new(-130.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 105.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![ floor, right_wall, left_wall, ceil, back_wall, short_box1, short_box2, short_box3, short_box4, short_box5, tall_box1, tall_box2, tall_box3, tall_box4, tall_box5, light, ]; let white = Material::new( Vec3::new(0.8, 0.8, 
0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let red = Material::new( Vec3::new(0.8, 0.05, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let green = Material::new( Vec3::new(0.05, 0.8, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let light_material = Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(34.0, 19.0, 10.0), ); let materials = vec![ white.clone(), red, green, white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white, light_material, ]; (camera, Scene::new(primitives, materials)) } fn main() { let width = 512; let height = 512; let n_samples = 100; let max_depth = 100; let image = Arc::new(Mutex::new(Image::new(width, height))); let (camera, scene) = cornellbox_scene(); let camera = Arc::new(camera); let scene = Arc::new(scene); let integrator = Arc::new(PathTracingIntegrator::new(max_depth)); let pool = rayon::ThreadPoolBuilder::new() .num_threads(16) .build() .unwrap(); pool.scope(|s| { for i in 0..height { for j in 0..width { let (image, camera, scene, integrator) = ( image.clone(), camera.clone(), scene.clone(), integrator.clone(), ); s.spawn(move |_| { let seed = j + width * i; let mut sampler = Sampler::new(seed as u64); for _k in 0..n_samples { sampler.next_1d(); } let width = width as Real; let height = height as Real; let mut radiance = Vec3::new(0.0, 0.0, 0.0); for _k in 0..n_samples { let uv = Vec2::new( (2.0 * (j as Real + sampler.next_1d()) - width) / height, (2.0 * (i as Real + sampler.next_1d()) - height) / height, ); let ray = camera.sample_ray(uv, &mut sampler); radiance += integrator.integrate(&scene, &mut sampler, &ray); } radiance /= n_samples as Real; image.lock().unwrap().set_pixel(i, j, radiance); }); } } }); image.lock().unwrap().gamma_correction(); image.lock().unwrap().write_ppm(); }
use std::f32::consts::{FRAC_PI_2, FRAC_PI_4}; use std::sync::{Arc, Mutex}; use rusmallpt::camera::{Camera, PinholeCamera}; use rusmallpt::core::IntersectableLocal; use rusmallpt::image::Image; use rusmallpt::integrator::{Integrator, NormalIntegrator, PathTracingIntegrator}; use rusmallpt::sampler::Sampler; use rusmallpt::scene::{Material, Scene}; use rusmallpt::shape::{Plane, Sphere}; use rusmallpt::types::Real; use rusmallpt::vec2::Vec2; use rusmallpt::vec3::Vec3; fn simple_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(0.0, 0.0, 6.0), Vec3::new(0.0, 0.0, -1.0), FRAC_PI_2, ); let sphere1 = Box::new(Sphere::new(Vec3::new(0.0, 0.0, 0.0), 1.0)); let sphere2 = Box::new(Sphere::new(Vec3::new(-1.5, 0.0, -1.5), 1.0)); let sphere3 = Box::new(Sphere::new(Vec3::new(1.5, 0.0, 1.5), 1.0)); let floor = Box::new(Plane::new( Vec3::new(-3.0, -1.0, 3.0), Vec3::new(6.0, 0.0, 0.0), Vec3::new(0.0, 0.0, -6.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![sphere1, sphere2, sphere3, floor]; let materials: Vec<Material> = vec![ Material::new( Vec3::new(0.8, 0.2, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec
fn cornellbox_scene() -> (PinholeCamera, Scene) { let camera = PinholeCamera::new( Vec3::new(278.0, 273.0, -900.0), Vec3::new(0.0, 0.0, 1.0), FRAC_PI_4, ); let floor = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), Vec3::new(556.0, 0.0, 0.0), )); let right_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 548.8, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let left_wall = Box::new(Plane::new( Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), Vec3::new(0.0, 548.8, 0.0), )); let ceil = Box::new(Plane::new( Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 559.2), )); let back_wall = Box::new(Plane::new( Vec3::new(0.0, 0.0, 559.2), Vec3::new(0.0, 548.8, 0.0), Vec3::new(556.0, 0.0, 0.0), )); let short_box1 = Box::new(Plane::new( Vec3::new(130.0, 165.0, 65.0), Vec3::new(-48.0, 0.0, 160.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box2 = Box::new(Plane::new( Vec3::new(290.0, 0.0, 114.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-50.0, 0.0, 158.0), )); let short_box3 = Box::new(Plane::new( Vec3::new(130.0, 0.0, 65.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(160.0, 0.0, 49.0), )); let short_box4 = Box::new(Plane::new( Vec3::new(82.0, 0.0, 225.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(48.0, 0.0, -160.0), )); let short_box5 = Box::new(Plane::new( Vec3::new(240.0, 0.0, 272.0), Vec3::new(0.0, 165.0, 0.0), Vec3::new(-158.0, 0.0, -47.0), )); let tall_box1 = Box::new(Plane::new( Vec3::new(423.0, 330.0, 247.0), Vec3::new(-158.0, 0.0, 49.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box2 = Box::new(Plane::new( Vec3::new(423.0, 0.0, 247.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(49.0, 0.0, 159.0), )); let tall_box3 = Box::new(Plane::new( Vec3::new(472.0, 0.0, 406.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-158.0, 0.0, 50.0), )); let tall_box4 = Box::new(Plane::new( Vec3::new(314.0, 0.0, 456.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(-49.0, 0.0, -160.0), )); let tall_box5 = Box::new(Plane::new( Vec3::new(265.0, 0.0, 
296.0), Vec3::new(0.0, 330.0, 0.0), Vec3::new(158.0, 0.0, -49.0), )); let light = Box::new(Plane::new( Vec3::new(343.0, 548.6, 227.0), Vec3::new(-130.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 105.0), )); let primitives: Vec<Box<dyn IntersectableLocal + Send + Sync>> = vec![ floor, right_wall, left_wall, ceil, back_wall, short_box1, short_box2, short_box3, short_box4, short_box5, tall_box1, tall_box2, tall_box3, tall_box4, tall_box5, light, ]; let white = Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let red = Material::new( Vec3::new(0.8, 0.05, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let green = Material::new( Vec3::new(0.05, 0.8, 0.05), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ); let light_material = Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(34.0, 19.0, 10.0), ); let materials = vec![ white.clone(), red, green, white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white.clone(), white, light_material, ]; (camera, Scene::new(primitives, materials)) } fn main() { let width = 512; let height = 512; let n_samples = 100; let max_depth = 100; let image = Arc::new(Mutex::new(Image::new(width, height))); let (camera, scene) = cornellbox_scene(); let camera = Arc::new(camera); let scene = Arc::new(scene); let integrator = Arc::new(PathTracingIntegrator::new(max_depth)); let pool = rayon::ThreadPoolBuilder::new() .num_threads(16) .build() .unwrap(); pool.scope(|s| { for i in 0..height { for j in 0..width { let (image, camera, scene, integrator) = ( image.clone(), camera.clone(), scene.clone(), integrator.clone(), ); s.spawn(move |_| { let seed = j + width * i; let mut sampler = Sampler::new(seed as u64); for _k in 0..n_samples { sampler.next_1d(); } let width = width as Real; let height = height as Real; let mut radiance = Vec3::new(0.0, 0.0, 0.0); for _k in 0..n_samples 
{ let uv = Vec2::new( (2.0 * (j as Real + sampler.next_1d()) - width) / height, (2.0 * (i as Real + sampler.next_1d()) - height) / height, ); let ray = camera.sample_ray(uv, &mut sampler); radiance += integrator.integrate(&scene, &mut sampler, &ray); } radiance /= n_samples as Real; image.lock().unwrap().set_pixel(i, j, radiance); }); } } }); image.lock().unwrap().gamma_correction(); image.lock().unwrap().write_ppm(); }
3::new(0.2, 0.8, 0.2), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.2, 0.2, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), Material::new( Vec3::new(0.8, 0.8, 0.8), Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0), ), ]; (camera, Scene::new(primitives, materials)) }
function_block-function_prefixed
[ { "content": "pub trait Camera {\n\n fn sample_ray(&self, uv: Vec2, sampler: &mut Sampler) -> Ray;\n\n}\n\n\n\npub struct PinholeCamera {\n\n position: Vec3, // camera position\n\n forward: Vec3, // camera forward direction\n\n right: Vec3, // camera right direction\n\n up: Vec3, // cam...
Rust
src/config.rs
foeb/bootimage
e159e4095ca03826a5b366b1cf528804a06bf6bc
use failure::{Error, ResultExt}; use std::path::PathBuf; use toml::Value; #[derive(Debug, Clone)] pub struct Config { pub manifest_path: PathBuf, pub default_target: Option<String>, pub output: Option<PathBuf>, pub bootloader: BootloaderConfig, pub minimum_image_size: Option<u64>, pub run_command: Vec<String>, pub package_filepath: Option<PathBuf>, } #[derive(Debug, Clone)] pub struct BootloaderConfig { pub name: Option<String>, pub target: PathBuf, pub default_features: bool, pub features: Vec<String>, } pub(crate) fn read_config(manifest_path: PathBuf) -> Result<Config, Error> { use std::{fs::File, io::Read}; let cargo_toml: Value = { let mut content = String::new(); File::open(&manifest_path) .with_context(|e| format!("Failed to open Cargo.toml: {}", e))? .read_to_string(&mut content) .with_context(|e| format!("Failed to read Cargo.toml: {}", e))?; content .parse::<Value>() .with_context(|e| format!("Failed to parse Cargo.toml: {}", e))? }; let metadata = cargo_toml .get("package") .and_then(|table| table.get("metadata")) .and_then(|table| table.get("bootimage")); let metadata = match metadata { None => { return Ok(ConfigBuilder { manifest_path: Some(manifest_path), ..Default::default() } .into()); } Some(metadata) => metadata.as_table().ok_or(format_err!( "Bootimage configuration invalid: {:?}", metadata ))?, }; /* * The user shouldn't specify any features if they're using a precompiled bootloader, as we * don't actually compile it. */ if cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader_precompiled")) .and_then(|table| { table .get("features") .or_else(|| table.get("default-features")) }) .is_some() { return Err(format_err!( "Can't change features of precompiled bootloader!" 
)); } let bootloader_dependency = cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")); let bootloader_default_features = match bootloader_dependency.and_then(|table| table.get("default-features")) { None => None, Some(Value::Boolean(default_features)) => Some(*default_features), Some(_) => { return Err(format_err!( "Bootloader 'default-features' field should be a bool!" )); } }; let bootloader_features = match cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")) .and_then(|table| table.get("features")) { None => None, Some(Value::Array(array)) => { let mut features = Vec::new(); for feature_string in array { match feature_string { Value::String(feature) => features.push(feature.clone()), _ => return Err(format_err!("Bootloader features are malformed!")), } } Some(features) } Some(_) => return Err(format_err!("Bootloader features are malformed!")), }; let mut config = ConfigBuilder { manifest_path: Some(manifest_path), bootloader: BootloaderConfigBuilder { features: bootloader_features, default_features: bootloader_default_features, ..Default::default() }, ..Default::default() }; for (key, value) in metadata { match (key.as_str(), value.clone()) { ("default-target", Value::String(s)) => config.default_target = From::from(s), ("output", Value::String(s)) => config.output = Some(PathBuf::from(s)), ("bootloader", Value::Table(t)) => { for (key, value) in t { match (key.as_str(), value) { ("name", Value::String(s)) => config.bootloader.name = From::from(s), ("target", Value::String(s)) => { config.bootloader.target = Some(PathBuf::from(s)) } (k @ "precompiled", _) | (k @ "version", _) | (k @ "git", _) | (k @ "branch", _) | (k @ "path", _) => Err(format_err!( "the \ `package.metadata.bootimage.bootloader` key `{}` was deprecated\n\n\ In case you just updated bootimage from an earlier version, \ check out the migration guide at \ https://github.com/rust-osdev/bootimage/pull/16.", k ))?, (key, value) => Err(format_err!( 
"unexpected \ `package.metadata.bootimage.bootloader` key `{}` with value `{}`", key, value ))?, } } } ("minimum-image-size", Value::Integer(x)) => { if x >= 0 { config.minimum_image_size = Some((x * 1024 * 1024) as u64); } else { Err(format_err!( "unexpected `package.metadata.bootimage` \ key `minimum-image-size` with negative value `{}`", value ))? } } ("run-command", Value::Array(array)) => { let mut command = Vec::new(); for value in array { match value { Value::String(s) => command.push(s), _ => Err(format_err!("run-command must be a list of strings"))?, } } config.run_command = Some(command); } ("package-file", Value::String(path)) => { config.package_filepath = Some(PathBuf::from(path)); } (key, value) => Err(format_err!( "unexpected `package.metadata.bootimage` \ key `{}` with value `{}`", key, value ))?, } } Ok(config.into()) } #[derive(Default)] struct ConfigBuilder { manifest_path: Option<PathBuf>, default_target: Option<String>, output: Option<PathBuf>, bootloader: BootloaderConfigBuilder, minimum_image_size: Option<u64>, run_command: Option<Vec<String>>, package_filepath: Option<PathBuf>, } #[derive(Default)] struct BootloaderConfigBuilder { name: Option<String>, target: Option<PathBuf>, features: Option<Vec<String>>, default_features: Option<bool>, } impl Into<Config> for ConfigBuilder { fn into(self) -> Config { Config { manifest_path: self.manifest_path.expect("manifest path must be set"), default_target: self.default_target, output: self.output, bootloader: self.bootloader.into(), minimum_image_size: self.minimum_image_size, run_command: self.run_command.unwrap_or(vec![ "qemu-system-x86_64".into(), "-drive".into(), "format=raw,file={}".into(), ]), package_filepath: self.package_filepath, } } } impl Into<BootloaderConfig> for BootloaderConfigBuilder { fn into(self) -> BootloaderConfig { BootloaderConfig { name: self.name, target: self .target .unwrap_or(PathBuf::from("x86_64-bootloader.json")), features: 
self.features.unwrap_or(Vec::with_capacity(0)), default_features: self.default_features.unwrap_or(true), } } }
use failure::{Error, ResultExt}; use std::path::PathBuf; use toml::Value; #[derive(Debug, Clone)] pub struct Config { pub manifest_path: PathBuf, pub default_target: Option<String>, pub output: Option<PathBuf>, pub bootloader: BootloaderConfig, pub minimum_image_size: Option<u64>, pub run_command: Vec<String>, pub package_filepath: Option<PathBuf>, } #[derive(Debug, Clone)] pub struct BootloaderConfig { pub name: Option<String>, pub target: PathBuf, pub default_features: bool, pub features: Vec<String>, } pub(crate) fn read_config(manifest_path: PathBuf) -> Result<Config, Error> { use std::{fs::File, io::Read}; let cargo_toml: Value = { let mut content = String::new(); File::open(&manifest_path) .with_context(|e| format!("Failed to open Cargo.toml: {}", e))? .read_to_string(&mut content) .with_context(|e| format!("Failed to read Cargo.toml: {}", e))?; content .parse::<Value>() .with_context(|e| format!("Failed to parse Cargo.toml: {}", e))? }; let metadata = cargo_toml .get("package") .and_then(|table| table.get("metadata")) .and_then(|table| table.get("bootimage")); let metadata = match metadata { None => { return Ok(ConfigBuilder { manifest_path: Some(manifest_path), ..Default::default() } .into()); } Some(metadata) => metadata.as_table().ok_or(format_err!( "Bootimage configuration invalid: {:?}", metadata ))?, }; /* * The user shouldn't specify any features if they're using a precompiled bootloader, as we * don't actually compile it. */ if cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader_precompiled")) .and_then(|table| { table .get("features") .or_else(|| table.get("default-features")) }) .is_some() { return Err(format_err!( "Can't change features of precompiled bootloader!" )); } let bootloader_dependency = cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")); let bootloader_default_features =
; let bootloader_features = match cargo_toml .get("dependencies") .and_then(|table| table.get("bootloader")) .and_then(|table| table.get("features")) { None => None, Some(Value::Array(array)) => { let mut features = Vec::new(); for feature_string in array { match feature_string { Value::String(feature) => features.push(feature.clone()), _ => return Err(format_err!("Bootloader features are malformed!")), } } Some(features) } Some(_) => return Err(format_err!("Bootloader features are malformed!")), }; let mut config = ConfigBuilder { manifest_path: Some(manifest_path), bootloader: BootloaderConfigBuilder { features: bootloader_features, default_features: bootloader_default_features, ..Default::default() }, ..Default::default() }; for (key, value) in metadata { match (key.as_str(), value.clone()) { ("default-target", Value::String(s)) => config.default_target = From::from(s), ("output", Value::String(s)) => config.output = Some(PathBuf::from(s)), ("bootloader", Value::Table(t)) => { for (key, value) in t { match (key.as_str(), value) { ("name", Value::String(s)) => config.bootloader.name = From::from(s), ("target", Value::String(s)) => { config.bootloader.target = Some(PathBuf::from(s)) } (k @ "precompiled", _) | (k @ "version", _) | (k @ "git", _) | (k @ "branch", _) | (k @ "path", _) => Err(format_err!( "the \ `package.metadata.bootimage.bootloader` key `{}` was deprecated\n\n\ In case you just updated bootimage from an earlier version, \ check out the migration guide at \ https://github.com/rust-osdev/bootimage/pull/16.", k ))?, (key, value) => Err(format_err!( "unexpected \ `package.metadata.bootimage.bootloader` key `{}` with value `{}`", key, value ))?, } } } ("minimum-image-size", Value::Integer(x)) => { if x >= 0 { config.minimum_image_size = Some((x * 1024 * 1024) as u64); } else { Err(format_err!( "unexpected `package.metadata.bootimage` \ key `minimum-image-size` with negative value `{}`", value ))? 
} } ("run-command", Value::Array(array)) => { let mut command = Vec::new(); for value in array { match value { Value::String(s) => command.push(s), _ => Err(format_err!("run-command must be a list of strings"))?, } } config.run_command = Some(command); } ("package-file", Value::String(path)) => { config.package_filepath = Some(PathBuf::from(path)); } (key, value) => Err(format_err!( "unexpected `package.metadata.bootimage` \ key `{}` with value `{}`", key, value ))?, } } Ok(config.into()) } #[derive(Default)] struct ConfigBuilder { manifest_path: Option<PathBuf>, default_target: Option<String>, output: Option<PathBuf>, bootloader: BootloaderConfigBuilder, minimum_image_size: Option<u64>, run_command: Option<Vec<String>>, package_filepath: Option<PathBuf>, } #[derive(Default)] struct BootloaderConfigBuilder { name: Option<String>, target: Option<PathBuf>, features: Option<Vec<String>>, default_features: Option<bool>, } impl Into<Config> for ConfigBuilder { fn into(self) -> Config { Config { manifest_path: self.manifest_path.expect("manifest path must be set"), default_target: self.default_target, output: self.output, bootloader: self.bootloader.into(), minimum_image_size: self.minimum_image_size, run_command: self.run_command.unwrap_or(vec![ "qemu-system-x86_64".into(), "-drive".into(), "format=raw,file={}".into(), ]), package_filepath: self.package_filepath, } } } impl Into<BootloaderConfig> for BootloaderConfigBuilder { fn into(self) -> BootloaderConfig { BootloaderConfig { name: self.name, target: self .target .unwrap_or(PathBuf::from("x86_64-bootloader.json")), features: self.features.unwrap_or(Vec::with_capacity(0)), default_features: self.default_features.unwrap_or(true), } } }
match bootloader_dependency.and_then(|table| table.get("default-features")) { None => None, Some(Value::Boolean(default_features)) => Some(*default_features), Some(_) => { return Err(format_err!( "Bootloader 'default-features' field should be a bool!" )); } }
if_condition
[ { "content": "fn build_bootloader(metadata: &CargoMetadata, config: &Config) -> Result<Box<[u8]>, Error> {\n\n use std::io::Read;\n\n\n\n let bootloader_metadata = metadata.packages.iter().find(|p| {\n\n if let Some(name) = config.bootloader.name.as_ref() {\n\n p.name == name.as_str()\n\...
Rust
der/src/asn1/bit_string.rs
xiaoyuxlu/utils
06276215ebdf9e5258c6daadcace9f9c64f57065
use crate::{ Any, ByteSlice, Encodable, Encoder, Error, ErrorKind, Header, Length, Result, Tag, Tagged, }; use core::convert::TryFrom; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct BitString<'a> { inner: ByteSlice<'a>, } impl<'a> BitString<'a> { pub fn new(slice: &'a [u8]) -> Result<Self> { ByteSlice::new(slice) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()) } pub fn as_bytes(&self) -> &'a [u8] { self.inner.as_bytes() } pub fn len(&self) -> Length { self.inner.len() } pub fn is_empty(&self) -> bool { self.inner.is_empty() } fn header(self) -> Result<Header> { Ok(Header { tag: Tag::BitString, length: (self.inner.len() + 1u16)?, }) } } impl AsRef<[u8]> for BitString<'_> { fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl<'a> From<&BitString<'a>> for BitString<'a> { fn from(value: &BitString<'a>) -> BitString<'a> { *value } } impl<'a> TryFrom<Any<'a>> for BitString<'a> { type Error = Error; fn try_from(any: Any<'a>) -> Result<BitString<'a>> { any.tag().assert_eq(Tag::BitString)?; if let Some(bs) = any.as_bytes().get(1..) { if any.as_bytes()[0] == 0 { return ByteSlice::new(bs) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()); } } Err(ErrorKind::Length { tag: Self::TAG }.into()) } } impl<'a> From<BitString<'a>> for Any<'a> { fn from(bit_string: BitString<'a>) -> Any<'a> { Any { tag: Tag::BitString, value: bit_string.inner, } } } impl<'a> From<BitString<'a>> for &'a [u8] { fn from(bit_string: BitString<'a>) -> &'a [u8] { bit_string.as_bytes() } } impl<'a> Encodable for BitString<'a> { fn encoded_len(&self) -> Result<Length> { self.header()?.encoded_len()? 
+ 1u16 + self.inner.len() } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { self.header()?.encode(encoder)?; encoder.byte(0)?; encoder.bytes(self.as_bytes()) } } impl<'a> Tagged for BitString<'a> { const TAG: Tag = Tag::BitString; } #[cfg(test)] mod tests { use super::{Any, BitString, ErrorKind, Result, Tag}; use core::convert::TryInto; fn parse_bitstring_from_any(bytes: &[u8]) -> Result<BitString<'_>> { Any::new(Tag::BitString, bytes)?.try_into() } #[test] fn reject_non_prefixed_bitstring() { let err = parse_bitstring_from_any(&[]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn reject_non_zero_prefix() { let err = parse_bitstring_from_any(&[1, 1, 2, 3]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn decode_empty_bitstring() { let bs = parse_bitstring_from_any(&[0]).unwrap(); assert_eq!(bs.as_ref(), &[]); } #[test] fn decode_non_empty_bitstring() { let bs = parse_bitstring_from_any(&[0, 1, 2, 3]).unwrap(); assert_eq!(bs.as_ref(), &[1, 2, 3]); } }
use crate::{ Any, ByteSlice, Encodable, Encoder, Error, ErrorKind, Header, Length, Result, Tag, Tagged, }; use core::convert::TryFrom; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub struct BitString<'a> { inner: ByteSlice<'a>, } impl<'a> BitString<'a> { pub fn new(slice: &'a [u8]) -> Result<Self> { ByteSlice::new(slice) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()) } pub fn as_bytes(&self) -> &'a [u8] { self.inner.as_bytes() } pub fn len(&self) -> Length { self.inner.len() } pub fn is_empty(&self) -> bool { self.inner.is_empty() } fn header(self) -> Result<Header> { Ok(Header { tag: Tag::BitString, length: (self.inner.len() + 1u16)?, }) } } impl AsRef<[u8]> for BitString<'_> { fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl<'a> From<&BitString<'a>> for BitString<'a> { fn from(value: &BitString<'a>) -> BitString<'a> { *value } } impl<'a> TryFrom<Any<'a>> for BitString<'a> { type Error = Error;
} impl<'a> From<BitString<'a>> for Any<'a> { fn from(bit_string: BitString<'a>) -> Any<'a> { Any { tag: Tag::BitString, value: bit_string.inner, } } } impl<'a> From<BitString<'a>> for &'a [u8] { fn from(bit_string: BitString<'a>) -> &'a [u8] { bit_string.as_bytes() } } impl<'a> Encodable for BitString<'a> { fn encoded_len(&self) -> Result<Length> { self.header()?.encoded_len()? + 1u16 + self.inner.len() } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { self.header()?.encode(encoder)?; encoder.byte(0)?; encoder.bytes(self.as_bytes()) } } impl<'a> Tagged for BitString<'a> { const TAG: Tag = Tag::BitString; } #[cfg(test)] mod tests { use super::{Any, BitString, ErrorKind, Result, Tag}; use core::convert::TryInto; fn parse_bitstring_from_any(bytes: &[u8]) -> Result<BitString<'_>> { Any::new(Tag::BitString, bytes)?.try_into() } #[test] fn reject_non_prefixed_bitstring() { let err = parse_bitstring_from_any(&[]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn reject_non_zero_prefix() { let err = parse_bitstring_from_any(&[1, 1, 2, 3]).err().unwrap(); assert_eq!( err.kind(), ErrorKind::Length { tag: Tag::BitString } ); } #[test] fn decode_empty_bitstring() { let bs = parse_bitstring_from_any(&[0]).unwrap(); assert_eq!(bs.as_ref(), &[]); } #[test] fn decode_non_empty_bitstring() { let bs = parse_bitstring_from_any(&[0, 1, 2, 3]).unwrap(); assert_eq!(bs.as_ref(), &[1, 2, 3]); } }
fn try_from(any: Any<'a>) -> Result<BitString<'a>> { any.tag().assert_eq(Tag::BitString)?; if let Some(bs) = any.as_bytes().get(1..) { if any.as_bytes()[0] == 0 { return ByteSlice::new(bs) .map(|inner| Self { inner }) .map_err(|_| ErrorKind::Length { tag: Self::TAG }.into()); } } Err(ErrorKind::Length { tag: Self::TAG }.into()) }
function_block-full_function
[ { "content": "/// Obtain the length of an ASN.1 `SEQUENCE` of [`Encodable`] values when\n\n/// serialized as ASN.1 DER, including the `SEQUENCE` tag and length prefix.\n\npub fn encoded_len(encodables: &[&dyn Encodable]) -> Result<Length> {\n\n let inner_len = encoded_len_inner(encodables)?;\n\n Header::n...
Rust
src/format.rs
samwho/hmm
8894296e98ab43e6937adbfe88af413c7edee881
use super::{entry::Entry, Result}; use chrono::prelude::*; use colored::*; use handlebars::{ Context, Handlebars, Helper, HelperDef, HelperResult, JsonRender, Output, RenderContext, }; use std::collections::BTreeMap; pub struct Format<'a> { renderer: Handlebars<'a>, data: BTreeMap<&'static str, String>, } impl<'a> Format<'a> { pub fn with_template(template: &str) -> Result<Self> { let mut renderer = Handlebars::new(); renderer.set_strict_mode(true); renderer.register_escape_fn(|s| s.trim().to_owned()); renderer.register_template_string("template", template)?; renderer.register_helper("indent", Box::new(IndentHelper::new())); renderer.register_helper("strftime", Box::new(StrftimeHelper {})); renderer.register_helper("color", Box::new(ColorHelper {})); renderer.register_helper("markdown", Box::new(MarkdownHelper {})); Ok(Format { renderer, data: BTreeMap::new(), }) } pub fn format_entry(&mut self, entry: &Entry) -> Result<String> { self.data.clear(); self.data.insert("datetime", entry.datetime().to_rfc3339()); self.data.insert("message", entry.message().to_owned()); Ok(self.renderer.render("template", &self.data)?) } } struct IndentHelper<'a> { wrapper: textwrap::Wrapper<'a, textwrap::HyphenSplitter>, } impl<'a> IndentHelper<'a> { fn new() -> Self { let wrapper = textwrap::Wrapper::with_termwidth() .initial_indent("│ ") .subsequent_indent("│ "); IndentHelper { wrapper } } } impl<'a> HelperDef for IndentHelper<'a> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let param = h.param(0).unwrap(); Ok(out.write(&self.wrapper.fill(&param.value().render()))?) 
} } struct StrftimeHelper {} impl HelperDef for StrftimeHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let date_str = h.param(1).unwrap().value().render(); let date = DateTime::parse_from_rfc3339(&date_str) .map_err(|_| handlebars::RenderError::new("couldn't parse date"))?; let local_date = date.with_timezone(&Local); let format_str = h.param(0).unwrap().value().render(); Ok(out.write(&local_date.format(&format_str).to_string())?) } } struct ColorHelper {} impl HelperDef for ColorHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let color = h.param(0).unwrap().value().render(); let s = h.param(1).unwrap().value().render(); Ok(out.write(&format!("{}", s.color(color)))?) } } struct MarkdownHelper {} impl HelperDef for MarkdownHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let s = h.param(0).unwrap().value().render(); Ok(out.write(&format!("{}", termimad::text(&s)))?) } } #[cfg(test)] mod tests { use super::*; use test_case::test_case; #[test_case("{{ message }}" => "hello world")] #[test_case("{{ color \"blue\" message }}" => "hello world".blue().to_string())] #[test_case("{{ indent message }}" => "│ hello world")] #[test_case("{{ strftime \"%Y-%m-%d %H:%M:%S\" datetime }}" => "2020-01-02 03:04:05")] fn test_format(template: &str) -> String { Format::with_template(template) .unwrap() .format_entry(&Entry::new( Utc.ymd(2020, 1, 2).and_hms(3, 4, 5).into(), "hello world".to_owned(), )) .unwrap() } }
use super::{entry::Entry, Result}; use chrono::prelude::*; use colored::*; use handlebars::{ Context, Handlebars, Helper, HelperDef, HelperResult, JsonRender, Output, RenderContext, }; use std::collections::BTreeMap; pub struct Format<'a> { renderer: Handlebars<'a>, data: BTreeMap<&'static str, String>, } impl<'a> Format<'a> { pub fn with_template(template: &str) -> Result<Self> { let mut renderer = Handlebars::new(); renderer.set_strict_mode(true); renderer.register_escape_fn(|s| s.trim().to_owned()); renderer.register_template_string("template", template)?; renderer.register_helper("indent", Box::new(IndentHelper::new())); renderer.register_helper("strftime", Box::new(StrftimeHelper {})); renderer.register_helper("color", Box::new(ColorHelper {})); renderer.register_helper("markdown", Box::new(MarkdownHelper {})); Ok(Format { renderer, data: BTreeMap::new(), }) } pub fn format_entry(&mut self, entry: &Entry) -> Result<String> { self.data.clear(); self.data.insert("datetime", entry.datetime().to_rfc3339()); self.data.insert("message", entry.message().to_owned()); Ok(self.renderer.render("template", &self.data)?) } } struct IndentHelper<'a> { wrapper: textwrap::Wrapper<'a, textwrap::HyphenSplitter>, } impl<'a> IndentHelper<'a> { fn new() -> Self { let wrapper = textwrap::Wrapper::with_termwidth() .initial_indent("│ ") .subsequent_indent("│ "); IndentHelper { wrapper } } } impl<'a> HelperDef for IndentHelper<'a> { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let param = h.param(0).unwrap(); Ok(out.write(&self.wrapper.fill(&param.value().render()))?) 
} } struct StrftimeHelper {} impl HelperDef for StrftimeHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let date_str = h.param(1).unwrap().value().render(); let date = DateTime::parse_from_rfc3339(&date_str) .map_err(|_| handlebars::RenderError::new("couldn't parse date"))?; let local_date = date.with_timezone(&Local); let format_str = h.param(0).unwrap().value().render(); Ok(out.write(&local_date.format(&format_str).to_string())?) } } struct ColorHelper {} impl HelperDef for ColorHelper {
} struct MarkdownHelper {} impl HelperDef for MarkdownHelper { fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let s = h.param(0).unwrap().value().render(); Ok(out.write(&format!("{}", termimad::text(&s)))?) } } #[cfg(test)] mod tests { use super::*; use test_case::test_case; #[test_case("{{ message }}" => "hello world")] #[test_case("{{ color \"blue\" message }}" => "hello world".blue().to_string())] #[test_case("{{ indent message }}" => "│ hello world")] #[test_case("{{ strftime \"%Y-%m-%d %H:%M:%S\" datetime }}" => "2020-01-02 03:04:05")] fn test_format(template: &str) -> String { Format::with_template(template) .unwrap() .format_entry(&Entry::new( Utc.ymd(2020, 1, 2).and_hms(3, 4, 5).into(), "hello world".to_owned(), )) .unwrap() } }
fn call<'reg: 'rc, 'rc>( &self, h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output, ) -> HelperResult { let color = h.param(0).unwrap().value().render(); let s = h.param(1).unwrap().value().render(); Ok(out.write(&format!("{}", s.color(color)))?) }
function_block-full_function
[ { "content": "pub fn from_str(s: &str) -> Error {\n\n s.to_owned().into()\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n Csv(csv::Error),\n\n QuickCsv(quick_csv::error::Error),\n\n ChronoParse(chrono::format::ParseError),\n\n SerdeJson(serde_json::error::Error),\n\n ...
Rust
src/jdk.rs
jht5945/buildj
26de9ddcc6dcd5c956da2f592d55bb504fe1a8f5
use std::{collections::HashMap, env, fs, str, path::Path, process::Command}; use rust_util::util_os; use rust_util::util_env; use crate::{local_util, tool, misc::VERBOSE}; use plist::Value; const PATH: &str = "PATH"; const JAVA_HOME: &str = "JAVA_HOME"; const OPENJDK_MACOS: &str = "openjdk-osx"; const JDK_LINUX: &str = "jdk-linux"; const OPENJDK_LINUX: &str = "openjdk-linux"; const MACOS_LIBEXEC_JAVAHOME: &str = "/usr/libexec/java_home"; pub const LOCAL_JAVA_HOME_BASE_DIR: &str = ".jssp/jdks"; lazy_static! { pub static ref BUILDJ_JAVA_NAME: Option<String> = env::var("BUILDJ_JAVA_NAME").ok(); } pub fn get_java_home(version: &str) -> Option<String> { match get_macos_java_home(version) { Some(j) => Some(j), None => match get_local_java_home(version) { Some(j) => Some(j), None => iff!(get_cloud_java(version), get_local_java_home(version), None), }, } } pub fn get_cloud_java(version: &str) -> bool { if !util_os::is_macos_or_linux() { return false; } let cloud_java_names = match &*BUILDJ_JAVA_NAME { None => if util_os::is_macos() { vec![OPENJDK_MACOS] } else if util_os::is_linux() { vec![JDK_LINUX, OPENJDK_LINUX] } else { vec![] }, Some(buildj_java_name) => vec![buildj_java_name.as_str()], }; let local_java_home_base_dir = match local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { Ok(o) => o, Err(_) => return false, }; for cloud_java_name in cloud_java_names { if tool::get_and_extract_tool_package(&local_java_home_base_dir, false, cloud_java_name, version, false).is_ok() { return true; } } failure!("Get java failed, version: {}", version); false } pub fn get_macos_java_home(version: &str) -> Option<String> { if !util_os::is_macos() || util_env::is_env_on("SKIP_CHECK_JAVA_HOME") { return None; } let java_home_x = Command::new(MACOS_LIBEXEC_JAVAHOME).arg("-x").output().ok()?; let java_home_plist_value = match Value::from_reader_xml(&*java_home_x.stdout) { Err(e) => { debugging!("Parse java_home outputs failed: {}", e); return None; } Ok(val) => val, }; let 
java_home_plist_value_array = match java_home_plist_value.as_array() { None => { debugging!("Covert java_home plist output to array failed: {:?}", java_home_plist_value); return None; } Some(val) => val, }; for java_home_plist_item in java_home_plist_value_array { debugging!("Checking: {:?}", java_home_plist_item); if let Some(jvm_item) = java_home_plist_item.as_dictionary() { let jvm_version_value = jvm_item.get("JVMVersion"); let jvm_home_path_value = jvm_item.get("JVMHomePath"); if let (Some(Value::String(jvm_version)), Some(Value::String(jvm_path))) = (jvm_version_value, jvm_home_path_value) { debugging!("Check version: {} vs {}", jvm_version, version); if jvm_version.starts_with(version) { debugging!("Check version success: {} -> {}", jvm_version, jvm_path); return Some(jvm_path.into()); } } } } None } pub fn get_local_java_home(version: &str) -> Option<String> { let local_java_home_base_dir = local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR).ok()?; let paths = fs::read_dir(Path::new(&local_java_home_base_dir)).ok()?; for path in paths { if let Ok(dir_entry) = path { if let Some(p) = dir_entry.path().to_str() { if *VERBOSE { debugging!("Try match path: {}", p); } let mut path_name = p; if p.ends_with('/') { path_name = &path_name[..path_name.len() - 1] } if let Some(i) = path_name.rfind('/') { path_name = &path_name[i + 1..]; } let matched_path_opt = if (path_name.starts_with("jdk-") && (&path_name[4..]).starts_with(version)) || (path_name.starts_with("jdk") && (&path_name[3..]).starts_with(version)) { Some(p) } else { None }; if let Some(matched_path) = matched_path_opt { if *VERBOSE { debugging!("Matched JDK path found: {}", matched_path); } return if local_util::is_path_exists(matched_path, "Contents/Home") { Some(format!("{}/{}", matched_path, "Contents/Home")) } else { Some(matched_path.to_string()) }; } } } } None } pub fn extract_jdk_and_wait(file_name: &str) { if let Ok(local_java_home_base_dir) = 
local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { local_util::extract_package_and_wait(&local_java_home_base_dir, file_name).unwrap_or_else(|err| { failure!("Extract file: {}, failed: {}", file_name, err); }); } } pub fn get_env() -> HashMap<String, String> { let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { new_env.insert(key, value); } new_env } pub fn get_env_with_java_home(java_home: &str) -> HashMap<String, String> { let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { let key_str = key.as_str(); if JAVA_HOME == key_str { } else if PATH == key_str { let path = value.to_string(); let new_path = format!("{}/bin:{}", java_home, path); new_env.insert(PATH.to_string(), new_path); } else { new_env.insert(key, value); } } new_env.insert(JAVA_HOME.to_string(), java_home.to_string()); new_env }
use std::{collections::HashMap, env, fs, str, path::Path, process::Command}; use rust_util::util_os; use rust_util::util_env; use crate::{local_util, tool, misc::VERBOSE}; use plist::Value; const PATH: &str = "PATH"; const JAVA_HOME: &str = "JAVA_HOME"; const OPENJDK_MACOS: &str = "openjdk-osx"; const JDK_LINUX: &str = "jdk-linux"; const OPENJDK_LINUX: &str = "openjdk-linux"; const MACOS_LIBEXEC_JAVAHOME: &str = "/usr/libexec/java_home"; pub const LOCAL_JAVA_HOME_BASE_DIR: &str = ".jssp/jdks"; lazy_static! { pub static ref BUILDJ_JAVA_NAME: Option<String> = env::var("BUILDJ_JAVA_NAME").ok(); } pub fn get_java_home(version: &str) -> Option<String> { match get_macos_java_home(version) { Some(j) => Some(j), None => match get_local_java_home(version) { Some(j) => Some(j), None => iff!(get_cloud_java(version), get_local_java_home(version), None), }, } } pub fn get_cloud_java(version: &str) -> bool { if !util_os::is_macos_or_linux() { return false; } let cloud_java_names = match &*BUILDJ_JAVA_NAME { None => if util_os::is_macos() { vec![OPENJDK_MACOS] } else if util_os::is_linux() { vec![JDK_LINUX, OPENJDK_LINUX] } else { vec![] }, Some(buildj_java_name) => vec![buildj_java_name.as_str()], }; let local_java_home_base_dir = match local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { Ok(o) => o, Err(_) => return false, }; for cloud_java_name in cloud_java_names { if tool::get_and_extract_tool_package(&local_java_home_base_dir, false, cloud_java_name, version, false).is_ok() { return true; } } failure!("Get java failed, version: {}", version); false } pub fn get_macos_java_home(version: &str) -> Option<String> { if !util_os::is_macos() || util_env::is_env_on("SKIP_CHECK_JAVA_HOME") { return None; } let java_home_x = Command::new(MACOS_LIBEXEC_JAVAHOME).arg("-x").output().ok()?; let java_home_plist_value = mat
{ let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { new_env.insert(key, value); } new_env } pub fn get_env_with_java_home(java_home: &str) -> HashMap<String, String> { let mut new_env: HashMap<String, String> = HashMap::new(); for (key, value) in env::vars() { let key_str = key.as_str(); if JAVA_HOME == key_str { } else if PATH == key_str { let path = value.to_string(); let new_path = format!("{}/bin:{}", java_home, path); new_env.insert(PATH.to_string(), new_path); } else { new_env.insert(key, value); } } new_env.insert(JAVA_HOME.to_string(), java_home.to_string()); new_env }
ch Value::from_reader_xml(&*java_home_x.stdout) { Err(e) => { debugging!("Parse java_home outputs failed: {}", e); return None; } Ok(val) => val, }; let java_home_plist_value_array = match java_home_plist_value.as_array() { None => { debugging!("Covert java_home plist output to array failed: {:?}", java_home_plist_value); return None; } Some(val) => val, }; for java_home_plist_item in java_home_plist_value_array { debugging!("Checking: {:?}", java_home_plist_item); if let Some(jvm_item) = java_home_plist_item.as_dictionary() { let jvm_version_value = jvm_item.get("JVMVersion"); let jvm_home_path_value = jvm_item.get("JVMHomePath"); if let (Some(Value::String(jvm_version)), Some(Value::String(jvm_path))) = (jvm_version_value, jvm_home_path_value) { debugging!("Check version: {} vs {}", jvm_version, version); if jvm_version.starts_with(version) { debugging!("Check version success: {} -> {}", jvm_version, jvm_path); return Some(jvm_path.into()); } } } } None } pub fn get_local_java_home(version: &str) -> Option<String> { let local_java_home_base_dir = local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR).ok()?; let paths = fs::read_dir(Path::new(&local_java_home_base_dir)).ok()?; for path in paths { if let Ok(dir_entry) = path { if let Some(p) = dir_entry.path().to_str() { if *VERBOSE { debugging!("Try match path: {}", p); } let mut path_name = p; if p.ends_with('/') { path_name = &path_name[..path_name.len() - 1] } if let Some(i) = path_name.rfind('/') { path_name = &path_name[i + 1..]; } let matched_path_opt = if (path_name.starts_with("jdk-") && (&path_name[4..]).starts_with(version)) || (path_name.starts_with("jdk") && (&path_name[3..]).starts_with(version)) { Some(p) } else { None }; if let Some(matched_path) = matched_path_opt { if *VERBOSE { debugging!("Matched JDK path found: {}", matched_path); } return if local_util::is_path_exists(matched_path, "Contents/Home") { Some(format!("{}/{}", matched_path, "Contents/Home")) } else { Some(matched_path.to_string()) 
}; } } } } None } pub fn extract_jdk_and_wait(file_name: &str) { if let Ok(local_java_home_base_dir) = local_util::get_user_home_dir(LOCAL_JAVA_HOME_BASE_DIR) { local_util::extract_package_and_wait(&local_java_home_base_dir, file_name).unwrap_or_else(|err| { failure!("Extract file: {}, failed: {}", file_name, err); }); } } pub fn get_env() -> HashMap<String, String>
random
[ { "content": "pub fn get_and_extract_tool_package(base_dir: &str, dir_with_name: bool, name: &str, version: &str, extract_match: bool) -> XResult<bool> {\n\n let tool_package_detail = get_tool_package_detail(name, version)?;\n\n let build_json_object = json::parse(&tool_package_detail)?;\n\n if *VERBOS...
Rust
tests/basic_choice.rs
pasa-v2x/asn1rs
e8ab92d96c57e17f7cd82cdf825c0063c95a8430
use asn1rs::prelude::*; use asn1rs::syn::io::UperReader as NewUperReader; use asn1rs::syn::io::UperWriter as NewUperWriter; asn_to_rust!( r"BasicChoice DEFINITIONS AUTOMATIC TAGS ::= BEGIN Basic ::= CHOICE { abc UTF8String, def UTF8String, ghi INTEGER } Extensible ::= CHOICE { abc UTF8String, def INTEGER, ..., -- whatever reserved blubber comment ghi INTEGER, jkl Basic, mno UTF8String } END" ); fn serialize_uper(to_uper: &impl Writable) -> (usize, Vec<u8>) { let mut writer = NewUperWriter::default(); writer.write(to_uper).unwrap(); let bits = writer.bit_len(); (bits, writer.into_bytes_vec()) } fn deserialize_uper<T: Readable>(data: &[u8], bits: usize) -> T { let mut reader = NewUperReader::from_bits(data, bits); reader.read::<T>().unwrap() } fn serialize_and_deserialize_uper<T: Readable + Writable + std::fmt::Debug + PartialEq>( bits: usize, data: &[u8], uper: &T, ) { let serialized = serialize_uper(uper); assert_eq!((bits, data), (serialized.0, &serialized.1[..])); assert_eq!(uper, &deserialize_uper::<T>(data, bits)); } #[test] fn test_extensible_uper() { serialize_and_deserialize_uper(10, &[0x00, 0x00], &Extensible::Abc(String::default())); serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Extensible::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper(18, &[0x40, 0x40, 0x00], &Extensible::Def(0)); serialize_and_deserialize_uper(26, &[0x40, 0x81, 0x4e, 0x40], &Extensible::Def(1337)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x00], &Extensible::Ghi(0)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x1B], &Extensible::Ghi(27)); serialize_and_deserialize_uper( 40, &[0x80_u8, 0x03, 0x02, 0x05, 0x39], &Extensible::Ghi(1337), ); serialize_and_deserialize_uper( 120, &[ 0x82, 0x0d, 0x0c, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x61, 0x67, 0x61, 0x69, 0x6e, 0x21, ], &Extensible::Mno("Hello again!".to_string()), ); } #[test] pub fn test_basic_uper() { 
serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Basic::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper( 106, &[ 0x43, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x18, 0x59, 0xd8, 0x5a, 0x5b, 0x88, 0x40, ], &Basic::Def("Hello again!".to_string()), ); serialize_and_deserialize_uper(26, &[0x80, 0x81, 0x4e, 0x40], &Basic::Ghi(1337)); } #[test] fn test_extensible_choice_inner_complex() { let jkl = Extensible::Jkl(Basic::Ghi(1337)); let (bits, buffer) = serialize_uper(&jkl); let jkl_deserialized = deserialize_uper(&buffer[..], bits); assert_eq!(jkl, jkl_deserialized); } #[test] fn test_basic_variants_parsed() { let _abc = Basic::Abc(String::default()); let _def = Basic::Def(String::default()); let _ghi = Basic::Ghi(123_u64); match Basic::Abc(String::default()) { Basic::Abc(_) | Basic::Def(_) | Basic::Ghi(_) => {} } }
use asn1rs::prelude::*; use asn1rs::syn::io::UperReader as NewUperReader; use asn1rs::syn::io::UperWriter as NewUperWriter; asn_to_rust!( r"BasicChoice DEFINITIONS AUTOMATIC TAGS ::= BEGIN Basic ::= CHOICE { abc UTF8String, def UTF8String, ghi INTEGER } Extensible ::= CHOICE { abc UTF8String, def INTEGER, ..., -- whatever reserved blubber comment ghi INTEGER, jkl Basic, mno UTF8String } END" ); fn serialize_uper(to_uper: &impl Writable) -> (usize, Vec<u8>) { let mut writer = NewUperWriter::default(); writer.write(to_uper).unwrap(); let bits = writer.bit_len(); (bits, writer.into_bytes_vec()) } fn deserialize_uper<T: Readable>(data: &[u8], bits: usize) -> T { let mut reader = NewUperReader::from_bits(data, bits); reader.read::<T>().unwrap() } fn serialize_and_deserialize_uper<T: Readable + Writable + std::fmt::Debug + PartialEq>( bits: usize, data: &[u8], uper: &T, ) { let serialized = serialize_uper(uper); assert_eq!((bits, data), (serialized.0, &serialized.1[..])); assert_eq!(uper, &deserialize_uper::<T>(data, bits)); } #[test] fn test_extensible_uper() { serialize_and_deserialize_uper(10, &[0x00, 0x00], &Extensible::Abc(String::default())); serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Extensible::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper(18, &[0x40, 0x40, 0x00], &Extensible::Def(0)); serialize_and_deserialize_uper(26, &[0x40, 0x81, 0x4e, 0x40], &Extensible::Def(1337)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x00], &Extensible::Ghi(0)); serialize_and_deserialize_uper(32, &[0x80_u8, 0x02, 0x01, 0x1B], &Extensible::Ghi(27)); serialize_and_deserialize_uper( 40, &[0x80_u8, 0x03, 0x02, 0x05, 0x39], &Extensible::Ghi(1337), ); serialize_and_deserialize_uper( 120, &[ 0x82, 0x0d, 0x0c, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x61, 0x67, 0x61, 0x69, 0x6e, 0x21, ], &Extensible::Mno("Hello again!".to_string()), ); } #[test] pub fn test_basic_uper() { 
serialize_and_deserialize_uper( 106, &[ 0x03, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x15, 0xdb, 0xdc, 0x9b, 0x19, 0x08, 0x40, ], &Basic::Abc("Hello World!".to_string()), ); serialize_and_deserialize_uper( 106, &[ 0x43, 0x12, 0x19, 0x5b, 0x1b, 0x1b, 0xc8, 0x18, 0x59, 0xd8, 0x5a, 0x5b, 0x88, 0x40, ], &Basic::Def("Hello again!".to_string()), ); serialize_and_deserialize_uper(26, &[0x80, 0x81, 0x4e, 0x40], &Basic::Ghi(1337)); } #[test] fn test_extensible_choice_inner_complex() { let jkl = Extensible::Jkl(Basic::Ghi(1337)); let (bits, buffer) = serialize_uper(&jkl); let jkl_deserialized = deserialize_uper(&buffer[..], bits); assert_eq!(jkl, jkl_deserialized); } #[test] fn test_basic_variants_parsed() {
let _abc = Basic::Abc(String::default()); let _def = Basic::Def(String::default()); let _ghi = Basic::Ghi(123_u64); match Basic::Abc(String::default()) { Basic::Abc(_) | Basic::Def(_) | Basic::Ghi(_) => {} } }
function_block-function_prefix_line
[ { "content": "fn serialize_uper(to_uper: impl Writable) -> (usize, Vec<u8>) {\n\n let mut writer = NewUperWriter::default();\n\n writer.write(&to_uper).unwrap();\n\n let bits = writer.bit_len();\n\n (bits, writer.into_bytes_vec())\n\n}\n\n\n", "file_path": "tests/basic_enumerated.rs", "rank"...
Rust
src/lib.rs
lucviala/rusty-vjoy
0767102ac650b357f8677ce4556732711468cbc1
mod ffi; pub type VJDStat = ffi::VjdStat; pub type JoystickPosition = ffi::_JOYSTICK_POSITION_V2; #[repr(u32)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum HidUsage { X = 0x30, Y = 0x31, Z = 0x32, RX = 0x33, RY = 0x34, RZ = 0x35, SL0 = 0x36, SL1 = 0x37, WHL = 0x38, POV = 0x39, } pub fn get_vjoy_version() -> i16 { unsafe { ffi::GetvJoyVersion() } } pub fn vjoy_enabled() -> bool { unsafe { ffi::vJoyEnabled() != 0 } } pub fn get_vjoy_product_string() -> String { unsafe { let ptr = ffi::GetvJoyProductString(); let mut string = String::with_capacity(23); for i in 0..46 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_manufacturer_string() -> String { unsafe { let ptr = ffi::GetvJoyManufacturerString(); let mut string = String::with_capacity(16); for i in 0..32 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_serial_number_string() -> String { unsafe { let ptr = ffi::GetvJoySerialNumberString(); let mut string = String::with_capacity(5); for i in 0..10 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn driver_match() -> (bool, u16, u16) { let mut dll_ver = 0; let mut driver_ver = 0; unsafe { let status = ffi::DriverMatch(&mut dll_ver, &mut driver_ver) != 0; (status, dll_ver, driver_ver) } } pub fn get_vjoy_max_devices() -> i32 { let mut number = 0; unsafe { if ffi::GetvJoyMaxDevices(&mut number) != 0 { number } else { 0 } } } pub fn get_number_existing_vjd() -> i32 { let mut number = 0; unsafe { if ffi::GetNumberExistingVJD(&mut number) != 0 { number } else { 0 } } } pub fn get_vjd_button_number(id: u32) -> i32 { unsafe { ffi::GetVJDButtonNumber(id) } } pub fn get_vjd_disc_pov_number(id: u32) -> i32 { unsafe { ffi::GetVJDDiscPovNumber(id) } } pub fn get_vjd_cont_pov_number(id: u32) 
-> i32 { unsafe { ffi::GetVJDContPovNumber(id) } } pub fn get_vjd_axis_exist(id: u32, usage: HidUsage) -> bool { unsafe { ffi::GetVJDAxisExist(id, usage as u32) != 0 } } pub fn get_vjd_axis_max(id: u32, usage: HidUsage) -> i32 { let mut max = 0; unsafe { if ffi::GetVJDAxisMax(id, usage as u32, &mut max) != 0 { max } else { 0 } } } pub fn get_vjd_axis_min(id: u32, usage: HidUsage) -> i32 { let mut min = 0; unsafe { if ffi::GetVJDAxisMin(id, usage as u32, &mut min) != 0 { min } else { 0 } } } pub fn get_vjd_status(id: u32) -> VJDStat { unsafe { ffi::GetVJDStatus(id) } } pub fn is_vjd_exists(id: u32) -> bool { unsafe { ffi::isVJDExists(id) != 0 } } pub fn get_owner_pid(id: u32) -> i32 { unsafe { ffi::GetOwnerPid(id) } } pub fn acquire_vjd(id: u32) -> bool { unsafe { ffi::AcquireVJD(id) != 0 } } pub fn relinquish_vjd(id: u32) { unsafe { ffi::RelinquishVJD(id); } } pub fn update_vjd(id: u32, data: *mut JoystickPosition) { unsafe { ffi::UpdateVJD(id, data as *mut std::ffi::c_void); } } pub fn reset_vjd(id: u32) { unsafe { ffi::ResetVJD(id); } } pub fn reset_all() { unsafe { ffi::ResetAll(); } } pub fn reset_buttons(id: u32) { unsafe { ffi::ResetButtons(id); } } pub fn reset_povs(id: u32) { unsafe { ffi::ResetPovs(id); } } pub fn set_axis(value: i32, id: u32, axis: HidUsage) { unsafe { ffi::SetAxis(value, id, axis as u32); } } pub fn set_btn(value: i32, id: u32, n_btn: u8) { unsafe { ffi::SetBtn(value, id, n_btn); } } pub fn set_disc_pov(value: i32, id: u32, n_pov: u8) { unsafe { ffi::SetDiscPov(value, id, n_pov); } } pub fn set_cont_pov(value: u32, id: u32, n_pov: u8) { unsafe { ffi::SetContPov(value, id, n_pov); } } pub enum VjoyError { InitializationError, OpenVjoyDeviceError, } pub struct VjoyApi; impl VjoyApi { pub fn new() -> Result<Self, VjoyError> { if vjoy_enabled() && driver_match().0 { Ok(Self {}) } else { Err(VjoyError::InitializationError) } } pub fn acquire_device(id: u32) -> Result<VjoyDevice, VjoyError> { if id > 16 { return 
Err(VjoyError::OpenVjoyDeviceError); } Ok(VjoyDevice::new(id)?) } } pub struct VjoyDevice { pub id: u32, } impl VjoyDevice { fn new(id: u32) -> Result<Self, VjoyError> { match get_vjd_status(id) { VJDStat::VjdStatOwned => return Ok(Self { id }), VJDStat::VjdStatFree => { if acquire_vjd(id) { return Ok(Self { id }); } } _ => {} } Err(VjoyError::OpenVjoyDeviceError) } } impl Drop for VjoyDevice { fn drop(&mut self) { relinquish_vjd(self.id); } }
mod ffi; pub type VJDStat = ffi::VjdStat; pub type JoystickPosition = ffi::_JOYSTICK_POSITION_V2; #[repr(u32)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum HidUsage { X = 0x30, Y = 0x31, Z = 0x32, RX = 0x33, RY = 0x34, RZ = 0x35, SL0 = 0x36, SL1 = 0x37, WHL = 0x38, POV = 0x39, } pub fn get_vjoy_version() -> i16 { unsafe { ffi::GetvJoyVersion() } } pub fn vjoy_enabled() -> bool { unsafe { ffi::vJoyEnabled() != 0 } } pub fn get_vjoy_product_string() -> String { unsafe { let ptr = ffi::GetvJoyProductString(); let mut string = String::with_capacity(23); for i in 0..46 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_manufacturer_string() -> String { unsafe { let ptr = ffi::GetvJoyManufacturerString(); let mut string = String::with_capacity(16); for i in 0..32 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr); string.push_str(c_str.to_str().unwrap()); } string } } pub fn get_vjoy_serial_number_string() -> String { unsafe { let ptr = ffi::GetvJoySerialNumberString(); let mut string = String::with_capacity(5); for i in 0..10 { let ptr = ptr.add(i); let c_str = std::ffi::CStr::from_ptr(ptr as *mut i8); string.push_str(c_str.to_str().unwrap()); } string } } pub fn driver_match() -> (bool, u16, u16) { let mut dll_ver = 0; let mut driver_ver = 0; unsafe { let status = ffi::DriverMatch(&mut dll_ver, &mut driver_ver) != 0; (status, dll_ver, driver_ver) } } pub fn g
} } } pub fn get_number_existing_vjd() -> i32 { let mut number = 0; unsafe { if ffi::GetNumberExistingVJD(&mut number) != 0 { number } else { 0 } } } pub fn get_vjd_button_number(id: u32) -> i32 { unsafe { ffi::GetVJDButtonNumber(id) } } pub fn get_vjd_disc_pov_number(id: u32) -> i32 { unsafe { ffi::GetVJDDiscPovNumber(id) } } pub fn get_vjd_cont_pov_number(id: u32) -> i32 { unsafe { ffi::GetVJDContPovNumber(id) } } pub fn get_vjd_axis_exist(id: u32, usage: HidUsage) -> bool { unsafe { ffi::GetVJDAxisExist(id, usage as u32) != 0 } } pub fn get_vjd_axis_max(id: u32, usage: HidUsage) -> i32 { let mut max = 0; unsafe { if ffi::GetVJDAxisMax(id, usage as u32, &mut max) != 0 { max } else { 0 } } } pub fn get_vjd_axis_min(id: u32, usage: HidUsage) -> i32 { let mut min = 0; unsafe { if ffi::GetVJDAxisMin(id, usage as u32, &mut min) != 0 { min } else { 0 } } } pub fn get_vjd_status(id: u32) -> VJDStat { unsafe { ffi::GetVJDStatus(id) } } pub fn is_vjd_exists(id: u32) -> bool { unsafe { ffi::isVJDExists(id) != 0 } } pub fn get_owner_pid(id: u32) -> i32 { unsafe { ffi::GetOwnerPid(id) } } pub fn acquire_vjd(id: u32) -> bool { unsafe { ffi::AcquireVJD(id) != 0 } } pub fn relinquish_vjd(id: u32) { unsafe { ffi::RelinquishVJD(id); } } pub fn update_vjd(id: u32, data: *mut JoystickPosition) { unsafe { ffi::UpdateVJD(id, data as *mut std::ffi::c_void); } } pub fn reset_vjd(id: u32) { unsafe { ffi::ResetVJD(id); } } pub fn reset_all() { unsafe { ffi::ResetAll(); } } pub fn reset_buttons(id: u32) { unsafe { ffi::ResetButtons(id); } } pub fn reset_povs(id: u32) { unsafe { ffi::ResetPovs(id); } } pub fn set_axis(value: i32, id: u32, axis: HidUsage) { unsafe { ffi::SetAxis(value, id, axis as u32); } } pub fn set_btn(value: i32, id: u32, n_btn: u8) { unsafe { ffi::SetBtn(value, id, n_btn); } } pub fn set_disc_pov(value: i32, id: u32, n_pov: u8) { unsafe { ffi::SetDiscPov(value, id, n_pov); } } pub fn set_cont_pov(value: u32, id: u32, n_pov: u8) { unsafe { ffi::SetContPov(value, id, 
n_pov); } } pub enum VjoyError { InitializationError, OpenVjoyDeviceError, } pub struct VjoyApi; impl VjoyApi { pub fn new() -> Result<Self, VjoyError> { if vjoy_enabled() && driver_match().0 { Ok(Self {}) } else { Err(VjoyError::InitializationError) } } pub fn acquire_device(id: u32) -> Result<VjoyDevice, VjoyError> { if id > 16 { return Err(VjoyError::OpenVjoyDeviceError); } Ok(VjoyDevice::new(id)?) } } pub struct VjoyDevice { pub id: u32, } impl VjoyDevice { fn new(id: u32) -> Result<Self, VjoyError> { match get_vjd_status(id) { VJDStat::VjdStatOwned => return Ok(Self { id }), VJDStat::VjdStatFree => { if acquire_vjd(id) { return Ok(Self { id }); } } _ => {} } Err(VjoyError::OpenVjoyDeviceError) } } impl Drop for VjoyDevice { fn drop(&mut self) { relinquish_vjd(self.id); } }
et_vjoy_max_devices() -> i32 { let mut number = 0; unsafe { if ffi::GetvJoyMaxDevices(&mut number) != 0 { number } else { 0
function_block-random_span
[ { "content": "fn main() {\n\n //https://doc.rust-lang.org/rustc/command-line-arguments.html\n\n println!(r\"cargo:rustc-link-search=C:\\Program Files\\vJoy\\x64\");\n\n println!(\"cargo:rustc-link-lib=vJoyInterface\");\n\n\n\n //https://rust-lang.github.io/rust-bindgen\n\n let bindings = bindgen:...
Rust
src/parse/bam.rs
devsebb/ImmunoGenotyper
47be20f747e0e7541209be648e01aecf26433565
use debruijn::dna_string::DnaString; use rust_htslib::{bam, bam::record::Aux, bam::Read, bam::Reader}; pub struct UMIReader { reader: bam::Reader, pub current_umi_group: Vec<DnaString>, pub current_umi: String, pub current_cell_barcode: String, pub next_umi_group: Vec<DnaString>, next_umi: String } impl UMIReader { pub fn new(file_path: &str) -> UMIReader { UMIReader { reader: Reader::from_path(file_path).unwrap(), current_umi_group: Vec::new(), current_umi: String::new(), current_cell_barcode: String::new(), next_umi_group: Vec::new(), next_umi: String::new() } } pub fn next(&mut self) -> bool { let mut final_umi = false; if self.get_umi_from_bam().is_none() { final_umi = true; } final_umi } fn get_umi_from_bam(&mut self) -> Option<bool> { self.current_umi_group = self.next_umi_group.clone(); self.current_umi = self.next_umi.clone(); self.current_cell_barcode.clear(); self.next_umi_group.clear(); self.next_umi.clear(); for r in self.reader.records() { let record = r.unwrap(); let read_umi = if let Ok(Aux::String(s)) = record.aux(b"UR") { s.to_owned() } else { panic!("Error -- Could not read UMI, internal error."); }; let current_cell_barcode = if let Ok(Aux::String(s)) = record.aux(b"CR") { s.to_owned() } else { panic!("Error -- Could not read cell barcode, internal error."); }; if self.current_umi == "" { self.current_umi = read_umi.clone(); } let seq = UMIReader::strip_nonbio_regions(&record.seq().as_bytes()[..]); if self.current_umi == read_umi { self.current_umi_group .push(seq); self.current_cell_barcode = current_cell_barcode.clone(); } else { self.next_umi_group .push(seq); self.next_umi = read_umi.clone(); return Some(true); } } None } fn strip_nonbio_regions(seq: &[u8]) -> DnaString { let seq = String::from_utf8(seq.to_owned()).unwrap(); let mut tso_idx = seq.find("TTTCTTATATGGG"); if tso_idx.is_none() { tso_idx = seq.find("AAAGAATATACCC"); }; let seq = if tso_idx.is_some() { String::from_utf8(seq.as_bytes()[tso_idx.unwrap()+13..].to_vec()).unwrap() } 
else { seq }; let poly_t_tail_idx = seq.find("TTTTTTTTTTTTTTTTTTTTTTTTT"); let seq = if poly_t_tail_idx.is_some() { String::from_utf8(seq.as_bytes()[..poly_t_tail_idx.unwrap()].to_vec()).unwrap() } else { seq }; let mut reverse_primer_idx = seq.find("GTACTCTGCGTTGATACCACTGCTT"); if reverse_primer_idx.is_none() { reverse_primer_idx = seq.find("CATGAGACGCAACTATGGTGACGAA"); }; let seq = if reverse_primer_idx.is_some() { String::from_utf8(seq.as_bytes()[..reverse_primer_idx.unwrap()].to_vec()).unwrap() } else { seq }; DnaString::from_dna_string(&seq) } }
use debruijn::dna_string::DnaString; use rust_htslib::{bam, bam::record::Aux, bam::Read, bam::Reader}; pub struct UMIReader { reader: bam::Reader, pub current_umi_group: Vec<DnaString>, pub current_umi: String, pub current_cell_barcode: String, pub next_umi_group: Vec<DnaString>, next_umi: String } impl UMIReader { pub fn new(file_path: &str) -> UMIReader { UMIReader { reader: Reader::from_path(file_path).unwrap(), current_umi_group: Vec::new(), current_umi: String::new(), current_cell_barcode: String::new(), next_umi_group: Vec::new(), next_umi: String::new() } } pub fn next(&mut self) -> bool { let mut final_umi = false; if self.get_umi_from_bam().is_none() { final_umi = true; } final_umi } fn get_umi_from_bam(&mut self) -> Option<bool> { self.current_umi_group = self.next_umi_group.clone(); self.current_umi = self.next_umi.clone(); self.current_cell_barcode.clear(); self.next_umi_group.clear(); self.next_umi.clear(); for r in self.reader.records() { let record = r.unwrap(); let read_umi = if let Ok(Aux::String(s)) = record.aux(b"UR") { s.to_owned() } else { panic!("Error -- Could not read UMI, internal error."); }; let current_cell_barcode = if let Ok(Aux::String(s)) = record.aux(b"CR") { s.to_owned() } else { panic!("Error -- Could not read cell barcode, internal error."); }; if self.current_umi == "" { self.current_umi = read_umi.clone(); } let seq = UMIReader::strip_nonbio_regions(&record.seq().as_bytes()[..]); if self.current_umi == read_umi { self.current_umi_group .push(seq); self.cur
fn strip_nonbio_regions(seq: &[u8]) -> DnaString { let seq = String::from_utf8(seq.to_owned()).unwrap(); let mut tso_idx = seq.find("TTTCTTATATGGG"); if tso_idx.is_none() { tso_idx = seq.find("AAAGAATATACCC"); }; let seq = if tso_idx.is_some() { String::from_utf8(seq.as_bytes()[tso_idx.unwrap()+13..].to_vec()).unwrap() } else { seq }; let poly_t_tail_idx = seq.find("TTTTTTTTTTTTTTTTTTTTTTTTT"); let seq = if poly_t_tail_idx.is_some() { String::from_utf8(seq.as_bytes()[..poly_t_tail_idx.unwrap()].to_vec()).unwrap() } else { seq }; let mut reverse_primer_idx = seq.find("GTACTCTGCGTTGATACCACTGCTT"); if reverse_primer_idx.is_none() { reverse_primer_idx = seq.find("CATGAGACGCAACTATGGTGACGAA"); }; let seq = if reverse_primer_idx.is_some() { String::from_utf8(seq.as_bytes()[..reverse_primer_idx.unwrap()].to_vec()).unwrap() } else { seq }; DnaString::from_dna_string(&seq) } }
rent_cell_barcode = current_cell_barcode.clone(); } else { self.next_umi_group .push(seq); self.next_umi = read_umi.clone(); return Some(true); } } None }
function_block-function_prefixed
[ { "content": "// Takes a reader and returns a csv reader that wraps it, configures to use tab delimiters\n\npub fn get_tsv_reader<R: Read>(reader: R) -> Reader<R> {\n\n csv::ReaderBuilder::new()\n\n .delimiter(b'\\t')\n\n .from_reader(reader)\n\n}\n\n\n\n/* Takes a reference to the ReferenceMet...
Rust
loadstone_config/src/codegen/memory_map.rs
arron-speake-bluefruit/loadstone
c65cb32809dc069b17830dd4a39d95c913994200
use anyhow::Result; use quote::{format_ident, quote}; use std::{fs::OpenOptions, io::Write, path::Path}; use crate::{ memory::{ExternalMemoryMap, InternalMemoryMap, MemoryConfiguration}, port::{Port, Subfamily}, }; use super::prettify_file; pub fn generate<P: AsRef<Path>>( autogenerated_folder_path: P, memory_configuration: &MemoryConfiguration, port: &Port, ) -> Result<()> { let filename = autogenerated_folder_path.as_ref().join("memory_map.rs"); let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?; let base_index = 1usize; let imports = generate_imports(&memory_configuration, port)?; let mcu_banks = generate_mcu_banks( base_index, &memory_configuration.internal_memory_map, memory_configuration.golden_index, )?; let external_banks = generate_external_banks( memory_configuration.internal_memory_map.banks.len() + base_index, &memory_configuration.external_memory_map, memory_configuration.golden_index, )?; file.write_all(imports.as_bytes())?; file.write_all(mcu_banks.as_bytes())?; file.write_all(external_banks.as_bytes())?; prettify_file(filename).ok(); Ok(()) } fn generate_imports(memory_configuration: &MemoryConfiguration, port: &Port) -> Result<String> { let external_address: Vec<_> = match &memory_configuration.external_flash { Some(external_flash) if external_flash.name.to_lowercase().contains("n25q128a") => { ["blue_hal", "drivers", "micron", "n25q128a_flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect() } None if *port == Port::Stm32F412 => ["blue_hal", "hal", "null", "NullAddress"] .iter() .map(|f| format_ident!("{}", f)) .collect(), _ => ["usize"].iter().map(|f| format_ident!("{}", f)).collect(), }; let mcu_address: Vec<_> = match port.subfamily() { Subfamily::Stm32f4 => ["blue_hal", "drivers", "stm32f4", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect(), Subfamily::Efm32Gg11 => ["blue_hal", "drivers", "efm32gg11b", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) 
.collect(), }; let code = quote! { use crate::devices::image as image; #[allow(unused_imports)] use super::pin_configuration::ExternalFlash; use #(#mcu_address)::* as McuAddress; use #(#external_address)::* as ExternalAddress; }; Ok(format!("{}", code)) } fn generate_external_banks( base_index: usize, map: &ExternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_external_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable = vec![false; number_of_external_banks]; let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_external_banks).map(|i| Some((i + base_index).saturating_sub(1)) == golden_index).collect(); let code = quote! { const NUMBER_OF_EXTERNAL_BANKS: usize = #number_of_external_banks; pub static EXTERNAL_BANKS: [image::Bank<ExternalAddress>; NUMBER_OF_EXTERNAL_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: ExternalAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) } fn generate_mcu_banks( base_index: usize, map: &InternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_mcu_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == map.bootable_index).collect(); let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == golden_index).collect(); let code = quote! 
{ const NUMBER_OF_MCU_BANKS: usize = #number_of_mcu_banks; pub static MCU_BANKS: [image::Bank<McuAddress>; NUMBER_OF_MCU_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: McuAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) }
use anyhow::Result; use quote::{format_ident, quote}; use std::{fs::OpenOptions, io::Write, path::Path}; use crate::{ memory::{ExternalMemoryMap, InternalMemoryMap, MemoryConfiguration}, port::{Port, Subfamily}, }; use super::prettify_file; pub fn generate<P: AsRef<Path>>( autogenerated_folder_path: P, memory_configuration: &MemoryConfiguration, port: &Port, ) -> Result<()> { let filename = autogenerated_folder_path.as_ref().join("memory_map.rs"); let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(&filename)?; let base_index = 1usize; let imports = generate_imports(&memory_configuration, port)?; let mcu_banks = generate_mcu_banks( base_index, &memory_configuration.internal_memory_map, memory_configuration.golden_index, )?; let external_banks = generate_external_banks( memory_configuration.internal_memory_map.banks.len() + base_index, &memory_configuration.external_memory_map, memory_configuration.golden_index, )?; file.write_all(imports.as_bytes())?; file.write_all(mcu_banks.as_bytes())?; file.write_all(external_banks.as_bytes())?; prettify_file(filename).ok(); Ok(()) }
fn generate_external_banks( base_index: usize, map: &ExternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_external_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable = vec![false; number_of_external_banks]; let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_external_banks).map(|i| Some((i + base_index).saturating_sub(1)) == golden_index).collect(); let code = quote! { const NUMBER_OF_EXTERNAL_BANKS: usize = #number_of_external_banks; pub static EXTERNAL_BANKS: [image::Bank<ExternalAddress>; NUMBER_OF_EXTERNAL_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: ExternalAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) } fn generate_mcu_banks( base_index: usize, map: &InternalMemoryMap, golden_index: Option<usize>, ) -> Result<String> { let number_of_mcu_banks = map.banks.len(); let index: Vec<u8> = map.banks.iter().enumerate().map(|(i, _)| (i + base_index) as u8).collect(); let bootable: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == map.bootable_index).collect(); let location: Vec<u32> = map.banks.iter().map(|b| b.start_address).collect(); let size: Vec<usize> = map.banks.iter().map(|b| (b.size_kb * 1024) as usize).collect(); let golden: Vec<bool> = (0..number_of_mcu_banks).map(|i| Some(i) == golden_index).collect(); let code = quote! { const NUMBER_OF_MCU_BANKS: usize = #number_of_mcu_banks; pub static MCU_BANKS: [image::Bank<McuAddress>; NUMBER_OF_MCU_BANKS] = [ #(image::Bank { index: #index, bootable: #bootable, location: McuAddress(#location), size: #size, is_golden: #golden, }),* ]; }; Ok(format!("{}", code)) }
fn generate_imports(memory_configuration: &MemoryConfiguration, port: &Port) -> Result<String> { let external_address: Vec<_> = match &memory_configuration.external_flash { Some(external_flash) if external_flash.name.to_lowercase().contains("n25q128a") => { ["blue_hal", "drivers", "micron", "n25q128a_flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect() } None if *port == Port::Stm32F412 => ["blue_hal", "hal", "null", "NullAddress"] .iter() .map(|f| format_ident!("{}", f)) .collect(), _ => ["usize"].iter().map(|f| format_ident!("{}", f)).collect(), }; let mcu_address: Vec<_> = match port.subfamily() { Subfamily::Stm32f4 => ["blue_hal", "drivers", "stm32f4", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect(), Subfamily::Efm32Gg11 => ["blue_hal", "drivers", "efm32gg11b", "flash", "Address"] .iter() .map(|f| format_ident!("{}", f)) .collect(), }; let code = quote! { use crate::devices::image as image; #[allow(unused_imports)] use super::pin_configuration::ExternalFlash; use #(#mcu_address)::* as McuAddress; use #(#external_address)::* as ExternalAddress; }; Ok(format!("{}", code)) }
function_block-full_function
[ { "content": "pub fn read_key(mut file: File) -> Result<SigningKey, Error> {\n\n let mut string = String::new();\n\n file.read_to_string(&mut string).map_err(|_| Error::KeyParseFailed)?;\n\n SigningKey::from_str(string.as_str()).map_err(|_| Error::KeyParseFailed)\n\n}\n\n\n", "file_path": "tools/si...
Rust
src/influxdb_ioxd/rpc/management.rs
mitch292/influxdb_iox
7e8f3aaafbd497c0c6e8f993b4ce0330e6d43ccb
use std::convert::TryInto; use std::fmt::Debug; use std::sync::Arc; use tonic::{Request, Response, Status}; use tracing::error; use data_types::database_rules::DatabaseRules; use data_types::DatabaseName; use generated_types::google::{ AlreadyExists, FieldViolation, FieldViolationExt, InternalError, NotFound, PreconditionViolation, }; use generated_types::influxdata::iox::management::v1::*; use query::DatabaseStore; use server::{ConnectionManager, Error, Server}; struct ManagementService<M: ConnectionManager> { server: Arc<Server<M>>, } fn default_error_handler(error: Error) -> tonic::Status { match error { Error::IdNotSet => PreconditionViolation { category: "Writer ID".to_string(), subject: "influxdata.com/iox".to_string(), description: "Writer ID must be set".to_string(), } .into(), error => { error!(?error, "Unexpected error"); InternalError {}.into() } } } #[tonic::async_trait] impl<M> management_service_server::ManagementService for ManagementService<M> where M: ConnectionManager + Send + Sync + Debug + 'static, { async fn get_writer_id( &self, _: Request<GetWriterIdRequest>, ) -> Result<Response<GetWriterIdResponse>, Status> { match self.server.require_id().ok() { Some(id) => Ok(Response::new(GetWriterIdResponse { id })), None => return Err(NotFound::default().into()), } } async fn update_writer_id( &self, request: Request<UpdateWriterIdRequest>, ) -> Result<Response<UpdateWriterIdResponse>, Status> { self.server.set_id(request.get_ref().id); Ok(Response::new(UpdateWriterIdResponse {})) } async fn list_databases( &self, _: Request<ListDatabasesRequest>, ) -> Result<Response<ListDatabasesResponse>, Status> { let names = self.server.db_names_sorted().await; Ok(Response::new(ListDatabasesResponse { names })) } async fn get_database( &self, request: Request<GetDatabaseRequest>, ) -> Result<Response<GetDatabaseResponse>, Status> { let name = DatabaseName::new(request.into_inner().name).field("name")?; match self.server.db_rules(&name).await { Some(rules) => 
Ok(Response::new(GetDatabaseResponse { rules: Some(rules.into()), })), None => { return Err(NotFound { resource_type: "database".to_string(), resource_name: name.to_string(), ..Default::default() } .into()) } } } async fn create_database( &self, request: Request<CreateDatabaseRequest>, ) -> Result<Response<CreateDatabaseResponse>, Status> { let rules: DatabaseRules = request .into_inner() .rules .ok_or_else(|| FieldViolation::required("")) .and_then(TryInto::try_into) .map_err(|e| e.scope("rules"))?; let name = DatabaseName::new(rules.name.clone()).expect("protobuf mapping didn't validate name"); match self.server.create_database(name, rules).await { Ok(_) => Ok(Response::new(CreateDatabaseResponse {})), Err(Error::DatabaseAlreadyExists { db_name }) => { return Err(AlreadyExists { resource_type: "database".to_string(), resource_name: db_name, ..Default::default() } .into()) } Err(e) => Err(default_error_handler(e)), } } } pub fn make_server<M>( server: Arc<Server<M>>, ) -> management_service_server::ManagementServiceServer< impl management_service_server::ManagementService, > where M: ConnectionManager + Send + Sync + Debug + 'static, { management_service_server::ManagementServiceServer::new(ManagementService { server }) }
use std::convert::TryInto; use std::fmt::Debug; use std::sync::Arc; use tonic::{Request, Response, Status}; use tracing::error; use data_types::database_rules::DatabaseRules; use data_types::DatabaseName; use generated_types::google::{ AlreadyExists, FieldViolation, FieldViolationExt, InternalError, NotFound, PreconditionViolation, }; use generated_types::influxdata::iox::management::v1::*; use query::DatabaseStore; use server::{ConnectionManager, Error, Server}; struct ManagementService<M: ConnectionManager> { server: Arc<Server<M>>, } fn default_error_handler(error: Error) -> tonic::Status { match error { Error::IdNotSet => PreconditionViolation { category: "Writer ID".to_string(), subject: "influxdata.com/iox".to_string(), description: "Writer ID must be set".to_string(), } .into(), error => { error!(?error, "Unexpected error"); InternalError {}.into() } } } #[tonic::async_trait] impl<M> management_service_server::ManagementService for ManagementService<M> where M: ConnectionManager + Send + Sync + Debug + 'static, { async fn get_writer_id( &self, _: Request<GetWriterIdRequest>, ) -> Result<Response<GetWriterIdResponse>, Status> { match self.server.require_id().ok() { Some(id) => Ok(Response::new(GetWriterIdResponse { id })), None => return Err(NotFound::default().into()), } } async fn update_writer_id( &self, request: Request<UpdateWriterIdRequest>, ) -> Result<Response<UpdateWriterIdResponse>, Status> { self.server.set_id(request.get_ref().id); Ok(Response::new(UpdateWriterIdResponse {})) } async fn list_databases( &sel
async fn get_database( &self, request: Request<GetDatabaseRequest>, ) -> Result<Response<GetDatabaseResponse>, Status> { let name = DatabaseName::new(request.into_inner().name).field("name")?; match self.server.db_rules(&name).await { Some(rules) => Ok(Response::new(GetDatabaseResponse { rules: Some(rules.into()), })), None => { return Err(NotFound { resource_type: "database".to_string(), resource_name: name.to_string(), ..Default::default() } .into()) } } } async fn create_database( &self, request: Request<CreateDatabaseRequest>, ) -> Result<Response<CreateDatabaseResponse>, Status> { let rules: DatabaseRules = request .into_inner() .rules .ok_or_else(|| FieldViolation::required("")) .and_then(TryInto::try_into) .map_err(|e| e.scope("rules"))?; let name = DatabaseName::new(rules.name.clone()).expect("protobuf mapping didn't validate name"); match self.server.create_database(name, rules).await { Ok(_) => Ok(Response::new(CreateDatabaseResponse {})), Err(Error::DatabaseAlreadyExists { db_name }) => { return Err(AlreadyExists { resource_type: "database".to_string(), resource_name: db_name, ..Default::default() } .into()) } Err(e) => Err(default_error_handler(e)), } } } pub fn make_server<M>( server: Arc<Server<M>>, ) -> management_service_server::ManagementServiceServer< impl management_service_server::ManagementService, > where M: ConnectionManager + Send + Sync + Debug + 'static, { management_service_server::ManagementServiceServer::new(ManagementService { server }) }
f, _: Request<ListDatabasesRequest>, ) -> Result<Response<ListDatabasesResponse>, Status> { let names = self.server.db_names_sorted().await; Ok(Response::new(ListDatabasesResponse { names })) }
function_block-function_prefixed
[ { "content": "type DatabaseError = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n\n\n/// A server ID of 0 is reserved and indicates no ID has been configured.\n\nconst SERVER_ID_NOT_SET: u32 = 0;\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"Server error: {}\", source))]\n...
Rust
src/unpack.rs
clouds56/crnlib
13dbc1e0e4184ad686e3987708c4564901ed92ef
use std::io::prelude::*; use anyhow::*; use serde::{Serialize, Deserialize}; use crate::{Tables, Huffman, codec::Codec}; pub trait Block: Serialize { const BLOCK_SIZE: usize; fn write_to<W: Write>(&self, mut w: W) -> std::io::Result<()> { use bincode::Options; let bin = bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian() .serialize(self) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; w.write(&bin)?; Ok(()) } } pub trait Unpack { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error>; fn next_tile_idx(codec: &mut Codec, encoding: &Huffman, tile_bits: &mut u32) -> Result<(usize, [usize; 4]), Error> { if *tile_bits == 1 { *tile_bits = encoding.next(codec).context("read chunk encoding bits")? | 512; } let tile_index = *tile_bits as usize & 7; *tile_bits >>= 3; Ok((Self::COUNT_TILES[tile_index], Self::TILES[tile_index])) } const TRUNK_SIZE: usize = 2; const COUNT_TILES: [usize; 8] = [ 1, 2, 2, 3, 3, 3, 3, 4 ]; const TILES: [[usize; 4]; 8] = [ [ 0, 0, 0, 0 ], [ 0, 0, 1, 1 ], [ 0, 1, 0, 1 ], [ 0, 0, 1, 2 ], [ 1, 2, 0, 0 ], [ 0, 1, 0, 2 ], [ 1, 0, 2, 0 ], [ 0, 1, 2, 3 ] ]; } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt1 { pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt1 { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt1 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == 
(chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt1 { color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5 { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt5 { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxt5 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for 
y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5 { alpha_endpoint: alpha_endpoints[tile], alpha_selector, color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5A { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], } impl Block for Dxt5A { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt5A { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / 
Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5A { alpha_endpoint: alpha_endpoints[tile], alpha_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxn { pub alpha0_endpoint: (u8, u8), pub alpha0_selector: [u8; 6], pub alpha1_endpoint: (u8, u8), pub alpha1_selector: [u8; 6], } impl Block for Dxn { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxn { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut 
tile_bits = 1u32; let mut alpha0_endpoint_index = 0; let mut alpha0_selector_index = 0; let mut alpha1_endpoint_index = 0; let mut alpha1_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha0_endpoints = [(0, 0); 4]; let mut alpha1_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha0_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha0_endpoint_index).context("read alpha0_endpoint_delta")?; } for i in 0..tiles_count { alpha1_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha1_endpoint_index).context("read alpha1_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha0_selector = tables.alpha_selector()?.next(codec, &mut alpha0_selector_index).context("read alpha0_selector_delta")?; let alpha1_selector = tables.alpha_selector()?.next(codec, &mut alpha1_selector_index).context("read alpha1_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxn { alpha0_endpoint: alpha0_endpoints[tile], alpha0_selector, alpha1_endpoint: alpha1_endpoints[tile], alpha1_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[test] fn test_constant() { assert_eq!(Dxt5::TILES.len(), Dxt5::COUNT_TILES.len()); assert_eq!(Dxt5::TILES[0].len(), 
Dxt5::TRUNK_SIZE * Dxt5::TRUNK_SIZE); use bincode::Options; let option = || bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian(); assert_eq!(option().serialized_size(&Dxt1::default()).unwrap(), Dxt1::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5::default()).unwrap(), Dxt5::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5A::default()).unwrap(), Dxt5A::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxn::default()).unwrap(), Dxn::BLOCK_SIZE as u64); assert_eq!(option().serialize(&Dxt5 { alpha_endpoint: (0x17, 0x18), alpha_selector: [0x20, 0x21, 0x22, 0x23, 0x24, 0x25], color_endpoint: (0x3234, 0x3537), color_selector: [0x49, 0x48, 0x47, 0x46], }).unwrap(), &[ 0x17, 0x18, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x34, 0x32, 0x37, 0x35, 0x49, 0x48, 0x47, 0x46]); }
use std::io::prelude::*; use anyhow::*; use serde::{Serialize, Deserialize}; use crate::{Tables, Huffman, codec::Codec}; pub trait Block: Serialize { const BLOCK_SIZE: usize; fn write_to<W: Write>(&self, mut w: W) -> std::io::Result<()> { use bincode::Options; let bin = bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian() .serialize(self) .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?; w.write(&bin)?; Ok(()) } } pub trait Unpack { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error>; fn next_tile_idx(codec: &mut Codec, encoding: &Huffman, tile_bits: &mut u32) -> Result<(usize, [usize; 4]), Error> { if *tile_bits == 1 { *tile_bits = encoding.next(codec).context("read chunk encoding bits")? | 512; } let tile_index = *tile_bits as usize & 7; *tile_bits >>= 3; Ok((Self::COUNT_TILES[tile_index], Self::TILES[tile_index])) } const TRUNK_SIZE: usize = 2; const COUNT_TILES: [usize; 8] = [ 1, 2, 2, 3, 3, 3, 3, 4 ]; const TILES: [[usize; 4]; 8] = [ [ 0, 0, 0, 0 ], [ 0, 0, 1, 1 ], [ 0, 1, 0, 1 ], [ 0, 0, 1, 2 ], [ 1, 2, 0, 0 ], [ 0, 1, 0, 2 ], [ 1, 0, 2, 0 ], [ 0, 1, 2, 3 ] ]; } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt1 { pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt1 { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt1 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == 
(chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt1 { color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5 { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], pub color_endpoint: (u16, u16), pub color_selector: [u8; 4], } impl Block for Dxt5 { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxt5 { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut color_endpoint_index = 0; let mut color_selector_index = 0; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for 
y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut color_endpoints = [(0, 0); 4]; let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for i in 0..tiles_count { color_endpoints[i] = tables.color_endpoint()?.next(codec, &mut color_endpoint_index).context("read color_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; let color_selector = tables.color_selector()?.next(codec, &mut color_selector_index).context("read color_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5 { alpha_endpoint: alpha_endpoints[tile], alpha_selector, color_endpoint: color_endpoints[tile], color_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxt5A { pub alpha_endpoint: (u8, u8), pub alpha_selector: [u8; 6], } impl Block for Dxt5A { const BLOCK_SIZE: usize = 8; } impl Unpack for Dxt5A {
} #[derive(Debug, Default, Serialize, Deserialize)] pub struct Dxn { pub alpha0_endpoint: (u8, u8), pub alpha0_selector: [u8; 6], pub alpha1_endpoint: (u8, u8), pub alpha1_selector: [u8; 6], } impl Block for Dxn { const BLOCK_SIZE: usize = 16; } impl Unpack for Dxn { fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut alpha0_endpoint_index = 0; let mut alpha0_selector_index = 0; let mut alpha1_endpoint_index = 0; let mut alpha1_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha0_endpoints = [(0, 0); 4]; let mut alpha1_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha0_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha0_endpoint_index).context("read alpha0_endpoint_delta")?; } for i in 0..tiles_count { alpha1_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha1_endpoint_index).context("read alpha1_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha0_selector = tables.alpha_selector()?.next(codec, &mut alpha0_selector_index).context("read alpha0_selector_delta")?; let alpha1_selector = tables.alpha_selector()?.next(codec, &mut alpha1_selector_index).context("read alpha1_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 
0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxn { alpha0_endpoint: alpha0_endpoints[tile], alpha0_selector, alpha1_endpoint: alpha1_endpoints[tile], alpha1_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) } } #[test] fn test_constant() { assert_eq!(Dxt5::TILES.len(), Dxt5::COUNT_TILES.len()); assert_eq!(Dxt5::TILES[0].len(), Dxt5::TRUNK_SIZE * Dxt5::TRUNK_SIZE); use bincode::Options; let option = || bincode::config::DefaultOptions::new() .with_fixint_encoding() .with_little_endian(); assert_eq!(option().serialized_size(&Dxt1::default()).unwrap(), Dxt1::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5::default()).unwrap(), Dxt5::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxt5A::default()).unwrap(), Dxt5A::BLOCK_SIZE as u64); assert_eq!(option().serialized_size(&Dxn::default()).unwrap(), Dxn::BLOCK_SIZE as u64); assert_eq!(option().serialize(&Dxt5 { alpha_endpoint: (0x17, 0x18), alpha_selector: [0x20, 0x21, 0x22, 0x23, 0x24, 0x25], color_endpoint: (0x3234, 0x3537), color_selector: [0x49, 0x48, 0x47, 0x46], }).unwrap(), &[ 0x17, 0x18, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x34, 0x32, 0x37, 0x35, 0x49, 0x48, 0x47, 0x46]); }
fn unpack(tables: &Tables, codec: &mut Codec, width: u16, height: u16, face: u8) -> Result<Vec<u8>, Error> { let block_x = (width + 3) / 4; let block_y = (height + 3) / 4; let chunk_x = (block_x + 1) as usize / Self::TRUNK_SIZE; let chunk_y = (block_y + 1) as usize / Self::TRUNK_SIZE; let mut tile_bits = 1u32; let mut alpha_endpoint_index = 0; let mut alpha_selector_index = 0; let pitch = block_x as usize * Self::BLOCK_SIZE; let mut result = vec![0u8; block_y as usize * pitch]; let mut cursor = std::io::Cursor::new(&mut result[..]); for _f in 0..face { for y in 0..chunk_y { let skip_y = y == (chunk_y - 1) && block_y & 1 == 1; let xrange: Box<dyn Iterator<Item=_>> = if y & 1 == 1 { Box::new((0..chunk_x).rev()) } else { Box::new(0..chunk_x) }; for x in xrange { let skip_x = block_x & 1 == 1 && x == (chunk_x - 1); let mut alpha_endpoints = [(0, 0); 4]; let (tiles_count, tiles) = Self::next_tile_idx(codec, &tables.chunk_encoding, &mut tile_bits)?; for i in 0..tiles_count { alpha_endpoints[i] = tables.alpha_endpoint()?.next(codec, &mut alpha_endpoint_index).context("read alpha_endpoint_delta")?; } for (i, &tile) in tiles.iter().enumerate() { let alpha_selector = tables.alpha_selector()?.next(codec, &mut alpha_selector_index).context("read alpha_selector_delta")?; if !skip_x && !skip_y { if i % Self::TRUNK_SIZE == 0 { let pos = (y * Self::TRUNK_SIZE + i / Self::TRUNK_SIZE) * pitch + x * Self::BLOCK_SIZE * Self::TRUNK_SIZE; cursor.seek(std::io::SeekFrom::Start(pos as _)).expect("seek"); } Dxt5A { alpha_endpoint: alpha_endpoints[tile], alpha_selector, }.write_to(&mut cursor).context("write block")?; } } } } } if !codec.is_complete() { bail!("extra bytes in codec") } Ok(result) }
function_block-full_function
[ { "content": "#[test]\n\nfn test_huffman() {\n\n let input = [0b0100_0000u8];\n\n let mut codec = Codec::new(&input);\n\n let huffman = Huffman::new(BTreeMap::<bool,_>::new()).expect(\"zero huffman\");\n\n assert!(huffman.next(&mut codec).is_err());\n\n\n\n let mut codec = Codec::new(&input);\n\n let mut ...
Rust
src/rngs/entropy.rs
robsmith11/rand
1e9554d79b915860894bfba3aaff8c3f1c8b2159
use rand_core::{RngCore, CryptoRng, Error, ErrorKind, impls}; #[allow(unused)] use rngs; #[derive(Debug)] pub struct EntropyRng { source: Source, } #[derive(Debug)] enum Source { Os(Os), Custom(Custom), Jitter(Jitter), None, } impl EntropyRng { pub fn new() -> Self { EntropyRng { source: Source::None } } } impl Default for EntropyRng { fn default() -> Self { EntropyRng::new() } } impl RngCore for EntropyRng { fn next_u32(&mut self) -> u32 { impls::next_u32_via_fill(self) } fn next_u64(&mut self) -> u64 { impls::next_u64_via_fill(self) } fn fill_bytes(&mut self, dest: &mut [u8]) { self.try_fill_bytes(dest).unwrap_or_else(|err| panic!("all entropy sources failed; first error: {}", err)) } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { let mut reported_error = None; if let Source::Os(ref mut os_rng) = self.source { match os_rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: OsRng failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Os::is_supported() { match Os::new_and_fill(dest) { Ok(os_rng) => { debug!("EntropyRng: using OsRng"); self.source = Source::Os(os_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Custom(ref mut rng) = self.source { match rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: custom entropy source failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Custom::is_supported() { match Custom::new_and_fill(dest) { Ok(custom) => { debug!("EntropyRng: using custom entropy source"); self.source = Source::Custom(custom); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Jitter(ref mut jitter_rng) = self.source { match jitter_rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: JitterRng failed: {}", err); reported_error = Some(err); }, } } else if Jitter::is_supported() { 
match Jitter::new_and_fill(dest) { Ok(jitter_rng) => { debug!("EntropyRng: using JitterRng"); self.source = Source::Jitter(jitter_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Some(err) = reported_error { Err(Error::with_cause(ErrorKind::Unavailable, "All entropy sources failed", err)) } else { Err(Error::new(ErrorKind::Unavailable, "No entropy sources available")) } } } impl CryptoRng for EntropyRng {} trait EntropySource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> where Self: Sized; fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error>; fn is_supported() -> bool { true } } #[allow(unused)] #[derive(Clone, Debug)] struct NoSource; #[allow(unused)] impl EntropySource for NoSource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { Err(Error::new(ErrorKind::Unavailable, "Source not supported")) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { unreachable!() } fn is_supported() -> bool { false } } #[cfg(feature="rand_os")] #[derive(Clone, Debug)] pub struct Os(rngs::OsRng); #[cfg(feature="rand_os")] impl EntropySource for Os { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::OsRng::new()?; rng.try_fill_bytes(dest)?; Ok(Os(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(not(feature="std"))] type Os = NoSource; type Custom = NoSource; #[cfg(not(target_arch = "wasm32"))] #[derive(Clone, Debug)] pub struct Jitter(rngs::JitterRng); #[cfg(not(target_arch = "wasm32"))] impl EntropySource for Jitter { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::JitterRng::new()?; rng.try_fill_bytes(dest)?; Ok(Jitter(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(target_arch = "wasm32")] type Jitter = NoSource; #[cfg(test)] mod test { use super::*; #[test] fn test_entropy() { let mut rng = EntropyRng::new(); let n = 
(rng.next_u32() ^ rng.next_u32()).count_ones(); assert!(n >= 2); } }
use rand_core::{RngCore, CryptoRng, Error, ErrorKind, impls}; #[allow(unused)] use rngs; #[derive(Debug)] pub struct EntropyRng { source: Source, } #[derive(Debug)] enum Source { Os(Os), Custom(Custom), Jitter(Jitter), None, } impl EntropyRng { pub fn new() -> Self { EntropyRng { source: Source::None } } } impl Default for EntropyRng { fn default() -> Self { EntropyRng::new() } } impl RngCore for EntropyRng { fn next_u32(&mut self) -> u32 { impls::next_u32_via_fill(self) } fn next_u64(&mut self) -> u64 { impls::next_u64_via_fill(self) } fn fill_bytes(&mut
Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: JitterRng failed: {}", err); reported_error = Some(err); }, } } else if Jitter::is_supported() { match Jitter::new_and_fill(dest) { Ok(jitter_rng) => { debug!("EntropyRng: using JitterRng"); self.source = Source::Jitter(jitter_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Some(err) = reported_error { Err(Error::with_cause(ErrorKind::Unavailable, "All entropy sources failed", err)) } else { Err(Error::new(ErrorKind::Unavailable, "No entropy sources available")) } } } impl CryptoRng for EntropyRng {} trait EntropySource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> where Self: Sized; fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error>; fn is_supported() -> bool { true } } #[allow(unused)] #[derive(Clone, Debug)] struct NoSource; #[allow(unused)] impl EntropySource for NoSource { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { Err(Error::new(ErrorKind::Unavailable, "Source not supported")) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { unreachable!() } fn is_supported() -> bool { false } } #[cfg(feature="rand_os")] #[derive(Clone, Debug)] pub struct Os(rngs::OsRng); #[cfg(feature="rand_os")] impl EntropySource for Os { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::OsRng::new()?; rng.try_fill_bytes(dest)?; Ok(Os(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(not(feature="std"))] type Os = NoSource; type Custom = NoSource; #[cfg(not(target_arch = "wasm32"))] #[derive(Clone, Debug)] pub struct Jitter(rngs::JitterRng); #[cfg(not(target_arch = "wasm32"))] impl EntropySource for Jitter { fn new_and_fill(dest: &mut [u8]) -> Result<Self, Error> { let mut rng = rngs::JitterRng::new()?; rng.try_fill_bytes(dest)?; Ok(Jitter(rng)) } fn fill(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } #[cfg(target_arch = 
"wasm32")] type Jitter = NoSource; #[cfg(test)] mod test { use super::*; #[test] fn test_entropy() { let mut rng = EntropyRng::new(); let n = (rng.next_u32() ^ rng.next_u32()).count_ones(); assert!(n >= 2); } }
self, dest: &mut [u8]) { self.try_fill_bytes(dest).unwrap_or_else(|err| panic!("all entropy sources failed; first error: {}", err)) } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { let mut reported_error = None; if let Source::Os(ref mut os_rng) = self.source { match os_rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: OsRng failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Os::is_supported() { match Os::new_and_fill(dest) { Ok(os_rng) => { debug!("EntropyRng: using OsRng"); self.source = Source::Os(os_rng); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Custom(ref mut rng) = self.source { match rng.fill(dest) { Ok(()) => return Ok(()), Err(err) => { warn!("EntropyRng: custom entropy source failed \ [trying other entropy sources]: {}", err); reported_error = Some(err); }, } } else if Custom::is_supported() { match Custom::new_and_fill(dest) { Ok(custom) => { debug!("EntropyRng: using custom entropy source"); self.source = Source::Custom(custom); return Ok(()); }, Err(err) => { reported_error = reported_error.or(Some(err)) }, } } if let Source::Jitter(ref mut jitter_rng) = self.source { match jitter_rng.fill(dest) {
random
[ { "content": "/// Implement `next_u64` via `next_u32`, little-endian order.\n\npub fn next_u64_via_u32<R: RngCore + ?Sized>(rng: &mut R) -> u64 {\n\n // Use LE; we explicitly generate one value before the next.\n\n let x = u64::from(rng.next_u32());\n\n let y = u64::from(rng.next_u32());\n\n (y << 3...
Rust
src/validator/lockbox.rs
Cognoscan/fog_pack
7b3af246faa851bfc2aa09cc186ff2332124e791
use super::*; use crate::element::*; use crate::error::{Error, Result}; use serde::{Deserialize, Serialize}; #[inline] fn is_false(v: &bool) -> bool { !v } #[inline] fn u32_is_zero(v: &u32) -> bool { *v == 0 } #[inline] fn u32_is_max(v: &u32) -> bool { *v == u32::MAX } macro_rules! lockbox_validator { ($t: ty, $e: ident, $v: ident, $link:expr, $name:expr) => { #[doc = "Validator for a [`"] #[doc = $name] #[doc = "`]["] #[doc = $link] #[doc = "].\n\n"] #[doc = "This validator will only pass a "] #[doc = $name] #[doc = " value. Validation passes if:\n\n"] #[doc = "- The number of bytes in the lockbox is less than or equal to `max_len`\n"] #[doc = "- The number of bytes in the lockbox is greater than or equal to `min_len`\n"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(deny_unknown_fields, default)] pub struct $v { #[serde(skip_serializing_if = "String::is_empty")] pub comment: String, #[serde(skip_serializing_if = "u32_is_max")] pub max_len: u32, #[serde(skip_serializing_if = "u32_is_zero")] pub min_len: u32, #[serde(skip_serializing_if = "is_false")] pub size: bool, } impl std::default::Default for $v { fn default() -> Self { Self { comment: String::new(), max_len: u32::MAX, min_len: u32::MIN, size: false, } } } impl $v { pub fn new() -> Self { Self::default() } pub fn comment(mut self, comment: impl Into<String>) -> Self { self.comment = comment.into(); self } pub fn max_len(mut self, max_len: u32) -> Self { self.max_len = max_len; self } pub fn min_len(mut self, min_len: u32) -> Self { self.min_len = min_len; self } pub fn size(mut self, size: bool) -> Self { self.size = size; self } pub fn build(self) -> Validator { Validator::$e(self) } pub(crate) fn validate(&self, parser: &mut Parser) -> Result<()> { let elem = parser .next() .ok_or_else(|| Error::FailValidate(concat!("Expected a ",$name).to_string()))??; let elem = if let Element::$e(v) = elem { v } else { return Err(Error::FailValidate(format!( concat!("Expected ", $name, ", got {}"), 
elem.name() ))); }; let len = elem.as_bytes().len() as u32; if len > self.max_len { return Err(Error::FailValidate( concat!($name, " is longer than max_len").to_string() )); } if len < self.min_len { return Err(Error::FailValidate( concat!($name, " is shorter than min_len").to_string() )); } Ok(()) } fn query_check_self(&self, other: &Self) -> bool { self.size || (u32_is_max(&other.max_len) && u32_is_zero(&other.min_len)) } pub(crate) fn query_check(&self, other: &Validator) -> bool { match other { Validator::$e(other) => self.query_check_self(other), Validator::Multi(list) => list.iter().all(|other| match other { Validator::$e(other) => self.query_check_self(other), _ => false, }), Validator::Any => true, _ => false, } } } }; ($t: ty, $e: ident, $v: ident) => { lockbox_validator!($t, $e, $v, concat!("fog_crypto::lockbox::", stringify!($t)), stringify!($t)); } } lockbox_validator!(DataLockbox, DataLockbox, DataLockboxValidator); lockbox_validator!(IdentityLockbox, IdentityLockbox, IdentityLockboxValidator); lockbox_validator!(StreamLockbox, StreamLockbox, StreamLockboxValidator); lockbox_validator!(LockLockbox, LockLockbox, LockLockboxValidator);
use super::*; use crate::element::*; use crate::error::{Error, Result}; use serde::{Deserialize, Serialize}; #[inline] fn is_false(v: &bool) -> bool { !v } #[inline] fn u32_is_zero(v: &u32) -> bool { *v == 0 } #[inline] fn u32_is_max(v: &u32) -> bool { *v == u32::MAX } macro_rules! lockbox_validator { ($t: ty, $e: ident, $v: ident, $link:expr, $name:expr) => { #[doc = "Validator for a [`"] #[doc = $name] #[doc = "`]["] #[doc = $link] #[doc = "].\n\n"] #[doc = "This validator will only pass a "] #[doc = $name] #[doc = " value. Validation passes if:\n\n"] #[doc = "- The number of bytes in the lockbox is less than or equal to `max_len`\n"] #[doc = "- The number of bytes in the lockbox is greater than or equal to `min_len`\n"] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(deny_unknown_fields, default)] pub struct $v { #[serde(skip_serializing_if = "String::is_empty")] pub comment: String, #[serde(skip_serializing_if = "u32_is_max")] pub max_len: u32, #[serde(skip_serializing_if = "u32_is_zero")] pub min_len: u32, #[serde(skip_serializing_if = "is_false")] pub size: bool, } impl std::default::Default for $v { fn default() -> Self { Self { comment: String::new(), max_len: u32::MAX, min_len: u32::MIN, size: false, } } } impl $v { pub fn new() -> Self { Self::default() } pub fn comment(mut self, comment: impl Into<String>) -> Self { self.comment = comment.into(); self } pub fn max_len(mut self, max_len: u32) -> Self { self.max_len = max_len; self } pub fn min_len(mut self, min_len: u32) -> Self { self.min_len = min_len; self } pub fn size(mut self, size: bool) -> Self { self.size = size; self } pub fn build(self) -> Validator { Validator::$e(self) } pub(crate) fn validate(&self, parser: &mut Parser) -> Result<()> { let elem = parser .next() .ok_or_else(|| Error::FailValidate(concat!("Expected a ",$name).to_string()))??; let elem = if let Element::$e(v) = elem { v } else { return Err(Error::FailValidate(format!( concat!("Expected ", $name, ", got {}"), 
elem.name() ))); }; let len = elem.as_bytes().len() as u32; if len > self.max_len { return Err(Error::FailValidate( concat!($name, " is longer than max_len").to_string() )); } if len < self.min_len { return Err(Error::FailValidate( concat!($name, " is shorter than min_len").to_string() )); } Ok(()) } fn query_check_self(&self, other: &Self) -> bool { self.size || (u32_is_max(&other.max_le
r().all(|other| match other { Validator::$e(other) => self.query_check_self(other), _ => false, }), Validator::Any => true, _ => false, } } } }; ($t: ty, $e: ident, $v: ident) => { lockbox_validator!($t, $e, $v, concat!("fog_crypto::lockbox::", stringify!($t)), stringify!($t)); } } lockbox_validator!(DataLockbox, DataLockbox, DataLockboxValidator); lockbox_validator!(IdentityLockbox, IdentityLockbox, IdentityLockboxValidator); lockbox_validator!(StreamLockbox, StreamLockbox, StreamLockboxValidator); lockbox_validator!(LockLockbox, LockLockbox, LockLockboxValidator);
n) && u32_is_zero(&other.min_len)) } pub(crate) fn query_check(&self, other: &Validator) -> bool { match other { Validator::$e(other) => self.query_check_self(other), Validator::Multi(list) => list.ite
random
[ { "content": "/// Read raw lockbox data out from a buffer\n\npub fn read_raw_lockbox(buf: &mut &[u8], len: usize) -> crate::Result<Lockbox> {\n\n Ok(Lockbox::decode(len, buf)?)\n\n}\n\n\n\n\n", "file_path": "old/decode.rs", "rank": 2, "score": 338446.3248757994 }, { "content": "/// Genera...
Rust
ring/src/aead/poly1305.rs
Soptq/phala-blockchain
f2fbd1e62b1b8c2567bfed993ae85a56f227a880
use super::{Tag, TAG_LEN}; use crate::{c, cpu}; pub(super) struct Key { key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features, } pub(super) const BLOCK_LEN: usize = 16; pub(super) const KEY_LEN: usize = 2 * BLOCK_LEN; impl Key { #[inline] pub(super) fn new(key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features) -> Self { Self { key_and_nonce, cpu_features, } } } pub struct Context { state: poly1305_state, #[allow(dead_code)] cpu_features: cpu::Features, } #[repr(C, align(64))] struct poly1305_state([u8; OPAQUE_LEN]); const OPAQUE_LEN: usize = 512; macro_rules! dispatch { ( $features:expr => ( $f:ident | $neon_f:ident ) ( $( $p:ident : $t:ty ),+ ) ( $( $a:expr ),+ ) ) => { match () { #[cfg(all(target_arch = "arm", not(target_vendor = "apple")))] () if cpu::arm::NEON.available($features) => { extern "C" { fn $neon_f( $( $p : $t ),+ ); } unsafe { $neon_f( $( $a ),+ ) } } () => { extern "C" { fn $f( $( $p : $t ),+ ); } unsafe { $f( $( $a ),+ ) } } } } } impl Context { #[inline] pub(super) fn from_key( Key { key_and_nonce, cpu_features, }: Key, ) -> Self { let mut ctx = Self { state: poly1305_state([0u8; OPAQUE_LEN]), cpu_features, }; dispatch!( cpu_features => (GFp_poly1305_init | GFp_poly1305_init_neon) (statep: &mut poly1305_state, key: &[u8; KEY_LEN]) (&mut ctx.state, &key_and_nonce)); ctx } #[inline(always)] pub fn update(&mut self, input: &[u8]) { dispatch!( self.cpu_features => (GFp_poly1305_update | GFp_poly1305_update_neon) (statep: &mut poly1305_state, input: *const u8, in_len: c::size_t) (&mut self.state, input.as_ptr(), input.len())); } pub(super) fn finish(mut self) -> Tag { let mut tag = Tag([0u8; TAG_LEN]); dispatch!( self.cpu_features => (GFp_poly1305_finish | GFp_poly1305_finish_neon) (statep: &mut poly1305_state, mac: &mut [u8; TAG_LEN]) (&mut self.state, &mut tag.0)); tag } } pub(super) fn sign(key: Key, input: &[u8]) -> Tag { let mut ctx = Context::from_key(key); ctx.update(input); ctx.finish() } #[cfg(test)] mod tests { use super::*; use 
crate::test; use core::convert::TryInto; #[test] pub fn test_poly1305() { let cpu_features = cpu::features(); test::run(test_file!("poly1305_test.txt"), |section, test_case| { assert_eq!(section, ""); let key = test_case.consume_bytes("Key"); let key: &[u8; KEY_LEN] = key.as_slice().try_into().unwrap(); let input = test_case.consume_bytes("Input"); let expected_mac = test_case.consume_bytes("MAC"); let key = Key::new(*key, cpu_features); let Tag(actual_mac) = sign(key, &input); assert_eq!(expected_mac, actual_mac.as_ref()); Ok(()) }) } }
use super::{Tag, TAG_LEN}; use crate::{c, cpu}; pub(super) struct Key { key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features, } pub(super) const BLOCK_LEN: usize = 16; pub(super) const KEY_LEN: usize = 2 * BLOCK_LEN; impl Key { #[inline] pub(super) fn new(key_and_nonce: [u8; KEY_LEN], cpu_features: cpu::Features) -> Self { Self { key_and_nonce, cpu_features, } } } pub struct Context { state: poly1305_state, #[allow(dead_code)] cpu_features: cpu::Features, } #[repr(C, align(64))] struct poly1305_state([u8; OPAQUE_LEN]); const OPAQUE_LEN: usize = 512; macro_rules! dispatch { ( $features:expr => ( $f:ident | $neon_f:ident ) ( $( $p:ident : $t:ty ),+ ) ( $( $a:expr ),+ ) ) => { match () { #[cfg(all(target_arch = "arm", not(target_vendor = "apple")))] () if cpu::arm::NEON.available($features) => { extern "C" { fn $neon_f( $( $p : $t ),+ ); } unsafe { $neon_f( $( $a ),+ ) } } () => { extern "C" { fn $f( $( $p : $t ),+ ); } unsafe { $f( $( $a ),+ ) } } } } } impl Context { #[inline] pub(super) fn from_key( Key { key_and_nonce, cpu_features, }: Key, ) -> Self { let mut ctx = Self { state: poly1305_state([0u8; OPAQUE_LEN]), cpu_features, }; dispatch!( cpu_features => (GFp_poly1305_init | GFp_poly1305_init_neon) (statep: &mut poly1305_state, key: &[u8; KEY_LEN]) (&mut ctx.state, &key_and_nonce)); ctx } #[inline(always)]
pub(super) fn finish(mut self) -> Tag { let mut tag = Tag([0u8; TAG_LEN]); dispatch!( self.cpu_features => (GFp_poly1305_finish | GFp_poly1305_finish_neon) (statep: &mut poly1305_state, mac: &mut [u8; TAG_LEN]) (&mut self.state, &mut tag.0)); tag } } pub(super) fn sign(key: Key, input: &[u8]) -> Tag { let mut ctx = Context::from_key(key); ctx.update(input); ctx.finish() } #[cfg(test)] mod tests { use super::*; use crate::test; use core::convert::TryInto; #[test] pub fn test_poly1305() { let cpu_features = cpu::features(); test::run(test_file!("poly1305_test.txt"), |section, test_case| { assert_eq!(section, ""); let key = test_case.consume_bytes("Key"); let key: &[u8; KEY_LEN] = key.as_slice().try_into().unwrap(); let input = test_case.consume_bytes("Input"); let expected_mac = test_case.consume_bytes("MAC"); let key = Key::new(*key, cpu_features); let Tag(actual_mac) = sign(key, &input); assert_eq!(expected_mac, actual_mac.as_ref()); Ok(()) }) } }
pub fn update(&mut self, input: &[u8]) { dispatch!( self.cpu_features => (GFp_poly1305_update | GFp_poly1305_update_neon) (statep: &mut poly1305_state, input: *const u8, in_len: c::size_t) (&mut self.state, input.as_ptr(), input.len())); }
function_block-full_function
[ { "content": "pub fn shift_partial<F>((in_prefix_len, in_out): (usize, &mut [u8]), transform: F)\n\nwhere\n\n F: FnOnce(&[u8]) -> Block,\n\n{\n\n let (block, in_out_len) = {\n\n let input = &in_out[in_prefix_len..];\n\n let in_out_len = input.len();\n\n if in_out_len == 0 {\n\n ...
Rust
wire_protocol/src/connection.rs
vangork/pravega-client-rust
b57b2ea6eee3aa49d354e19b5fc457d4ccf72e9f
use crate::error::*; use async_trait::async_trait; use pravega_client_shared::PravegaNodeUri; use snafu::ResultExt; use std::fmt; use std::fmt::{Debug, Formatter}; use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{ReadHalf, WriteHalf}; use tokio::net::TcpStream; use tokio_rustls::client::TlsStream; use uuid::Uuid; #[async_trait] pub trait Connection: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>); fn get_endpoint(&self) -> PravegaNodeUri; fn get_uuid(&self) -> Uuid; fn is_valid(&self) -> bool; fn can_recycle(&mut self, recycle: bool); } pub struct TokioConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TcpStream>, pub can_recycle: bool, } #[async_trait] impl Connection for TokioConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>; let write = Box::new(ConnectionWriteHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn 
ConnectionWriteHalf>; (read, write) } fn get_endpoint(&self) -> PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle } } impl Debug for TokioConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } pub struct TlsConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TlsStream<TcpStream>>, pub can_recycle: bool, } #[async_trait] impl Connection for TlsConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint: endpoint.clone(), })?; self.stream .as_mut() .expect("get connection") .flush() .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>; let write = Box::new(ConnectionWriteHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn ConnectionWriteHalf>; (read, write) } fn get_endpoint(&self) -> 
PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle; } } impl Debug for TlsConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } #[async_trait] pub trait ConnectionReadHalf: Send + Sync { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } pub struct ConnectionReadHalfTokio { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TcpStream>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTokio { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub struct ConnectionReadHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTls { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[async_trait] pub trait ConnectionWriteHalf: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } #[derive(Debug)] pub struct ConnectionWriteHalfTokio { uuid: Uuid, 
endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TcpStream>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTokio { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[derive(Debug)] pub struct ConnectionWriteHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTls { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint: endpoint.clone(), })?; writer.flush().await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub trait Validate { fn is_valid(&self) -> bool; } impl Validate for TcpStream { fn is_valid(&self) -> bool { self.peer_addr().map_or_else(|_e| false, |_addr| true) } } impl Validate for TlsStream<TcpStream> { fn is_valid(&self) -> bool { let (io, _session) = self.get_ref(); io.peer_addr().map_or_else(|_e| false, |_addr| true) } }
use crate::error::*; use async_trait::async_trait; use pravega_client_shared::PravegaNodeUri; use snafu::ResultExt; use std::fmt; use std::fmt::{Debug, Formatter}; use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::io::{ReadHalf, WriteHalf}; use tokio::net::TcpStream; use tokio_rustls::client::TlsStream; use uuid::Uuid; #[async_trait] pub trait Connection: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>); fn get_endpoint(&self) -> PravegaNodeUri; fn get_uuid(&self) -> Uuid; fn is_valid(&self) -> bool; fn can_recycle(&mut self, recycle: bool); } pub struct TokioConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TcpStream>, pub can_recycle: bool, } #[async_trait] impl Connection for TokioConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>; let write = Box::new(ConnectionWriteHalfTokio { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn 
ConnectionWriteHalf>; (read, write) } fn get_endpoint(&self) -> PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle } } impl Debug for TokioConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } pub struct TlsConnection { pub uuid: Uuid, pub endpoint: PravegaNodeUri, pub stream: Option<TlsStream<TcpStream>>, pub can_recycle: bool, } #[async_trait] impl Connection for TlsConnection { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .write_all(payload) .await .context(SendData { endpoint: endpoint.clone(), })?; self.stream .as_mut() .expect("get connection") .flush() .await .context(SendData { endpoint })?; Ok(()) } async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { assert!(self.stream.is_some()); let endpoint = self.endpoint.clone(); self.stream .as_mut() .expect("get connection") .read_exact(buf) .await .context(ReadData { endpoint })?; Ok(()) } fn split(&mut self) -> (Box<dyn ConnectionReadHalf>, Box<dyn ConnectionWriteHalf>) { assert!(self.stream.is_some()); let (read_half, write_half) = tokio::io::split(self.stream.take().expect("take connection")); let read = Box::new(ConnectionReadHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), read_half: Some(read_half), }) as Box<dyn ConnectionReadHalf>;
(read, write) } fn get_endpoint(&self) -> PravegaNodeUri { self.endpoint.clone() } fn get_uuid(&self) -> Uuid { self.uuid } fn is_valid(&self) -> bool { self.can_recycle && self.stream.as_ref().is_some() && self.stream.as_ref().expect("get connection").is_valid() } fn can_recycle(&mut self, can_recycle: bool) { self.can_recycle = can_recycle; } } impl Debug for TlsConnection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_struct("TlsConnection") .field("connection id", &self.uuid) .field("pravega endpoint", &self.endpoint) .finish() } } #[async_trait] pub trait ConnectionReadHalf: Send + Sync { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } pub struct ConnectionReadHalfTokio { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TcpStream>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTokio { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub struct ConnectionReadHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, read_half: Option<ReadHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionReadHalf for ConnectionReadHalfTls { async fn read_async(&mut self, buf: &mut [u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut reader) = self.read_half { reader.read_exact(buf).await.context(ReadData { endpoint })?; } else { panic!("should not try to read when read half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[async_trait] pub trait ConnectionWriteHalf: Send + Sync + Debug { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError>; fn get_id(&self) -> Uuid; } #[derive(Debug)] pub struct 
ConnectionWriteHalfTokio { uuid: Uuid, endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TcpStream>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTokio { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } #[derive(Debug)] pub struct ConnectionWriteHalfTls { uuid: Uuid, endpoint: PravegaNodeUri, write_half: Option<WriteHalf<TlsStream<TcpStream>>>, } #[async_trait] impl ConnectionWriteHalf for ConnectionWriteHalfTls { async fn send_async(&mut self, payload: &[u8]) -> Result<(), ConnectionError> { let endpoint = self.endpoint.clone(); if let Some(ref mut writer) = self.write_half { writer.write_all(payload).await.context(SendData { endpoint: endpoint.clone(), })?; writer.flush().await.context(SendData { endpoint })?; } else { panic!("should not try to write when write half is gone"); } Ok(()) } fn get_id(&self) -> Uuid { self.uuid } } pub trait Validate { fn is_valid(&self) -> bool; } impl Validate for TcpStream { fn is_valid(&self) -> bool { self.peer_addr().map_or_else(|_e| false, |_addr| true) } } impl Validate for TlsStream<TcpStream> { fn is_valid(&self) -> bool { let (io, _session) = self.get_ref(); io.peer_addr().map_or_else(|_e| false, |_addr| true) } }
let write = Box::new(ConnectionWriteHalfTls { uuid: self.uuid, endpoint: self.endpoint.clone(), write_half: Some(write_half), }) as Box<dyn ConnectionWriteHalf>;
assignment_statement
[ { "content": "#[async_trait]\n\npub trait ClientConnection: Send + Sync {\n\n async fn read(&mut self) -> Result<Replies, ClientConnectionError>;\n\n async fn write(&mut self, request: &Requests) -> Result<(), ClientConnectionError>;\n\n fn split(&mut self) -> (ClientConnectionReadHalf, ClientConnectio...
Rust
nrf-softdevice/src/flash.rs
chris-ricketts/nrf-softdevice
6ee09a134d9366029462963650dcd7e3921d6c1d
use core::future::Future; use core::marker::PhantomData; use core::sync::atomic::{AtomicBool, Ordering}; use embedded_storage::nor_flash::{ErrorType, NorFlashError, NorFlashErrorKind}; use embedded_storage_async::nor_flash::{AsyncNorFlash, AsyncReadNorFlash}; use crate::raw; use crate::util::{DropBomb, Signal}; use crate::{RawError, Softdevice}; #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] #[non_exhaustive] pub enum FlashError { Failed, AddressMisaligned, BufferMisaligned, } impl NorFlashError for FlashError { fn kind(&self) -> NorFlashErrorKind { match self { Self::Failed => NorFlashErrorKind::Other, Self::AddressMisaligned => NorFlashErrorKind::NotAligned, Self::BufferMisaligned => NorFlashErrorKind::NotAligned, } } } pub struct Flash { _private: PhantomData<*mut ()>, } static FLASH_TAKEN: AtomicBool = AtomicBool::new(false); impl Flash { const PAGE_SIZE: usize = 4096; pub fn take(_sd: &Softdevice) -> Flash { if FLASH_TAKEN .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) .is_err() { panic!("nrf_softdevice::Softdevice::take_flash() called multiple times.") } Flash { _private: PhantomData, } } } static SIGNAL: Signal<Result<(), FlashError>> = Signal::new(); pub(crate) fn on_flash_success() { SIGNAL.signal(Ok(())) } pub(crate) fn on_flash_error() { SIGNAL.signal(Err(FlashError::Failed)) } impl ErrorType for Flash { type Error = FlashError; } impl AsyncReadNorFlash for Flash { const READ_SIZE: usize = 1; type ReadFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn read<'a>(&'a mut self, address: u32, data: &'a mut [u8]) -> Self::ReadFuture<'a> { async move { data.copy_from_slice(unsafe { core::slice::from_raw_parts(address as *const u8, data.len()) }); Ok(()) } } fn capacity(&self) -> usize { 256 * 4096 } } impl AsyncNorFlash for Flash { const WRITE_SIZE: usize = 4; const ERASE_SIZE: usize = 4096; type WriteFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn 
write<'a>(&'a mut self, offset: u32, data: &'a [u8]) -> Self::WriteFuture<'a> { async move { let data_ptr = data.as_ptr(); let data_len = data.len() as u32; let address = offset as usize; if address % 4 != 0 { return Err(FlashError::AddressMisaligned); } if (data_ptr as u32) % 4 != 0 || data_len % 4 != 0 { return Err(FlashError::BufferMisaligned); } let words_ptr = data_ptr as *const u32; let words_len = data_len / 4; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_write(address as _, words_ptr, words_len) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_write err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } type EraseFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn erase<'a>(&'a mut self, from: u32, to: u32) -> Self::EraseFuture<'a> { async move { if from as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } if to as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } let bomb = DropBomb::new(); for address in (from as usize..to as usize).step_by(Self::PAGE_SIZE) { let page_number = (address / Self::PAGE_SIZE) as u32; let ret = unsafe { raw::sd_flash_page_erase(page_number) }; match RawError::convert(ret) { Ok(()) => match SIGNAL.wait().await { Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(_e); } _ => {} }, Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(FlashError::Failed); } } } bomb.defuse(); Ok(()) } } }
use core::future::Future; use core::marker::PhantomData; use core::sync::atomic::{AtomicBool, Ordering}; use embedded_storage::nor_flash::{ErrorType, NorFlashError, NorFlashErrorKind}; use embedded_storage_async::nor_flash::{AsyncNorFlash, AsyncReadNorFlash}; use crate::raw; use crate::util::{DropBomb, Signal}; use crate::{RawError, Softdevice}; #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] #[non_exhaustive] pub enum FlashError { Failed, AddressMisaligned, BufferMisaligned, } impl NorFlashError for FlashError { fn kind(&self) -> NorFlashErrorKind { match self { Self::Failed => NorFlashErrorKind::Other, Self::AddressMisaligned => NorFlashErrorKind::NotAligned, Self::BufferMisaligned => NorFlashErrorKind::NotAligned, } } } pub struct Flash { _private: PhantomData<*mut ()>, } static FLASH_TAKEN: AtomicBool = AtomicBool::new(false); impl Flash { const PAGE_SIZE: usize = 4096; pub fn take(_sd: &Softdevice) -> Flash { if FLASH_TAKEN .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) .is_err() { panic!("nrf_softdevice::Softdevice::take_flash() called multiple times.") } Flash { _private: PhantomData, } } } static SIGNAL: Signal<Result<(), FlashError>> = Signal::new(); pub(crate) fn on_flash_success() { SIGNAL.signal(Ok(())) } pub(crate) fn on_flash_error() { SIGNAL.signal(Err(FlashError::Failed)) } impl ErrorType for Flash { type Error = FlashError; } impl AsyncReadNorFlash for Flash { const READ_SIZE: usize = 1; type ReadFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn read<'a>(&'a mut self, address: u32, da
w_parts(address as *const u8, data.len()) }); Ok(()) } } fn capacity(&self) -> usize { 256 * 4096 } } impl AsyncNorFlash for Flash { const WRITE_SIZE: usize = 4; const ERASE_SIZE: usize = 4096; type WriteFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn write<'a>(&'a mut self, offset: u32, data: &'a [u8]) -> Self::WriteFuture<'a> { async move { let data_ptr = data.as_ptr(); let data_len = data.len() as u32; let address = offset as usize; if address % 4 != 0 { return Err(FlashError::AddressMisaligned); } if (data_ptr as u32) % 4 != 0 || data_len % 4 != 0 { return Err(FlashError::BufferMisaligned); } let words_ptr = data_ptr as *const u32; let words_len = data_len / 4; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_write(address as _, words_ptr, words_len) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_write err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } type EraseFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn erase<'a>(&'a mut self, from: u32, to: u32) -> Self::EraseFuture<'a> { async move { if from as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } if to as usize % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } let bomb = DropBomb::new(); for address in (from as usize..to as usize).step_by(Self::PAGE_SIZE) { let page_number = (address / Self::PAGE_SIZE) as u32; let ret = unsafe { raw::sd_flash_page_erase(page_number) }; match RawError::convert(ret) { Ok(()) => match SIGNAL.wait().await { Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(_e); } _ => {} }, Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); bomb.defuse(); return Err(FlashError::Failed); } } } bomb.defuse(); Ok(()) } } }
ta: &'a mut [u8]) -> Self::ReadFuture<'a> { async move { data.copy_from_slice(unsafe { core::slice::from_ra
function_block-random_span
[ { "content": "pub fn get_value(_sd: &Softdevice, handle: u16, buf: &mut [u8]) -> Result<usize, GetValueError> {\n\n let mut value = raw::ble_gatts_value_t {\n\n p_value: buf.as_mut_ptr(),\n\n len: buf.len() as _,\n\n offset: 0,\n\n };\n\n let ret = unsafe {\n\n raw::sd_ble_g...
Rust
tests/elements/test_from_telemetry_stage.rs
dmrolfs/proctor
9b2fac5e80e4a8874906a85302af7b34b4433f46
use std::path::PathBuf; use anyhow::Result; use cast_trait_object::DynCastExt; use chrono::{DateTime, Utc}; use claim::*; use once_cell::sync::Lazy; use pretty_assertions::assert_eq; use proctor::elements; use proctor::graph::{stage, Connect, Graph, SinkShape}; use proctor::phases::collection::{make_telemetry_cvs_source, SourceSetting}; use serde::{Deserialize, Serialize}; use serde_test::{assert_tokens, Token}; use super::DEFAULT_LAST_DEPLOYMENT; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] struct Data { #[serde( default, rename = "task.last_failure", serialize_with = "proctor::serde::date::serialize_optional_datetime_format", deserialize_with = "proctor::serde::date::deserialize_optional_datetime" )] pub last_failure: Option<DateTime<Utc>>, #[serde(rename = "cluster.is_deploying")] pub is_deploying: bool, #[serde( rename = "cluster.last_deployment", serialize_with = "proctor::serde::date::serialize_format", deserialize_with = "proctor::serde::date::deserialize" )] pub last_deployment: DateTime<Utc>, } impl Default for Data { fn default() -> Self { Self { last_failure: None, is_deploying: true, last_deployment: *DEFAULT_LAST_DEPLOYMENT, } } } static NOW: Lazy<DateTime<Utc>> = Lazy::new(|| Utc::now()); static NOW_REP: Lazy<String> = Lazy::new(|| format!("{}", NOW.format("%+"))); #[test] fn test_data_serde() { let data = Data { last_failure: Some(NOW.clone()), is_deploying: true, last_deployment: NOW.clone(), }; assert_tokens( &data, &vec![ Token::Struct { name: "Data", len: 3 }, Token::Str("task.last_failure"), Token::Some, Token::Str(&NOW_REP), Token::Str("cluster.is_deploying"), Token::Bool(true), Token::Str("cluster.last_deployment"), Token::Str(&NOW_REP), Token::StructEnd, ], ); } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn test_make_from_telemetry_stage() -> Result<()> { once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING); let main_span = tracing::info_span!("test_make_from_telemetry_stage"); let 
_main_span_guard = main_span.enter(); let base_path = assert_ok!(std::env::current_dir()); let path = base_path.join(PathBuf::from("./tests/data/eligibility.csv")); let setting = SourceSetting::Csv { path }; let mut source = assert_ok!(make_telemetry_cvs_source::<Data, _>("local", &setting)); let convert = elements::make_from_telemetry("convert", true).await; let mut sink = stage::Fold::<_, Data, Vec<Data>>::new("sink", Vec::default(), |mut acc, item| { acc.push(item); acc }); let rx_acc = assert_some!(sink.take_final_rx()); let source_stage = assert_some!(source.stage.take()); (source_stage.outlet(), convert.inlet()).connect().await; (convert.outlet(), sink.inlet()).connect().await; let mut g = Graph::default(); g.push_back(source_stage.dyn_upcast()).await; g.push_back(convert.dyn_upcast()).await; g.push_back(Box::new(sink)).await; assert_ok!(g.run().await); let actual = assert_ok!(rx_acc.await); let expected = vec![ Data { last_failure: None, is_deploying: true, last_deployment: DateTime::parse_from_str("2014-11-28T10:11:37.246310806Z", "%+")?.with_timezone(&Utc), }, Data { last_failure: Some(DateTime::parse_from_str("2014-11-28T12:45:59.324310806Z", "%+")?.with_timezone(&Utc)), is_deploying: false, last_deployment: DateTime::parse_from_str("2021-03-08T23:57:12.918473937Z", "%+")?.with_timezone(&Utc), }, ]; assert_eq!(actual, expected); Ok(()) }
use std::path::PathBuf; use anyhow::Result; use cast_trait_object::DynCastExt; use chrono::{DateTime, Utc}; use claim::*; use once_cell::sync::Lazy; use pretty_assertions::assert_eq; use proctor::elements; use proctor::graph::{stage, Connect, Graph, SinkShape}; use proctor::phases::collection::{make_telemetry_cvs_source, SourceSetting}; use serde::{Deserialize, Serialize}; use serde_test::{assert_tokens, Token}; use super::DEFAULT_LAST_DEPLOYMENT; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] struct Data { #[serde( default, rename = "task.last_failure", serialize_with = "proctor::serde::date::serialize_optional_datetime_format", deserialize_with = "proctor::serde::date::deserialize_optional_datetime" )] pub last_failure: Option<DateTime<Utc>>, #[serde(rename = "cluster.is_deploying")] pub is_deploying: bool, #[serde( rename = "cluster.last_deployment", serialize_with = "proctor::serde::date::serialize_format", deserialize_with = "proctor::serde::date::deserialize" )] pub last_deployment: DateTime<Utc>, } impl Default for Data { fn default() -> Self { Self { last_failure: None, is_deploying: true, last_deployment: *DEFAULT_LAST_DEPLOYMENT, } } } static NOW: Lazy<DateTime<Utc>> = Lazy::new(|| Utc::now()); static NOW_REP: Lazy<String> = Lazy::new(|| format!("{}", NOW.format("%+"))); #[test] fn test_data_serde() {
assert_tokens( &data, &vec![ Token::Struct { name: "Data", len: 3 }, Token::Str("task.last_failure"), Token::Some, Token::Str(&NOW_REP), Token::Str("cluster.is_deploying"), Token::Bool(true), Token::Str("cluster.last_deployment"), Token::Str(&NOW_REP), Token::StructEnd, ], ); } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn test_make_from_telemetry_stage() -> Result<()> { once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING); let main_span = tracing::info_span!("test_make_from_telemetry_stage"); let _main_span_guard = main_span.enter(); let base_path = assert_ok!(std::env::current_dir()); let path = base_path.join(PathBuf::from("./tests/data/eligibility.csv")); let setting = SourceSetting::Csv { path }; let mut source = assert_ok!(make_telemetry_cvs_source::<Data, _>("local", &setting)); let convert = elements::make_from_telemetry("convert", true).await; let mut sink = stage::Fold::<_, Data, Vec<Data>>::new("sink", Vec::default(), |mut acc, item| { acc.push(item); acc }); let rx_acc = assert_some!(sink.take_final_rx()); let source_stage = assert_some!(source.stage.take()); (source_stage.outlet(), convert.inlet()).connect().await; (convert.outlet(), sink.inlet()).connect().await; let mut g = Graph::default(); g.push_back(source_stage.dyn_upcast()).await; g.push_back(convert.dyn_upcast()).await; g.push_back(Box::new(sink)).await; assert_ok!(g.run().await); let actual = assert_ok!(rx_acc.await); let expected = vec![ Data { last_failure: None, is_deploying: true, last_deployment: DateTime::parse_from_str("2014-11-28T10:11:37.246310806Z", "%+")?.with_timezone(&Utc), }, Data { last_failure: Some(DateTime::parse_from_str("2014-11-28T12:45:59.324310806Z", "%+")?.with_timezone(&Utc)), is_deploying: false, last_deployment: DateTime::parse_from_str("2021-03-08T23:57:12.918473937Z", "%+")?.with_timezone(&Utc), }, ]; assert_eq!(actual, expected); Ok(()) }
let data = Data { last_failure: Some(NOW.clone()), is_deploying: true, last_deployment: NOW.clone(), };
assignment_statement
[ { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_format<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let datetime_rep = format!(\"{}\", date.format(FORMAT));\n\n serializer.serialize_str(datetime_rep...
Rust
src/union_find/client.rs
ccozad/cozad-union-find
3ec2ac39cecf08e6d68977a67f2c7bb02409fc6f
#[cfg(test)] #[path = "client_tests.rs"] mod client_tests; use std::collections::HashMap; #[derive(Hash, Eq, PartialEq, Debug)] struct Node { pub uuid: String, pub parent_index: usize, pub index: usize, pub size: usize } #[derive(Hash, Eq, PartialEq, Debug)] pub struct BulkConnection { pub a: usize, pub b: usize } #[derive(Debug)] pub struct Client { nodes: Vec<Node>, node_map: HashMap<String, usize>, set_count: usize } impl BulkConnection { pub fn new(a: usize, b: usize) -> Self { BulkConnection { a, b } } } impl Client { pub fn new() -> Self { let node_map = HashMap::new(); let mut nodes = Vec::new(); let root_node = Node { uuid: String::from("root"), parent_index: 0, index: 0, size: 0 }; nodes.push(root_node); Client { nodes, node_map, set_count: 0 } } #[allow(dead_code)] pub fn add_node(&mut self, uuid: &str) { if !self.node_exists(uuid) { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } pub fn add_nodes_bulk(&mut self, uuid_list: Vec<String>) { for uuid in uuid_list.iter() { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } #[allow(dead_code)] pub fn connect_nodes(&mut self, uuid_a: &str, uuid_b: &str) { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); if uuid_a_root == uuid_b_root { return } else { let node_slice = &mut self.nodes[..]; if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; } self.set_count -= 1; } } pub fn connect_nodes_bulk(&mut 
self, connections: Vec<BulkConnection>) { for connection in connections.iter() { let uuid_a_root = self.find_root_index_bulk(connection.a + 1); let uuid_b_root = self.find_root_index_bulk(connection.b + 1); if uuid_a_root == uuid_b_root { } else { let node_slice = &mut self.nodes[..]; if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; } self.set_count -= 1; } } } pub fn disjoint_set_count(&self) -> usize { self.set_count } pub fn find_root_index(&self, uuid: &str) -> usize { let node_index = self.node_index(uuid); if node_index > 0 { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } else { 0 } } pub fn find_root_index_bulk(&self, node_index: usize) -> usize { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } #[allow(dead_code)] pub fn nodes_connected(&self, uuid_a: &str, uuid_b: &str) -> bool { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); uuid_a_root > 0 && uuid_a_root == uuid_b_root } #[allow(dead_code)] pub fn node_count(&self) -> usize { self.nodes.len() - 1 } pub fn node_exists(&self, uuid: &str) -> bool { let node_uuid = String::from(uuid); self.node_map.contains_key(&node_uuid) } pub fn node_index(&self, uuid: &str) -> usize { let node_uuid = String::from(uuid); if self.node_map.contains_key(&node_uuid) { *self.node_map.get(&node_uuid).unwrap() } else { 0 } } }
#[cfg(test)] #[path = "client_tests.rs"] mod client_tests; use std::collections::HashMap; #[derive(Hash, Eq, PartialEq, Debug)] struct Node { pub uuid: String, pub parent_index: usize, pub index: usize, pub size: usize } #[derive(Hash, Eq, PartialEq, Debug)] pub struct BulkConnection { pub a: usize, pub b: usize } #[derive(Debug)] pub struct Client { nodes: Vec<Node>, node_map: HashMap<String, usize>, set_count: usize } impl BulkConnection { pub fn new(a: usize, b: usize) -> Self { BulkConnection { a, b } } } impl Client { pub fn new() -> Self { let node_map = HashMap::new(); let mut nodes = Vec::new(); let root_node = Node { uuid: String::from("root"), parent_index: 0, index: 0, size: 0 }; nodes.push(root_node); Client { nodes, node_map, set_count: 0 } } #[allow(dead_code)] pub fn add_node(&mut self, uuid: &str) { if !self.node_exists(uuid) { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } pub fn add_nodes_bulk(&mut self, uuid_list: Vec<String>) { for uuid in uuid_list.iter() { let node = Node { uuid: String::from(uuid), parent_index: self.nodes.len(), index: self.nodes.len(), size: 1 }; self.node_map.insert(String::from(uuid), node.index); self.nodes.push(node); self.set_count += 1; } } #[allow(dead_code)] pub fn connect_nodes(&mut self, uuid_a: &str, uuid_b: &str) { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); if uuid_a_root == uuid_b_root { return } else { let node_slice = &mut self.nodes[..];
self.set_count -= 1; } } pub fn connect_nodes_bulk(&mut self, connections: Vec<BulkConnection>) { for connection in connections.iter() { let uuid_a_root = self.find_root_index_bulk(connection.a + 1); let uuid_b_root = self.find_root_index_bulk(connection.b + 1); if uuid_a_root == uuid_b_root { } else { let node_slice = &mut self.nodes[..]; if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; } self.set_count -= 1; } } } pub fn disjoint_set_count(&self) -> usize { self.set_count } pub fn find_root_index(&self, uuid: &str) -> usize { let node_index = self.node_index(uuid); if node_index > 0 { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } else { 0 } } pub fn find_root_index_bulk(&self, node_index: usize) -> usize { let mut node = self.nodes.get(node_index).unwrap(); while node.parent_index != node.index { node = self.nodes.get(node.parent_index).unwrap(); } node.parent_index } #[allow(dead_code)] pub fn nodes_connected(&self, uuid_a: &str, uuid_b: &str) -> bool { let uuid_a_root = self.find_root_index(uuid_a); let uuid_b_root = self.find_root_index(uuid_b); uuid_a_root > 0 && uuid_a_root == uuid_b_root } #[allow(dead_code)] pub fn node_count(&self) -> usize { self.nodes.len() - 1 } pub fn node_exists(&self, uuid: &str) -> bool { let node_uuid = String::from(uuid); self.node_map.contains_key(&node_uuid) } pub fn node_index(&self, uuid: &str) -> usize { let node_uuid = String::from(uuid); if self.node_map.contains_key(&node_uuid) { *self.node_map.get(&node_uuid).unwrap() } else { 0 } } }
if node_slice[uuid_a_root].size < node_slice[uuid_b_root].size { node_slice[uuid_a_root].parent_index = uuid_b_root; node_slice[uuid_b_root].size += node_slice[uuid_a_root].size; } else { node_slice[uuid_b_root].parent_index = uuid_a_root; node_slice[uuid_a_root].size += node_slice[uuid_b_root].size; }
if_condition
[ { "content": "#[test]\n\nfn node_index_positive() {\n\n let mut client = ufclient::Client::new();\n\n client.add_node(\"A\");\n\n\n\n assert_eq!(1, client.node_index(\"A\"));\n\n}\n\n\n", "file_path": "src/union_find/client_tests.rs", "rank": 1, "score": 76115.25809501106 }, { "cont...
Rust
parser/src/typing/phase1.rs
thejohncrafter/projets-2020
2c4525f5d241a67663a1f74f2326abd2890c44fa
use std::collections::HashSet; use crate::ast::{Structure, Function, Exp, StaticType}; use super::data::*; use super::visit::IntoVisitor; use super::assign::collect_all_assign; use super::func_signatures::{build_signature, is_callable_with_exactly, format_signature}; fn is_reserved_name(n: &String) -> bool { match n.as_str() { "div" | "print" | "println" => true, _ => false } } impl<'a> IntoVisitor<'a, InternalTypingResult<'a>> for GlobalEnvironmentState<'a> { fn visit_structure(&mut self, s: Structure<'a>) -> InternalTypingResult<'a> { if self.structures.contains_key(&s.name.name) { return Err( (s.span, format!("The ident '{}' is already taken by another structure", s.name.name).to_string()).into()); } self.known_types.insert(StaticType::Struct(s.name.name.clone())); for field in &s.fields { let fname = &field.name.name; if self.all_structure_fields.contains_key(fname) { return Err( (field.span, format!("The field name '{}' is already taken by this structure or another one", fname).to_string()).into() ); } if !self.known_types.contains(&field.ty) { return Err( (field.span, format!("This type is malformed, either it is not a primitive, or it's not this structure itself or another structure declared before").to_string()).into() ); } self.all_structure_fields.insert( fname.to_string().clone(), field.ty.clone() ); self.structure_name_by_fields.insert( fname.to_string().clone(), s.name.name.clone() ); if s.mutable { self.all_mutable_fields.insert(fname.to_string().clone()); } } self.structures.insert(s.name.name.clone(), s); Ok(()) } fn visit_function(&mut self, f: Function<'a>) -> InternalTypingResult<'a> { if is_reserved_name(&f.name) { return Err( (f.span, format!("The ident '{}' is a reserved name, it cannot be used as a function name", f.name).to_string()).into() ); } if !self.known_types.contains(&f.ret_ty) { return Err((f.span, format!("The return type '{}' of '{}' is malformed, either it's not a primitive or a declared structure", f.ret_ty, 
f.name).to_string()).into()); } let mut names: HashSet<String> = HashSet::new(); for param in &f.params { if names.contains(&param.name.name) { return Err((param.span, format!("The ident '{}' is already taken by another argument", param.name.name).to_string()).into()); } names.insert(param.name.name.clone()); if !self.known_types.contains(&param.ty) { return Err( (param.span, format!("This type is malformed, either it is not a primitive or it's not a declared before structure").to_string()).into() ); } } for sig in self.function_sigs.entry(f.name.clone()).or_default() { if is_callable_with_exactly(f.params.iter().map(|arg| arg.ty.clone()).collect(), &sig) { return Err( (f.span, format!( "The function '{}' has already been defined with the exact same signature ({}), add type annotations to disambiguate or remove duplicates", f.name, format_signature(f.params.into_iter().map(|arg| arg.ty).collect()) ).to_string()).into() ); } } self.function_sigs.entry(f.name.clone()).or_default().push(build_signature(&f)); self.functions.entry(f.name.clone()).or_default().push(f); Ok(()) } fn visit_expression(&mut self, ge: Exp<'a>) -> InternalTypingResult<'a> { self.global_variables.extend(collect_all_assign(&ge).into_iter().map(|l_ident| l_ident.name)); self.global_expressions.push(ge); Ok(()) } }
use std::collections::HashSet; use crate::ast::{Structure, Function, Exp, StaticType}; use super::data::*; use super::visit::IntoVisitor; use super::assign::collect_all_assign; use super::func_signatures::{build_signature, is_callable_with_exactly, format_signature}; fn is_reserved_name(n: &String) -> bool { match n.as_str() { "div" | "print" | "println" => true, _ => false } } impl<'a> IntoVisitor<'a, InternalTypingResult<'a>> for GlobalEnvironmentState<'a> { fn visit_structure(&mut self, s: Structure<'a>) -> InternalTypingResult<'a> { if self.structures.contains_key(&s.name.name) { return Err( (s.span, format!("The ident '{}' is already taken by another structure", s.name.name).to_string()).into()); } self.known_types.insert(StaticType::Struct(s.name.name.clone())); for field in &s.fields { let fname = &field.name.name; if self.all_structure_fields.contains_key(fname) { return Err( (field.span, format!("The field name '{}' is already taken by this structure or another one", fname).to_string()).into() ); } if !self.known_types.contains(&field.ty) { return Err( (field.span, format!("This type is malformed, either it is not a primitive, or it's not this structure itself or another structure declared before").to_string()).into() ); } self.all_structure_fields.insert( fname.to_string().clone(), field.ty.clone() ); self.structure_name_by_fields.insert( fname.to_string().clone(), s.name.name.clone() ); if s.mutable { self.all_mutable_fields.insert(fname.to_string().clone()); } } self.structures.insert(s.name.name.clone(), s); Ok(()) } fn visit_function(&mut self, f: Function<'a>) -> InternalTypingResult<'a> { if is_reserved_name(&f.name) { return Err( (f.span, format!("The ident '{}' is a reserved name, it cannot be used as a function name", f.name).to_string()).into() ); } if !self.known_types.contains(&f.ret_ty) { return Err((f.span, format!("The return type '{}' of '{}' is malformed, either it's not a primitive or a declared structure", f.ret_ty, 
f.name).to_string()).into()); } let mut names: HashSet<String> = HashSet::new(); for param in &f.params { if names.contains(&param.name.name) { return Err((param.span, format!("The ident '{}' is already taken by another argument", param.name.name).to_string()).into()); } names.insert(param.name.name.clone()); if !self.known_types.contains(&param.ty) { return
; } } for sig in self.function_sigs.entry(f.name.clone()).or_default() { if is_callable_with_exactly(f.params.iter().map(|arg| arg.ty.clone()).collect(), &sig) { return Err( (f.span, format!( "The function '{}' has already been defined with the exact same signature ({}), add type annotations to disambiguate or remove duplicates", f.name, format_signature(f.params.into_iter().map(|arg| arg.ty).collect()) ).to_string()).into() ); } } self.function_sigs.entry(f.name.clone()).or_default().push(build_signature(&f)); self.functions.entry(f.name.clone()).or_default().push(f); Ok(()) } fn visit_expression(&mut self, ge: Exp<'a>) -> InternalTypingResult<'a> { self.global_variables.extend(collect_all_assign(&ge).into_iter().map(|l_ident| l_ident.name)); self.global_expressions.push(ge); Ok(()) } }
Err( (param.span, format!("This type is malformed, either it is not a primitive or it's not a declared before structure").to_string()).into() )
call_expression
[ { "content": "pub fn is_builtin_function(name: &String) -> bool {\n\n match name.as_str() {\n\n \"println\" | \"div\" | \"print\" => true,\n\n _ => false\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TypedDecls<'a> {\n\n pub functions: HashMap<String, Vec<Function<'a>>>,\n\n pub str...
Rust
clashctl-tui/src/event.rs
EurusEurus/clashctl
6fabdeabc6dc4920cb9fb3da701242ac103fbf04
use std::fmt::Display; use crossterm::event::{KeyCode as KC, KeyEvent as KE, KeyModifiers as KM}; use log::Level; use tui::{ style::{Color, Style}, text::{Span, Spans}, }; use crate::{ clashctl::model::{ConnectionsWithSpeed, Log, Proxies, Rules, Traffic, Version}, components::MovableListItem, utils::AsColor, Error, Result, }; #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum Event { Quit, Input(InputEvent), Update(UpdateEvent), Diagnostic(DiagnosticEvent), } impl<'a> MovableListItem<'a> for Event { fn to_spans(&self) -> Spans<'a> { match self { Event::Quit => Spans(vec![]), Event::Update(event) => Spans(vec![ Span::styled("⇵ ", Style::default().fg(Color::Yellow)), Span::raw(event.to_string()), ]), Event::Input(event) => Spans(vec![ Span::styled("✜ ", Style::default().fg(Color::Green)), Span::raw(format!("{:?}", event)), ]), Event::Diagnostic(event) => match event { DiagnosticEvent::Log(level, payload) => Spans(vec![ Span::styled( format!("✇ {:<6}", level), Style::default().fg(level.as_color()), ), Span::raw(payload.to_owned()), ]), }, } } } impl Event { pub fn is_quit(&self) -> bool { matches!(self, Event::Quit) } pub fn is_interface(&self) -> bool { matches!(self, Event::Input(_)) } pub fn is_update(&self) -> bool { matches!(self, Event::Update(_)) } pub fn is_diagnostic(&self) -> bool { matches!(self, Event::Diagnostic(_)) } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum InputEvent { Esc, TabGoto(u8), ToggleDebug, ToggleHold, List(ListEvent), TestLatency, NextSort, PrevSort, Other(KE), } #[derive(Debug, Clone, PartialEq, Eq)] pub struct ListEvent { pub fast: bool, pub code: KC, } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum UpdateEvent { Config(clashctl_interactive::clashctl::model::Config), Connection(ConnectionsWithSpeed), Version(Version), Traffic(Traffic), Proxies(Proxies), Rules(Rules), Log(Log), ProxyTestLatencyDone, } impl Display for UpdateEvent { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { match self { UpdateEvent::Config(x) => write!(f, "{:?}", x), UpdateEvent::Connection(x) => write!(f, "{:?}", x), UpdateEvent::Version(x) => write!(f, "{:?}", x), UpdateEvent::Traffic(x) => write!(f, "{:?}", x), UpdateEvent::Proxies(x) => write!(f, "{:?}", x), UpdateEvent::Rules(x) => write!(f, "{:?}", x), UpdateEvent::Log(x) => write!(f, "{:?}", x), UpdateEvent::ProxyTestLatencyDone => write!(f, "Test latency done"), } } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum DiagnosticEvent { Log(Level, String), } impl TryFrom<KC> for Event { type Error = Error; fn try_from(value: KC) -> Result<Self> { match value { KC::Char('q') | KC::Char('x') => Ok(Event::Quit), KC::Char('t') => Ok(Event::Input(InputEvent::TestLatency)), KC::Esc => Ok(Event::Input(InputEvent::Esc)), KC::Char(' ') => Ok(Event::Input(InputEvent::ToggleHold)), KC::Char(char) if char.is_ascii_digit() => Ok(Event::Input(InputEvent::TabGoto( char.to_digit(10) .expect("char.is_ascii_digit() should be able to parse into number") as u8, ))), _ => Err(Error::TuiInternalErr), } } } impl From<KE> for Event { fn from(value: KE) -> Self { match (value.modifiers, value.code) { (KM::CONTROL, KC::Char('c')) => Self::Quit, (KM::CONTROL, KC::Char('d')) => Self::Input(InputEvent::ToggleDebug), (modi, arrow @ (KC::Left | KC::Right | KC::Up | KC::Down | KC::Enter)) => { Event::Input(InputEvent::List(ListEvent { fast: matches!(modi, KM::CONTROL | KM::SHIFT), code: arrow, })) } (KM::ALT, KC::Char('s')) => Self::Input(InputEvent::PrevSort), (KM::NONE, KC::Char('s')) => Self::Input(InputEvent::NextSort), (KM::NONE, key_code) => key_code .try_into() .unwrap_or_else(|_| Self::Input(InputEvent::Other(value))), _ => Self::Input(InputEvent::Other(value)), } } }
use std::fmt::Display; use crossterm::event::{KeyCode as KC, KeyEvent as KE, KeyModifiers as KM}; use log::Level; use tui::{ style::{Color, Style}, text::{Span, Spans}, }; use crate::{ clashctl::model::{ConnectionsWithSpeed, Log, Proxies, Rules, Traffic, Version}, components::MovableListItem, utils::AsColor, Error, Result, }; #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum Event { Quit, Input(InputEvent), Update(UpdateEvent), Diagnostic(DiagnosticEvent), } impl<'a> MovableListItem<'a> for Event { fn to_spans(&self) -> Spans<'a> { match self { Event::Quit => Spans(vec![]), Event::Update(event) => Spans(vec![ Span::styled("⇵ ", Style::default().fg(Color::Yellow)), Span::raw(event.to_string()), ]), Event::Input(event) => Spans(vec![ Span::styled("✜ ", Style::default().fg(Color::Green)), Span::raw(format!("{:?}",
, ToggleDebug, ToggleHold, List(ListEvent), TestLatency, NextSort, PrevSort, Other(KE), } #[derive(Debug, Clone, PartialEq, Eq)] pub struct ListEvent { pub fast: bool, pub code: KC, } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum UpdateEvent { Config(clashctl_interactive::clashctl::model::Config), Connection(ConnectionsWithSpeed), Version(Version), Traffic(Traffic), Proxies(Proxies), Rules(Rules), Log(Log), ProxyTestLatencyDone, } impl Display for UpdateEvent { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { UpdateEvent::Config(x) => write!(f, "{:?}", x), UpdateEvent::Connection(x) => write!(f, "{:?}", x), UpdateEvent::Version(x) => write!(f, "{:?}", x), UpdateEvent::Traffic(x) => write!(f, "{:?}", x), UpdateEvent::Proxies(x) => write!(f, "{:?}", x), UpdateEvent::Rules(x) => write!(f, "{:?}", x), UpdateEvent::Log(x) => write!(f, "{:?}", x), UpdateEvent::ProxyTestLatencyDone => write!(f, "Test latency done"), } } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum DiagnosticEvent { Log(Level, String), } impl TryFrom<KC> for Event { type Error = Error; fn try_from(value: KC) -> Result<Self> { match value { KC::Char('q') | KC::Char('x') => Ok(Event::Quit), KC::Char('t') => Ok(Event::Input(InputEvent::TestLatency)), KC::Esc => Ok(Event::Input(InputEvent::Esc)), KC::Char(' ') => Ok(Event::Input(InputEvent::ToggleHold)), KC::Char(char) if char.is_ascii_digit() => Ok(Event::Input(InputEvent::TabGoto( char.to_digit(10) .expect("char.is_ascii_digit() should be able to parse into number") as u8, ))), _ => Err(Error::TuiInternalErr), } } } impl From<KE> for Event { fn from(value: KE) -> Self { match (value.modifiers, value.code) { (KM::CONTROL, KC::Char('c')) => Self::Quit, (KM::CONTROL, KC::Char('d')) => Self::Input(InputEvent::ToggleDebug), (modi, arrow @ (KC::Left | KC::Right | KC::Up | KC::Down | KC::Enter)) => { Event::Input(InputEvent::List(ListEvent { fast: matches!(modi, KM::CONTROL | KM::SHIFT), 
code: arrow, })) } (KM::ALT, KC::Char('s')) => Self::Input(InputEvent::PrevSort), (KM::NONE, KC::Char('s')) => Self::Input(InputEvent::NextSort), (KM::NONE, key_code) => key_code .try_into() .unwrap_or_else(|_| Self::Input(InputEvent::Other(value))), _ => Self::Input(InputEvent::Other(value)), } } }
event)), ]), Event::Diagnostic(event) => match event { DiagnosticEvent::Log(level, payload) => Spans(vec![ Span::styled( format!("✇ {:<6}", level), Style::default().fg(level.as_color()), ), Span::raw(payload.to_owned()), ]), }, } } } impl Event { pub fn is_quit(&self) -> bool { matches!(self, Event::Quit) } pub fn is_interface(&self) -> bool { matches!(self, Event::Input(_)) } pub fn is_update(&self) -> bool { matches!(self, Event::Update(_)) } pub fn is_diagnostic(&self) -> bool { matches!(self, Event::Diagnostic(_)) } } #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum InputEvent { Esc, TabGoto(u8)
random
[ { "content": "pub fn help_footer(content: &str, normal: Style, highlight: Style) -> Spans {\n\n if content.is_empty() {\n\n Spans(vec![])\n\n } else if content.len() == 1 {\n\n Spans(vec![Span::raw(content)])\n\n } else {\n\n let (index, _) = content.char_indices().nth(1).unwrap();...
Rust
src/widget_themes/classic.rs
fltk-rs/fltk-theme
8e792be048cb6c868c66f526bffd7782f52f54ee
use super::*; use fltk::{app, enums::Color, misc::Tooltip}; fn classic_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAWWMMRR", x, y, w, h); } fn classic_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_button_up_frame(x, y, w, h, c); } fn classic_check_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMMPPAA", x, y, w, h); } fn classic_check_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_check_down_frame(x, y, w, h, c); } fn classic_panel_thin_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("MMWW", x, y, w, h); } fn classic_panel_thin_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_panel_thin_up_frame(x, y, w, h, c); } fn classic_spacer_thin_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMM", x, y, w, h); } fn classic_spacer_thin_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_spacer_thin_down_frame(x, y, w, h, c); } fn classic_default_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAAAGGWWMMRR", x, y, w, h); } fn classic_default_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 3, y + 3, w - 6, h - 6); classic_default_button_up_frame(x, y, w, h, c); } fn classic_radio_round_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('M' as i32 - 'A' as i32))); draw_arc(x, y, w, h, 45.0, 225.0); set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_arc(x, y, w, h, -135.0, 45.0); set_draw_color(activated_color(Color::gray_ramp(0))); draw_arc(x + 1, y + 1, w - 2, h - 2, 45.0, 
225.0); set_draw_color(activated_color(Color::gray_ramp('T' as i32 - 'A' as i32))); draw_arc(x + 1, y + 1, w - 2, h - 2, -135.0, 45.0); } fn classic_radio_round_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_pie(x + 2, y + 2, w - 4, h - 4, 0.0, 360.0); classic_radio_round_down_frame(x, y, w, h, c); } fn use_classic_scheme() { app::set_scheme(app::Scheme::Base); app::set_frame_type_cb(OS_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_PANEL_THIN_UP_BOX, classic_panel_thin_up_box, 1, 1, 2, 2); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_BOX, classic_spacer_thin_down_box, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_PANEL_THIN_UP_FRAME, classic_panel_thin_up_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_FRAME, classic_spacer_thin_down_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_RADIO_ROUND_DOWN_BOX, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_HOVERED_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_HOVERED_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_INPUT_THIN_DOWN_BOX, classic_check_down_box, 2, 3, 4, 6); app::set_frame_type_cb( OS_INPUT_THIN_DOWN_FRAME, classic_check_down_frame, 2, 3, 4, 6, ); app::set_frame_type_cb( OS_DEFAULT_BUTTON_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_HOVERED_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_DEPRESSED_DOWN_BOX, 
classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_TOOLBAR_BUTTON_HOVER_BOX, FrameType::FlatBox); app::set_frame_type_cb(OS_TABS_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_SWATCH_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_MINI_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_MINI_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(FrameType::UpBox, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(FrameType::DownBox, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb( FrameType::RoundDownBox, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_BG_BOX, FrameType::FlatBox); } fn use_classic_colors() { app::background(0xD4, 0xD0, 0xC8); app::background2(0xFF, 0xFF, 0xFF); app::foreground(0x00, 0x00, 0x00); app::set_color(Color::Inactive, 0x5F, 0x5F, 0x5F); app::set_color(Color::Selection, 0x0A, 0x24, 0x6A); app::set_color(Color::Free, 0xD4, 0xD0, 0xC8); Tooltip::set_color(Color::from_rgb(0xFF, 0xFF, 0xE1)); Tooltip::set_text_color(Color::ForeGround); } pub(crate) fn use_classic_theme() { use_classic_scheme(); use_classic_colors(); use_native_settings(); }
use super::*; use fltk::{app, enums::Color, misc::Tooltip}; fn classic_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAWWMMRR", x, y, w, h); } fn classic_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_button_up_frame(x, y, w, h, c); } fn classic_check_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMMPPAA", x, y, w, h); } fn classic_check_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 2, y + 2, w - 4, h - 4); classic_check_down_frame(x, y, w, h, c); } fn classic_panel_thin_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("MMWW", x, y, w, h); } fn classic_panel_thin_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_panel_thin_up_frame(x, y, w, h, c); } fn classic_spacer_thin_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("WWMM", x, y, w, h); } fn classic_spacer_thin_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 1, y + 1, w - 2, h - 2); classic_spacer_thin_down_frame(x, y, w, h, c); } fn classic_default_button_up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { draw_frame2("AAAAGGWWMMRR", x, y, w, h); } fn classic_default_button_up_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(c)); draw_rectf(x + 3, y + 3, w - 6, h - 6); classic_default_button_up_frame(x, y, w, h, c); } fn classic_radio_round_down_frame(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('M' as i32 - 'A' as i32))); draw_arc(x, y, w, h, 45.0, 225.0); set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_arc(x, y, w, h, -135.0, 45.0); set_draw_color(activated_color(Color::gray_ramp(0))); draw_arc(x + 1, y + 1, w - 2, h - 2, 45.0, 
225.0); set_draw_color(activated_color(Color::gray_ramp('T' as i32 - 'A' as i32))); draw_arc(x + 1, y + 1, w - 2, h - 2, -135.0, 45.0); } fn classic_radio_round_down_box(x: i32, y: i32, w: i32, h: i32, c: Color) { set_draw_color(activated_color(Color::gray_ramp('W' as i32 - 'A' as i32))); draw_pie(x + 2, y + 2, w - 4, h - 4, 0.0, 360.0); classic_radio_round_down_frame(x, y, w, h, c); } fn use_classic_scheme() { app::set_scheme(app::Scheme::Base); app::set_frame_type_cb(OS_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_CHECK_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4); app::set_frame_type_cb(OS_PANEL_THIN_UP_BOX, classic_panel_thin_up_box, 1, 1, 2, 2); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_BOX, classic_spacer_thin_down_box, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_PANEL_THIN_UP_FRAME, classic_panel_thin_up_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_SPACER_THIN_DOWN_FRAME, classic_spacer_thin_down_frame, 1, 1, 2, 2, ); app::set_frame_type_cb( OS_RADIO_ROUND_DOWN_BOX, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_HOVERED_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_HOVERED_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_INPUT_THIN_DOWN_BOX, classic_check_down_box, 2, 3, 4, 6); app::set_frame_type_cb( OS_INPUT_THIN_DOWN_FRAME, classic_check_down_frame, 2, 3, 4, 6, ); app::set_frame_type_cb( OS_DEFAULT_BUTTON_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_HOVERED_UP_BOX, classic_default_button_up_box, 3, 3, 6, 6, ); app::set_frame_type_cb( OS_DEFAULT_DEPRESSED_DOWN_BOX, 
classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_TOOLBAR_BUTTON_HOVER_BOX, FrameType::FlatBox); app::set_frame_type_cb(OS_TABS_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_SWATCH_BOX, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb(OS_MINI_BUTTON_UP_BOX, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_BOX, classic_check_down_box, 2, 2, 4, 4, ); app::set_frame_type_cb(OS_MINI_BUTTON_UP_FRAME, classic_button_up_frame, 2, 2, 4, 4); app::set_frame_type_cb( OS_MINI_DEPRESSED_DOWN_FRAME, classic_check_down_frame, 2, 2, 4, 4, ); app::set_frame_type_cb(FrameType::UpBox, classic_button_up_box, 2, 2, 4, 4); app::set_frame_type_cb(FrameType::DownBox, classic_check_down_box, 2, 2, 4, 4); app::set_frame_type_cb( FrameType::RoundDownBox, classic_radio_round_down_box, 2, 2, 4, 4, ); app::set_frame_type2(OS_BG_BOX, FrameType::FlatBox); }
pub(crate) fn use_classic_theme() { use_classic_scheme(); use_classic_colors(); use_native_settings(); }
fn use_classic_colors() { app::background(0xD4, 0xD0, 0xC8); app::background2(0xFF, 0xFF, 0xFF); app::foreground(0x00, 0x00, 0x00); app::set_color(Color::Inactive, 0x5F, 0x5F, 0x5F); app::set_color(Color::Selection, 0x0A, 0x24, 0x6A); app::set_color(Color::Free, 0xD4, 0xD0, 0xC8); Tooltip::set_color(Color::from_rgb(0xFF, 0xFF, 0xE1)); Tooltip::set_text_color(Color::ForeGround); }
function_block-full_function
[ { "content": "fn up_frame(x: i32, y: i32, w: i32, h: i32, c: Color) {\n\n rect(x, y, w, h, Color::color_average(Color::White, c, 0.2));\n\n}\n\n\n", "file_path": "src/widget_schemes/clean.rs", "rank": 6, "score": 231206.04478592548 }, { "content": "fn up_box(x: i32, y: i32, w: i32, h: i32...
Rust
src/tools/builder.rs
feenkcom/gtoolkit-maestro-rs
1c1651b6ddaf8648bce6037fa24e34ca08be7a93
use crate::create::FileToCreate; use crate::download::{FileToDownload, FilesToDownload}; use crate::{ Application, Checker, Downloader, ExecutableSmalltalk, FileToMove, ImageSeed, InstallerError, Result, Smalltalk, SmalltalkCommand, SmalltalkExpressionBuilder, SmalltalkScriptToExecute, SmalltalkScriptsToExecute, BUILDING, CREATING, DOWNLOADING, EXTRACTING, MOVING, SPARKLE, }; use crate::{FileToUnzip, FilesToUnzip}; use clap::{AppSettings, ArgEnum, Clap}; use feenk_releaser::{Version, VersionBump}; use file_matcher::FileNamed; use indicatif::HumanDuration; use reqwest::StatusCode; use std::path::PathBuf; use std::str::FromStr; use std::time::Instant; use url::Url; pub const DEFAULT_PHARO_IMAGE: &str = "https://dl.feenk.com/pharo/Pharo9.0-SNAPSHOT.build.1564.sha.f5f541c.arch.64bit.zip"; #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct BuildOptions { #[clap(long)] pub overwrite: bool, #[clap(long, default_value = "cloner", possible_values = Loader::VARIANTS, case_insensitive = true)] pub loader: Loader, #[clap(long, parse(try_from_str = url_parse), conflicts_with_all(&["image_zip", "image_file"]))] pub image_url: Option<Url>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_file"]))] pub image_zip: Option<PathBuf>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_zip"]))] pub image_file: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub public_key: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub private_key: Option<PathBuf>, #[clap(long, parse(try_from_str = BuildVersion::from_str), default_value = BuildVersion::BleedingEdge.abstract_name())] pub version: BuildVersion, } impl BuildOptions { pub fn image_seed(&self) -> ImageSeed { if let Some(ref image_zip) = self.image_zip { return ImageSeed::Zip(image_zip.clone()); } if let Some(ref image_url) = self.image_url { return ImageSeed::Url(image_url.clone()); } if let Some(ref 
image_file) = self.image_file { return ImageSeed::Image(image_file.clone()); } return ImageSeed::Url( url_parse(DEFAULT_PHARO_IMAGE) .unwrap_or_else(|_| panic!("Failed to parse url: {}", DEFAULT_PHARO_IMAGE)), ); } } fn url_parse(val: &str) -> Result<Url> { Url::parse(val).map_err(|error| error.into()) } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct ReleaseBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long, default_value = VersionBump::Patch.to_str(), possible_values = VersionBump::variants(), case_insensitive = true)] pub bump: VersionBump, #[clap(long)] pub no_gt_world: bool, } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct LocalBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long)] pub no_gt_world: bool, } impl BuildOptions { fn ssh_keys(&self) -> Result<Option<(PathBuf, PathBuf)>> { let public_key = self.public_key()?; let private_key = self.private_key()?; match (&private_key, &public_key) { (Some(private), Some(public)) => Ok(Some((private.clone(), public.clone()))), (None, None) => Ok(None), _ => InstallerError::SshKeysConfigurationError(private_key, public_key).into(), } } fn public_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.public_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PublicKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } fn private_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.private_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PrivateKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } } impl BuildOptions { pub fn 
new() -> Self { Self { overwrite: false, loader: Loader::Cloner, image_url: None, image_zip: None, image_file: None, public_key: None, private_key: None, version: BuildVersion::BleedingEdge, } } pub fn should_overwrite(&self) -> bool { self.overwrite } pub fn overwrite(&mut self, overwrite: bool) { self.overwrite = overwrite; } pub fn loader(&mut self, loader: Loader) { self.loader = loader; } } #[derive(ArgEnum, Copy, Clone, Debug)] #[repr(u32)] pub enum Loader { #[clap(name = "cloner")] Cloner, #[clap(name = "metacello")] Metacello, } impl FromStr for Loader { type Err = String; fn from_str(s: &str) -> std::result::Result<Self, String> { <Loader as ArgEnum>::from_str(s, true) } } impl ToString for Loader { fn to_string(&self) -> String { (Loader::VARIANTS[*self as usize]).to_owned() } } #[derive(Debug, Clone)] pub enum BuildVersion { LatestRelease, BleedingEdge, Version(Version), } impl BuildVersion { pub fn abstract_name(&self) -> &str { match self { BuildVersion::LatestRelease => "latest-release", BuildVersion::BleedingEdge => "bleeding-edge", BuildVersion::Version(_) => "vX.Y.Z", } } } impl FromStr for BuildVersion { type Err = InstallerError; fn from_str(s: &str) -> Result<Self> { let version = s.to_string().to_lowercase(); let version_str = version.as_str(); match version_str { "latest-release" => Ok(BuildVersion::LatestRelease), "bleeding-edge" => Ok(BuildVersion::BleedingEdge), _ => Ok(BuildVersion::Version(Version::parse(version_str)?)), } } } impl ToString for BuildVersion { fn to_string(&self) -> String { match self { BuildVersion::Version(version) => version.to_string(), _ => self.abstract_name().to_string(), } } } pub struct Builder; #[derive(Serialize)] pub struct LoaderVersionInfo { gtoolkit_version: String, releaser_version: String, } impl Builder { pub fn new() -> Self { Self {} } pub async fn resolve_loader_version_info( &self, build_options: &BuildOptions, ) -> Result<LoaderVersionInfo> { let gtoolkit_version_string = match 
&build_options.version { BuildVersion::LatestRelease => { format!( "v{}", Application::latest_gtoolkit_image_version() .await? .to_string() ) } BuildVersion::BleedingEdge => "main".to_string(), BuildVersion::Version(version) => { format!("v{}", version.to_string()) } }; let releaser_version_string = match &build_options.version { BuildVersion::BleedingEdge => "main".to_string(), _ => { let releaser_version_file_url_string = format!( "https://raw.githubusercontent.com/feenkcom/gtoolkit/{}/gtoolkit-releaser.version", &gtoolkit_version_string ); let releaser_version_file_url = Url::parse(&releaser_version_file_url_string)?; let releaser_version_file_response = reqwest::get(releaser_version_file_url.clone()).await?; if releaser_version_file_response.status() != StatusCode::OK { return InstallerError::FailedToDownloadReleaserVersion( releaser_version_file_url.clone(), releaser_version_file_response.status(), ) .into(); } let releaser_version_file_content = releaser_version_file_response.text().await?; let releaser_version = Version::parse(releaser_version_file_content)?; format!("v{}", releaser_version.to_string()) } }; Ok(LoaderVersionInfo { gtoolkit_version: gtoolkit_version_string, releaser_version: releaser_version_string, }) } pub async fn build( &self, application: &mut Application, build_options: &BuildOptions, ) -> Result<()> { let started = Instant::now(); let image_seed = build_options.image_seed(); application.set_image_seed(image_seed.clone())?; Checker::new() .check(application, build_options.should_overwrite()) .await?; application.serialize_into_file()?; println!("{}Downloading files...", DOWNLOADING); let pharo_vm = FileToDownload::new( Url::parse(application.pharo_vm_url())?, application.workspace(), "pharo-vm.zip", ); let files_to_download = FilesToDownload::new() .extend(Downloader::files_to_download(application)) .add(pharo_vm.clone()) .maybe_add(image_seed.file_to_download(application)); files_to_download.download().await?; println!("{}Extracting 
files...", EXTRACTING); let files_to_unzip = FilesToUnzip::new() .extend(Downloader::files_to_unzip(application)) .add(FileToUnzip::new( pharo_vm.path(), application.workspace().join("pharo-vm"), )) .maybe_add(image_seed.file_to_unzip(application)); files_to_unzip.unzip().await?; if !image_seed.is_image_file() { println!("{}Moving files...", MOVING); let seed_image = FileNamed::wildmatch(format!("*.{}", application.image_extension())) .within(image_seed.seed_image_directory(application)) .find()?; let seed_smalltalk = Smalltalk::new(application.pharo_executable(), seed_image, application); let seed_evaluator = seed_smalltalk.evaluator(); SmalltalkCommand::new("save") .arg( application .workspace() .join(application.image_name()) .display() .to_string(), ) .execute(&seed_evaluator)?; FileToMove::new( FileNamed::wildmatch("*.sources") .within(image_seed.seed_image_directory(application)) .find()?, application.workspace(), ) .move_file() .await?; } let loader_template_string = match build_options.loader { Loader::Cloner => include_str!("../st/clone-gt.st"), Loader::Metacello => include_str!("../st/load-gt.st"), }; let loader_template = mustache::compile_str(loader_template_string)?; let loader_version_info = self.resolve_loader_version_info(build_options).await?; let loader_script = loader_template.render_to_string(&loader_version_info)?; let loader_script_file_name = format!("load-gt-{}.st", &loader_version_info.gtoolkit_version); println!("{}Creating build scripts...", CREATING); FileToCreate::new( application.workspace().join("load-patches.st"), include_str!("../st/load-patches.st"), ) .create() .await?; FileToCreate::new( application.workspace().join("load-taskit.st"), include_str!("../st/load-taskit.st"), ) .create() .await?; FileToCreate::new( application.workspace().join(&loader_script_file_name), loader_script, ) .create() .await?; let gtoolkit = application.gtoolkit(); let pharo = application.pharo(); println!("{}Preparing the image...", BUILDING); 
SmalltalkScriptsToExecute::new() .add(SmalltalkScriptToExecute::new("load-patches.st")) .add(SmalltalkScriptToExecute::new("load-taskit.st")) .execute(pharo.evaluator().save(true)) .await?; println!("{}Building Glamorous Toolkit...", BUILDING); let ssh_keys = build_options.ssh_keys()?; let mut scripts_to_execute = SmalltalkScriptsToExecute::new(); if let Some((private, public)) = ssh_keys { scripts_to_execute.add( SmalltalkExpressionBuilder::new() .add("IceCredentialsProvider useCustomSsh: true") .add(format!( "IceCredentialsProvider sshCredentials publicKey: '{}'; privateKey: '{}'", private.display(), public.display() )) .build(), ); } scripts_to_execute .add(SmalltalkScriptToExecute::new(&loader_script_file_name)) .execute(gtoolkit.evaluator().save(true)) .await?; println!("{} Done in {}", SPARKLE, HumanDuration(started.elapsed())); Ok(()) } }
use crate::create::FileToCreate; use crate::download::{FileToDownload, FilesToDownload}; use crate::{ Application, Checker, Downloader, ExecutableSmalltalk, FileToMove, ImageSeed, InstallerError, Result, Smalltalk, SmalltalkCommand, SmalltalkExpressionBuilder, SmalltalkScriptToExecute, SmalltalkScriptsToExecute, BUILDING, CREATING, DOWNLOADING, EXTRACTING, MOVING, SPARKLE, }; use crate::{FileToUnzip, FilesToUnzip}; use clap::{AppSettings, ArgEnum, Clap}; use feenk_releaser::{Version, VersionBump}; use file_matcher::FileNamed; use indicatif::HumanDuration; use reqwest::StatusCode; use std::path::PathBuf; use std::str::FromStr; use std::time::Instant; use url::Url; pub const DEFAULT_PHARO_IMAGE: &str = "https://dl.feenk.com/pharo/Pharo9.0-SNAPSHOT.build.1564.sha.f5f541c.arch.64bit.zip"; #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct BuildOptions { #[clap(long)] pub overwrite: bool, #[clap(long, default_value = "cloner", possible_values = Loader::VARIANTS, case_insensitive = true)] pub loader: Loader, #[clap(long, parse(try_from_str = url_parse), conflicts_with_all(&["image_zip", "image_file"]))] pub image_url: Option<Url>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_file"]))] pub image_zip: Option<PathBuf>, #[clap(long, parse(from_os_str), conflicts_with_all(&["image_url", "image_zip"]))] pub image_file: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub public_key: Option<PathBuf>, #[clap(long, parse(from_os_str))] pub private_key: Option<PathBuf>, #[clap(long, parse(try_from_str = BuildVersion::from_str), default_value = BuildVersion::BleedingEdge.abstract_name())] pub version: BuildVersion, } impl BuildOptions { pub fn image_seed(&self) -> ImageSeed { if let Some(ref image_zip) = self.image_zip { return ImageSeed::Zip(image_zip.clone()); } if let Some(ref image_url) = self.image_url { return ImageSeed::Url(image_url.clone()); } if let Some(ref 
image_file) = self.image_file { return ImageSeed::Image(image_file.clone()); } return ImageSeed::Url( url_parse(DEFAULT_PHARO_IMAGE) .unwrap_or_else(|_| panic!("Failed to parse url: {}", DEFAULT_PHARO_IMAGE)), ); } } fn url_parse(val: &str) -> Result<Url> { Url::parse(val).map_err(|error| error.into()) } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct ReleaseBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long, default_value = VersionBump::Patch.to_str(), possible_values = VersionBump::variants(), case_insensitive = true)] pub bump: VersionBump, #[clap(long)] pub no_gt_world: bool, } #[derive(Clap, Debug, Clone)] #[clap(setting = AppSettings::ColorAlways)] #[clap(setting = AppSettings::ColoredHelp)] pub struct LocalBuildOptions { #[clap(flatten)] pub build_options: BuildOptions, #[clap(long)] pub no_gt_world: bool, } impl BuildOptions { fn ssh_keys(&self) -> Result<Option<(PathBuf, PathBuf)>> { let public_key = self.public_key()?; let private_key = self.private_key()?; match (&private_key, &public_key) { (Some(private), Some(public)) => Ok(Some((private.clone(), public.clone()))), (None, None) => Ok(None), _ => InstallerError::SshKeysConfigurationError(private_key, public_key).into(), } } fn public_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.public_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PublicKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } fn private_key(&self) -> Result<Option<PathBuf>> { if let Some(ref key) = self.private_key { if key.exists() { Ok(Some(to_absolute::canonicalize(key).map_err(|error| { InstallerError::CanonicalizeError(key.clone(), error) })?)) } else { return InstallerError::PrivateKeyDoesNotExist(key.clone()).into(); } } else { Ok(None) } } } impl BuildOptions { pub fn 
new() -> Self { Self { overwrite: false, loader: Loader::Cloner, image_url: None, image_zip: None, image_file: None, public_key: None, private_key: None, version: BuildVersion::BleedingEdge, } } pub fn should_overwrite(&self) -> bool { self.overwrite } pub fn overwrite(&mut self, overwrite: bool) { self.overwrite = overwrite; } pub fn loader(&mut self, loader: Loader) { self.loader = loader; } } #[derive(ArgEnum, Copy, Clone, Debug)] #[repr(u32)] pub enum Loader { #[clap(name = "cloner")] Cloner, #[clap(name = "metacello")] Metacello, } impl FromStr for Loader { type Err = String; fn from_str(s: &str) -> std::result::Result<Self, String> { <Loader as ArgEnum>::from_str(s, true) } } impl ToString for Loader { fn to_string(&self) -> String { (Loader::VARIANTS[*self as usize]).to_owned() } } #[derive(Debug, Clone)] pub enum BuildVersion { LatestRelease, BleedingEdge, Version(Version), } impl BuildVersion { pub fn abstract_name(&self) -> &str { match self { BuildVersion::LatestRelease => "latest-release", BuildVersion::BleedingEdge => "bleeding-edge", BuildVersion::Version(_) => "vX.Y.Z", } } } impl FromStr for BuildVersion { type Err = InstallerError; fn from_str(s: &str) -> Result<Self> { let version = s.to_string().to_lowercase(); let version_str = version.as_str(); match version_str { "latest-release" => Ok(BuildVersion::LatestRelease), "bleeding-edge" => Ok(BuildVersion::BleedingEdge), _ => Ok(BuildVersion::Version(Version::parse(version_str)?)), } } } impl ToString for BuildVersion { fn to_string(&self) -> String { match self { BuildVersion::Version(version) => version.to_string(), _ => self.abstract_name().to_string(), } } } pub struct Builder; #[derive(Serialize)] pub struct LoaderVersionInfo { gtoolkit_version: String, releaser_version: String, } impl Builder { pub fn new() -> Self { Self {} } pub async fn resolve_loader_version_info( &self, build_options: &BuildOptions, ) -> Result<LoaderVersionInfo> { let gtoolkit_version_string = match 
&build_options.version { BuildVersion::LatestRelease => { format!( "v{}", Application::latest_gtoolkit_image_version() .await? .to_string() ) } BuildVersion::BleedingEdge => "main".to_string(), BuildVersion::Version(version) => { format!("v{}", version.to_string()) } }; let releaser_version_string = match &build_options.version { BuildVersion::BleedingEdge => "main".to_string(), _ => { let releaser_version_file_url_string = format!( "https://raw.githubusercontent.com/feenkcom/gtoolkit/{}/gtoolkit-releaser.version", &gtoolkit_version_string ); let releaser_version_file_url = Url::parse(&releaser_version_file_url_string)?; let releaser_version_file_response = reqwest::get(releaser_version_file_url.clone()).await?; if releaser_version_file_response.status() != StatusCode::OK { return InstallerError::FailedToDownloadReleaserVersion( releaser_version_file_url.clone(), releaser_version_file_response.status(), ) .into(); } let releaser_version_file_content = releaser_version_file_response.text().await?; let releaser_version = Version::parse(releaser_version_file_content)?; format!("v{}", releaser_version.to_string()) } }; Ok(LoaderVersionInfo { gtoolkit_version: gtoolkit_version_string, releaser_version: releaser_version_string, }) }
}
pub async fn build( &self, application: &mut Application, build_options: &BuildOptions, ) -> Result<()> { let started = Instant::now(); let image_seed = build_options.image_seed(); application.set_image_seed(image_seed.clone())?; Checker::new() .check(application, build_options.should_overwrite()) .await?; application.serialize_into_file()?; println!("{}Downloading files...", DOWNLOADING); let pharo_vm = FileToDownload::new( Url::parse(application.pharo_vm_url())?, application.workspace(), "pharo-vm.zip", ); let files_to_download = FilesToDownload::new() .extend(Downloader::files_to_download(application)) .add(pharo_vm.clone()) .maybe_add(image_seed.file_to_download(application)); files_to_download.download().await?; println!("{}Extracting files...", EXTRACTING); let files_to_unzip = FilesToUnzip::new() .extend(Downloader::files_to_unzip(application)) .add(FileToUnzip::new( pharo_vm.path(), application.workspace().join("pharo-vm"), )) .maybe_add(image_seed.file_to_unzip(application)); files_to_unzip.unzip().await?; if !image_seed.is_image_file() { println!("{}Moving files...", MOVING); let seed_image = FileNamed::wildmatch(format!("*.{}", application.image_extension())) .within(image_seed.seed_image_directory(application)) .find()?; let seed_smalltalk = Smalltalk::new(application.pharo_executable(), seed_image, application); let seed_evaluator = seed_smalltalk.evaluator(); SmalltalkCommand::new("save") .arg( application .workspace() .join(application.image_name()) .display() .to_string(), ) .execute(&seed_evaluator)?; FileToMove::new( FileNamed::wildmatch("*.sources") .within(image_seed.seed_image_directory(application)) .find()?, application.workspace(), ) .move_file() .await?; } let loader_template_string = match build_options.loader { Loader::Cloner => include_str!("../st/clone-gt.st"), Loader::Metacello => include_str!("../st/load-gt.st"), }; let loader_template = mustache::compile_str(loader_template_string)?; let loader_version_info = 
self.resolve_loader_version_info(build_options).await?; let loader_script = loader_template.render_to_string(&loader_version_info)?; let loader_script_file_name = format!("load-gt-{}.st", &loader_version_info.gtoolkit_version); println!("{}Creating build scripts...", CREATING); FileToCreate::new( application.workspace().join("load-patches.st"), include_str!("../st/load-patches.st"), ) .create() .await?; FileToCreate::new( application.workspace().join("load-taskit.st"), include_str!("../st/load-taskit.st"), ) .create() .await?; FileToCreate::new( application.workspace().join(&loader_script_file_name), loader_script, ) .create() .await?; let gtoolkit = application.gtoolkit(); let pharo = application.pharo(); println!("{}Preparing the image...", BUILDING); SmalltalkScriptsToExecute::new() .add(SmalltalkScriptToExecute::new("load-patches.st")) .add(SmalltalkScriptToExecute::new("load-taskit.st")) .execute(pharo.evaluator().save(true)) .await?; println!("{}Building Glamorous Toolkit...", BUILDING); let ssh_keys = build_options.ssh_keys()?; let mut scripts_to_execute = SmalltalkScriptsToExecute::new(); if let Some((private, public)) = ssh_keys { scripts_to_execute.add( SmalltalkExpressionBuilder::new() .add("IceCredentialsProvider useCustomSsh: true") .add(format!( "IceCredentialsProvider sshCredentials publicKey: '{}'; privateKey: '{}'", private.display(), public.display() )) .build(), ); } scripts_to_execute .add(SmalltalkScriptToExecute::new(&loader_script_file_name)) .execute(gtoolkit.evaluator().save(true)) .await?; println!("{} Done in {}", SPARKLE, HumanDuration(started.elapsed())); Ok(()) }
function_block-full_function
[ { "content": "pub trait ExecutableSmalltalk {\n\n fn create_command(&self, evaluator: &SmalltalkEvaluator) -> Result<Command>;\n\n fn execute(&self, evaluator: &SmalltalkEvaluator) -> Result<()> {\n\n let mut command = self.create_command(evaluator)?;\n\n if evaluator.is_verbose() {\n\n ...
Rust
src/lib.rs
gcarq/seek_bufread
1ff6dff6e449cff969e94bb77aacb89de20cc4ac
use std::fmt; use std::io::{self, BufRead, Read, Seek, SeekFrom}; const DEFAULT_BUF_SIZE: usize = 8 * 1024; pub struct BufReader<R> { inner: R, buf: Box<[u8]>, buf_pos: usize, cap: usize, absolute_pos: u64, } impl<R: Read + Seek> BufReader<R> { pub fn new(inner: R) -> BufReader<R> { BufReader::with_capacity(DEFAULT_BUF_SIZE, inner) } pub fn with_capacity(cap: usize, inner: R) -> BufReader<R> { BufReader { inner: inner, buf: vec![0; cap].into_boxed_slice(), buf_pos: 0, cap: 0, absolute_pos: 0, } } pub fn position(&self) -> u64 { self.absolute_pos } pub fn capacity(&self) -> usize { self.cap } pub fn available(&self) -> usize { self.cap.checked_sub(self.buf_pos).unwrap_or(0) } pub fn into_inner(mut self) -> io::Result<R> { try!(self.inner.seek(SeekFrom::Start(self.absolute_pos))); Ok(self.inner) } fn sync_and_flush(&mut self, pos: SeekFrom) -> io::Result<u64> { self.buf_pos = self.cap; self.absolute_pos = try!(self.inner.seek(pos)); Ok(self.absolute_pos) } fn seek_backward(&mut self, n: i64) -> io::Result<u64> { let n_abs = n.abs() as usize; if self.buf_pos.checked_sub(n_abs).is_some() { self.absolute_pos -= n_abs as u64; self.buf_pos -= n_abs; Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos - n_abs as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } fn seek_forward(&mut self, n: usize) -> io::Result<u64> { if self.available().checked_sub(n).is_some() { self.consume(n); Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos + n as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } } impl<R: Read> Read for BufReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let n_exp = buf.len(); let mut n_total = 0; while n_total < n_exp { let n_read = try!(try!(self.fill_buf()).read(&mut buf[n_total..])); if n_read == 0 { break; } self.consume(n_read); n_total += n_read; } Ok(n_total) } } impl<R: Read> BufRead for BufReader<R> { fn fill_buf(&mut self) -> io::Result<&[u8]> { if self.cap == self.buf_pos { self.cap = 
try!(self.inner.read(&mut self.buf)); self.buf_pos = 0; } Ok(&self.buf[self.buf_pos..self.cap]) } fn consume(&mut self, amt: usize) { self.buf_pos += amt; self.absolute_pos += amt as u64; } } impl<R: Read + Seek> Seek for BufReader<R> { fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { match pos { SeekFrom::Current(n) => { match n >= 0 { true => self.seek_forward(n as usize), false => self.seek_backward(n) } } SeekFrom::Start(n) => { match n.checked_sub(self.absolute_pos) { Some(n_bytes) => self.seek_forward(n_bytes as usize), None => self.sync_and_flush(pos) } } _ => self.sync_and_flush(pos) } } } impl<R> fmt::Debug for BufReader<R> where R: fmt::Debug + Read + Seek { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_struct("BufReader") .field("reader", &self.inner) .field("available", &self.available()) .field("capacity", &self.cap) .field("position", &self.absolute_pos) .finish() } } #[cfg(test)] mod tests { use super::*; use std::io::{self, Cursor, Read, Seek, SeekFrom}; #[test] fn default_behaviour() { let mut reader = BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn default_behaviour_std() { let mut reader = io::BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn small_capacity() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } 
#[test] fn small_capacity_std() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = io::BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } #[test] fn seek_start() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_start_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_current_positive() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = 
BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_positive_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_negative() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_current_negative_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_end() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(2, inner); reader.seek(SeekFrom::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_end_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(2, inner); reader.seek(SeekFrom::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn into_inner() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(4, inner); reader.seek(SeekFrom::Current(5)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 8, 9, 10, 11, 12]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 2]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12]); let mut inner = reader.into_inner().unwrap(); let mut buf = [0; 8]; inner.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); } }
use std::fmt; use std::io::{self, BufRead, Read, Seek, SeekFrom}; const DEFAULT_BUF_SIZE: usize = 8 * 1024; pub struct BufReader<R> { inner: R, buf: Box<[u8]>, buf_pos: usize, cap: usize, absolute_pos: u64, } impl<R: Read + Seek> BufReader<R> { pub fn new(inner: R) -> BufReader<R> { BufReader::with_capacity(DEFAULT_BUF_SIZE, inner) } pub fn with_capacity(cap: usize, inner: R) -> BufReader<R> { BufReader { inner: inner, buf: vec![0; cap].into_boxed_slice(), buf_pos: 0, cap: 0, absolute_pos: 0, } } pub fn position(&self) -> u64 { self.absolute_pos } pub fn capacity(&self) -> usize { self.cap } pub fn available(&self) -> usize { self.cap.checked_sub(self.buf_pos).unwrap_or(0) } pub fn into_inner(mut self) -> io::Result<R> { try!(self.inner.seek(SeekFrom::Start(self.absolute_pos))); Ok(self.inner) } fn sync_and_flush(&mut self, pos: SeekFrom) -> io::Result<u64> { self.buf_pos = self.cap; self.absolute_pos = try!(self.inner.seek(pos)); Ok(self.absolute_pos) } fn seek_backward(&mut self, n: i64) -> io::Result<u64> { let n_abs = n.abs() as usize; if self.buf_pos.checked_sub(n_abs).is_some() { self.absolute_pos -= n_abs as u64; self.buf_pos -= n_abs; Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos - n_abs as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } fn seek_forward(&mut self, n: usize) -> io::Result<u64> { if self.available().checked_sub(n).is_some() { self.consume(n); Ok(self.absolute_pos) } else { let new_pos = self.absolute_pos + n as u64; self.sync_and_flush(SeekFrom::Start(new_pos)) } } } impl<R: Read> Read for BufReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let n_exp = buf.len(); let mut n_total = 0; while n_total < n_exp { let n_read = try!(try!(self.fill_buf()).read(&mut buf[n_total..])); if n_read == 0 { break; } self.consume(n_read); n_total += n_read; } Ok(n_total) } } impl<R: Read> BufRead for BufReader<R> { fn fill_buf(&mut self) -> io::Result<&[u8]> { if self.cap == self.buf_pos { self.cap = 
try!(self.inner.read(&mut self.buf)); self.buf_pos = 0; } Ok(&self.buf[self.buf_pos..self.cap]) } fn consume(&mut self, amt: usize) { self.buf_pos += amt; self.absolute_pos += amt as u64; } } impl<R: Read + Seek> Seek for BufReader<R> { fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { match pos { SeekFrom::Current(n) => { match n >= 0 { true => self.seek_forward(n as usize), false => self.seek_backward(n) } } SeekFrom::Start(n) => { match n.checked_sub(self.absolute_pos) { Some(n_bytes) => self.seek_forward(n_bytes as usize), None => self.sync_and_flush(pos) } } _ => self.sync_and_flush(pos) } } } impl<R> fmt::Debug for BufReader<R> where R: fmt::Debug + Read + Seek { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_struct("BufReader") .field("reader", &self.inner) .field("available", &self.available()) .field("capacity", &self.cap) .field("position", &self.absolute_pos) .finish() } } #[cfg(test)] mod tests { use super::*; use std::io::{self, Cursor, Read, Seek, SeekFrom}; #[test] fn default_behaviour() { let mut reader = BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn default_behaviour_std() { let mut reader = io::BufReader::new(Cursor::new([5, 6, 7, 0, 1, 2, 3, 4])); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 0, 1, 2, 3, 4]); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn small_capacity() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } 
#[test] fn small_capacity_std() { let inner = Cursor::new([5, 6, 7, 0, 1, 2, 3, 4]); let mut reader = io::BufReader::with_capacity(2, inner); let mut buf = [0, 0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7]); let mut buf = [0, 0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1]); let mut buf = [0]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2]); } #[test] fn seek_start() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_start_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(10, inner); reader.seek(SeekFrom::Start(3)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 10]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); reader.seek(SeekFrom::Start(13)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); reader.seek(SeekFrom::Start(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7]); } #[test] fn seek_current_positive() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = 
BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_positive_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(20, inner); reader.seek(SeekFrom::Current(2)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [2, 3, 4, 5, 6, 7, 8, 9]); reader.seek(SeekFrom::Current(6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [16, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_current_negative() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_current_negative_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(3, inner); reader.seek(SeekFrom::Current(4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [4, 5, 6, 7]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); reader.seek(SeekFrom::Current(-4)).unwrap(); let mut buf = [0; 4]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [6, 7, 8, 9]); } #[test] fn seek_end() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(2, inner); reader.seek(Seek
8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn seek_end_std() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = io::BufReader::with_capacity(2, inner); reader.seek(SeekFrom::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn into_inner() { let inner = Cursor::new([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); let mut reader = BufReader::with_capacity(4, inner); reader.seek(SeekFrom::Current(5)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [5, 6, 7, 8, 9, 10, 11, 12]); reader.seek(SeekFrom::Current(-2)).unwrap(); let mut buf = [0; 2]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12]); let mut inner = reader.into_inner().unwrap(); let mut buf = [0; 8]; inner.read(&mut buf).unwrap(); assert_eq!(buf, [13, 14, 15, 16, 0, 0, 0, 0]); } }
From::End(-6)).unwrap(); let mut buf = [0; 8]; reader.read(&mut buf).unwrap(); assert_eq!(buf, [11, 12, 13, 14, 15, 16, 0, 0]); reader.seek(SeekFrom::End(0)).unwrap(); let mut buf = [0;
function_block-random_span
[ { "content": "#[bench]\n\nfn read_10mb_default_from_cursor(b: &mut Bencher) {\n\n b.iter(|| {\n\n let inner = Cursor::new(vec![1; 10000000]);\n\n let mut reader = BufReader::new(inner);\n\n\n\n let mut buf: Vec<u8> = Vec::with_capacity(10000000);\n\n reader.read_to_end(&mut buf).u...
Rust
src/types/rowkey.rs
TimeExceed/alicloud-tablestore-rust-sdk
d8a7b24c10803e4641d6f0d30e0b9d4ddda100c4
use bytes::Bytes; use crate::{Error, ErrorCode}; use std::convert::TryFrom; use super::*; #[cfg(test)] use quickcheck::{Arbitrary, Gen, empty_shrinker}; #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKey(pub Vec<RowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKey(pub Vec<ExtendedRowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKeyColumn { pub name: Name, pub value: RowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKeyColumn { pub name: Name, pub value: ExtendedRowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub enum RowKeyValue { Int(i64), Str(String), Blob(Bytes), } #[derive(Debug, Clone, Eq, PartialEq)] pub enum ExtendedRowKeyValue { Int(i64), Str(String), Blob(Bytes), InfMin, InfMax, AutoIncr, } impl RowKey { pub fn new(keys: Vec<RowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&RowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=RowKeyColumn> { self.0.into_iter() } } impl ExtendedRowKey { pub fn new(keys: Vec<ExtendedRowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&ExtendedRowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=ExtendedRowKeyColumn> { self.0.into_iter() } } impl From<RowKey> for ExtendedRowKey { fn from(rk: RowKey) -> Self { let ext_rk = rk.into_iter() .map(|x| { ExtendedRowKeyColumn::from(x) }) .collect(); ExtendedRowKey::new(ext_rk) } } impl TryFrom<ExtendedRowKey> for RowKey { type Error = Error; fn try_from(ext_rk: ExtendedRowKey) -> Result<RowKey, Error> { let mut xs = vec![]; let r = ext_rk.into_iter() .try_for_each(|x| { match RowKeyColumn::try_from(x) { Ok(x) => { xs.push(x); Ok(()) } Err(e) => { Err(e) } } }); match r { Ok(_) => Ok(RowKey::new(xs)), Err(e) => Err(e), } } } impl From<RowKeyColumn> for ExtendedRowKeyColumn { fn from(x: RowKeyColumn) -> Self { ExtendedRowKeyColumn{ name: x.name, value: ExtendedRowKeyValue::from(x.value), } } } impl 
TryFrom<ExtendedRowKeyColumn> for RowKeyColumn { type Error = Error; fn try_from(x: ExtendedRowKeyColumn) -> Result<Self, Error> { Ok(RowKeyColumn{ name: x.name, value: RowKeyValue::try_from(x.value)?, }) } } impl From<RowKeyValue> for ExtendedRowKeyValue { fn from(x: RowKeyValue) -> Self { match x { RowKeyValue::Int(x) => ExtendedRowKeyValue::Int(x), RowKeyValue::Str(x) => ExtendedRowKeyValue::Str(x), RowKeyValue::Blob(x) => ExtendedRowKeyValue::Blob(x), } } } impl TryFrom<ExtendedRowKeyValue> for RowKeyValue { type Error = Error; fn try_from(value: ExtendedRowKeyValue) -> Result<Self, Self::Error> { let msg = "Cannot convert InfMin/InfMax/AutoIncr to PrimaryKeyValue"; match value { ExtendedRowKeyValue::Int(x) => Ok(RowKeyValue::Int(x)), ExtendedRowKeyValue::Str(x) => Ok(RowKeyValue::Str(x)), ExtendedRowKeyValue::Blob(x) => Ok(RowKeyValue::Blob(x)), _ => Err(Error{ code: ErrorCode::ClientUnknown, message: msg.to_string(), }) } } } #[cfg(test)] impl Arbitrary for RowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { RowKeyColumn{ name: Name::arbitrary(g), value: RowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { RowKeyColumn{ name: x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { RowKeyColumn{ name: name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { ExtendedRowKeyColumn{ name: Name::arbitrary(g), value: ExtendedRowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: 
name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for RowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let res = ExtendedRowKeyValue::arbitrary(g); match RowKeyValue::try_from(res) { Ok(x) => { return x; } Err(_) => {} } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let res = ExtendedRowKeyValue::from(self.clone()) .shrink() .map(|x| { RowKeyValue::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { match g.next_u32() % 6 { 0 => ExtendedRowKeyValue::Int(i64::arbitrary(g)), 1 => ExtendedRowKeyValue::Str(String::arbitrary(g)), 2 => ExtendedRowKeyValue::Blob(Bytes::from(Vec::<u8>::arbitrary(g))), 3 => ExtendedRowKeyValue::InfMin, 4 => ExtendedRowKeyValue::InfMax, 5 => ExtendedRowKeyValue::AutoIncr, _ => unimplemented!() } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { match self { ExtendedRowKeyValue::Int(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Int(x) }); Box::new(res) } ExtendedRowKeyValue::Str(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Str(x) }); Box::new(res) } ExtendedRowKeyValue::Blob(x) => { let res = x.to_vec().shrink() .map(|x| { ExtendedRowKeyValue::Blob(Bytes::from(x)) }); Box::new(res) } _ => empty_shrinker(), } } } #[cfg(test)] impl Arbitrary for RowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let ext = ExtendedRowKey::arbitrary(g); if let Ok(res) = RowKey::try_from(ext) { return res; } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let ext = ExtendedRowKey::from(self.clone()); let res = ext.shrink() .map(|x| { RowKey::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { let mut keys = vec![]; loop { let go_on = bool::arbitrary(g); if !go_on { break; } let col = ExtendedRowKeyColumn::arbitrary(g); keys.push(col); } 
ExtendedRowKey::new(keys) } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let higher = self.0.len(); let mut lower = higher / 2; let xs_fn = move || { if lower >= higher { return None; } let mid = (lower + higher) / 2; lower = mid + 1; return Some(mid); }; let for_cut_tail = self.clone(); let xs = std::iter::from_fn(xs_fn) .map(move |x| { let orig: &[ExtendedRowKeyColumn] = for_cut_tail.0.as_slice(); let mut res: Vec<ExtendedRowKeyColumn> = vec![]; res.extend_from_slice(&orig[0..x]); ExtendedRowKey::new(res) }); let mut res: Box<dyn Iterator<Item = Self>> = Box::new(xs); for i in 0..self.0.len() { let me = self.0.clone(); let ys = self.0[i].shrink() .map(move |x| { let mut me_too = me.clone(); me_too[i] = x.clone(); ExtendedRowKey::new(me_too) }); res = Box::new(res.chain(ys)) } res } }
use bytes::Bytes; use crate::{Error, ErrorCode}; use std::convert::TryFrom; use super::*; #[cfg(test)
y> for ExtendedRowKey { fn from(rk: RowKey) -> Self { let ext_rk = rk.into_iter() .map(|x| { ExtendedRowKeyColumn::from(x) }) .collect(); ExtendedRowKey::new(ext_rk) } } impl TryFrom<ExtendedRowKey> for RowKey { type Error = Error; fn try_from(ext_rk: ExtendedRowKey) -> Result<RowKey, Error> { let mut xs = vec![]; let r = ext_rk.into_iter() .try_for_each(|x| { match RowKeyColumn::try_from(x) { Ok(x) => { xs.push(x); Ok(()) } Err(e) => { Err(e) } } }); match r { Ok(_) => Ok(RowKey::new(xs)), Err(e) => Err(e), } } } impl From<RowKeyColumn> for ExtendedRowKeyColumn { fn from(x: RowKeyColumn) -> Self { ExtendedRowKeyColumn{ name: x.name, value: ExtendedRowKeyValue::from(x.value), } } } impl TryFrom<ExtendedRowKeyColumn> for RowKeyColumn { type Error = Error; fn try_from(x: ExtendedRowKeyColumn) -> Result<Self, Error> { Ok(RowKeyColumn{ name: x.name, value: RowKeyValue::try_from(x.value)?, }) } } impl From<RowKeyValue> for ExtendedRowKeyValue { fn from(x: RowKeyValue) -> Self { match x { RowKeyValue::Int(x) => ExtendedRowKeyValue::Int(x), RowKeyValue::Str(x) => ExtendedRowKeyValue::Str(x), RowKeyValue::Blob(x) => ExtendedRowKeyValue::Blob(x), } } } impl TryFrom<ExtendedRowKeyValue> for RowKeyValue { type Error = Error; fn try_from(value: ExtendedRowKeyValue) -> Result<Self, Self::Error> { let msg = "Cannot convert InfMin/InfMax/AutoIncr to PrimaryKeyValue"; match value { ExtendedRowKeyValue::Int(x) => Ok(RowKeyValue::Int(x)), ExtendedRowKeyValue::Str(x) => Ok(RowKeyValue::Str(x)), ExtendedRowKeyValue::Blob(x) => Ok(RowKeyValue::Blob(x)), _ => Err(Error{ code: ErrorCode::ClientUnknown, message: msg.to_string(), }) } } } #[cfg(test)] impl Arbitrary for RowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { RowKeyColumn{ name: Name::arbitrary(g), value: RowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { RowKeyColumn{ name: 
x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { RowKeyColumn{ name: name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyColumn { fn arbitrary<G: Gen>(g: &mut G) -> Self { ExtendedRowKeyColumn{ name: Name::arbitrary(g), value: ExtendedRowKeyValue::arbitrary(g), } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let name = self.name.clone(); let value = self.value.clone(); let name_shrinker = self.name .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: x, value: value.clone(), } }); let value_shrinker = self.value .shrink() .map(move |x| { ExtendedRowKeyColumn{ name: name.clone(), value: x, } }); Box::new(name_shrinker.chain(value_shrinker)) } } #[cfg(test)] impl Arbitrary for RowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let res = ExtendedRowKeyValue::arbitrary(g); match RowKeyValue::try_from(res) { Ok(x) => { return x; } Err(_) => {} } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let res = ExtendedRowKeyValue::from(self.clone()) .shrink() .map(|x| { RowKeyValue::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKeyValue { fn arbitrary<G: Gen>(g: &mut G) -> Self { match g.next_u32() % 6 { 0 => ExtendedRowKeyValue::Int(i64::arbitrary(g)), 1 => ExtendedRowKeyValue::Str(String::arbitrary(g)), 2 => ExtendedRowKeyValue::Blob(Bytes::from(Vec::<u8>::arbitrary(g))), 3 => ExtendedRowKeyValue::InfMin, 4 => ExtendedRowKeyValue::InfMax, 5 => ExtendedRowKeyValue::AutoIncr, _ => unimplemented!() } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { match self { ExtendedRowKeyValue::Int(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Int(x) }); Box::new(res) } ExtendedRowKeyValue::Str(x) => { let res = x.shrink() .map(|x| { ExtendedRowKeyValue::Str(x) }); Box::new(res) } ExtendedRowKeyValue::Blob(x) => { let res = x.to_vec().shrink() .map(|x| { 
ExtendedRowKeyValue::Blob(Bytes::from(x)) }); Box::new(res) } _ => empty_shrinker(), } } } #[cfg(test)] impl Arbitrary for RowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { loop { let ext = ExtendedRowKey::arbitrary(g); if let Ok(res) = RowKey::try_from(ext) { return res; } } } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let ext = ExtendedRowKey::from(self.clone()); let res = ext.shrink() .map(|x| { RowKey::try_from(x.clone()).unwrap() }); Box::new(res) } } #[cfg(test)] impl Arbitrary for ExtendedRowKey { fn arbitrary<G: Gen>(g: &mut G) -> Self { let mut keys = vec![]; loop { let go_on = bool::arbitrary(g); if !go_on { break; } let col = ExtendedRowKeyColumn::arbitrary(g); keys.push(col); } ExtendedRowKey::new(keys) } fn shrink(&self) -> Box<dyn Iterator<Item = Self>> { let higher = self.0.len(); let mut lower = higher / 2; let xs_fn = move || { if lower >= higher { return None; } let mid = (lower + higher) / 2; lower = mid + 1; return Some(mid); }; let for_cut_tail = self.clone(); let xs = std::iter::from_fn(xs_fn) .map(move |x| { let orig: &[ExtendedRowKeyColumn] = for_cut_tail.0.as_slice(); let mut res: Vec<ExtendedRowKeyColumn> = vec![]; res.extend_from_slice(&orig[0..x]); ExtendedRowKey::new(res) }); let mut res: Box<dyn Iterator<Item = Self>> = Box::new(xs); for i in 0..self.0.len() { let me = self.0.clone(); let ys = self.0[i].shrink() .map(move |x| { let mut me_too = me.clone(); me_too[i] = x.clone(); ExtendedRowKey::new(me_too) }); res = Box::new(res.chain(ys)) } res } }
] use quickcheck::{Arbitrary, Gen, empty_shrinker}; #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKey(pub Vec<RowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKey(pub Vec<ExtendedRowKeyColumn>); #[derive(Debug, Clone, Eq, PartialEq)] pub struct RowKeyColumn { pub name: Name, pub value: RowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct ExtendedRowKeyColumn { pub name: Name, pub value: ExtendedRowKeyValue, } #[derive(Debug, Clone, Eq, PartialEq)] pub enum RowKeyValue { Int(i64), Str(String), Blob(Bytes), } #[derive(Debug, Clone, Eq, PartialEq)] pub enum ExtendedRowKeyValue { Int(i64), Str(String), Blob(Bytes), InfMin, InfMax, AutoIncr, } impl RowKey { pub fn new(keys: Vec<RowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&RowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=RowKeyColumn> { self.0.into_iter() } } impl ExtendedRowKey { pub fn new(keys: Vec<ExtendedRowKeyColumn>) -> Self { Self(keys) } pub fn iter(&self) -> impl Iterator<Item=&ExtendedRowKeyColumn> { self.0.iter() } pub fn into_iter(self) -> impl Iterator<Item=ExtendedRowKeyColumn> { self.0.into_iter() } } impl From<RowKe
random
[ { "content": "fn peek_tag(inp: &mut dyn Buf) -> Result<super::Tag, Error> {\n\n if !inp.has_remaining() {\n\n return issue_error();\n\n }\n\n let xs = inp.bytes();\n\n assert!(!xs.is_empty());\n\n Ok(super::Tag::try_from(xs[0])?)\n\n}\n\n\n", "file_path": "src/plainbuffer/serde.rs", ...
Rust
src/timer.rs
guerinoni/yobemag
ace316106477092b354ccf77b278899685cc5ca1
use std::cell::RefCell; use std::rc::Rc; use crate::{ clock::Clock, interrupt::{InterruptFlag, InterruptKind}, memory_device::ReadWrite, }; pub struct Timer { divider: u8, tima: u8, tma: u8, tac: u8, clock1: Clock, clock2: Clock, interrupt_flag: Rc<RefCell<InterruptFlag>>, } impl Timer { pub fn new(interrupt_flag: Rc<RefCell<InterruptFlag>>) -> Self { Self { divider: 0xAC, tima: 0, tma: 0, tac: 0, clock1: Clock::new(256), clock2: Clock::new(1024), interrupt_flag, } } } impl Timer { pub fn step(&mut self, cycles: u32) { self.divider = self.divider.wrapping_add(self.clock1.step(cycles)); if (self.tac & 0x04) != 0x00 { let n = self.clock2.step(cycles); for _ in 0..n { self.tima = self.tima.wrapping_add(1); if self.tima == 0x00 { self.tima = self.tma; self.interrupt_flag .borrow_mut() .request(InterruptKind::Timer); } } } } } impl ReadWrite for Timer { fn contains(&self, address: usize) -> bool { 0xFF04 == address || 0xFF05 == address || 0xFF06 == address || 0xFF07 == address } fn read_byte(&self, address: usize) -> Result<u8, std::io::Error> { match address { 0xFF04 => Ok(self.divider), 0xFF05 => Ok(self.tima), 0xFF06 => Ok(self.tma), 0xFF07 => Ok(self.tac), _ => Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "can't write byte here", )), } } fn read_word(&self, _address: usize) -> Result<u16, std::io::Error> { unimplemented!() } fn write_byte(&mut self, address: usize, value: u8) -> Result<(), std::io::Error> { match address { 0xFF04 => { self.divider = 0; self.clock1.reset_counter(); } 0xFF05 => self.tima = value, 0xFF06 => self.tma = value, 0xFF07 => { if (self.tac & 0x03) != (value & 0x03) { self.clock2.reset_counter(); let new_period = match value & 0x03 { 0x00 => 1024, 0x01 => 16, 0x02 => 64, 0x03 => 256, _ => { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, format!("period of clock can't be {}", value), )) } }; self.clock2.set_period(new_period); self.tima = self.tma; } self.tac = value; } _ => { return Err(std::io::Error::new( 
std::io::ErrorKind::InvalidData, "can't write byte here", )) } } Ok(()) } fn write_word(&mut self, _address: usize, _value: u16) -> Result<(), std::io::Error> { unimplemented!() } }
use std::cell::RefCell; use std::rc::Rc; use crate::{ clock::Clock, interrupt::{InterruptFlag, InterruptKind}, memory_device::ReadWrite, }; pub struct Timer { divider: u8, tima: u8, tma: u8, tac: u8, clock1: Clock, clock2: Clock, interrupt_flag: Rc<RefCell<InterruptFlag>>, } impl Timer { pub fn new(interrupt_flag: Rc<RefCell<InterruptFlag>>) -> Self { Self { divider: 0xAC, tima: 0, tma: 0, tac: 0, clock1: Clock::new(256), clock2: Clock::new(1024), interrupt_flag, } } } impl Timer { pub fn step(&mut self, cycles: u32) { self.divider = self.divider.wrapping_add(self.clock1.step(cycles)); if (self.tac & 0x04) != 0x00 { let n = self.clock2.step(cycles);
} impl ReadWrite for Timer { fn contains(&self, address: usize) -> bool { 0xFF04 == address || 0xFF05 == address || 0xFF06 == address || 0xFF07 == address } fn read_byte(&self, address: usize) -> Result<u8, std::io::Error> { match address { 0xFF04 => Ok(self.divider), 0xFF05 => Ok(self.tima), 0xFF06 => Ok(self.tma), 0xFF07 => Ok(self.tac), _ => Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "can't write byte here", )), } } fn read_word(&self, _address: usize) -> Result<u16, std::io::Error> { unimplemented!() } fn write_byte(&mut self, address: usize, value: u8) -> Result<(), std::io::Error> { match address { 0xFF04 => { self.divider = 0; self.clock1.reset_counter(); } 0xFF05 => self.tima = value, 0xFF06 => self.tma = value, 0xFF07 => { if (self.tac & 0x03) != (value & 0x03) { self.clock2.reset_counter(); let new_period = match value & 0x03 { 0x00 => 1024, 0x01 => 16, 0x02 => 64, 0x03 => 256, _ => { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, format!("period of clock can't be {}", value), )) } }; self.clock2.set_period(new_period); self.tima = self.tma; } self.tac = value; } _ => { return Err(std::io::Error::new( std::io::ErrorKind::InvalidData, "can't write byte here", )) } } Ok(()) } fn write_word(&mut self, _address: usize, _value: u16) -> Result<(), std::io::Error> { unimplemented!() } }
for _ in 0..n { self.tima = self.tima.wrapping_add(1); if self.tima == 0x00 { self.tima = self.tma; self.interrupt_flag .borrow_mut() .request(InterruptKind::Timer); } } } }
function_block-function_prefix_line
[ { "content": "/// since gameboy check for non original games when loading cartridge.\n\npub fn valid_checksum(data: &[u8]) -> Result<(), std::io::Error> {\n\n let checksum: Wrapping<u8> = data[0x134..0x14D]\n\n .iter()\n\n .cloned()\n\n .fold(Wrapping(0), |acc, v| acc - Wrapping(v) - Wra...
Rust
src/worker.rs
dginev/rust-cortex-peripherals
c1eb9eb489e45ae80857cc3b00b20fe9bee6c258
use std::borrow::Cow; use std::error::Error; use std::fs::File; use std::io::{Read, Seek, SeekFrom, Write}; use std::ops::Deref; use std::path::Path; use std::thread; use std::time::Duration; use std::ffi::OsString; use tempdir::TempDir; use zmq::{Context, Message, Socket, SNDMORE}; pub trait Worker: Clone + Send { fn convert(&self, _: &Path) -> Result<File, Box<dyn Error>>; fn message_size(&self) -> usize; fn get_service(&self) -> &str; fn get_source_address(&self) -> Cow<str>; fn get_sink_address(&self) -> Cow<str>; fn pool_size(&self) -> usize { 1 } fn set_identity(&mut self, _identity: String) { unimplemented!() } fn get_identity(&self) -> &str { unimplemented!() } fn start(&mut self, limit: Option<usize>) -> Result<(), Box<dyn Error>> where Self: 'static + Sized, { let hostname = hostname::get().unwrap_or_else(|_| OsString::from("hostname")).into_string().unwrap(); match self.pool_size() { 1 => { self.set_identity(format!("{}:engrafo:1", hostname)); self.start_single(limit) } n => { let mut threads = Vec::new(); for thread in 1..=n { let thread_str = if thread < 10 { format!("0{}", thread) } else { thread.to_string() }; let identity_single = format!("{}:engrafo:{}", hostname, thread_str); let mut thread_self: Self = self.clone(); thread_self.set_identity(identity_single); threads.push(thread::spawn(move || { thread_self.start_single(limit).unwrap(); })); } for t in threads { t.join().unwrap(); } Ok(()) } } } fn start_single(&self, limit: Option<usize>) -> Result<(), Box<dyn Error>> { let mut work_counter = 0; let context_source = Context::new(); let source = context_source.socket(zmq::DEALER).unwrap(); source.set_identity(self.get_identity().as_bytes()).unwrap(); assert!(source.connect(&self.get_source_address()).is_ok()); let context_sink = Context::new(); let sink = context_sink.socket(zmq::PUSH).unwrap(); assert!(sink.connect(&self.get_sink_address()).is_ok()); loop { let input_tmpdir = TempDir::new("cortex_task").unwrap(); let (file_result, input_filepath, 
input_size, taskid) = self.receive_from_cortex(&input_tmpdir, &source); let converted_result = if file_result.is_ok() { self.convert(Path::new(&input_filepath)) } else { file_result }; self.respond_to_cortex(converted_result, input_size, &taskid, &sink); input_tmpdir.close().unwrap(); work_counter += 1; if let Some(upper_bound) = limit { if work_counter >= upper_bound { thread::sleep(Duration::new(1, 0)); break; } } } Ok(()) } fn receive_from_cortex( &self, input_tmpdir: &TempDir, source: &Socket, ) -> (Result<File, Box<dyn Error>>, String, usize, String) { let mut taskid_msg = Message::new(); let mut recv_msg = Message::new(); source.send(&self.get_service(), 0).unwrap(); source.recv(&mut taskid_msg, 0).unwrap(); let taskid = taskid_msg.as_str().unwrap(); let input_filepath = input_tmpdir.path().to_str().unwrap().to_string() + "/" + taskid + ".zip"; let mut file = File::create(input_filepath.clone()).unwrap(); let mut input_size = 0; loop { source.recv(&mut recv_msg, 0).unwrap(); if let Ok(written) = file.write(recv_msg.deref()) { input_size += written; } if !source.get_rcvmore().unwrap() { break; } } let file_result = if input_size > 0 { file.seek(SeekFrom::Start(0)).unwrap(); Ok(file) } else { Err(From::from("Input was empty.")) }; info!( target: &format!("{}:received", self.get_identity()), "task {}, read {} bytes from CorTeX.", taskid, input_size ); (file_result, input_filepath, input_size, taskid.to_string()) } fn respond_to_cortex( &self, file_result: Result<File, Box<dyn Error>>, input_size: usize, taskid: &str, sink: &Socket, ) { sink.send(self.get_identity(), SNDMORE).unwrap(); sink.send(self.get_service(), SNDMORE).unwrap(); sink.send(taskid, SNDMORE).unwrap(); match file_result { Ok(mut converted_file) => { let mut total_size = 0; loop { let message_size = self.message_size(); let mut data = vec![0; message_size]; let size = converted_file.read(&mut data).unwrap(); total_size += size; data.truncate(size); if size < message_size { sink.send(&data, 
0).unwrap(); break; } else { sink.send(&data, SNDMORE).unwrap(); } } info!( target: &format!("{}:completed", self.get_identity()), " task {}, sent {} bytes back to CorTeX.", taskid, total_size ); } Err(e) => { sink.send(&Vec::new(), 0).unwrap(); if input_size == 0 { info!( target: &format!("{}:result", self.get_identity()), "Empty input. Throttling for a minute." ); } else { info!( target: &format!("{}:result", self.get_identity()), "Conversion came back empty: {:?}. Throttling for a minute.", e ); } thread::sleep(Duration::new(60, 0)); } } } } mod echo; pub use echo::EchoWorker; mod tex_to_html; pub use tex_to_html::TexToHtmlWorker; #[cfg(feature = "engrafo")] mod engrafo; #[cfg(feature = "engrafo")] pub use engrafo::EngrafoWorker;
use std::borrow::Cow; use std::error::Error; use std::fs::File; use std::io::{Read, Seek, SeekFrom, Write}; use std::ops::Deref; use std::path::Path; use std::thread; use std::time::Duration; use std::ffi::OsString; use tempdir::TempDir; use zmq::{Context, Message, Socket, SNDMORE}; pub trait Worker: Clone + Send { fn convert(&self, _: &Path) -> Result<File, Box<dyn Error>>; fn message_size(&self) -> usize; fn get_service(&self) -> &str; fn get_source_address(&self) -> Cow<str>; fn get_sink_address(&self) -> Cow<str>; fn pool_size(&self) -> usize { 1 } fn set_identity(&mut self, _identity: String) { unimplemented!() } fn get_identity(&self) -> &str { unimplemented!() } fn start(&mut self, limit: Option<usize>) -> Result<(), Box<dyn Error>> where Self: 'static + Sized, { let hostname = hostname::get().unwrap_or_else(|_| OsString::from("hostname")).into_string().unwrap(); match self.pool_size() { 1 => { self.set_identity(format!("{}:engrafo:1", hostname)); self.start_single(limit) } n => { let mut threads = Vec::new(); for thread in 1..=n { let thread_str = if thread < 10 { format!("0{}", thread) } else { thread.to_string() }; let identity_single = format!("{}:engrafo:{}", hostname, thread_str); let mut thread_self: Self = self.clone(); thread_self.set_identity(identity_single); threads.push(thread::spawn(move || { thread_self.start_single(limit).unwrap(); })); } for t in threads { t.join().unwrap(); } Ok(()) } } } fn start_single(&self, limit: Option<usize>) -> Result<(), Box<dyn Error>> { let mut work_counter = 0; let context_source = Context::new(); let source = context_source.socket(zmq::DEALER).unwrap(); source.set_identity(self.get_identity().as_bytes()).unwrap(); assert!(source.connect(&self.get_source_address()).is_ok()); let context_sink = Context::new(); let sink = context_sink.socket(zmq::PUSH).unwrap(); assert!(sink.connect(&self.get_sink_address()).is_ok());
fn receive_from_cortex( &self, input_tmpdir: &TempDir, source: &Socket, ) -> (Result<File, Box<dyn Error>>, String, usize, String) { let mut taskid_msg = Message::new(); let mut recv_msg = Message::new(); source.send(&self.get_service(), 0).unwrap(); source.recv(&mut taskid_msg, 0).unwrap(); let taskid = taskid_msg.as_str().unwrap(); let input_filepath = input_tmpdir.path().to_str().unwrap().to_string() + "/" + taskid + ".zip"; let mut file = File::create(input_filepath.clone()).unwrap(); let mut input_size = 0; loop { source.recv(&mut recv_msg, 0).unwrap(); if let Ok(written) = file.write(recv_msg.deref()) { input_size += written; } if !source.get_rcvmore().unwrap() { break; } } let file_result = if input_size > 0 { file.seek(SeekFrom::Start(0)).unwrap(); Ok(file) } else { Err(From::from("Input was empty.")) }; info!( target: &format!("{}:received", self.get_identity()), "task {}, read {} bytes from CorTeX.", taskid, input_size ); (file_result, input_filepath, input_size, taskid.to_string()) } fn respond_to_cortex( &self, file_result: Result<File, Box<dyn Error>>, input_size: usize, taskid: &str, sink: &Socket, ) { sink.send(self.get_identity(), SNDMORE).unwrap(); sink.send(self.get_service(), SNDMORE).unwrap(); sink.send(taskid, SNDMORE).unwrap(); match file_result { Ok(mut converted_file) => { let mut total_size = 0; loop { let message_size = self.message_size(); let mut data = vec![0; message_size]; let size = converted_file.read(&mut data).unwrap(); total_size += size; data.truncate(size); if size < message_size { sink.send(&data, 0).unwrap(); break; } else { sink.send(&data, SNDMORE).unwrap(); } } info!( target: &format!("{}:completed", self.get_identity()), " task {}, sent {} bytes back to CorTeX.", taskid, total_size ); } Err(e) => { sink.send(&Vec::new(), 0).unwrap(); if input_size == 0 { info!( target: &format!("{}:result", self.get_identity()), "Empty input. Throttling for a minute." 
); } else { info!( target: &format!("{}:result", self.get_identity()), "Conversion came back empty: {:?}. Throttling for a minute.", e ); } thread::sleep(Duration::new(60, 0)); } } } } mod echo; pub use echo::EchoWorker; mod tex_to_html; pub use tex_to_html::TexToHtmlWorker; #[cfg(feature = "engrafo")] mod engrafo; #[cfg(feature = "engrafo")] pub use engrafo::EngrafoWorker;
loop { let input_tmpdir = TempDir::new("cortex_task").unwrap(); let (file_result, input_filepath, input_size, taskid) = self.receive_from_cortex(&input_tmpdir, &source); let converted_result = if file_result.is_ok() { self.convert(Path::new(&input_filepath)) } else { file_result }; self.respond_to_cortex(converted_result, input_size, &taskid, &sink); input_tmpdir.close().unwrap(); work_counter += 1; if let Some(upper_bound) = limit { if work_counter >= upper_bound { thread::sleep(Duration::new(1, 0)); break; } } } Ok(()) }
function_block-function_prefix_line
[ { "content": "/// Transform the ZIP provided by cortex into a TempDir,\n\n/// for e.g. tools such as Engrafo that aren't ZIP-capable\n\npub fn extract_zip_to_tmpdir(path: &Path, tmpdir_prefix: &str) -> Result<TempDir, Box<dyn Error>> {\n\n let input_tmpdir = TempDir::new(tmpdir_prefix)?;\n\n let unpacked_...
Rust
chain/src/action.rs
monacohq/rust-eos
a54e873baaf21db9268e9537856b04357583d8bf
use alloc::string::{String, ToString}; use alloc::{format, vec}; use alloc::vec::Vec; use core::str::FromStr; use codec::{Encode, Decode}; use crate::{ AccountName, ActionName, Asset, Digest, NumBytes, PermissionLevel, Read, SerializeData, Write }; #[cfg(feature = "std")] use serde::{ Serialize, Deserialize, de::Error as DeError, ser::{Error as SerError, Serializer, SerializeStruct} }; #[derive(Clone, Debug, Read, Write, NumBytes, PartialEq, Default, Encode, Decode, Digest, SerializeData)] #[eosio_core_root_path = "crate"] #[repr(C)] pub struct Action { pub account: AccountName, pub name: ActionName, pub authorization: Vec<PermissionLevel>, pub data: Vec<u8>, } #[cfg(feature = "std")] impl<'de> serde::Deserialize<'de> for Action { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::de::Deserializer<'de> { #[derive(Debug)] struct VisitorAction; impl<'de> serde::de::Visitor<'de> for VisitorAction { type Value = Action; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "string or a struct, but this is: {:?}", self) } fn visit_map<D>(self, mut map: D) -> Result<Self::Value, D::Error> where D: serde::de::MapAccess<'de>, { let mut account = AccountName::default(); let mut name = ActionName::default(); let mut authorization: Vec<PermissionLevel> = vec![]; let mut data: Vec<u8> = vec![]; while let Some(field) = map.next_key()? 
{ match field { "account" => { account = map.next_value()?; } "name" => { name = map.next_value()?; } "authorization" => { authorization= map.next_value()?; } "hex_data" => { let val: String= map.next_value()?; data = hex::decode(val).map_err(D::Error::custom)?; } _ => { let _: serde_json::Value = map.next_value()?; continue; } } } let action = Action { account, name, authorization, data, }; Ok(action) } } deserializer.deserialize_any(VisitorAction) } } impl Action { pub fn new(account: AccountName, name: ActionName, authorization: Vec<PermissionLevel>, data: Vec<u8>) -> Self { Action { account, name, authorization, data } } pub fn from_str<T: AsRef<str>, S: SerializeData>( account: T, name: T, authorization: Vec<PermissionLevel>, action_data: S ) -> crate::Result<Self> { let account = FromStr::from_str(account.as_ref()).map_err(crate::Error::from)?; let name = FromStr::from_str(name.as_ref()).map_err(crate::Error::from)?; let data = action_data.to_serialize_data()?; Ok(Action { account, name, authorization, data }) } pub fn transfer<T: AsRef<str>>(from: T, to: T, quantity: T, memo: T) -> crate::Result<Action> { let permission_level = PermissionLevel::from_str(from.as_ref(), "active")?; let action_transfer = ActionTransfer::from_str(from, to, quantity, memo)?; Action::from_str( "eosio.token", "transfer", vec![permission_level], action_transfer ) } } impl core::fmt::Display for Action { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "account: {}\n\ name: {}\n\ authorization: {}\n\ hex_data: {}", self.account, self.name, self.authorization.iter().map(|item| format!("{}", item)).collect::<String>(), hex::encode(&self.data), ) } } #[cfg(feature = "std")] impl serde::ser::Serialize for Action { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { let mut state = serializer.serialize_struct("Action", 5)?; state.serialize_field("account", &self.account)?; state.serialize_field("name", &self.name)?; 
state.serialize_field("authorization", &self.authorization)?; state.serialize_field("hex_data", &hex::encode(&self.data))?; match (self.account.to_string().as_str(), self.name.to_string().as_str()) { ("eosio.token", "transfer") => { let data = ActionTransfer::read(&self.data, &mut 0).map_err(|_| S::Error::custom("Action read from data failed."))?; state.serialize_field("data", &data)?; }, _ => {} } state.end() } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Debug, Read, Write, NumBytes, Default, SerializeData)] #[eosio_core_root_path = "crate"] pub struct ActionTransfer { pub from: AccountName, pub to: AccountName, pub quantity: Asset, pub memo: String, } impl ActionTransfer { pub fn new(from: AccountName, to: AccountName, quantity: Asset, memo: String) -> Self { ActionTransfer { from, to, quantity, memo } } pub fn from_str<T: AsRef<str>>(from: T, to: T, quantity: T, memo: T) -> crate::Result<Self> { let from = FromStr::from_str(from.as_ref()).map_err(crate::Error::from)?; let to = FromStr::from_str(to.as_ref()).map_err(crate::Error::from)?; let quantity = FromStr::from_str(quantity.as_ref()).map_err(crate::Error::from)?; let memo = memo.as_ref().to_string(); Ok(ActionTransfer { from, to, quantity, memo }) } } pub trait ToAction: Write + NumBytes { const NAME: u64; #[inline] fn to_action( &self, account: AccountName, authorization: Vec<PermissionLevel>, ) -> crate::Result<Action> { let mut data = vec![0_u8; self.num_bytes()]; self.write(&mut data, &mut 0).map_err(crate::Error::BytesWriteError)?; Ok(Action { account, name: Self::NAME.into(), authorization, data, }) } } #[cfg(test)] mod tests { use hex; use super::*; #[test] fn action_hash_should_work() { let action = Action { account: FromStr::from_str("eosio.token").unwrap(), name: FromStr::from_str("issue").unwrap(), authorization: vec![PermissionLevel { actor: FromStr::from_str("eosio").unwrap(), permission: FromStr::from_str("active").unwrap(), }], data: 
hex::decode("0000000000ea305500625e5a1809000004454f530000000004696e6974").unwrap(), }; let hash = action.digest().unwrap(); assert_eq!(hash, "0221f3da945a3de738cdb744f7963a6a3486097ab42436d1f4e13a1ade502bb9".into()); } #[test] fn action_transfer_serialize_should_work() { let action = Action::transfer("testa", "testb", "1.0000 EOS", "a memo").ok().unwrap(); let data = action.to_serialize_data(); assert!(data.is_ok()); let data = data.unwrap(); assert_eq!( hex::encode(data), "00a6823403ea3055000000572d3ccdcd01000000000093b1ca00000000a8ed323227000000000093b1ca000000008093b1ca102700000000000004454f53000000000661206d656d6f" ); } #[test] fn action_deserialize_should_be_ok() { let action_str = r#" { "account": "eosio.token", "name": "transfer", "authorization": [ { "actor": "junglefaucet", "permission": "active" } ], "data": { "from": "junglefaucet", "receiver": "megasuper333", "stake_net_quantity": "1.0000 EOS", "stake_cpu_quantity": "1.0000 EOS", "transfer": 1 }, "hex_data": "9015d266a9c8a67e30c6b8aa6a6c989240420f000000000004454f5300000000134e657720425020526567697374726174696f6e" }"#; let action: Result<Action, _> = serde_json::from_str(action_str); assert!(action.is_ok()); let hash = action.unwrap().digest().unwrap(); assert_eq!(hash, "eaa3b4bf845a1b41668ab7ca49fb5644fc91a6c0156dfd33911b4ec69d2e41d6".into()) } }
use alloc::string::{String, ToString}; use alloc::{format, vec}; use alloc::vec::Vec; use core::str::FromStr; use codec::{Encode, Decode}; use crate::{ AccountName, ActionName, Asset, Digest, NumBytes, PermissionLevel, Read, SerializeData, Write }; #[cfg(feature = "std")] use serde::{ Serialize, Deserialize, de::Error as DeError, ser::{Error as SerError, Serializer, SerializeStruct} }; #[derive(Clone, Debug, Read, Write, NumBytes, PartialEq, Default, Encode, Decode, Digest, SerializeData)] #[eosio_core_root_path = "crate"] #[repr(C)] pub struct Action { pub account: AccountName, pub name: ActionName, pub authorization: Vec<PermissionLevel>, pub data: Vec<u8>, } #[cfg(feature = "std")] impl<'de> serde::Deserialize<'de> for Action { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::de::Deserializer<'de> { #[derive(Debug)] struct VisitorAction; impl<'de> serde::de::Visitor<'de> for VisitorAction { type Value = Action; fn expecting(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "string or a struct, but this is: {:?}", self) } fn visit_map<D>(self, mut map: D) -> Result<Self::Value, D::Error> where D: serde::de::MapAccess<'de>, { let mut account = AccountName::default(); let mut name = ActionName::default(); let mut authorization: Vec<PermissionLevel> = vec![]; let mut data: Vec<u8> = vec![]; while let Some(field) = map.next_key()? 
{ match field { "account" => { account = map.next_value()?; } "name" => { name = map.next_value()?; } "authorization" => { authorization= map.next_value()?; } "hex_data" => { let val: String= map.next_value()?; data = hex::decode(val).map_err(D::Error::custom)?; } _ => { let _: serde_json::Value = map.next_value()?; continue; } } } let action = Action { account, name, authorization, data, }; Ok(action) } } deserializer.deserialize_any(VisitorAction) } } impl Action { pub fn new(account: AccountName, name: ActionName, authorization: Vec<PermissionLevel>, data: Vec<u8>) -> Self { Action { account, name, authorization, data } } pub fn from_str<T: AsRef<str>, S: SerializeData>( account: T, name: T, authorization: Vec<PermissionLevel>, action_data: S ) -> crate::Result<Self> { let account = FromStr::from_str(account.as_ref()).map_err(crate::Error::from)?; let name = FromStr::from_str(name.as_ref()).map_err(crate::Error::from)?; let data = action_data.to_serialize_data()?; Ok(Action { account, name, authorization, data }) } pub fn transfer<T: AsRef<str>>(from:
o.token", "transfer", vec![permission_level], action_transfer ) } } impl core::fmt::Display for Action { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!(f, "account: {}\n\ name: {}\n\ authorization: {}\n\ hex_data: {}", self.account, self.name, self.authorization.iter().map(|item| format!("{}", item)).collect::<String>(), hex::encode(&self.data), ) } } #[cfg(feature = "std")] impl serde::ser::Serialize for Action { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer { let mut state = serializer.serialize_struct("Action", 5)?; state.serialize_field("account", &self.account)?; state.serialize_field("name", &self.name)?; state.serialize_field("authorization", &self.authorization)?; state.serialize_field("hex_data", &hex::encode(&self.data))?; match (self.account.to_string().as_str(), self.name.to_string().as_str()) { ("eosio.token", "transfer") => { let data = ActionTransfer::read(&self.data, &mut 0).map_err(|_| S::Error::custom("Action read from data failed."))?; state.serialize_field("data", &data)?; }, _ => {} } state.end() } } #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Debug, Read, Write, NumBytes, Default, SerializeData)] #[eosio_core_root_path = "crate"] pub struct ActionTransfer { pub from: AccountName, pub to: AccountName, pub quantity: Asset, pub memo: String, } impl ActionTransfer { pub fn new(from: AccountName, to: AccountName, quantity: Asset, memo: String) -> Self { ActionTransfer { from, to, quantity, memo } } pub fn from_str<T: AsRef<str>>(from: T, to: T, quantity: T, memo: T) -> crate::Result<Self> { let from = FromStr::from_str(from.as_ref()).map_err(crate::Error::from)?; let to = FromStr::from_str(to.as_ref()).map_err(crate::Error::from)?; let quantity = FromStr::from_str(quantity.as_ref()).map_err(crate::Error::from)?; let memo = memo.as_ref().to_string(); Ok(ActionTransfer { from, to, quantity, memo }) } } pub trait ToAction: Write + NumBytes { const NAME: 
u64; #[inline] fn to_action( &self, account: AccountName, authorization: Vec<PermissionLevel>, ) -> crate::Result<Action> { let mut data = vec![0_u8; self.num_bytes()]; self.write(&mut data, &mut 0).map_err(crate::Error::BytesWriteError)?; Ok(Action { account, name: Self::NAME.into(), authorization, data, }) } } #[cfg(test)] mod tests { use hex; use super::*; #[test] fn action_hash_should_work() { let action = Action { account: FromStr::from_str("eosio.token").unwrap(), name: FromStr::from_str("issue").unwrap(), authorization: vec![PermissionLevel { actor: FromStr::from_str("eosio").unwrap(), permission: FromStr::from_str("active").unwrap(), }], data: hex::decode("0000000000ea305500625e5a1809000004454f530000000004696e6974").unwrap(), }; let hash = action.digest().unwrap(); assert_eq!(hash, "0221f3da945a3de738cdb744f7963a6a3486097ab42436d1f4e13a1ade502bb9".into()); } #[test] fn action_transfer_serialize_should_work() { let action = Action::transfer("testa", "testb", "1.0000 EOS", "a memo").ok().unwrap(); let data = action.to_serialize_data(); assert!(data.is_ok()); let data = data.unwrap(); assert_eq!( hex::encode(data), "00a6823403ea3055000000572d3ccdcd01000000000093b1ca00000000a8ed323227000000000093b1ca000000008093b1ca102700000000000004454f53000000000661206d656d6f" ); } #[test] fn action_deserialize_should_be_ok() { let action_str = r#" { "account": "eosio.token", "name": "transfer", "authorization": [ { "actor": "junglefaucet", "permission": "active" } ], "data": { "from": "junglefaucet", "receiver": "megasuper333", "stake_net_quantity": "1.0000 EOS", "stake_cpu_quantity": "1.0000 EOS", "transfer": 1 }, "hex_data": "9015d266a9c8a67e30c6b8aa6a6c989240420f000000000004454f5300000000134e657720425020526567697374726174696f6e" }"#; let action: Result<Action, _> = serde_json::from_str(action_str); assert!(action.is_ok()); let hash = action.unwrap().digest().unwrap(); assert_eq!(hash, "eaa3b4bf845a1b41668ab7ca49fb5644fc91a6c0156dfd33911b4ec69d2e41d6".into()) } }
T, to: T, quantity: T, memo: T) -> crate::Result<Action> { let permission_level = PermissionLevel::from_str(from.as_ref(), "active")?; let action_transfer = ActionTransfer::from_str(from, to, quantity, memo)?; Action::from_str( "eosi
function_block-random_span
[ { "content": "/// Directly encode a slice as base58\n\npub fn encode_slice(data: &[u8]) -> String {\n\n encode_iter(data.iter().cloned())\n\n}\n\n\n", "file_path": "keys/src/base58.rs", "rank": 0, "score": 273043.4003143676 }, { "content": "/// Obtain a string with the base58check encodin...
Rust
components/epaxos/src/replication/hdlreply.rs
openacid/celeritasdb
061d71e5e6305b6e3782530433ad082680c9fc14
use crate::qpaxos::replicate_reply::Phase; use crate::qpaxos::Direction; use crate::qpaxos::ReplicateReply; use crate::qpaxos::*; use crate::replica::*; use crate::replication::RpcHandlerError; pub fn check_repl_common(inst: &Instance, reply: ReplicateReply) -> Result<Phase, RpcHandlerError> { let iid = reply .instance_id .ok_or(ProtocolError::LackOf("instance_id".into()))?; if iid != inst.instance_id.unwrap() { let err = ProtocolError::NotMatch( "instance_id".into(), format!("{}", inst.instance_id.unwrap()), format!("{}", iid), ); return Err(err.into()); } let phase = reply.phase.ok_or(ProtocolError::LackOf("phase".into()))?; let last_ballot = reply.last_ballot; if inst.ballot < last_ballot { let zero = Some(BallotNum::default()); let err = RpcHandlerError::StaleBallot( inst.ballot.or(zero).unwrap(), last_ballot.or(zero).unwrap(), ); return Err(err); } Ok(phase) } pub fn handle_prepare_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } let phase = check_repl_common(&st.instance, repl)?; let frepl: PrepareReply = phase .try_into() .or(Err(ProtocolError::LackOf("phase::Prepare".into())))?; let deps = frepl .deps .as_ref() .ok_or(ProtocolError::LackOf("phase::Prepare.deps".into()))?; if frepl.deps_committed.len() < deps.len() { return Err(ProtocolError::Incomplete( "phase::Prepare.deps_committed".into(), deps.len() as i32, frepl.deps_committed.len() as i32, ) .into()); } for (i, d) in deps.iter().enumerate() { let rid = d.replica_id; if !st.prepared.contains_key(&rid) { st.prepared.insert( rid, DepStatus { ..DepStatus::default() }, ); } let pre = st.prepared.get_mut(&rid).unwrap(); if pre.replied.insert(from_rid) { pre.rdeps.push(RepliedDep { idx: d.idx, seq: d.seq, committed: frepl.deps_committed[i], }); } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Prepared, Direction::Reply, from_rid, 
st.instance.instance_id.unwrap(), )); } } Ok(()) } pub fn handle_accept_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } check_repl_common(&st.instance, repl)?; let inst = &st.instance; let status = inst.get_status(); if status != InstanceStatus::Accepted { return Err(RpcHandlerError::DelayedReply( InstanceStatus::Accepted, status, )); } if st.accepted.insert(from_rid) { } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Accepted, Direction::Reply, from_rid, st.instance.instance_id.unwrap(), )); } Ok(()) }
use crate::qpaxos::replicate_reply::Phase; use crate::qpaxos::Direction; use crate::qpaxos::ReplicateReply; use crate::qpaxos::*; use crate::replica::*; use crate::replication::RpcHandlerError; pub fn check_repl_common(inst: &Instance, reply: ReplicateReply) -> Result<Phase, RpcHandlerError> { let iid = reply .instance_id .ok_or(ProtocolError::LackOf("instance_id".into()))?; if iid != inst.instance_id.unwrap() { let err = ProtocolError::NotMatch( "instance_id".into(),
pub fn handle_prepare_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } let phase = check_repl_common(&st.instance, repl)?; let frepl: PrepareReply = phase .try_into() .or(Err(ProtocolError::LackOf("phase::Prepare".into())))?; let deps = frepl .deps .as_ref() .ok_or(ProtocolError::LackOf("phase::Prepare.deps".into()))?; if frepl.deps_committed.len() < deps.len() { return Err(ProtocolError::Incomplete( "phase::Prepare.deps_committed".into(), deps.len() as i32, frepl.deps_committed.len() as i32, ) .into()); } for (i, d) in deps.iter().enumerate() { let rid = d.replica_id; if !st.prepared.contains_key(&rid) { st.prepared.insert( rid, DepStatus { ..DepStatus::default() }, ); } let pre = st.prepared.get_mut(&rid).unwrap(); if pre.replied.insert(from_rid) { pre.rdeps.push(RepliedDep { idx: d.idx, seq: d.seq, committed: frepl.deps_committed[i], }); } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Prepared, Direction::Reply, from_rid, st.instance.instance_id.unwrap(), )); } } Ok(()) } pub fn handle_accept_reply( st: &mut ReplicationStatus, from_rid: ReplicaId, repl: ReplicateReply, ) -> Result<(), RpcHandlerError> { if let Some(ref e) = repl.err { return Err(RpcHandlerError::RemoteError(e.clone())); } check_repl_common(&st.instance, repl)?; let inst = &st.instance; let status = inst.get_status(); if status != InstanceStatus::Accepted { return Err(RpcHandlerError::DelayedReply( InstanceStatus::Accepted, status, )); } if st.accepted.insert(from_rid) { } else { return Err(RpcHandlerError::DupRpc( InstanceStatus::Accepted, Direction::Reply, from_rid, st.instance.instance_id.unwrap(), )); } Ok(()) }
format!("{}", inst.instance_id.unwrap()), format!("{}", iid), ); return Err(err.into()); } let phase = reply.phase.ok_or(ProtocolError::LackOf("phase".into()))?; let last_ballot = reply.last_ballot; if inst.ballot < last_ballot { let zero = Some(BallotNum::default()); let err = RpcHandlerError::StaleBallot( inst.ballot.or(zero).unwrap(), last_ballot.or(zero).unwrap(), ); return Err(err); } Ok(phase) }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn test_display_replicate_reply_err() {\n\n let cmn = \"last:None, iid:None, phase\";\n\n\n\n {\n\n // storage error\n\n let r = ReplicateReply {\n\n err: Some(QError {\n\n sto: Some(StorageFailure::default()),\n\n req: None,\...
Rust
src/gui/raw_control.rs
rodrigocfd/winsafe
3e9e60cb12902f51e953bcedbecd911262ed795a
use std::ptr::NonNull; use std::sync::Arc; use crate::aliases::ErrResult; use crate::co; use crate::enums::IdMenu; use crate::gui::base::Base; use crate::gui::events::{EventsView, WindowEventsAll}; use crate::gui::privs::{multiply_dpi, paint_control_borders}; use crate::gui::raw_base::RawBase; use crate::gui::resizer::{Horz, Vert}; use crate::gui::traits::{AsWindow, Child, ParentEvents, UiThread, Window}; use crate::handles::{HBRUSH, HCURSOR, HICON}; use crate::handles::HWND; use crate::structs::{POINT, SIZE, WNDCLASSEX}; use crate::various::WString; struct Obj { base: RawBase, opts: WindowControlOpts, } impl Window for Obj { fn hwnd(&self) -> HWND { self.base.hwnd() } } #[derive(Clone)] pub(in crate::gui) struct RawControl(Arc<Obj>); impl Window for RawControl { fn hwnd(&self) -> HWND { self.0.base.hwnd() } } impl AsWindow for RawControl { fn as_window(&self) -> Arc<dyn Window> { self.0.clone() } } impl UiThread for RawControl { fn run_ui_thread<F>(&self, func: F) where F: FnOnce() -> ErrResult<()>, { self.0.base.run_ui_thread(func); } } impl ParentEvents for RawControl { fn on(&self) -> &WindowEventsAll { self.0.base.on() } } impl Child for RawControl { fn ctrl_id(&self) -> u16 { self.0.opts.ctrl_id } } impl RawControl { pub(in crate::gui) fn new( parent_base_ref: &Base, opts: WindowControlOpts) -> RawControl { let (horz, vert) = (opts.horz_resize, opts.vert_resize); let wnd = Self( Arc::new( Obj { base: RawBase::new(Some(parent_base_ref)), opts, }, ), ); wnd.default_message_handlers(parent_base_ref, horz, vert); wnd } pub(in crate::gui) fn base_ref(&self) -> &Base { self.0.base.base_ref() } fn default_message_handlers(&self, parent_base_ref: &Base, horz: Horz, vert: Vert) { self.base_ref().default_message_handlers(); parent_base_ref.privileged_events_ref().wm(parent_base_ref.create_or_initdlg(), { let self2 = self.clone(); let parent_base_ptr = NonNull::from(parent_base_ref); move |_| { let opts = &self2.0.opts; let mut wcx = WNDCLASSEX::default(); let mut 
class_name_buf = WString::default(); RawBase::fill_wndclassex(self2.base_ref().parent_hinstance()?, opts.class_style, opts.class_icon, opts.class_icon, opts.class_bg_brush, opts.class_cursor, &mut wcx, &mut class_name_buf)?; let atom = self2.0.base.register_class(&mut wcx)?; let mut wnd_pos = opts.position; let mut wnd_sz = opts.size; multiply_dpi(Some(&mut wnd_pos), Some(&mut wnd_sz))?; self2.0.base.create_window( atom, None, IdMenu::Id(opts.ctrl_id), wnd_pos, wnd_sz, opts.ex_style, opts.style, )?; unsafe { parent_base_ptr.as_ref().resizer_add( parent_base_ptr.as_ref(), self2.base_ref().hwnd_ref(), horz, vert)?; } Ok(0) } }); self.on().wm_nc_paint({ let self2 = self.clone(); move |p| { paint_control_borders(*self2.base_ref().hwnd_ref(), p)?; Ok(()) } }); } } pub struct WindowControlOpts { pub class_name: String, pub class_style: co::CS, pub class_icon: HICON, pub class_cursor: HCURSOR, pub class_bg_brush: HBRUSH, pub position: POINT, pub size: SIZE, pub style: co::WS, pub ex_style: co::WS_EX, pub ctrl_id: u16, pub horz_resize: Horz, pub vert_resize: Vert, } impl Default for WindowControlOpts { fn default() -> Self { Self { class_name: "".to_owned(), class_style: co::CS::DBLCLKS, class_icon: HICON::NULL, class_cursor: HCURSOR::NULL, class_bg_brush: HBRUSH::from_sys_color(co::COLOR::WINDOW), position: POINT { x: 0, y: 0 }, size: SIZE { cx: 0, cy: 0 }, style: co::WS::CHILD | co::WS::TABSTOP | co::WS::GROUP | co::WS::VISIBLE | co::WS::CLIPCHILDREN | co::WS::CLIPSIBLINGS, ex_style: co::WS_EX::LEFT, ctrl_id: 0, horz_resize: Horz::None, vert_resize: Vert::None, } } }
use std::ptr::NonNull; use std::sync::Arc; use crate::aliases::ErrResult; use crate::co; use crate::enums::IdMenu; use crate::gui::base::Base; use crate::gui::events::{EventsView, WindowEventsAll}; use crate::gui::privs::{multiply_dpi, paint_control_borders}; use crate::gui::raw_base::RawBase; use crate::gui::resizer::{Horz, Vert}; use crate::gui::traits::{AsWindow, Child, ParentEvents, UiThread, Window}; use crate::handles::{HBRUSH, HCURSOR, HICON}; use crate::handles::HWND; use crate::structs::{POINT, SIZE, WNDCLASSEX}; use crate::various::WString; struct Obj { base: RawBase, opts: WindowControlOpts, } impl Window for Obj { fn hwnd(&self) -> HWND { self.base.hwnd() } } #[derive(Clone)] pub(in crate::gui) struct RawControl(Arc<Obj>); impl Window for RawControl { fn hwnd(&self) -> HWND { self.0.base.hwnd() } } impl AsWindow for RawControl { fn as_window(&self) -> Arc<dyn Window> { self.0.clone() } } impl UiThread for RawControl { fn run_ui_thread<F>(&self, func: F) where F: FnOnce() -> ErrResult<()>, { self.0.base.run_ui_thread(func); } } impl ParentEvents for RawControl { fn on(&self) -> &WindowEventsAll { self.0.base.on() } } impl Child for RawControl { fn ctrl_id(&self) -> u16 { self.0.opts.ctrl_id } } impl RawControl { pub(in crate::gui) fn new( parent_base_ref: &Base, opts: WindowControlOpts) -> RawControl { let (horz, vert) = (opts.horz_resize, opts.vert_resize); let wnd = Self( Arc::new( Obj { base: RawBase::new(Some(parent_base_ref)), opts, }, ), ); wnd.default_message_handlers(parent_base_ref, horz, vert); wnd } pub(in crate::gui) fn base_ref(&self) -> &Base { self.0.base.base_ref() } fn default_message_handlers(&self, parent_base_ref: &Base, horz: Horz, vert: Vert) { self.base_ref().default_message_handlers(); parent_base_ref.privileged_events_ref().wm(parent_base_ref.create_or_initdlg(), { let self2 = self.clone(); let parent_base_ptr = NonNull::from(parent_base_ref); move |_| { let opts = &self2.0.opts; let mut wcx = WNDCLASSEX::default(); let mut 
class_name_buf = WString::default(); RawBase::fill_wndclassex(self2.base_ref().parent_hinstance()?, opts.class_style, opts.class_icon, opts.class_icon, opts.class_bg_brush, opts.class_cursor, &mut wcx, &mut class_name_buf)?; let atom = self2.0.base.register_class(&mut wcx)?; let mut wnd_pos = opts.position; let mut wnd_sz = opts.size; multiply_dpi(Some(&mut wnd_pos), Some(&mut wnd_sz))?; self2.0.base.create_window( atom, None, IdMenu::Id(opts.ctrl_id), wnd_pos, wnd_sz, opts.ex_style, opts.style, )?; unsafe { parent_base_ptr.as_ref().resizer_add( parent_base_ptr.as_ref(), self2.base_ref().hwnd_ref(), horz, vert)?; } Ok(0) } }); self.on().wm_nc_paint({ let self2 = self.clone(); move |p| { paint_control_borders(*self2.base_ref().hwnd_ref(), p)?; Ok(()) } }); } } pub struct WindowControlOpts { pub class_name: String, pub class_style: co::CS, pub class_icon: HICON, pub class_cursor: HCURSOR, pub class_bg_brush: HBRUSH, pub position: POINT, pub size: SIZE, pub style: co::WS, pub ex_style: co::WS_EX, pub ctrl_id: u16, pub horz_resize: Horz, pub vert_resize: Vert, } impl Default for WindowControlOpts { fn default() -> Self { Sel
}
f { class_name: "".to_owned(), class_style: co::CS::DBLCLKS, class_icon: HICON::NULL, class_cursor: HCURSOR::NULL, class_bg_brush: HBRUSH::from_sys_color(co::COLOR::WINDOW), position: POINT { x: 0, y: 0 }, size: SIZE { cx: 0, cy: 0 }, style: co::WS::CHILD | co::WS::TABSTOP | co::WS::GROUP | co::WS::VISIBLE | co::WS::CLIPCHILDREN | co::WS::CLIPSIBLINGS, ex_style: co::WS_EX::LEFT, ctrl_id: 0, horz_resize: Horz::None, vert_resize: Vert::None, } }
function_block-function_prefixed
[ { "content": "/// [`RegisterClassEx`](https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-registerclassexw)\n\n/// function.\n\npub fn RegisterClassEx(wcx: &WNDCLASSEX) -> WinResult<ATOM> {\n\n\tmatch unsafe { user32::RegisterClassExW(wcx as *const _ as _) } {\n\n\t\t0 => Err(GetLastError()),\...
Rust
src/main.rs
almusil/portable-ecg
c39cf6aa9cf1aaee1167d2bc05cd0c895f7522c6
#![no_main] #![no_std] use lib as _; use cortex_m::singleton; use heapless::consts::U64; use heapless::spsc::{Queue, SingleCore}; use lib::display::Display; use lib::hw::{ get_calibration, init_clock, init_lcd, Adc, AdcConfig, BeatCounter, BeatTimer, FrameTimer, HwLcd, IliError, LcdInterface, }; use lib::sampler::Sampler; use lib::{BOTTOM_SCROLL_OFFSET, TOP_SCROLL_OFFSET}; use rtic::app; use stm32g0xx_hal::delay::DelayExt; use stm32g0xx_hal::dma::DmaExt; use stm32g0xx_hal::dmamux::DmaMuxIndex; use stm32g0xx_hal::gpio::{GpioExt, Speed}; use stm32g0xx_hal::time::U32Ext; #[app(device = stm32g0xx_hal::stm32, peripherals = true)] const APP: () = { struct Resources { display: Display<'static, U64, HwLcd, IliError>, sampler: Sampler<'static, U64>, frame_timer: FrameTimer, adc: Adc, beat_timer: BeatTimer, beat_counter: BeatCounter, } #[init] fn init(cx: init::Context) -> init::LateResources { let core: rtic::export::Peripherals = cx.core; let device: stm32g0xx_hal::stm32::Peripherals = cx.device; let queue: &'static mut Queue<_, _, _, _> = singleton!(: Queue<u16, U64, u8, SingleCore> = unsafe {Queue::u8_sc()}).unwrap(); let dma_buffer: &'static mut [u16; 4] = singleton!(: [u16; 4] = [0; 4]).unwrap(); let (producer, consumer) = queue.split(); let mut rcc = init_clock(device.RCC); let mut delay = core.SYST.delay(&mut rcc); let gpioa = device.GPIOA.split(&mut rcc); let gpiob = device.GPIOB.split(&mut rcc); let interface = LcdInterface::new( gpiob.pb0.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb1.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb2.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb3.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb4.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb5.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb6.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb7.into_push_pull_output().set_speed(Speed::VeryHigh), 
gpiob.pb8.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb9.into_push_pull_output().set_speed(Speed::VeryHigh), ); let lcd = init_lcd( interface, gpioa.pa4.into_push_pull_output(), gpioa.pa5.into_push_pull_output(), (TOP_SCROLL_OFFSET, BOTTOM_SCROLL_OFFSET), &mut delay, ) .unwrap(); let display = Display::new(lcd, consumer).unwrap(); let frame_timer = FrameTimer::new(device.TIM6, 30.hz(), &mut rcc); let dma = device.DMA.split(&mut rcc, device.DMAMUX); let mut ch1 = dma.ch1; ch1.mux().select_peripheral(DmaMuxIndex::ADC); let adc = Adc::new( device.ADC, device.TIM1, dma_buffer, AdcConfig::new(gpioa.pa0, ch1, 500.hz()), &mut rcc, &mut delay, ); let sampler = Sampler::new(dma_buffer, producer, get_calibration(), 4095); let beat_timer = BeatTimer::new(device.TIM7, 10_000.ms(), &mut rcc); let beat_counter = BeatCounter::new(device.TIM3, gpioa.pa6, &mut rcc); init::LateResources { display, sampler, frame_timer, adc, beat_timer, beat_counter, } } #[idle(resources = [frame_timer, adc, beat_counter, beat_timer])] fn idle(mut cx: idle::Context) -> ! 
{ cx.resources .beat_counter .lock(|counter: &mut BeatCounter| counter.start()); cx.resources.beat_timer.lock(|timer: &mut BeatTimer| { timer.start(); }); cx.resources.frame_timer.lock(|timer: &mut FrameTimer| { timer.start(); }); cx.resources.adc.lock(|adc: &mut Adc| { adc.start(); }); loop { cortex_m::asm::nop(); } } #[task(binds = DMA_CHANNEL1, priority = 2, resources = [adc, sampler])] fn dma(cx: dma::Context) { let adc: &mut Adc = cx.resources.adc; let sampler: &mut Sampler<'_, _> = cx.resources.sampler; adc.unpend(); sampler.sample::<IliError>().unwrap(); } #[task(binds = TIM6, priority = 1, resources = [display, frame_timer])] fn tim6(cx: tim6::Context) { let frame_timer: &mut FrameTimer = cx.resources.frame_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; frame_timer.unpend(); display.frame().unwrap(); } #[task(binds = TIM7, priority = 1, resources = [beat_counter, beat_timer, display])] fn tim7(cx: tim7::Context) { let counter: &mut BeatCounter = cx.resources.beat_counter; let timer: &mut BeatTimer = cx.resources.beat_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; timer.unpend(); display.update_bpm(counter.read() * 6).unwrap(); counter.reset(); } };
#![no_main] #![no_std] use lib as _; use cortex_m::singleton; use heapless::consts::U64; use heapless::spsc::{Queue, SingleCore}; use lib::display::Display; use lib::hw::{ get_calibration, init_clock, init_lcd, Adc, AdcConfig, BeatCounter, BeatTimer, FrameTimer, HwLcd, IliError, LcdInterface, }; use lib::sampler::Sampler; use lib::{BOTTOM_SCROLL_OFFSET, TOP_SCROLL_OFFSET}; use rtic::app; use stm32g0xx_hal::delay::DelayExt; use stm32g0xx_hal::dma::DmaExt; use stm32g0xx_hal::dmamux::DmaMuxIndex; use stm32g0xx_hal::gpio::{GpioExt, Speed}; use stm32g0xx_hal::time::U32Ext; #[app(device = stm32g0xx_hal::stm32, peripherals = true)] const APP: () = { struct Resources { display: Display<'static, U64, HwLcd, IliError>, sampler: Sampler<'static, U64>, frame_timer: FrameTimer, adc: Adc, beat_timer: BeatTimer, beat_counter: BeatCounter, } #[init] fn init(cx: init::Context) -> init::LateResources { let core: rtic::export::Peripherals = cx.core; let device: stm32g0xx_hal::stm32::Peripherals = cx.device; let queue: &'static mut Queue<_, _, _, _> = singleton!(: Queue<u16, U64, u8, SingleCore> = unsafe {Queue::u8_sc()}).unwrap(); let
_counter; let timer: &mut BeatTimer = cx.resources.beat_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; timer.unpend(); display.update_bpm(counter.read() * 6).unwrap(); counter.reset(); } };
dma_buffer: &'static mut [u16; 4] = singleton!(: [u16; 4] = [0; 4]).unwrap(); let (producer, consumer) = queue.split(); let mut rcc = init_clock(device.RCC); let mut delay = core.SYST.delay(&mut rcc); let gpioa = device.GPIOA.split(&mut rcc); let gpiob = device.GPIOB.split(&mut rcc); let interface = LcdInterface::new( gpiob.pb0.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb1.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb2.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb3.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb4.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb5.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb6.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb7.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb8.into_push_pull_output().set_speed(Speed::VeryHigh), gpiob.pb9.into_push_pull_output().set_speed(Speed::VeryHigh), ); let lcd = init_lcd( interface, gpioa.pa4.into_push_pull_output(), gpioa.pa5.into_push_pull_output(), (TOP_SCROLL_OFFSET, BOTTOM_SCROLL_OFFSET), &mut delay, ) .unwrap(); let display = Display::new(lcd, consumer).unwrap(); let frame_timer = FrameTimer::new(device.TIM6, 30.hz(), &mut rcc); let dma = device.DMA.split(&mut rcc, device.DMAMUX); let mut ch1 = dma.ch1; ch1.mux().select_peripheral(DmaMuxIndex::ADC); let adc = Adc::new( device.ADC, device.TIM1, dma_buffer, AdcConfig::new(gpioa.pa0, ch1, 500.hz()), &mut rcc, &mut delay, ); let sampler = Sampler::new(dma_buffer, producer, get_calibration(), 4095); let beat_timer = BeatTimer::new(device.TIM7, 10_000.ms(), &mut rcc); let beat_counter = BeatCounter::new(device.TIM3, gpioa.pa6, &mut rcc); init::LateResources { display, sampler, frame_timer, adc, beat_timer, beat_counter, } } #[idle(resources = [frame_timer, adc, beat_counter, beat_timer])] fn idle(mut cx: idle::Context) -> ! 
{ cx.resources .beat_counter .lock(|counter: &mut BeatCounter| counter.start()); cx.resources.beat_timer.lock(|timer: &mut BeatTimer| { timer.start(); }); cx.resources.frame_timer.lock(|timer: &mut FrameTimer| { timer.start(); }); cx.resources.adc.lock(|adc: &mut Adc| { adc.start(); }); loop { cortex_m::asm::nop(); } } #[task(binds = DMA_CHANNEL1, priority = 2, resources = [adc, sampler])] fn dma(cx: dma::Context) { let adc: &mut Adc = cx.resources.adc; let sampler: &mut Sampler<'_, _> = cx.resources.sampler; adc.unpend(); sampler.sample::<IliError>().unwrap(); } #[task(binds = TIM6, priority = 1, resources = [display, frame_timer])] fn tim6(cx: tim6::Context) { let frame_timer: &mut FrameTimer = cx.resources.frame_timer; let display: &mut Display<'_, _, _, _> = cx.resources.display; frame_timer.unpend(); display.frame().unwrap(); } #[task(binds = TIM7, priority = 1, resources = [beat_counter, beat_timer, display])] fn tim7(cx: tim7::Context) { let counter: &mut BeatCounter = cx.resources.beat
random
[ { "content": "pub fn init_lcd(\n\n interface: LcdInterface,\n\n lcd_rst: LcdRst,\n\n lcd_rd: LcdRD,\n\n scroller_offset: (u16, u16),\n\n delay: &mut Delay<SYST>,\n\n) -> Result<HwLcd, IliError> {\n\n let mut lcd_rd = lcd_rd;\n\n lcd_rd.set_high().unwrap();\n\n IliLcd::new(interface, lcd_...
Rust
2021/src/day17.rs
shrugalic/advent_of_code
8d18a3dbdcf847a667ab553f5441676003b9362a
use std::cmp::max; use std::ops::{AddAssign, RangeInclusive}; const INPUT: &str = include_str!("../input/day17.txt"); pub(crate) fn day17_part1() -> isize { Probe::from(INPUT).highest_point() } pub(crate) fn day17_part2() -> usize { Probe::from(INPUT).trajectory_count() } #[derive(Debug, PartialEq)] struct Probe { target_area: RangeInclusive<Pair>, } impl Probe { fn highest_point(&self) -> isize { self.target_trajectory().0 } fn trajectory_count(&self) -> usize { self.target_trajectory().1 } fn target_trajectory(&self) -> (isize, usize) { let mut max_ys = vec![]; let mut target_velocity_count = 0; let y_velocity_range = self.y_velocity_range(); for x in self.x_velocity_range() { for y in y_velocity_range.clone() { if let Some(max_y) = self.simulate_trajectory(x, y) { max_ys.push(max_y); target_velocity_count += 1; } } } (*max_ys.iter().max().unwrap(), target_velocity_count) } fn simulate_trajectory(&self, x: isize, y: isize) -> Option<isize> { let mut position = Pair::new(0, 0); let mut velocity = Pair::new(x, y); let mut max_y = isize::MIN; while !(position.is_past(&self.target_area) || velocity.cannot_reach(&self.target_area, &position)) { max_y = max(max_y, position.y); position += velocity; velocity.x = max(velocity.x - 1, 0); velocity.y -= 1; if position.is_within(&self.target_area) { return Some(max_y); } } None } fn x_velocity_range(&self) -> RangeInclusive<isize> { let mut min_x = 1; while Probe::reachable(min_x) < self.target_area.start().x { min_x += 1; } let max_x = self.target_area.end().x; min_x..=max_x } fn reachable(x: isize) -> isize { x * (x + 1) / 2 } fn y_velocity_range(&self) -> RangeInclusive<isize> { let min_y = self.target_area.start().y; let max_y = self.target_area.start().y.abs(); min_y..=max_y } } impl From<&str> for Probe { fn from(input: &str) -> Self { let range_from = |range: &str| -> RangeInclusive<isize> { let (start, end) = range.split_once("..").unwrap(); start.parse().unwrap()..=end.parse().unwrap() }; let (x, y) = input .trim() 
.trim_start_matches("target area: x=") .split_once(", y=") .map(|(x, y)| (range_from(x), range_from(y))) .unwrap(); Probe { target_area: Pair::new(*x.start(), *y.start())..=Pair::new(*x.end(), *y.end()), } } } #[derive(Debug, PartialEq, Copy, Clone)] struct Pair { x: isize, y: isize, } impl Pair { fn new(x: isize, y: isize) -> Self { Self { x, y } } fn is_past(&self, target_area: &RangeInclusive<Pair>) -> bool { self.x > target_area.end().x || self.y < target_area.start().y } fn is_within(&self, target_area: &RangeInclusive<Pair>) -> bool { (target_area.start().x..=target_area.end().x).contains(&self.x) && (target_area.start().y..=target_area.end().y).contains(&self.y) } fn cannot_reach(&self, target_area: &RangeInclusive<Pair>, position: &Pair) -> bool { self.x == 0 && position.x < target_area.start().x } } impl AddAssign for Pair { fn add_assign(&mut self, rhs: Self) { self.x += rhs.x; self.y += rhs.y; } } #[cfg(test)] mod tests { use super::*; const EXAMPLE: &str = "target area: x=20..30, y=-10..-5"; #[test] fn part1_example() { assert_eq!(45, Probe::from(EXAMPLE).highest_point()); } #[test] fn part1() { assert_eq!(5565, day17_part1()); } #[test] fn part2_example() { assert_eq!(112, Probe::from(EXAMPLE).trajectory_count()); } #[test] fn part2() { assert_eq!(2118, day17_part2()); } }
use std::cmp::max; use std::ops::{AddAssign, RangeInclusive}; const INPUT: &str = include_str!("../input/day17.txt"); pub(crate) fn day17_part1() -> isize { Probe::from(INPUT).highest_point() } pub(crate) fn day17_part2() -> usize { Probe::from(INPUT).trajectory_count() } #[derive(Debug, PartialEq)] struct Probe { target_area: RangeInclusive<Pair>, } impl Probe { fn highest_point(&self) -> isize { self.target_trajectory().0 } fn trajectory_count(&self) -> usize { self.target_trajectory().1 } fn target_trajectory(&self) -> (isize, usize) { let mut max_ys = vec![]; let mut target_velocity_count = 0; let y_velocity_range = self.y_velocity_range(); for x in self.x_velocity_range() { for y in y_velocity_range.clone() {
} } (*max_ys.iter().max().unwrap(), target_velocity_count) } fn simulate_trajectory(&self, x: isize, y: isize) -> Option<isize> { let mut position = Pair::new(0, 0); let mut velocity = Pair::new(x, y); let mut max_y = isize::MIN; while !(position.is_past(&self.target_area) || velocity.cannot_reach(&self.target_area, &position)) { max_y = max(max_y, position.y); position += velocity; velocity.x = max(velocity.x - 1, 0); velocity.y -= 1; if position.is_within(&self.target_area) { return Some(max_y); } } None } fn x_velocity_range(&self) -> RangeInclusive<isize> { let mut min_x = 1; while Probe::reachable(min_x) < self.target_area.start().x { min_x += 1; } let max_x = self.target_area.end().x; min_x..=max_x } fn reachable(x: isize) -> isize { x * (x + 1) / 2 } fn y_velocity_range(&self) -> RangeInclusive<isize> { let min_y = self.target_area.start().y; let max_y = self.target_area.start().y.abs(); min_y..=max_y } } impl From<&str> for Probe { fn from(input: &str) -> Self { let range_from = |range: &str| -> RangeInclusive<isize> { let (start, end) = range.split_once("..").unwrap(); start.parse().unwrap()..=end.parse().unwrap() }; let (x, y) = input .trim() .trim_start_matches("target area: x=") .split_once(", y=") .map(|(x, y)| (range_from(x), range_from(y))) .unwrap(); Probe { target_area: Pair::new(*x.start(), *y.start())..=Pair::new(*x.end(), *y.end()), } } } #[derive(Debug, PartialEq, Copy, Clone)] struct Pair { x: isize, y: isize, } impl Pair { fn new(x: isize, y: isize) -> Self { Self { x, y } } fn is_past(&self, target_area: &RangeInclusive<Pair>) -> bool { self.x > target_area.end().x || self.y < target_area.start().y } fn is_within(&self, target_area: &RangeInclusive<Pair>) -> bool { (target_area.start().x..=target_area.end().x).contains(&self.x) && (target_area.start().y..=target_area.end().y).contains(&self.y) } fn cannot_reach(&self, target_area: &RangeInclusive<Pair>, position: &Pair) -> bool { self.x == 0 && position.x < target_area.start().x } } impl 
AddAssign for Pair { fn add_assign(&mut self, rhs: Self) { self.x += rhs.x; self.y += rhs.y; } } #[cfg(test)] mod tests { use super::*; const EXAMPLE: &str = "target area: x=20..30, y=-10..-5"; #[test] fn part1_example() { assert_eq!(45, Probe::from(EXAMPLE).highest_point()); } #[test] fn part1() { assert_eq!(5565, day17_part1()); } #[test] fn part2_example() { assert_eq!(112, Probe::from(EXAMPLE).trajectory_count()); } #[test] fn part2() { assert_eq!(2118, day17_part2()); } }
if let Some(max_y) = self.simulate_trajectory(x, y) { max_ys.push(max_y); target_velocity_count += 1; }
if_condition
[ { "content": "fn parse_family(input: Vec<&str>) -> Vec<Vec<isize>> {\n\n // Vector of family members. This is only needed to get a unique index for each location\n\n let mut family: Vec<_> = vec![];\n\n // Happiness from each family member to all other family members (by index)\n\n let mut happiness...
Rust
crates/content-tree/src/leaf.rs
josephg/diamond-types
5e32135c760310964172adce5c4668034fe8bf35
use std::mem::take; use std::ptr::NonNull; use rle::Searchable; use super::*; impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub(crate) unsafe fn new(next: Option<NonNull<Self>>) -> Self { Self::new_with_parent(ParentPtr::Root(NonNull::dangling()), next) } pub(crate) fn new_with_parent(parent: ParentPtr<E, I, IE, LE>, next: Option<NonNull<Self>>) -> Self { Self { parent, data: [E::default(); LE], num_entries: 0, _pin: PhantomPinned, next, } } pub fn find_offset<F>(&self, mut offset: usize, stick_end: bool, entry_to_num: F) -> Option<(usize, usize)> where F: Fn(E) -> usize { for i in 0..self.len_entries() { let entry: E = self.data[i]; let entry_len = entry_to_num(entry); if offset < entry_len || (stick_end && entry_len == offset) { return Some((i, offset)); } else { offset -= entry_len } } if offset == 0 { Some((self.len_entries(), 0)) } else { None } } pub fn next_leaf(&self) -> Option<NonNull<Self>> { self.next } pub fn prev_leaf(&self) -> Option<NonNull<Self>> { self.adjacent_leaf_by_traversal(false) } pub(crate) fn adjacent_leaf_by_traversal(&self, direction_forward: bool) -> Option<NonNull<Self>> { let mut parent = self.parent; let mut node_ptr = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); loop { match parent { ParentPtr::Root(_) => { return None; }, ParentPtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let idx = node_ref.find_child(node_ptr).unwrap(); let next_idx: Option<usize> = if direction_forward { let next_idx = idx + 1; if (next_idx < IE) && node_ref.children[next_idx].is_some() { Some(next_idx) } else { None } } else if idx > 0 { Some(idx - 1) } else { None }; if let Some(next_idx) = next_idx { node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; break; } else { node_ptr = NodePtr::Internal(unsafe { NonNull::new_unchecked(node_ref as *const _ as *mut _) }); parent = node_ref.parent; } } } } loop { match node_ptr { 
NodePtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let next_idx = if direction_forward { 0 } else { let num_children = node_ref.count_children(); assert!(num_children > 0); num_children - 1 }; node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; }, NodePtr::Leaf(n) => { return Some(n); } } } } pub fn len_entries(&self) -> usize { self.num_entries as usize } pub fn as_slice(&self) -> &[E] { &self.data[0..self.num_entries as usize] } pub fn update_parent_count(&mut self, amt: I::Update) { if amt == I::Update::default() { return; } let mut child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self) }); let mut parent = self.parent; loop { match parent { ParentPtr::Root(mut r) => { unsafe { I::update_offset_by_marker(&mut r.as_mut().count, &amt); } break; }, ParentPtr::Internal(mut n) => { let idx = unsafe { n.as_mut() }.find_child(child).unwrap(); let c = &mut unsafe { n.as_mut() }.metrics[idx]; I::update_offset_by_marker(c, &amt); child = NodePtr::Internal(n); parent = unsafe { n.as_mut() }.parent; }, }; } } pub fn flush_metric_update(&mut self, marker: &mut I::Update) { let amt = take(marker); self.update_parent_count(amt); } pub fn has_root_as_parent(&self) -> bool { self.parent.is_root() } pub fn count_items(&self) -> I::Value { if I::CAN_COUNT_ITEMS { match self.parent { ParentPtr::Root(root) => { unsafe { root.as_ref() }.count } ParentPtr::Internal(node) => { let child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); let idx = unsafe { node.as_ref() }.find_child(child).unwrap(); unsafe { node.as_ref() }.metrics[idx] } } } else { let mut val = I::Value::default(); for elem in self.data[..self.num_entries as usize].iter() { I::increment_offset(&mut val, elem); } val } } pub fn splice_out(&mut self, idx: usize) { debug_assert!(idx < self.num_entries as usize); self.data.copy_within(idx + 1..self.num_entries as usize, idx); self.num_entries -= 1; } pub fn clear_all(&mut self) { self.num_entries = 
0; } pub fn unsafe_cursor_at_start(&self) -> UnsafeCursor<E, I, IE, LE> { UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, 0, 0 ) } } impl<E: ContentTraits + Searchable, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub fn find(&self, loc: E::Item) -> Option<UnsafeCursor<E, I, IE, LE>> { for i in 0..self.len_entries() { let entry: E = self.data[i]; if let Some(offset) = entry.get_offset(loc) { debug_assert!(offset < entry.len()); return Some(UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, i, offset )) } } None } }
use std::mem::take; use std::ptr::NonNull; use rle::Searchable; use super::*; impl<E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub(crate) unsafe fn new(next: Option<NonNull<Self>>) -> Self { Self::new_with_parent(ParentPtr::Root(NonNull::dangling()), next) } pub(crate) fn new_with_parent(parent: ParentPtr<E, I, IE, LE>, next: Option<NonNull<Self>>) -> Self { Self { parent, data: [E::default(); LE], num_entries: 0, _pin: PhantomPinned, next, } } pub fn find_offset<F>(&self, mut offset: usize, stick_end: bool, entry_to_num: F) -> Option<(usize, usize)> where F: Fn(E) -> usize { for i in 0..self.len_entries() { let entry: E = self.data[i]; let entry_len = entry_to_num(entry); if offset < entry_len || (stick_end && entry_len == offset) { return Some((i, offset)); } else { offset -= entry_len } } if offset == 0 { Some((self.len_entries(), 0)) } else { None } } pub fn next_leaf(&self) -> Option<NonNull<Self>> { self.next } pub fn prev_leaf(&self) -> Option<NonNull<Self>> { self.adjacent_leaf_by_traversal(false) } pub(crate) fn adjacent_leaf_by_traversal(&self, direction_forward: bool) -> Option<NonNull<Self>> { let mut parent = self.parent; let mut node_ptr = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); loop { match parent { ParentPtr::Root(_) => { return None; }, ParentPtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let idx = node_ref.find_child(node_ptr).unwrap(); let next_idx: Option<usize> = if direction_forward { let next_idx = idx + 1; if (next_idx < IE) && node_ref.children[next_idx].is_some() { Some(next_idx) } else { None } } else if idx > 0 { Some(idx - 1) } else { None }; if let Some(next_idx) = next_idx { node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; break; } else { node_ptr = NodePtr::Internal(unsafe { NonNull::new_unchecked(node_ref as *const _ as *mut _) }); parent = node_ref.parent; } } } } loop { match node_ptr { 
NodePtr::Internal(n) => { let node_ref = unsafe { n.as_ref() }; let next_idx = if direction_forward { 0 } else { let num_children = node_ref.count_children(); assert!(num_children > 0); num_children - 1 }; node_ptr = unsafe { node_ref.children[next_idx].as_ref().unwrap().as_ptr() }; }, NodePtr::Leaf(n) => { return Some(n); } } } } pub fn len_entries(&self) -> usize { self.num_entries as usize } pub fn as_slice(&self) -> &[E] { &self.data[0..self.num_entries as usize] } pub fn update_parent_count(&mut self, amt: I::Update) { if amt == I::Update::default() { return; } let mut child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self) }); let mut parent = self.parent; loop { match parent { ParentPtr::Root(mut r) => { unsafe { I::update_offset_by_marker(&mut r.as_mut().count, &amt); } break; }, ParentPtr::Internal(mut n) => { let idx = unsafe { n.as_mut() }.find_child(child).unwrap(); let c = &mut unsafe { n.as_mut() }.metrics[idx]; I::update_offset_by_marker(c, &amt); child = NodePtr::Internal(n); parent = unsafe { n.as_mut() }.parent; }, }; } } pub fn flush_metric_update(&mut self, marker: &mut I::Update) { let amt = take(marker); self.update_parent_count(amt); } pub fn has_root_as_parent(&self) -> bool { self.parent.is_root() } pub fn count_items(&self) -> I::Value { if I::CAN_COUNT_ITEMS { match self.parent { ParentPtr::Root(root) => { unsafe { root.as_ref() }.count } ParentPtr::Internal(node) => { let child = NodePtr::Leaf(unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }); let idx = unsafe { node.as_ref() }.find_child(child).unwrap(); unsafe { node.as_ref() }.metrics[idx] } } } else { let mut val = I::Value::default(); for elem in self.data[..self.num_entries as usize].iter() { I::increment_offset(&mut val, elem); } val } } pub fn splice_out(&mut self, idx: usize) { debug_assert!(idx < self.num_entries as usize); self.data.copy_within(idx + 1..self.num_entries as usize, idx); self.num_entries -= 1; } pub fn clear_all(&mut self) { self.num_entries = 
0; } pub fn unsafe_cursor_at_start(&self) -> UnsafeCursor<E, I, IE, LE> { UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, 0, 0 ) } } impl<E: ContentTraits + Searchable, I: TreeMetrics<E>, const IE: usize, const LE: usize> NodeLeaf<E, I, IE, LE> { pub fn find(&self, loc: E::Item) -> Option<UnsafeCursor<E, I, IE, LE>> { for i in 0..self.len_entries() { let entry: E = self.data[i]; if let Some(offset) = entry.get_offset(loc) { debug_assert!(offset < entry.len()); return
} } None } }
Some(UnsafeCursor::new( unsafe { NonNull::new_unchecked(self as *const _ as *mut _) }, i, offset ))
call_expression
[ { "content": "struct DebugContent<'a, E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize>(&'a ContentTreeRaw<E, I, IE, LE>);\n\n\n\nimpl<'a, E: ContentTraits, I: TreeMetrics<E>, const IE: usize, const LE: usize> Debug for DebugContent<'a, E, I, IE, LE> {\n\n fn fmt(&self, f: &mut Formatter...
Rust
mythril/src/multiboot2.rs
barkera/mythril
b7b45fe08a05c5b66317bbf13fdd4fcf10c428d2
use crate::acpi; use crate::boot_info::{self, BootInfo}; use crate::global_alloc; use crate::memory::HostPhysAddr; use alloc::vec::Vec; extern "C" { pub static MULTIBOOT2_HEADER_START: u32; pub static MULTIBOOT2_HEADER_END: u32; } pub fn header_location() -> (u32, u32) { unsafe { (MULTIBOOT2_HEADER_START, MULTIBOOT2_HEADER_END) } } fn setup_global_alloc_region(info: &multiboot2::BootInformation) -> (u64, u64) { let mem_tag = info .memory_map_tag() .expect("Missing multiboot memory map tag"); let available = mem_tag .memory_areas() .map(|area| (area.start_address(), area.end_address())); debug!("Modules:"); let modules = info.module_tags().map(|module| { debug!( " 0x{:x}-0x{:x}", module.start_address(), module.end_address() ); (module.start_address() as u64, module.end_address() as u64) }); let sections_tag = info .elf_sections_tag() .expect("Missing multiboot elf sections tag"); debug!("Elf sections:"); let sections = sections_tag.sections().map(|section| { debug!( " 0x{:x}-0x{:x}", section.start_address(), section.end_address() ); (section.start_address(), section.end_address()) }); let multiboot_info = [(info.start_address() as u64, info.end_address() as u64)]; debug!( "Multiboot Info: 0x{:x}-0x{:x}", info.start_address(), info.end_address() ); let excluded = modules .chain(sections) .chain(multiboot_info.iter().copied()); let max_excluded = excluded .max_by(|left, right| left.1.cmp(&right.1)) .expect("No max excluded region"); let largest_region = available .max_by(|left, right| (left.1 - left.0).cmp(&(right.1 - right.0))) .expect("No largest region"); if largest_region.0 > max_excluded.1 { largest_region } else if max_excluded.1 > largest_region.0 && max_excluded.1 < largest_region.1 { (max_excluded.1, largest_region.1) } else { panic!("Unable to find suitable global alloc region") } } pub fn early_init_multiboot2(addr: HostPhysAddr) -> BootInfo { let multiboot_info = unsafe { multiboot2::load(addr.as_u64() as usize) }; let alloc_region = 
setup_global_alloc_region(&multiboot_info); info!( "Allocating from 0x{:x}-{:x}", alloc_region.0, alloc_region.1 ); unsafe { global_alloc::Allocator::allocate_from(alloc_region.0, alloc_region.1); } let modules = multiboot_info .module_tags() .map(|tag| boot_info::BootModule { address: HostPhysAddr::new(tag.start_address() as u64), size: (tag.end_address() - tag.start_address()) as usize, identifier: Some(tag.name().into()), }) .collect::<Vec<_>>(); let rsdp = multiboot_info .rsdp_v2_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v2| acpi::rsdp::RSDP::V2 { xsdt_addr: rsdp_v2.xsdt_address() as u64, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v2.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) .or_else(|| { multiboot_info .rsdp_v1_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v1| acpi::rsdp::RSDP::V1 { rsdt_addr: rsdp_v1.rsdt_address() as u32, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v1.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) }); BootInfo { modules: modules, rsdp: rsdp, } }
use crate::acpi; use crate::boot_info::{self, BootInfo}; use crate::global_alloc; use crate::memory::HostPhysAddr; use alloc::vec::Vec; extern "C" { pub static MULTIBOOT2_HEADER_START: u32; pub static MULTIBOOT2_HEADER_END: u32; } pub fn header_location() -> (u32, u32) { unsafe { (MULTIBOOT2_HEADER_START, MULTIBOOT2_HEADER_END) } } fn setup_global_alloc_region(info: &multiboot2::BootInformation) -> (u64, u64) { let mem_tag = info .memory_map_tag() .expect("Missing multiboot memory map tag"); let available = mem_tag .memory_areas() .map(|area| (area.start_address(), area.end_address())); debug!("Modules:"); let modules = info.module_tags().map(|module| { debug!( " 0x{:x}-0x{:x}", module.start_address(), module.end_address() ); (module.start_address() as u64, module.end_address() as u64) });
pub fn early_init_multiboot2(addr: HostPhysAddr) -> BootInfo { let multiboot_info = unsafe { multiboot2::load(addr.as_u64() as usize) }; let alloc_region = setup_global_alloc_region(&multiboot_info); info!( "Allocating from 0x{:x}-{:x}", alloc_region.0, alloc_region.1 ); unsafe { global_alloc::Allocator::allocate_from(alloc_region.0, alloc_region.1); } let modules = multiboot_info .module_tags() .map(|tag| boot_info::BootModule { address: HostPhysAddr::new(tag.start_address() as u64), size: (tag.end_address() - tag.start_address()) as usize, identifier: Some(tag.name().into()), }) .collect::<Vec<_>>(); let rsdp = multiboot_info .rsdp_v2_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v2| acpi::rsdp::RSDP::V2 { xsdt_addr: rsdp_v2.xsdt_address() as u64, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v2.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) .or_else(|| { multiboot_info .rsdp_v1_tag() .filter(|tag| tag.checksum_is_valid()) .map(|rsdp_v1| acpi::rsdp::RSDP::V1 { rsdt_addr: rsdp_v1.rsdt_address() as u32, oemid: { let mut oemid = [0u8; 6]; if let Some(id) = rsdp_v1.oem_id() { oemid.copy_from_slice(id.as_bytes()); } oemid }, }) }); BootInfo { modules: modules, rsdp: rsdp, } }
let sections_tag = info .elf_sections_tag() .expect("Missing multiboot elf sections tag"); debug!("Elf sections:"); let sections = sections_tag.sections().map(|section| { debug!( " 0x{:x}-0x{:x}", section.start_address(), section.end_address() ); (section.start_address(), section.end_address()) }); let multiboot_info = [(info.start_address() as u64, info.end_address() as u64)]; debug!( "Multiboot Info: 0x{:x}-0x{:x}", info.start_address(), info.end_address() ); let excluded = modules .chain(sections) .chain(multiboot_info.iter().copied()); let max_excluded = excluded .max_by(|left, right| left.1.cmp(&right.1)) .expect("No max excluded region"); let largest_region = available .max_by(|left, right| (left.1 - left.0).cmp(&(right.1 - right.0))) .expect("No largest region"); if largest_region.0 > max_excluded.1 { largest_region } else if max_excluded.1 > largest_region.0 && max_excluded.1 < largest_region.1 { (max_excluded.1, largest_region.1) } else { panic!("Unable to find suitable global alloc region") } }
function_block-function_prefix_line
[ { "content": "/// Get this current core's sequential index\n\npub fn read_core_idx() -> u64 {\n\n unsafe {\n\n let value: u64;\n\n llvm_asm!(\"mov [%fs], %rax\"\n\n : \"={rax}\"(value)\n\n ::: \"volatile\");\n\n value >> 3 // Shift away the RPL and TI bi...
Rust
bin/wayland/scanner/src/ast.rs
PowerOlive/garnet
16b5b38b765195699f41ccb6684cc58dd3512793
use crate::parser::{self, ArgKind}; #[derive(Debug)] pub struct Protocol { pub name: String, pub copyright: Option<String>, pub description: Option<Description>, pub interfaces: Vec<Interface>, } #[derive(Debug)] pub struct Description { pub summary: String, pub description: String, } #[derive(Debug)] pub struct Interface { pub name: String, pub version: u32, pub description: Option<Description>, pub requests: Vec<Message>, pub events: Vec<Message>, pub enums: Vec<Enum>, } #[derive(Debug)] pub struct Message { pub name: String, pub since: u32, pub request_type: Option<String>, pub description: Option<Description>, pub args: Vec<Arg>, } #[derive(Debug)] pub struct Arg { pub name: String, pub kind: ArgKind, pub summary: Option<String>, pub interface: Option<String>, pub nullable: bool, pub enum_type: Option<String>, pub description: Option<Description>, } #[derive(Debug)] pub struct Enum { pub name: String, pub since: u32, pub bitfield: bool, pub description: Option<Description>, pub entries: Vec<EnumEntry>, } #[derive(Debug)] pub struct EnumEntry { pub name: String, pub value: i64, pub summary: Option<String>, pub since: u32, pub description: Option<Description>, } pub type AstError = String; pub type AstResult<T> = Result<T, AstError>; fn build_protocol(node: parser::ParseNode) -> AstResult<Protocol> { if let parser::ParseElement::Protocol { name } = node.element { let mut copyright: Option<String> = None; let mut description: Option<Description> = None; let mut interfaces: Vec<Interface> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Copyright => copyright = Some(build_copyright(child)?), parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Interface { .. 
} => interfaces.push(build_interface(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Protocol { name: name, copyright, description, interfaces, }) } else { Err("Unexpected Element; expected Protocol".to_owned()) } } fn build_copyright(node: parser::ParseNode) -> AstResult<String> { if let Some(copyright) = node.body { Ok(copyright) } else { Err(format!("Unexpected node {:?}", node)) } } fn build_description(node: parser::ParseNode) -> AstResult<Description> { if let parser::ParseElement::Description { summary } = node.element { Ok(Description { summary, description: node.body.unwrap_or("".to_owned()), }) } else { Err("Invalid node".to_owned()) } } fn build_interface(node: parser::ParseNode) -> AstResult<Interface> { if let parser::ParseElement::Interface { name, version } = node.element { let mut description: Option<Description> = None; let mut requests: Vec<Message> = Vec::new(); let mut events: Vec<Message> = Vec::new(); let mut enums: Vec<Enum> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Request { .. } => requests.push(build_request(child)?), parser::ParseElement::Event { .. } => events.push(build_event(child)?), parser::ParseElement::Enum { .. } => enums.push(build_enum(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Interface { name, version, description, requests, events, enums, }) } else { Err("Invalid node".to_owned()) } } fn build_request(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Request { name, since, request_type, } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. 
} => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, request_type, description, args, }) } else { Err("Invalid node".to_owned()) } } fn build_event(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Event { name, since } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. } => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, description, args, request_type: None, }) } else { Err("Invalid node".to_owned()) } } fn build_arg(node: parser::ParseNode) -> AstResult<Vec<Arg>> { if let parser::ParseElement::Arg { name, kind, summary, interface, nullable, enum_type, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) 
} _ => return Err("Unsupported".to_owned()), } } let arg = Arg { name, kind, summary, interface, nullable, enum_type, description, }; if arg.kind == ArgKind::NewId && arg.interface.is_none() { Ok(vec![ Arg { name: format!("{}_interface_name", arg.name), kind: ArgKind::String, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, Arg { name: format!("{}_interface_version", arg.name), kind: ArgKind::Uint, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, arg, ]) } else { Ok(vec![arg]) } } else { Err("Invalid node".to_owned()) } } fn build_enum(node: parser::ParseNode) -> AstResult<Enum> { if let parser::ParseElement::Enum { name, since, bitfield, } = node.element { let mut description: Option<Description> = None; let mut entries: Vec<EnumEntry> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::EnumEntry { .. } => entries.push(build_enum_entry(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Enum { name, since, bitfield, description, entries, }) } else { Err("Invalid node".to_owned()) } } fn build_enum_entry(node: parser::ParseNode) -> AstResult<EnumEntry> { if let parser::ParseElement::EnumEntry { name, value, summary, since, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } _ => return Err("Unsupported".to_owned()), } } Ok(EnumEntry { name, value, summary, since, description, }) } else { Err("Invalid node".to_owned()) } } impl Protocol { pub fn from_parse_tree(parse_tree: parser::ParseNode) -> AstResult<Protocol> { build_protocol(parse_tree) } }
use crate::parser::{self, ArgKind}; #[derive(Debug)] pub struct Protocol { pub name: String, pub copyright: Option<String>, pub description: Option<Description>, pub interfaces: Vec<Interface>, } #[derive(Debug)] pub struct Description { pub summary: String, pub description: String, } #[derive(Debug)] pub struct Interface { pub name: String, pub version: u32, pub description: Option<Description>, pub requests: Vec<Message>, pub events: Vec<Message>, pub enums: Vec<Enum>, } #[derive(Debug)] pub struct Message { pub name: String, pub since: u32, pub request_type: Option<String>, pub description: Option<Description>, pub args: Vec<Arg>, } #[derive(Debug)] pub struct Arg { pub name: String, pub kind: ArgKind, pub summary: Option<String>, pub interface: Option<String>, pub nullable: bool, pub enum_type: Option<String>, pub description: Option<Description>, } #[derive(Debug)] pub struct Enum { pub name: String, pub since: u32, pub bitfield: bool, pub description: Option<Description>, pub entries: Vec<EnumEntry>, } #[derive(Debug)] pub struct EnumEntry { pub name: String, pub value: i64, pub summary: Option<String>, pub since: u32, pub description: Option<Description>, } pub type AstError = String; pub type AstResult<T> = Result<T, AstError>; fn build_protocol(node: parser::ParseNode) -> AstResult<Protocol> { if let parser::ParseElement::Protocol { name } = node.element { let mut copyright: Option<String> = None; let mut description: Option<Description> = None; let mut interfaces: Vec<Interface> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Copyright => copyright = Some(build_copyright(child)?), parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Interface { .. 
} => interfaces.push(build_interface(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Protocol { name: name, copyright, description, interfaces, }) } else { Err("Unexpected Element; expected Protocol".to_owned()) } } fn build_copyright(node: parser::ParseNode) -> AstResult<String> { if let Some(copyright) = node.body { Ok(copyright) } else { Err(format!("Unexpected node {:?}", node)) } } fn build_description(node: parser::ParseNode) -> AstResult<Description> { if let parser::ParseElement::Description { summary } = node.element { Ok(Description { summary, description: node.body.unwrap_or("".to_owned()), }) } else { Err("Invalid node".to_owned()) } } fn build_interface(node: parser::ParseNode) -> AstResult<Interface> { if let parser::ParseElement::Interface { name, version } = node.element { let mut description: Option<Description> = None; let mut requests: Vec<Message> = Vec::new(); let mut events: Vec<Message> = Vec::new(); let mut enums: Vec<Enum> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Request { .. } => requests.push(build_request(child)?), parser::ParseElement::Event { .. } => events.push(build_event(child)?), parser::ParseElement::Enum { .. } => enums.push(build_enum(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Interface { name, version, description, requests, events, enums, }) } else { Err("Invalid node".to_owned()) } } fn build_request(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Request { name, since, request_type, } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. 
} => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, request_type, description, args, }) } else { Err("Invalid node".to_owned()) } } fn build_event(node: parser::ParseNode) -> AstResult<Message> { if let parser::ParseElement::Event { name, since } = node.element { let mut description: Option<Description> = None; let mut args: Vec<Arg> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::Arg { .. } => args.append(&mut build_arg(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Message { name, since, description, args, request_type: None, }) } else { Err("Invalid node".to_owned()) } }
fn build_enum(node: parser::ParseNode) -> AstResult<Enum> { if let parser::ParseElement::Enum { name, since, bitfield, } = node.element { let mut description: Option<Description> = None; let mut entries: Vec<EnumEntry> = Vec::new(); for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } parser::ParseElement::EnumEntry { .. } => entries.push(build_enum_entry(child)?), _ => return Err("Unsupported".to_owned()), } } Ok(Enum { name, since, bitfield, description, entries, }) } else { Err("Invalid node".to_owned()) } } fn build_enum_entry(node: parser::ParseNode) -> AstResult<EnumEntry> { if let parser::ParseElement::EnumEntry { name, value, summary, since, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } _ => return Err("Unsupported".to_owned()), } } Ok(EnumEntry { name, value, summary, since, description, }) } else { Err("Invalid node".to_owned()) } } impl Protocol { pub fn from_parse_tree(parse_tree: parser::ParseNode) -> AstResult<Protocol> { build_protocol(parse_tree) } }
fn build_arg(node: parser::ParseNode) -> AstResult<Vec<Arg>> { if let parser::ParseElement::Arg { name, kind, summary, interface, nullable, enum_type, } = node.element { let mut description: Option<Description> = None; for child in node.children { match &child.element { parser::ParseElement::Description { .. } => { description = Some(build_description(child)?) } _ => return Err("Unsupported".to_owned()), } } let arg = Arg { name, kind, summary, interface, nullable, enum_type, description, }; if arg.kind == ArgKind::NewId && arg.interface.is_none() { Ok(vec![ Arg { name: format!("{}_interface_name", arg.name), kind: ArgKind::String, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, Arg { name: format!("{}_interface_version", arg.name), kind: ArgKind::Uint, summary: None, interface: None, nullable: false, enum_type: None, description: None, }, arg, ]) } else { Ok(vec![arg]) } } else { Err("Invalid node".to_owned()) } }
function_block-full_function
[]
Rust
server/src/sonos_endpoint.rs
papertigers/home-api
d9a430425ebfbc730b29683381f96e00317d2875
use crate::AppCtx; use dropshot::{endpoint, ApiDescription, HttpError, HttpResponseOk, RequestContext, TypedBody}; use futures::stream::StreamExt; use futures_util::stream::FuturesUnordered; use schemars::JsonSchema; use serde::Deserialize; use sonor::{rupnp::Device, Speaker}; use std::sync::Arc; use std::time::Duration; #[derive(Deserialize, JsonSchema)] struct SonosArgs { rooms: Vec<String>, volume: Option<u16>, sleep_timer: Option<u16>, } async fn goodnight(speaker: &sonor::Speaker, sleep_timer: Option<u16>) -> Result<(), sonor::Error> { speaker.stop().await?; let _ = speaker.clear_queue().await; speaker .queue_next("file:///jffs/settings/savedqueues.rsq#23", "") .await?; speaker.set_repeat_mode(sonor::RepeatMode::All).await?; speaker.set_shuffle(true).await?; if let Some(t) = sleep_timer.map(|v| v.clamp(0, 2 * 60 * 60)) { speaker.set_sleep_timer(t as u64).await?; } speaker.play().await } async fn group_rooms( rctx: Arc<RequestContext<AppCtx>>, rooms: &[String], volume: Option<u16>, ) -> Result<Option<Speaker>, sonor::Error> { let first = match rooms.first() { Some(c) => c, None => return Ok(None), }; if let Some(coordinator) = sonor::find(first, Duration::from_secs(3)).await? { let find = coordinator .zone_group_state() .await? 
.into_iter() .flat_map(|(_, v)| v) .filter(|i| rooms[1..].iter().any(|n| n.eq_ignore_ascii_case(i.name()))) .map(|info| { let url = info.location().parse(); async { let device = Device::from_url(url?).await?; Ok(Speaker::from_device(device)) } }) .collect::<FuturesUnordered<_>>() .collect::<Vec<Result<Option<Speaker>, sonor::Error>>>() .await; let speakers: Vec<Speaker> = find.into_iter().filter_map(Result::ok).flatten().collect(); let default_volume = coordinator.volume().await?; let volume = volume.unwrap_or(default_volume); coordinator.leave().await?; coordinator.set_volume(volume).await?; for speaker in speakers { speaker.leave().await?; speaker.set_volume(volume).await?; if let Err(e) = speaker.join(first).await { warn!( rctx.log, "failed to join {} to group: {}", speaker.name().await?, e ) } } info!(rctx.log, "joined rooms: {:?}", rooms); return Ok(Some(coordinator)); }; Ok(None) } #[endpoint { method = POST, path = "/sonos/sleep", }] async fn sleep( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); if let Some(speaker) = group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))? 
{ goodnight(&speaker, body.sleep_timer) .await .map_err(|e| HttpError::for_unavail(None, format!("{}", e)))?; } else { return Err(HttpError::for_bad_request( None, format!("verify sonos speakers: [{:?}]", &body.rooms), )); } info!(rctx.log, "sleep mode initiated for: {:?}", &body.rooms); Ok(HttpResponseOk(())) } #[endpoint { method = POST, path = "/sonos/group", }] async fn group( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))?; Ok(HttpResponseOk(())) } pub fn mount(api: &mut ApiDescription<AppCtx>) { api.register(sleep).expect("failed to mount sleep"); api.register(group).expect("failed to mount group"); }
use crate::AppCtx; use dropshot::{endpoint, ApiDescription, HttpError, HttpResponseOk, RequestContext, TypedBody}; use futures::stream::StreamExt; use futures_util::stream::FuturesUnordered; use schemars::JsonSchema; use serde::Deserialize; use sonor::{rupnp::Device, Speaker}; use std::sync::Arc; use std::time::Duration; #[derive(Deserialize, JsonSchema)] struct SonosArgs { rooms: Vec<String>, volume: Option<u16>, sleep_timer: Option<u16>, } async fn goodnight(speaker: &sonor::Speaker, sleep_timer: Option<u16>) -> Result<(), sonor::Error> { speaker.stop().await?; let _ = speaker.clear_queue().await; speaker .queue_next("file:///jffs/settings/savedqueues.rsq#23", "") .await?; speaker.set_repeat_mode(sonor::RepeatMode::All).await?; speaker.set_shuffle(true).await?; if let Some(t) = sleep_timer.map(|v| v.clamp(0, 2 * 60 * 60)) { speaker.set_sleep_timer(t as u64).await?; } speaker.play().await }
#[endpoint { method = POST, path = "/sonos/sleep", }] async fn sleep( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); if let Some(speaker) = group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))? { goodnight(&speaker, body.sleep_timer) .await .map_err(|e| HttpError::for_unavail(None, format!("{}", e)))?; } else { return Err(HttpError::for_bad_request( None, format!("verify sonos speakers: [{:?}]", &body.rooms), )); } info!(rctx.log, "sleep mode initiated for: {:?}", &body.rooms); Ok(HttpResponseOk(())) } #[endpoint { method = POST, path = "/sonos/group", }] async fn group( rctx: Arc<RequestContext<AppCtx>>, body_param: TypedBody<SonosArgs>, ) -> Result<HttpResponseOk<()>, HttpError> { let body = body_param.into_inner(); let context = Arc::clone(&rctx); group_rooms(context, &body.rooms, body.volume) .await .map_err(|e| HttpError::for_internal_error(format!("failed sonos request: {}", e)))?; Ok(HttpResponseOk(())) } pub fn mount(api: &mut ApiDescription<AppCtx>) { api.register(sleep).expect("failed to mount sleep"); api.register(group).expect("failed to mount group"); }
async fn group_rooms( rctx: Arc<RequestContext<AppCtx>>, rooms: &[String], volume: Option<u16>, ) -> Result<Option<Speaker>, sonor::Error> { let first = match rooms.first() { Some(c) => c, None => return Ok(None), }; if let Some(coordinator) = sonor::find(first, Duration::from_secs(3)).await? { let find = coordinator .zone_group_state() .await? .into_iter() .flat_map(|(_, v)| v) .filter(|i| rooms[1..].iter().any(|n| n.eq_ignore_ascii_case(i.name()))) .map(|info| { let url = info.location().parse(); async { let device = Device::from_url(url?).await?; Ok(Speaker::from_device(device)) } }) .collect::<FuturesUnordered<_>>() .collect::<Vec<Result<Option<Speaker>, sonor::Error>>>() .await; let speakers: Vec<Speaker> = find.into_iter().filter_map(Result::ok).flatten().collect(); let default_volume = coordinator.volume().await?; let volume = volume.unwrap_or(default_volume); coordinator.leave().await?; coordinator.set_volume(volume).await?; for speaker in speakers { speaker.leave().await?; speaker.set_volume(volume).await?; if let Err(e) = speaker.join(first).await { warn!( rctx.log, "failed to join {} to group: {}", speaker.name().await?, e ) } } info!(rctx.log, "joined rooms: {:?}", rooms); return Ok(Some(coordinator)); }; Ok(None) }
function_block-full_function
[ { "content": "type Result<T> = std::result::Result<T, error::AylaError>;\n\n\n\npub(crate) struct AylaClient {\n\n client: Client,\n\n region: Region,\n\n email: String,\n\n password: String,\n\n access_token: Option<String>,\n\n refresh_token: Option<String>,\n\n auth_expiration: Option<St...
Rust
crates/interledger-settlement/src/core/engines_api.rs
KevinWMatthews/interledger-rs
ec3c745e1af51b9aa83ac29b64111d2e9cbb3fbc
use super::{ get_hash_of, idempotency::{make_idempotent_call, IdempotentStore}, types::{Quantity, SettlementEngine}, }; use bytes::Bytes; use http::StatusCode; use hyper::Response; use interledger_errors::default_rejection_handler; use serde::{Deserialize, Serialize}; use warp::Filter; #[derive(Serialize, Deserialize, Debug, Clone, Hash)] pub struct CreateAccount { id: String, } async fn create_engine_account<E, S>( idempotency_key: Option<String>, account_id: CreateAccount, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.create_account(account_id.id), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("CREATED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn delete_engine_account<E, S>( account_id: String, idempotency_key: Option<String>, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.delete_account(account_id), input_hash, idempotency_key, StatusCode::NO_CONTENT, Bytes::from("DELETED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_send_money<E, S>( id: String, idempotency_key: Option<String>, quantity: Quantity, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, quantity); let input_hash = 
get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.send_money(id, quantity), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("EXECUTED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_receive_message<E, S>( id: String, idempotency_key: Option<String>, message: Bytes, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, message); let input_hash = get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.receive_message(id, message.to_vec()), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("RECEIVED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } pub fn create_settlement_engine_filter<E, S>( engine: E, store: S, ) -> warp::filters::BoxedFilter<(impl warp::Reply,)> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let with_store = warp::any().map(move || store.clone()); let with_engine = warp::any().map(move || engine.clone()); let idempotency = warp::header::optional::<String>("idempotency-key"); let account_id = warp::path("accounts").and(warp::path::param::<String>()); let accounts = warp::post() .and(warp::path("accounts")) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(create_engine_account); let del_account = warp::delete() .and(account_id) .and(warp::path::end()) .and(idempotency) .and(with_engine.clone()) .and(with_store.clone()) .and_then(delete_engine_account); let settlement_endpoint = account_id.and(warp::path("settlements")); let settlements = warp::post() 
.and(settlement_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(engine_send_money); let messages_endpoint = account_id.and(warp::path("messages")); let messages = warp::post() .and(messages_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::bytes()) .and(with_engine) .and(with_store) .and_then(engine_receive_message); accounts .or(del_account) .or(settlements) .or(messages) .recover(default_rejection_handler) .boxed() } #[cfg(test)] mod tests { use super::*; use crate::core::idempotency::IdempotentData; use crate::core::types::{ApiResponse, ApiResult}; use async_trait::async_trait; use bytes::Bytes; use http::StatusCode; use interledger_errors::*; use parking_lot::RwLock; use serde_json::{json, Value}; use std::collections::HashMap; use std::sync::Arc; fn check_error_status_and_message(response: Response<Bytes>, status_code: u16, message: &str) { let err: Value = serde_json::from_slice(response.body()).unwrap(); assert_eq!(response.status().as_u16(), status_code); assert_eq!(err.get("status").unwrap(), status_code); assert_eq!(err.get("detail").unwrap(), message); } #[derive(Clone)] struct TestEngine; #[derive(Debug, Clone)] pub struct TestAccount; #[derive(Clone)] pub struct TestStore { #[allow(clippy::all)] pub cache: Arc<RwLock<HashMap<String, IdempotentData>>>, pub cache_hits: Arc<RwLock<u64>>, } fn test_store() -> TestStore { TestStore { cache: Arc::new(RwLock::new(HashMap::new())), cache_hits: Arc::new(RwLock::new(0)), } } #[async_trait] impl IdempotentStore for TestStore { async fn load_idempotent_data( &self, idempotency_key: String, ) -> Result<Option<IdempotentData>, IdempotentStoreError> { let cache = self.cache.read(); if let Some(data) = cache.get(&idempotency_key) { let mut guard = self.cache_hits.write(); *guard += 1; Ok(Some(data.clone())) } else { Ok(None) } } async fn save_idempotent_data( &self, idempotency_key: String, input_hash: [u8; 
32], status_code: StatusCode, data: Bytes, ) -> Result<(), IdempotentStoreError> { let mut cache = self.cache.write(); cache.insert( idempotency_key, IdempotentData::new(status_code, data, input_hash), ); Ok(()) } } pub static IDEMPOTENCY: &str = "abcd01234"; #[async_trait] impl SettlementEngine for TestEngine { async fn send_money(&self, _account_id: String, _money: Quantity) -> ApiResult { Ok(ApiResponse::Default) } async fn receive_message(&self, _account_id: String, _message: Vec<u8>) -> ApiResult { Ok(ApiResponse::Default) } async fn create_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } async fn delete_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } } #[tokio::test] async fn idempotent_execute_settlement() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let settlement_call = |id, amount, scale| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/settlements", id)) .body(json!(Quantity::new(amount, scale)).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("42".to_owned(), 100, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("1".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("42".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = 
store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "EXECUTED".to_string()); } #[tokio::test] async fn idempotent_receive_message() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let messages_call = |id, msg| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/messages", id)) .body(msg) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("42", vec![0]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("1", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("42", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "RECEIVED".to_string()); } #[tokio::test] async fn idempotent_create_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let create_account_call = |id: &str| { warp::test::request() .method("POST") .path("/accounts") .body(json!(CreateAccount { id: id.to_string() }).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = 
create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = create_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "CREATED".to_string()); } #[tokio::test] async fn idempotent_delete_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let delete_account_call = |id: &str| { warp::test::request() .method("DELETE") .path(&format!("/accounts/{}", id)) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 204); assert_eq!(cached_data.body, "DELETED".to_string()); } }
use super::{ get_hash_of, idempotency::{make_idempotent_call, IdempotentStore}, types::{Quantity, SettlementEngine}, }; use bytes::Bytes; use http::StatusCode; use hyper::Response; use interledger_errors::default_rejection_handler; use serde::{Deserialize, Serialize}; use warp::Filter; #[derive(Serialize, Deserialize, Debug, Clone, Hash)] pub struct CreateAccount { id: String, } async fn create_engine_account<E, S>( idempotency_key: Option<String>, account_id: CreateAccount, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.create_account(account_id.id), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("CREATED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn delete_engine_account<E, S>( account_id: String, idempotency_key: Option<String>, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input_hash = get_hash_of(account_id.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.delete_account(account_id), input_hash, idempotency_key, StatusCode::NO_CONTENT, Bytes::from("DELETED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_send_money<E, S>( id: String, idempotency_key: Option<String>, quantity: Quantity, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, quantity); let input_hash = 
get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.send_money(id, quantity), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("EXECUTED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } async fn engine_receive_message<E, S>( id: String, idempotency_key: Option<String>, message: Bytes, engine: E, store: S, ) -> Result<impl warp::Reply, warp::Rejection> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let input = format!("{}{:?}", id, message); let input_hash = get_hash_of(input.as_ref()); let (status_code, message) = make_idempotent_call( store, engine.receive_message(id, message.to_vec()), input_hash, idempotency_key, StatusCode::CREATED, Bytes::from("RECEIVED"), ) .await?; Ok(Response::builder() .header("Content-Type", "application/json") .status(status_code) .body(message) .unwrap()) } pub fn create_settlement_engine_filter<E, S>( engine: E, store: S, ) -> warp::filters::BoxedFilter<(impl warp::Reply,)> where E: SettlementEngine + Clone + Send + Sync + 'static, S: IdempotentStore + Clone + Send + Sync + 'static, { let with_store = warp::any().map(move || store.clone()); let with_engine = warp::any().map(move || engine.clone()); let idempotency = warp::header::optional::<String>("idempotency-key"); let account_id = warp::path("accounts").and(warp::path::param::<String>()); let accounts = warp::post() .and(warp::path("accounts")) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(create_engine_account); let del_account = warp::delete() .and(account_id) .and(warp::path::end()) .and(idempotency) .and(with_engine.clone()) .and(with_store.clone()) .and_then(delete_engine_account); let settlement_endpoint = account_id.and(warp::path("settlements")); let settlements = warp::post() 
.and(settlement_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::json()) .and(with_engine.clone()) .and(with_store.clone()) .and_then(engine_send_money); let messages_endpoint = account_id.and(warp::path("messages")); let messages = warp::post() .and(messages_endpoint) .and(warp::path::end()) .and(idempotency) .and(warp::body::bytes()) .and(with_engine) .and(with_store) .and_then(engine_receive_message); accounts .or(del_account) .or(settlements) .or(messages) .recover(default_rejection_handler) .boxed() } #[cfg(test)] mod tests { use super::*; use crate::core::idempotency::IdempotentData; use crate::core::types::{ApiResponse, ApiResult}; use async_trait::async_trait; use bytes::Bytes; use http::StatusCode; use interledger_errors::*; use parking_lot::RwLock; use serde_json::{json, Value}; use std::collections::HashMap; use std::sync::Arc; fn check_error_status_and_message(response: Response<Bytes>, status_code: u16, message: &str) { let err: Value = serde_json::from_slice(response.body()).unwrap(); assert_eq!(response.status().as_u16(), status_code); assert_eq!(err.get("status").unwrap(), status_code); assert_eq!(err.get("detail").unwrap(), message); } #[derive(Clone)] struct TestEngine; #[derive(Debug, Clone)] pub struct TestAccount; #[derive(Clone)] pub struct TestStore { #[allow(clippy::all)] pub cache: Arc<RwLock<HashMap<String, IdempotentData>>>, pub cache_hits: Arc<RwLock<u64>>, } fn test_store() -> TestStore { TestStore { cache: Arc::new(RwLock::new(HashMap::new())), cache_hits: Arc::new(RwLock::new(0)), } } #[async_trait] impl IdempotentStore for TestStore { async fn load_idempotent_data( &self, idempotency_key: String, ) -> Result<Option<IdempotentData>, IdempotentStoreError> { let cache = self.cache.read(); if let Some(data) = cache.get(&idempotency_key) { let mut guard = self.cache_hits.write(); *guard += 1; Ok(Some(data.clone())) } else { Ok(None) } }
tEngine { async fn send_money(&self, _account_id: String, _money: Quantity) -> ApiResult { Ok(ApiResponse::Default) } async fn receive_message(&self, _account_id: String, _message: Vec<u8>) -> ApiResult { Ok(ApiResponse::Default) } async fn create_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } async fn delete_account(&self, _account_id: String) -> ApiResult { Ok(ApiResponse::Default) } } #[tokio::test] async fn idempotent_execute_settlement() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let settlement_call = |id, amount, scale| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/settlements", id)) .body(json!(Quantity::new(amount, scale)).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("1".to_owned(), 100, 6).await; assert_eq!(ret.status(), StatusCode::CREATED); assert_eq!(ret.body(), "EXECUTED"); let ret = settlement_call("42".to_owned(), 100, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("1".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = settlement_call("42".to_owned(), 42, 6).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "EXECUTED".to_string()); } #[tokio::test] async fn idempotent_receive_message() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); 
let messages_call = |id, msg| { warp::test::request() .method("POST") .path(&format!("/accounts/{}/messages", id)) .body(msg) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("1", vec![0]).await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "RECEIVED"); let ret = messages_call("42", vec![0]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("1", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let ret = messages_call("42", vec![42]).await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 4); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "RECEIVED".to_string()); } #[tokio::test] async fn idempotent_create_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let create_account_call = |id: &str| { warp::test::request() .method("POST") .path("/accounts") .body(json!(CreateAccount { id: id.to_string() }).to_string()) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = create_account_call("1").await; assert_eq!(ret.status().as_u16(), StatusCode::CREATED); assert_eq!(ret.body(), "CREATED"); let ret = create_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = 
cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 201); assert_eq!(cached_data.body, "CREATED".to_string()); } #[tokio::test] async fn idempotent_delete_account() { let store = test_store(); let engine = TestEngine; let api = create_settlement_engine_filter(engine, store.clone()); let delete_account_call = |id: &str| { warp::test::request() .method("DELETE") .path(&format!("/accounts/{}", id)) .header("Idempotency-Key", IDEMPOTENCY) .reply(&api) }; let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("1").await; assert_eq!(ret.status(), StatusCode::NO_CONTENT); assert_eq!(ret.body(), "DELETED"); let ret = delete_account_call("42").await; check_error_status_and_message(ret, 409, "Provided idempotency key is tied to other input"); let cache = store.cache.read(); let cached_data = cache.get(&IDEMPOTENCY.to_string()).unwrap(); let cache_hits = store.cache_hits.read(); assert_eq!(*cache_hits, 2); assert_eq!(cached_data.status, 204); assert_eq!(cached_data.body, "DELETED".to_string()); } }
async fn save_idempotent_data( &self, idempotency_key: String, input_hash: [u8; 32], status_code: StatusCode, data: Bytes, ) -> Result<(), IdempotentStoreError> { let mut cache = self.cache.write(); cache.insert( idempotency_key, IdempotentData::new(status_code, data, input_hash), ); Ok(()) } } pub static IDEMPOTENCY: &str = "abcd01234"; #[async_trait] impl SettlementEngine for Tes
random
[ { "content": "/// A trait for Store implmentations that have ILP routing tables.\n\npub trait RouterStore: AccountStore + Clone + Send + Sync + 'static {\n\n /// **Synchronously** return the routing table.\n\n /// Note that this is synchronous because it assumes that Stores should\n\n /// keep the rout...
Rust
9_error_handling/result/src/main.rs
rhavill/rust-programming-language
6fec6d84aa6a887e8dd5a5117ae7548780c1ecac
use std::io; use std::io::ErrorKind; use std::io::Read; use std::fs; use std::fs::File; fn main() { loop { println!("Input a number and press <Enter>:"); println!("1 - to see a generic error crash."); println!("2 - to match error and prevent crash."); println!("3 - to catch the error with unwrap_or_else and prevent crash."); println!("4 - to unwrap error and crash."); println!("5 - to crash with expect."); println!("6 - to propogate errors."); println!("7 - to propogate errors with ? operator."); println!("8 - to propogate errors with chained ? operator."); println!("9 - to propogate errors with fs::read_to_string."); println!("10 - to exit."); let mut option = String::new(); io::stdin().read_line(&mut option) .expect("Failed to read line"); let option: u32 = match option.trim().parse() { Ok(num) => num, Err(_) => continue, }; match option { 1 => generic_error(), 2 => matching_error(), 3 => avoid_match_with_closure(), 4 => unwrap_crash(), 5 => expect_crash(), 6 => propogating_errors(), 7 => question_mark_propogation(), 8 => chained_question_mark_propogation(), 9 => fs_read_to_string_propogation(), _ => break, } } } #[allow(unused_variables)] fn generic_error() { /* If we give f a type annotation that we know is not the return type of the function and then try to compile the code, the compiler will tell us that the types don’t match. The error message will then tell us what the type of f is. 
*/ let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => { panic!("Problem opening the file: {:?}", error) }, }; } #[allow(unused_variables)] fn matching_error() { let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => match error.kind() { ErrorKind::NotFound => match File::create("hello.txt") { Ok(fc) => fc, Err(e) => panic!("Problem creating the file: {:?}", e), }, other_error => panic!("Problem opening the file: {:?}", other_error), }, }; } #[allow(unused_variables)] fn avoid_match_with_closure() { /* In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expressions. Using those methods will make your code more concise. In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expressions. Using those methods will make your code more concise. */ let f = File::open("hello.txt").unwrap_or_else(|error| { if error.kind() == ErrorKind::NotFound { File::create("hello.txt").unwrap_or_else(|error| { panic!("Problem creating the file: {:?}", error); }) } else { panic!("Problem opening the file: {:?}", error); } }); } #[allow(unused_variables)] fn unwrap_crash() { /* unwrap is a shortcut method that is implemented just like the match expression we wrote in Listing 9-4. If the Result value is the Ok variant, unwrap will return the value inside the Ok. If the Result is the Err variant, unwrap will call the panic! macro for us. */ let f = File::open("hello.txt").unwrap(); } #[allow(unused_variables)] fn expect_crash() { /* Expect, which is similar to unwrap, lets us also choose the panic! error message. Using expect instead of unwrap and providing good error messages can convey your intent and make tracking down the source of a panic easier. 
*/ let f = File::open("hello.txt").expect("Failed to open hello.txt"); } fn propogating_errors() { let username = read_username_from_file(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file() -> Result<String, io::Error> { let f = File::open("hello.txt"); let mut f = match f { Ok(file) => file, Err(e) => return Err(e), }; let mut s = String::new(); match f.read_to_string(&mut s) { Ok(_) => Ok(s), Err(e) => Err(e), } } fn question_mark_propogation() { let username = read_username_from_file_shorthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorthand() -> Result<String, io::Error> { /* If the value of the Result is an Ok, the value inside the Ok will get returned from this expression, and the program will continue. If the value is an Err, the Err will be returned from the whole function as if we had used the return keyword so the error value gets propagated to the calling code. This is different from using match, because error values that have the ? operator called on them go through the from function, defined in the From trait in the standard library, which is used to convert errors from one type into another. When the ? operator calls the from function, the error type received is converted into the error type defined in the return type of the current function. This is useful when a function returns one error type to represent all the ways a function might fail, even if parts might fail for many different reasons. As long as each error type implements the from function to define how to convert itself to the returned error type, the ? operator takes care of the conversion automatically. The ? at the end of the File::open call will return the value inside an Ok to the variable f. If an error occurs, the ? 
operator will return early out of the whole function and give any Err value to the calling code. The same thing applies to the ? at the end of the read_to_string call. */ let mut f = File::open("hello.txt")?; let mut s = String::new(); f.read_to_string(&mut s)?; Ok(s) } fn chained_question_mark_propogation() { let username = read_username_from_file_shorterhand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorterhand() -> Result<String, io::Error> { let mut s = String::new(); File::open("hello.txt")?.read_to_string(&mut s)?; Ok(s) } fn fs_read_to_string_propogation() { let username = read_username_from_file_shortesthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shortesthand() -> Result<String, io::Error> { fs::read_to_string("hello.txt") } /* The main function is special, and there are restrictions on what its return type must be. One valid return type for main is (), and conveniently, another valid return type is Result<T, E>, as shown here: */
use std::io; use std::io::ErrorKind; use std::io::Read; use std::fs; use std::fs::File; fn main() { loop { println!("Input a number and press <Enter>:"); println!("1 - to see a generic error crash."); println!("2 - to match error and prevent crash."); println!("3 - to catch the error with unwrap_or_else and prevent crash."); println!("4 - to unwrap error and crash."); println!("5 - to crash with expect."); println!("6 - to propogate errors.");
ssions. Using those methods will make your code more concise. In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expressions. Using those methods will make your code more concise. */ let f = File::open("hello.txt").unwrap_or_else(|error| { if error.kind() == ErrorKind::NotFound { File::create("hello.txt").unwrap_or_else(|error| { panic!("Problem creating the file: {:?}", error); }) } else { panic!("Problem opening the file: {:?}", error); } }); } #[allow(unused_variables)] fn unwrap_crash() { /* unwrap is a shortcut method that is implemented just like the match expression we wrote in Listing 9-4. If the Result value is the Ok variant, unwrap will return the value inside the Ok. If the Result is the Err variant, unwrap will call the panic! macro for us. */ let f = File::open("hello.txt").unwrap(); } #[allow(unused_variables)] fn expect_crash() { /* Expect, which is similar to unwrap, lets us also choose the panic! error message. Using expect instead of unwrap and providing good error messages can convey your intent and make tracking down the source of a panic easier. 
*/ let f = File::open("hello.txt").expect("Failed to open hello.txt"); } fn propogating_errors() { let username = read_username_from_file(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file() -> Result<String, io::Error> { let f = File::open("hello.txt"); let mut f = match f { Ok(file) => file, Err(e) => return Err(e), }; let mut s = String::new(); match f.read_to_string(&mut s) { Ok(_) => Ok(s), Err(e) => Err(e), } } fn question_mark_propogation() { let username = read_username_from_file_shorthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorthand() -> Result<String, io::Error> { /* If the value of the Result is an Ok, the value inside the Ok will get returned from this expression, and the program will continue. If the value is an Err, the Err will be returned from the whole function as if we had used the return keyword so the error value gets propagated to the calling code. This is different from using match, because error values that have the ? operator called on them go through the from function, defined in the From trait in the standard library, which is used to convert errors from one type into another. When the ? operator calls the from function, the error type received is converted into the error type defined in the return type of the current function. This is useful when a function returns one error type to represent all the ways a function might fail, even if parts might fail for many different reasons. As long as each error type implements the from function to define how to convert itself to the returned error type, the ? operator takes care of the conversion automatically. The ? at the end of the File::open call will return the value inside an Ok to the variable f. If an error occurs, the ? 
operator will return early out of the whole function and give any Err value to the calling code. The same thing applies to the ? at the end of the read_to_string call. */ let mut f = File::open("hello.txt")?; let mut s = String::new(); f.read_to_string(&mut s)?; Ok(s) } fn chained_question_mark_propogation() { let username = read_username_from_file_shorterhand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shorterhand() -> Result<String, io::Error> { let mut s = String::new(); File::open("hello.txt")?.read_to_string(&mut s)?; Ok(s) } fn fs_read_to_string_propogation() { let username = read_username_from_file_shortesthand(); match username { Ok(username) => println!("Got username: {}", username), Err(_error) => println!("Error getting usename, using default."), }; } fn read_username_from_file_shortesthand() -> Result<String, io::Error> { fs::read_to_string("hello.txt") } /* The main function is special, and there are restrictions on what its return type must be. One valid return type for main is (), and conveniently, another valid return type is Result<T, E>, as shown here: */
println!("7 - to propogate errors with ? operator."); println!("8 - to propogate errors with chained ? operator."); println!("9 - to propogate errors with fs::read_to_string."); println!("10 - to exit."); let mut option = String::new(); io::stdin().read_line(&mut option) .expect("Failed to read line"); let option: u32 = match option.trim().parse() { Ok(num) => num, Err(_) => continue, }; match option { 1 => generic_error(), 2 => matching_error(), 3 => avoid_match_with_closure(), 4 => unwrap_crash(), 5 => expect_crash(), 6 => propogating_errors(), 7 => question_mark_propogation(), 8 => chained_question_mark_propogation(), 9 => fs_read_to_string_propogation(), _ => break, } } } #[allow(unused_variables)] fn generic_error() { /* If we give f a type annotation that we know is not the return type of the function and then try to compile the code, the compiler will tell us that the types don’t match. The error message will then tell us what the type of f is. */ let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => { panic!("Problem opening the file: {:?}", error) }, }; } #[allow(unused_variables)] fn matching_error() { let f = File::open("hello.txt"); let f = match f { Ok(file) => file, Err(error) => match error.kind() { ErrorKind::NotFound => match File::create("hello.txt") { Ok(fc) => fc, Err(e) => panic!("Problem creating the file: {:?}", e), }, other_error => panic!("Problem opening the file: {:?}", other_error), }, }; } #[allow(unused_variables)] fn avoid_match_with_closure() { /* In Chapter 13, you’ll learn about closures; the Result<T, E> type has many methods that accept a closure and are implemented using match expre
random
[ { "content": "fn main() {\n\n if_let_expressions();\n\n while_loop();\n\n for_loop();\n\n let_statement();\n\n function_parameters();\n\n}\n\n\n\n/*\n\nThe line if let Ok(age) = age introduces a new shadowed age variable that \n\ncontains the value inside the Ok variant. This means we need to pla...
Rust
src/number.rs
terrynsun/js.rs
702c162b7ad8e59c5e9b9ce0c0ec0ec43b532804
use std::cell::RefCell; use std::rc::Rc; use french_press::ScopeManager; use jsrs_common::backend::Backend; use jsrs_common::ast::*; use jsrs_common::ast::BinOp::*; use jsrs_common::types::coerce::{AsBool, AsNumber, AsString}; use jsrs_common::types::js_var::{JsVar, JsType}; use jsrs_common::types::js_var::JsPtrEnum::*; use jsrs_common::types::js_var::JsType::*; use jsrs_common::types::js_var::JsPtrTag; use jsrs_common::js_error::{self, JsError}; use eval::eval_exp; macro_rules! b { ($e: expr) => { $e.as_bool() } } macro_rules! n { ($e: expr) => { $e.as_number() } } macro_rules! ni64 { ($e: expr) => { $e.as_number() as i64 } } macro_rules! nu64 { ($e: expr) => { $e.as_number() as u64 } } macro_rules! ni32 { ($e: expr) => { { let n = $e.as_number(); if n.is_nan() { 0i32 } else { n as i32 } } } } macro_rules! nu32 { ($e: expr) => { $e.as_number() as u32 } } pub fn eval_binop(op: &BinOp, e1: &Exp, e2: &Exp, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsType> { if let &And = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == false { JsBool(false) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } else if let &Or = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == true { JsBool(true) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } let val1_is_instance_var = match e1 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) => true, _ => false }; let val2_is_instance_var = match e2 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) 
=> true, _ => false }; let (val1, ptr1) = try!(eval_exp(e1, state.clone())); let (val2, ptr2) = try!(eval_exp(e2, state.clone())); if let Err(e) = state.borrow_mut().alloc(val1.clone(), ptr1) { return Err(JsError::from(e)); } if let Err(e) = state.borrow_mut().alloc(val2.clone(), ptr2) { return Err(JsError::from(e)); } let v = match *op { And => { println!("{:?}", val1); if b!(val1) == false { JsBool(false) } else { JsBool(b!(val2)) } } Or => JsBool(b!(val1) || b!(val2)), Ge => JsBool(b!(val1) >= b!(val2)), Gt => JsBool(b!(val1) > b!(val2)), Le => JsBool(b!(val1) <= b!(val2)), Lt => JsBool(b!(val1) < b!(val2)), Neq => { if let Ok(JsBool(b)) = eval_binop(&Eql, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } Eql => { let b = match (&val1.t, &val2.t) { (&JsNull, &JsNull) => false, (&JsUndef, &JsNull) => false, (&JsNull, &JsUndef) => false, (&JsUndef, &JsUndef) => false, (&JsNum(ref n1), &JsNum(ref n2)) => n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } }, (&JsNum(ref n), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsPtr(_), &JsNum(ref n)) => try_load!(state, &val2,val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsBool(_), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val1) == n!(ptr)), (&JsPtr(_), &JsBool(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val2) == n!(ptr)), _ => false, }; JsBool(b) } EqlStrict => { let b = match (&val1.t, &val2.t) { (&JsNull, &JsNull) => true, (&JsUndef, &JsUndef) => true, (&JsNum(ref n1), &JsNum(ref n2)) 
=> n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } } _ => false, }; JsBool(b) } NeqStrict => { if let Ok(JsBool(b)) = eval_binop(&EqlStrict, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } BitOr => JsNum((ni32!(val1) | ni32!(val2)) as f64), BitXor => JsNum((ni32!(val1) ^ ni32!(val2)) as f64), BitAnd => JsNum((ni32!(val1) & ni32!(val2)) as f64), ShiftLeft => JsNum(0.0), ShiftRight => JsNum(0.0), ShiftRightUnsigned => JsNum(0.0), Minus => JsNum(n!(val1) - n!(val2)), Plus => JsNum(n!(val1) + n!(val2)), Slash => JsNum(n!(val1) / n!(val2)), Star => JsNum(n!(val1) * n!(val2)), Mod => JsNum(n!(val1) % n!(val2)), Exponent => JsNum(n!(val1) % n!(val2)), InstanceOf => { let ptr = try_load!(state, &val1, val1_is_instance_var); let b = match (ptr, &val2.t) { (Some(JsObj(ref obj)), &JsPtr(JsPtrTag::NativeFn { ref name})) => &obj.name == name, (_, &JsPtr(JsPtrTag::NativeFn {..})) => false, (_, &JsPtr(JsPtrTag::JsFn{..})) => false, _ => { let ptr2 = try_load!(state, &val2, val2_is_instance_var); let err_str = ptr2.map(|p| p.as_string()).unwrap_or(val2.t.as_string()); return Err(JsError::TypeError(format!("Expecting a function in instanceof check, but got {}", err_str))); } }; JsBool(b) } }; Ok(v) }
use std::cell::RefCell; use std::rc::Rc; use french_press::ScopeManager; use jsrs_common::backend::Backend; use jsrs_common::ast::*; use jsrs_common::ast::BinOp::*; use jsrs_common::types::coerce::{AsBool, AsNumber, AsString}; use jsrs_common::types::js_var::{JsVar, JsType}; use jsrs_common::types::js_var::JsPtrEnum::*; use jsrs_common::types::js_var::JsType::*; use jsrs_common::types::js_var::JsPtrTag; use jsrs_common::js_error::{self, JsError}; use eval::eval_exp; macro_rules! b { ($e: expr) => { $e.as_bool() } } macro_rules! n { ($e: expr) => { $e.as_number() } } macro_rules! ni64 { ($e: expr) => { $e.as_number() as i64 } } macro_rules! nu64 { ($e: expr) => { $e.as_number() as u64 } } macro_rules! ni32 { ($e: expr) => { { let n = $e.as_number(); if n.is_nan() { 0i32 } else { n as i32 } } } } macro_rules! nu32 { ($e: expr) => { $e.as_number() as u32 } } pub fn eval_binop(op: &BinOp, e1: &Exp, e2: &Exp, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsType> { if let &And = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == false { JsBool(false) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } else if let &Or = op { let val1: JsVar = try!(eval_exp(e1, state.clone())).0; let b = if b!(val1) == true { JsBool(true) } else { let val2: JsVar = try!(eval_exp(e2, state.clone())).0; JsBool(b!(val2)) }; return Ok(b); } let val1_is_instance_var = match e1 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) => true, _ => false }; let val2_is_instance_var = match e2 { &Exp::InstanceVar(..) | &Exp::KeyAccessor(..) 
=> true, _ => false }; let (val1, ptr1) = try!(eval_exp(e1, state.clone())); let (val2, ptr2) = try!(eval_exp(e2, state.clone())); if let Err(e) = state.borrow_mut().alloc(val1.clone(), ptr1) { return Err(JsError::from(e)); } if let Err(e) = state.borrow_mut().alloc(val2.clone(), ptr2) { return Err(JsError::from(e)); } let v = match *op { And => { println!("{:?}", val1); if b!(val1) == false { JsBool(false) } else { JsBool(b!(val2)) } } Or => JsBool(b!(val1) || b!(val2)), Ge => JsBool(b!(val1) >= b!(val2)), Gt => JsBool(b!(val1) > b!(val2)), Le => JsBool(b!(val1) <= b!(val2)), Lt => JsBool(b!(val1) < b!(val2)), Neq => { if let Ok(JsBool(b)) = eval_binop(&Eql, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } Eql => { let b = match (&val1.t, &val2.t) { (&JsNull, &JsNull) => false, (&JsUndef, &JsNull) => false, (&JsNull, &JsUndef) => false, (&JsUndef, &JsUndef) => false, (&JsNum(ref n1), &JsNum(ref n2)) => n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } }, (&JsNum(ref n), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsPtr(_), &JsNum(ref n)) => try_load!(state, &val2,val2_is_instance_var).map_or(false, |ptr| *n == n!(ptr)), (&JsBool(_), &JsPtr(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val1) == n!(ptr)), (&JsPtr(_), &JsBool(_)) => try_load!(state, &val2, val2_is_instance_var).map_or(false, |ptr| n!(val2) == n!(ptr)), _ => false, }; JsBool(b) } EqlStrict => { let b =
; JsBool(b) } NeqStrict => { if let Ok(JsBool(b)) = eval_binop(&EqlStrict, e1, e2, state) { JsBool(!b) } else { JsBool(false) } } BitOr => JsNum((ni32!(val1) | ni32!(val2)) as f64), BitXor => JsNum((ni32!(val1) ^ ni32!(val2)) as f64), BitAnd => JsNum((ni32!(val1) & ni32!(val2)) as f64), ShiftLeft => JsNum(0.0), ShiftRight => JsNum(0.0), ShiftRightUnsigned => JsNum(0.0), Minus => JsNum(n!(val1) - n!(val2)), Plus => JsNum(n!(val1) + n!(val2)), Slash => JsNum(n!(val1) / n!(val2)), Star => JsNum(n!(val1) * n!(val2)), Mod => JsNum(n!(val1) % n!(val2)), Exponent => JsNum(n!(val1) % n!(val2)), InstanceOf => { let ptr = try_load!(state, &val1, val1_is_instance_var); let b = match (ptr, &val2.t) { (Some(JsObj(ref obj)), &JsPtr(JsPtrTag::NativeFn { ref name})) => &obj.name == name, (_, &JsPtr(JsPtrTag::NativeFn {..})) => false, (_, &JsPtr(JsPtrTag::JsFn{..})) => false, _ => { let ptr2 = try_load!(state, &val2, val2_is_instance_var); let err_str = ptr2.map(|p| p.as_string()).unwrap_or(val2.t.as_string()); return Err(JsError::TypeError(format!("Expecting a function in instanceof check, but got {}", err_str))); } }; JsBool(b) } }; Ok(v) }
match (&val1.t, &val2.t) { (&JsNull, &JsNull) => true, (&JsUndef, &JsUndef) => true, (&JsNum(ref n1), &JsNum(ref n2)) => n1 == n2, (&JsBool(ref b1), &JsBool(ref b2)) => b1 == b2, (&JsPtr(_), &JsPtr(_)) => { let ptr1 = try_load!(state, &val1, val1_is_instance_var); let ptr2 = try_load!(state, &val2, val2_is_instance_var); match (&ptr1, &ptr2) { (&Some(JsSym(_)), &Some(JsSym(_))) => val1 == val2, (&Some(JsStr(ref s1)), &Some(JsStr(ref s2))) => s1 == s2, (&Some(JsObj(_)), &Some(JsObj(_))) => val1 == val2, (&Some(JsFn(_)), &Some(JsFn(_))) => val1 == val2, _ => false, } } _ => false, }
if_condition
[ { "content": "/// Evaluate an expression into a JsVar.\n\npub fn eval_exp(e: &Exp, state: Rc<RefCell<ScopeManager>>) -> js_error::Result<JsVarValue> {\n\n match e {\n\n // [ e1, e2, ... ]\n\n &Array(ref elems) => {\n\n let proto = Some(Box::new(get_array_proto(elems.len() as f64, sta...
Rust
crates/zoon/src/routing/router.rs
afidegnum/MoonZoon
e8be4f08eedb295b01aa55295fe1ea2c2e24384a
use crate::{routing::decode_uri_component, *}; use futures_signals::signal::{channel, Sender}; use std::marker::PhantomData; use web_sys::MouseEvent; type UrlChangeSender = Sender<Option<Vec<String>>>; pub struct Router<R> { popstate_listener: SendWrapper<Closure<dyn Fn()>>, link_interceptor: SendWrapper<Closure<dyn Fn(MouseEvent)>>, url_change_sender: UrlChangeSender, _url_change_handle: TaskHandle, _route_type: PhantomData<R>, } impl<R: FromRouteSegments> Router<R> { pub fn new(on_route_change: impl FnOnce(Option<R>) + Clone + 'static) -> Self { let (url_change_sender, _url_change_handle) = setup_url_change_handler(on_route_change); Router { popstate_listener: setup_popstate_listener(url_change_sender.clone()), link_interceptor: setup_link_interceptor(url_change_sender.clone()), url_change_sender, _url_change_handle, _route_type: PhantomData, } } pub fn go<'a>(&self, to: impl IntoCowStr<'a>) { go(&self.url_change_sender, to); } pub fn replace<'a>(&self, with: impl IntoCowStr<'a>) { replace(&self.url_change_sender, with); } } impl<R> Drop for Router<R> { fn drop(&mut self) { window() .remove_event_listener_with_callback( "popstate", self.popstate_listener.as_ref().unchecked_ref(), ) .unwrap_throw(); document() .remove_event_listener_with_callback( "click", self.link_interceptor.as_ref().unchecked_ref(), ) .unwrap_throw(); } } fn setup_url_change_handler<R: FromRouteSegments>( on_route_change: impl FnOnce(Option<R>) + Clone + 'static, ) -> (UrlChangeSender, TaskHandle) { let on_route_change = move |route: Option<R>| on_route_change.clone()(route); let (url_change_sender, url_change_receiver) = channel(current_url_segments()); let url_change_handler = url_change_receiver.for_each(move |segments| { let route = segments.and_then(R::from_route_segments); on_route_change(route); async {} }); let url_change_handle = Task::start_droppable(url_change_handler); (url_change_sender, url_change_handle) } fn go<'a>(url_change_sender: &UrlChangeSender, to: impl IntoCowStr<'a>) { 
let to = to.into_cow_str(); if !to.starts_with('/') { return window().location().assign(&to).unwrap_throw(); } history() .push_state_with_url(&JsValue::NULL, "", Some(&to)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); } fn replace<'a>(url_change_sender: &UrlChangeSender, with: impl IntoCowStr<'a>) { let with = with.into_cow_str(); if !with.starts_with('/') { return window().location().replace(&with).unwrap_throw(); } history() .replace_state_with_url(&JsValue::NULL, "", Some(&with)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); } fn current_url_segments() -> Option<Vec<String>> { let path = window().location().pathname().unwrap_throw(); let mut segments = Vec::new(); for segment in path.trim_start_matches('/').split_terminator('/') { match decode_uri_component(segment) { Ok(segment) => segments.push(segment), Err(error) => { crate::eprintln!( "Cannot decode the URL segment '{}'. Error: {:#?}", segment, error ); None? } } } Some(segments) } fn setup_popstate_listener(url_change_sender: UrlChangeSender) -> SendWrapper<Closure<dyn Fn()>> { let closure = Closure::wrap(Box::new(move || { url_change_sender .send(current_url_segments()) .unwrap_throw(); }) as Box<dyn Fn()>); window() .add_event_listener_with_callback("popstate", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn setup_link_interceptor( url_change_sender: UrlChangeSender, ) -> SendWrapper<Closure<dyn Fn(MouseEvent)>> { let closure = Closure::wrap(Box::new(move |event| { link_click_handler(event, &url_change_sender); }) as Box<dyn Fn(MouseEvent)>); document() .add_event_listener_with_callback("click", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn link_click_handler(event: MouseEvent, url_change_sender: &UrlChangeSender) -> Option<()> { if event.ctrl_key() || event.meta_key() || event.shift_key() || event.button() != 0 { None? 
} let ws_element: web_sys::Element = event.target()?.dyn_into().ok()?; let a: web_sys::Element = ws_element .closest(r#"a[href^="/"]:not([download], [target="_blank"])"#) .ok()??; let href = a.get_attribute("href")?; event.prevent_default(); go(url_change_sender, href); Some(()) }
use crate::{routing::decode_uri_component, *}; use futures_signals::signal::{channel, Sender}; use std::marker::PhantomData; use web_sys::MouseEvent; type UrlChangeSender = Sender<Option<Vec<String>>>; pub struct Router<R> { popstate_listener: SendWrapper<Closure<dyn Fn()>>, link_interceptor: SendWrapper<Closure<dyn Fn(MouseEvent)>>, url_change_sender: UrlChangeSender, _url_change_handle: TaskHandle, _route_type: PhantomData<R>, } impl<R: FromRouteSegments> Router<R> { pub fn new(on_route_change: impl FnOnce(Option<R>) + Clone + 'static) -> Self { let (url_change_sender, _url_change_handle) = setup_url_change_handler(on_route_change); Router { popstate_listener: setup_popstate_listener(url_change_sender.clone()), link_interceptor: setup_link_interceptor(url_change_sender.clone()), url_change_sender, _url_change_handle, _route_type: PhantomData, } } pub fn go<'a>(&self, to: impl IntoCowStr<'a>) { go(&self.url_change_sender, to); } pub fn replace<'a>(&self, with: impl IntoCowStr<'a>) { replace(&self.url_change_sender, with); } } impl<R> Drop for Router<R> { fn drop(&mut self) { window() .remove_event_listener_with_callback( "popstate", self.popstate_listener.as_ref().unchecked_ref(), ) .unwrap_throw(); document() .remove_event_listener_with_callback( "click", self.link_interceptor.as_ref().unchecked_ref(), ) .unwrap_throw(); } } fn setup_url_change_handler<R: FromRouteSegments>( on_route_change: impl FnOnce(Option<R>) + Clone + 'static, ) -> (UrlChangeSender, TaskHandle) { let on_route_change = move |route: Option<R>| on_route_change.clone()(route); let (url_change_sender, url_change_receiver) = channel(current_url_segments()); let url_change_handler = url_change_receiver.for_each(move |segments| { let route = segments.and_then(R::from_route_segments); on_route_change(route); async {} }); let url_change_handle = Task::start_droppable(url_change_handler); (url_change_sender, url_change_handle) } fn go<'a>(url_change_sender: &UrlChangeSender, to: impl IntoCowStr<'a>) { 
let to = to.into_cow_str(); if !to.starts_with('/') { return window().location().assign(&to).unwrap_throw(); } history() .push_state_with_url(&JsValue::NULL, "", Some(&to)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); } fn replace<'a>(url_change_sender: &UrlChangeSender, with: impl IntoCowStr<'a>) { let with = with.into_cow_str(); if !with.starts_with('/') { return window().location().replace(&with).unwrap_throw(); } history() .replace_state_with_url(&JsValue::NULL, "", Some(&with)) .unwrap_throw(); url_change_sender .send(current_url_segments()) .unwrap_throw(); }
fn setup_popstate_listener(url_change_sender: UrlChangeSender) -> SendWrapper<Closure<dyn Fn()>> { let closure = Closure::wrap(Box::new(move || { url_change_sender .send(current_url_segments()) .unwrap_throw(); }) as Box<dyn Fn()>); window() .add_event_listener_with_callback("popstate", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn setup_link_interceptor( url_change_sender: UrlChangeSender, ) -> SendWrapper<Closure<dyn Fn(MouseEvent)>> { let closure = Closure::wrap(Box::new(move |event| { link_click_handler(event, &url_change_sender); }) as Box<dyn Fn(MouseEvent)>); document() .add_event_listener_with_callback("click", closure.as_ref().unchecked_ref()) .unwrap_throw(); SendWrapper::new(closure) } fn link_click_handler(event: MouseEvent, url_change_sender: &UrlChangeSender) -> Option<()> { if event.ctrl_key() || event.meta_key() || event.shift_key() || event.button() != 0 { None? } let ws_element: web_sys::Element = event.target()?.dyn_into().ok()?; let a: web_sys::Element = ws_element .closest(r#"a[href^="/"]:not([download], [target="_blank"])"#) .ok()??; let href = a.get_attribute("href")?; event.prevent_default(); go(url_change_sender, href); Some(()) }
fn current_url_segments() -> Option<Vec<String>> { let path = window().location().pathname().unwrap_throw(); let mut segments = Vec::new(); for segment in path.trim_start_matches('/').split_terminator('/') { match decode_uri_component(segment) { Ok(segment) => segments.push(segment), Err(error) => { crate::eprintln!( "Cannot decode the URL segment '{}'. Error: {:#?}", segment, error ); None? } } } Some(segments) }
function_block-full_function
[ { "content": "#[static_ref]\n\npub fn router() -> &'static Router<Route> {\n\n Router::new(|route| match route {\n\n Some(Route::Active) => app::select_filter(app::Filter::Active),\n\n Some(Route::Completed) => app::select_filter(app::Filter::Completed),\n\n Some(Route::Root) | None => a...
Rust
roman-numeral/roman-numeral/src/lib.rs
Emilgardis/problems
9d1e8f917f78621b66816d624009bbd12861f075
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] extern crate num; #[macro_use] extern crate num_derive; extern crate itertools; #[macro_use] extern crate error_chain; mod errors; use errors::*; use itertools::Itertools; use std::str::FromStr; use num::traits::{CheckedSub, CheckedMul, CheckedDiv}; use std::ops::{Add, Sub}; #[repr(u16)] #[derive(FromPrimitive, Ord, Eq, PartialOrd, PartialEq, Debug, Clone)] pub enum RomanNumeral { I = 1, V = 5, X = 10, L = 50, C = 100, D = 500, M = 1000, } impl RomanNumeral { fn biggest(u: &usize) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match () { _ if u >= &1000 => M, _ if u >= &500 => D, _ if u >= &100 => C, _ if u >= &50 => L, _ if u >= &10 => X, _ if u >= &5 => V, _ if u >= &1 => I, _ => return Err(ErrorKind::NoZeroNumeral.into()), }) } fn step_up(&self) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match self { &I | &X | &C | &M => return Err(format!("Cannot step_up from {:?}", self).into()), &V => X, &L => C, &D => M, }) } } impl FromStr for RomanNumeral { type Err = Error; fn from_str(s: &str) -> Result<Self> { use self::RomanNumeral::*; Ok(match s.to_uppercase().as_str() { "I" => I, "V" => V, "X" => X, "L" => L, "C" => C, "D" => D, "M" => M, _ => return Err(format!("No roman numeral corresponding to {}", s).into()), }) } } #[derive(Clone)] pub struct Roman(Vec<RomanNumeral>); impl FromStr for Roman { type Err = Error; fn from_str(s: &str) -> Result<Self> { let mut vec = vec![]; if s.len() == 0 { return Err(ErrorKind::NoZeroNumeral.into()); } for ch in s.chars() { vec.push(ch.to_string().parse().chain_err(|| "While parsing roman numeral")?) } Ok(Roman::new(vec).chain_err(|| "While making new roman numeral after parsing")?) 
} } impl Add for Roman { type Output = Roman; fn add(self, v: Roman) -> Roman { unimplemented!() } } impl Sub for Roman { type Output = Roman; fn sub(self, v: Roman) -> Roman { let k = self.as_usize().unwrap().sub(v.as_usize().unwrap()); Roman::from_usize(k).unwrap() } } impl CheckedSub for Roman { fn checked_sub(&self, v: &Roman) -> Option<Roman> { let k = self.as_usize().unwrap().checked_sub(if let Ok(v_) = v.as_usize() { v_ } else { return None; }); match k { Some(0) | None => None, Some(res) => Roman::from_usize(res).ok(), } } } impl Roman { pub fn new(content: Vec<RomanNumeral>) -> Result<Roman> { Roman::validate(content).chain_err(|| "While validating the sequence of roman numerals") } #[doc(hidden)] pub fn _inner(&self) -> &Vec<RomanNumeral> { &self.0 } pub fn as_usize(&self) -> Result<usize> { let mut sum = 0; let mut iter = self.0 .clone() .into_iter() .map(|e| (e, 1)) .coalesce(|x, y| if x.0 == y.0 { Ok((x.0, x.1 + y.1)) } else { Err((x, y)) }) .peekable(); while let Some((numeral, reps)) = iter.next() { if iter.peek().is_none() || iter.peek().unwrap().0 < numeral { sum += numeral as usize * reps; } else { let (n_next, n_reps) = iter.next().unwrap(); sum += (n_next as usize * n_reps).checked_sub((numeral as usize * reps)).ok_or::<Error>("Underflow error".into())?; } } Ok(sum) } fn validate(vec: Vec<RomanNumeral>) -> Result<Roman> { let ro = Roman(vec); ro.as_usize()?; let stred = ro.as_string()?; if stred.contains("VV") { Err(ErrorKind::InvalidSequence("VV").into()) } else if stred.contains("LL") { Err(ErrorKind::InvalidSequence("LL").into()) } else if stred.contains("DD") { Err(ErrorKind::InvalidSequence("DD").into()) } else { Ok(ro) } } pub fn as_string(&self) -> Result<String> { use std::fmt::Write; let mut buf = String::new(); for numeral in &self.0 { write!(buf, "{:?}", numeral)?; } Ok(buf) } pub fn condense(&mut self) -> Result<()> { let stred = self.as_string().unwrap(); let res = stred .replace("IIIIII", "VI") .replace("VIIII", "IX") 
.replace("DCCCC", "CM") .replace("LXXXX", "XC") .replace("CCCC", "CD") .replace("XXXX", "XL") .replace("IIII", "IV"); self.0 = res.parse::<Roman>().chain_err(|| "After condensing")?.0; Ok(()) } pub fn expand(&mut self) -> Result<()> { let stred = self.as_string()?; println!("{}", stred); let res = stred .replace("IV", "IIII") .replace("XL", "XXXX") .replace("CD", "CCCC") .replace("XC", "LXXXX") .replace("CM", "DCCCC") .replace("IX", "VIIII"); self.0 = res.parse::<Roman>().chain_err(|| "After expanding")?.0; Ok(()) } pub fn from_usize(u: usize) -> Result<Roman> { let mut vec = vec![]; let mut rest = u; while rest != 0 { let res = RomanNumeral::biggest(&rest)?; rest -= res.clone() as usize; vec.push(res); } Ok(Roman::new(vec)?) } } #[cfg(test)] mod tests { use super::*; use self::RomanNumeral::*; #[test] fn iiii() { let n = Roman::new(vec![I, I, I, I]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn iv() { let n = Roman::new(vec![I, V]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn ordering() { assert!(I < X); assert!(X <= L); assert!(X != C); assert!(C < D); } #[test] fn cdxcix() { let n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn cccclxxxxviiii() { let n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn iix() { let n = Roman::new(vec![I, I, X]).unwrap(); assert_eq!(8, n.as_usize().unwrap()); } #[test] fn xiix() { let n = Roman::new(vec![X, I, I, X]).unwrap(); assert_eq!(18, n.as_usize().unwrap()); } #[test] fn from_str() { let n = Roman::from_str("MMXVII").unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn from_usize() { let n = Roman::from_usize(2017).unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn condense_499() { let mut n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(499, n.as_usize().unwrap()); assert_eq!(&vec![C, D, 
X, C, I, X], n._inner()); } #[test] fn condense_viiii() { let mut n = Roman::new(vec![V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![I, X], n._inner()); } #[test] fn condense_cccc() { let mut n = Roman::new(vec![C, C, C, C]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![C, D], n._inner()); } #[test] fn expand() { let mut n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); n.expand().unwrap(); assert_eq!(&vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I], n._inner()); } #[test] fn sixteen() { let mut n = Roman::new(vec![X, I, I, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![X, V, I], n._inner()); } }
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] extern crate num; #[macro_use] extern crate num_derive; extern crate itertools; #[macro_use] extern crate error_chain; mod errors; use errors::*; use itertools::Itertools; use std::str::FromStr; use num::traits::{CheckedSub, CheckedMul, CheckedDiv}; use std::ops::{Add, Sub}; #[repr(u16)] #[derive(FromPrimitive, Ord, Eq, PartialOrd, PartialEq, Debug, Clone)] pub enum RomanNumeral { I = 1, V = 5, X = 10, L = 50, C = 100, D = 500, M = 1000, } impl RomanNumeral { fn biggest(u: &usize) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match () { _ if u >= &1000 => M, _ if u >= &500 => D, _ if u >= &100 => C, _ if u >= &50 => L, _ if u >= &10 => X, _ if u >= &5 => V, _ if u >= &1 => I, _ => return Err(ErrorKind::NoZeroNumeral.into()), }) } fn step_up(&self) -> Result<RomanNumeral> { use self::RomanNumeral::*; Ok(match self { &I | &X | &C | &M => return Err(format!("Cannot step_up from {:?}", self).into()), &V => X, &L => C, &D => M, }) } } impl FromStr for RomanNumeral { type Err = Error; fn from_str(s: &str) -> Result<Self> { use self::RomanNumeral::*; Ok(match s.to_uppercase().as_str() { "I" => I, "V" => V, "X" => X, "L" => L, "C" => C, "D" => D, "M" => M, _ => return Err(format!("No roman numeral corresponding to {}", s).into()), }) } } #[derive(Clone)] pub struct Roman(Vec<RomanNumeral>); impl FromStr for Roman { type Err = Error; fn from_str(s: &str) -> Result<Self> { let mut vec = vec![]; if s.len() == 0 { return Err(ErrorKind::NoZeroNumeral.into()); } for ch in s.chars() { vec.push(ch.to_string().parse().chain_err(|| "While parsing roman numeral")?) } Ok(Roman::new(vec).chain_err(|| "While making new roman numeral after parsing")?) 
} } impl Add for Roman { type Output = Roman; fn add(self, v: Roman) -> Roman { unimplemented!() } } impl Sub for Roman { type Output = Roman; fn sub(self, v: Roman) -> Roman { let k = self.as_usize().unwrap().sub(v.as_usize().unwrap()); Roman::from_usize(k).unwrap() } } impl CheckedSub for Roman { fn checked_sub(&self, v: &Roman) -> Option<Roman> { let k = self.as_usize().unwrap().checked_sub(if let Ok(v_) = v.as_usize() { v_ } else { return None; }); match k { Some(0) | None => None, Some(res) => Roman::from_usize(res).ok(), } } } impl Roman { pub fn new(content: Vec<RomanNumeral>) -> Result<Roman> { Roman::validate(content).chain_err(|| "While validating the sequence of roman numerals") } #[doc(hidden)] pub fn _inner(&self) -> &Vec<RomanNumeral> { &self.0 } pub fn as_usize(&self) -> Result<usize> { let mut sum = 0; let mut iter = self.0 .clone() .into_iter() .map(|e| (e, 1)) .coalesce(|x, y| if x.0 == y.0 { Ok((x.0, x.1 + y.1)) } else { Err((x, y)) }) .peekable(); while let Some((numeral, reps)) = iter.next() { if iter.peek().is_none() || iter.peek().unwrap().0 < numeral { sum += numeral as usize * reps; } else { let (n_next, n_reps) = iter.next().unwrap(); sum += (n_next as usize * n_reps).checked_sub((numeral as usize * reps)).ok_or::<Error>("Underflow error".into())?; } } Ok(sum) } fn validate(vec: Vec<RomanNumeral>) -> Result<Roman> { let ro = Roman(vec); ro.as_usize()?; let stred = ro.as_string()?; if stred.contains("VV") { Err(ErrorKind::InvalidSequence("VV").into()) } else if stred.contains("LL") { Err(ErrorKind::InvalidSequence("LL").into()) } else if stred.contains("DD") { Err(ErrorKind::InvalidSequence("DD").into()) } else { Ok(ro) } } pub fn as_string(&self) -> Result<String> { use std::fmt::Write; let mut buf = String::new(); for numeral in &self.0 { write!(buf, "{:?}", numeral)?; } Ok(buf) }
pub fn expand(&mut self) -> Result<()> { let stred = self.as_string()?; println!("{}", stred); let res = stred .replace("IV", "IIII") .replace("XL", "XXXX") .replace("CD", "CCCC") .replace("XC", "LXXXX") .replace("CM", "DCCCC") .replace("IX", "VIIII"); self.0 = res.parse::<Roman>().chain_err(|| "After expanding")?.0; Ok(()) } pub fn from_usize(u: usize) -> Result<Roman> { let mut vec = vec![]; let mut rest = u; while rest != 0 { let res = RomanNumeral::biggest(&rest)?; rest -= res.clone() as usize; vec.push(res); } Ok(Roman::new(vec)?) } } #[cfg(test)] mod tests { use super::*; use self::RomanNumeral::*; #[test] fn iiii() { let n = Roman::new(vec![I, I, I, I]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn iv() { let n = Roman::new(vec![I, V]).unwrap(); assert_eq!(4, n.as_usize().unwrap()); } #[test] fn ordering() { assert!(I < X); assert!(X <= L); assert!(X != C); assert!(C < D); } #[test] fn cdxcix() { let n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn cccclxxxxviiii() { let n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); assert_eq!(499, n.as_usize().unwrap()); } #[test] fn iix() { let n = Roman::new(vec![I, I, X]).unwrap(); assert_eq!(8, n.as_usize().unwrap()); } #[test] fn xiix() { let n = Roman::new(vec![X, I, I, X]).unwrap(); assert_eq!(18, n.as_usize().unwrap()); } #[test] fn from_str() { let n = Roman::from_str("MMXVII").unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn from_usize() { let n = Roman::from_usize(2017).unwrap(); assert_eq!(2017, n.as_usize().unwrap()); } #[test] fn condense_499() { let mut n = Roman::new(vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(499, n.as_usize().unwrap()); assert_eq!(&vec![C, D, X, C, I, X], n._inner()); } #[test] fn condense_viiii() { let mut n = Roman::new(vec![V, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![I, X], n._inner()); } #[test] fn 
condense_cccc() { let mut n = Roman::new(vec![C, C, C, C]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![C, D], n._inner()); } #[test] fn expand() { let mut n = Roman::new(vec![C, D, X, C, I, X]).unwrap(); n.expand().unwrap(); assert_eq!(&vec![C, C, C, C, L, X, X, X, X, V, I, I, I, I], n._inner()); } #[test] fn sixteen() { let mut n = Roman::new(vec![X, I, I, I, I, I, I]).unwrap(); n.condense().unwrap(); assert_eq!(&vec![X, V, I], n._inner()); } }
pub fn condense(&mut self) -> Result<()> { let stred = self.as_string().unwrap(); let res = stred .replace("IIIIII", "VI") .replace("VIIII", "IX") .replace("DCCCC", "CM") .replace("LXXXX", "XC") .replace("CCCC", "CD") .replace("XXXX", "XL") .replace("IIII", "IV"); self.0 = res.parse::<Roman>().chain_err(|| "After condensing")?.0; Ok(()) }
function_block-full_function
[ { "content": "#[derive(Debug, PartialEq, PartialOrd, Eq, Ord)]\n\nstruct Name<'a>(pub &'a str);\n\n\n\nimpl<'a> Name<'a> {\n\n pub fn value(&self) -> usize {\n\n self.0.chars().fold(0, |v, ch| v + (ch as u8 - 64) as usize )\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests{\n\n\n\n use super::*;\n...
Rust
src/main.rs
lang-import/rs-translate
60912ec66a6518a0436112b6825383d7bb90eb14
extern crate redis; extern crate clap; extern crate rouille; use rouille::{Request, Response}; use std::{process, str}; use clap::{Arg, App}; fn list_engines(command: &str) -> Vec<String> { let output = process::Command::new(command) .arg("-S") .output().expect("list of supported engines").stdout; str::from_utf8(&output) .unwrap() .split_whitespace() .map(|x| x.trim()) .filter(|x| !x.is_empty() && !x.starts_with("*")) .map(|x| String::from(x)) .collect() } fn translate_engine(command: &str, engine: &str, lang: &str, word: &str) -> Option<String> { let lang_arg: String = ":".to_string() + lang; let output = match process::Command::new(command) .arg("-e").arg(engine).arg("-b").arg(lang_arg).arg(word) .output() { Ok(v) => v.stdout, Err(_) => return None }; if output.is_empty() { return None; } match String::from_utf8(output) { Ok(v) => Some(v), Err(_) => None } } fn translate(command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> { for engine in engines { match translate_engine(&command, &engine.as_str(), &lang, &word) { Some(v) => return Some(v), None => {} }; } None } fn translate_cached(connection: &redis::Connection, command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> { let value = match redis::cmd("HGET").arg(lang).arg(word).query(connection) { Err(e) => { println!("failed access the cache: {}", e); translate(&command, &engines, &lang, &word) } Ok(value) => { match value { Some(v) => return v, None => translate(&command, &engines, &lang, &word) } } }; match value { Some(v) => { match redis::cmd("HSET").arg(lang).arg(word).arg(v.as_str()).query(connection) { Err(e) => println!("failed save to cache word {} for lang {}: {}", word, lang, e), Ok(()) => {} }; Some(v) } None => None } } fn main() { let matches = App::new("Translate API") .version("1.0") .author("Alexander Baryshnikov <dev@baryshnikov.net>") .about("exposes trans-shell to Web") .arg(Arg::with_name("binary") .short("b") .long("bin") .help("path to binary for 
translate-shell") .default_value("/usr/bin/trans") .takes_value(true)) .arg(Arg::with_name("redis") .short("r") .long("redis") .help("redis URL") .default_value("redis://127.0.0.1/") .takes_value(true)) .arg(Arg::with_name("address") .short("a") .long("address") .help("binding address") .default_value("127.0.0.1:8000") .takes_value(true)) .get_matches(); let binding_addr = matches.value_of("address").unwrap(); let command: String = matches.value_of("binary").unwrap().to_string(); let redis_url = matches.value_of("redis").unwrap(); let client = redis::Client::open(redis_url).expect("connect to redis"); let engines = list_engines(command.as_str()); for engine in &engines { println!("found engine {}", engine) } println!("started server on {}", binding_addr); rouille::start_server(binding_addr, move |request: &Request| { let u = request.url(); let segments: Vec<&str> = u.as_str().split("/").collect(); if segments.len() != 5 || !(segments[1] == "translate" && segments[3] == "to") { return Response::text("bad request").with_status_code(422); } let word = segments[2]; let lang = segments[4]; let connection = client.get_connection().expect("open connection to redis"); let ans = translate_cached(&connection, command.as_str(), &engines, lang, word).unwrap(); Response::text(ans) }); }
extern crate redis; extern crate clap; extern crate rouille; use rouille::{Request, Response}; use std::{process, str}; use clap::{Arg, App};
fn translate_engine(command: &str, engine: &str, lang: &str, word: &str) -> Option<String> { let lang_arg: String = ":".to_string() + lang; let output = match process::Command::new(command) .arg("-e").arg(engine).arg("-b").arg(lang_arg).arg(word) .output() { Ok(v) => v.stdout, Err(_) => return None }; if output.is_empty() { return None; } match String::from_utf8(output) { Ok(v) => Some(v), Err(_) => None } } fn translate(command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> { for engine in engines { match translate_engine(&command, &engine.as_str(), &lang, &word) { Some(v) => return Some(v), None => {} }; } None } fn translate_cached(connection: &redis::Connection, command: &str, engines: &[String], lang: &str, word: &str) -> Option<String> { let value = match redis::cmd("HGET").arg(lang).arg(word).query(connection) { Err(e) => { println!("failed access the cache: {}", e); translate(&command, &engines, &lang, &word) } Ok(value) => { match value { Some(v) => return v, None => translate(&command, &engines, &lang, &word) } } }; match value { Some(v) => { match redis::cmd("HSET").arg(lang).arg(word).arg(v.as_str()).query(connection) { Err(e) => println!("failed save to cache word {} for lang {}: {}", word, lang, e), Ok(()) => {} }; Some(v) } None => None } } fn main() { let matches = App::new("Translate API") .version("1.0") .author("Alexander Baryshnikov <dev@baryshnikov.net>") .about("exposes trans-shell to Web") .arg(Arg::with_name("binary") .short("b") .long("bin") .help("path to binary for translate-shell") .default_value("/usr/bin/trans") .takes_value(true)) .arg(Arg::with_name("redis") .short("r") .long("redis") .help("redis URL") .default_value("redis://127.0.0.1/") .takes_value(true)) .arg(Arg::with_name("address") .short("a") .long("address") .help("binding address") .default_value("127.0.0.1:8000") .takes_value(true)) .get_matches(); let binding_addr = matches.value_of("address").unwrap(); let command: String = 
matches.value_of("binary").unwrap().to_string(); let redis_url = matches.value_of("redis").unwrap(); let client = redis::Client::open(redis_url).expect("connect to redis"); let engines = list_engines(command.as_str()); for engine in &engines { println!("found engine {}", engine) } println!("started server on {}", binding_addr); rouille::start_server(binding_addr, move |request: &Request| { let u = request.url(); let segments: Vec<&str> = u.as_str().split("/").collect(); if segments.len() != 5 || !(segments[1] == "translate" && segments[3] == "to") { return Response::text("bad request").with_status_code(422); } let word = segments[2]; let lang = segments[4]; let connection = client.get_connection().expect("open connection to redis"); let ans = translate_cached(&connection, command.as_str(), &engines, lang, word).unwrap(); Response::text(ans) }); }
fn list_engines(command: &str) -> Vec<String> { let output = process::Command::new(command) .arg("-S") .output().expect("list of supported engines").stdout; str::from_utf8(&output) .unwrap() .split_whitespace() .map(|x| x.trim()) .filter(|x| !x.is_empty() && !x.starts_with("*")) .map(|x| String::from(x)) .collect() }
function_block-full_function
[]
Rust
src/usi/chiyuri.rs
muzudho/rust-kifuwarabe-wcsc30
e21fda4d648c8fa162ca3e59dd2d85dab6272fc5
use crate::entities::cosmic::playing::{Game, PosNums}; use crate::entities::cosmic::universe::Universe; use crate::entities::law::cryptographic::*; use crate::entities::law::usi::*; use crate::entities::spaceship::equipment::Beam; use crate::entities::spaceship::facility::{CommandRoom, GameRoom}; use crate::movegen::PseudoLegalMoves; use crate::position::Square; use crate::position::FILE_1; use crate::usi::Chiyuri; use crate::view::print_move_list; use rand::Rng; impl Chiyuri { pub fn do_(universe: &mut Universe, move_code: &str) { if read_move_code(&mut universe.game, move_code) { universe.game.history.decrease_moves_num(); let ply = universe.game.history.moves_num(); let move_ = universe.game.history.moves[ply as usize]; universe.game.do_move(move_); } } pub fn genmove(game: &Game) { let move_list = PseudoLegalMoves::generate(game.history.get_phase(), &game.position, true); print_move_list("genmove", &game.position, &move_list); } pub fn hash(universe: &Universe) { Beam::shoot("局面ハッシュ表示"); let s = universe.game.get_positions_hash_text(); Beam::shoot(&s); } pub fn how_much(tokens: &Vec<&str>) { let bestmove = tokens[1]; Beam::shoot(&format!("Debug | bestmove=|{}|", bestmove)); } pub fn record(universe: &Universe) { Beam::shoot("棋譜表示"); let s = universe.game.get_moves_history_debug_text(); Beam::shoot(&s); } /* TODO pub fn kiki(universe: &Universe) { // 利き数表示 let s = RestRoom::to_string(&universe.game, Phase::First); Beam::shoot(&s); let s = RestRoom::to_string(&universe.game, Phase::Second); Beam::shoot(&s); } */ pub fn list40(universe: &Universe) { Beam::shoot("----駒リスト40表示 ここから----"); universe .game .position .for_all_pieces_on_board(&mut |i, sq, pc_ex| { Beam::shoot(&format!( "[{}]{}{}", i, if let Some(sq) = sq { format!(" {:?}", sq) } else { " --".to_string() }, if let Some(piece_val) = pc_ex { format!(" {} {:?}", piece_val.piece, piece_val.num) } else { " --".to_string() } )); }); Beam::shoot("----駒リスト40表示 ここまで----"); } pub fn len0(universe: &mut Universe) { 
Beam::shoot("len==0"); if !&universe.dialogue_mode { universe.dialogue_mode = true; CommandRoom::print_title(); } else { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } } pub fn pos(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } pub fn pos0(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Start); Beam::shoot(&s); } pub fn rand() { Beam::shoot("3<len rand"); let secret_number = rand::thread_rng().gen_range(1..101); Beam::shoot(&format!("乱数={}", secret_number)); } pub fn same(universe: &Universe) { let count = universe.game.count_same_position(); Beam::shoot(&format!("同一局面調べ count={}", count)); } pub fn startpos(universe: &mut Universe) { let tokens: Vec<&str> = POS_1.split(' ').collect(); set_position(&mut universe.game, &tokens); } pub fn teigi_conv() { Beam::shoot("teigi::convのテスト"); for ms in 1..9 { for hash in 0..10 { let sq = Square::from(FILE_1, ms); let next = push_sq_to_hash(hash, sq); let (hash_orig, sq_orig) = pop_sq_from_hash(next); Beam::shoot( &format!("push_ms_to_hash(0b{:4b},0b{:5b})=0b{:11b} pop_sq_from_hash(...)=(0b{:4b},0b{:5b})" ,hash ,ms ,next ,hash_orig ,sq_orig.number() )); } } } pub fn undo(universe: &mut Universe) { if !universe.game.undo_move() { Beam::shoot(&format!( "ply={} を、これより戻せません", universe.game.history.moves_num() )); } } }
use crate::entities::cosmic::playing::{Game, PosNums}; use crate::entities::cosmic::universe::Universe; use crate::entities::law::cryptographic::*; use crate::entities::law::usi::*; use crate::entities::spaceship::equipment::Beam; use crate::entities::spaceship::facility::{CommandRoom, GameRoom}; use crate::movegen::PseudoLegalMoves; use crate::position::Square; use crate::position::FILE_1; use crate::usi::Chiyuri; use crate::view::print_move_list; use rand::Rng; impl Chiyuri { pub fn do_(universe: &mut Universe, move_code: &str) { if read_move_code(&mut universe.game, move_code) { universe.game.history.decrease_moves_num(); let ply = universe.game.history.moves_num(); let move_ = universe.game.history.moves[ply as usize]; universe.game.do_move(move_); } } pub fn genmove(game: &Game) { let move_list = PseudoLegalMoves::generate(game.history.get_phase(), &game.position, true); print_move_list("genmove", &game.position, &move_list); } pub fn hash(universe: &Universe) { Beam::shoot("局面ハッシュ表示"); let s = universe.game.get_positions_hash_text(); Beam::shoot(&s); } pub fn how_much(tokens: &Vec<&str>) { let bestmove = tokens[1]; Beam::shoot(&format!("Debug | bestmove=|{}|", bestmove)); } pub fn record(universe: &Universe) { Beam::shoot("棋譜表示"); let s = universe.game.get_moves_history_debug_text(); Beam::shoot(&s); } /* TODO pub fn kiki(universe: &Universe) { // 利き数表示 let s = RestRoom::to_string(&universe.game, Phase::First); Beam::shoot(&s); let s = RestRoom::to_string(&universe.game, Phase::Second); Beam::shoot(&s); } */ pub fn list40(universe: &Universe) { Beam::shoot("----駒リスト40表示 ここから----"); universe .game .position .for_all_pieces_on_board(&mut |i, sq, pc_ex| { Beam::shoot(&format!( "[{}]{}{}", i, if let Some(sq) = sq { format!(" {:?}", sq) } else { " --".to_string() }, if let Some(piece_val) = pc_ex { format!(" {} {:?}", piece_val.piece, piece_val.num) } else { " --".to_string() } )); }); Beam::shoot("----駒リスト40表示 ここまで----"); } pub fn len0(universe: &mut Universe) { 
Beam::shoot("len==0"); if !&universe.dialogue_mode { universe.dialogue_mode = true; CommandRoom::print_title(); } else { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } } pub fn pos(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Current); Beam::shoot(&s); } pub fn pos0(universe: &Universe) { let s = GameRoom::to_string(&universe.game, PosNums::Start); Beam::shoot(&s); } pub fn rand() { Beam::shoot("3<len rand"); let secret_number = rand::thread_rng().gen_range(1..101); Beam::shoot(&format!("乱数={}", secret_number)); } pub fn same(universe: &Universe) { let count = universe.game.count_same_position(); Beam::shoot(&format!("同一局面調べ count={}", count)); } pub fn startpos(universe: &mut Universe) { let tokens: Vec<&str> = POS_1.split(' ').collect(); set_position(&mut universe.game, &tokens); }
pub fn undo(universe: &mut Universe) { if !universe.game.undo_move() { Beam::shoot(&format!( "ply={} を、これより戻せません", universe.game.history.moves_num() )); } } }
pub fn teigi_conv() { Beam::shoot("teigi::convのテスト"); for ms in 1..9 { for hash in 0..10 { let sq = Square::from(FILE_1, ms); let next = push_sq_to_hash(hash, sq); let (hash_orig, sq_orig) = pop_sq_from_hash(next); Beam::shoot( &format!("push_ms_to_hash(0b{:4b},0b{:5b})=0b{:11b} pop_sq_from_hash(...)=(0b{:4b},0b{:5b})" ,hash ,ms ,next ,hash_orig ,sq_orig.number() )); } } }
function_block-full_function
[ { "content": "/// position コマンド読取\n\npub fn set_position(game: &mut Game, tokens: &Vec<&str>) {\n\n assert_eq!(tokens[0], \"position\");\n\n assert!(\n\n Regex::new(r\"[startpos|sfen]\").unwrap().is_match(tokens[1]),\n\n \"tokens1=[{}]\",\n\n tokens[1].to_string()\n\n );\n\n\n\n ...
Rust
rust/src/solutions/day14.rs
efrees/adventofcode2019
9267fdff07f0144d57b659744b83cc0c6b7ecb7b
use regex::Regex; use std::collections::HashMap; #[derive(Eq, PartialEq, Hash)] struct Chemical { count: u32, name: String, } pub fn solve() { println!("Day 14"); let raw_reactions = adventlib::read_input_lines("day14input.txt"); let reactions_list: Vec<_> = raw_reactions.iter().map(|m| parse_reaction(m)).collect(); let reactions: HashMap<_, _> = reactions_list.iter().map(|(g, v)| (&*g.name, v)).collect(); let mut reaction_output_count: HashMap<_, _> = reactions_list .iter() .map(|(g, _)| (&*g.name, g.count)) .collect(); let mut remnants_by_chemical: HashMap<_, u64> = reactions_list.iter().map(|(g, _)| (&*g.name, 0)).collect(); reaction_output_count.insert("FUEL", 1); let mut ore_required = compute_total_ore_requirements( "FUEL", 1, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); println!("ORE required for one FUEL (part 1): {}", ore_required); let ore_supply = 1_000_000_000_000_u64; let mut potential_fuel_min = ore_supply / ore_required as u64; let mut potential_fuel_max = potential_fuel_min * 2; let mut search_interval = potential_fuel_min / 2; while search_interval > 0 { let next_to_check = potential_fuel_min + search_interval; ore_required = compute_total_ore_requirements( "FUEL", next_to_check, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); if ore_required > ore_supply { potential_fuel_max = next_to_check - 1; } else { potential_fuel_min = next_to_check; } search_interval = (potential_fuel_max - potential_fuel_min) / 2; } println!("Total FUEL possible (part 2): {}", potential_fuel_min); } fn compute_total_ore_requirements( name: &str, amount: u64, reactions: &HashMap<&str, &Vec<Chemical>>, reaction_outputs: &HashMap<&str, u32>, remnant_totals: &mut HashMap<&str, u64>, ) -> u64 { if name == "ORE" { return amount; } let reaction = reactions .get(name) .expect(&format!("Missing reaction for {}", name)); let mut goal = amount; let mut remnant = remnant_totals.get(name).cloned().unwrap_or(0) as u64; goal -= 
std::cmp::min(remnant, amount); remnant -= std::cmp::min(remnant, amount); let mut requirement = 0; if goal > 0 { let recipe_output = reaction_outputs.get(&name).cloned().unwrap() as u64; let recipe_count = (goal + recipe_output - 1) / recipe_output; for input in reaction.iter() { requirement += compute_total_ore_requirements( &input.name, input.count as u64 * recipe_count, reactions, reaction_outputs, remnant_totals, ); remnant = recipe_output * recipe_count - goal; } } *remnant_totals.get_mut(name).unwrap() = remnant; return requirement; } fn parse_reaction(raw_reaction: &String) -> (Chemical, Vec<Chemical>) { let sides_of_equation: Vec<_> = raw_reaction.split(" => ").collect(); let result = parse_chemical(sides_of_equation[1]); let inputs: Vec<_> = sides_of_equation[0] .split(", ") .map(|raw| parse_chemical(raw)) .collect(); return (result, inputs); } fn parse_chemical(raw_chemical: &str) -> Chemical { lazy_static! { static ref PATTERN: Regex = Regex::new(r"\s*(\d+) (\w+)").expect("pattern for parsing"); } let captures = PATTERN .captures(raw_chemical) .expect("Line should match format"); return Chemical { count: captures[1] .parse() .expect("First part of chemical must be a number."), name: captures[2].to_string(), }; }
use regex::Regex; use std::collections::HashMap; #[derive(Eq, PartialEq, Hash)] struct Chemical { count: u32, name: String, } pub fn solve() { println!("Day 14"); let raw_reactions = adventlib::read_input_lines("day14input.txt"); let reactions_list: Vec<_> = raw_reactions.iter().map(|m| parse_reaction(m)).collect(); let reactions: HashMap<_, _> = reactions_list.iter().map(|(g, v)| (&*g.name, v)).collect(); let mut reaction_output_count: HashMap<_, _> = reactions_list .iter() .map(|(g, _)| (&*g.name, g.count)) .collect(); let mut remnants_by_chemical: HashMap<_, u64> = reactions_list.iter().map(|(g, _)| (&*g.name, 0)).collect(); reaction_output_count.insert("FUEL", 1); let mut ore_required = compute_total_ore_requirements( "FUEL", 1, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); println!("ORE required for one FUEL (part 1): {}", ore_required); let ore_supply = 1_000_000_000_000_u64; let mut potential_fuel_min = ore_supply / ore_required as u64; let mut potential_fuel_max = potential_fuel_min * 2; let mut search_interval = potential_fuel_min / 2; while search_interval > 0 { let next_to_check = potential_fuel_min + search_interval; ore_required = compute_total_ore_requirements( "FUEL", next_to_check, &reactions, &reaction_output_count, &mut remnants_by_chemical, ); if ore_required > ore_supply { potential_fuel_max = next_to_check - 1; } else { potential_fuel_min = next_to_check; } search_interval = (potential_fuel_max - potential_fuel_min) / 2; } println!("Total FUEL possible (part 2): {}", potential_fuel_min); } fn compute_total_ore_requirements( name: &str, amount: u64, reactions: &HashMap<&str, &Vec<Chemical>>, reaction_outputs: &HashMap<&str, u32>, remnant_totals: &mut HashMap<&str, u64>, ) -> u64 { if name == "ORE" { return amount; } let reaction = reactions .get(name) .expect(&format!("Missing reaction for {}", name)); let mut goal = amount; let mut remnant = remnant_totals.get(name).cloned().unwrap_or(0) as u64; goal -= 
std::cmp::min(remnant, amount); remnant -= std::cmp::min(remnant, amount); let mut requirement = 0; if goal > 0 { let recipe_output = reaction_outputs.get(&name).cloned().unwrap() as u64; let recipe_count = (goal + recipe_output - 1) / recipe_output; for input in reaction.iter() { requirement +=
; remnant = recipe_output * recipe_count - goal; } } *remnant_totals.get_mut(name).unwrap() = remnant; return requirement; } fn parse_reaction(raw_reaction: &String) -> (Chemical, Vec<Chemical>) { let sides_of_equation: Vec<_> = raw_reaction.split(" => ").collect(); let result = parse_chemical(sides_of_equation[1]); let inputs: Vec<_> = sides_of_equation[0] .split(", ") .map(|raw| parse_chemical(raw)) .collect(); return (result, inputs); } fn parse_chemical(raw_chemical: &str) -> Chemical { lazy_static! { static ref PATTERN: Regex = Regex::new(r"\s*(\d+) (\w+)").expect("pattern for parsing"); } let captures = PATTERN .captures(raw_chemical) .expect("Line should match format"); return Chemical { count: captures[1] .parse() .expect("First part of chemical must be a number."), name: captures[2].to_string(), }; }
compute_total_ore_requirements( &input.name, input.count as u64 * recipe_count, reactions, reaction_outputs, remnant_totals, )
call_expression
[ { "content": "pub fn read_input_raw(filename: &str) -> String {\n\n let filename = \"inputs/\".to_owned() + filename;\n\n let mut file = File::open(filename).expect(\"Could not find input file\");\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)\n\n .expect(\"Could not...
Rust
radogost/rlox/src/resolver.rs
stormasm/lox
ec1d179f6c478c40b7eb10304e40a19995ef09e6
use crate::error::{LoxError, Result}; use crate::statement::{Expr, ExprId, Stmt}; use std::collections::HashMap; #[derive(Copy, Clone, PartialEq, Eq)] enum FunctionType { None, Method, Function, Initializer, } #[derive(Copy, Clone, PartialEq, Eq)] enum ClassType { None, Class, SubClass, } pub type Depth = u64; struct Resolver<'a> { scopes: Vec<HashMap<&'a str, bool>>, expr_id_to_depth: HashMap<ExprId, Depth>, current_function: FunctionType, current_class: ClassType, } impl<'a> Resolver<'a> { fn new() -> Self { Self { scopes: Vec::new(), expr_id_to_depth: HashMap::new(), current_function: FunctionType::None, current_class: ClassType::None, } } fn resolve(&mut self, statements: &'a [Stmt]) -> Result<HashMap<ExprId, Depth>> { self.resolve_statements(statements)?; Ok(std::mem::take(&mut self.expr_id_to_depth)) } fn resolve_statements(&mut self, stmts: &'a [Stmt]) -> Result<()> { for stmt in stmts { self.resolve_statement(stmt)?; } Ok(()) } fn resolve_statement(&mut self, stmt: &'a Stmt) -> Result<()> { match stmt { Stmt::Block { statements } => { self.begin_scope(); self.resolve_statements(statements.as_ref())?; self.end_scope(); } Stmt::Var { name, initializer } => { self.declare(name); self.define(name); if let Some(initializer) = initializer { self.resolve_expression(initializer)?; } } Stmt::Function { name, parameters, body, } => { self.resolve_function(name, parameters, body, FunctionType::Function)?; } Stmt::Expression { expression } => { self.resolve_expression(expression)?; } Stmt::If { condition, then_branch, else_branch, } => { self.resolve_expression(condition)?; self.resolve_statement(then_branch.as_ref())?; if let Some(stmt) = else_branch { self.resolve_statement(stmt)?; } } Stmt::Print { expression } => self.resolve_expression(expression)?, Stmt::Return { value } => { if self.current_function == FunctionType::None { return Err(LoxError::ResolverError( "Cannot return from top-level code.", )); } if let Some(value) = value { if self.current_function == 
FunctionType::Initializer { return Err(LoxError::ResolverError( "Cannot return a value from an initializer.", )); } self.resolve_expression(value)?; } } Stmt::While { condition, body } => { self.resolve_expression(condition)?; self.resolve_statement(body)?; } Stmt::Class { name, superclass, methods, } => { let enclosing_class = self.current_class; self.current_class = ClassType::Class; self.declare(name); self.define(name); if let Some(superclass) = superclass { if let Expr::Variable { id: _, name: superclass_name, } = superclass.as_ref() { if name == superclass_name { return Err(LoxError::ResolverError( "A class cannot inherit from itself.", )); } } self.current_class = ClassType::SubClass; self.resolve_expression(superclass)?; self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("super", true)); } self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("this", true)); for method in methods.as_ref() { if let Stmt::Function { name, parameters, body, } = method { let function_type = if name == "init" { FunctionType::Initializer } else { FunctionType::Method }; self.resolve_function(name, parameters, body, function_type)?; } else { unreachable!() } } self.end_scope(); if superclass.is_some() { self.end_scope(); } self.current_class = enclosing_class; } }; Ok(()) } fn resolve_function( &mut self, name: &'a str, parameters: &'a Vec<String>, body: &'a [Stmt], function_type: FunctionType, ) -> Result<()> { self.declare(name); self.define(name); let enclosing_function = self.current_function; self.current_function = function_type; self.begin_scope(); for param in parameters { self.declare(&param); self.define(&param); } self.resolve_statements(body)?; self.end_scope(); self.current_function = enclosing_function; Ok(()) } fn resolve_expression(&mut self, expr: &'a Expr) -> Result<()> { match expr { Expr::Variable { id, name } => { if let Some(scope) = self.scopes.last() { if scope.get::<str>(name) == Some(&false) { return 
Err(LoxError::ResolverError( "Cannot read local variable in ints own initializer", )); } self.resolve_local(*id, name); } } Expr::This { id, keyword } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'this' outside of a class.", )); } self.resolve_local(*id, keyword); } Expr::Super { id, keyword, method: _, } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'super' outside of a class.", )); } if self.current_class != ClassType::SubClass { return Err(LoxError::ResolverError( "Cannot use 'super' in a class with no superclass.", )); } self.resolve_local(*id, keyword); } Expr::Assign { id, value, name } => { self.resolve_expression(value)?; self.resolve_local(*id, name); } Expr::Binary { left, token_type: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Call { callee, arguments } => { self.resolve_expression(callee)?; for arg in arguments.as_ref() { self.resolve_expression(arg)?; } } Expr::Get { object, name: _ } => { self.resolve_expression(object)?; } Expr::Set { object, name: _, value, } => { self.resolve_expression(object)?; self.resolve_expression(value)?; } Expr::Grouping { expression } => { self.resolve_expression(expression)?; } Expr::Logical { left, operator: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Unary { token_type: _, right, } => { self.resolve_expression(right)?; } Expr::Nil | Expr::Boolean(_) | Expr::Number(_) | Expr::String(_) => {} }; Ok(()) } fn begin_scope(&mut self) { self.scopes.push(HashMap::new()); } fn end_scope(&mut self) { self.scopes.pop(); } fn declare(&mut self, name: &'a str) { self.scopes .last_mut() .map(|scope| scope.insert(name, false)); } fn define(&mut self, name: &'a str) { self.scopes.last_mut().map(|scope| scope.insert(name, true)); } fn resolve_local(&mut self, expr_id: ExprId, name: &'a str) { self.scopes .iter_mut() .rev() .enumerate() .find(|(_, 
scope)| scope.contains_key(name)) .map(|(depth, _)| (expr_id, depth as u64)) .map(|(expr_id, depth)| self.expr_id_to_depth.insert(expr_id, depth)); } } pub fn resolve(statements: &[Stmt]) -> Result<HashMap<ExprId, Depth>> { let mut resolver = Resolver::new(); resolver.resolve(statements) } #[cfg(test)] mod tests { use super::{resolve, Depth}; use crate::error::{LoxError, Result}; use crate::lexer; use crate::parser; use crate::statement::ExprId; use std::collections::HashMap; fn scopes(source: &'static str) -> Result<HashMap<ExprId, Depth>> { let (tokens, lexer_errors) = lexer::lex(source); assert_eq!(lexer_errors.len(), 0); let (statements, parser_errors) = parser::parse(&tokens); assert_eq!(parser_errors.len(), 0); resolve(&statements) } #[test] fn invalid_return_statement() { let source = "return 42;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return from top-level code.") ); } #[test] fn valid_return_statement() { let source = r#" fun test() { return 42; } "#; let scopes = scopes(source); assert_eq!(scopes.is_ok(), true); } #[test] fn invalid_this() { let source = "var a = this;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'this' outside of a class.") ); } #[test] fn cannot_return_from_initializer() { let source = r#" class Foo { init() { return "invalid"; } } "#; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return a value from an initializer.") ); } #[test] fn cannot_use_super_outside_of_class() { let source = "super.foo();"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' outside of a class.") ); } #[test] fn cannot_use_super_in_non_subclass() { let source = r#" class Foo { foo() { super.foo(); } } "#; let 
scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' in a class with no superclass.") ); } }
use crate::error::{LoxError, Result}; use crate::statement::{Expr, ExprId, Stmt}; use std::collections::HashMap; #[derive(Copy, Clone, PartialEq, Eq)] enum FunctionType { None, Method, Function, Initializer, } #[derive(Copy, Clone, PartialEq, Eq)] enum ClassType { None, Class, SubClass, } pub type Depth = u64; struct Resolver<'a> { scopes: Vec<HashMap<&'a str, bool>>, expr_id_to_depth: HashMap<ExprId, Depth>, current_function: FunctionType, current_class: ClassType, } impl<'a> Resolver<'a> { fn new() -> Self { Self { scopes: Vec::new(), expr_id_to_depth: HashMap::new(), current_function: FunctionType::None, current_class: ClassType::None, } } fn resolve(&mut self, statements: &'a [Stmt]) -> Result<HashMap<ExprId, Depth>> { self.resolve_statements(statements)?; Ok(std::mem::take(&mut self.expr_id_to_depth)) } fn resolve_statements(&mut self, stmts: &'a [Stmt]) -> Result<()> { for stmt in stmts { self.resolve_statement(stmt)?; } Ok(()) } fn resolve_statement(&mut self, stmt: &'a Stmt) -> Result<()> { match stmt { Stmt::Block { statements } => { self.begin_scope(); self.resolve_statements(statements.as_ref())?; self.end_scope(); } Stmt::Var { name, initializer } => { self.declare(name); self.define(name); if let Some(initializer) = initializer { self.resolve_expression(initializer)?; } } Stmt::Function { name, parameters, body, } => { self.resolve_function(name, parameters, body, FunctionType::Function)?; } Stmt::Expression { expression } => { self.resolve_expression(expression)?; } Stmt::If { condition, then_branch, else_branch, } => { self.resolve_expression(condition)?; self.resolve_statement(then_branch.as_ref())?; if let Some(stmt) = else_branch { self.resolve_statement(stmt)?; } } Stmt::Print { expression } => self.resolve_expression(expression)?, Stmt::Return { value } => { if self.current_function == FunctionType::None { return Err(LoxError::ResolverError( "Cannot return from top-level code.", )); } if let Some(value) = value { if self.current_function == 
FunctionType::Initializer { return Err(LoxError::ResolverError( "Cannot return a value from an initializer.", )); } self.resolve_expression(value)?; } } Stmt::While { condition, body } => { self.resolve_expression(condition)?; self.resolve_statement(body)?; } Stmt::Class { name, superclass, methods, } => { let enclosing_class = self.current_class; self.current_class = ClassType::Class; self.declare(name); self.define(name); if let Some(superclass) = superclass { if let Expr::Variable { id: _, name: superclass_name, } = superclass.as_ref() { if name == superclass_name { return Err(LoxError::ResolverError( "A class cannot inherit from itself.", )); } } self.current_class = ClassType::SubClass; self.resolve_expression(superclass)?; self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("super", true)); } self.begin_scope(); self.scopes .last_mut() .map(|scope| scope.insert("this", true)); for method in methods.as_ref() { if let Stmt::Function { name, parameters, body, } = method { let function_type = if name == "init" { FunctionType::Initializer } else { FunctionType::Method }; self.resolve_function(name, parameters, body, function_type)?; } else { unreachable!() } } self.end_scope(); if superclass.is_some() { self.end_scope(); } self.current_class = enclosing_class; } }; Ok(()) } fn resolve_function( &mut self, name: &'a str, parameters: &'a Vec<String>, body: &'a [Stmt], function_type: FunctionType, ) -> Result<()> { self.declare(name); self.define(name); let enclosing_function = self.current_function; self.current_function = function_type; self.begin_scope(); for param in parameters { self.declare(&param); self.define(&param); } self.resolve_statements(body)?; self.end_scope(); self.current_function = enclosing_function; Ok(()) } fn resolve_expression(&mut self, expr: &'a Expr) -> Result<()> { match expr { Expr::Variable { id, name } => { if let Some(scope) = self.scopes.last() { if scope.get::<str>(name) == Some(&false) { return 
Err(LoxError::ResolverError( "Cannot read local variable in ints own initializer", )); } self.resolve_local(*id, name); } } Expr::This { id, keyword } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'this' outside of a class.", )); } self.resolve_local(*id, keyword); } Expr::Super { id, keyword, method: _, } => { if self.current_class == ClassType::None { return Err(LoxError::ResolverError( "Cannot use 'super' outside of a class.", )); } if self.current_class != ClassType::SubClass { return Err(LoxError::ResolverError( "Cannot use 'super' in a class with no superclass.", )); } self.resolve_local(*id, keyword); } Expr::Assign { id, value, name } => { self.resolve_expression(value)?; self.resolve_local(*id, name); } Expr::Binary { left, token_type: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Call { callee, arguments } => { self.resolve_expression(callee)?; for arg in arguments.as_ref() { self.resolve_expression(arg)?; } } Expr::Get { object, name: _ } => { self.resolve_expression(object)?; } Expr::Set { object, name: _, value, } => { self.resolve_expression(object)?; self.resolve_expression(value)?; } Expr::Grouping { expression } => { self.resolve_expression(expression)?; } Expr::Logical { left, operator: _, right, } => { self.resolve_expression(left)?; self.resolve_expression(right)?; } Expr::Unary { token_type: _, right, } => { self.resolve_expression(right)?; } Expr::Nil | Expr::Boolean(_) | Expr::Number(_) | Expr::String(_) => {} }; Ok(()) } fn begin_scope(&mut self) { self.scopes.push(HashMap::new()); } fn end_scope(&mut self) { self.scopes.pop(); } fn declare(&mut self, name: &'a str) { self.scopes .last_mut() .map(|scope| scope.insert(name, false)); } fn define(&mut self, name: &'a str) { self.scopes.last_mut().map(|scope| scope.insert(name, true)); } fn resolve_local(&mut self, expr_id: ExprId, name: &'a str) { self.scopes .iter_mut() .rev() .enumerate() .find(|(_, 
scope)| scope.contains_key(name)) .map(|(depth, _)| (expr_id, depth as u64)) .map(|(expr_id, depth)| self.expr_id_to_depth.insert(expr_id, depth)); } } pub fn resolve(statements: &[Stmt]) -> Result<HashMap<ExprId, Depth>> { let mut resolver = Resolver::new(); resolver.resolve(statements) } #[cfg(test)] mod tests { use super::{resolve, Depth}; use crate::error::{LoxError, Result}; use crate::lexer; use crate::parser; use crate::statement::ExprId; use std::collections::HashMap; fn scopes(source: &'static str) -> Result<HashMap<ExprId, Depth>> { let (tokens, lexer_errors) = lexer::lex(source); assert_eq!(lexer_errors.len(), 0); let (statements, parser_errors) = parser::parse(&tokens); assert_eq!(parser_errors.len(), 0); resolve(&statements) } #[test] fn invalid_return_statement() { let source = "return 42;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return from top-level code.") ); } #[test]
#[test] fn invalid_this() { let source = "var a = this;"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'this' outside of a class.") ); } #[test] fn cannot_return_from_initializer() { let source = r#" class Foo { init() { return "invalid"; } } "#; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot return a value from an initializer.") ); } #[test] fn cannot_use_super_outside_of_class() { let source = "super.foo();"; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' outside of a class.") ); } #[test] fn cannot_use_super_in_non_subclass() { let source = r#" class Foo { foo() { super.foo(); } } "#; let scopes = scopes(source); assert_eq!(scopes.is_err(), true); assert_eq!( scopes.unwrap_err(), LoxError::ResolverError("Cannot use 'super' in a class with no superclass.") ); } }
fn valid_return_statement() { let source = r#" fun test() { return 42; } "#; let scopes = scopes(source); assert_eq!(scopes.is_ok(), true); }
function_block-full_function
[ { "content": "#[derive(PartialEq, Clone, Copy)]\n\nenum ClassType {\n\n None,\n\n Class,\n\n Subclass,\n\n}\n\n\n\npub struct LexicalScopesResolver {\n\n // Note that this doesn't track globals at all\n\n scopes: Vec<FnvHashMap<Identifier, VariableDefinition>>,\n\n current_function: Option<Fun...
Rust
nakadion/src/handler/mod.rs
chridou/nakadion
86975ec81cd4b1d14a8ce26e8bc02f0772cb71c9
use std::fmt; use std::time::{Duration, Instant}; pub use bytes::Bytes; use futures::future::BoxFuture; pub type BatchHandlerFuture<'a> = BoxFuture<'a, BatchPostAction>; use crate::nakadi_types::{ event_type::EventTypeName, partition::PartitionId, subscription::{EventTypePartition, StreamId, SubscriptionCursor}, }; pub use crate::nakadi_types::Error; mod typed; pub use typed::*; #[derive(Debug)] #[non_exhaustive] pub struct BatchMeta<'a> { pub stream_id: StreamId, pub cursor: &'a SubscriptionCursor, pub frame_started_at: Instant, pub frame_completed_at: Instant, pub frame_id: usize, pub n_events: usize, } #[derive(Debug, Clone)] pub enum BatchPostAction { Commit(BatchStats), DoNotCommit(BatchStats), AbortStream(String), ShutDown(String), } impl BatchPostAction { pub fn commit_no_stats() -> Self { BatchPostAction::Commit(BatchStats::default()) } pub fn commit(t_deserialize: Duration) -> Self { BatchPostAction::Commit(BatchStats { t_deserialize: Some(t_deserialize), }) } pub fn do_not_commit_no_stats() -> Self { BatchPostAction::DoNotCommit(BatchStats::default()) } pub fn do_not_commit(t_deserialize: Duration) -> Self { BatchPostAction::DoNotCommit(BatchStats { t_deserialize: Some(t_deserialize), }) } } #[derive(Default, Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub struct BatchStats { pub t_deserialize: Option<Duration>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum InactivityAnswer { KeepMeAlive, KillMe, } impl InactivityAnswer { pub fn should_kill(self) -> bool { self == InactivityAnswer::KillMe } pub fn should_stay_alive(self) -> bool { self == InactivityAnswer::KeepMeAlive } } pub trait BatchHandler: Send { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a>; fn on_inactive( &mut self, _inactive_for: Duration, _last_activity: Instant, ) -> InactivityAnswer { InactivityAnswer::KeepMeAlive } } pub struct HandlerFn<F>(pub F); impl<F> BatchHandler for HandlerFn<F> where F: for<'a> FnMut(Bytes, BatchMeta<'a>) -> 
BatchHandlerFuture<'a> + Send, { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a> { (self.0)(events, meta) } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum HandlerAssignment { Unspecified, EventType(EventTypeName), EventTypePartition(EventTypePartition), } impl HandlerAssignment { pub fn event_type(&self) -> Option<&EventTypeName> { self.event_type_and_partition().0 } pub fn partition(&self) -> Option<&PartitionId> { self.event_type_and_partition().1 } pub fn event_type_and_partition(&self) -> (Option<&EventTypeName>, Option<&PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(&event_type), None), HandlerAssignment::EventTypePartition(ref etp) => { (Some(etp.event_type()), Some(etp.partition())) } } } pub fn into_event_type_and_partition(self) -> (Option<EventTypeName>, Option<PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(event_type), None), HandlerAssignment::EventTypePartition(etp) => { let (a, b) = etp.split(); (Some(a), Some(b)) } } } } impl fmt::Display for HandlerAssignment { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { HandlerAssignment::Unspecified => write!(f, "[unspecified]")?, HandlerAssignment::EventType(ref event_type) => { write!(f, "[event_type={}]", event_type)? 
} HandlerAssignment::EventTypePartition(ref event_type_partition) => write!( f, "[event_type={}, partition={}]", event_type_partition.event_type(), event_type_partition.partition() )?, } Ok(()) } } pub trait BatchHandlerFactory: Send + Sync + 'static { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>>; } impl<T> BatchHandlerFactory for T where T: for<'a> Fn(&'a HandlerAssignment) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> + Send + Sync + 'static, { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> { self(assignment) } }
use std::fmt; use std::time::{Duration, Instant}; pub use bytes::Bytes; use futures::future::BoxFuture; pub type BatchHandlerFuture<'a> = BoxFuture<'a, BatchPostAction>; use crate::nakadi_types::{ event_type::EventTypeName, partition::PartitionId, subscription::{EventTypePartition, StreamId, SubscriptionCursor}, }; pub use crate::nakadi_types::Error; mod typed; pub use typed::*; #[derive(Debug)] #[non_exhaustive] pub struct BatchMeta<'a> { pub stream_id: StreamId, pub cursor: &'a SubscriptionCursor, pub frame_started_at: Instant, pub frame_completed_at: Instant, pub frame_id: usize, pub n_events: usize, } #[derive(Debug, Clone)] pub enum BatchPostAction { Commit(BatchStats), DoNotCommit(BatchStats), AbortStream(String), ShutDown(String), } impl BatchPostAction { pub fn commit_no_stats() -> Self { BatchPostAction::Commit(BatchStats::default()) } pub fn commit(t_deserialize: Duration) -> Self { BatchPostAction::Commit(BatchStats { t_deserialize: Some(t_deserialize), }) } pub fn do_not_commit_no_stats() -> Self { BatchPostAction::DoNotCommit(BatchStats::default()) } pub fn do_not_commit(t_deserialize: Duration) -> Self { BatchPostAction::DoNotCommit(BatchStats { t_deserialize: Some(t_deserialize), }) } } #[derive(Default, Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub struct BatchStats { pub t_deserialize: Option<Duration>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum InactivityAnswer { KeepMeAlive, KillMe, } impl InactivityAnswer { pub fn should_kill(self) -> bool { self == InactivityAnswer::KillMe } pub fn should_stay_alive(self) -> bool { self == InactivityAnswer::KeepMeAlive } } pub trait BatchHandler: Send { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a>; fn on_inactive( &mut self, _inactive_for: Duration, _last_activity: Instant, ) -> InactivityAnswer { InactivityAnswer::KeepMeAlive } } pub struct HandlerFn<F>(pub F); impl<F> BatchHandler for HandlerFn<F> where F: for<'a> FnMut(Bytes, BatchMeta<'a>) -> 
BatchHandlerFuture<'a> + Send, { fn handle<'a>(&'a mut self, events: Bytes, meta: BatchMeta<'a>) -> BatchHandlerFuture<'a> { (self.0)(events, meta) } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum HandlerAssignment { Unspecified, EventType(EventTypeName), EventTypePartition(EventTypePartition), } impl HandlerAssignment { pub fn event_type(&self) -> Option<&EventTypeName> { self.event_type_and_partition().0 } pub fn partition(&self) -> Option<&PartitionId> { self.event_type_and_partition().1 } pub fn event_type_and_partition(&self) -> (Option<&EventTypeName>, Option<&PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(&event_type), None), HandlerAssignment::EventTypePartition(ref etp) => { (Some(etp.event_type()), Some(etp.partition())) } } } pub fn into_event_type_and_partition(self) -> (Option<EventTypeName>, Option<PartitionId>) { match self { HandlerAssignment::Unspecified => (None, None), HandlerAssignment::EventType(event_type) => (Some(event_type), None), HandlerAssignment::EventTypePartition(etp) => { let (a, b) = etp.split(); (Some(a), Some(b))
:Unspecified => write!(f, "[unspecified]")?, HandlerAssignment::EventType(ref event_type) => { write!(f, "[event_type={}]", event_type)? } HandlerAssignment::EventTypePartition(ref event_type_partition) => write!( f, "[event_type={}, partition={}]", event_type_partition.event_type(), event_type_partition.partition() )?, } Ok(()) } } pub trait BatchHandlerFactory: Send + Sync + 'static { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>>; } impl<T> BatchHandlerFactory for T where T: for<'a> Fn(&'a HandlerAssignment) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> + Send + Sync + 'static, { fn handler<'a>( &'a self, assignment: &'a HandlerAssignment, ) -> BoxFuture<'a, Result<Box<dyn BatchHandler>, Error>> { self(assignment) } }
} } } } impl fmt::Display for HandlerAssignment { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { HandlerAssignment:
random
[ { "content": "/// Something that has an event type and a partition\n\n///\n\n/// Must only return event types and partitions that belong together.\n\npub trait EventTypePartitionLike {\n\n fn event_type(&self) -> &EventTypeName;\n\n fn partition(&self) -> &PartitionId;\n\n}\n\n\n\n/// Represents event-typ...
Rust
src/clone_wait_signal_pass_wasm.rs
AliceOh/RUST_clone_cgroup_project
64d381f849c5581dd2133417d0b2c2459c5e7e27
use std::process::Command; use std::thread::sleep; use std::time::Duration; use libc::{_exit, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, STDOUT_FILENO}; use nix::sys::signal::{ sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1, }; use std::ffi::c_void; use nix::sched::{self, CloneFlags}; use nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus}; use nix::unistd::{getpid, getppid, Pid}; use interprocess::local_socket::{LocalSocketListener, LocalSocketStream}; use std::fs; use std::{ error::Error, io::{self, prelude::*, BufReader}, }; extern "C" fn handle_sigusr1(_: libc::c_int) { print_signal_safe("[clone child] Received Parent signal!\n"); } extern "C" fn handle_sigchld(_: libc::c_int) { print_signal_safe("[main] What a surprise! Got SIGCHLD!\n"); match waitpid(Pid::from_raw(-1), None) { Ok(_) => { print_signal_safe("[main] Child exited.\n"); print_signal_safe("[main] Bye Bye!\n"); exit_signal_safe(0); } Err(_) => { print_signal_safe("[main] waitpid() failed.\n"); exit_signal_safe(1); } } } fn child() -> isize { println!( "[clone child] Hello from child process with pid: {} and parent pid:{}", getpid(), getppid() ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigusr1), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } { panic!("[clone child] sigaction() failed: {}", err); }; println!("[clone child] Wait for signal from parent"); unsafe { pause(); } println!("[clone child] Signal was delivered - pause is over"); let conn = LocalSocketStream::connect("/tmp/example.sock"); let mut conn = match conn { Ok(f) => f, Err(_e) => return 1, }; conn.write_all("Hello from client!\n".as_bytes()) .expect("client write to socket failed"); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { println!("[child]: received from socket: {}, length is {} bytes", s, s.len()); } 
println!("[clone child] Try to allocate big array"); let _v = Box::new([0i32; 600]); println!("[clone child] Yeah, get my array memory successfully!"); Command::new("ip") .arg("link") .spawn() .expect("ip command failed to start"); 0 } fn main() -> Result<(), Box<dyn Error>> { Command::new("mkdir") .arg("-p") .arg("/sys/fs/cgroup/foo") .output() .expect("failed to execute process"); println!("[main] after mkdir"); const STACK_SIZE: usize = 1024 * 1024; let ref mut stack = [0; STACK_SIZE]; let flags = CloneFlags::CLONE_NEWUSER | CloneFlags::CLONE_NEWPID | CloneFlags::CLONE_NEWNET | CloneFlags::CLONE_NEWNS | CloneFlags::CLONE_NEWCGROUP; let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) .expect("Failed to spawn the child"); println!( "[main] I am the parent process with pid: {} and I cloned a child with PID {}.", getpid(), child_pid ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigchld), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } { panic!("[main] sigaction() failed: {}", err); }; let pid_string = (i32::from(child_pid)).to_string(); fs::write("/sys/fs/cgroup/foo/cgroup.procs", pid_string).expect("Unable to write file"); let data = fs::read_to_string("/sys/fs/cgroup/foo/cgroup.procs").expect("Unable to read file"); println!("[main] read cgroup.procs get {}", data); let wasm_bytes = std::fs::read("add.wasm")?; fn handle_error(connection: io::Result<LocalSocketStream>) -> LocalSocketStream { match connection { Ok(val) => val, Err(error) => { eprintln!("\n"); panic!("Incoming connection failed: {}", error); } } } let listener = LocalSocketListener::bind("/tmp/example.sock") .expect("failed to set up LocalSocketListener"); println!("[main] bind /tmp/example.sock, socket server listening for connections."); println!("SIGUSR1 child_pid.as_raw() = {}", child_pid.as_raw()); unsafe { kill(child_pid.as_raw(), SIGUSR1 as i32); } let mut conn = 
listener.incoming().next().map(handle_error).unwrap(); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { println!("[main]: received from socket: {}, length is {} bytes", s, s.len()); } println!("[main]: before writing to socket"); conn.write_all(&wasm_bytes).expect("failed in write to socket"); println!("[main] I'll be doing my own stuff..."); loop { println!("[main] Do my own stuff."); sleep(Duration::from_millis(1000)); } } fn print_signal_safe(s: &str) { unsafe { write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len()); } } fn exit_signal_safe(status: i32) { unsafe { _exit(status); } }
use std::process::Command; use std::thread::sleep; use std::time::Duration; use libc::{_exit, kill, pause, prctl, sigemptyset, sigset_t, sigwait, write, STDOUT_FILENO}; use nix::sys::signal::{ sigaction, SaFlags, SigAction, SigHandler, SigSet, Signal, SIGCHLD, SIGCONT, SIGSTOP, SIGUSR1, }; use std::ffi::c_void; use nix::sched::{self, CloneFlags}; use nix::sys::wait::{waitpid, WaitPidFlag, WaitStatus}; use nix::unistd::{getpid, getppid, Pid}; use interprocess::local_socket::{LocalSocketListener, LocalSocketStream}; use std::fs; use std::{ error::Error, io::{self, prelude::*, BufReader}, }; extern "C" fn handle_sigusr1(_: libc::c_int) { print_signal_safe("[clone child] Received Parent signal!\n"); } extern "C" fn handle_sigchld(_: libc::c_int) { print_signal_safe("[main] What a surprise! Got SIGCHLD!\n"); match waitpid(Pid::from_raw(-1), None) { Ok(_) => { print_signal_safe("[main] Child exited.\n"); print_signal_safe("[main] Bye Bye!\n"); exit_signal_safe(0); } Err(_) => { print_signal_safe("[main] waitpid() failed.\n"); exit_signal_safe(1); } } } fn child() -> isize { println!( "[clone child] Hello from child process with pid: {} and parent pid:{}", getpid(), getppid() ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigusr1), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGUSR1, &sig_action) } { panic!("[clone child] sigaction() failed: {}", err); }; println!("[clone child] Wait for signal from parent"); unsafe { pause(); } println!("[clone child] Signal was delivered - pause is over"); let conn = LocalSocketStream::connect("/tmp/example.sock"); let mut conn = match conn { Ok(f) => f, Err(_e) => return 1, }; conn.write_all("Hello from client!\n".as_bytes()) .expect("client write to socket failed"); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { p
fn main() -> Result<(), Box<dyn Error>> { Command::new("mkdir") .arg("-p") .arg("/sys/fs/cgroup/foo") .output() .expect("failed to execute process"); println!("[main] after mkdir"); const STACK_SIZE: usize = 1024 * 1024; let ref mut stack = [0; STACK_SIZE]; let flags = CloneFlags::CLONE_NEWUSER | CloneFlags::CLONE_NEWPID | CloneFlags::CLONE_NEWNET | CloneFlags::CLONE_NEWNS | CloneFlags::CLONE_NEWCGROUP; let child_pid = sched::clone(Box::new(child), stack, flags, Some(Signal::SIGCHLD as i32)) .expect("Failed to spawn the child"); println!( "[main] I am the parent process with pid: {} and I cloned a child with PID {}.", getpid(), child_pid ); let sig_action = SigAction::new( SigHandler::Handler(handle_sigchld), SaFlags::empty(), SigSet::empty(), ); if let Err(err) = unsafe { sigaction(SIGCHLD, &sig_action) } { panic!("[main] sigaction() failed: {}", err); }; let pid_string = (i32::from(child_pid)).to_string(); fs::write("/sys/fs/cgroup/foo/cgroup.procs", pid_string).expect("Unable to write file"); let data = fs::read_to_string("/sys/fs/cgroup/foo/cgroup.procs").expect("Unable to read file"); println!("[main] read cgroup.procs get {}", data); let wasm_bytes = std::fs::read("add.wasm")?; fn handle_error(connection: io::Result<LocalSocketStream>) -> LocalSocketStream { match connection { Ok(val) => val, Err(error) => { eprintln!("\n"); panic!("Incoming connection failed: {}", error); } } } let listener = LocalSocketListener::bind("/tmp/example.sock") .expect("failed to set up LocalSocketListener"); println!("[main] bind /tmp/example.sock, socket server listening for connections."); println!("SIGUSR1 child_pid.as_raw() = {}", child_pid.as_raw()); unsafe { kill(child_pid.as_raw(), SIGUSR1 as i32); } let mut conn = listener.incoming().next().map(handle_error).unwrap(); let mut buffer: Vec<u8> = Vec::new(); conn.read(&mut buffer).expect("read socket failed"); if let Ok(s) = String::from_utf8(buffer) { println!("[main]: received from socket: {}, length is {} bytes", s, 
s.len()); } println!("[main]: before writing to socket"); conn.write_all(&wasm_bytes).expect("failed in write to socket"); println!("[main] I'll be doing my own stuff..."); loop { println!("[main] Do my own stuff."); sleep(Duration::from_millis(1000)); } } fn print_signal_safe(s: &str) { unsafe { write(STDOUT_FILENO, s.as_ptr() as (*const c_void), s.len()); } } fn exit_signal_safe(status: i32) { unsafe { _exit(status); } }
rintln!("[child]: received from socket: {}, length is {} bytes", s, s.len()); } println!("[clone child] Try to allocate big array"); let _v = Box::new([0i32; 600]); println!("[clone child] Yeah, get my array memory successfully!"); Command::new("ip") .arg("link") .spawn() .expect("ip command failed to start"); 0 }
function_block-function_prefixed
[ { "content": "/// allocate an array and new a \"ip\" process\n\nfn child() -> isize {\n\n println!(\n\n \"[clone child] Hello from child process with pid: {} and parent pid:{}\",\n\n getpid(),\n\n getppid()\n\n );\n\n\n\n // set signal handler for pause\n\n let sig_action = SigA...
Rust
gaoya/src/minhash/string_index.rs
serega/gaoya
b0fd049bcb5aa4c8f865462c209b48cf4d78349b
use crate::minhash::min_hasher64::MinHasher64V1; use crate::minhash::{MinHasher, MinHashIndex}; use crate::text::whitespace_split; use rayon::prelude::*; use std::collections::HashMap; use std::fmt; use std::fmt::{Display, Formatter, write}; use std::fs::File; use std::io::{BufRead, BufReader, Error}; use std::io::{Read, Write}; use fnv::FnvBuildHasher; pub struct MinHashStringIndex { lsh_index: MinHashIndex<u64, u64>, min_hash: MinHasher64V1<FnvBuildHasher>, doc_map: HashMap<u64, String>, doc_id: u64, } impl Display for MinHashStringIndex { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "MinHashStringIndex {{ "); self.lsh_index.fmt(f); write!(f, " }} ") } } impl MinHashStringIndex { pub fn new(num_bands: usize, band_width: usize, jaccard_threshold: f64) -> Self { MinHashStringIndex { lsh_index: MinHashIndex::new(num_bands, band_width, jaccard_threshold), min_hash: MinHasher64V1::new(num_bands * band_width), doc_map: HashMap::new(), doc_id: 0, } } pub fn insert(&mut self, text: String) { let min_hashes = self.min_hash.create_signature(whitespace_split(text.as_str())); self.doc_id += 1; self.doc_map.insert(self.doc_id, text); self.lsh_index.insert(self.doc_id, min_hashes); } pub fn query(&self, text: &str) -> Vec<&String> { let min_hashes = self.min_hash.create_signature(whitespace_split(text)); let ids = self.lsh_index.query(&min_hashes); ids.iter().map(|id| self.doc_map.get(id).unwrap()).collect() } pub fn load_from_lines<R: Read>(&mut self, reader: &mut BufReader<R>) { for line_result in reader.lines() { match line_result { Ok(line) => self.insert(line), Err(e) => (), } } } pub fn load_from_file(&mut self, file_name: &str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); self.load_from_lines(&mut reader); let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn load_from_file_parallel(&mut self, file_name: 
&str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); let lines: Vec<(u64, String)> = reader .lines() .enumerate() .map(|v| (v.0 as u64 + self.doc_id, v.1.unwrap())) .collect(); let minhashes = lines .par_iter() .map(|line| { ( line.0, self.min_hash.create_signature(whitespace_split(&line.1)), ) }) .collect(); self.lsh_index.par_bulk_insert_pairs(minhashes); self.doc_id += lines.len() as u64; for line in lines { self.doc_map.insert(line.0, line.1); } let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn size(&self) -> usize { return self.doc_id as usize; } } #[cfg(test)] mod tests { use super::MinHashStringIndex; use std::io::{BufReader, Read, Write}; #[test] fn test_load_from_file() { let strings: Vec<String> = [ "locality sensitive hashing is a cool algorithm", "locality sensitive hashing is a great algorithm", "locality sensitive hashing is a awesome algorithm", "we all scream for ice cream", "we all scream for ice cream", "we all scream for ice cream sandwich", ] .iter() .map(|s| s.to_string()) .collect(); let mut file = Vec::new(); for bytes in strings.iter().map(|s| s.as_bytes()) { file.write_all(&bytes).unwrap(); file.write_all("\n".as_bytes()).unwrap(); } let mut lsh_index = MinHashStringIndex::new(42, 4, 0.5); lsh_index.load_from_lines(&mut BufReader::new(file.as_slice())); assert_eq!(6, lsh_index.size()); println!("{}", lsh_index); let result = lsh_index.query(&strings[0]); assert_eq!(result.len(), 3); assert!(result.contains(&(&strings[0]))); assert!(result.contains(&(&strings[1]))); assert!(result.contains(&(&strings[2]))); } }
use crate::minhash::min_hasher64::MinHasher64V1; use crate::minhash::{MinHasher, MinHashIndex}; use crate::text::whitespace_split; use rayon::prelude::*; use std::collections::HashMap; use std::fmt; use std::fmt::{Display, Formatter, write}; use std::fs::File; use std::io::{BufRead, BufReader, Error}; use std::io::{Read, Write}; use fnv::FnvBuildHasher; pub struct MinHashStringIndex { lsh_index: MinHashIndex<u64, u64>, min_hash: MinHasher64V1<FnvBuildHasher>, doc_map: HashMap<u64, String>, doc_id: u64, } impl Display for MinHashStringIndex { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "MinHashStringIndex {{ "); self.lsh_index.fmt(f); write!(f, " }} ") } } impl MinHashStringIndex { pub fn new(num_bands: usize, band_width: usize, jaccard_threshold: f64) -> Self { MinHashStringIndex { lsh_index: MinHashIndex::new(num_bands, band_width, jaccard_threshold), min_hash: MinHasher64V1::new(num_bands * band_width), doc_map: HashMap::new(), doc_id: 0, } } pub fn insert(&mut self, text: String) { let min_hashes = self.min_hash.create_signature(whitespace_split(text.as_str())); self.doc_id += 1; self.doc_map.insert(self.doc_id, text); self.lsh_index.insert(self.doc_id, min_hashes); } pub fn query(&self, text: &str) -> Vec<&String> { let min_hashes = self.min_hash.create_signature(whitespace_split(text)); let ids = self.lsh_index.query(&min_hashes); ids.iter().map(|id| self.doc_map.get(id).unwrap()).collect() } pub fn load_from_lines<R: Read>(&mut self, reader: &mut BufReader<R>) { for line_result in reader.lines() { match line_result { Ok(line) => self.insert(line), Err(e) => (), } } } pub fn load_from_file(&mut self, file_name: &str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); self.load_from_lines(&mut reader); let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn load_from_file_parallel(&mut self, file_name: 
&str) -> Result<usize, Error> { match File::open(file_name) { Ok(file) => { let current_size = self.size(); let mut reader: BufReader<File> = BufReader::new(file); let lines: Vec<(u64, String)> = reader .lines() .enumerate() .map(|v| (v.0 as u64 + self.doc_id, v.1.unwrap())) .collect(); let minhashes = lines .par_iter() .map(|line| { ( line.0, self.min_hash.create_signature(whitespace_split(&line.1)), ) }) .collect(); self.lsh_index.par_bulk_insert_pairs(minhashes); self.doc_id += lines.len() as u64; for line in lines { self.doc_map.insert(line.0, line.1); } let new_count = self.size() - current_size; Ok(new_count) } Err(e) => Err(e), } } pub fn size(&self) -> usize { return self.doc_id as usize; } } #[cfg(test)] mod tests { use super::MinHashStringIndex; use std::io::{BufReader, Read, Write}; #[test] fn test_load_from_file() {
let mut file = Vec::new(); for bytes in strings.iter().map(|s| s.as_bytes()) { file.write_all(&bytes).unwrap(); file.write_all("\n".as_bytes()).unwrap(); } let mut lsh_index = MinHashStringIndex::new(42, 4, 0.5); lsh_index.load_from_lines(&mut BufReader::new(file.as_slice())); assert_eq!(6, lsh_index.size()); println!("{}", lsh_index); let result = lsh_index.query(&strings[0]); assert_eq!(result.len(), 3); assert!(result.contains(&(&strings[0]))); assert!(result.contains(&(&strings[1]))); assert!(result.contains(&(&strings[2]))); } }
let strings: Vec<String> = [ "locality sensitive hashing is a cool algorithm", "locality sensitive hashing is a great algorithm", "locality sensitive hashing is a awesome algorithm", "we all scream for ice cream", "we all scream for ice cream", "we all scream for ice cream sandwich", ] .iter() .map(|s| s.to_string()) .collect();
assignment_statement
[ { "content": "pub fn shingle_text_range<'a>(text: &'a str, from: usize, to: usize) -> impl Iterator<Item = &'a str> {\n\n MultiShingles::new(text, from, to)\n\n}\n\n\n", "file_path": "gaoya/src/text/tokenizers.rs", "rank": 0, "score": 228558.83057087305 }, { "content": "pub fn shingle_tex...
Rust
fsb5/src/lib.rs
raftario/assetbundle
58ce22c0e0774c8c88471f3cdcad65569097a4b7
use byteorder::{LittleEndian, ReadBytesExt}; use std::{ collections::HashMap, convert::{TryFrom, TryInto}, io::{BufRead, Read, Seek, SeekFrom}, }; mod error; pub use error::Error; #[cfg(feature = "pcm")] mod pcm; #[derive(Debug, Copy, Clone)] pub enum SoundFormat { None, PCM8, PCM16, PCM24, PCM32, PCMFloat, GCADPCM, IMAADPCM, VAG, HEVAG, XMA, MPEG, CELT, AT9, XWMA, Vorbis, } impl SoundFormat { pub fn file_extension(self) -> &'static str { match self { SoundFormat::MPEG => "mp3", SoundFormat::Vorbis => "ogg", SoundFormat::PCM8 | SoundFormat::PCM16 | SoundFormat::PCM32 => "wav", _ => "bin", } } } impl TryFrom<u32> for SoundFormat { type Error = Error; fn try_from(value: u32) -> Result<Self, Self::Error> { match value { 0 => Ok(SoundFormat::None), 1 => Ok(SoundFormat::PCM8), 2 => Ok(SoundFormat::PCM16), 3 => Ok(SoundFormat::PCM24), 4 => Ok(SoundFormat::PCM32), 5 => Ok(SoundFormat::PCMFloat), 6 => Ok(SoundFormat::GCADPCM), 7 => Ok(SoundFormat::IMAADPCM), 8 => Ok(SoundFormat::VAG), 9 => Ok(SoundFormat::HEVAG), 10 => Ok(SoundFormat::XMA), 11 => Ok(SoundFormat::MPEG), 12 => Ok(SoundFormat::CELT), 13 => Ok(SoundFormat::AT9), 14 => Ok(SoundFormat::XWMA), 15 => Ok(SoundFormat::Vorbis), _ => Err(Error::SoundFormat(value)), } } } #[derive(Debug, Copy, Clone)] pub struct FSB5Header { pub id: [u8; 4], pub version: u32, pub num_samples: usize, pub sample_headers_size: usize, pub name_table_size: usize, pub data_size: usize, pub mode: SoundFormat, pub zero: [u8; 8], pub hash: [u8; 16], pub dummy: [u8; 8], pub unknown: u32, pub size: usize, } impl FSB5Header { fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, Error> { let mut id: [u8; 4] = [0; 4]; reader.read_exact(&mut id)?; let version = reader.read_u32::<LittleEndian>()?; let num_samples = reader.read_u32::<LittleEndian>()? as usize; let sample_headers_size = reader.read_u32::<LittleEndian>()? as usize; let name_table_size = reader.read_u32::<LittleEndian>()? as usize; let data_size = reader.read_u32::<LittleEndian>()? 
as usize; let mode = reader.read_u32::<LittleEndian>()?; let mut zero = [0; 8]; reader.read_exact(&mut zero)?; let mut hash = [0; 16]; reader.read_exact(&mut hash)?; let mut dummy = [0; 8]; reader.read_exact(&mut dummy)?; let unknown = match version { 0 => reader.read_u32::<LittleEndian>()?, _ => 0, }; let mode = mode.try_into()?; let size = reader.seek(SeekFrom::Current(0))? as usize; Ok(Self { id, version, num_samples, sample_headers_size, name_table_size, data_size, mode, zero, hash, dummy, unknown, size, }) } } #[derive(Debug, Clone)] pub struct Sample { pub name: String, pub frequency: u32, pub channels: u64, pub data_offset: usize, pub samples: usize, pub metadata: HashMap<u64, MetadataChunk>, pub data: Vec<u8>, } #[derive(Debug, Clone)] pub enum MetadataChunk { Channels(u8), Frequency(u32), Loop(u32, u32), XMASeek(Vec<u8>), DSPCOEFF(Vec<u8>), XWMAData(Vec<u8>), VorbisData { crc32: u32, unknown: Vec<u8> }, } impl MetadataChunk { fn read<R: Read>(reader: &mut R, chunk_size: usize, chunk_type: u64) -> Result<Self, Error> { match chunk_type { 1 => { let channels = reader.read_u8()?; Ok(MetadataChunk::Channels(channels)) } 2 => { let frequency = reader.read_u32::<LittleEndian>()?; Ok(MetadataChunk::Frequency(frequency)) } 3 => { let loop_tuple = ( reader.read_u32::<LittleEndian>()?, reader.read_u32::<LittleEndian>()?, ); Ok(MetadataChunk::Loop(loop_tuple.0, loop_tuple.1)) } 6 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XMASeek(data.to_vec())) } 7 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::DSPCOEFF(data.to_vec())) } 10 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XWMAData(data.to_vec())) } 11 => { let crc32 = reader.read_u32::<LittleEndian>()?; let mut unknown = vec![0; chunk_size]; reader.read_exact(&mut unknown)?; Ok(MetadataChunk::VorbisData { crc32, unknown: unknown.to_vec(), }) } _ => 
Err(Error::MetadataChunkType(chunk_type)), } } } fn bits(val: u64, start: u64, len: u64) -> u64 { let stop = start + len; let r = val & ((1 << stop) - 1); r >> start } #[derive(Debug, Clone)] pub struct FSB5 { pub header: FSB5Header, pub raw_size: usize, pub samples: Vec<Sample>, } impl FSB5 { pub fn read<R: BufRead + Seek>(mut reader: R) -> Result<Self, Error> { let mut magic = [0; 4]; reader.read_exact(&mut magic)?; if magic != *b"FSB5" { return Err(Error::MagicHeader(magic)); } reader.seek(SeekFrom::Start(0))?; let header = FSB5Header::read(&mut reader)?; let raw_size = header.size + header.sample_headers_size + header.name_table_size + header.data_size; let mut samples = Vec::with_capacity(header.num_samples); for i in 0..header.num_samples { let mut raw = reader.read_u64::<LittleEndian>()?; let mut next_chunk = bits(raw, 0, 1); let mut frequency = bits(raw, 1, 4) as u32; let channels = bits(raw, 1 + 4, 1) + 1; let data_offset = (bits(raw, 1 + 4 + 1, 28) * 16) as usize; let self_samples = bits(raw, 1 + 4 + 1 + 28, 30) as usize; let mut chunks = HashMap::new(); while next_chunk != 0 { raw = reader.read_u32::<LittleEndian>()? 
as u64; next_chunk = bits(raw, 0, 1); let chunk_size = bits(raw, 1, 24) as usize; let chunk_type = bits(raw, 1 + 24, 7); let chunk_data = match MetadataChunk::read(&mut reader, chunk_size, chunk_type) { Ok(cd) => cd, Err(e) => match e { Error::MetadataChunkType(_) => { eprintln!("{}", e); continue; } _ => return Err(e), }, }; chunks.insert(chunk_type, chunk_data); } if let Some(MetadataChunk::Frequency(f)) = chunks.get(&2) { frequency = *f; } else { frequency = match frequency { 1 => 8000, 2 => 11000, 3 => 11025, 4 => 16000, 5 => 22050, 6 => 24000, 7 => 32000, 8 => 44100, 9 => 48000, _ => { return Err(Error::Frequency(frequency)); } } } samples.push(Sample { name: format!("{}", i), frequency, channels, data_offset, samples: self_samples, metadata: chunks, data: Vec::new(), }); } if header.name_table_size > 0 { let nametable_start = reader.seek(SeekFrom::Current(0))? as usize; let mut samplename_offsets = vec![0; header.num_samples]; for i in samplename_offsets.iter_mut() { *i = reader.read_u32::<LittleEndian>()? 
as usize; } for (i, sample) in samples.iter_mut().enumerate() { reader.seek(SeekFrom::Start( (nametable_start + samplename_offsets[i]) as u64, ))?; let mut name = Vec::new(); reader.read_until(0, &mut name)?; sample.name = String::from_utf8(name).map_err(|_| Error::NameTable(i))?; } } reader.seek(SeekFrom::Start( (header.size + header.sample_headers_size + header.name_table_size) as u64, ))?; for i in 0..header.num_samples { let data_start = samples.get(i).unwrap().data_offset; let data_end = if i < header.num_samples - 1 { samples.get(i + 1).unwrap().data_offset } else { data_start + header.data_size }; let mut data = Vec::with_capacity(data_end - data_start); reader.read_exact(&mut data)?; samples.get_mut(i).unwrap().data = data; } Ok(Self { header, raw_size, samples, }) } pub fn rebuild(&self, sample: Sample) -> Result<Vec<u8>, Error> { match self.header.mode { SoundFormat::MPEG => Ok(sample.data.unwrap()), #[cfg(feature = "pcm")] SoundFormat::PCM8 => pcm::rebuild(sample, 1), #[cfg(feature = "pcm")] SoundFormat::PCM16 => pcm::rebuild(sample, 2), #[cfg(feature = "pcm")] SoundFormat::PCM32 => pcm::rebuild(sample, 4), _ => Err(Error::RebuildFormat(self.header.mode)), } } }
use byteorder::{LittleEndian, ReadBytesExt}; use std::{ collections::HashMap, convert::{TryFrom, TryInto}, io::{BufRead, Read, Seek, SeekFrom}, }; mod error; pub use error::Error; #[cfg(feature = "pcm")] mod pcm; #[derive(Debug, Copy, Clone)] pub enum SoundFormat { None, PCM8, PCM16, PCM24, PCM32, PCMFloat, GCADPCM, IMAADPCM, VAG, HEVAG, XMA, MPEG, CELT, AT9, XWMA, Vorbis, } impl SoundFormat {
} impl TryFrom<u32> for SoundFormat { type Error = Error; fn try_from(value: u32) -> Result<Self, Self::Error> { match value { 0 => Ok(SoundFormat::None), 1 => Ok(SoundFormat::PCM8), 2 => Ok(SoundFormat::PCM16), 3 => Ok(SoundFormat::PCM24), 4 => Ok(SoundFormat::PCM32), 5 => Ok(SoundFormat::PCMFloat), 6 => Ok(SoundFormat::GCADPCM), 7 => Ok(SoundFormat::IMAADPCM), 8 => Ok(SoundFormat::VAG), 9 => Ok(SoundFormat::HEVAG), 10 => Ok(SoundFormat::XMA), 11 => Ok(SoundFormat::MPEG), 12 => Ok(SoundFormat::CELT), 13 => Ok(SoundFormat::AT9), 14 => Ok(SoundFormat::XWMA), 15 => Ok(SoundFormat::Vorbis), _ => Err(Error::SoundFormat(value)), } } } #[derive(Debug, Copy, Clone)] pub struct FSB5Header { pub id: [u8; 4], pub version: u32, pub num_samples: usize, pub sample_headers_size: usize, pub name_table_size: usize, pub data_size: usize, pub mode: SoundFormat, pub zero: [u8; 8], pub hash: [u8; 16], pub dummy: [u8; 8], pub unknown: u32, pub size: usize, } impl FSB5Header { fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, Error> { let mut id: [u8; 4] = [0; 4]; reader.read_exact(&mut id)?; let version = reader.read_u32::<LittleEndian>()?; let num_samples = reader.read_u32::<LittleEndian>()? as usize; let sample_headers_size = reader.read_u32::<LittleEndian>()? as usize; let name_table_size = reader.read_u32::<LittleEndian>()? as usize; let data_size = reader.read_u32::<LittleEndian>()? as usize; let mode = reader.read_u32::<LittleEndian>()?; let mut zero = [0; 8]; reader.read_exact(&mut zero)?; let mut hash = [0; 16]; reader.read_exact(&mut hash)?; let mut dummy = [0; 8]; reader.read_exact(&mut dummy)?; let unknown = match version { 0 => reader.read_u32::<LittleEndian>()?, _ => 0, }; let mode = mode.try_into()?; let size = reader.seek(SeekFrom::Current(0))? 
as usize; Ok(Self { id, version, num_samples, sample_headers_size, name_table_size, data_size, mode, zero, hash, dummy, unknown, size, }) } } #[derive(Debug, Clone)] pub struct Sample { pub name: String, pub frequency: u32, pub channels: u64, pub data_offset: usize, pub samples: usize, pub metadata: HashMap<u64, MetadataChunk>, pub data: Vec<u8>, } #[derive(Debug, Clone)] pub enum MetadataChunk { Channels(u8), Frequency(u32), Loop(u32, u32), XMASeek(Vec<u8>), DSPCOEFF(Vec<u8>), XWMAData(Vec<u8>), VorbisData { crc32: u32, unknown: Vec<u8> }, } impl MetadataChunk { fn read<R: Read>(reader: &mut R, chunk_size: usize, chunk_type: u64) -> Result<Self, Error> { match chunk_type { 1 => { let channels = reader.read_u8()?; Ok(MetadataChunk::Channels(channels)) } 2 => { let frequency = reader.read_u32::<LittleEndian>()?; Ok(MetadataChunk::Frequency(frequency)) } 3 => { let loop_tuple = ( reader.read_u32::<LittleEndian>()?, reader.read_u32::<LittleEndian>()?, ); Ok(MetadataChunk::Loop(loop_tuple.0, loop_tuple.1)) } 6 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XMASeek(data.to_vec())) } 7 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::DSPCOEFF(data.to_vec())) } 10 => { let mut data = vec![0; chunk_size]; reader.read_exact(&mut data)?; Ok(MetadataChunk::XWMAData(data.to_vec())) } 11 => { let crc32 = reader.read_u32::<LittleEndian>()?; let mut unknown = vec![0; chunk_size]; reader.read_exact(&mut unknown)?; Ok(MetadataChunk::VorbisData { crc32, unknown: unknown.to_vec(), }) } _ => Err(Error::MetadataChunkType(chunk_type)), } } } fn bits(val: u64, start: u64, len: u64) -> u64 { let stop = start + len; let r = val & ((1 << stop) - 1); r >> start } #[derive(Debug, Clone)] pub struct FSB5 { pub header: FSB5Header, pub raw_size: usize, pub samples: Vec<Sample>, } impl FSB5 { pub fn read<R: BufRead + Seek>(mut reader: R) -> Result<Self, Error> { let mut magic = [0; 4]; reader.read_exact(&mut 
magic)?; if magic != *b"FSB5" { return Err(Error::MagicHeader(magic)); } reader.seek(SeekFrom::Start(0))?; let header = FSB5Header::read(&mut reader)?; let raw_size = header.size + header.sample_headers_size + header.name_table_size + header.data_size; let mut samples = Vec::with_capacity(header.num_samples); for i in 0..header.num_samples { let mut raw = reader.read_u64::<LittleEndian>()?; let mut next_chunk = bits(raw, 0, 1); let mut frequency = bits(raw, 1, 4) as u32; let channels = bits(raw, 1 + 4, 1) + 1; let data_offset = (bits(raw, 1 + 4 + 1, 28) * 16) as usize; let self_samples = bits(raw, 1 + 4 + 1 + 28, 30) as usize; let mut chunks = HashMap::new(); while next_chunk != 0 { raw = reader.read_u32::<LittleEndian>()? as u64; next_chunk = bits(raw, 0, 1); let chunk_size = bits(raw, 1, 24) as usize; let chunk_type = bits(raw, 1 + 24, 7); let chunk_data = match MetadataChunk::read(&mut reader, chunk_size, chunk_type) { Ok(cd) => cd, Err(e) => match e { Error::MetadataChunkType(_) => { eprintln!("{}", e); continue; } _ => return Err(e), }, }; chunks.insert(chunk_type, chunk_data); } if let Some(MetadataChunk::Frequency(f)) = chunks.get(&2) { frequency = *f; } else { frequency = match frequency { 1 => 8000, 2 => 11000, 3 => 11025, 4 => 16000, 5 => 22050, 6 => 24000, 7 => 32000, 8 => 44100, 9 => 48000, _ => { return Err(Error::Frequency(frequency)); } } } samples.push(Sample { name: format!("{}", i), frequency, channels, data_offset, samples: self_samples, metadata: chunks, data: Vec::new(), }); } if header.name_table_size > 0 { let nametable_start = reader.seek(SeekFrom::Current(0))? as usize; let mut samplename_offsets = vec![0; header.num_samples]; for i in samplename_offsets.iter_mut() { *i = reader.read_u32::<LittleEndian>()? 
as usize; } for (i, sample) in samples.iter_mut().enumerate() { reader.seek(SeekFrom::Start( (nametable_start + samplename_offsets[i]) as u64, ))?; let mut name = Vec::new(); reader.read_until(0, &mut name)?; sample.name = String::from_utf8(name).map_err(|_| Error::NameTable(i))?; } } reader.seek(SeekFrom::Start( (header.size + header.sample_headers_size + header.name_table_size) as u64, ))?; for i in 0..header.num_samples { let data_start = samples.get(i).unwrap().data_offset; let data_end = if i < header.num_samples - 1 { samples.get(i + 1).unwrap().data_offset } else { data_start + header.data_size }; let mut data = Vec::with_capacity(data_end - data_start); reader.read_exact(&mut data)?; samples.get_mut(i).unwrap().data = data; } Ok(Self { header, raw_size, samples, }) } pub fn rebuild(&self, sample: Sample) -> Result<Vec<u8>, Error> { match self.header.mode { SoundFormat::MPEG => Ok(sample.data.unwrap()), #[cfg(feature = "pcm")] SoundFormat::PCM8 => pcm::rebuild(sample, 1), #[cfg(feature = "pcm")] SoundFormat::PCM16 => pcm::rebuild(sample, 2), #[cfg(feature = "pcm")] SoundFormat::PCM32 => pcm::rebuild(sample, 4), _ => Err(Error::RebuildFormat(self.header.mode)), } } }
pub fn file_extension(self) -> &'static str { match self { SoundFormat::MPEG => "mp3", SoundFormat::Vorbis => "ogg", SoundFormat::PCM8 | SoundFormat::PCM16 | SoundFormat::PCM32 => "wav", _ => "bin", } }
function_block-full_function
[ { "content": "pub fn rebuild(sample: Sample, width: u16) -> Result<Vec<u8>, Error> {\n\n let data = &sample.data.unwrap()[..(sample.samples * width as usize)];\n\n let mut writer = BufWriter::new(Cursor::new(Vec::new()));\n\n\n\n let spec = WavSpec {\n\n channels: sample.channels as u16,\n\n ...
Rust
day18/src/main.rs
theonejb/advent-of-code-20
d0cbefd5ef88cae566df1260750c1c7d7a98a9ec
use std::path::Path; use std::fs::File; use std::io::{BufReader, BufRead}; use std::cmp::Ordering; mod tests; #[derive(Debug, PartialEq, Eq)] enum Operator { Add, Sub, Mul, Div, } impl Ord for Operator { fn cmp(&self, other: &Self) -> Ordering { match self { Operator::Add => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Sub => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Mul => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, Operator::Div => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, } } } impl PartialOrd for Operator { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Operator { pub fn apply(&self, op1: i64, op2: i64) -> i64 { match self { Operator::Add => op1 + op2, Operator::Sub => op1 - op2, Operator::Mul => op1 * op2, Operator::Div => op1 / op2 } } } #[derive(Debug, PartialEq)] enum Token { Operator(Operator), Operand(i64), ParenthesisGroup(Vec<Token>), ParenthesisedToken(String), } /* Returns the next token and the left over string */ fn next_token(input: &str) -> (Token, &str) { let input = input.trim(); if input.starts_with("(") { let input = &input[1..]; let mut parenthesised_group = String::new(); let mut num_parens = 1; for c in input.chars() { match c { ')' => { num_parens -= 1; if num_parens == 0 { break; } } '(' => { num_parens += 1; } _ => {} } parenthesised_group.push(c); } let left_over = input.strip_prefix(&parenthesised_group).unwrap(); let left_over = &left_over[1..]; let left_over = left_over.trim(); return (Token::ParenthesisedToken(parenthesised_group), 
left_over); } let split: Vec<&str> = input.splitn(2, " ").collect(); let token_str = split[0]; let left_over = if split.len() == 2 { split[1] } else { "" }; let token = match token_str { "+" => Token::Operator(Operator::Add), "-" => Token::Operator(Operator::Sub), "*" => Token::Operator(Operator::Mul), "/" => Token::Operator(Operator::Div), other => Token::Operand( other.parse::<i64>().unwrap() ) }; (token, left_over) } fn parse(input: &str) -> Vec<Token> { let mut input = input; let mut output = vec![]; loop { let (token, left_over) = next_token(input); match token { Token::ParenthesisedToken(new_input) => { output.push( Token::ParenthesisGroup(parse(&new_input)) ); } t => { output.push(t); } } if left_over.is_empty() { break; } input = left_over; } output } fn value_of(token: &Token, calculate: fn(&Vec<Token>) -> i64) -> i64 { match token { Token::Operand(v) => *v, Token::ParenthesisGroup(v) => calculate(v), _ => { panic!("This shouldn't happen."); } } } fn calculate(input: &Vec<Token>) -> i64 { let mut input_iter = input.iter(); let mut first_operand = value_of(input_iter.next().unwrap(), calculate); let mut operator = input_iter.next().unwrap(); let mut second_operand = value_of(input_iter.next().unwrap(), calculate); if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, second_operand); } for token in input_iter { match token { Token::Operator(_) => { operator = token; } _ => { if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, value_of(token, calculate)); } } } } first_operand } fn calculate2(input: &Vec<Token>) -> i64 { let mut output_stack: Vec<&Token> = vec![]; let mut operator_stack: Vec<&Token> = vec![]; for token in input.iter() { match token { Token::Operator(op) => { while !operator_stack.is_empty() { let other_op_token = *operator_stack.last().unwrap(); if let Token::Operator(other_op) = other_op_token { if other_op > op { output_stack.push( operator_stack.pop().unwrap() ); } else { break; } } } 
operator_stack.push(token); } _ => { output_stack.push(token); } } } while !operator_stack.is_empty() { output_stack.push( operator_stack.pop().unwrap() ); } let mut operand_stack: Vec<i64> = vec![]; for token in output_stack { match token { Token::Operator(op) => { let op1 = operand_stack.pop().unwrap(); let op2 = operand_stack.pop().unwrap(); operand_stack.push( op.apply(op1, op2) ); }, _ => { operand_stack.push( value_of(token, calculate2) ); } } } operand_stack.pop().unwrap() } fn get_input(filename: &str) -> Vec<String> { let p = Path::new(filename); let f = File::open(p).unwrap(); let lines = BufReader::new(f).lines(); let mut input = vec![]; for l in lines { input.push(l.unwrap()); } input } fn main() { let input = get_input("input.txt"); let mut sum = 0; for expression in input.iter() { let parsed_expression = parse(expression.as_str()); sum += calculate(&parsed_expression); } println!("Part 1: {}", sum); let mut sum = 0; for expression in input.iter() { let parsed_expression = parse(expression.as_str()); sum += calculate2(&parsed_expression); } println!("Part 2: {}", sum); }
use std::path::Path; use std::fs::File; use std::io::{BufReader, BufRead}; use std::cmp::Ordering; mod tests; #[derive(Debug, PartialEq, Eq)] enum Operator { Add, Sub, Mul, Div, } impl Ord for Operator { fn cmp(&self, other: &Self) -> Ordering { match self { Operator::Add => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Sub => match other { Operator::Mul => Ordering::Greater, Operator::Div => Ordering::Greater, Operator::Add => Ordering::Equal, Operator::Sub => Ordering::Equal }, Operator::Mul => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, Operator::Div => match other { Operator::Mul => Ordering::Equal, Operator::Div => Ordering::Equal, Operator::Add => Ordering::Less, Operator::Sub => Ordering::Less }, } } } impl PartialOrd for Operator { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Operator { pub fn apply(&self, op1: i64, op2: i64) -> i64 { match self { Operator::Add => op1 + op2, Operator::Sub => op1 - op2, Operator::Mul => op1 * op2, Operator::Div => op1 / op2 } } } #[derive(Debug, PartialEq)] enum Token { Operator(Operator), Operand(i64), ParenthesisGroup(Vec<Token>), ParenthesisedToken(String), } /* Returns the next token and the left over string */ fn next_token(input: &str) -> (Token, &str) { let input = input.trim(); if input.starts_with("(") { let input = &input[1..]; let mut parenthesised_group = String::new(); let mut num_parens = 1; for c in input.chars() { match c { ')' => { num_parens -= 1; if num_parens == 0 { break; }
r() { let parsed_expression = parse(expression.as_str()); sum += calculate2(&parsed_expression); } println!("Part 2: {}", sum); }
} '(' => { num_parens += 1; } _ => {} } parenthesised_group.push(c); } let left_over = input.strip_prefix(&parenthesised_group).unwrap(); let left_over = &left_over[1..]; let left_over = left_over.trim(); return (Token::ParenthesisedToken(parenthesised_group), left_over); } let split: Vec<&str> = input.splitn(2, " ").collect(); let token_str = split[0]; let left_over = if split.len() == 2 { split[1] } else { "" }; let token = match token_str { "+" => Token::Operator(Operator::Add), "-" => Token::Operator(Operator::Sub), "*" => Token::Operator(Operator::Mul), "/" => Token::Operator(Operator::Div), other => Token::Operand( other.parse::<i64>().unwrap() ) }; (token, left_over) } fn parse(input: &str) -> Vec<Token> { let mut input = input; let mut output = vec![]; loop { let (token, left_over) = next_token(input); match token { Token::ParenthesisedToken(new_input) => { output.push( Token::ParenthesisGroup(parse(&new_input)) ); } t => { output.push(t); } } if left_over.is_empty() { break; } input = left_over; } output } fn value_of(token: &Token, calculate: fn(&Vec<Token>) -> i64) -> i64 { match token { Token::Operand(v) => *v, Token::ParenthesisGroup(v) => calculate(v), _ => { panic!("This shouldn't happen."); } } } fn calculate(input: &Vec<Token>) -> i64 { let mut input_iter = input.iter(); let mut first_operand = value_of(input_iter.next().unwrap(), calculate); let mut operator = input_iter.next().unwrap(); let mut second_operand = value_of(input_iter.next().unwrap(), calculate); if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, second_operand); } for token in input_iter { match token { Token::Operator(_) => { operator = token; } _ => { if let Token::Operator(op) = operator { first_operand = op.apply(first_operand, value_of(token, calculate)); } } } } first_operand } fn calculate2(input: &Vec<Token>) -> i64 { let mut output_stack: Vec<&Token> = vec![]; let mut operator_stack: Vec<&Token> = vec![]; for token in input.iter() { match token 
{ Token::Operator(op) => { while !operator_stack.is_empty() { let other_op_token = *operator_stack.last().unwrap(); if let Token::Operator(other_op) = other_op_token { if other_op > op { output_stack.push( operator_stack.pop().unwrap() ); } else { break; } } } operator_stack.push(token); } _ => { output_stack.push(token); } } } while !operator_stack.is_empty() { output_stack.push( operator_stack.pop().unwrap() ); } let mut operand_stack: Vec<i64> = vec![]; for token in output_stack { match token { Token::Operator(op) => { let op1 = operand_stack.pop().unwrap(); let op2 = operand_stack.pop().unwrap(); operand_stack.push( op.apply(op1, op2) ); }, _ => { operand_stack.push( value_of(token, calculate2) ); } } } operand_stack.pop().unwrap() } fn get_input(filename: &str) -> Vec<String> { let p = Path::new(filename); let f = File::open(p).unwrap(); let lines = BufReader::new(f).lines(); let mut input = vec![]; for l in lines { input.push(l.unwrap()); } input } fn main() { let input = get_input("input.txt"); let mut sum = 0; for expression in input.iter() { let parsed_expression = parse(expression.as_str()); sum += calculate(&parsed_expression); } println!("Part 1: {}", sum); let mut sum = 0; for expression in input.ite
random
[ { "content": "fn get_input(rules_filename: &str, data_filename: &str) -> (Vec<String>, Vec<String>) {\n\n let f = File::open(Path::new(rules_filename)).unwrap();\n\n let mut rules = vec![];\n\n for line in BufReader::new(f).lines() {\n\n rules.push(line.unwrap());\n\n }\n\n\n\n let f = Fil...
Rust
src/link/classify.rs
CollinValley/hivemind
89c6e4214fb88d27404f7a48ac5453be807cb21e
use crate::link::utils::task_park::*; use crate::Classifier; use crate::{link::QueueStream, HStream, Link}; use crossbeam::atomic::AtomicCell; use crossbeam::crossbeam_channel; use crossbeam::crossbeam_channel::{Receiver, Sender}; use futures::prelude::*; use futures::ready; use futures::task::{Context, Poll}; use std::marker::PhantomData; use std::pin::Pin; use std::sync::Arc; use tokio::stream::Stream; pub(crate) struct DoClassify<C: Classifier + Send + 'static> { phantom: PhantomData<C>, } impl<C: Classifier + Send + 'static> DoClassify<C> { pub(crate) fn do_classify( input: HStream<C::Packet>, mut classifier: C, cap: Option<usize>, ) -> Link<C::Packet> { let mut senders: Vec<Sender<Option<C::Packet>>> = Vec::new(); let mut receivers: Vec<Receiver<Option<C::Packet>>> = Vec::new(); let mut streams: Vec<HStream<C::Packet>> = Vec::new(); let mut task_parks: Vec<Arc<AtomicCell<TaskParkState>>> = Vec::new(); for _ in 0..classifier.num_ports() { let (sender, receiver) = match cap { None => crossbeam_channel::unbounded::<Option<C::Packet>>(), Some(capacity) => crossbeam_channel::bounded::<Option<C::Packet>>(capacity), }; let task_park = Arc::new(AtomicCell::new(TaskParkState::Empty)); let stream = QueueStream::new(receiver.clone(), Arc::clone(&task_park)); senders.push(sender); streams.push(Box::new(stream)); receivers.push(receiver); task_parks.push(task_park); } let runnable = ClassifyRunnable::new(input, senders, classifier, task_parks); Link::new(vec![Box::new(runnable)], streams) } } pub(crate) struct ClassifyRunnable<C: Classifier> { input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, } impl<C: Classifier> Unpin for ClassifyRunnable<C> {} impl<C: Classifier> ClassifyRunnable<C> { fn new( input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, ) -> Self { ClassifyRunnable { input_stream, 
to_egressors, classifier, task_parks, } } } impl<C: Classifier> Future for ClassifyRunnable<C> { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { let ingressor = Pin::into_inner(self); loop { for (port, to_egressor) in ingressor.to_egressors.iter().enumerate() { if to_egressor.is_full() { park_and_wake(&ingressor.task_parks[port], cx.waker().clone()); return Poll::Pending; } } let packet_option: Option<C::Packet> = ready!(Pin::new(&mut ingressor.input_stream).poll_next(cx)); match packet_option { None => { for to_egressor in ingressor.to_egressors.iter() { to_egressor .try_send(None) .expect("ClassifyIngressor::Drop: try_send to_egressor shouldn't fail"); } for task_park in ingressor.task_parks.iter() { die_and_wake(&task_park); } return Poll::Ready(()); } Some(packet) => { if let Some(port) = ingressor.classifier.classify(&packet) { if port >= ingressor.to_egressors.len() { panic!("Classifier used port outside of its listed range: Port {}, NumOutputs{}", port, ingressor.classifier.num_ports(), ); } if let Err(err) = ingressor.to_egressors[port].try_send(Some(packet)) { panic!( "Error in to_egressors[{}] sender, have nowhere to put packet: {:?}", port, err ); } unpark_and_wake(&ingressor.task_parks[port]); } } } } } } #[cfg(test)] mod tests { use crate::utils::test::classifier::{even_link, fizz_buzz_link}; use crate::utils::test::harness::{initialize_runtime, test_link}; use crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator}; use crate::Link; use core::time; #[test] fn even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_wait_between_packets() { let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 
6, 7, 8, 9]; let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = PacketIntervalGenerator::new(time::Duration::from_millis(10), packets.into_iter()); test_link(even_link(Box::new(packet_generator)), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn only_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![1, 1337, 3, 5, 7, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], []); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_long_stream() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..2000); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0].len(), 1000); assert_eq!(results[1].len(), 1000); } #[test] fn fizz_buzz() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); test_link(fizz_buzz_link(packet_generator), None).await }); let expected_fizz_buzz = vec![0, 15, 30]; assert_eq!(results[0], expected_fizz_buzz); let expected_fizz = vec![3, 6, 9, 12, 18, 21, 24, 27]; assert_eq!(results[1], expected_fizz); let expected_buzz = vec![5, 10, 20, 25]; assert_eq!(results[2], expected_buzz); let expected_other = vec![1, 2, 4, 7, 8, 11, 13, 14, 16, 17, 19, 22, 23, 26, 28, 29]; assert_eq!(results[3], expected_other); } #[test] fn fizz_buzz_to_even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); let (mut fb_runnables, mut fb_egressors) = fizz_buzz_link(packet_generator).take(); let (mut eo_runnables, eo_egressors) = even_link(fb_egressors.pop().unwrap()).take(); fb_runnables.append(&mut eo_runnables); test_link(Link::new(fb_runnables, eo_egressors), 
None).await }); assert_eq!(results[0], vec![2, 4, 8, 14, 16, 22, 26, 28]); assert_eq!(results[1], vec![1, 7, 11, 13, 17, 19, 23, 29]); } }
use crate::link::utils::task_park::*; use crate::Classifier; use crate::{link::QueueStream, HStream, Link}; use crossbeam::atomic::AtomicCell; use crossbeam::crossbeam_channel; use crossbeam::crossbeam_channel::{Receiver, Sender}; use futures::prelude::*; use futures::ready; use futures::task::{Context, Poll}; use std::marker::PhantomData; use std::pin::Pin; use std::sync::Arc; use tokio::stream::Stream; pub(crate) struct DoClassify<C: Classifier + Send + 'static> { phantom: PhantomData<C>, } impl<C: Classifier + Send + 'static> DoClassify<C> { pub(crate) fn do_classify( input: HStream<C::Packet>, mut classifier: C, cap: Option<usize>, ) -> Link<C::Packet> { let mut senders: Vec<Sender<Option<C::Packet>>> = Vec::new(); let mut receivers: Vec<Receiver<Option<C::Packet>>> = Vec::new(); let mut streams: Vec<HStream<C::Packet>> = Vec::new(); let mut task_parks: Vec<Arc<AtomicCell<TaskParkState>>> = Vec::new(); for _ in 0..classifier.num_ports() { let (sender, receiver) = match cap { None => crossbeam_channel::unbounded::<Option<C::Packet>>(), Some(capacity) => crossbeam_channel::bounded::<Option<C::Packet>>(capacity), }; let task_park = Arc::new(AtomicCell::new(TaskParkState::Empty)); let stream = QueueStream::new(receiver.clone(), Arc::clone(&task_park)); senders.push(sender); streams.push(Box::new(stream)); receivers.push(receiver); task_parks.push(task_park); } let runnable = ClassifyRunnable::new(input, senders, classifier, task_parks); Link::new(vec![Box::new(runnable)], streams) } } pub(crate) struct ClassifyRunnable<C: Classifier> { input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, } impl<C: Classifier> Unpin for ClassifyRunnable<C> {} impl<C: Classifier> ClassifyRunnable<C> { fn new( input_stream: HStream<C::Packet>, to_egressors: Vec<Sender<Option<C::Packet>>>, classifier: C, task_parks: Vec<Arc<AtomicCell<TaskParkState>>>, ) -> Self { ClassifyRunnable { input_stream, 
to_egressors, classifier, task_parks, } } } impl<C: Classifier> Future for ClassifyRunnable<C> { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { let ingressor = Pin::into_inner(self); loop { for (port, to_egressor) in ingressor.to_egressors.iter().enumerate() { if to_egressor.is_full() { park_and_wake(&ingressor.task_parks[port], cx.waker().clone()); return Poll::Pending; } } let packet_option: Option<C::Packet> = ready!(Pin::new(&mut ingressor.input_stream).poll_next(cx)); match packet_option { None => { for to_egressor in ingressor.to_egressors.iter() { to_egressor .try_send(None) .expect("ClassifyIngressor::Drop: try_send to_egressor shouldn't fail"); } for task_park in ingressor.task_parks.iter() { die_and_wake(&task_park); } return Poll::Ready(()); } Some(packet) => { if let Some(port) = ingressor.classifier.classify(&packet) { if port >= ingressor.to_egressors.len() { panic!("Classifier used port outside of its listed range: Port {}, NumOutputs{}", port, ingressor.classifier.num_ports(), ); } if let Err(err) = ingressor.to_egressors[port].try_send(Some(packet)) { panic!( "Error in to_egressors[{}] sender, have nowhere to put packet: {:?}", port, err ); } unpark_and_wake(&ingressor.task_parks[port]); } } } } } } #[cfg(test)] mod tests { use crate::utils::test::classifier::{even_link, fizz_buzz_link}; use crate::utils::test::harness::{initialize_runtime, test_link}; use crate::utils::test::packet_generators::{immediate_stream, PacketIntervalGenerator}; use crate::Link; use core::time; #[test] fn even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![0, 1, 2, 420, 1337, 3, 4, 5, 6, 7, 8, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_wait_between_packets() { let packets = vec![0, 1, 2, 420, 1337, 3, 4, 5, 
6, 7, 8, 9]; let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = PacketIntervalGenerator::new(time::Duration::from_millis(10), packets.into_iter());
} #[test] fn fizz_buzz_to_even_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); let (mut fb_runnables, mut fb_egressors) = fizz_buzz_link(packet_generator).take(); let (mut eo_runnables, eo_egressors) = even_link(fb_egressors.pop().unwrap()).take(); fb_runnables.append(&mut eo_runnables); test_link(Link::new(fb_runnables, eo_egressors), None).await }); assert_eq!(results[0], vec![2, 4, 8, 14, 16, 22, 26, 28]); assert_eq!(results[1], vec![1, 7, 11, 13, 17, 19, 23, 29]); } }
test_link(even_link(Box::new(packet_generator)), None).await }); assert_eq!(results[0], vec![0, 2, 420, 4, 6, 8]); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn only_odd() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(vec![1, 1337, 3, 5, 7, 9]); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0], []); assert_eq!(results[1], vec![1, 1337, 3, 5, 7, 9]); } #[test] fn even_odd_long_stream() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..2000); test_link(even_link(packet_generator), None).await }); assert_eq!(results[0].len(), 1000); assert_eq!(results[1].len(), 1000); } #[test] fn fizz_buzz() { let mut runtime = initialize_runtime(); let results = runtime.block_on(async { let packet_generator = immediate_stream(0..=30); test_link(fizz_buzz_link(packet_generator), None).await }); let expected_fizz_buzz = vec![0, 15, 30]; assert_eq!(results[0], expected_fizz_buzz); let expected_fizz = vec![3, 6, 9, 12, 18, 21, 24, 27]; assert_eq!(results[1], expected_fizz); let expected_buzz = vec![5, 10, 20, 25]; assert_eq!(results[2], expected_buzz); let expected_other = vec![1, 2, 4, 7, 8, 11, 13, 14, 16, 17, 19, 22, 23, 26, 28, 29]; assert_eq!(results[3], expected_other);
random
[ { "content": "pub fn fizz_buzz_link(stream: HStream<i32>) -> Link<i32> {\n\n DoClassify::do_classify(stream, FizzBuzz::new(), None)\n\n}\n", "file_path": "src/utils/test/classifier/fizz_buzz.rs", "rank": 0, "score": 165602.4877498499 }, { "content": "pub trait ClassifyFn<C: Classifier + C...
Rust
tests/end_to_end_cases/read_cli.rs
decadevvv/influxdb_iox
da440d8d277147e92c92fbdbe735cfcfb0c88912
use assert_cmd::Command; use predicates::prelude::*; use test_helpers::make_temp_file; use crate::common::server_fixture::ServerFixture; use super::scenario::rand_name; #[tokio::test] pub async fn test() { let server_fixture = ServerFixture::create_single_use().await; let db_name = rand_name(); let addr = server_fixture.grpc_base(); set_server_id(addr).await; create_database(&db_name, addr).await; test_read_default(&db_name, addr).await; test_read_format_pretty(&db_name, addr).await; test_read_format_csv(&db_name, addr).await; test_read_format_json(&db_name, addr).await; test_read_error(&db_name, addr).await; } async fn set_server_id(addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("server") .arg("set") .arg("23") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); } async fn create_database(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("create") .arg(db_name) .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); let lp_data = vec![ "cpu,region=west user=23.2 100", "cpu,region=west user=21.0 150", ]; let lp_data_file = make_temp_file(lp_data.join("\n")); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("write") .arg(db_name) .arg(lp_data_file.as_ref()) .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("2 Lines OK")); } async fn test_read_default(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains(expected)); } async fn 
test_read_format_pretty(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("pretty") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_csv(db_name: &str, addr: &str) { let expected = "west,1970-01-01T00:00:00.000000100,23.2\nwest,1970-01-01T00:00:00.000000150,21.0"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("csv") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_json(db_name: &str, addr: &str) { let expected = r#"[{"region":"west","time":"1970-01-01 00:00:00.000000100","user":23.2},{"region":"west","time":"1970-01-01 00:00:00.000000150","user":21.0}]"#; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("json") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_error(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from unknown_table") .arg("--host") .arg(addr) .assert() .failure() .stderr(predicate::str::contains( "Table or CTE with name \\'unknown_table\\' not found", )); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("not_a_valid_format") .assert() .failure() 
.stderr(predicate::str::contains( "Unknown format type: not_a_valid_format. Expected one of 'pretty', 'csv' or 'json'", )); }
use assert_cmd::Command; use predicates::prelude::*; use test_helpers::make_temp_file; use crate::common::server_fixture::ServerFixture; use super::scenario::rand_name; #[tokio::test] pub async fn test() { let server_fixture = ServerFixture::create_single_use().await; let db_name = rand_name(); let addr = server_fixture.grpc_base(); set_server_id(addr).await; create_database(&db_name, addr).await; test_read_default(&db_name, addr).await; test_read_format_pretty(&db_name, addr).await; test_read_format_csv(&db_name, addr).await; test_read_format_json(&db_name, addr).await; test_read_error(&db_name, addr).await; } async fn set_server_id(addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("server") .arg("set") .arg("23") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); } async fn create_database(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("create") .arg(db_name) .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains("Ok")); let lp_data = vec![ "cpu,region=west user=23.2 100", "cpu,region=west user=21.0 150", ]; let lp_data_file = make_temp_file(lp_data.join("\n")); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("write") .arg(db_name) .arg(lp_data_file.as_ref()) .arg("--host")
.arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("json") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_error(db_name: &str, addr: &str) { Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from unknown_table") .arg("--host") .arg(addr) .assert() .failure() .stderr(predicate::str::contains( "Table or CTE with name \\'unknown_table\\' not found", )); Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("not_a_valid_format") .assert() .failure() .stderr(predicate::str::contains( "Unknown format type: not_a_valid_format. Expected one of 'pretty', 'csv' or 'json'", )); }
.arg(addr) .assert() .success() .stdout(predicate::str::contains("2 Lines OK")); } async fn test_read_default(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_pretty(db_name: &str, addr: &str) { let expected = "+--------+-------------------------------+------+\n\ | region | time | user |\n\ +--------+-------------------------------+------+\n\ | west | 1970-01-01 00:00:00.000000100 | 23.2 |\n\ | west | 1970-01-01 00:00:00.000000150 | 21 |\n\ +--------+-------------------------------+------+"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("pretty") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_csv(db_name: &str, addr: &str) { let expected = "west,1970-01-01T00:00:00.000000100,23.2\nwest,1970-01-01T00:00:00.000000150,21.0"; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query") .arg(db_name) .arg("select * from cpu") .arg("--host") .arg(addr) .arg("--format") .arg("csv") .assert() .success() .stdout(predicate::str::contains(expected)); } async fn test_read_format_json(db_name: &str, addr: &str) { let expected = r#"[{"region":"west","time":"1970-01-01 00:00:00.000000100","user":23.2},{"region":"west","time":"1970-01-01 00:00:00.000000150","user":21.0}]"#; Command::cargo_bin("influxdb_iox") .unwrap() .arg("database") .arg("query")
random
[ { "content": "/// Loads the specified lines into the named database\n\nfn load_lp(addr: &str, db_name: &str, lp_data: Vec<&str>) {\n\n let lp_data_file = make_temp_file(lp_data.join(\"\\n\"));\n\n\n\n Command::cargo_bin(\"influxdb_iox\")\n\n .unwrap()\n\n .arg(\"database\")\n\n .arg(\...
Rust
http/src/util/buf_list.rs
aliemjay/xitca-web
b12188930663ab5d27aa0a25cbf29fc4ecc6a0f8
use std::{collections::VecDeque, io::IoSlice}; use crate::bytes::{Buf, BufMut, Bytes, BytesMut}; pub struct BufList<B> { bufs: VecDeque<B>, remaining: usize, } impl<B: Buf> Default for BufList<B> { fn default() -> Self { Self::new() } } impl<B: Buf> BufList<B> { #[inline] pub fn with_capacity(cap: usize) -> Self { Self { bufs: VecDeque::with_capacity(cap), remaining: 0, } } #[inline] pub fn new() -> Self { Self::with_capacity(0) } #[inline] pub fn push(&mut self, buf: B) { debug_assert!(buf.has_remaining()); self.remaining += buf.remaining(); self.bufs.push_back(buf); } #[inline] pub fn cnt(&self) -> usize { self.bufs.len() } } impl<B: Buf> Buf for BufList<B> { #[inline] fn remaining(&self) -> usize { self.remaining } #[inline] fn chunk(&self) -> &[u8] { self.bufs.front().map(Buf::chunk).unwrap_or_default() } #[inline] fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize { assert!(!dst.is_empty()); let mut vecs = 0; for buf in &self.bufs { vecs += buf.chunks_vectored(&mut dst[vecs..]); if vecs == dst.len() { break; } } vecs } #[inline] fn advance(&mut self, mut cnt: usize) { debug_assert!(self.remaining >= cnt); self.remaining -= cnt; while cnt > 0 { { let front = &mut self.bufs[0]; let rem = front.remaining(); if rem > cnt { front.advance(cnt); return; } else { front.advance(rem); cnt -= rem; } } self.bufs.pop_front(); } } #[inline] fn copy_to_bytes(&mut self, len: usize) -> Bytes { match self.bufs.front_mut() { Some(front) if front.remaining() == len => { let b = front.copy_to_bytes(len); self.remaining -= len; self.bufs.pop_front(); b } Some(front) if front.remaining() > len => { self.remaining -= len; front.copy_to_bytes(len) } _ => { assert!(len <= self.remaining(), "`len` greater than remaining"); let mut bm = BytesMut::with_capacity(len); bm.put(self.take(len)); bm.freeze() } } } } #[cfg(test)] mod tests { use std::ptr; use super::*; fn hello_world_buf() -> BufList<Bytes> { let bufs = vec![Bytes::from("Hello"), Bytes::from(" "), 
Bytes::from("World")]; let remaining = bufs.iter().map(Buf::remaining).sum(); BufList { bufs: bufs.into(), remaining, } } #[test] fn to_bytes_shorter() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(4); assert_eq!(start, "Hell"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b"o"); assert!(ptr::eq(old_ptr.wrapping_add(4), bufs.chunk().as_ptr())); assert_eq!(bufs.remaining(), 7); } #[test] fn to_bytes_eq() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(5); assert_eq!(start, "Hello"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b" "); assert_eq!(bufs.remaining(), 6); } #[test] fn to_bytes_longer() { let mut bufs = hello_world_buf(); let start = bufs.copy_to_bytes(7); assert_eq!(start, "Hello W"); assert_eq!(bufs.remaining(), 4); } #[test] fn one_long_buf_to_bytes() { let mut buf = BufList::new(); buf.push(b"Hello World" as &[_]); assert_eq!(buf.copy_to_bytes(5), "Hello"); assert_eq!(buf.chunk(), b" World"); } #[test] #[should_panic(expected = "`len` greater than remaining")] fn buf_to_bytes_too_many() { hello_world_buf().copy_to_bytes(42); } }
use std::{collections::VecDeque, io::IoSlice}; use crate::bytes::{Buf, BufMut, Bytes, BytesMut}; pub struct BufList<B> { bufs: VecDeque<B>, remaining: usize, } impl<B: Buf> Default for BufList<B> { fn default() -> Self { Self::new() } } impl<B: Buf> BufList<B> { #[inline] pub fn with_capacity(cap: usize) -> Self { Self { bufs: VecDeque::with_capacity(cap), remaining: 0, } } #[inline] pub fn new() -> Self { Self::with_capacity(0) } #[inline] pub fn push(&mut self, buf: B) { debug_assert!(buf.has_remaining()); self.remaining += buf.remaining(); self.bufs.push_back(buf); } #[inline] pub fn cnt(&self) -> usize { self.bufs.len() } } impl<B: Buf> Buf for BufList<B> { #[inline] fn remaining(&self) -> usize { self.remaining } #[inline] fn chunk(&self) -> &[u8] { self.bufs.front().map(Buf::chunk).unwrap_or_default() } #[inline] fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize { assert!(!dst.is_empty()); let mut vecs = 0; for buf in &self.bufs { vecs += buf.chunks_vectored(&mut dst[vecs..]); if vecs == dst.len() { break; } } vecs } #[inline] fn advance(&mut self, mut cnt: usize) { debug_assert!(self.remaining >= cnt); self.remaining -= cnt; while cnt > 0 { { let front = &mut self.bufs[0]; let rem = front.remaining(); if rem > cnt { front.advance(cnt); return; } els
#[inline] fn copy_to_bytes(&mut self, len: usize) -> Bytes { match self.bufs.front_mut() { Some(front) if front.remaining() == len => { let b = front.copy_to_bytes(len); self.remaining -= len; self.bufs.pop_front(); b } Some(front) if front.remaining() > len => { self.remaining -= len; front.copy_to_bytes(len) } _ => { assert!(len <= self.remaining(), "`len` greater than remaining"); let mut bm = BytesMut::with_capacity(len); bm.put(self.take(len)); bm.freeze() } } } } #[cfg(test)] mod tests { use std::ptr; use super::*; fn hello_world_buf() -> BufList<Bytes> { let bufs = vec![Bytes::from("Hello"), Bytes::from(" "), Bytes::from("World")]; let remaining = bufs.iter().map(Buf::remaining).sum(); BufList { bufs: bufs.into(), remaining, } } #[test] fn to_bytes_shorter() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(4); assert_eq!(start, "Hell"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b"o"); assert!(ptr::eq(old_ptr.wrapping_add(4), bufs.chunk().as_ptr())); assert_eq!(bufs.remaining(), 7); } #[test] fn to_bytes_eq() { let mut bufs = hello_world_buf(); let old_ptr = bufs.chunk().as_ptr(); let start = bufs.copy_to_bytes(5); assert_eq!(start, "Hello"); assert!(ptr::eq(old_ptr, start.as_ptr())); assert_eq!(bufs.chunk(), b" "); assert_eq!(bufs.remaining(), 6); } #[test] fn to_bytes_longer() { let mut bufs = hello_world_buf(); let start = bufs.copy_to_bytes(7); assert_eq!(start, "Hello W"); assert_eq!(bufs.remaining(), 4); } #[test] fn one_long_buf_to_bytes() { let mut buf = BufList::new(); buf.push(b"Hello World" as &[_]); assert_eq!(buf.copy_to_bytes(5), "Hello"); assert_eq!(buf.chunk(), b" World"); } #[test] #[should_panic(expected = "`len` greater than remaining")] fn buf_to_bytes_too_many() { hello_world_buf().copy_to_bytes(42); } }
e { front.advance(rem); cnt -= rem; } } self.bufs.pop_front(); } }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "http-ws/src/mask.rs", "rank": 0, "score": 278887.4791281594 }, { "content": "#[inline]\n\npub fn apply_mask...
Rust
tests/test_lookup_ipv6.rs
tomhrr/owhois
94791fca1e01c7bfc5fe7f001d65fe3755213ab3
extern crate owhois; extern crate ipnet; #[cfg(test)] mod test_lookup { use ipnet::Ipv6Net; use owhois::lookup::ResourceLookup; use owhois::lookup::Ipv6ResourceLookup; use std::str::FromStr; #[test] fn ipv6_lookup_empty() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_single() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_multiple() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::1/128").unwrap(), 2) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_parents() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/16").unwrap(), 1), (Ipv6Net::from_str("::/32").unwrap(), 2), (Ipv6Net::from_str("::/48").unwrap(), 3), ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/48").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/32").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/16").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/15").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_bounds() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::/8").unwrap(), 2), (Ipv6Net::from_str("ff00::/8").unwrap(), 3), 
(Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap(), 4), ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::1/128").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/127").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/8").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/7").unwrap() ); assert_eq!(value, None); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap() ); assert_eq!(value, Some(4)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/128").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ff00::/8").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("fe00::/7").unwrap() ); assert_eq!(value, None); } }
extern crate owhois; extern crate ipnet; #[cfg(test)] mod test_lookup { use ipnet::Ipv6Net; use owhois::lookup::ResourceLookup; use owhois::lookup::Ipv6ResourceLookup; use std::str::FromStr; #[test] fn ipv6_lookup_empty() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_single() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_multiple() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::1/128").unwrap(), 2) ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); } #[test] fn ipv6_lookup_parents() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/16").unwrap(), 1), (Ipv6Net::from_str("::/32").unwrap(), 2), (Ipv6Net::from_str("::/48").unwrap(), 3), ]); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/48").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/32").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/16").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/15").unwrap() ); assert_eq!(value, None); } #[test] fn ipv6_lookup_bounds() { let ipv6_lookup: Ipv6ResourceLookup = ResourceLookup::from_iter(vec![ (Ipv6Net::from_str("::/128").unwrap(), 1), (Ipv6Net::from_str("::/8").unwrap(), 2), (Ipv6Net::from_str("ff00::/8").unwrap(), 3), 
(Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap(), 4), ]); let value = ipv6_lookup.get_longest_
m_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128").unwrap() ); assert_eq!(value, Some(4)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/128").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("ff00::/8").unwrap() ); assert_eq!(value, Some(3)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("fe00::/7").unwrap() ); assert_eq!(value, None); } }
match_value( Ipv6Net::from_str("::/128").unwrap() ); assert_eq!(value, Some(1)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::1/128").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/127").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/8").unwrap() ); assert_eq!(value, Some(2)); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::from_str("::/7").unwrap() ); assert_eq!(value, None); let value = ipv6_lookup.get_longest_match_value( Ipv6Net::fro
random
[ { "content": "fn run_processors(directory: &str,\n\n servers: &HashMap<String, u32>,\n\n processors: Vec<Box<dyn Processor>>,\n\n ipv4_path: &str,\n\n ipv6_path: &str,\n\n asn_path: &str) {\n\n let mut ipv4_entries: ...
Rust
src/adjacency/graph/compressed.rs
colin-daniels/agnr-ml
fc936cb8b6a68c37dfaf64c74796e0cf795c1bb8
use itertools::Itertools; use rand::prelude::*; use std::iter::FromIterator; use std::ops::Range; use super::Edge; #[derive(Debug, Clone, Eq, PartialEq)] pub struct CompressedGraph<M = ()> { edges: Vec<Edge<M>>, edge_ranges: Vec<Range<usize>>, } impl<M> Default for CompressedGraph<M> { fn default() -> Self { Self { edges: Default::default(), edge_ranges: Default::default(), } } } impl<T, M> Extend<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { let mut new_edges: Vec<Edge<M>> = iter.into_iter().map(|e| e.into()).collect(); if new_edges.is_empty() { return; } new_edges.sort_unstable_by_key(|e| (e.from, e.to)); let old_edges = self.edges.drain(..); self.edges = Itertools::merge_by(old_edges, new_edges.into_iter(), |a, b| { (a.from, a.to).lt(&(b.from, b.to)) }) .collect(); self.update_edge_ranges(); } } impl<T, M> FromIterator<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { Self::from_edges(iter) } } impl<M> CompressedGraph<M> { pub fn new<I, T>(n_vertices: usize, edges: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::from_edges(edges); ret.resize(n_vertices); ret } pub fn from_edges<I, T>(iter: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::default(); ret.extend(iter); ret } #[inline(always)] pub fn n_vertices(&self) -> usize { self.edge_ranges.len() } #[inline(always)] pub fn n_edges(&self) -> usize { self.edges.len() } pub fn clear(&mut self) { self.resize(0); } pub fn resize(&mut self, new_n_vertices: usize) { let n_vertices = self.n_vertices(); if n_vertices == 0 { self.edges.clear(); self.edge_ranges.clear(); } else if new_n_vertices < n_vertices { self.edge_ranges .resize_with(new_n_vertices, || unreachable!("should be shrinking")); if let Some(last) = self.edge_ranges.last() { let new_n_edges = last.end; self.edges .resize_with(new_n_edges, || unreachable!("should be 
shrinking edges")); } else { unreachable!("handled zero case earlier, should be unreachable") } } else { let n_edges = self.n_edges(); self.edge_ranges .resize_with(new_n_vertices, || n_edges..n_edges); } } #[inline(always)] pub fn vertices(&self) -> impl Iterator<Item = usize> { 0..self.n_vertices() } #[inline] pub fn edges(&self) -> &[Edge<M>] { &self.edges } #[inline(always)] pub fn edges_at(&self, vertex: usize) -> &[Edge<M>] { &self.edges[self.edge_ranges[vertex].clone()] } #[inline(always)] pub fn neighbors<'a>(&'a self, vertex: usize) -> impl Iterator<Item = usize> + 'a { self.neighbors_meta(vertex).map(|(to, _)| to) } #[inline(always)] pub fn neighbors_meta<'a>(&'a self, vertex: usize) -> impl Iterator<Item = (usize, &M)> + 'a { self.edges_at(vertex).iter().map(|e| (e.to, &e.meta)) } #[inline] pub fn random_walk<'a, R: Rng + 'a>( &'a self, start: usize, mut rng: R, ) -> impl Iterator<Item = usize> + 'a { std::iter::successors(Some(start), move |&last| { let range = self.edge_ranges[last].clone(); self.edges[range].choose(&mut rng).map(|e| e.to) }) } pub fn maximum_matching(&self) -> Option<()> { let (_left, _right) = self.bipartite_coloring()?; todo!() } pub fn bipartite_coloring(&self) -> Option<(Vec<usize>, Vec<usize>)> { let (start, _) = self .edge_ranges .iter() .find_position(|&r| r.end > r.start)?; #[derive(Copy, Clone, Eq, PartialEq)] enum Color { Red, Blue, None, } let mut colors = vec![Color::None; self.n_vertices()]; let mut to_visit = vec![self.edge_ranges[start].clone()]; colors[start] = Color::Red; while let Some(edge_ids) = to_visit.last_mut() { match edge_ids.next().map(|id| &self.edges[id]) { Some(edge) => { let next_color = match colors[edge.from] { Color::Red => Color::Blue, Color::Blue => Color::Red, Color::None => unreachable!("error"), }; if colors[edge.to] == Color::None { colors[edge.to] = next_color; to_visit.push(self.edge_ranges[edge.to].clone()); } else if colors[edge.to] != next_color { dbg!("wrong color, not bipartite"); return 
None; } } None => { to_visit.pop(); } } } let mut red = Vec::with_capacity(colors.len()); let mut blue = Vec::with_capacity(colors.len()); for (i, color) in colors.into_iter().enumerate() { match color { Color::Red => red.push(i), Color::Blue => blue.push(i), Color::None => { if !self.edges_at(i).is_empty() { dbg!("couldn't reach all vertices (multiple connected components)"); return None; } } } } Some((red, blue)) } #[doc(hidden)] fn update_edge_ranges(&mut self) { let old_num_vertices = self.n_vertices(); let num_vertices_from_edges = self.edges.last().map(|e| e.from + 1).unwrap_or(0); let mut n_vertices = usize::max(old_num_vertices, num_vertices_from_edges); let mut offsets = Vec::with_capacity(n_vertices + 1); for (i, &Edge { from, to, .. }) in self.edges.iter().enumerate() { n_vertices = usize::max(n_vertices, to + 1); if offsets.len() < from + 1 { offsets.resize(from + 1, i); } } offsets.resize(n_vertices + 1, self.edges.len()); self.edge_ranges = offsets .into_iter() .tuple_windows() .map(|(a, b)| a..b) .collect(); } }
use itertools::Itertools; use rand::prelude::*; use std::iter::FromIterator; use std::ops::Range; use super::Edge; #[derive(Debug, Clone, Eq, PartialEq)] pub struct CompressedGraph<M = ()> { edges: Vec<Edge<M>>, edge_ranges: Vec<Range<usize>>, } impl<M> Default for CompressedGraph<M> { fn default() -> Self { Self { edges: Default::default(), edge_ranges: Default::default(), } } } impl<T, M> Extend<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { let mut new_edges: Vec<Edge<M>> = iter.into_iter().map(|e| e.into()).collect(); if new_edges.is_empty() { return; } new_edges.sort_unstable_by_key(|e| (e.from, e.to)); let old_edges = self.edges.drain(..); self.edges = Itertools::merge_by(old_edges, new_edges.into_iter(), |a, b| { (a.from, a.to).lt(&(b.from, b.to)) }) .collect(); self.update_edge_ranges(); } } impl<T, M> FromIterator<T> for CompressedGraph<M> where T: Into<Edge<M>>, { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { Self::from_edges(iter) } } impl<M> CompressedGraph<M> { pub fn new<I, T>(n_vertices: usize, edges: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::from_edges(edges); ret.resize(n_vertices); ret } pub fn from_edges<I, T>(iter: I) -> Self where I: IntoIterator<Item = T>, Self: Extend<T>, { let mut ret = Self::default(); ret.extend(iter); ret } #[inline(always)] pub fn n_vertices(&self) -> usize { self.edge_ranges.len() } #[inline(always)] pub fn n_edges(&self) -> usize { self.edges.len() } pub fn clear(&mut self) { self.resize(0); } pub fn resize(&mut self, new_n_vertices: usize) { let n_vertices = self.n_vertices(); if n_vertices == 0 { self.edges.clear(); self.edge_ranges.clear(); } else if new_n_vertices < n_vertices { self.edge_ranges .resize_with(new_n_vertices, || unreachable!("should be shrinking")); if let Some(last) = self.edge_ranges.last() { let new_n_edges = last.end; self.edges .resize_with(new_n_edges, || unreachable!("should be 
shrinking edges")); } else { unreachable!("handled zero case earlier, should be unreachable") } } else { let n_edges = self.n_edges(); self.edge_ranges .resize_with(new_n_vertices, || n_edges..n_edges); } } #[inline(always)] pub fn vertices(&self) -> impl Iterator<Item = usize> { 0..self.n_vertices() } #[inline] pub fn edges(&self) -> &[Edge<M>] { &self.edges } #[inline(always)] pub fn edges_at(&self, vertex: usize) -> &[Edge<M>] { &self.edges[self.edge_ranges[vertex].clone()] } #[inline(always)] pub fn neighbors<'a>(&'a self, vertex: usize) -> impl Iterator<Item = usize> + 'a { self.neighbors_meta(vertex).map(|(to, _)| to) } #[inline(always)] pub fn neighbors_meta<'a>(&'a self, vertex: usize) -> impl Iterator<Item = (usize, &M)> + 'a { self.edges_at(vertex).iter().map(|e| (e.to, &e.meta)) } #[inline] pub fn random_walk<'a, R: Rng + 'a>( &'a self, start: usize, mut rng: R, ) -> impl Iterator<Item = usize> + 'a { std::iter::successors(Some(start), move |&last| { let range = self.edge_ranges[last].clone(); self.edges[range].choose(&mut rng).map(|e| e.to) }) } pub fn maximum_matching(&self) -> Option<()> { let (_left, _right) = self.bipartite_coloring()?; todo!() } pub fn bipartite_coloring(&self) -> Option<(Vec<usize>, Vec<usize>)> { let (start, _) = self .edge_ranges .iter() .find_position(|&r| r.end > r.start)?; #[derive(Copy, Clone, Eq, PartialEq)] enum Color { Red, Blue, None, } let mut colors = vec![Color::None; self.n_vertices()]; let mut to_visit = vec![self.edge_ranges[start].clone()]; colors[start] = Color::Red; while let Some(edge_ids) = to_visit.last_mut() { match edge_ids.next().map(|id| &self.edges[id]) { Some(edge) => { let next_color = match colors[edge.from] { Color::Red => Color::Blue, Color::Blue => Color::Red, Color::None => unreachable!("error"), }; if colors[edge.to] == Color::None { colors[edge.to] = next_color; to_visit.push(self.edge_ranges[edge.to].clone()); } else if colors[edge.to] != next_color { dbg!("wrong color, not bipartite"); return 
None; } } None => { to_visit.pop(); } } } let mut red = Vec::with_capacity(colors.len()); let mut blue = Vec::with_capacity(colors.len()); for (i, color) in colors.into_iter().enumerate() { match color { Color::Red => red.push(i), Color::Blue => blue.push(i), Color::None => { if !self.edges_at(i).is_empty() { dbg!("couldn't reach all vertices (multiple connected components)"); return None; } } } } Some((red, blue)) } #[doc(hidden)] fn update_edge_ranges(&mut self) { let old_num_vertices = self.n_vertices(); let num_vertices_from_edges = self.edges.last().map(|e| e.from + 1).unwrap_or(0); let mut n_vertices = usize::max(old_num_vertices, num_vertices_from_edges); let mut offsets = Vec::with_capacity(n_vertices + 1); for (i, &Edge { from, to, .. }) in self.edges.iter().enumerate() { n_vertices = usize::max(n_vertices, to + 1);
} offsets.resize(n_vertices + 1, self.edges.len()); self.edge_ranges = offsets .into_iter() .tuple_windows() .map(|(a, b)| a..b) .collect(); } }
if offsets.len() < from + 1 { offsets.resize(from + 1, i); }
if_condition
[ { "content": "#[inline]\n\npub fn iota<I: Idx>(start: I) -> std::iter::Map<RangeFrom<usize>, impl Fn(usize) -> I> {\n\n (start.index()..).map(I::new)\n\n}\n\n\n\n// NOTE: I don't want this to take arbitrary RangeBounds because it would either have\n\n// to use dynamic polymorphism, or panic on RangeFro...
Rust
src/packets.rs
gkbrk/RustOre-Classic
0ceb927ad06c5a6905811ba56137abff6b5ceb66
use std::io::MemReader; use std::io::net::tcp::TcpStream; use config::Configuration; use mc_string::MCString; pub struct Packet{ pub packet_id: u8, pub packet_len: uint, pub data: Vec<u8> } impl Packet{ pub fn receive(mut conn: TcpStream) -> Packet{ let packet_id = conn.read_byte().unwrap(); let packet_len = match packet_id{ 0x00 => 130, 0x05 => 8, 0x08 => 9, 0x0d => 65, _ => 0 }; let data = conn.read_exact(packet_len).unwrap(); return Packet{ packet_id: packet_id, packet_len: packet_len, data: data }; } pub fn parse_player_ident(&self) -> PlayerIdent{ let mut reader = MemReader::new(self.data.clone()); return PlayerIdent{ version: reader.read_u8().unwrap(), username: reader.read_mc_string(), verification_key: reader.read_mc_string(), unused: reader.read_u8().unwrap() }; } pub fn parse_set_block(&self) -> SetBlock{ let mut reader = MemReader::new(self.data.clone()); return SetBlock{ x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), destroyed: match reader.read_u8().unwrap(){ 0x00 => true, 0x01 => false, _ => false }, block_id: reader.read_u8().unwrap() }; } pub fn parse_position_and_orientation(&self) -> PositionAndOrientation{ let mut reader = MemReader::new(self.data.clone()); return PositionAndOrientation{ player_id: reader.read_u8().unwrap(), x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), yaw: reader.read_u8().unwrap(), pitch: reader.read_u8().unwrap() }; } pub fn parse_message(&self) -> Message{ let mut reader = MemReader::new(self.data.clone()); return Message{ unused: reader.read_u8().unwrap(), message: reader.read_mc_string() }; } } #[deriving(Clone)] struct PlayerIdent{ pub version: u8, pub username: String, pub verification_key: String, unused: u8 } #[deriving(Clone)] struct SetBlock{ pub x: i16, pub y: i16, pub z: i16, pub destroyed: bool, pub block_id: u8 } #[deriving(Clone)] struct PositionAndOrientation{ pub player_id: u8, pub x: i16, pub y: i16, 
pub z: i16, pub yaw: u8, pub pitch: u8 } #[deriving(Clone)] struct Message{ unused: u8, pub message: String } pub trait MCPackets{ fn send_server_ident(&mut self, config: Configuration); fn send_ping(&mut self); fn send_level_init(&mut self); fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8); fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16); fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_chat_message(&mut self, player_id: i8, message: String); } impl MCPackets for TcpStream{ fn send_server_ident(&mut self, config: Configuration){ self.write_u8(0x00); self.write_u8(0x07); self.write_mc_string(config.server_name); self.write_mc_string(config.server_motd); self.write_u8(0x00); } fn send_ping(&mut self){ self.write_u8(0x01); } fn send_level_init(&mut self){ self.write_u8(0x02); } fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8){ self.write_u8(0x03); self.write_be_i16(length); self.write(data); for i in range(0, 1024 - length){ self.write_u8(0x00); } self.write_u8(percentage); } fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16){ self.write_u8(0x04); self.write_be_i16(x_size); self.write_be_i16(y_size); self.write_be_i16(z_size); } fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x07); self.write_i8(-1); self.write_mc_string("gokberkdoga".to_string()); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x08); self.write_i8(-1); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_chat_message(&mut self, player_id: i8, message: String){ self.write_u8(0x0d); self.write_i8(player_id); self.write_mc_string(message); } }
use std::io::MemReader; use std::io::net::tcp::TcpStream; use config::Configuration; use mc_string::MCString; pub struct Packet{ pub packet_id: u8, pub packet_len: uint, pub data: Vec<u8> } impl Packet{ pub fn receive(mut conn: TcpStream) -> Packet{ let packet_id = conn.read_byte().unwrap(); let packet_len = match packet_id{ 0x00 => 130, 0x05 => 8, 0x08 => 9, 0x0d => 65, _ => 0 }; let data = conn.read_exact(packet_len).unwrap(); return Packet{ packet_id: packet_id, packet_len: packet_len, data: data }; } pub fn parse_player_ident(&self) -> PlayerIdent{ let mut reader = MemReader::new(self.data.clone()); return PlayerIdent{ version: reader.read_u8().unwrap(), username: reader.read_mc_string(), verification_key: reader.read_mc_string(), unused: reader.read_u8().unwrap() }; } pub fn parse_set_block(&self)
} fn send_ping(&mut self){ self.write_u8(0x01); } fn send_level_init(&mut self){ self.write_u8(0x02); } fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8){ self.write_u8(0x03); self.write_be_i16(length); self.write(data); for i in range(0, 1024 - length){ self.write_u8(0x00); } self.write_u8(percentage); } fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16){ self.write_u8(0x04); self.write_be_i16(x_size); self.write_be_i16(y_size); self.write_be_i16(z_size); } fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x07); self.write_i8(-1); self.write_mc_string("gokberkdoga".to_string()); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8){ self.write_u8(0x08); self.write_i8(-1); self.write_be_i16(x); self.write_be_i16(y); self.write_be_i16(z); self.write_u8(yaw); self.write_u8(pitch); } fn send_chat_message(&mut self, player_id: i8, message: String){ self.write_u8(0x0d); self.write_i8(player_id); self.write_mc_string(message); } }
-> SetBlock{ let mut reader = MemReader::new(self.data.clone()); return SetBlock{ x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), destroyed: match reader.read_u8().unwrap(){ 0x00 => true, 0x01 => false, _ => false }, block_id: reader.read_u8().unwrap() }; } pub fn parse_position_and_orientation(&self) -> PositionAndOrientation{ let mut reader = MemReader::new(self.data.clone()); return PositionAndOrientation{ player_id: reader.read_u8().unwrap(), x: reader.read_be_i16().unwrap(), y: reader.read_be_i16().unwrap(), z: reader.read_be_i16().unwrap(), yaw: reader.read_u8().unwrap(), pitch: reader.read_u8().unwrap() }; } pub fn parse_message(&self) -> Message{ let mut reader = MemReader::new(self.data.clone()); return Message{ unused: reader.read_u8().unwrap(), message: reader.read_mc_string() }; } } #[deriving(Clone)] struct PlayerIdent{ pub version: u8, pub username: String, pub verification_key: String, unused: u8 } #[deriving(Clone)] struct SetBlock{ pub x: i16, pub y: i16, pub z: i16, pub destroyed: bool, pub block_id: u8 } #[deriving(Clone)] struct PositionAndOrientation{ pub player_id: u8, pub x: i16, pub y: i16, pub z: i16, pub yaw: u8, pub pitch: u8 } #[deriving(Clone)] struct Message{ unused: u8, pub message: String } pub trait MCPackets{ fn send_server_ident(&mut self, config: Configuration); fn send_ping(&mut self); fn send_level_init(&mut self); fn send_chunk_data(&mut self, length: i16, data: &[u8], percentage: u8); fn send_level_finalize(&mut self, x_size: i16, y_size: i16, z_size: i16); fn send_spawn_player(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_pos(&mut self, x: i16, y: i16, z: i16, yaw: u8, pitch: u8); fn send_chat_message(&mut self, player_id: i8, message: String); } impl MCPackets for TcpStream{ fn send_server_ident(&mut self, config: Configuration){ self.write_u8(0x00); self.write_u8(0x07); self.write_mc_string(config.server_name); self.write_mc_string(config.server_motd); 
self.write_u8(0x00);
random
[ { "content": "fn handle_connection(config: Configuration, mut conn: TcpStream, mutex_world: Arc<Mutex<World>>){\n\n let ip = match conn.peer_name(){\n\n Ok(x) => x.ip,\n\n Err(x) => {return;}\n\n };\n\n println!(\"{} is connecting to us...\", ip);\n\n loop{\n\n let packet = Pack...
Rust
src/db/users.rs
Follpvosten/swiki
7a5d216cd9776fa2958531bc8d357f0e695d3635
use std::{convert::TryFrom, result::Result as StdResult}; use rocket::{ outcome::try_outcome, request::{FromRequest, Outcome}, tokio::task::spawn_blocking, Request, }; use sqlx::PgPool; use uuid::Uuid; use zeroize::Zeroize; use crate::{Db, Error, Result}; #[derive(Debug, Clone, Copy)] pub struct UserSession { pub session_id: Uuid, pub user_id: Uuid, } #[rocket::async_trait] impl<'r> FromRequest<'r> for &'r UserSession { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use rocket::outcome::IntoOutcome; let result = request .local_cache_async(async { let session_id = request .cookies() .get("session_id") .and_then(|cookie| base64::decode(cookie.value()).ok()) .and_then(|vec| uuid::Bytes::try_from(vec.as_slice()).ok()) .map(Uuid::from_bytes)?; let db: &Db = request.rocket().state()?; let user_id = match db.get_session_user(session_id).await { Err(e) => { log::error!("Error getting session user: {}", e); None } Ok(user_id) => Some(user_id), }?; user_id.map(|user_id| UserSession { session_id, user_id, }) }) .await; result.as_ref().or_forward(()) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedUser { id: Uuid, name: String, is_admin: bool, } impl LoggedUser { pub fn is_admin(&self) -> bool { self.is_admin } } #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedUser { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use crate::error::IntoOutcomeHack; use rocket::outcome::IntoOutcome; let session: &UserSession = try_outcome!(request.guard().await); let db: &Db = try_outcome!(request.rocket().state().or_forward(())); async fn get_user_info(pool: &PgPool, id: Uuid) -> Result<(bool, String)> { Ok( sqlx::query!(r#"SELECT name, is_admin FROM "user" WHERE id = $1"#, id) .fetch_one(pool) .await .map(|r| (r.is_admin, r.name))?, ) } let (is_admin, name) = try_outcome!(get_user_info(db, session.user_id).await.into_outcome_hack()); Outcome::Success(LoggedUser { 
id: session.user_id, name, is_admin, }) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedAdmin(LoggedUser); #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedAdmin { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { let logged_user: LoggedUser = try_outcome!(request.guard().await); if logged_user.is_admin { Outcome::Success(LoggedAdmin(logged_user)) } else { Outcome::Forward(()) } } } fn hash_password(password: &str) -> StdResult<String, argon2::Error> { fn gen_salt() -> Vec<u8> { use rand::Rng; rand::thread_rng() .sample_iter(&rand::distributions::Alphanumeric) .take(32) .collect() } let config = argon2::Config { variant: argon2::Variant::Argon2i, ..Default::default() }; let salt = gen_salt(); argon2::hash_encoded(password.as_bytes(), &salt, &config) } fn verify_password(hash: &str, password: &str) -> StdResult<bool, argon2::Error> { argon2::verify_encoded(hash, password.as_bytes()) } pub async fn name_exists(pool: &PgPool, username: &str) -> Result<bool> { Ok(sqlx::query_scalar!( r#"SELECT EXISTS(SELECT 1 FROM "user" WHERE name = $1) AS "a!""#, username ) .fetch_one(pool) .await?) } pub async fn register(pool: &PgPool, username: &str, mut password: String) -> Result<Uuid> { if name_exists(pool, username).await? { return Err(Error::UserAlreadyExists(username.to_string())); } let id = Uuid::new_v4(); let pw_hash = spawn_blocking(move || { let res = hash_password(&password); password.zeroize(); res }) .await??; sqlx::query!( r#"INSERT INTO "user"(id, name, pw_hash, is_admin) VALUES($1, $2, $3, (SELECT COUNT(*) FROM "user") = 0)"#, id, username, pw_hash ) .execute(pool) .await?; Ok(id) } pub async fn try_login(pool: &PgPool, username: &str, mut password: String) -> Result<UserSession> { let (user_id, hash) = sqlx::query!( r#"SELECT id, pw_hash FROM "user" WHERE name = $1"#, username ) .fetch_optional(pool) .await? 
.map(|r| (r.id, r.pw_hash)) .ok_or_else(|| Error::UserNotFound(username.to_string()))?; let pw_valid = spawn_blocking(move || { let res = verify_password(&hash, &password); password.zeroize(); res }) .await??; if pw_valid { let session_id = create_session(pool, user_id).await?; Ok(UserSession { session_id, user_id, }) } else { Err(Error::WrongPassword) } } async fn create_session(pool: &PgPool, user_id: Uuid) -> Result<Uuid> { let session_id = Uuid::new_v4(); sqlx::query!( "INSERT INTO session(session_id, user_id) VALUES($1, $2)", session_id, user_id ) .execute(pool) .await?; Ok(session_id) } pub async fn destroy_session(pool: &PgPool, session_id: Uuid) -> Result<()> { sqlx::query!("DELETE FROM session WHERE session_id = $1", session_id) .execute(pool) .await?; Ok(()) } pub async fn get_session_user(pool: &PgPool, session_id: Uuid) -> Result<Option<Uuid>> { Ok(sqlx::query_scalar!( "SELECT user_id FROM session WHERE session_id = $1", session_id ) .fetch_optional(pool) .await?) } pub async fn is_admin(pool: &PgPool, user_id: Uuid) -> Result<bool> { Ok( sqlx::query_scalar!(r#"SELECT is_admin FROM "user" WHERE id = $1"#, user_id) .fetch_optional(pool) .await? .unwrap_or(false), ) }
use std::{convert::TryFrom, result::Result as StdResult}; use rocket::{ outcome::try_outcome, request::{FromRequest, Outcome}, tokio::task::spawn_blocking, Request, }; use sqlx::PgPool; use uuid::Uuid; use zeroize::Zeroize; use crate::{Db, Error, Result}; #[derive(Debug, Clone, Copy)] pub struct UserSession { pub session_id: Uuid, pub user_id: Uuid, } #[rocket::async_trait] impl<'r> FromRequest<'r> for &'r UserSession { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use rocket::outcome::IntoOutcome; let result = request .local_cache_async(async { let session_id = request .cookies() .get("session_id") .and_then(|cookie| base64::decode(cookie.value()).ok()) .and_then(|vec| uuid::Bytes::try_from(vec.as_slice()).ok()) .map(Uuid::from_bytes)?; let db: &Db = request.rocket().state()?;
user_id.map(|user_id| UserSession { session_id, user_id, }) }) .await; result.as_ref().or_forward(()) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedUser { id: Uuid, name: String, is_admin: bool, } impl LoggedUser { pub fn is_admin(&self) -> bool { self.is_admin } } #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedUser { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { use crate::error::IntoOutcomeHack; use rocket::outcome::IntoOutcome; let session: &UserSession = try_outcome!(request.guard().await); let db: &Db = try_outcome!(request.rocket().state().or_forward(())); async fn get_user_info(pool: &PgPool, id: Uuid) -> Result<(bool, String)> { Ok( sqlx::query!(r#"SELECT name, is_admin FROM "user" WHERE id = $1"#, id) .fetch_one(pool) .await .map(|r| (r.is_admin, r.name))?, ) } let (is_admin, name) = try_outcome!(get_user_info(db, session.user_id).await.into_outcome_hack()); Outcome::Success(LoggedUser { id: session.user_id, name, is_admin, }) } } #[derive(Debug, Clone, serde::Serialize)] pub struct LoggedAdmin(LoggedUser); #[rocket::async_trait] impl<'r> FromRequest<'r> for LoggedAdmin { type Error = Error; async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { let logged_user: LoggedUser = try_outcome!(request.guard().await); if logged_user.is_admin { Outcome::Success(LoggedAdmin(logged_user)) } else { Outcome::Forward(()) } } } fn hash_password(password: &str) -> StdResult<String, argon2::Error> { fn gen_salt() -> Vec<u8> { use rand::Rng; rand::thread_rng() .sample_iter(&rand::distributions::Alphanumeric) .take(32) .collect() } let config = argon2::Config { variant: argon2::Variant::Argon2i, ..Default::default() }; let salt = gen_salt(); argon2::hash_encoded(password.as_bytes(), &salt, &config) } fn verify_password(hash: &str, password: &str) -> StdResult<bool, argon2::Error> { argon2::verify_encoded(hash, password.as_bytes()) } pub async fn name_exists(pool: 
&PgPool, username: &str) -> Result<bool> { Ok(sqlx::query_scalar!( r#"SELECT EXISTS(SELECT 1 FROM "user" WHERE name = $1) AS "a!""#, username ) .fetch_one(pool) .await?) } pub async fn register(pool: &PgPool, username: &str, mut password: String) -> Result<Uuid> { if name_exists(pool, username).await? { return Err(Error::UserAlreadyExists(username.to_string())); } let id = Uuid::new_v4(); let pw_hash = spawn_blocking(move || { let res = hash_password(&password); password.zeroize(); res }) .await??; sqlx::query!( r#"INSERT INTO "user"(id, name, pw_hash, is_admin) VALUES($1, $2, $3, (SELECT COUNT(*) FROM "user") = 0)"#, id, username, pw_hash ) .execute(pool) .await?; Ok(id) } pub async fn try_login(pool: &PgPool, username: &str, mut password: String) -> Result<UserSession> { let (user_id, hash) = sqlx::query!( r#"SELECT id, pw_hash FROM "user" WHERE name = $1"#, username ) .fetch_optional(pool) .await? .map(|r| (r.id, r.pw_hash)) .ok_or_else(|| Error::UserNotFound(username.to_string()))?; let pw_valid = spawn_blocking(move || { let res = verify_password(&hash, &password); password.zeroize(); res }) .await??; if pw_valid { let session_id = create_session(pool, user_id).await?; Ok(UserSession { session_id, user_id, }) } else { Err(Error::WrongPassword) } } async fn create_session(pool: &PgPool, user_id: Uuid) -> Result<Uuid> { let session_id = Uuid::new_v4(); sqlx::query!( "INSERT INTO session(session_id, user_id) VALUES($1, $2)", session_id, user_id ) .execute(pool) .await?; Ok(session_id) } pub async fn destroy_session(pool: &PgPool, session_id: Uuid) -> Result<()> { sqlx::query!("DELETE FROM session WHERE session_id = $1", session_id) .execute(pool) .await?; Ok(()) } pub async fn get_session_user(pool: &PgPool, session_id: Uuid) -> Result<Option<Uuid>> { Ok(sqlx::query_scalar!( "SELECT user_id FROM session WHERE session_id = $1", session_id ) .fetch_optional(pool) .await?) 
} pub async fn is_admin(pool: &PgPool, user_id: Uuid) -> Result<bool> { Ok( sqlx::query_scalar!(r#"SELECT is_admin FROM "user" WHERE id = $1"#, user_id) .fetch_optional(pool) .await? .unwrap_or(false), ) }
let user_id = match db.get_session_user(session_id).await { Err(e) => { log::error!("Error getting session user: {}", e); None } Ok(user_id) => Some(user_id), }?;
assignment_statement
[ { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n\n// Route modules\n\nmod articles;\n\nmod settings;\n\nmod users;\n\n\n", "file_path": "src/main.rs", "rank": 0, "score": 122101.93948178331 }, { "content": "pub fn routes() -> Vec<rocket::Route> {\n\n rocket::routes![\n\...
Rust
src/kernel/memory.rs
IamTheCarl/kernel_emulator
790be670460830f0f62e02b3b214d2c566264473
use crate::kernel::{bytes::Bytes, Pointer, Value, ValueSize}; use segmap::SegmentMap; use std::{ cell::{Ref, RefCell, RefMut}, ops::{Deref, DerefMut, Range}, }; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("Loaded overlapping memory.")] MemoryOverlapps { sections: Vec<(String, Range<Pointer>)>, }, #[error("Wrong memory type: {address:08x} Wanted read:{read_wanted} Wanted write:{write_wanted} Wanted execute:{execute_wanted}")] WrongMemoryType { address: Pointer, read_wanted: bool, write_wanted: bool, execute_wanted: bool, }, #[error("Attempt to access unmapped memory: 0x{0:016x}.")] UnmappedAddress(Pointer), #[error("Memory access operation is split between blocks.")] SectionAliacing, } pub type Result<T> = std::result::Result<T, Error>; pub struct ProcessMemory { segments: SegmentMap<Pointer, RefCell<MemoryBlock>>, } impl ProcessMemory { pub fn new() -> Self { Self { segments: SegmentMap::new(), } } pub fn segments(&self) -> impl Iterator<Item = (Range<Pointer>, bool, bool, bool)> + '_ { self.segments.iter().map(|(s, r)| { let block = r.borrow(); let is_read = block.is_read(); let is_write = block.is_write(); let is_executable = block.is_executable(); ( Range { start: *s.start_value().expect("Infinite memory region."), end: *s.end_value().expect("Infinite memory region."), }, is_read, is_write, is_executable, ) }) } pub fn get_memory_block(&self, address: &Pointer) -> Result<Ref<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow()) }) } pub fn get_memory_block_mut(&self, address: &Pointer) -> Result<RefMut<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow_mut()) }) } pub fn read_random_bytes(&self, address: Pointer, target: &mut [u8]) -> Result<()> { let length = target.len() as Pointer; let range = address..address + length; let block = self.get_memory_block(&range.start)?; if block.is_read() { let data = 
block.get_range(range)?; target.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address: range.start, read_wanted: true, write_wanted: false, execute_wanted: false, }) } } pub fn read_random(&self, address: Pointer, size: ValueSize) -> Result<Value> { let mut bytes = [0u8; 8]; self.read_random_bytes(address, &mut bytes[..size.len()])?; let value = Value::from_bytes(&bytes[..size.len()]); Ok(value) } pub fn write_random_bytes(&self, address: Pointer, data: &[u8]) -> Result<()> { let mut block = self.get_memory_block_mut(&address)?; let length = data.len(); if block.is_write() { let range = address..address + length as Pointer; let block_data = block.get_range_mut(range)?; block_data.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address, read_wanted: false, write_wanted: true, execute_wanted: false, }) } } pub fn write_random(&self, address: Pointer, value: Value) -> Result<()> { self.write_random_bytes(address, &value.to_bytes()) } pub fn new_block(&mut self, memory_block: MemoryBlock) -> Result<()> { match self .segments .insert_if_empty(memory_block.range(), RefCell::new(memory_block)) { None => Ok(()), Some(memory_block) => { let sections: Vec<(String, Range<Pointer>)> = self .segments .iter_in(memory_block.borrow().range()) .map(|(_segment, overlapping)| { let block = overlapping.borrow(); (block.name.clone(), block.range()) }) .collect(); let range = memory_block.borrow().range(); println!( "OVERLAPPING RANGE {} {:08x}-{:08x}:", memory_block.borrow().name, range.start, range.end ); for (name, range) in sections.iter() { println!("\t{}: {:08x}-{:08x}", name, range.start, range.end); } Err(Error::MemoryOverlapps { sections }) } } } pub fn new_blank_block(&mut self, block: BlankMemoryBlock) -> Result<()> { let range = block.range; let gaps: Vec<Range<Pointer>> = self .segments .iter_gaps() .map(|gap| Range { start: **gap.start_value().expect("Infinite memory secton."), end: **gap.end_value().expect("Infinite memory 
secton."), }) .filter(|gap| gap.end >= range.start && gap.start <= range.end) .map(|gap| Range { start: std::cmp::max(gap.start, range.start), end: std::cmp::min(gap.end, range.end), }) .collect(); println!("ADD BLANK SECTION {}: {:016x?}", block.name, gaps); let mut max = range.start; let mut index = 0; for gap in gaps { max = std::cmp::max(max, gap.end); let length = gap.end - gap.start; if length > 0 { let data = Bytes::Original(vec![0u8; length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), gap.start, data, block.read, block.write, block.execute, ); index += 1; println!( "BLANK BLOCK {}: {:08x}-{:08x}", block.name, gap.start, gap.end ); self.new_block(block)?; } } let end_block_length = range.end + 1 - max; if end_block_length > 0 { let data = Bytes::Original(vec![0u8; end_block_length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), max, data, block.read, block.write, block.execute, ); println!("BLANK BLOCK {}: {:08x}-{:08x}", block.name, max, range.end); self.new_block(block).ok(); } for i in range { debug_assert!( self.segments.get(&i).is_some(), "Missing address: {:016x}", i ); } Ok(()) } pub fn replace(&mut self, source: ProcessMemory) { self.segments = source.segments; } } #[derive(Clone, Debug)] pub struct MemoryBlock { name: String, read: bool, write: bool, execute: bool, base_address: Pointer, data: Bytes, } impl MemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, data: Bytes, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, read, write, execute, base_address, data, } } } impl MemoryBlock { pub fn is_executable(&self) -> bool { self.execute } pub fn is_read(&self) -> bool { self.read } pub fn is_write(&self) -> bool { self.write } pub fn range(&self) -> std::ops::Range<Pointer> { self.base_address..(self.base_address + self.data.len() as Pointer - 1) } pub fn get_range(&self, range: Range<Pointer>) -> Result<&[u8]> { let start = 
range.start - self.base_address; let end = range.end - self.base_address; self.data .get(start as usize..end as usize) .map_or(Err(Error::SectionAliacing), Ok) } pub fn get_range_mut(&mut self, range: Range<Pointer>) -> Result<&mut [u8]> { let start = range.start - self.base_address; let end = range.end - self.base_address; self.data .get_mut(start as usize..end as usize) .map_or(Err(Error::SectionAliacing), Ok) } } impl Deref for MemoryBlock { type Target = [u8]; fn deref(&self) -> &Self::Target { self.data.deref() } } impl DerefMut for MemoryBlock { fn deref_mut(&mut self) -> &mut Self::Target { self.data.deref_mut() } } impl std::cmp::PartialEq for MemoryBlock { fn eq(&self, _other: &Self) -> bool { false } } impl std::cmp::Eq for MemoryBlock {} #[derive(Clone)] pub struct BlankMemoryBlock { name: String, range: Range<Pointer>, read: bool, write: bool, execute: bool, } impl BlankMemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, length: Pointer, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, range: Range { start: base_address, end: base_address + length, }, read, write, execute, } } } #[test] fn overlapping_memory() { #[cfg(not(tarpaulin_include))] fn assert_overlap_failed(result: Result<()>) -> std::result::Result<(), &'static str> { match result { Err(error) => match error { Error::MemoryOverlapps { .. 
} => { Ok(()) } _ => Err("Overlapping produced wrong error type."), }, Ok(_) => Err("Overlapping did not fail."), } } let mut kernel = ProcessMemory::new(); kernel .new_block(MemoryBlock::new( "", 0, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); kernel .new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 256, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 1, Bytes::from_static(&[0u8; 1]), false, false, false, )); assert_overlap_failed(result).unwrap(); }
use crate::kernel::{bytes::Bytes, Pointer, Value, ValueSize}; use segmap::SegmentMap; use std::{ cell::{Ref, RefCell, RefMut}, ops::{Deref, DerefMut, Range}, }; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("Loaded overlapping memory.")] MemoryOverlapps { sections: Vec<(String, Range<Pointer>)>, }, #[error("Wrong memory type: {address:08x} Wanted read:{read_wanted} Wanted write:{write_wanted} Wanted execute:{execute_wanted}")] WrongMemoryType { address: Pointer, read_wanted: bool, write_wanted: bool, execute_wanted: bool, }, #[error("Attempt to access unmapped memory: 0x{0:016x}.")] UnmappedAddress(Pointer), #[error("Memory access operation is split between blocks.")] SectionAliacing, } pub type Result<T> = std::result::Result<T, Error>; pub struct ProcessMemory { segments: SegmentMap<Pointer, RefCell<MemoryBlock>>, } impl ProcessMemory { pub fn new() -> Self { Self { segments: SegmentMap::new(), } } pub fn segments(&self) -> impl Iterator<Item = (Range<Pointer>, bool, bool, bool)> + '_ { self.segments.iter().map(|(s, r)| { let block = r.borrow(); let is_read = block.is_read(); let is_write = block.is_write(); let is_executable = block.is_executable(); ( Range { start: *s.start_value().expect("Infinite memory region."), end: *s.end_value().expect("Infinite memory region."), }, is_read, is_write, is_executable, ) }) } pub fn get_memory_block(&self, address: &Pointer) -> Result<Ref<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow()) }) } pub fn get_memory_block_mut(&self, address: &Pointer) -> Result<RefMut<MemoryBlock>> { self.segments .get(address) .map_or(Err(Error::UnmappedAddress(*address)), |cell| { Ok(cell.borrow_mut()) }) } pub fn read_random_bytes(&self, address: Pointer, target: &mut [u8]) -> Result<()> { let length = target.len() as Pointer; let range = address..address + length; let block = self.get_memory_block(&range.start)?; if block.is_read() { let data = 
block.get_range(range)?; target.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address: range.start, read_wanted: true, write_wanted: false, execute_wanted: false, }) } } pub fn read_random(&self, address: Pointer, size: ValueSize) -> Result<Value> { let mut bytes = [0u8; 8]; self.read_random_bytes(address, &mut bytes[..size.len()])?; let
base_address, end: base_address + length, }, read, write, execute, } } } #[test] fn overlapping_memory() { #[cfg(not(tarpaulin_include))] fn assert_overlap_failed(result: Result<()>) -> std::result::Result<(), &'static str> { match result { Err(error) => match error { Error::MemoryOverlapps { .. } => { Ok(()) } _ => Err("Overlapping produced wrong error type."), }, Ok(_) => Err("Overlapping did not fail."), } } let mut kernel = ProcessMemory::new(); kernel .new_block(MemoryBlock::new( "", 0, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); kernel .new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )) .unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 512, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 256, Bytes::from_static(&[0u8; 512]), false, false, false, )); assert_overlap_failed(result).unwrap(); let result = kernel.new_block(MemoryBlock::new( "", 1, Bytes::from_static(&[0u8; 1]), false, false, false, )); assert_overlap_failed(result).unwrap(); }
value = Value::from_bytes(&bytes[..size.len()]); Ok(value) } pub fn write_random_bytes(&self, address: Pointer, data: &[u8]) -> Result<()> { let mut block = self.get_memory_block_mut(&address)?; let length = data.len(); if block.is_write() { let range = address..address + length as Pointer; let block_data = block.get_range_mut(range)?; block_data.copy_from_slice(data); Ok(()) } else { Err(Error::WrongMemoryType { address, read_wanted: false, write_wanted: true, execute_wanted: false, }) } } pub fn write_random(&self, address: Pointer, value: Value) -> Result<()> { self.write_random_bytes(address, &value.to_bytes()) } pub fn new_block(&mut self, memory_block: MemoryBlock) -> Result<()> { match self .segments .insert_if_empty(memory_block.range(), RefCell::new(memory_block)) { None => Ok(()), Some(memory_block) => { let sections: Vec<(String, Range<Pointer>)> = self .segments .iter_in(memory_block.borrow().range()) .map(|(_segment, overlapping)| { let block = overlapping.borrow(); (block.name.clone(), block.range()) }) .collect(); let range = memory_block.borrow().range(); println!( "OVERLAPPING RANGE {} {:08x}-{:08x}:", memory_block.borrow().name, range.start, range.end ); for (name, range) in sections.iter() { println!("\t{}: {:08x}-{:08x}", name, range.start, range.end); } Err(Error::MemoryOverlapps { sections }) } } } pub fn new_blank_block(&mut self, block: BlankMemoryBlock) -> Result<()> { let range = block.range; let gaps: Vec<Range<Pointer>> = self .segments .iter_gaps() .map(|gap| Range { start: **gap.start_value().expect("Infinite memory secton."), end: **gap.end_value().expect("Infinite memory secton."), }) .filter(|gap| gap.end >= range.start && gap.start <= range.end) .map(|gap| Range { start: std::cmp::max(gap.start, range.start), end: std::cmp::min(gap.end, range.end), }) .collect(); println!("ADD BLANK SECTION {}: {:016x?}", block.name, gaps); let mut max = range.start; let mut index = 0; for gap in gaps { max = std::cmp::max(max, gap.end); let length 
= gap.end - gap.start; if length > 0 { let data = Bytes::Original(vec![0u8; length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), gap.start, data, block.read, block.write, block.execute, ); index += 1; println!( "BLANK BLOCK {}: {:08x}-{:08x}", block.name, gap.start, gap.end ); self.new_block(block)?; } } let end_block_length = range.end + 1 - max; if end_block_length > 0 { let data = Bytes::Original(vec![0u8; end_block_length as usize]); let block = MemoryBlock::new( format!("{}-{}", block.name, index), max, data, block.read, block.write, block.execute, ); println!("BLANK BLOCK {}: {:08x}-{:08x}", block.name, max, range.end); self.new_block(block).ok(); } for i in range { debug_assert!( self.segments.get(&i).is_some(), "Missing address: {:016x}", i ); } Ok(()) } pub fn replace(&mut self, source: ProcessMemory) { self.segments = source.segments; } } #[derive(Clone, Debug)] pub struct MemoryBlock { name: String, read: bool, write: bool, execute: bool, base_address: Pointer, data: Bytes, } impl MemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, data: Bytes, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, read, write, execute, base_address, data, } } } impl MemoryBlock { pub fn is_executable(&self) -> bool { self.execute } pub fn is_read(&self) -> bool { self.read } pub fn is_write(&self) -> bool { self.write } pub fn range(&self) -> std::ops::Range<Pointer> { self.base_address..(self.base_address + self.data.len() as Pointer - 1) } pub fn get_range(&self, range: Range<Pointer>) -> Result<&[u8]> { let start = range.start - self.base_address; let end = range.end - self.base_address; self.data .get(start as usize..end as usize) .map_or(Err(Error::SectionAliacing), Ok) } pub fn get_range_mut(&mut self, range: Range<Pointer>) -> Result<&mut [u8]> { let start = range.start - self.base_address; let end = range.end - self.base_address; self.data .get_mut(start as usize..end as 
usize) .map_or(Err(Error::SectionAliacing), Ok) } } impl Deref for MemoryBlock { type Target = [u8]; fn deref(&self) -> &Self::Target { self.data.deref() } } impl DerefMut for MemoryBlock { fn deref_mut(&mut self) -> &mut Self::Target { self.data.deref_mut() } } impl std::cmp::PartialEq for MemoryBlock { fn eq(&self, _other: &Self) -> bool { false } } impl std::cmp::Eq for MemoryBlock {} #[derive(Clone)] pub struct BlankMemoryBlock { name: String, range: Range<Pointer>, read: bool, write: bool, execute: bool, } impl BlankMemoryBlock { pub fn new( name: impl Into<String>, base_address: Pointer, length: Pointer, read: bool, write: bool, execute: bool, ) -> Self { let name = name.into(); Self { name, range: Range { start:
random
[ { "content": "type Result<T> = std::result::Result<T, Error>;\n\n\n\npub struct SyscallRequest {\n\n process_id: ProcessId,\n\n call_code: Pointer,\n\n arguments: [Pointer; 6],\n\n}\n\n\n\npub enum SyscallResult {\n\n None,\n\n Some(Pointer),\n\n Exit,\n\n}\n\n\n\npub struct Executable {\n\n ...
Rust
prisma-fmt/src/text_document_completion.rs
ever0de/prisma-engines
4c9d4edf238ad9c4a706eb5b7201ee0b4ebee93e
use datamodel::{ datamodel_connector::{Connector, ReferentialIntegrity}, parse_configuration, parse_schema_ast, schema_ast::ast, }; use log::*; use lsp_types::*; pub(crate) fn empty_completion_list() -> CompletionList { CompletionList { is_incomplete: true, items: Vec::new(), } } pub(crate) fn completion(schema: &str, params: CompletionParams) -> CompletionList { let schema_ast = if let Ok(schema_ast) = parse_schema_ast(schema) { schema_ast } else { warn!("Failed to parse schema AST in completion request."); return empty_completion_list(); }; let position = if let Some(pos) = position_to_offset(&params.text_document_position.position, schema) { pos } else { warn!("Received a position outside of the document boundaries in CompletionParams"); return empty_completion_list(); }; let (connector, referential_integrity) = parse_configuration(schema) .ok() .and_then(|conf| conf.subject.datasources.into_iter().next()) .map(|datasource| (datasource.active_connector, datasource.referential_integrity())) .unwrap_or_else(|| { ( &datamodel::datamodel_connector::EmptyDatamodelConnector, Default::default(), ) }); let mut list = CompletionList { is_incomplete: false, items: Vec::new(), }; push_ast_completions(&mut list, connector, referential_integrity, &schema_ast, position); list } fn position_to_offset(position: &Position, document: &str) -> Option<usize> { let mut offset = 0; let mut line_offset = position.line; let mut character_offset = position.character; let mut chars = document.chars(); while line_offset > 0 { loop { match chars.next() { Some('\n') => { offset += 1; break; } Some(_) => { offset += 1; } None => return None, } } line_offset -= 1; } while character_offset > 0 { match chars.next() { Some('\n') | None => return None, Some(_) => { offset += 1; character_offset -= 1; } } } Some(offset) } fn push_ast_completions( completion_list: &mut CompletionList, connector: &'static dyn Connector, referential_integrity: ReferentialIntegrity, ast: &ast::SchemaAst, position: 
usize, ) { match ast.find_at_position(position) { ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("default", _, None)), ) => { if connector.has_capability(datamodel::datamodel_connector::ConnectorCapability::NamedDefaultValues) { completion_list.items.push(CompletionItem { label: "map: ".to_owned(), kind: Some(CompletionItemKind::PROPERTY), ..Default::default() }) } } ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))), ) if attr_name == "onDelete" || attr_name == "onUpdate" => { for referential_action in connector.referential_actions(&referential_integrity).iter() { completion_list.items.push(CompletionItem { label: referential_action.as_str().to_owned(), kind: Some(CompletionItemKind::ENUM), detail: Some(referential_action.documentation().to_owned()), ..Default::default() }); } } _ => (), } } #[test] fn position_to_offset_with_crlf() { let schema = "\r\nmodel Test {\r\n id Int @id\r\n}"; let expected_offset = schema.chars().position(|c| c == 'i').unwrap(); let found_offset = position_to_offset(&Position { line: 2, character: 4 }, schema).unwrap(); assert_eq!(found_offset, expected_offset); }
use datamodel::{ datamodel_connector::{Connector, ReferentialIntegrity}, parse_configuration, parse_schema_ast, schema_ast::ast, }; use log::*; use lsp_types::*; pub(crate) fn empty_completion_list() -> CompletionList { CompletionList { is_incomplete: true, items: Vec::new(), } } pub(crate) fn completion(schema: &str, params: CompletionParams) -> CompletionList { let schema_ast = if let Ok(schema_ast) = parse_schema_ast(schema) { schema_ast } else { warn!("Failed to parse schema AST in completion request."); return empty_completion_list(); }; let position = if let Some(pos) = position_to_offset(&params.text_document_position.position, schema) { pos } else { warn!("Received a position outside of the document boundaries in CompletionParams"); return empty_completion_list(); }; let (connector, referential_integrity) = parse_configuration(schema) .ok() .and_then(|conf| conf.subject.datasources.into_iter().next()) .map(|datasource| (datasource.active_connector, datasource.referential_integrity())) .unwrap_or_else(|| { ( &datamodel::datamodel_connector::EmptyDatamodelConnector, Default::default(), ) }); let mut list = CompletionList { is_incomplete: false, items: Vec::new(), }; push_ast_completions(&mut list, connector, referential_integrity, &schema_ast, position); list } fn position_to_offset(position: &Position, document: &str) -> Option<usize> { let mut offset = 0; let mut line_offset = position.line; let mut character_offset = position.character; let mut chars = document.chars(); while line_offset > 0 { loop { match chars.next() { Some('\n') => { offset += 1; break; } Some(_) => { offset += 1; } None => return None, } } line_offset -= 1; } while character_offset > 0 { match chars.next() { Some('\n') | None => return None, Some(_) => { offset += 1; character_offset -= 1; } } } Some(offset) } fn push_ast_completions( completion_list: &mut CompletionList, connector: &'static dyn Connector, referential_integrity: ReferentialIntegrity, ast: &ast::SchemaAst, position: 
usize, ) { match ast.find_at_position(position) { ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("default", _, None)), ) => { if connector.has_capability(datamodel::datamodel_connector::ConnectorCapability::NamedDefaultValues) { completion_list.items.push(CompletionItem { label: "map: ".to_owned(), kind: Some(CompletionItemKind::PROPERT
#[test] fn position_to_offset_with_crlf() { let schema = "\r\nmodel Test {\r\n id Int @id\r\n}"; let expected_offset = schema.chars().position(|c| c == 'i').unwrap(); let found_offset = position_to_offset(&Position { line: 2, character: 4 }, schema).unwrap(); assert_eq!(found_offset, expected_offset); }
Y), ..Default::default() }) } } ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))), ) if attr_name == "onDelete" || attr_name == "onUpdate" => { for referential_action in connector.referential_actions(&referential_integrity).iter() { completion_list.items.push(CompletionItem { label: referential_action.as_str().to_owned(), kind: Some(CompletionItemKind::ENUM), detail: Some(referential_action.documentation().to_owned()), ..Default::default() }); } } _ => (), } }
function_block-function_prefixed
[]
Rust
MBExtender/rust/speed-boostair/src/dds_loader.rs
RandomityGuy/MBExtender
5b5a4b5f8f9aafe76c6f50ab75f8a214ef22cb3c
use crate::dds_types::*; use crate::gl::{self, types::*}; use crate::io; use log::{debug, error, trace}; use mbx::core::{self, ResourceInstance, Stream}; use mbx::dgl::{self, BitmapFormat, GBitmap, TextureObject}; use mbx::prelude::*; use mbx::util; use std::error::Error; use std::ffi::CString; use std::mem; use std::os::raw::c_char; use std::ptr; use std::time::Instant; enum TextureLayout { Pixels(usize), Blocks(usize), } impl TextureLayout { fn data_size(&self, width: u32, height: u32) -> usize { match *self { Self::Pixels(bpp) => (width as usize) * (height as usize) * bpp, Self::Blocks(block_size) => { let width_blocks = (width as usize + 3) / 4; let height_blocks = (height as usize + 3) / 4; width_blocks * height_blocks * block_size } } } } impl From<BitmapFormat> for TextureLayout { fn from(format: BitmapFormat) -> Self { match format { BitmapFormat::Palettized => Self::Pixels(1), BitmapFormat::Intensity => Self::Pixels(1), BitmapFormat::Rgb => Self::Pixels(3), BitmapFormat::Rgba => Self::Pixels(4), BitmapFormat::Alpha => Self::Pixels(1), BitmapFormat::Rgb565 => Self::Pixels(2), BitmapFormat::Rgb5551 => Self::Pixels(2), BitmapFormat::Luminance => Self::Pixels(1), BitmapFormat::XDxt1 => Self::Blocks(8), BitmapFormat::XDxt3 => Self::Blocks(16), BitmapFormat::XDxt5 => Self::Blocks(16), BitmapFormat::XBc5S => Self::Blocks(16), BitmapFormat::XBc5U => Self::Blocks(16), } } } fn dds_bitmap_format(format: &PixelFormat) -> Result<BitmapFormat, &'static str> { match *format { DDSPF_R8G8B8 => Ok(BitmapFormat::Rgb), DDSPF_A8R8G8B8 => Ok(BitmapFormat::Rgba), DDSPF_A8 => Ok(BitmapFormat::Alpha), DDSPF_R5G6B5 => Ok(BitmapFormat::Rgb565), DDSPF_A1R5G5B5 => Ok(BitmapFormat::Rgb5551), DDSPF_L8 => Ok(BitmapFormat::Luminance), DDSPF_DXT1 => Ok(BitmapFormat::XDxt1), DDSPF_DXT3 => Ok(BitmapFormat::XDxt3), DDSPF_DXT5 => Ok(BitmapFormat::XDxt5), DDSPF_BC5_SNORM => Ok(BitmapFormat::XBc5S), DDSPF_BC5_UNORM | DDSPF_ATI2 => Ok(BitmapFormat::XBc5U), _ => Err("unsupported texture format"), 
} } fn swap_channels(data: &mut [u8], channels: usize) { for pixel in data.chunks_exact_mut(channels) { pixel.swap(0, 2); } } fn do_read_dds(stream: &mut Stream) -> Result<Box<GBitmap>, Box<dyn Error>> { let start_time = Instant::now(); let magic: [u8; 4] = io::read_val(stream)?; if magic != DDS_MAGIC { return Err("bad magic".into()); } let header: Header = io::read_val(stream)?; trace!("DDS header: {:?}", header); if header.size as usize != mem::size_of::<Header>() { return Err("unrecognized header size".into()); } else if header.flags & DDS_HEADER_FLAGS_TEXTURE != DDS_HEADER_FLAGS_TEXTURE { return Err("missing texture information".into()); } else if header.flags & DDS_HEADER_FLAGS_VOLUME != 0 || header.caps2 != 0 { return Err("volumetric and cubemap textures are not supported".into()); } else if header.width == 0 || header.height == 0 { return Err("invalid texture size".into()); } let mut bitmap = GBitmap::empty(); bitmap.width = header.width; bitmap.height = header.height; bitmap.format = dds_bitmap_format(&header.format)?; let layout = TextureLayout::from(bitmap.format); bitmap.bytes_per_pixel = match layout { TextureLayout::Pixels(bpp) => bpp as u32, TextureLayout::Blocks(_) => 0, }; let mut mip_levels = 1; if header.flags & DDS_HEADER_FLAGS_MIPMAP != 0 { mip_levels = header.mip_map_count.max(1).min(10); } bitmap.num_mip_levels = mip_levels; let mut total_size = 0; for i in 0..mip_levels { bitmap.mip_level_offsets[i as usize] = total_size as u32; total_size += layout.data_size(bitmap.mip_width(i), bitmap.mip_height(i)); } bitmap.byte_size = total_size as u32; let mut data: Vec<u8> = io::read_array(stream, total_size)?; match bitmap.format { BitmapFormat::Rgb => swap_channels(&mut data, 3), BitmapFormat::Rgba => swap_channels(&mut data, 4), _ => (), } debug!("Loaded {}x{} DDS in {:?}", bitmap.width, bitmap.height, start_time.elapsed()); bitmap.bits = util::leak_vec_ptr(data); Ok(bitmap) } extern "C" fn read_dds(stream: &mut Stream) -> *mut ResourceInstance { 
match do_read_dds(stream) { Ok(bitmap) => Box::into_raw(bitmap).cast(), Err(e) => { error!("Error loading DDS file: {}", e); ptr::null_mut() } } } #[fn_override(original_create_gl_name)] unsafe fn my_create_gl_name( bitmap: &mut GBitmap, clamp_to_edge: bool, first_mip: u32, texture_type: u32, to: &mut TextureObject, ) -> bool { let gl_format = match bitmap.format { BitmapFormat::XDxt1 => gl::COMPRESSED_RGB_S3TC_DXT1_EXT, BitmapFormat::XDxt3 => gl::COMPRESSED_RGBA_S3TC_DXT3_EXT, BitmapFormat::XDxt5 => gl::COMPRESSED_RGBA_S3TC_DXT5_EXT, BitmapFormat::XBc5S => gl::COMPRESSED_SIGNED_RG_RGTC2, BitmapFormat::XBc5U => gl::COMPRESSED_RG_RGTC2, _ => return original_create_gl_name(bitmap, clamp_to_edge, first_mip, texture_type, to), }; gl::GenTextures(1, &mut to.gl_texture_name); gl::BindTexture(gl::TEXTURE_2D, to.gl_texture_name); let layout = TextureLayout::from(bitmap.format); for i in first_mip..bitmap.num_mip_levels { let width = bitmap.mip_width(i); let height = bitmap.mip_height(i); let size = layout.data_size(width, height); gl::CompressedTexImage2D( gl::TEXTURE_2D, (i - first_mip) as GLint, gl_format, width as GLint, height as GLint, 0, size as GLsizei, bitmap.mip_bits(i).cast(), ); } to.texture_width = bitmap.mip_width(first_mip); to.texture_height = bitmap.mip_height(first_mip); let (min_filter, mag_filter) = if to.filter_nearest { (gl::NEAREST, gl::NEAREST) } else if bitmap.num_mip_levels - first_mip > 1 { (gl::LINEAR_MIPMAP_LINEAR, gl::LINEAR) } else { (gl::LINEAR, gl::LINEAR) }; gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, min_filter as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, mag_filter as GLint); if clamp_to_edge { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as GLint); } else { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::REPEAT as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, 
gl::REPEAT as GLint); } true } #[method_override(original_extrude_mip_levels)] unsafe fn my_extrude_mip_levels(this: &mut GBitmap, clear_borders: bool) { if let TextureLayout::Blocks(_) = this.format.into() { return; } if this.num_mip_levels > 1 { return; } original_extrude_mip_levels(this, clear_borders); } tge_statics! { static mut EXT1: *const c_char = tge_addr!(0x65d908, 0x2da900); static mut EXT2: *const c_char = tge_addr!(0x65d924, 0x2da908); } const DDS_EXTENSION: &'static str = ".dds"; pub fn init(plugin: &Plugin) -> Result<(), &'static str> { unsafe { let ext = CString::new(DDS_EXTENSION).unwrap().into_raw(); *EXT1 = ext; *EXT2 = ext; (*core::TGE_RESOURCE_MANAGER).register_extension(DDS_EXTENSION, read_dds); } plugin.intercept(dgl::tge_create_gl_name, my_create_gl_name, &original_create_gl_name)?; plugin.intercept( dgl::tge_extrude_mip_levels, my_extrude_mip_levels, &original_extrude_mip_levels, )?; Ok(()) }
use crate::dds_types::*; use crate::gl::{self, types::*}; use crate::io; use log::{debug, error, trace}; use mbx::core::{self, ResourceInstance, Stream}; use mbx::dgl::{self, BitmapFormat, GBitmap, TextureObject}; use mbx::prelude::*; use mbx::util; use std::error::Error; use std::ffi::CString; use std::mem; use std::os::raw::c_char; use std::ptr; use std::time::Instant; enum TextureLayout { Pixels(usize), Blocks(usize), } impl TextureLayout { fn data_size(&self, width: u32, height: u32) -> usize { match *self { Self::Pixels(bpp) => (width as usize) * (height as usize) * bpp, Self::Blocks(block_size) => { let width_blocks = (width as usize + 3) / 4; let height_blocks = (height as usize + 3) / 4; width_blocks * height_blocks * block_size } } } } impl From<BitmapFormat> for TextureLayout { fn from(format: BitmapFormat) -> Self { match format { BitmapFormat::Palettized => Self::Pixels(1), BitmapFormat::Intensity => Self::Pixels(1), BitmapFormat::Rgb => Self::Pixels(3), BitmapFormat::Rgba => Self::Pixels(4), BitmapFormat::Alpha => Self::Pixels(1), BitmapFormat::Rgb565 => Self::Pixels(2), BitmapFormat::Rgb5551 => Self::Pixels(2), BitmapFormat::Luminance => Self::Pixels(1), BitmapFormat::XDxt1 => Self::Blocks(8), BitmapFormat::XDxt3 => Self::Blocks(16), BitmapFormat::XDxt5 => Self::Blocks(16), BitmapFormat::XBc5S => Self::Blocks(16), BitmapFormat::XBc5U => Self::Blocks(16), } } } fn dds_bitmap_format(format: &PixelFormat) -> Result<BitmapFormat, &'static str> { match *format { DDSPF_R8G8B8 => Ok(BitmapFormat::Rgb), DDSPF_A8R8G8B8 => Ok(BitmapFormat::Rgba), DDSPF_A8 => Ok(BitmapFormat::Alpha), DDSPF_R5G6B5 => Ok(BitmapFormat::Rgb565), DDSPF_A1R5G5B5 => Ok(BitmapFormat::Rgb5551), DDSPF_L8 => Ok(BitmapFormat::Luminance), DDSPF_DXT1 => Ok(BitmapFormat::XDxt1), DDSPF_DXT3 => Ok(BitmapFormat::XDxt3), DDSPF_DXT5 => Ok(BitmapFormat::XDxt5), DDSPF_BC5_SNORM => Ok(BitmapFormat::XBc5S), DDSPF_BC5_UNORM | DDSPF_ATI2 => Ok(BitmapFormat::XBc5U), _ => Err("unsupported texture format"), 
} } fn swap_channels(data: &mut [u8], channels: usize) { for pixel in data.chunks_exact_mut(channels) { pixel.swap(0, 2); } } fn do_read_dds(stream: &mut Stream) -> Result<Box<GBitmap>, Box<dyn Error>> { let start_time = Instant::now(); let magic: [u8; 4] = io::read_val(stream)?; if magic != DDS_MAGIC { return Err("bad magic".into()); } let header: Header = io::read_val(stream)?; trace!("DDS header: {:?}", header); if header.size as usize != mem::size_of::<Header>() { return Err("unrecognized header size".into()); } else if header.flags & DDS_HEADER_FLAGS_TEXTURE != DDS_HEADER_FLAGS_TEXTURE { return Err("missing texture information".into()); } else if header.flags & DDS_HEADER_FLAGS_VOLUME != 0 || header.caps2 != 0 { return Err("volumetric and cubemap textures are not supported".into()); } else if header.width == 0 || header.height == 0 { return Err("invalid texture size".into()); } let mut bitmap = GBitmap::empty(); bitmap.width = header.width; bitmap.height = header.height; bitmap.format = dds_bitmap_format(&header.format)?; let layout = TextureLayout::from(bitmap.format); bitmap.bytes_per_pixel = match layout { TextureLayout::P
l::TEXTURE_MIN_FILTER, min_filter as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, mag_filter as GLint); if clamp_to_edge { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as GLint); } else { gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::REPEAT as GLint); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::REPEAT as GLint); } true } #[method_override(original_extrude_mip_levels)] unsafe fn my_extrude_mip_levels(this: &mut GBitmap, clear_borders: bool) { if let TextureLayout::Blocks(_) = this.format.into() { return; } if this.num_mip_levels > 1 { return; } original_extrude_mip_levels(this, clear_borders); } tge_statics! { static mut EXT1: *const c_char = tge_addr!(0x65d908, 0x2da900); static mut EXT2: *const c_char = tge_addr!(0x65d924, 0x2da908); } const DDS_EXTENSION: &'static str = ".dds"; pub fn init(plugin: &Plugin) -> Result<(), &'static str> { unsafe { let ext = CString::new(DDS_EXTENSION).unwrap().into_raw(); *EXT1 = ext; *EXT2 = ext; (*core::TGE_RESOURCE_MANAGER).register_extension(DDS_EXTENSION, read_dds); } plugin.intercept(dgl::tge_create_gl_name, my_create_gl_name, &original_create_gl_name)?; plugin.intercept( dgl::tge_extrude_mip_levels, my_extrude_mip_levels, &original_extrude_mip_levels, )?; Ok(()) }
ixels(bpp) => bpp as u32, TextureLayout::Blocks(_) => 0, }; let mut mip_levels = 1; if header.flags & DDS_HEADER_FLAGS_MIPMAP != 0 { mip_levels = header.mip_map_count.max(1).min(10); } bitmap.num_mip_levels = mip_levels; let mut total_size = 0; for i in 0..mip_levels { bitmap.mip_level_offsets[i as usize] = total_size as u32; total_size += layout.data_size(bitmap.mip_width(i), bitmap.mip_height(i)); } bitmap.byte_size = total_size as u32; let mut data: Vec<u8> = io::read_array(stream, total_size)?; match bitmap.format { BitmapFormat::Rgb => swap_channels(&mut data, 3), BitmapFormat::Rgba => swap_channels(&mut data, 4), _ => (), } debug!("Loaded {}x{} DDS in {:?}", bitmap.width, bitmap.height, start_time.elapsed()); bitmap.bits = util::leak_vec_ptr(data); Ok(bitmap) } extern "C" fn read_dds(stream: &mut Stream) -> *mut ResourceInstance { match do_read_dds(stream) { Ok(bitmap) => Box::into_raw(bitmap).cast(), Err(e) => { error!("Error loading DDS file: {}", e); ptr::null_mut() } } } #[fn_override(original_create_gl_name)] unsafe fn my_create_gl_name( bitmap: &mut GBitmap, clamp_to_edge: bool, first_mip: u32, texture_type: u32, to: &mut TextureObject, ) -> bool { let gl_format = match bitmap.format { BitmapFormat::XDxt1 => gl::COMPRESSED_RGB_S3TC_DXT1_EXT, BitmapFormat::XDxt3 => gl::COMPRESSED_RGBA_S3TC_DXT3_EXT, BitmapFormat::XDxt5 => gl::COMPRESSED_RGBA_S3TC_DXT5_EXT, BitmapFormat::XBc5S => gl::COMPRESSED_SIGNED_RG_RGTC2, BitmapFormat::XBc5U => gl::COMPRESSED_RG_RGTC2, _ => return original_create_gl_name(bitmap, clamp_to_edge, first_mip, texture_type, to), }; gl::GenTextures(1, &mut to.gl_texture_name); gl::BindTexture(gl::TEXTURE_2D, to.gl_texture_name); let layout = TextureLayout::from(bitmap.format); for i in first_mip..bitmap.num_mip_levels { let width = bitmap.mip_width(i); let height = bitmap.mip_height(i); let size = layout.data_size(width, height); gl::CompressedTexImage2D( gl::TEXTURE_2D, (i - first_mip) as GLint, gl_format, width as GLint, height as GLint, 
0, size as GLsizei, bitmap.mip_bits(i).cast(), ); } to.texture_width = bitmap.mip_width(first_mip); to.texture_height = bitmap.mip_height(first_mip); let (min_filter, mag_filter) = if to.filter_nearest { (gl::NEAREST, gl::NEAREST) } else if bitmap.num_mip_levels - first_mip > 1 { (gl::LINEAR_MIPMAP_LINEAR, gl::LINEAR) } else { (gl::LINEAR, gl::LINEAR) }; gl::TexParameteri(gl::TEXTURE_2D, g
random
[ { "content": "fn fix_texture(name: u32, bitmap: &mut GBitmap, texture_type: u32) {\n\n if texture_type == 0 || texture_type == 1 || texture_type == 2 {\n\n return;\n\n }\n\n if bitmap.width <= 512 && bitmap.height <= 512 {\n\n return;\n\n }\n\n if bitmap.num_mip_levels > 1 {\n\n ...
Rust
src/discard_call_lives.rs
cgswords/rsc
cdfe135888fe1c6da11a792527fa98f86224acbe
use util::Binop; use util::Relop; use util::Label; use util::Location; use util::mk_uvar; use util::Ident; use std::collections::HashMap; use finalize_locations::Program as FLProgram; use finalize_locations::LetrecEntry as FLLetrecEntry; use finalize_locations::Body as FLBody; use finalize_locations::Exp as FLExp; use finalize_locations::Effect as FLEffect; use finalize_locations::Pred as FLPred; use finalize_locations::Triv as FLTriv; use finalize_locations::Offset as FLOffset; use finalize_locations::Variable as FLVar; #[derive(Debug)] pub enum Program { Letrec(Vec<LetrecEntry>, Body) } #[derive(Debug)] pub struct LetrecEntry { pub label : Label , pub rhs : Body } #[derive(Debug)] pub struct Body { pub locations : HashMap<Ident, Location> , pub expression : Exp } #[derive(Debug)] pub enum Exp { Call(Triv, Vec<Location>) , If(Pred,Box<Exp>,Box<Exp>) , Begin(Vec<Effect>,Box<Exp>) } #[derive(Debug)] pub enum Pred { True , False , Op(Relop,Triv,Triv) , If(Box<Pred>,Box<Pred>,Box<Pred>) , Begin(Vec<Effect>, Box<Pred>) } #[derive(Debug)] pub enum Effect { SetOp(Variable, (Binop, Triv, Triv)) , Set(Variable, Triv) , Nop , MSet(Variable, Offset, Triv) , ReturnPoint(Label, Exp, i64) , If(Pred, Box<Effect>, Box<Effect>) , Begin(Box<Vec<Effect>>, Box<Effect>) } #[derive(Debug)] pub enum Variable { Loc(Location) , UVar(Ident) } #[derive(Debug)] pub enum Triv { Var(Variable) , Num(i64) , Label(Label) , MRef(Variable, Offset) } #[derive(Debug)] pub enum Offset { UVar(Ident) , Reg(Ident) , Num(i64) } pub fn discard_call_lives(input : Program) -> FLProgram { return match input { Program::Letrec(letrecs, pgm_body) => FLProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect() , body(pgm_body)) } } fn letrec_entry(input : LetrecEntry) -> FLLetrecEntry { FLLetrecEntry { label : input.label, rhs : body(input.rhs) } } fn body(input : Body) -> FLBody { FLBody { locations : input.locations , expression : exp(input.expression) } } macro_rules! 
mk_box { ($e:expr) => [Box::new($e)] } fn exp(input : Exp) -> FLExp { return match input { Exp::Call(t, _) => FLExp::Call(triv(t)) , Exp::If(test, conseq, alt) => FLExp::If(pred(test), mk_box!(exp(*conseq)), mk_box!(exp(*alt))) , Exp::Begin(effs, body) => FLExp::Begin(effs.into_iter().map(|e| effect(e)).collect(), mk_box!(exp(*body))) } } fn pred(input : Pred) -> FLPred { return match input { Pred::True => FLPred::True , Pred::False => FLPred::False , Pred::Op(op,t1,t2) => FLPred::Op(op, triv(t1), triv(t2)) , Pred::If(test, conseq, alt) => FLPred::If(mk_box!(pred(*test)), mk_box!(pred(*conseq)), mk_box!(pred(*alt))) , Pred::Begin(effs, body) => FLPred::Begin( effs.into_iter().map(|e| effect(e)).collect(), mk_box!(pred(*body))) } } fn effect(input: Effect) -> FLEffect { return match input { Effect::SetOp(l, (op, t1, t2)) => FLEffect::SetOp(var(l), (op, triv(t1), triv(t2))) , Effect::Set(l, t) => FLEffect::Set(var(l), triv(t)) , Effect::Nop => FLEffect::Nop , Effect::MSet(base, off, val) => FLEffect::MSet(var(base), offset(off), triv(val)) , Effect::ReturnPoint(lbl, body, off) => FLEffect::ReturnPoint(lbl, exp(body), off) , Effect::If(test, conseq, alt) => FLEffect::If(pred(test), mk_box!(effect(*conseq)) , mk_box!(effect(*alt))) , Effect::Begin(effs, body) => FLEffect::Begin( mk_box!((*effs).into_iter().map(|e| effect(e)).collect()) , mk_box!(effect(*body))) } } fn loc(input : Location) -> Location { return input; } fn var(input : Variable) -> FLVar { return match input { Variable::Loc(l) => FLVar::Loc(loc(l)) , Variable::UVar(uv) => FLVar::UVar(uv) } } fn triv(input : Triv) -> FLTriv { return match input { Triv::Var(v) => FLTriv::Var(var(v)) , Triv::Num(n) => FLTriv::Num(n) , Triv::Label(l) => FLTriv::Label(l) , Triv::MRef(base, off) => FLTriv::MRef(var(base), offset(off)) } } fn offset(input: Offset) -> FLOffset { return match input { Offset::UVar(uv) => FLOffset::UVar(uv) , Offset::Reg(r) => FLOffset::Reg(r) , Offset::Num(n) => FLOffset::Num(n) } } fn 
mk_num_lit(n: i64) -> Triv { return Triv::Num(n); } fn mk_fv_triv(n: i64) -> Triv { return mk_loc_triv(Location::FrameVar(n)); } fn mk_reg(s: &str) -> Variable { return Variable::Loc(mk_loc_reg(s)); } fn mk_loc_reg(s: &str) -> Location { return Location::Reg(Ident::from_str(s)); } fn mk_call(s: &str, lives: Vec<Location>) -> Exp { return Exp::Call(Triv::Label(mk_lbl(s)), lives); } fn mk_lbl(s : &str) -> Label { return Label::new(Ident::from_str(s)); } fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: Triv) -> Effect { return Effect::SetOp(dest, (op, t1, t2)); } fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect { return Effect::MSet(dest, offset, val); } fn mk_loc_triv(l : Location) -> Triv { return as_var_triv(loc_as_var(l)); } fn mk_var(id : Ident) -> Variable { return Variable::UVar(id); } fn mk_var_triv(id: Ident) -> Triv { return as_var_triv(Variable::UVar(id)); } fn as_var_triv(v: Variable) -> Triv { return Triv::Var(v); } fn loc_as_var(l: Location) -> Variable { return Variable::Loc(l); } fn mk_set(dest: Variable, val: Triv) -> Effect { return Effect::Set(dest,val) } pub fn test1() -> Program { let x0 = mk_uvar("x"); let x1 = mk_uvar("x"); let x2 = mk_uvar("x"); let x3 = mk_uvar("x"); let y4 = mk_uvar("y"); let mut map = HashMap::new(); map.insert(x0, mk_loc_reg("rbx")); map.insert(x1, Location::FrameVar(2)); map.insert(x2, mk_loc_reg("r8")); map.insert(x3, mk_loc_reg("r9")); map.insert(y4, mk_loc_reg("r15")); let mut body_map = HashMap::new(); body_map.insert(x2, mk_loc_reg("r8")); body_map.insert(x3, mk_loc_reg("r9")); return Program::Letrec( vec![ LetrecEntry{ label : mk_lbl("X1") , rhs : Body { locations : map , expression : Exp::If(Pred::Op(Relop::LT, mk_var_triv(x2), mk_var_triv(x3)), Box::new( Exp::Begin( vec![ mk_set_op(mk_var(x1), Binop::Plus, mk_var_triv(x1), mk_num_lit(35)) , mk_mset(mk_var(x0), Offset::Num(10), mk_num_lit(40)) , mk_mset(mk_var(x0), Offset::UVar(y4), mk_num_lit(25)) , Effect::ReturnPoint(mk_lbl("foo"), 
Exp::Begin( vec![ mk_set(mk_reg("rax"), mk_fv_triv(1)) ] , mk_box!(mk_call("X1", Vec::new()))) , 16) , mk_set(mk_var(x0), Triv::MRef(mk_reg("rax"),Offset::Num(10)))] , Box::new(mk_call("void", vec![mk_loc_reg("rax")])))) , Box::new( Exp::Begin( vec![mk_set_op(mk_reg("rax"), Binop::Plus, as_var_triv(mk_reg("rax")), mk_num_lit(10))] , Box::new(mk_call("void", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))))) } } ] , Body { locations : body_map , expression : Exp::Begin( vec![ mk_set(mk_var(x2), mk_num_lit(0)) , mk_set(mk_var(x3), mk_num_lit(1)) ] , Box::new(mk_call("X1", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))) }); }
use util::Binop; use util::Relop; use util::Label; use util::Location; use util::mk_uvar; use util::Ident; use std::collections::HashMap; use finalize_locations::Program as FLProgram; use finalize_locations::LetrecEntry as FLLetrecEntry; use finalize_locations::Body as FLBody; use finalize_locations::Exp as FLExp; use finalize_locations::Effect as FLEffect; use finalize_locations::Pred as FLPred; use finalize_locations::Triv as FLTriv; use finalize_locations::Offset as FLOffset; use finalize_locations::Variable as FLVar; #[derive(Debug)] pub enum Program { Letrec(Vec<LetrecEntry>, Body) } #[derive(Debug)] pub struct LetrecEntry { pub label : Label , pub rhs : Body } #[derive(Debug)] pub struct Body { pub locations : HashMap<Ident, Location> , pub expression : Exp } #[derive(Debug)] pub enum Exp { Call(Triv, Vec<Location>) , If(Pred,Box<Exp>,Box<Exp>) , Begin(Vec<Effect>,Box<Exp>) } #[derive(Debug)] pub enum Pred { True , False , Op(Relop,Triv,Triv) , If(Box<Pred>,Box<Pred>,Box<Pred>) , Begin(Vec<Effect>, Box<Pred>) } #[derive(Debug)] pub enum Effect { SetOp(Variable, (Binop, Triv, Triv)) , Set(Variable, Triv) , Nop , MSet(Variable, Offset, Triv) , ReturnPoint(Label, Exp, i64) , If(Pred, Box<Effect>, Box<Effect>) , Begin(Box<Vec<Effect>>, Box<Effect>) } #[derive(Debug)] pub enum Variable { Loc(Location) , UVar(Ident) } #[derive(Debug)] pub enum Triv { Var(Variable) , Num(i64) , Label(Label) , MRef(Variable, Offset) } #[derive(Debug)] pub enum Offset { UVar(Ident) , Reg(Ident) , Num(i64) } pub fn discard_call_lives(input : Program) -> FLProgram { return
} fn letrec_entry(input : LetrecEntry) -> FLLetrecEntry { FLLetrecEntry { label : input.label, rhs : body(input.rhs) } } fn body(input : Body) -> FLBody { FLBody { locations : input.locations , expression : exp(input.expression) } } macro_rules! mk_box { ($e:expr) => [Box::new($e)] } fn exp(input : Exp) -> FLExp { return match input { Exp::Call(t, _) => FLExp::Call(triv(t)) , Exp::If(test, conseq, alt) => FLExp::If(pred(test), mk_box!(exp(*conseq)), mk_box!(exp(*alt))) , Exp::Begin(effs, body) => FLExp::Begin(effs.into_iter().map(|e| effect(e)).collect(), mk_box!(exp(*body))) } } fn pred(input : Pred) -> FLPred { return match input { Pred::True => FLPred::True , Pred::False => FLPred::False , Pred::Op(op,t1,t2) => FLPred::Op(op, triv(t1), triv(t2)) , Pred::If(test, conseq, alt) => FLPred::If(mk_box!(pred(*test)), mk_box!(pred(*conseq)), mk_box!(pred(*alt))) , Pred::Begin(effs, body) => FLPred::Begin( effs.into_iter().map(|e| effect(e)).collect(), mk_box!(pred(*body))) } } fn effect(input: Effect) -> FLEffect { return match input { Effect::SetOp(l, (op, t1, t2)) => FLEffect::SetOp(var(l), (op, triv(t1), triv(t2))) , Effect::Set(l, t) => FLEffect::Set(var(l), triv(t)) , Effect::Nop => FLEffect::Nop , Effect::MSet(base, off, val) => FLEffect::MSet(var(base), offset(off), triv(val)) , Effect::ReturnPoint(lbl, body, off) => FLEffect::ReturnPoint(lbl, exp(body), off) , Effect::If(test, conseq, alt) => FLEffect::If(pred(test), mk_box!(effect(*conseq)) , mk_box!(effect(*alt))) , Effect::Begin(effs, body) => FLEffect::Begin( mk_box!((*effs).into_iter().map(|e| effect(e)).collect()) , mk_box!(effect(*body))) } } fn loc(input : Location) -> Location { return input; } fn var(input : Variable) -> FLVar { return match input { Variable::Loc(l) => FLVar::Loc(loc(l)) , Variable::UVar(uv) => FLVar::UVar(uv) } } fn triv(input : Triv) -> FLTriv { return match input { Triv::Var(v) => FLTriv::Var(var(v)) , Triv::Num(n) => FLTriv::Num(n) , Triv::Label(l) => FLTriv::Label(l) , 
Triv::MRef(base, off) => FLTriv::MRef(var(base), offset(off)) } } fn offset(input: Offset) -> FLOffset { return match input { Offset::UVar(uv) => FLOffset::UVar(uv) , Offset::Reg(r) => FLOffset::Reg(r) , Offset::Num(n) => FLOffset::Num(n) } } fn mk_num_lit(n: i64) -> Triv { return Triv::Num(n); } fn mk_fv_triv(n: i64) -> Triv { return mk_loc_triv(Location::FrameVar(n)); } fn mk_reg(s: &str) -> Variable { return Variable::Loc(mk_loc_reg(s)); } fn mk_loc_reg(s: &str) -> Location { return Location::Reg(Ident::from_str(s)); } fn mk_call(s: &str, lives: Vec<Location>) -> Exp { return Exp::Call(Triv::Label(mk_lbl(s)), lives); } fn mk_lbl(s : &str) -> Label { return Label::new(Ident::from_str(s)); } fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: Triv) -> Effect { return Effect::SetOp(dest, (op, t1, t2)); } fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect { return Effect::MSet(dest, offset, val); } fn mk_loc_triv(l : Location) -> Triv { return as_var_triv(loc_as_var(l)); } fn mk_var(id : Ident) -> Variable { return Variable::UVar(id); } fn mk_var_triv(id: Ident) -> Triv { return as_var_triv(Variable::UVar(id)); } fn as_var_triv(v: Variable) -> Triv { return Triv::Var(v); } fn loc_as_var(l: Location) -> Variable { return Variable::Loc(l); } fn mk_set(dest: Variable, val: Triv) -> Effect { return Effect::Set(dest,val) } pub fn test1() -> Program { let x0 = mk_uvar("x"); let x1 = mk_uvar("x"); let x2 = mk_uvar("x"); let x3 = mk_uvar("x"); let y4 = mk_uvar("y"); let mut map = HashMap::new(); map.insert(x0, mk_loc_reg("rbx")); map.insert(x1, Location::FrameVar(2)); map.insert(x2, mk_loc_reg("r8")); map.insert(x3, mk_loc_reg("r9")); map.insert(y4, mk_loc_reg("r15")); let mut body_map = HashMap::new(); body_map.insert(x2, mk_loc_reg("r8")); body_map.insert(x3, mk_loc_reg("r9")); return Program::Letrec( vec![ LetrecEntry{ label : mk_lbl("X1") , rhs : Body { locations : map , expression : Exp::If(Pred::Op(Relop::LT, mk_var_triv(x2), mk_var_triv(x3)), 
Box::new( Exp::Begin( vec![ mk_set_op(mk_var(x1), Binop::Plus, mk_var_triv(x1), mk_num_lit(35)) , mk_mset(mk_var(x0), Offset::Num(10), mk_num_lit(40)) , mk_mset(mk_var(x0), Offset::UVar(y4), mk_num_lit(25)) , Effect::ReturnPoint(mk_lbl("foo"), Exp::Begin( vec![ mk_set(mk_reg("rax"), mk_fv_triv(1)) ] , mk_box!(mk_call("X1", Vec::new()))) , 16) , mk_set(mk_var(x0), Triv::MRef(mk_reg("rax"),Offset::Num(10)))] , Box::new(mk_call("void", vec![mk_loc_reg("rax")])))) , Box::new( Exp::Begin( vec![mk_set_op(mk_reg("rax"), Binop::Plus, as_var_triv(mk_reg("rax")), mk_num_lit(10))] , Box::new(mk_call("void", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))))) } } ] , Body { locations : body_map , expression : Exp::Begin( vec![ mk_set(mk_var(x2), mk_num_lit(0)) , mk_set(mk_var(x3), mk_num_lit(1)) ] , Box::new(mk_call("X1", vec![mk_loc_reg("rax"), mk_loc_reg("rbp")]))) }); }
match input { Program::Letrec(letrecs, pgm_body) => FLProgram::Letrec( letrecs.into_iter().map(|x| letrec_entry(x)).collect() , body(pgm_body)) }
if_condition
[ { "content": "fn mk_mset(dest: Variable, offset: Offset, val : Triv) -> Effect {\n\n return Effect::MSet(dest, offset, val);\n\n}\n\n\n", "file_path": "src/finalize_locations.rs", "rank": 0, "score": 258680.8981013795 }, { "content": "fn mk_set_op(dest: Variable, op: Binop, t1 : Triv, t2: T...
Rust
src/lib.rs
rust-cv/cv-geom
645b4894a3c056f9043b4be7755a3a4dd0e41ae6
#![no_std] use cv_core::nalgebra::{zero, Matrix3x4, Matrix4, RowVector4}; use cv_core::{ Bearing, CameraPoint, CameraToCamera, Pose, TriangulatorObservances, TriangulatorRelative, WorldPoint, WorldToCamera, }; #[derive(Copy, Clone, Debug)] pub struct MinSquaresTriangulator { epsilon: f64, max_iterations: usize, } impl MinSquaresTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for MinSquaresTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl TriangulatorObservances for MinSquaresTriangulator { fn triangulate_observances<B: Bearing>( &self, pairs: impl IntoIterator<Item = (WorldToCamera, B)>, ) -> Option<WorldPoint> { let mut a: Matrix4<f64> = zero(); for (pose, bearing) in pairs { let bearing = bearing.bearing().into_inner(); let rot = pose.0.rotation.matrix(); let trans = pose.0.translation.vector; let pose = Matrix3x4::<f64>::from_columns(&[ rot.column(0), rot.column(1), rot.column(2), trans.column(0), ]); let term = pose - bearing * bearing.transpose() * pose; a += term.transpose() * term; } let se = a.try_symmetric_eigen(self.epsilon, self.max_iterations)?; se.eigenvalues .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| se.eigenvectors.column(ix).into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) } } #[derive(Copy, Clone, Debug)] pub struct RelativeDltTriangulator { epsilon: f64, max_iterations: usize, } impl RelativeDltTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for RelativeDltTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl 
TriangulatorRelative for RelativeDltTriangulator { fn triangulate_relative<A: Bearing, B: Bearing>( &self, relative_pose: CameraToCamera, a: A, b: B, ) -> Option<CameraPoint> { let pose = relative_pose.homogeneous(); let a = a.bearing_unnormalized(); let b = b.bearing_unnormalized(); let mut design = Matrix4::zeros(); design .row_mut(0) .copy_from(&RowVector4::new(-a.z, 0.0, a.x, 0.0)); design .row_mut(1) .copy_from(&RowVector4::new(0.0, -a.z, a.y, 0.0)); design .row_mut(2) .copy_from(&(b.x * pose.row(2) - b.z * pose.row(0))); design .row_mut(3) .copy_from(&(b.y * pose.row(2) - b.z * pose.row(1))); let svd = design.try_svd(false, true, self.epsilon, self.max_iterations)?; svd.singular_values .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| svd.v_t.unwrap().row(ix).transpose().into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) } }
#![no_std] use cv_core::nalgebra::{zero, Matrix3x4, Matrix4, RowVector4}; use cv_core::{ Bearing, CameraPoint, CameraToCamera, Pose, TriangulatorObservances, TriangulatorRelative, WorldPoint, WorldToCamera, }; #[derive(Copy, Clone, Debug)] pub struct MinSquaresTriangulator { epsilon: f64, max_iterations: usize, } impl MinSquaresTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for MinSquaresTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl TriangulatorObservances for MinSquaresTriangulator { fn triangulate_observances<B: Bearing>( &self, pairs: impl IntoIterator<Item = (WorldToCamera, B)>, ) -> Option<WorldPoint> { let mut a: Matrix4<f64> = zero(); for (pose, bearing) in pairs { let bearing = bearing.bearing().into_inner(); let rot = pose.0.rotation.matrix(); let trans = pose.0.translation.vector; let pose = Matrix3x4::<f64>::from_columns(&[ rot.column(0), rot.column(1), rot.column(2), trans.column(0), ]); let term = pose - bearing * bearing.transpose() * pose; a += term.transpose() * term; } let se = a.try_symmetric_eigen(self.epsilon, self.max_iterations)?; se.eigenvalues .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| se.eigenvectors.column(ix).into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) } } #[derive(Copy, Clone, Debug)] pub struct RelativeDltTriangulator { epsilon: f64, max_iterations: usize, } impl RelativeDltTriangulator { pub fn new() -> Self { Default::default() } pub fn epsilon(self, epsilon: f64) -> Self { Self { epsilon, ..self } } pub fn max_iterations(self, max_iterations: usize) -> Self { Self { max_iterations, ..self } } } impl Default for RelativeDltTriangulator { fn default() -> Self { Self { epsilon: 1e-9, max_iterations: 100, } } } impl 
TriangulatorRelative for RelativeDltTriangulator { fn triangulate_relative<A: Bearing, B: Bearing>( &self, relative_pose: CameraToCamera, a: A, b: B, ) -> Option<CameraPoint> { let pose = relative_pose.homogeneous(); let a = a.bearing_unnormalized(); let b = b.bearing_unnormalized(); let mut design = Matrix4::zeros(); design .row_mut(0) .copy_from(&RowVector4::new(-a.z, 0.0, a.x, 0.0)); design .row_mut(1) .copy_from(&RowVector4::new(0.0, -a.z, a.y, 0.0)); design .row_mut(2) .copy_from(&(b.x * pose.row(2) - b.z * pose.row(0))); design .row_mut(3) .copy_from(&(b.y * pose.row(2) - b.z * pose.row(1))); let svd = design.try_svd(false, true, self.epsilon, self.max_iteration
}
s)?; svd.singular_values .iter() .enumerate() .min_by_key(|&(_, &n)| float_ord::FloatOrd(n)) .map(|(ix, _)| svd.v_t.unwrap().row(ix).transpose().into_owned()) .map(|v| if v.w.is_sign_negative() { -v } else { v }) .map(Into::into) }
function_block-function_prefixed
[ { "content": "#[panic_handler]\n\nfn panic(_info: &PanicInfo) -> ! {\n\n loop {}\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn _start() -> ! {\n\n loop {}\n\n}\n", "file_path": "ensure_no_std/src/main.rs", "rank": 0, "score": 12230.500703461832 }, { "content": "// ensure_no_std/src/m...
Rust
tests/src/vector.rs
sndels/yuki
bac8c1530ecc03b2c9657cd4cbde91112fbf82a8
#[cfg(test)] mod tests { use approx::{assert_abs_diff_eq, assert_abs_diff_ne, assert_relative_eq, assert_relative_ne}; use std::panic; use yuki::math::{Normal, Point3, Vec2, Vec3, Vec4}; #[test] fn new() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(Vec2::new(0.0, 1.0), v); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(v.z, 2.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0), v); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, 0.0f32); assert_eq!(v.y, 1.0f32); assert_eq!(v.z, 2.0f32); assert_eq!(v.w, 3.0f32); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0), v); } #[test] fn zeros() { assert_eq!(Vec2::zeros(), Vec2::new(0, 0)); assert_eq!(Vec3::zeros(), Vec3::new(0, 0, 0)); assert_eq!(Vec4::zeros(), Vec4::new(0, 0, 0, 0)); } #[test] fn ones() { assert_eq!(Vec2::ones(), Vec2::new(1, 1)); assert_eq!(Vec3::ones(), Vec3::new(1, 1, 1)); assert_eq!(Vec4::ones(), Vec4::new(1, 1, 1, 1)); } #[test] fn has_nans() { let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); } #[test] fn dot() { assert_eq!(Vec2::new(2, 3).dot(Vec2::new(4, 5)), 2 * 4 + 3 * 5); assert_eq!( Vec3::new(2, 3, 4).dot(Vec3::new(5, 6, 7)), 2 * 5 + 3 * 6 + 4 * 7 ); assert_eq!( Vec4::new(2, 3, 4, 5).dot(Vec4::new(6, 7, 8, 9)), 2 * 6 + 3 * 7 + 4 * 8 + 5 * 9 ); 
assert_eq!( Vec3::new(2.0, 3.0, 4.0).dot_n(Normal::new(5.0, 6.0, 7.0)), 2.0 * 5.0 + 3.0 * 6.0 + 4.0 * 7.0 ); } #[test] fn cross() { assert_eq!( Vec3::new(2.0, 3.0, 4.0).cross(Vec3::new(5.0, 6.0, -7.0)), Vec3::new(-45.0, 34.0, -3.0) ); } #[test] fn len_sqr() { assert_eq!(Vec2::new(2, 3).len_sqr(), 2 * 2 + 3 * 3); assert_eq!(Vec3::new(2, 3, 4).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4); assert_eq!( Vec4::new(2, 3, 4, 5).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4 + 5 * 5 ); } #[test] fn len() { assert_abs_diff_eq!( Vec2::new(2.0, 3.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32).sqrt() ); assert_abs_diff_eq!( Vec3::new(2.0, 3.0, 4.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32).sqrt() ); assert_abs_diff_eq!( Vec4::new(2.0, 3.0, 4.0, 5.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32 + 5.0f32 * 5.0f32).sqrt() ); } #[test] fn normalized() { assert_abs_diff_eq!(Vec2::new(1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec3::new(1.0, 1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec4::new(1.0, 1.0, 1.0, 1.0).normalized().len(), 1.0); } #[test] fn min() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.min(b), Vec2::new(0, 1)); assert_eq!(a.min(b), b.min(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.min(b), Vec3::new(0, 1, 4)); assert_eq!(a.min(b), b.min(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.min(b), Vec4::new(0, 1, 4, 6)); assert_eq!(a.min(b), b.min(a)); } #[test] fn max() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.max(b), Vec2::new(3, 2)); assert_eq!(a.max(b), b.max(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.max(b), Vec3::new(3, 2, 5)); assert_eq!(a.max(b), b.max(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.max(b), Vec4::new(3, 2, 5, 7)); assert_eq!(a.max(b), b.max(a)); } #[test] fn min_comp() { assert_eq!(Vec2::new(0.0, 1.0).min_comp(), 0.0); assert_eq!(Vec2::new(1.0, 
0.0).min_comp(), 0.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).min_comp(), 0.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).min_comp(), 0.0); } #[test] fn max_comp() { assert_eq!(Vec2::new(0.0, 1.0).max_comp(), 1.0); assert_eq!(Vec2::new(1.0, 0.0).max_comp(), 1.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_comp(), 2.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_comp(), 3.0); } #[test] fn max_dimension() { assert_eq!(Vec2::new(0.0, 1.0).max_dimension(), 1); assert_eq!(Vec2::new(1.0, 0.0).max_dimension(), 0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_dimension(), 2); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_dimension(), 3); } #[test] fn permutation() { assert_eq!(Vec2::new(2.0, 3.0).permuted(1, 0), Vec2::new(3.0, 2.0)); assert_eq!( Vec3::new(3.0, 4.0, 5.0).permuted(1, 2, 0), Vec3::new(4.0, 5.0, 3.0) ); assert_eq!( Vec4::new(4.0, 5.0, 6.0, 7.0).permuted(1, 2, 3, 0), Vec4::new(5.0, 6.0, 7.0, 4.0) ); } #[test] fn from() { assert_eq!(Vec2::from(2), Vec2::new(2, 2)); assert_eq!(Vec3::from(2), Vec3::new(2, 2, 2)); assert_eq!(Vec4::from(2), Vec4::new(2, 2, 2, 2)); assert_eq!( Vec3::from(Normal::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); assert_eq!( Vec3::from(Point3::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); } #[test] fn index() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, v[0]); assert_eq!(v.y, v[1]); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, v[0]); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, v[0]); let mut v = Vec2::zeros(); v[0] = 1.0; v[1] = 2.0; assert_eq!(v[0], 1.0); assert_eq!(v[1], 2.0); let mut v = Vec3::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); let mut v = Vec4::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); } #[test] fn neg() { assert_eq!(-Vec2::new(1, 2), Vec2::new(-1, -2)); assert_eq!(-Vec3::new(1, 2, 3), Vec3::new(-1, -2, -3)); assert_eq!(-Vec4::new(1, 2, 3, 4), Vec4::new(-1, -2, -3, -4)); } #[test] fn add() { assert_eq!(Vec2::new(1, 2) + Vec2::new(4, 6), Vec2::new(5, 8)); assert_eq!(Vec3::new(1, 2, 3) + Vec3::new(4, 6, 7), 
Vec3::new(5, 8, 10)); assert_eq!( Vec4::new(1, 2, 3, 4) + Vec4::new(5, 7, 9, 10), Vec4::new(6, 9, 12, 14) ); assert_eq!(Vec2::new(1, 2) + 3, Vec2::new(4, 5)); assert_eq!(Vec3::new(1, 2, 3) + 4, Vec3::new(5, 6, 7)); assert_eq!(Vec4::new(1, 2, 3, 4) + 5, Vec4::new(6, 7, 8, 9)); } #[test] fn sub() { assert_eq!(Vec2::new(5, 5) - Vec2::new(1, 2), Vec2::new(4, 3)); assert_eq!(Vec3::new(7, 7, 7) - Vec3::new(1, 2, 3), Vec3::new(6, 5, 4)); assert_eq!( Vec4::new(9, 9, 9, 9) - Vec4::new(1, 2, 3, 4), Vec4::new(8, 7, 6, 5) ); assert_eq!(Vec2::new(3, 2) - 2, Vec2::new(1, 0)); assert_eq!(Vec3::new(7, 6, 5) - 4, Vec3::new(3, 2, 1)); assert_eq!(Vec4::new(9, 8, 7, 6) - 5, Vec4::new(4, 3, 2, 1)); } #[test] fn mul() { assert_eq!(Vec2::new(2, 3) * 4, Vec2::new(8, 12)); assert_eq!(Vec3::new(2, 3, 4) * 5, Vec3::new(10, 15, 20)); assert_eq!(Vec4::new(2, 3, 4, 5) * 6, Vec4::new(12, 18, 24, 30)); } #[test] fn div() { assert_eq!(Vec2::new(8, 12) / 4, Vec2::new(2, 3)); assert_eq!(Vec3::new(10, 15, 20) / 5, Vec3::new(2, 3, 4)); assert_eq!(Vec4::new(12, 18, 24, 30) / 6, Vec4::new(2, 3, 4, 5)); } #[test] fn add_assign() { let mut v = Vec2::new(1, 2); v += Vec2::new(4, 6); assert_eq!(v, Vec2::new(5, 8)); let mut v = Vec3::new(1, 2, 3); v += Vec3::new(4, 6, 7); assert_eq!(v, Vec3::new(5, 8, 10)); let mut v = Vec4::new(1, 2, 3, 4); v += Vec4::new(5, 7, 9, 10); assert_eq!(v, Vec4::new(6, 9, 12, 14)); let mut v = Vec2::new(1, 2); v += 3; assert_eq!(v, Vec2::new(4, 5)); let mut v = Vec3::new(1, 2, 3); v += 4; assert_eq!(v, Vec3::new(5, 6, 7)); let mut v = Vec4::new(1, 2, 3, 4); v += 5; assert_eq!(v, Vec4::new(6, 7, 8, 9)); } #[test] fn sub_assign() { let mut v = Vec2::new(5, 5); v -= Vec2::new(1, 2); assert_eq!(v, Vec2::new(4, 3)); let mut v = Vec3::new(7, 7, 7); v -= Vec3::new(1, 2, 3); assert_eq!(v, Vec3::new(6, 5, 4)); let mut v = Vec4::new(9, 9, 9, 9); v -= Vec4::new(1, 2, 3, 4); assert_eq!(v, Vec4::new(8, 7, 6, 5)); let mut v = Vec2::new(3, 2); v -= 2; assert_eq!(v, Vec2::new(1, 0)); let mut v = 
Vec3::new(7, 6, 5); v -= 4; assert_eq!(v, Vec3::new(3, 2, 1)); let mut v = Vec4::new(9, 8, 7, 6); v -= 5; assert_eq!(v, Vec4::new(4, 3, 2, 1)); } #[test] fn mul_assign() { let mut v = Vec2::new(2, 3); v *= 4; assert_eq!(v, Vec2::new(8, 12)); let mut v = Vec3::new(2, 3, 4); v *= 5; assert_eq!(v, Vec3::new(10, 15, 20)); let mut v = Vec4::new(2, 3, 4, 5); v *= 6; assert_eq!(v, Vec4::new(12, 18, 24, 30)); } #[test] fn div_assign() { let mut v = Vec2::new(8, 12); v /= 4; assert_eq!(v, Vec2::new(2, 3)); let mut v = Vec3::new(10, 15, 20); v /= 5; assert_eq!(v, Vec3::new(2, 3, 4)); let mut v = Vec4::new(12, 18, 24, 30); v /= 6; assert_eq!(v, Vec4::new(2, 3, 4, 5)); } #[test] fn abs_diff_eq() { assert_abs_diff_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_abs_diff_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0); } #[test] fn relative_eq() { assert_relative_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_relative_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0,); assert_relative_eq!( &Vec2::new(2.0, 2.0), &Vec2::ones(), epsilon = 0.0, max_relative = 0.5 ); } }
#[cfg(test)] mod tests { use approx::{assert_abs_diff_eq, assert_abs_diff_ne, assert_relative_eq, assert_relative_ne}; use std::panic; use yuki::math::{Normal, Point3, Vec2, Vec3, Vec4}; #[test] fn new() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(Vec2::new(0.0, 1.0), v); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, 0.0); assert_eq!(v.y, 1.0); assert_eq!(v.z, 2.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0), v); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, 0.0f32); assert_eq!(v.y, 1.0f32); assert_eq!(v.z, 2.0f32); assert_eq!(v.w, 3.0f32); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0), v); } #[test] fn zeros() { assert_eq!(Vec2::zeros(), Vec2::new(0, 0)); assert_eq!(Vec3::zeros(), Vec3::new(0, 0, 0)); assert_eq!(Vec4::zeros(), Vec4::new(0, 0, 0, 0)); } #[test] fn ones() { assert_eq!(Vec2::ones(), Vec2::new(1, 1)); assert_eq!(Vec3::ones(), Vec3::new(1, 1, 1)); assert_eq!(Vec4::ones(), Vec4::new(1, 1, 1, 1)); } #[test] fn has_nans() { let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(f32::NAN, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec2::new(0.0, f32::NAN)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec3::new(f32::NAN, 0.0, 0.0)); assert!(result.is_err()); let result = panic::catch_unwind(|| Vec4::new(f32::NAN, 0.0, 0.0, 0.0)); assert!(result.is_err()); } #[test] fn dot() { assert_eq!(Vec2::new(2, 3).dot(Vec2::new(4, 5)), 2 * 4 + 3 * 5); assert_eq!( Vec3::new(2, 3, 4).dot(Vec3::new(5, 6, 7)), 2 * 5 + 3 * 6 + 4 * 7 ); assert_eq!( Vec4::new(2, 3, 4, 5).dot(Vec4::new(6, 7, 8, 9)), 2 * 6 + 3 * 7 + 4 * 8 + 5 * 9 ); 
assert_eq!( Vec3::new(2.0, 3.0, 4.0).dot_n(Normal::new(5.0, 6.0, 7.0)), 2.0 * 5.0 + 3.0 * 6.0 + 4.0 * 7.0 ); } #[test] fn cross() { assert_eq!( Vec3::new(2.0, 3.0, 4.0).cross(Vec3::new(5.0, 6.0, -7.0)), Vec3::new(-45.0, 34.0, -3.0) ); } #[test] fn len_sqr() { assert_eq!(Vec2::new(2, 3).len_sqr(), 2 * 2 + 3 * 3); assert_eq!(Vec3::new(2, 3, 4).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4); assert_eq!( Vec4::new(2, 3, 4, 5).len_sqr(), 2 * 2 + 3 * 3 + 4 * 4 + 5 * 5 ); } #[test] fn len() { assert_abs_diff_eq!( Vec2::new(2.0, 3.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32).sqrt() ); assert_abs_diff_eq!( Vec3::new(2.0, 3.0, 4.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32).sqrt() ); assert_abs_diff_eq!( Vec4::new(2.0, 3.0, 4.0, 5.0).len(), (2.0f32 * 2.0f32 + 3.0f32 * 3.0f32 + 4.0f32 * 4.0f32 + 5.0f32 * 5.0f32).sqrt() ); } #[test] fn normalized() { assert_abs_diff_eq!(Vec2::new(1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec3::new(1.0, 1.0, 1.0).normalized().len(), 1.0); assert_abs_diff_eq!(Vec4::new(1.0, 1.0, 1.0, 1.0).normalized().len(), 1.0); } #[test] fn min() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.min(b), Vec2::new(0, 1)); assert_eq!(a.min(b), b.min(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.min(b), Vec3::new(0, 1, 4)); assert_eq!(a.min(b), b.min(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.min(b), Vec4::new(0, 1, 4, 6)); assert_eq!(a.min(b), b.min(a)); } #[test] fn max() { let a = Vec2::new(0, 2); let b = Vec2::new(3, 1); assert_eq!(a.max(b), Vec2::new(3, 2)); assert_eq!(a.max(b), b.max(a)); let a = Vec3::new(0, 2, 4); let b = Vec3::new(3, 1, 5); assert_eq!(a.max(b), Vec3::new(3, 2, 5)); assert_eq!(a.max(b), b.max(a)); let a = Vec4::new(0, 2, 4, 7); let b = Vec4::new(3, 1, 5, 6); assert_eq!(a.max(b), Vec4::new(3, 2, 5, 7)); assert_eq!(a.max(b), b.max(a)); } #[test] fn min_comp() { assert_eq!(Vec2::new(0.0, 1.0).min_comp(), 0.0); assert_eq!(Vec2::new(1.0, 
0.0).min_comp(), 0.0);
#[test] fn max_comp() { assert_eq!(Vec2::new(0.0, 1.0).max_comp(), 1.0); assert_eq!(Vec2::new(1.0, 0.0).max_comp(), 1.0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_comp(), 2.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_comp(), 3.0); } #[test] fn max_dimension() { assert_eq!(Vec2::new(0.0, 1.0).max_dimension(), 1); assert_eq!(Vec2::new(1.0, 0.0).max_dimension(), 0); assert_eq!(Vec3::new(0.0, 1.0, 2.0).max_dimension(), 2); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).max_dimension(), 3); } #[test] fn permutation() { assert_eq!(Vec2::new(2.0, 3.0).permuted(1, 0), Vec2::new(3.0, 2.0)); assert_eq!( Vec3::new(3.0, 4.0, 5.0).permuted(1, 2, 0), Vec3::new(4.0, 5.0, 3.0) ); assert_eq!( Vec4::new(4.0, 5.0, 6.0, 7.0).permuted(1, 2, 3, 0), Vec4::new(5.0, 6.0, 7.0, 4.0) ); } #[test] fn from() { assert_eq!(Vec2::from(2), Vec2::new(2, 2)); assert_eq!(Vec3::from(2), Vec3::new(2, 2, 2)); assert_eq!(Vec4::from(2), Vec4::new(2, 2, 2, 2)); assert_eq!( Vec3::from(Normal::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); assert_eq!( Vec3::from(Point3::new(1.0, 2.0, 3.0)), Vec3::new(1.0, 2.0, 3.0) ); } #[test] fn index() { let v = Vec2::new(0.0, 1.0); assert_eq!(v.x, v[0]); assert_eq!(v.y, v[1]); let v = Vec3::new(0.0, 1.0, 2.0); assert_eq!(v.x, v[0]); let v = Vec4::new(0.0, 1.0, 2.0, 3.0); assert_eq!(v.x, v[0]); let mut v = Vec2::zeros(); v[0] = 1.0; v[1] = 2.0; assert_eq!(v[0], 1.0); assert_eq!(v[1], 2.0); let mut v = Vec3::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); let mut v = Vec4::zeros(); v[0] = 1.0; assert_eq!(v[0], 1.0); } #[test] fn neg() { assert_eq!(-Vec2::new(1, 2), Vec2::new(-1, -2)); assert_eq!(-Vec3::new(1, 2, 3), Vec3::new(-1, -2, -3)); assert_eq!(-Vec4::new(1, 2, 3, 4), Vec4::new(-1, -2, -3, -4)); } #[test] fn add() { assert_eq!(Vec2::new(1, 2) + Vec2::new(4, 6), Vec2::new(5, 8)); assert_eq!(Vec3::new(1, 2, 3) + Vec3::new(4, 6, 7), Vec3::new(5, 8, 10)); assert_eq!( Vec4::new(1, 2, 3, 4) + Vec4::new(5, 7, 9, 10), Vec4::new(6, 9, 12, 14) ); assert_eq!(Vec2::new(1, 2) + 3, 
Vec2::new(4, 5)); assert_eq!(Vec3::new(1, 2, 3) + 4, Vec3::new(5, 6, 7)); assert_eq!(Vec4::new(1, 2, 3, 4) + 5, Vec4::new(6, 7, 8, 9)); } #[test] fn sub() { assert_eq!(Vec2::new(5, 5) - Vec2::new(1, 2), Vec2::new(4, 3)); assert_eq!(Vec3::new(7, 7, 7) - Vec3::new(1, 2, 3), Vec3::new(6, 5, 4)); assert_eq!( Vec4::new(9, 9, 9, 9) - Vec4::new(1, 2, 3, 4), Vec4::new(8, 7, 6, 5) ); assert_eq!(Vec2::new(3, 2) - 2, Vec2::new(1, 0)); assert_eq!(Vec3::new(7, 6, 5) - 4, Vec3::new(3, 2, 1)); assert_eq!(Vec4::new(9, 8, 7, 6) - 5, Vec4::new(4, 3, 2, 1)); } #[test] fn mul() { assert_eq!(Vec2::new(2, 3) * 4, Vec2::new(8, 12)); assert_eq!(Vec3::new(2, 3, 4) * 5, Vec3::new(10, 15, 20)); assert_eq!(Vec4::new(2, 3, 4, 5) * 6, Vec4::new(12, 18, 24, 30)); } #[test] fn div() { assert_eq!(Vec2::new(8, 12) / 4, Vec2::new(2, 3)); assert_eq!(Vec3::new(10, 15, 20) / 5, Vec3::new(2, 3, 4)); assert_eq!(Vec4::new(12, 18, 24, 30) / 6, Vec4::new(2, 3, 4, 5)); } #[test] fn add_assign() { let mut v = Vec2::new(1, 2); v += Vec2::new(4, 6); assert_eq!(v, Vec2::new(5, 8)); let mut v = Vec3::new(1, 2, 3); v += Vec3::new(4, 6, 7); assert_eq!(v, Vec3::new(5, 8, 10)); let mut v = Vec4::new(1, 2, 3, 4); v += Vec4::new(5, 7, 9, 10); assert_eq!(v, Vec4::new(6, 9, 12, 14)); let mut v = Vec2::new(1, 2); v += 3; assert_eq!(v, Vec2::new(4, 5)); let mut v = Vec3::new(1, 2, 3); v += 4; assert_eq!(v, Vec3::new(5, 6, 7)); let mut v = Vec4::new(1, 2, 3, 4); v += 5; assert_eq!(v, Vec4::new(6, 7, 8, 9)); } #[test] fn sub_assign() { let mut v = Vec2::new(5, 5); v -= Vec2::new(1, 2); assert_eq!(v, Vec2::new(4, 3)); let mut v = Vec3::new(7, 7, 7); v -= Vec3::new(1, 2, 3); assert_eq!(v, Vec3::new(6, 5, 4)); let mut v = Vec4::new(9, 9, 9, 9); v -= Vec4::new(1, 2, 3, 4); assert_eq!(v, Vec4::new(8, 7, 6, 5)); let mut v = Vec2::new(3, 2); v -= 2; assert_eq!(v, Vec2::new(1, 0)); let mut v = Vec3::new(7, 6, 5); v -= 4; assert_eq!(v, Vec3::new(3, 2, 1)); let mut v = Vec4::new(9, 8, 7, 6); v -= 5; assert_eq!(v, Vec4::new(4, 3, 2, 
1)); } #[test] fn mul_assign() { let mut v = Vec2::new(2, 3); v *= 4; assert_eq!(v, Vec2::new(8, 12)); let mut v = Vec3::new(2, 3, 4); v *= 5; assert_eq!(v, Vec3::new(10, 15, 20)); let mut v = Vec4::new(2, 3, 4, 5); v *= 6; assert_eq!(v, Vec4::new(12, 18, 24, 30)); } #[test] fn div_assign() { let mut v = Vec2::new(8, 12); v /= 4; assert_eq!(v, Vec2::new(2, 3)); let mut v = Vec3::new(10, 15, 20); v /= 5; assert_eq!(v, Vec3::new(2, 3, 4)); let mut v = Vec4::new(12, 18, 24, 30); v /= 6; assert_eq!(v, Vec4::new(2, 3, 4, 5)); } #[test] fn abs_diff_eq() { assert_abs_diff_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_abs_diff_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_abs_diff_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_abs_diff_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0); } #[test] fn relative_eq() { assert_relative_eq!(&Vec2::<f32>::zeros(), &Vec2::<f32>::zeros()); assert_relative_ne!(&Vec2::<f32>::zeros(), &Vec2::<f32>::ones()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(0.0, 1.0), &Vec2::zeros()); assert_relative_ne!(&Vec2::new(1.0, 0.0), &Vec2::zeros()); assert_relative_eq!(&Vec2::new(1.0, 1.0), &Vec2::zeros(), epsilon = 1.0,); assert_relative_eq!( &Vec2::new(2.0, 2.0), &Vec2::ones(), epsilon = 0.0, max_relative = 0.5 ); } }
assert_eq!(Vec3::new(0.0, 1.0, 2.0).min_comp(), 0.0); assert_eq!(Vec4::new(0.0, 1.0, 2.0, 3.0).min_comp(), 0.0); }
function_block-function_prefixed
[ { "content": "// Returns the refracted direction for `wi` and `n` or `None` if total internal reflection happens.\n\nfn refract(wi: Vec3<f32>, n: Normal<f32>, eta: f32) -> Option<Vec3<f32>> {\n\n let cos_theta_i = n.dot_v(wi);\n\n let sin_2_theta_i = (1.0 - cos_theta_i * cos_theta_i).max(0.0);\n\n let ...
Rust
wiz/wiz/src/build.rs
ChanTsune/wiz
199d0f4698822a177ede8015bf8e04f190f39934
use crate::core::dep::{resolve_manifest_dependencies, ResolvedDependencyTree}; use crate::core::error::CliError; use crate::core::load_project; use crate::core::workspace::Workspace; use clap::ArgMatches; use std::collections::{BTreeSet, HashMap, HashSet}; use std::env; use std::error::Error; use std::fs::create_dir_all; use std::path::PathBuf; use wiz_utils::topological_sort::topological_sort; pub(crate) const COMMAND_NAME: &str = "build"; pub(crate) fn command(_: &str, options: &ArgMatches) -> Result<(), Box<dyn Error>> { let manifest_path = options.value_of("manifest-path"); let another_std = options.value_of("std"); let ws = load_project(manifest_path)?; let resolved_dependencies = resolve_manifest_dependencies(&ws.cws, &ws.get_manifest()?, another_std)?; println!("{:?}", resolved_dependencies); let target_dir = if let Some(target_dir) = options.value_of("target-dir") { let d = PathBuf::from(target_dir); if d.exists() && !d.is_dir() { return Err(Box::from(CliError::from(format!( "{} is not directory", d.display() )))); } else { d } } else { env::current_dir()?.join("target") }; create_dir_all(&target_dir)?; let wlib_paths = compile_dependencies(&ws, resolved_dependencies, target_dir.to_str().unwrap())?; let mut args = vec![ws.cws.to_str().unwrap()]; args.extend(["--out-dir", target_dir.to_str().unwrap()]); args.extend([ "--name", ws.cws.file_name().and_then(|p| p.to_str()).unwrap(), ]); args.extend(["--type", "bin"]); for wlib_path in wlib_paths.iter() { args.extend(["--library", wlib_path]); } if let Some(target_triple) = options.value_of("target-triple") { args.extend(["--target-triple", target_triple]); }; super::subcommand::execute("wizc", &args) } #[derive(Clone, Debug, Eq, PartialEq, Hash)] struct Task { name: String, version: String, src_path: String, } fn dependency_list(dependencies: ResolvedDependencyTree) -> HashMap<Task, HashSet<Task>> { fn dependency_list( result: &mut HashMap<Task, HashSet<Task>>, dep: ResolvedDependencyTree, ) -> Task { let 
ResolvedDependencyTree { name, version, src_path, dependencies, } = dep; let task = Task { name, version, src_path, }; let dependencies = dependencies .into_iter() .map(|d| dependency_list(result, d)) .collect(); result.insert(task.clone(), dependencies); task } let mut result = HashMap::new(); for dependency in dependencies.dependencies { dependency_list(&mut result, dependency); } result } fn compile_dependencies( ws: &Workspace, dependencies: ResolvedDependencyTree, target_dir: &str, ) -> Result<BTreeSet<String>, Box<dyn Error>> { let mut wlib_paths = BTreeSet::new(); let dependen_list = dependency_list(dependencies); let dep_list = topological_sort(dependen_list.clone())?; for dep in dep_list.into_iter().flatten() { let dep_wlib_paths = dependen_list .get(&dep) .unwrap() .iter() .map(|d| format!("{}/{}.wlib", target_dir, d.name)) .collect::<Vec<_>>(); let mut args = vec![dep.src_path.as_str()]; args.extend(["--out-dir", target_dir]); args.extend(["--name", dep.name.as_str()]); args.extend(["--type", "lib"]); for wlib_path in dep_wlib_paths.iter() { args.extend(["--library", wlib_path]); } let output = super::subcommand::output("wizc", &args)?; println!("{}", String::from_utf8_lossy(&output.stdout)); if !output.stderr.is_empty() { eprintln!("{}", String::from_utf8_lossy(&output.stderr)); } if !output.status.success() { return Err(Box::new(CliError::from(format!( "compile failed {:?}", dep.name )))); } wlib_paths.extend(dep_wlib_paths); wlib_paths.insert(format!("{}/{}.wlib", target_dir, dep.name)); } Ok(wlib_paths) }
use crate::core::dep::{resolve_manifest_dependencies, ResolvedDependencyTree}; use crate::core::error::CliError; use crate::core::load_project; use crate::core::workspace::Workspace; use clap::ArgMatches; use std::collections::{BTreeSet, HashMap, HashSet}; use std::env; use std::error::Error; use std::fs::create_dir_all; use std::path::PathBuf; use wiz_utils::topological_sort::topological_sort; pub(crate) const COMMAND_NAME: &str = "build"; pub(crate) fn command(_: &str, options: &ArgMatches) -> Result<(), Box<dyn Error>> { let manifest_path = options.value_of("manifest-path"); let another_std = options.value_of("std"); let ws = load_project(manifest_path)?; let resolved_dependencies = resolve_manifest_dependencies(&ws.cws, &ws.get_manifest()?, another_std)?; println!("{:?}", resolved_dependencies); let target_dir = if let Some(target_dir) = options.value_of("target-dir") { let d = PathBuf::from(target_dir); if d.exists() && !d.is_dir() { return Err(Box::from(CliError::from(format!( "{} is not directory", d.display() )))); } else { d } } else { env::current_dir()?.join("target") }; create_dir_all(&target_dir)?; let wlib_paths = compile_dependencies(&ws, resolved_dependencies, target_dir.to_str().unwrap())?; let mut args = vec![ws.cws.to_str().unwrap()]; args.extend(["--out-dir", target_dir.to_str().unwrap()]); args.extend([ "--name", ws.cws.file_name().and_then(|p| p.to_str()).unwrap(), ]); args.extend(["--type", "bin"]); for wlib_path in wlib_paths.iter() { args.extend(["--library", wlib_path]); } if let Some(target_triple) = options.value_of("target-triple") { args.extend(["--target-triple", target_triple]); }; super::subcommand::execute("wizc", &args) } #[derive(Clone, Debug, Eq, PartialEq, Hash)] struct Task { name: String, version: String, src_path: String, } fn dependency_list(dependencies: ResolvedDependencyTree) -> HashMap<Task, HashSet<Task>> { fn dependency_list( result: &mut HashMap<Task, HashSet<T
let mut result = HashMap::new(); for dependency in dependencies.dependencies { dependency_list(&mut result, dependency); } result } fn compile_dependencies( ws: &Workspace, dependencies: ResolvedDependencyTree, target_dir: &str, ) -> Result<BTreeSet<String>, Box<dyn Error>> { let mut wlib_paths = BTreeSet::new(); let dependen_list = dependency_list(dependencies); let dep_list = topological_sort(dependen_list.clone())?; for dep in dep_list.into_iter().flatten() { let dep_wlib_paths = dependen_list .get(&dep) .unwrap() .iter() .map(|d| format!("{}/{}.wlib", target_dir, d.name)) .collect::<Vec<_>>(); let mut args = vec![dep.src_path.as_str()]; args.extend(["--out-dir", target_dir]); args.extend(["--name", dep.name.as_str()]); args.extend(["--type", "lib"]); for wlib_path in dep_wlib_paths.iter() { args.extend(["--library", wlib_path]); } let output = super::subcommand::output("wizc", &args)?; println!("{}", String::from_utf8_lossy(&output.stdout)); if !output.stderr.is_empty() { eprintln!("{}", String::from_utf8_lossy(&output.stderr)); } if !output.status.success() { return Err(Box::new(CliError::from(format!( "compile failed {:?}", dep.name )))); } wlib_paths.extend(dep_wlib_paths); wlib_paths.insert(format!("{}/{}.wlib", target_dir, dep.name)); } Ok(wlib_paths) }
ask>>, dep: ResolvedDependencyTree, ) -> Task { let ResolvedDependencyTree { name, version, src_path, dependencies, } = dep; let task = Task { name, version, src_path, }; let dependencies = dependencies .into_iter() .map(|d| dependency_list(result, d)) .collect(); result.insert(task.clone(), dependencies); task }
function_block-function_prefixed
[ { "content": "pub fn parse_from_string(src: &str, name: Option<&str>) -> Result<WizFile> {\n\n match file(Span::from(src)) {\n\n Ok((s, f)) => {\n\n if !s.is_empty() {\n\n let location = Location::new(s.location_offset(), s.location_line());\n\n Err(ParseError:...
Rust
src/window.rs
TomasKralCZ/Leoric
df1c20319e4f0cc140dfbbce33a76dab0ac28188
use std::time::Instant; use egui::CtxRef; use egui_backend::{painter::Painter, DpiScaling, EguiStateHandler}; use egui_sdl2_gl::ShaderVersion; use eyre::{eyre, Result}; use sdl2::{ event::{Event, WindowEvent}, video::Window, video::{GLContext, GLProfile, SwapInterval}, EventPump, Sdl, VideoSubsystem, }; use egui_sdl2_gl as egui_backend; pub struct MyWindow { _sdl_context: Sdl, _video_subsystem: VideoSubsystem, window: Window, _gl_ctx: GLContext, pub event_pump: EventPump, pub egui_ctx: CtxRef, egui_state: EguiStateHandler, painter: Painter, start_time: Instant, pub width: u32, pub height: u32, } impl MyWindow { pub fn new(title: &str) -> Result<Self> { let sdl_context = sdl2::init().map_err(|e| eyre!("{e}"))?; let video_subsystem = sdl_context.video().map_err(|e| eyre!("{e}"))?; let size = video_subsystem .display_bounds(0) .map_err(|e| eyre!("{e}"))?; let width = (size.width() as f32 * 0.7) as u32; let height = (size.height() as f32 * 0.7) as u32; let window = video_subsystem .window(title, width, height) .opengl() .resizable() .position_centered() .allow_highdpi() .build()?; let gl_ctx = window.gl_create_context().map_err(|e| eyre!("{e}"))?; let gl_attr = video_subsystem.gl_attr(); gl_attr.set_context_major_version(4); gl_attr.set_context_minor_version(2); gl_attr.set_context_profile(GLProfile::Core); gl_attr.set_context_flags().debug().set(); gl_attr.set_double_buffer(true); window .subsystem() .gl_set_swap_interval(SwapInterval::Immediate) .map_err(|e| eyre!("{e}"))?; let shader_ver = ShaderVersion::Default; let custom_dpi = { if width <= 1280 && height <= 720 { 1.0 } else if width <= 1920 && height <= 1080 { 1.5 } else { 2.5 } }; let (painter, egui_state) = egui_backend::with_sdl2(&window, shader_ver, DpiScaling::Custom(custom_dpi)); let egui_ctx = egui::CtxRef::default(); let event_pump = sdl_context.event_pump().map_err(|e| eyre!("{e}"))?; Ok(Self { _sdl_context: sdl_context, _video_subsystem: video_subsystem, window, _gl_ctx: gl_ctx, event_pump, egui_ctx, 
egui_state, painter, start_time: Instant::now(), width, height, }) } pub fn begin_frame(&mut self) { self.egui_state.input.time = Some(self.start_time.elapsed().as_secs_f64()); self.egui_ctx.begin_frame(self.egui_state.input.take()); } pub fn end_frame(&mut self) -> bool { unsafe { gl::Disable(gl::DEPTH_TEST); gl::Disable(gl::CULL_FACE); gl::PolygonMode(gl::FRONT_AND_BACK, gl::FILL); } let (egui_output, paint_cmds) = self.egui_ctx.end_frame(); self.egui_state.process_output(&self.window, &egui_output); let paint_jobs = self.egui_ctx.tessellate(paint_cmds); if !egui_output.needs_repaint { /* if let Some(event) = self.event_pump.wait_event_timeout(5) { match event { Event::Quit { .. } => return true, _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } */ } else { self.painter .paint_jobs(None, paint_jobs, &self.egui_ctx.font_image()); self.window.gl_swap_window(); } for event in self.event_pump.poll_iter() { match event { Event::Quit { .. } => return true, Event::Window { timestamp: _, window_id: _, win_event: WindowEvent::Resized(new_width, new_height), } => { self.width = new_width as u32; self.height = new_height as u32; } _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } false } }
use std::time::Instant; use egui::CtxRef; use egui_backend::{painter::Painter, DpiScaling, EguiStateHandler}; use egui_sdl2_gl::ShaderVersion; use eyre::{eyre, Result}; use sdl2::{ event::{Event, WindowEvent}, video::Window, video::{GLContext, GLProfile, SwapInterval}, EventPump, Sdl, VideoSubsystem, }; use egui_sdl2_gl as egui_backend; pub struct MyWindow { _sdl_context: Sdl, _video_subsystem: VideoSubsystem, window: Window, _gl_ctx: GLContext, pub event_pump: EventPump, pub egui_ctx: CtxRef, egui_state: EguiStateHandler, painter: Painter, start_time: Instant, pub width: u32, pub height: u32, } impl MyWindow {
pub fn begin_frame(&mut self) { self.egui_state.input.time = Some(self.start_time.elapsed().as_secs_f64()); self.egui_ctx.begin_frame(self.egui_state.input.take()); } pub fn end_frame(&mut self) -> bool { unsafe { gl::Disable(gl::DEPTH_TEST); gl::Disable(gl::CULL_FACE); gl::PolygonMode(gl::FRONT_AND_BACK, gl::FILL); } let (egui_output, paint_cmds) = self.egui_ctx.end_frame(); self.egui_state.process_output(&self.window, &egui_output); let paint_jobs = self.egui_ctx.tessellate(paint_cmds); if !egui_output.needs_repaint { /* if let Some(event) = self.event_pump.wait_event_timeout(5) { match event { Event::Quit { .. } => return true, _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } */ } else { self.painter .paint_jobs(None, paint_jobs, &self.egui_ctx.font_image()); self.window.gl_swap_window(); } for event in self.event_pump.poll_iter() { match event { Event::Quit { .. } => return true, Event::Window { timestamp: _, window_id: _, win_event: WindowEvent::Resized(new_width, new_height), } => { self.width = new_width as u32; self.height = new_height as u32; } _ => { self.egui_state .process_input(&self.window, event, &mut self.painter); } } } false } }
pub fn new(title: &str) -> Result<Self> { let sdl_context = sdl2::init().map_err(|e| eyre!("{e}"))?; let video_subsystem = sdl_context.video().map_err(|e| eyre!("{e}"))?; let size = video_subsystem .display_bounds(0) .map_err(|e| eyre!("{e}"))?; let width = (size.width() as f32 * 0.7) as u32; let height = (size.height() as f32 * 0.7) as u32; let window = video_subsystem .window(title, width, height) .opengl() .resizable() .position_centered() .allow_highdpi() .build()?; let gl_ctx = window.gl_create_context().map_err(|e| eyre!("{e}"))?; let gl_attr = video_subsystem.gl_attr(); gl_attr.set_context_major_version(4); gl_attr.set_context_minor_version(2); gl_attr.set_context_profile(GLProfile::Core); gl_attr.set_context_flags().debug().set(); gl_attr.set_double_buffer(true); window .subsystem() .gl_set_swap_interval(SwapInterval::Immediate) .map_err(|e| eyre!("{e}"))?; let shader_ver = ShaderVersion::Default; let custom_dpi = { if width <= 1280 && height <= 720 { 1.0 } else if width <= 1920 && height <= 1080 { 1.5 } else { 2.5 } }; let (painter, egui_state) = egui_backend::with_sdl2(&window, shader_ver, DpiScaling::Custom(custom_dpi)); let egui_ctx = egui::CtxRef::default(); let event_pump = sdl_context.event_pump().map_err(|e| eyre!("{e}"))?; Ok(Self { _sdl_context: sdl_context, _video_subsystem: video_subsystem, window, _gl_ctx: gl_ctx, event_pump, egui_ctx, egui_state, painter, start_time: Instant::now(), width, height, }) }
function_block-full_function
[ { "content": "/// Create an opengl buffer with integer content.\n\n///\n\n/// 'buffer' is a reference to a slice of T.\n\n///\n\n/// 'components', 'attrib index' and 'typ' have the same meaning as the respective\n\n/// arguments in glVertexAttribPointer.\n\npub fn create_int_buf<T: Copy>(buffer: &[T], component...
Rust
rmqtt-plugins/rmqtt-plugin-template/src/lib.rs
phial3/rmqtt
8c29529e273007178fd0af73dccb6b0bf6729339
use async_trait::async_trait; use rmqtt::{ broker::hook::{Handler, HookResult, Parameter, Register, ReturnType, Type}, plugin::{DynPlugin, DynPluginResult, Plugin}, Result, Runtime, }; #[inline] pub async fn register( runtime: &'static Runtime, name: &'static str, descr: &'static str, default_startup: bool, immutable: bool, ) -> Result<()> { runtime .plugins .register(name, default_startup, immutable, move || -> DynPluginResult { Box::pin(async move { Template::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) }) }) }) .await?; Ok(()) } struct Template { _runtime: &'static Runtime, name: String, descr: String, register: Box<dyn Register>, } impl Template { #[inline] async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> { let register = runtime.extends.hook_mgr().await.register(); Ok(Self { _runtime: runtime, name: name.into(), descr: descr.into(), register }) } } #[async_trait] impl Plugin for Template { #[inline] async fn init(&mut self) -> Result<()> { log::debug!("{} init", self.name); self.register.add(Type::ClientConnack, Box::new(HookHandler::new())).await; self.register.add(Type::ClientSubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::ClientUnsubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::MessageDelivered, Box::new(HookHandler::new())).await; self.register.add(Type::MessagePublish, Box::new(HookHandler::new())).await; self.register.add_priority(Type::ClientSubscribeCheckAcl, 10, Box::new(HookHandler::new())).await; self.register.add_priority(Type::GrpcMessageReceived, 10, Box::new(HookHandler::new())).await; Ok(()) } #[inline] fn name(&self) -> &str { &self.name } #[inline] async fn start(&mut self) -> Result<()> { log::info!("{} start", self.name); self.register.start().await; Ok(()) } #[inline] async fn stop(&mut self) -> Result<bool> { log::info!("{} stop", self.name); self.register.stop().await; Ok(true) } #[inline] fn version(&self) -> &str { "0.1.1" } 
#[inline] fn descr(&self) -> &str { &self.descr } } struct HookHandler {} impl HookHandler { fn new() -> Self { Self {} } } #[async_trait] impl Handler for HookHandler { async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType { match param { Parameter::ClientConnack(connect_info, r) => { log::debug!("client connack, {:?}, {:?}", connect_info, r); } Parameter::ClientSubscribe(_session, c, subscribe) => { log::debug!("{:?} client subscribe, {:?}", c.id, subscribe); } Parameter::ClientUnsubscribe(_session, c, unsubscribe) => { log::debug!("{:?} client unsubscribe, {:?}", c.id, unsubscribe); } Parameter::MessagePublish(_session, c, publish) => { log::debug!("{:?} message publish, {:?}", c.id, publish); } Parameter::MessageDelivered(_session, c, from, _publish) => { log::debug!("{:?} MessageDelivered, {:?}", c.id, from); } Parameter::ClientSubscribeCheckAcl(_s, _c, subscribe) => { log::debug!("{:?} ClientSubscribeCheckAcl, {:?}", _c.id, subscribe); } _ => { log::error!("unimplemented, {:?}", param) } } (true, acc) } }
use async_trait::async_trait; use rmqtt::{ broker::hook::{Handler, HookResult, Parameter, Register, ReturnType, Type}, plugin::{DynPlugin, DynPluginResult, Plugin}, Result, Runtime, }; #[inline] pub async fn register( runtime: &'static Runtime, name: &'static str, descr: &'static str, default_startup: bool, immutable: bool, ) -> Result<()> { runtime .plugins .register(name, default_startup, immutable, move || -> DynPluginResult { Box::pin(async move { Template::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) }) }) }) .await?; Ok(()) } struct Template { _runtime: &'static Runtime, name: String, descr: String, register: Box<dyn Register>, } impl Template { #[inline] async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> { let register = runtime.extends.hook_mgr().await.register(); Ok(Self { _runtime: runtime, name: name.into(), descr: descr.into(), register }) } } #[async_trait] impl Plugin for Template { #[inline] async fn init(&mut self) -> Result<()> { log::debug!("{} init", self.name); self.register.add(Type::ClientConnack, Box::new(HookHandler::new())).await; self.register.add(Type::ClientSubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::ClientUnsubscribe, Box::new(HookHandler::new())).await; self.register.add(Type::MessageDelivered, Box::new(HookHandler::new())).await; self.register.add(Type::MessagePublish, Box::new(HookHandler::new())).await; self.register.add_priority(Type::ClientSubscribeCheckAcl, 10, Box::new(HookHandler::new())).await; self.register.add_priority(Type::GrpcMessageReceived, 10, Box::new(HookHandler::new())).await; Ok(()) } #[inline] fn name(&self) -> &str { &self.name } #[inline] async fn start(&mut self) -> Result<()> { log::info!("{} start", self.name); self.register.start().await; Ok(()) } #[inline] async fn stop(&mut self) -> Result<bool> { log::info!("{} stop", self.name); self.register.stop().await; Ok(true) } #[inline] fn version(&self) -> &str { "0.1.1" } 
#[inline] fn descr(&self) -> &str { &self.descr } } struct HookHandler {} impl HookHandler { fn new() -> Self { Self {} } } #[async_trait] impl Handler for HookHandler { async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType { match param { Parameter::ClientC
} Parameter::MessagePublish(_session, c, publish) => { log::debug!("{:?} message publish, {:?}", c.id, publish); } Parameter::MessageDelivered(_session, c, from, _publish) => { log::debug!("{:?} MessageDelivered, {:?}", c.id, from); } Parameter::ClientSubscribeCheckAcl(_s, _c, subscribe) => { log::debug!("{:?} ClientSubscribeCheckAcl, {:?}", _c.id, subscribe); } _ => { log::error!("unimplemented, {:?}", param) } } (true, acc) } }
onnack(connect_info, r) => { log::debug!("client connack, {:?}, {:?}", connect_info, r); } Parameter::ClientSubscribe(_session, c, subscribe) => { log::debug!("{:?} client subscribe, {:?}", c.id, subscribe); } Parameter::ClientUnsubscribe(_session, c, unsubscribe) => { log::debug!("{:?} client unsubscribe, {:?}", c.id, unsubscribe);
function_block-random_span
[ { "content": "pub trait PluginFn: 'static + Sync + Send + Fn() -> BoxFuture<Result<DynPlugin>> {}\n\n\n\nimpl<T> PluginFn for T where T: 'static + Sync + Send + ?Sized + Fn() -> BoxFuture<Result<DynPlugin>> {}\n\n\n\npub type DynPluginResult = BoxFuture<Result<DynPlugin>>;\n\npub type DynPlugin = Box<dyn Plugin...
Rust
src/scope/lefthandsideexpr.rs
farodin91/js-parser-rs
8fdfe5b200788ec997d1446e1c44859a126493b7
use error::JsResult; use lexer::enums::{TokenType}; use scope::parser::{Parser, Item}; macro_rules! wait { ($expr:expr) => (match $expr { Item::None => (), Item::Item => return Ok(Item::Item), }) } macro_rules! none { ($expr:expr) => (match $expr { Item::None => return Ok(Item::None), Item::Item => (), }) } impl Parser { pub fn parse_left_hand_side_expr(&mut self) -> JsResult<Item> { println!("parse_left_hand_side_expr {:?}", self.peek()); wait!(try!(self.parse_call_expr())); self.parse_new_expr() } pub fn parse_new_expr(&mut self) -> JsResult<Item> { println!("parse_new_expr {:?}", self.peek()); wait!(try!(self.parse_member_expr())); if try!(self.consume(TokenType::New)) { try!(self.parse_new_expr()); Ok(Item::Item) } else { Ok(Item::None) } } pub fn parse_super_prop(&mut self) -> JsResult<Item> { if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } if try!(self.consume(TokenType::LeftBracket)) { try!(self.expect(TokenType::RightBracket)); } else { try!(self.expect(TokenType::Point)); try!(self.expect_identifier()); } Ok(Item::Item) } pub fn parse_meta_prop(&mut self) -> JsResult<Item> { try!(self.expect(TokenType::New)); try!(self.expect(TokenType::Point)); try!(self.expect(TokenType::Target)); Ok(Item::Item) } pub fn bump_new_and_member_expr_and_arguments(&mut self) -> JsResult<Item> { println!("bump_new_and_member_expr_and_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::New)) { return Ok(Item::None) } try!(self.parse_member_expr()); try!(self.parse_arguments()); Ok(Item::Item) } pub fn parse_arguments(&mut self) -> JsResult<Item> { println!("parse_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::LeftParen)) { return Ok(Item::None) } if !try!(self.consume(TokenType::ThreePoints)) { try!(self.parse_expr()); try!(self.consume(TokenType::ThreePoints)); } try!(self.consume_all_lineterminates()); try!(self.expect(TokenType::RightParen)); Ok(Item::Item) } pub fn parse_member(&mut self) -> JsResult<Item> { 
println!("parse_member {:?}", self.peek()); if try!(self.consume(TokenType::LeftBracket)) { try!(self.parse_expr()); try!(self.expect(TokenType::RightBracket)); return Ok(Item::Item) } if try!(self.consume(TokenType::Point)) { try!(self.expect_identifier_name()); return Ok(Item::Item) } Ok(Item::None) } pub fn member_expr_and_members(&mut self) -> JsResult<Item> { println!("member_expr_and_members {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_member() } pub fn parse_member_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_member_expr {:?} {:?}", self.peek(), first); match try!(self.parse_primary_expr()) { Item::Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_prop()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } pub fn parse_super_call(&mut self) -> JsResult<Item> { println!("parse_super_call {:?}", self.peek()); if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } try!(self.parse_arguments()); Ok(Item::Item) } pub fn member_and_arguments(&mut self) -> JsResult<Item> { println!("member_and_arguments {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_arguments() } pub fn parse_call_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_call_expr {:?} {:?}", self.peek(), first); match try!(self.member_and_arguments()) { Item::Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_call()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } match try!(self.parse_arguments()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } }
use error::JsResult; use lexer::enums::{TokenType}; use scope::parser::{Parser, Item}; macro_rules! wait { ($expr:expr) => (match $expr { Item::None => (), Item::Item => return Ok(Item::Item), }) } macro_rules! none { ($expr:expr) => (match $expr { Item::None => return Ok(Item::None), Item::Item => (), }) } impl Parser { pub fn parse_left_hand_side_expr(&mut self) -> JsResult<Item> { println!("parse_left_hand_side_expr {:?}", self.peek()); wait!(try!(self.parse_call_expr())); self.parse_new_expr() } pub fn parse_new_expr(&mut self) -> JsResult<Item> { println!("parse_new_expr {:?}", self.peek()); wait!(try!(self.parse_member_expr())); if try!(self.consume(TokenType::New)) { try!(self.parse_new_expr()); Ok(Item::Item) } else { Ok(Item::None) } } pub fn parse_super_prop(&mut self) -> JsResult<Item> { if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } if try!(self.consume(TokenType::LeftBracket)) { try!(self.expect(TokenType::RightBracket)); } else { try!(self.expect(TokenType::Point)); try!(self.expect_identifier()); } Ok(Item::Item) } pub fn parse_meta_prop(&mut self) -> JsResult<Item> { try!(self.expect(TokenType::New)); try!(self.expect(TokenType::Point)); try!(self.expect(TokenType::Target)); Ok(Item::Item) } pub fn bump_new_and_member_expr_and_arguments(&mut self) -> JsResult<Item> { println!("bump_new_and_member_expr_and_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::New)) { return Ok(Item::None) } try!(self.parse_member_expr()); try!(self.parse_arguments()); Ok(Item::Item) } pub fn parse_arguments(&mut self) -> JsResult<Item> { println!("parse_arguments {:?}", self.peek()); if !try!(self.consume(TokenType::LeftParen)) { return Ok(Item::None) } if !try!(self.consume(TokenType::ThreePoints)) { try!(self.parse_expr()); try!(self.consume(TokenType::ThreePoints)); } try!(self.consume_all_lineterminates()); try!(self.expect(TokenType::RightParen)); Ok(Item::Item) } pub fn parse_member(&mut self) -> JsResult<Item> { 
println!("parse_member {:?}", self.peek()); if try!(self.consume(TokenType::LeftBracket)) { try!(self.parse_expr()); try!(self.expect(TokenType::RightBracket)); return Ok(Item::Item) } if try!(self.consume(TokenType::Point)) { try!(self.expect_identifier_name()); return Ok(Item::Item
Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_prop()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } pub fn parse_super_call(&mut self) -> JsResult<Item> { println!("parse_super_call {:?}", self.peek()); if !try!(self.consume(TokenType::Super)) { return Ok(Item::None) } try!(self.parse_arguments()); Ok(Item::Item) } pub fn member_and_arguments(&mut self) -> JsResult<Item> { println!("member_and_arguments {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_arguments() } pub fn parse_call_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_call_expr {:?} {:?}", self.peek(), first); match try!(self.member_and_arguments()) { Item::Item => { first = true; continue }, Item::None => (), } match try!(self.parse_super_call()) { Item::Item => { first = true; continue }, Item::None => (), } if !first { return Ok(Item::None) } match try!(self.parse_member()) { Item::Item => continue, Item::None => (), } match try!(self.parse_arguments()) { Item::Item => continue, Item::None => (), } break } Ok(Item::Item) } }
) } Ok(Item::None) } pub fn member_expr_and_members(&mut self) -> JsResult<Item> { println!("member_expr_and_members {:?}", self.peek()); none!(try!(self.parse_member_expr())); self.parse_member() } pub fn parse_member_expr(&mut self) -> JsResult<Item> { let mut first = false; loop { println!("parse_member_expr {:?} {:?}", self.peek(), first); match try!(self.parse_primary_expr()) { Item::
random
[ { "content": "pub fn parse<T, I>(iter: T) -> Result<Vec<TokenType>, ErrorType> where\n\n T: IntoIterator<Item = char, IntoIter = I> + Sized,\n\n I: Iterator<Item = char> + 'static {\n\n let state = &mut LexerState::new(Box::new(iter.into_iter()));\n\n match state.parse() {\n\n Ok(_)=> (),\n\n...
Rust
src/lib/runner.rs
jokeyrhyme/tuning
06b8efa15bebb1ddfefddd3e2322a81cce101edc
use std::{ collections::HashMap, sync::{Arc, Mutex}, thread, }; use thiserror::Error as ThisError; use crate::jobs::{self, is_result_done, is_result_settled, Execute, Status}; const MAX_THREADS: usize = 2; #[derive(Debug, ThisError)] pub enum Error { #[error(transparent)] Job { #[from] source: jobs::Error, }, } pub fn run(jobs: Vec<(impl Execute + Send + 'static)>) { let mut results = HashMap::<String, jobs::Result>::new(); jobs.iter().for_each(|job| { if job.needs().is_empty() { results.insert(job.name(), Ok(Status::Pending)); } else { results.insert(job.name(), Ok(Status::Blocked)); } }); let jobs_arc = Arc::new(Mutex::new(jobs)); let results_arc = Arc::new(Mutex::new(results)); let mut handles = Vec::<thread::JoinHandle<_>>::with_capacity(MAX_THREADS); for _ in 0..MAX_THREADS { let my_jobs_arc = jobs_arc.clone(); let my_results_arc = results_arc.clone(); let handle = thread::spawn(move || { loop { let current_job; { let mut my_jobs = my_jobs_arc.lock().unwrap(); let mut my_results = my_results_arc.lock().unwrap(); for job in my_jobs.iter() { let name = job.name(); if !job.when() { my_results.insert(name.clone(), Ok(Status::Skipped)); } } for job in my_jobs.iter() { let name = job.name(); if is_equal_status(my_results.get(&name).unwrap(), &Status::Blocked) && job .needs() .iter() .all(|n| is_result_done(my_results.get(n).unwrap())) { my_results.insert(name, Ok(Status::Pending)); } } if is_all_settled(&my_results) { return; } let index = match my_jobs.iter().enumerate().find(|(_, job)| { let name = job.name(); is_equal_status(my_results.get(&name).unwrap(), &Status::Pending) }) { Some((i, _)) => i, None => { return; } }; current_job = my_jobs.remove(index); let name = current_job.name(); my_results.insert(name.clone(), Ok(Status::InProgress)); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } let name = current_job.name(); let result = current_job.execute(); { let mut my_results = my_results_arc.lock().unwrap(); 
my_results.insert(name.clone(), result); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } } }); handles.push(handle); } for handle in handles { handle.join().expect("worker thread failed"); } } fn is_all_settled(results: &HashMap<String, jobs::Result>) -> bool { results.iter().all(|(_, result)| is_result_settled(result)) } fn is_equal_status(result: &jobs::Result, status: &Status) -> bool { match result { Ok(s) => s == status, Err(_) => false, } } #[cfg(test)] mod tests { use std::time::{Duration, Instant}; use super::*; struct FakeJob { name: String, needs: Vec<String>, result: jobs::Result, sleep: Duration, spy_arc: Arc<Mutex<FakeJobSpy>>, when: bool, } impl Default for FakeJob { fn default() -> Self { Self { name: String::new(), needs: Vec::<String>::new(), result: Ok(jobs::Status::Done), sleep: Duration::from_millis(0), spy_arc: Arc::new(Mutex::new(FakeJobSpy { calls: 0, time: None, })), when: true, } } } impl FakeJob { fn new<S>(name: S, result: jobs::Result) -> (Self, Arc<Mutex<FakeJobSpy>>) where S: AsRef<str>, { let job = FakeJob { name: String::from(name.as_ref()), result, ..Default::default() }; let spy_arc = job.spy_arc.clone(); (job, spy_arc) } } impl Execute for FakeJob { fn execute(&self) -> jobs::Result { thread::sleep(self.sleep); let mut my_spy = self.spy_arc.lock().unwrap(); my_spy.calls += 1; my_spy.time = Some(Instant::now()); result_clone(&self.result) } fn name(&self) -> String { self.name.clone() } fn needs(&self) -> Vec<String> { self.needs.clone() } fn when(&self) -> bool { self.when } } struct FakeJobSpy { calls: usize, time: Option<Instant>, } impl FakeJobSpy { fn assert_called_once(&self) { assert_eq!(self.calls, 1); assert!(self.time.is_some()); } fn assert_never_called(&self) { assert_eq!(self.calls, 0); assert!(self.time.is_none()); } } #[test] fn run_does_not_execute_job_with_false_when_or_needs_job_with_false_when() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); a.when = 
false; let (mut b, b_spy) = FakeJob::new("b", Ok(jobs::Status::Done)); b.needs.push(String::from("a")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); my_a_spy.assert_never_called(); let my_b_spy = b_spy.lock().unwrap(); my_b_spy.assert_never_called(); } #[test] fn run_executes_unordered_jobs() { const MAX_COUNT: usize = 10; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for spy_arc in spy_arcs { let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); } } #[test] fn run_executes_unordered_jobs_concurrently() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", Ok(jobs::Status::Done)); a.sleep = Duration::from_millis(500); b.sleep = Duration::from_millis(500); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a").elapsed() < Duration::from_millis(100)); assert!(my_b_spy.time.expect("b").elapsed() < Duration::from_millis(100)); } #[test] fn run_executes_jobs_with_complex_needs() { const MAX_COUNT: usize = 100; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (mut job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); match i % 10 { 2 => { job.needs = vec![format!("{}", i + 2)]; } 3 => { job.needs = vec![format!("{}", i - 3)]; } 4 => { job.needs = vec![format!("{}", i + 3)]; } 7 => { job.needs = vec![String::from("99")]; } _ => 
{ /* noop */ } } jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for i in 0..MAX_COUNT { let spy_arc = &spy_arcs[i]; let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); match i % 10 { 2 => { let spyx4_arc = &spy_arcs[i + 2]; let spyx4 = spyx4_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx4.time.expect("x7")); } 3 => { let spyx0_arc = &spy_arcs[i - 3]; let spyx0 = spyx0_arc.lock().unwrap(); assert!(spy.time.expect("x3") > spyx0.time.expect("x7")); } 4 => { let spyx7_arc = &spy_arcs[i + 3]; let spyx7 = spyx7_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx7.time.expect("x7")); } 7 => { let spy99_arc = &spy_arcs[99]; let spy99 = spy99_arc.lock().unwrap(); assert!(spy.time.expect("x7") > spy99.time.expect("99")); } _ => { /* noop */ } } } } #[test] fn run_executes_ordered_jobs() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", Ok(jobs::Status::NoChange(String::from("b")))); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a") > my_b_spy.time.expect("b")); } #[test] fn run_does_not_execute_ordered_job_when_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); } #[test] fn run_does_not_execute_ordered_job_when_some_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); let (c, c_spy) = FakeJob::new("c", Ok(jobs::Status::Done)); a.needs.push(String::from("b")); a.needs.push(String::from("c")); 
b.needs.push(String::from("c")); let jobs = vec![a, b, c]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); let my_c_spy = c_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); my_c_spy.assert_called_once(); } fn result_clone(result: &jobs::Result) -> jobs::Result { match result { Ok(s) => Ok(s.clone()), Err(_) => Err(jobs::Error::SomethingBad), } } }
use std::{ collections::HashMap, sync::{Arc, Mutex}, thread, }; use thiserror::Error as ThisError; use crate::jobs::{self, is_result_done, is_result_settled, Execute, Status}; const MAX_THREADS: usize = 2; #[derive(Debug, ThisError)] pub enum Error { #[error(transparent)] Job { #[from] source: jobs::Error, }, } pub fn run(jobs: Vec<(impl Execute + Send + 'static)>) { let mut results = HashMap::<String, jobs::Result>::new(); jobs.iter().for_each(|job| { if job.needs().is_empty() { results.insert(job.name(), Ok(Status::Pending)); } else { results.insert(job.name(), Ok(Status::Blocked)); } }); let jobs_arc = Arc::new(Mutex::new(jobs)); let results_arc = Arc::new(Mutex::new(results)); let mut handles = Vec::<thread::JoinHandle<_>>::with_capacity(MAX_THREADS); for _ in 0..MAX_THREADS { let my_jobs_arc = jobs_arc.clone(); let my_results_arc = results_arc.clone(); let handle = thread::spawn(move || { loop { let current_job; { let mut my_jobs = my_jobs_arc.lock().unwrap(); let mut my_results = my_results_arc.lock().unwrap(); for job in my_jobs.iter() { let name = job.name(); if !job.when() { my_results.insert(name.clone(), Ok(Status::Skipped)); } } for job in my_jobs.iter() { let name = job.name(); if is_equal_status(my_results.get(&name).unwrap(), &Status::Blocked) && job .needs() .iter() .all(|n| is_result_done(my_results.get(n).unwrap())) { my_results.insert(name, Ok(Status::Pending)); } } if is_all_settled(&my_results) { return; } let index = match my_jobs.iter().enumerate().find(|(_, job)| { let name = job.name(); is_equal_status(my_results.get(&name).unwrap(), &Status::Pending) }) { Some((i, _)) => i, None => { return; } }; current_job = my_jobs.remove(index); let name = current_job.name(); my_results.insert(name.clone(), Ok(Status::InProgress)); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } let name = current_job.name(); let result = current_job.execute(); { let mut my_results = my_results_arc.lock().unwrap(); 
my_results.insert(name.clone(), result); println!( "job: {}: {}", &name, jobs::result_display(my_results.get(&name).unwrap()) ); } } }); handles.push(handle); } for handle in handles { handle.join().expect("worker thread failed"); } } fn is_all_settled(results: &HashMap<String, jobs::Result>) -> bool { results.iter().all(|(_, result)| is_result_settled(result)) } fn is_equal_status(result: &jobs::Result, status: &Status) -> bool { match result { Ok(s) => s == status, Err(_) => false, } } #[cfg(test)] mod tests { use std::time::{Duration, Instant}; use super::*; struct FakeJob { name: String, needs: Vec<String>, result: jobs::Result, sleep: Duration, spy_arc: Arc<Mutex<FakeJobSpy>>, when: bool, } impl Default for FakeJob { fn default() -> Self { Self { name: String::new(), needs: Vec::<String>::new(), result: Ok(jobs::Status::Done), slee
} impl FakeJob { fn new<S>(name: S, result: jobs::Result) -> (Self, Arc<Mutex<FakeJobSpy>>) where S: AsRef<str>, { let job = FakeJob { name: String::from(name.as_ref()), result, ..Default::default() }; let spy_arc = job.spy_arc.clone(); (job, spy_arc) } } impl Execute for FakeJob { fn execute(&self) -> jobs::Result { thread::sleep(self.sleep); let mut my_spy = self.spy_arc.lock().unwrap(); my_spy.calls += 1; my_spy.time = Some(Instant::now()); result_clone(&self.result) } fn name(&self) -> String { self.name.clone() } fn needs(&self) -> Vec<String> { self.needs.clone() } fn when(&self) -> bool { self.when } } struct FakeJobSpy { calls: usize, time: Option<Instant>, } impl FakeJobSpy { fn assert_called_once(&self) { assert_eq!(self.calls, 1); assert!(self.time.is_some()); } fn assert_never_called(&self) { assert_eq!(self.calls, 0); assert!(self.time.is_none()); } } #[test] fn run_does_not_execute_job_with_false_when_or_needs_job_with_false_when() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); a.when = false; let (mut b, b_spy) = FakeJob::new("b", Ok(jobs::Status::Done)); b.needs.push(String::from("a")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); my_a_spy.assert_never_called(); let my_b_spy = b_spy.lock().unwrap(); my_b_spy.assert_never_called(); } #[test] fn run_executes_unordered_jobs() { const MAX_COUNT: usize = 10; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for spy_arc in spy_arcs { let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); } } #[test] fn run_executes_unordered_jobs_concurrently() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", 
Ok(jobs::Status::Done)); a.sleep = Duration::from_millis(500); b.sleep = Duration::from_millis(500); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a").elapsed() < Duration::from_millis(100)); assert!(my_b_spy.time.expect("b").elapsed() < Duration::from_millis(100)); } #[test] fn run_executes_jobs_with_complex_needs() { const MAX_COUNT: usize = 100; let mut jobs = Vec::<FakeJob>::with_capacity(MAX_COUNT); let mut spy_arcs = Vec::<Arc<Mutex<FakeJobSpy>>>::with_capacity(MAX_COUNT); for i in 0..MAX_COUNT { let (mut job, spy_arc) = FakeJob::new( format!("{}", i), match i % 2 { 0 => Ok(jobs::Status::Done), _ => Ok(jobs::Status::NoChange(format!("{}", i))), }, ); match i % 10 { 2 => { job.needs = vec![format!("{}", i + 2)]; } 3 => { job.needs = vec![format!("{}", i - 3)]; } 4 => { job.needs = vec![format!("{}", i + 3)]; } 7 => { job.needs = vec![String::from("99")]; } _ => { /* noop */ } } jobs.push(job); spy_arcs.push(spy_arc); } run(jobs); for i in 0..MAX_COUNT { let spy_arc = &spy_arcs[i]; let spy = spy_arc.lock().unwrap(); spy.assert_called_once(); match i % 10 { 2 => { let spyx4_arc = &spy_arcs[i + 2]; let spyx4 = spyx4_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx4.time.expect("x7")); } 3 => { let spyx0_arc = &spy_arcs[i - 3]; let spyx0 = spyx0_arc.lock().unwrap(); assert!(spy.time.expect("x3") > spyx0.time.expect("x7")); } 4 => { let spyx7_arc = &spy_arcs[i + 3]; let spyx7 = spyx7_arc.lock().unwrap(); assert!(spy.time.expect("x4") > spyx7.time.expect("x7")); } 7 => { let spy99_arc = &spy_arcs[99]; let spy99 = spy99_arc.lock().unwrap(); assert!(spy.time.expect("x7") > spy99.time.expect("99")); } _ => { /* noop */ } } } } #[test] fn run_executes_ordered_jobs() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", 
Ok(jobs::Status::NoChange(String::from("b")))); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_called_once(); my_b_spy.assert_called_once(); assert!(my_a_spy.time.expect("a") > my_b_spy.time.expect("b")); } #[test] fn run_does_not_execute_ordered_job_when_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); a.needs.push(String::from("b")); let jobs = vec![a, b]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); } #[test] fn run_does_not_execute_ordered_job_when_some_needs_are_not_done() { let (mut a, a_spy) = FakeJob::new("a", Ok(jobs::Status::Done)); let (mut b, b_spy) = FakeJob::new("b", Err(jobs::Error::SomethingBad)); let (c, c_spy) = FakeJob::new("c", Ok(jobs::Status::Done)); a.needs.push(String::from("b")); a.needs.push(String::from("c")); b.needs.push(String::from("c")); let jobs = vec![a, b, c]; run(jobs); let my_a_spy = a_spy.lock().unwrap(); let my_b_spy = b_spy.lock().unwrap(); let my_c_spy = c_spy.lock().unwrap(); my_a_spy.assert_never_called(); my_b_spy.assert_called_once(); my_c_spy.assert_called_once(); } fn result_clone(result: &jobs::Result) -> jobs::Result { match result { Ok(s) => Ok(s.clone()), Err(_) => Err(jobs::Error::SomethingBad), } } }
p: Duration::from_millis(0), spy_arc: Arc::new(Mutex::new(FakeJobSpy { calls: 0, time: None, })), when: true, } }
function_block-function_prefixed
[ { "content": "pub fn result_display(result: &Result) -> String {\n\n match result {\n\n Ok(s) => format!(\"{}\", s),\n\n Err(e) => format!(\"{:#?}\", e).red().to_string(),\n\n }\n\n}\n", "file_path": "src/lib/jobs/mod.rs", "rank": 0, "score": 172012.30064669653 }, { "cont...
Rust
rust/lib-hedgewars-engine/src/render/gear.rs
emorrp1/hw
0afbbead97ad3aa231391f60605a73cd2629a849
use crate::render::{ atlas::{AtlasCollection, SpriteIndex}, camera::Camera, gl::{ Buffer, BufferType, BufferUsage, InputElement, InputFormat, InputLayout, PipelineState, Shader, Texture2D, TextureDataType, TextureFilter, TextureFormat, TextureInternalFormat, VariableBinding, }, }; use integral_geometry::{Rect, Size}; use png::{ColorType, Decoder, DecodingError}; use std::{ collections::HashMap, ffi::OsString, fs::{read_dir, File}, io, io::BufReader, mem::size_of, path::{Path, PathBuf}, }; const VERTEX_SHADER: &'static str = r#" #version 330 core uniform mat4 projection; layout(location = 0) in vec2 position; layout(location = 1) in vec2 texCoords; out vec2 varTexCoords; void main() { varTexCoords = texCoords; gl_Position = projection * vec4(position, 0.0, 1.0); } "#; const PIXEL_SHADER: &'static str = r#" #version 330 core uniform sampler2D texture; in vec2 varTexCoords; out vec4 outColor; void main() { outColor = texture2D(texture, varTexCoords); } "#; #[repr(C)] #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], tex_coords: [f32; 2], } #[derive(PartialEq, Debug, Clone, Copy)] #[repr(u32)] pub enum SpriteId { Mine = 0, Grenade, Cheese, Cleaver, MaxSprite, } const SPRITE_LOAD_LIST: &[(SpriteId, &str)] = &[ ( SpriteId::Mine, "../../share/hedgewars/Data/Graphics/MineOn.png", ), ( SpriteId::Grenade, "../../share/hedgewars/Data/Graphics/Bomb.png", ), ( SpriteId::Cheese, "../../share/hedgewars/Data/Graphics/cheese.png", ), ( SpriteId::Cleaver, "../../share/hedgewars/Data/Graphics/cleaver.png", ), ]; const MAX_SPRITES: usize = SpriteId::MaxSprite as usize + 1; type SpriteTexCoords = (u32, [[f32; 2]; 4]); pub struct GearEntry { position: [f32; 2], size: Size, } impl GearEntry { pub fn new(x: f32, y: f32, size: Size) -> Self { Self { position: [x, y], size, } } } pub struct GearRenderer { atlas: AtlasCollection, texture: Texture2D, allocation: Box<[SpriteTexCoords; MAX_SPRITES]>, shader: Shader, layout: InputLayout, vertex_buffer: Buffer, } struct SpriteData { 
size: Size, filename: PathBuf, } const ATLAS_SIZE: Size = Size::square(2048); impl GearRenderer { pub fn new() -> Self { let mut atlas = AtlasCollection::new(ATLAS_SIZE); let texture = Texture2D::new( ATLAS_SIZE, TextureInternalFormat::Rgba8, TextureFilter::Linear, ); let mut allocation = Box::new([Default::default(); MAX_SPRITES]); for (sprite, file) in SPRITE_LOAD_LIST { let path = Path::new(file); let size = load_sprite_size(path).expect(&format!("Unable to open {}", file)); let index = atlas .insert_sprite(size) .expect(&format!("Could not store sprite {:?}", sprite)); let (texture_index, rect) = atlas.get_rect(index).unwrap(); let mut pixels = vec![255u8; size.area() * 4].into_boxed_slice(); load_sprite_pixels(path, &mut pixels).expect("Unable to load Graphics"); texture.update( rect, &pixels, None, TextureFormat::Rgba, TextureDataType::UnsignedByte, ); let mut tex_coords = [ [rect.left() as f32, rect.bottom() as f32 + 1.0], [rect.right() as f32 + 1.0, rect.bottom() as f32 + 1.0], [rect.left() as f32, rect.top() as f32], [rect.right() as f32 + 1.0, rect.top() as f32], ]; for coords in &mut tex_coords { coords[0] /= ATLAS_SIZE.width as f32; coords[1] /= ATLAS_SIZE.height as f32; } allocation[*sprite as usize] = (texture_index, tex_coords); } let shader = Shader::new( VERTEX_SHADER, Some(PIXEL_SHADER), &[VariableBinding::Sampler("texture", 0)], ) .unwrap(); let layout = InputLayout::new(vec![ InputElement { shader_slot: 0, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: 0, }, InputElement { shader_slot: 1, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: size_of::<[f32; 2]>() as u32, }, ]); let vertex_buffer = Buffer::empty(BufferType::Array, BufferUsage::DynamicDraw); Self { atlas, texture, allocation, shader, layout, vertex_buffer, } } pub fn render(&mut self, camera: &Camera, entries: &[GearEntry]) { let mut data = 
Vec::with_capacity(entries.len() * 6); for (index, entry) in entries.iter().enumerate() { let sprite_id = match index & 0b11 { 0 => SpriteId::Mine, 1 => SpriteId::Grenade, 2 => SpriteId::Cheese, _ => SpriteId::Cleaver, }; let sprite_coords = &self.allocation[sprite_id as usize].1; let v = [ Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0, entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[0], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[1], }, Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[2], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[3], }, ]; data.extend_from_slice(&[v[0], v[1], v[2], v[1], v[3], v[2]]); } let projection = camera.projection(); self.shader.bind(); self.shader.set_matrix("projection", projection.as_ptr()); self.shader.bind_texture_2d(0, &self.texture); self.vertex_buffer.write_typed(&data); let _buffer_bind = self.layout.bind(&[(0, &self.vertex_buffer)], None); let _state = PipelineState::new().with_blend(); unsafe { gl::DrawArrays(gl::TRIANGLES, 0, entries.len() as i32 * 6); } } } fn load_sprite_pixels(path: &Path, buffer: &mut [u8]) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); reader.next_frame(buffer)?; Ok(size) } fn load_sprite_size(path: &Path) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); Ok(size) } fn load_sprites(path: &Path) -> io::Result<Vec<SpriteData>> { let mut 
result = vec![]; for file in read_dir(path)? { let file = file?; if let Some(extension) = file.path().extension() { if extension == "png" { let path = file.path(); let sprite = load_sprite_size(&path)?; result.push(SpriteData { size: sprite, filename: path, }); } } } Ok(result) }
use crate::render::{ atlas::{AtlasCollection, SpriteIndex}, camera::Camera, gl::{ Buffer, BufferType, BufferUsage, InputElement, InputFormat, InputLayout, PipelineState, Shader, Texture2D, TextureDataType, TextureFilter, TextureFormat, TextureInternalFormat, VariableBinding, }, }; use integral_geometry::{Rect, Size}; use png::{ColorType, Decoder, DecodingError}; use std::{ collections::HashMap, ffi::OsString, fs::{read_dir, File}, io, io::BufReader, mem::size_of, path::{Path, PathBuf}, }; const VERTEX_SHADER: &'static str = r#" #version 330 core uniform mat4 projection; layout(location = 0) in vec2 position; layout(location = 1) in vec2 texCoords; out vec2 varTexCoords; void main() { varTexCoords = texCoords; gl_Position = projection * vec4(position, 0.0, 1.0); } "#; const PIXEL_SHADER: &'static str = r#" #version 330 core uniform sampler2D texture; in vec2 varTexCoords; out vec4 outColor; void main() { outColor = texture2D(texture, varTexCoords); } "#; #[repr(C)] #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], tex_coords: [f32; 2], } #[derive(PartialEq, Debug, Clone, Copy)] #[repr(u32)] pub enum SpriteId { Mine = 0, Grenade, Cheese, Cleaver, MaxSprite, } const SPRITE_LOAD_LIST: &[(SpriteId, &str)] = &[ ( SpriteId::Mine, "../../share/hedgewars/Data/Graphics/MineOn.png", ), ( SpriteId::Grenade, "../../share/hedgewars/Data/Graphics/Bomb.png", ), ( SpriteId::Cheese, "../../share/hedgewars/Data/Graphics/cheese.png", ), ( SpriteId::Cleaver, "../../share/hedgewars/Data/Graphics/cleaver.png", ), ]; const MAX_SPRITES: usize = SpriteId::MaxSprite as usize + 1; type SpriteTexCoords = (u32, [[f32; 2]; 4]); pub struct GearEntry { position: [f32; 2], size: Size, } impl GearEntry { pub fn new(x: f32, y: f32, size: Size) -> Self { Self { position: [x, y], size, } } } pub struct GearRenderer { atlas: AtlasCollection, texture: Texture2D, allocation: Box<[SpriteTexCoords; MAX_SPRITES]>, shader: Shader, layout: InputLayout, vertex_buffer: Buffer, } struct SpriteData { 
size: Size, filename: PathBuf, } const ATLAS_SIZE: Size = Size::square(2048); impl GearRenderer { pub fn new() -> Self { let mut atlas = AtlasCollection::new(ATLAS_SIZE); let texture = Texture2D::new( ATLAS_SIZE, TextureInternalFormat::Rgba8, TextureFilter::Linear, ); let mut allocation = Box::new([Default::default(); MAX_SPRITES]); for (sprite, file) in SPRITE_LOAD_LIST { let path = Path::new(file); let size = load_sprite_size(path).expect(&format!("Unable to open {}", file)); let index = atlas .insert_sprite(size) .expect(&format!("Could not store sprite {:?}", sprite)); let (texture_index, rect) = atlas.get_rect(index).unwrap(); let mut pixels = vec![255u8; size.area() * 4].into_boxed_slice(); load_sprite_pixels(path, &mut pixels).expect("Unable to load Graphics"); texture.update( rect, &pixels, None, TextureFormat::Rgba, TextureDataType::UnsignedByte, ); let mut tex_coords = [ [rect.left() as f32, rect.bottom() as f32 + 1.0], [rect.right() as f32 + 1.0, rect.bottom() as f32 + 1.0], [rect.left() as f32, rect.top() as f32], [rect.right() as f32 + 1.0, rect.top() as f32], ]; for coords in &mut tex_coords { coords[0] /= ATLAS_SIZE.width as f32; coords[1] /= ATLAS_SIZE.height as f32; } allocation[*sprite as usize] = (texture_index, tex_coords); } let shader = Shader::new( VERTEX_SHADER, Some(PIXEL_SHADER), &[VariableBinding::Sampler("texture", 0)], ) .unwrap(); let layout = InputLayout::new(vec![ InputElement { shader_slot: 0, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: 0, }, InputElement { shader_slot: 1, buffer_slot: 0, format: InputFormat::Float(gl::FLOAT, false), components: 2, stride: size_of::<Vertex>() as u32, offset: size_of::<[f32; 2]>() as u32, }, ]); let vertex_buffer = Buffer::empty(BufferType::Array, BufferUsage::DynamicDraw); Self { atlas, texture, allocation, shader, layout, vertex_buffer, } } pub fn render(&mut self, camera: &Camera, entries: &[GearEntry]) { let mut data = 
Vec::with_capacity(entries.len() * 6); for (index, entry) in entries.iter().enumerate() { let sprite_id = match index & 0b11 { 0 => SpriteId::Mine, 1 => SpriteId::Grenade, 2 => SpriteId::Cheese, _ => SpriteId::Cleaver, }; let sprite_coords = &self.allocation[sprite_id as usize].1; let v = [ Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0,
} fn load_sprite_pixels(path: &Path, buffer: &mut [u8]) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); reader.next_frame(buffer)?; Ok(size) } fn load_sprite_size(path: &Path) -> io::Result<Size> { let decoder = Decoder::new(BufReader::new(File::open(path)?)); let (info, mut reader) = decoder.read_info()?; let size = Size::new(info.width as usize, info.height as usize); Ok(size) } fn load_sprites(path: &Path) -> io::Result<Vec<SpriteData>> { let mut result = vec![]; for file in read_dir(path)? { let file = file?; if let Some(extension) = file.path().extension() { if extension == "png" { let path = file.path(); let sprite = load_sprite_size(&path)?; result.push(SpriteData { size: sprite, filename: path, }); } } } Ok(result) }
entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[0], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] + entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[1], }, Vertex { position: [ entry.position[0] - entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[2], }, Vertex { position: [ entry.position[0] + entry.size.width as f32 / 2.0, entry.position[1] - entry.size.height as f32 / 2.0, ], tex_coords: sprite_coords[3], }, ]; data.extend_from_slice(&[v[0], v[1], v[2], v[1], v[3], v[2]]); } let projection = camera.projection(); self.shader.bind(); self.shader.set_matrix("projection", projection.as_ptr()); self.shader.bind_texture_2d(0, &self.texture); self.vertex_buffer.write_typed(&data); let _buffer_bind = self.layout.bind(&[(0, &self.vertex_buffer)], None); let _state = PipelineState::new().with_blend(); unsafe { gl::DrawArrays(gl::TRIANGLES, 0, entries.len() as i32 * 6); } }
function_block-function_prefix_line
[ { "content": "fn get_protocol_number(executable: &str) -> std::io::Result<u32> {\n\n let output = Command::new(executable).arg(\"--protocol\").output()?;\n\n\n\n Ok(u32::from_str(&String::from_utf8(output.stdout).unwrap().trim()).unwrap_or(55))\n\n}\n\n\n", "file_path": "rust/hedgewars-checker/src/mai...
Rust
src/views/help.rs
RedlineTriad/rshub
778d0a2f2d66998b6b1669def1f79573ccb54781
use std::io; use std::sync::Arc; use crossterm::event::KeyCode; use tui::layout::Rect; use tui::{ backend::CrosstermBackend, layout::{Alignment, Constraint, Direction, Layout, Margin}, style::{Modifier, Style}, text::Span, widgets::{Block, Borders, Paragraph, Wrap}, Frame, }; use crate::app::AppAction; use crate::input::UserInput; use crate::states::help::HotKey; use crate::states::AppState; use crate::views::{AppView, Drawable, HotKeys, InputProcessor, Named}; pub struct Help {} impl AppView for Help {} #[async_trait::async_trait] impl Named for Help { fn name(&self) -> String { "Help Screen".to_owned() } } #[async_trait::async_trait] impl HotKeys for Help { fn hotkeys(&self) -> Vec<HotKey> { vec![HotKey { description: "Close help", key: KeyCode::Esc, modifiers: None, }] } } #[async_trait::async_trait] impl InputProcessor for Help { async fn on_input(&mut self, input: &UserInput, _: Arc<AppState>) -> Option<AppAction> { match input { UserInput::Back => Some(AppAction::CloseView), _ => None, } } } #[async_trait::async_trait] impl Drawable for Help { async fn draw( &mut self, f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect, app: Arc<AppState>, ) { let help = app.help.lock().unwrap(); let list_length = (help.global_hotkeys.len() + help.local_hotkeys.len()) as u16 + 2 + 1 + 1; let vertical_margin = if list_length < area.height { (area.height - list_length) / 2 } else { 0 }; f.render_widget( Block::default() .title(help.view_name.clone()) .title_alignment(Alignment::Center) .borders(Borders::ALL), area, ); let chunks = Layout::default() .direction(Direction::Vertical) .constraints(vec![ Constraint::Length(vertical_margin), Constraint::Length(1), Constraint::Length(help.global_hotkeys.len() as u16), Constraint::Length(1), Constraint::Length(help.local_hotkeys.len() as u16), ]) .split(area.inner(&Margin { vertical: 1, horizontal: 1, })); f.render_widget( Block::default() .title(Span::styled( "GLOBAL", Style::default().add_modifier(Modifier::BOLD), )) 
.title_alignment(Alignment::Center), chunks[1], ); f.render_widget( Block::default() .title(Span::styled( "LOCAL", Style::default().add_modifier(Modifier::BOLD), )) .title_alignment(Alignment::Center), chunks[3], ); let chunks_global = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[2]); let chunks_local = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[4]); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_global[0], ); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_global[1], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_local[0], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_local[1], ); } }
use std::io; use std::sync::Arc; use crossterm::event::KeyCode; use tui::layout::Rect; use tui::{ backend::CrosstermBackend, layout::{Alignment, Constraint, Direction, Layout, Margin}, style::{Modifier, Style}, text::Span, widgets::{Block, Borders, Paragraph, Wrap}, Frame, }; use crate::app::AppAction; use crate::input::UserInput; use crate::states::help::HotKey; use crate::states::AppState; use crate::views::{AppView, Drawable, HotKeys, InputProcessor, Named}; pub struct Help {} impl AppView for Help {} #[async_trait::async_trait] impl Named for Help { fn name(&self) -> String { "Help Screen".to_owned() } } #[async_trait::async_trait] impl HotKeys for Help { fn hotkeys(&self) -> Vec<HotKey> { vec![HotKey { description: "Close help", key: KeyCode::Esc, modifiers: None, }] } } #[async_trait::async_trait] impl InputProcessor for Help { async fn on_input(&mut self, input: &UserInput, _: Arc<AppState>) -> Option<AppAction> { match input { UserInput::Back => Some(AppAction::CloseView), _ => None, } } } #[async_trait::async_trait] impl Drawable for Help { async fn draw( &mut self, f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect, app: Arc<AppState>, ) { let help = app.help.lock().unwrap(); let list_length = (help.global_hotkeys.len() + help.local_hotkeys.len()) as u16 + 2 + 1 + 1; let vertical_margin = if list_length < area.height { (area.height - list_length) / 2 } else { 0 }; f.render_widget( Block::default() .title(help.view_name.clone()) .title_alignment(Alignment::Center) .borders(Borders::ALL), area, );
f.render_widget( Block::default() .title(Span::styled( "GLOBAL", Style::default().add_modifier(Modifier::BOLD), )) .title_alignment(Alignment::Center), chunks[1], ); f.render_widget( Block::default() .title(Span::styled( "LOCAL", Style::default().add_modifier(Modifier::BOLD), )) .title_alignment(Alignment::Center), chunks[3], ); let chunks_global = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[2]); let chunks_local = Layout::default() .direction(Direction::Horizontal) .constraints(vec![Constraint::Percentage(50), Constraint::Percentage(50)]) .split(chunks[4]); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_global[0], ); f.render_widget( Paragraph::new( help.global_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_global[1], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!("{} :", h.description)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Right) .wrap(Wrap { trim: true }), chunks_local[0], ); f.render_widget( Paragraph::new( help.local_hotkeys .iter() .map(|h| format!(" {}", h)) .collect::<Vec<String>>() .join("\n"), ) .alignment(Alignment::Left) .wrap(Wrap { trim: false }), chunks_local[1], ); } }
let chunks = Layout::default() .direction(Direction::Vertical) .constraints(vec![ Constraint::Length(vertical_margin), Constraint::Length(1), Constraint::Length(help.global_hotkeys.len() as u16), Constraint::Length(1), Constraint::Length(help.local_hotkeys.len() as u16), ]) .split(area.inner(&Margin { vertical: 1, horizontal: 1, }));
assignment_statement
[ { "content": "fn draw_info(f: &mut Frame<CrosstermBackend<io::Stdout>>, area: Rect, app: Arc<AppState>) {\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints(vec![Constraint::Percentage(100)])\n\n .split(area);\n\n\n\n f.render_widget(\n\n ...
Rust
puffin_http/src/client.rs
MarijnS95/puffin
71a4d7c97a63dac8dff42c573af3684559b952c6
use std::sync::{ atomic::{AtomicBool, Ordering::SeqCst}, Arc, Mutex, }; use puffin::{FrameData, FrameView}; pub struct Client { addr: String, connected: Arc<AtomicBool>, alive: Arc<AtomicBool>, frame_view: Arc<Mutex<FrameView>>, } impl Drop for Client { fn drop(&mut self) { self.alive.store(false, SeqCst); } } impl Client { pub fn new(addr: String) -> Self { let alive = Arc::new(AtomicBool::new(true)); let connected = Arc::new(AtomicBool::new(false)); let frame_view = Arc::new(Mutex::new(FrameView::default())); let client = Self { addr: addr.clone(), connected: connected.clone(), alive: alive.clone(), frame_view: frame_view.clone(), }; std::thread::spawn(move || { log::info!("Connecting to {}…", addr); while alive.load(SeqCst) { match std::net::TcpStream::connect(&addr) { Ok(mut stream) => { log::info!("Connected to {}", addr); connected.store(true, SeqCst); while alive.load(SeqCst) { match consume_message(&mut stream) { Ok(frame_data) => { frame_view .lock() .unwrap() .add_frame(std::sync::Arc::new(frame_data)); } Err(err) => { log::warn!( "Connection to puffin server closed: {}", error_display_chain(err.as_ref()) ); connected.store(false, SeqCst); break; } } } } Err(err) => { log::debug!("Failed to connect to {}: {}", addr, err); std::thread::sleep(std::time::Duration::from_secs(1)); } } } }); client } pub fn addr(&self) -> &str { &self.addr } pub fn connected(&self) -> bool { self.connected.load(SeqCst) } pub fn frame_view(&self) -> std::sync::MutexGuard<'_, FrameView> { self.frame_view.lock().unwrap() } } pub fn consume_message(stream: &mut impl std::io::Read) -> anyhow::Result<puffin::FrameData> { let mut server_version = [0_u8; 2]; stream.read_exact(&mut server_version)?; let server_version = u16::from_le_bytes(server_version); match server_version.cmp(&crate::PROTOCOL_VERSION) { std::cmp::Ordering::Less => { anyhow::bail!( "puffin server is using an older protocol version ({}) than the client ({}).", server_version, crate::PROTOCOL_VERSION ); } 
std::cmp::Ordering::Equal => {} std::cmp::Ordering::Greater => { anyhow::bail!( "puffin server is using a newer protocol version ({}) than the client ({}). Update puffin_viewer with 'cargo install puffin_viewer'.", server_version, crate::PROTOCOL_VERSION ); } } use anyhow::Context as _; FrameData::read_next(stream) .context("Failed to parse FrameData")? .ok_or_else(|| anyhow::format_err!("End of stream")) } fn error_display_chain(error: &dyn std::error::Error) -> String { let mut s = error.to_string(); if let Some(source) = error.source() { s.push_str(" -> "); s.push_str(&error_display_chain(source)); } s }
use std::sync::{ atomic::{AtomicBool, Ordering::SeqCst}, Arc, Mutex, }; use puffin::{FrameData, FrameView}; pub struct Client { addr: String, connected: Arc<AtomicBool>, alive: Arc<AtomicBool>, frame_view: Arc<Mutex<FrameView>>, } impl Drop for Client { fn drop(&mut self) { self.alive.store(false, SeqCst); } } impl Client { pub fn new(addr: String) -> Self { let alive = Arc::new(AtomicBool::new(true)); let connected = Arc::new(AtomicBool::new(false)); let frame_view = Arc::new(Mutex::new(FrameView::default())); let client = Self { addr: addr.clone(), connected: connected.clone(), alive: alive.clone(), frame_view: frame_view.clone(), }; std::thread::spawn(move || { log::info!("Connecting to {}…", addr); while alive.load(SeqCst) { match std::net::TcpStream::connect(&addr) { Ok(mut stream) => { log::info!("Connected to {}", addr); connected.store(true, SeqCst); while alive.load(SeqCst) { match consume_message(&mut stream) { Ok(frame_data) => { frame_view .lock() .unwrap() .add_frame(std::sync::Arc::new(frame_data)); } Err(err) => { log::warn!( "Connection to puffin server closed: {}", error_display_chain(err.as_ref()) ); connected.store(false, SeqCst); break; } } } } Err(err) => { log::debug!("Failed to connect to {}: {}", addr, err); std::thread::sleep(std::time::Duration::from_secs(1)); } } } }); client } pub fn addr(&self) -> &str { &self.addr } pub fn connected(&self) -> bool { self.connected.load(SeqCst) } pub fn frame_view(&self) -> std::sync::MutexGuard<'_, FrameView> { self.frame_view.lock().unwrap() } } pub fn consume_message(stream: &mut impl std::io::Read) -> anyhow::Result<puffin::FrameData> { let mut server_version = [0_u8; 2]; stream.read_exact(&mut server_version)?; let server_version = u16::from_le_bytes(server_version);
use anyhow::Context as _; FrameData::read_next(stream) .context("Failed to parse FrameData")? .ok_or_else(|| anyhow::format_err!("End of stream")) } fn error_display_chain(error: &dyn std::error::Error) -> String { let mut s = error.to_string(); if let Some(source) = error.source() { s.push_str(" -> "); s.push_str(&error_display_chain(source)); } s }
match server_version.cmp(&crate::PROTOCOL_VERSION) { std::cmp::Ordering::Less => { anyhow::bail!( "puffin server is using an older protocol version ({}) than the client ({}).", server_version, crate::PROTOCOL_VERSION ); } std::cmp::Ordering::Equal => {} std::cmp::Ordering::Greater => { anyhow::bail!( "puffin server is using a newer protocol version ({}) than the client ({}). Update puffin_viewer with 'cargo install puffin_viewer'.", server_version, crate::PROTOCOL_VERSION ); } }
if_condition
[ { "content": "/// Are the profiler scope macros turned on?\n\n/// This is [`false`] by default.\n\npub fn are_scopes_on() -> bool {\n\n MACROS_ON.load(Ordering::Relaxed)\n\n}\n\n\n\n/// All times are expressed as integer nanoseconds since some event.\n\npub type NanoSecond = i64;\n\n\n\n// ------------------...
Rust
firmware/hal/build.rs
Lotterleben/embedded2020
623fa53461c4da29d69318aa85de8b4119c50719
use std::{ env, error::Error, fs, path::{Path, PathBuf}, }; fn main() -> Result<(), Box<dyn Error>> { let out_dir = &PathBuf::from(env::var("OUT_DIR")?); let flash = env::var_os("CARGO_FEATURE_FLASH").is_some(); descs(&out_dir)?; fs::copy("interrupts.x", out_dir.join("interrupts.x"))?; let suffix = if flash { "flash" } else { "ram" }; fs::copy(format!("link-{}.x", suffix), out_dir.join("link.x"))?; println!("cargo:rustc-link-search={}", out_dir.display()); Ok(()) } fn descs(out_dir: &Path) -> Result<(), Box<dyn Error>> { use core::num::NonZeroU8; use quote::quote; use usb2::{ cdc::{self, acm, call, header, union}, configuration::{self, bmAttributes}, device::{self, bMaxPacketSize0}, endpoint, hid, ia, interface, Direction, Endpoint, }; const PACKET_SIZE: bMaxPacketSize0 = bMaxPacketSize0::B64; const CONFIG_VAL: u8 = 1; const CDC_IFACE: u8 = 0; const HID_IFACE: u8 = 2; let device_desc = device::Descriptor { bDeviceClass: 0xEF, bDeviceSubClass: 2, bDeviceProtocol: 1, bMaxPacketSize0: bMaxPacketSize0::B64, bNumConfigurations: NonZeroU8::new(1).unwrap(), bcdDevice: 0x01_00, iManufacturer: None, iProduct: None, iSerialNumber: None, idProduct: consts::PID, idVendor: consts::VID, }; fn full_config_desc() -> Vec<u8> { let hid = env::var_os("CARGO_FEATURE_HID").is_some(); let mut bytes = vec![]; let mut nifaces = 2; if hid { nifaces += 1; } let config = configuration::Descriptor { bConfigurationValue: NonZeroU8::new(CONFIG_VAL).unwrap(), bMaxPower: 250, bNumInterfaces: NonZeroU8::new(nifaces).unwrap(), bmAttributes: bmAttributes { remote_wakeup: false, self_powered: false, }, iConfiguration: None, wTotalLength: 0, }; bytes.extend_from_slice(&config.bytes()); { let comm = cdc::Class::Communications { subclass: cdc::SubClass::AbstractControlModel, protocol: cdc::Protocol::ATCommands, }; let ia = ia::Descriptor { bFirstInterface: CDC_IFACE, bFunctionClass: comm.class(), bFunctionSubClass: comm.subclass(), bFunctionProtocol: comm.protocol(), bInterfaceCount: 
NonZeroU8::new(2).unwrap(), iFunction: None, }; bytes.extend_from_slice(&ia.bytes()); let iface0 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: CDC_IFACE, bInterfaceClass: comm.class().get(), bInterfaceSubClass: comm.subclass(), bInterfaceProtocol: comm.protocol(), bNumEndpoints: 1, iInterface: None, }; bytes.extend_from_slice(&iface0.bytes()); let header = header::Descriptor { bcdCDC: 0x01_10 }; bytes.extend_from_slice(&header.bytes()); let call = call::Descriptor { bmCapabilities: call::Capabilities { call_management: true, data_class: true, }, bDataInterface: 1, }; bytes.extend_from_slice(&call.bytes()); let acm = acm::Descriptor { bmCapabilities: acm::Capabilities { comm_features: false, line_serial: true, network_connection: false, send_break: false, }, }; bytes.extend_from_slice(&acm.bytes()); let union = union::Descriptor { bControlInterface: 0, bSubordinateInterface0: 1, }; bytes.extend_from_slice(&union.bytes()); let ep1in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 1, }, bInterval: 32, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep1in.bytes()); } { let cdc_data = cdc::Class::CdcData; let iface1 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: 1, bInterfaceClass: cdc_data.class().get(), bInterfaceSubClass: cdc_data.subclass(), bInterfaceProtocol: cdc_data.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface1.bytes()); let ep2out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep2out.bytes()); let ep2in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; 
bytes.extend_from_slice(&ep2in.bytes()); } if hid { let hid = hid::Class; let iface2 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: HID_IFACE, bInterfaceClass: hid.class().get(), bInterfaceSubClass: hid.subclass(), bInterfaceProtocol: hid.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface2.bytes()); let report = hid::Descriptor { bCountryCode: hid::Country::NotSupported, wDescriptorLength: 33, }; bytes.extend_from_slice(&report.bytes()); let ep3out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3out.bytes()); let ep3in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3in.bytes()); } let total_length = bytes.len(); assert!( total_length <= usize::from(u16::max_value()), "configuration descriptor is too long" ); bytes[2] = total_length as u8; bytes[3] = (total_length >> 8) as u8; bytes } let line_coding = acm::LineCoding { bCharFormat: acm::bCharFormat::Stop1, bDataBits: acm::bDataBits::_8, bParityType: acm::bParityType::None, dwDTERate: 9_600, }; let serial_state = acm::SerialState { interface: 0, bOverRun: false, bParity: false, bFraming: false, bRingSignal: false, bBreak: false, bTxCarrier: true, bRxCarrier: true, }; let max_packet_size0 = PACKET_SIZE as u8; let lcb = line_coding.bytes(); let lcl = lcb.len(); let ssb = serial_state.bytes(); let ssl = ssb.len(); let ddb = device_desc.bytes(); let ddl = ddb.len(); let cdb = full_config_desc(); let cdl = cdb.len(); fs::write( out_dir.join("descs.rs"), quote!( const CONFIG_VAL: core::num::NonZeroU8 = unsafe { 
core::num::NonZeroU8::new_unchecked(#CONFIG_VAL) }; const MAX_PACKET_SIZE0: u8 = #max_packet_size0; #[allow(dead_code)] #[link_section = ".data.CONFIG_DESC"] static CONFIG_DESC: [u8; #cdl] = [#(#cdb,)*]; #[allow(dead_code)] #[link_section = ".data.DEVICE_DESC"] static DEVICE_DESC: [u8; #ddl] = [#(#ddb,)*]; #[allow(dead_code)] static mut LINE_CODING: [u8; #lcl] = [#(#lcb,)*]; #[allow(dead_code)] #[link_section = ".data.SERIAL_STATE"] static SERIAL_STATE: crate::util::Align4<[u8; #ssl]> = crate::util::Align4([#(#ssb,)*]); #[allow(dead_code)] const CDC_IFACE: u8 = #CDC_IFACE; #[allow(dead_code)] const HID_IFACE: u8 = #HID_IFACE; ) .to_string(), )?; Ok(()) }
use std::{ env, error::Error, fs, path::{Path, PathBuf}, }; fn main() -> Result<(), Box<dyn Error>> { let out_dir = &PathBuf::from(env::var("OUT_DIR")?); let flash = env::var_os("CARGO_FEATURE_FLASH").is_some(); descs(&out_dir)?; fs::copy("interrupts.x", out_dir.join("interrupts.x"))?; let suffix = if flash { "flash" } else { "ram" }; fs::copy(format!("link-{}.x", suffix), out_dir.join("link.x"))?; println!("cargo:rustc-link-search={}", out_dir.display()); Ok(()) } fn descs(out_dir: &Path) -> Result<(), Box<dyn Error>> { use core::num::NonZeroU8; use quote::quote; use usb2::{ cdc::{self, acm, call, header, union}, configuration::{self, bmAttributes}, device::{self, bMaxPacketSize0}, endpoint, hid, ia, interface, Direction, Endpoint, }; const PACKET_SIZE: bMaxPacketSize0 = bMaxPacketSize0::B64; const CONFIG_VAL: u8 = 1; const CDC_IFACE: u8 = 0; const HID_IFACE: u8 = 2; let device_desc = device::Descriptor { bDeviceClass: 0xEF, bDeviceSubClass: 2, bDeviceProtocol: 1, bMaxPacketSize0: bMaxPacketSize0::B64, bNumConfigurations: NonZeroU8::new(1).unwrap(), bcdDevice: 0x01_00, iManufacturer: None, iProduct: None, iSerialNumber: None, idProduct: consts::PID, idVendor: consts::VID, }; fn full_config_desc() -> Vec<u8> { let hid = env::var_os("CARGO_FEATURE_HID").is_some(); let mut bytes = vec![]; let mut nifaces = 2; if hid { nifaces += 1; } let config = configuration::Descriptor { bConfigurationValue: NonZeroU8::new(CONFIG_VAL).unwrap(), bMaxPower: 250, bNumInterfaces: NonZeroU8::new(nifaces).unwrap(), bmAttributes: bmAttributes { remote_wakeup: false, self_powered: false, }, iConfiguration: None, wTotalLength: 0, }; bytes.extend_from_slice(&config.bytes()); { let comm = cdc::Class::Communications { subclass: cdc::SubClass::AbstractControlModel, protocol: cdc::Protocol::ATCommands, }; let ia = ia::Descriptor { bFirstInterface: CDC_IFACE, bFunctionClass: comm.class(), bFunctionSubClass: comm.subclass(), bFunctionProtocol: comm.protocol(), bInterfaceCount: 
NonZeroU8::new(2).unwrap(), iFunction: None, }; bytes.extend_from_slice(&ia.bytes()); let iface0 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: CDC_IFACE, bInterfaceClass: comm.class().get(), bInterfaceSubClass: comm.subclass(), bInterfaceProtocol: comm.protocol(), bNumEndpoints: 1, iInterface: None, }; bytes.extend_from_slice(&iface0.bytes()); let header = header::Descriptor { bcdCDC: 0x01_10 }; bytes.extend_from_slice(&header.bytes()); let call = call::Descriptor { bmCapabilities: call::Capabilities { call_management: true, data_class: true, }, bDataInterface: 1, }; bytes.extend_from_slice(&call.bytes()); let acm = acm::Descripto
let line_coding = acm::LineCoding { bCharFormat: acm::bCharFormat::Stop1, bDataBits: acm::bDataBits::_8, bParityType: acm::bParityType::None, dwDTERate: 9_600, }; let serial_state = acm::SerialState { interface: 0, bOverRun: false, bParity: false, bFraming: false, bRingSignal: false, bBreak: false, bTxCarrier: true, bRxCarrier: true, }; let max_packet_size0 = PACKET_SIZE as u8; let lcb = line_coding.bytes(); let lcl = lcb.len(); let ssb = serial_state.bytes(); let ssl = ssb.len(); let ddb = device_desc.bytes(); let ddl = ddb.len(); let cdb = full_config_desc(); let cdl = cdb.len(); fs::write( out_dir.join("descs.rs"), quote!( const CONFIG_VAL: core::num::NonZeroU8 = unsafe { core::num::NonZeroU8::new_unchecked(#CONFIG_VAL) }; const MAX_PACKET_SIZE0: u8 = #max_packet_size0; #[allow(dead_code)] #[link_section = ".data.CONFIG_DESC"] static CONFIG_DESC: [u8; #cdl] = [#(#cdb,)*]; #[allow(dead_code)] #[link_section = ".data.DEVICE_DESC"] static DEVICE_DESC: [u8; #ddl] = [#(#ddb,)*]; #[allow(dead_code)] static mut LINE_CODING: [u8; #lcl] = [#(#lcb,)*]; #[allow(dead_code)] #[link_section = ".data.SERIAL_STATE"] static SERIAL_STATE: crate::util::Align4<[u8; #ssl]> = crate::util::Align4([#(#ssb,)*]); #[allow(dead_code)] const CDC_IFACE: u8 = #CDC_IFACE; #[allow(dead_code)] const HID_IFACE: u8 = #HID_IFACE; ) .to_string(), )?; Ok(()) }
r { bmCapabilities: acm::Capabilities { comm_features: false, line_serial: true, network_connection: false, send_break: false, }, }; bytes.extend_from_slice(&acm.bytes()); let union = union::Descriptor { bControlInterface: 0, bSubordinateInterface0: 1, }; bytes.extend_from_slice(&union.bytes()); let ep1in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 1, }, bInterval: 32, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep1in.bytes()); } { let cdc_data = cdc::Class::CdcData; let iface1 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: 1, bInterfaceClass: cdc_data.class().get(), bInterfaceSubClass: cdc_data.subclass(), bInterfaceProtocol: cdc_data.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface1.bytes()); let ep2out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep2out.bytes()); let ep2in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 2, }, bInterval: 0, ty: endpoint::Type::Bulk, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep2in.bytes()); } if hid { let hid = hid::Class; let iface2 = interface::Descriptor { bAlternativeSetting: 0, bInterfaceNumber: HID_IFACE, bInterfaceClass: hid.class().get(), bInterfaceSubClass: hid.subclass(), bInterfaceProtocol: hid.protocol(), bNumEndpoints: 2, iInterface: None, }; bytes.extend_from_slice(&iface2.bytes()); let report = hid::Descriptor { bCountryCode: hid::Country::NotSupported, wDescriptorLength: 33, }; bytes.extend_from_slice(&report.bytes()); let ep3out = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::Out, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { 
transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3out.bytes()); let ep3in = endpoint::Descriptor { bEndpointAddress: Endpoint { direction: Direction::In, number: 3, }, bInterval: 1, ty: endpoint::Type::Interrupt { transactions_per_microframe: endpoint::Transactions::_1, }, max_packet_size: PACKET_SIZE as u16, }; bytes.extend_from_slice(&ep3in.bytes()); } let total_length = bytes.len(); assert!( total_length <= usize::from(u16::max_value()), "configuration descriptor is too long" ); bytes[2] = total_length as u8; bytes[3] = (total_length >> 8) as u8; bytes }
function_block-function_prefixed
[ { "content": "fn main() -> Result<(), anyhow::Error> {\n\n let dev = serialport::available_ports()?\n\n .into_iter()\n\n .filter(|info| match info.port_type {\n\n SerialPortType::UsbPort(ref port) => port.vid == consts::VID,\n\n _ => false,\n\n })\n\n .next()...
Rust
examples/main.rs
grafana/grafana-plugin-sdk-rust
707d2b65b0c0be0d43d80a7b7778384d12f9d3c7
use std::{ sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, time::Duration, }; use bytes::Bytes; use chrono::prelude::*; use futures_util::stream::FuturesOrdered; use http::Response; use thiserror::Error; use tokio_stream::StreamExt; use tracing::{debug, info}; use grafana_plugin_sdk::{backend, data, prelude::*}; #[derive(Clone, Debug, Default)] struct MyPluginService(Arc<AtomicUsize>); impl MyPluginService { fn new() -> Self { Self(Arc::new(AtomicUsize::new(0))) } } #[derive(Debug, Error)] #[error("Error querying backend for query {ref_id}: {source}")] struct QueryError { source: data::Error, ref_id: String, } impl backend::DataQueryError for QueryError { fn ref_id(self) -> String { self.ref_id } } #[tonic::async_trait] impl backend::DataService for MyPluginService { type QueryError = QueryError; type Stream = backend::BoxDataResponseStream<Self::QueryError>; async fn query_data(&self, request: backend::QueryDataRequest) -> Self::Stream { Box::pin( request .queries .into_iter() .map(|x| async { Ok(backend::DataResponse::new( x.ref_id.clone(), vec![[ [ Utc.ymd(2021, 1, 1).and_hms(12, 0, 0), Utc.ymd(2021, 1, 1).and_hms(12, 0, 1), Utc.ymd(2021, 1, 1).and_hms(12, 0, 2), ] .into_field("time"), [1_u32, 2, 3].into_field("x"), ["a", "b", "c"].into_field("y"), ] .into_frame("foo") .check() .map_err(|source| QueryError { ref_id: x.ref_id, source, })?], )) }) .collect::<FuturesOrdered<_>>(), ) } } #[derive(Debug, Error)] #[error("Error streaming data")] enum StreamError { #[error("Error converting frame: {0}")] Conversion(#[from] backend::ConvertToError), #[error("Invalid frame returned: {0}")] InvalidFrame(#[from] data::Error), } #[tonic::async_trait] impl backend::StreamService for MyPluginService { type JsonValue = (); async fn subscribe_stream( &self, request: backend::SubscribeStreamRequest, ) -> Result<backend::SubscribeStreamResponse, Self::Error> { let response = if request.path.as_str() == "stream" { backend::SubscribeStreamResponse::ok(None) } else { 
backend::SubscribeStreamResponse::not_found() }; info!(path = %request.path, "Subscribing to stream"); Ok(response) } type Error = StreamError; type Stream = backend::BoxRunStream<Self::Error>; async fn run_stream( &self, _request: backend::RunStreamRequest, ) -> Result<Self::Stream, Self::Error> { info!("Running stream"); let mut x = 0u32; let n = 3; let mut frame = data::Frame::new("foo").with_field((x..x + n).into_field("x")); Ok(Box::pin( async_stream::try_stream! { loop { frame.fields_mut()[0].set_values( (x..x+n) )?; let packet = backend::StreamPacket::from_frame(frame.check()?)?; debug!("Yielding frame from {} to {}", x, x+n); yield packet; x += n; } } .throttle(Duration::from_secs(1)), )) } async fn publish_stream( &self, _request: backend::PublishStreamRequest, ) -> Result<backend::PublishStreamResponse, Self::Error> { info!("Publishing to stream"); todo!() } } #[derive(Debug, Error)] enum ResourceError { #[error("HTTP error: {0}")] Http(#[from] http::Error), #[error("Not found")] NotFound, } impl backend::ErrIntoHttpResponse for ResourceError { fn into_http_response(self) -> Result<http::Response<Bytes>, Box<dyn std::error::Error>> { let status = match &self { Self::Http(_) => http::StatusCode::INTERNAL_SERVER_ERROR, Self::NotFound => http::StatusCode::NOT_FOUND, }; Ok(Response::builder() .status(status) .header(http::header::CONTENT_TYPE, "application/json") .body(Bytes::from(serde_json::to_vec( &serde_json::json!({"error": self.to_string()}), )?))?) 
} } #[tonic::async_trait] impl backend::ResourceService for MyPluginService { type Error = ResourceError; type InitialResponse = http::Response<Bytes>; type Stream = backend::BoxResourceStream<Self::Error>; async fn call_resource( &self, r: backend::CallResourceRequest, ) -> Result<(Self::InitialResponse, Self::Stream), Self::Error> { let count = Arc::clone(&self.0); let response_and_stream = match r.request.uri().path() { "/echo" => Ok(( Response::new(r.request.into_body()), Box::pin(futures::stream::empty()) as Self::Stream, )), "/count" => Ok(( Response::new( count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(), ), Box::pin(async_stream::try_stream! { loop { let body = count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(); yield body; } }) as Self::Stream, )), _ => return Err(ResourceError::NotFound), }; response_and_stream } } #[grafana_plugin_sdk::main( services(data, resource, stream), init_subscriber = true, shutdown_handler = "0.0.0.0:10001" )] async fn plugin() -> MyPluginService { MyPluginService::new() }
use std::{ sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, time::Duration, }; use bytes::Bytes; use chrono::prelude::*; use futures_util::stream::FuturesOrdered; use http::Response; use thiserror::Error; use tokio_stream::StreamExt; use tracing::{debug, info}; use grafana_plugin_sdk::{backend, data, prelude::*}; #[derive(Clone, Debug, Default)] struct MyPluginService(Arc<AtomicUsize>); impl MyPluginService { fn new() -> Self { Self(Arc::new(AtomicUsize::new(0))) } } #[derive(Debug, Error)] #[error("Error querying backend for query {ref_id}: {source}")] struct QueryError { source: data::Error, ref_id: String, } impl backend::DataQueryError for QueryError { fn ref_id(self) -> String { self.ref_id } } #[tonic::async_trait] impl backend::DataService for MyPluginService { type QueryError = QueryError; type Stream = backend::BoxDataResponseStream<Self::QueryError>; async fn query_data(&self, request: backend::QueryDataRequest) -> Self::Stream { Box::pin( request .queries .into_iter() .map(|x| async { Ok(backend::DataResponse::new( x.ref_id.clone(), vec![[ [ Utc.ymd(2021, 1, 1).and_hms(12, 0, 0), Utc.ymd(2021, 1, 1).and_hms(12, 0, 1), Utc.ymd(2021, 1, 1).and_hms(12, 0, 2), ] .into_field("time"), [1_u32, 2, 3].into_field("x"), ["a", "b", "c"].into_field("y"), ] .into_frame("foo") .check() .map_err(|source| QueryError { ref_id: x.ref_id, source, })?], )) }) .collect::<FuturesOrdered<_>>(), ) } } #[derive(Debug, Error)] #[error("Error streaming data")] enum StreamError { #[error("Error converting frame: {0}")] Conversion(#[from] backend::ConvertToError), #[error("Invalid frame returned: {0}")] InvalidFrame(#[from] data::Error), } #[tonic::async_trait] impl backend::StreamService for MyPluginService { type JsonValue = (); async fn subscribe_stream( &self, request: backend::SubscribeStreamRequest, ) -> Result<backend::SubscribeStreamResponse, Self::Error> { let response = if request.path.as_str() == "stream" { backend::SubscribeStreamResponse::ok(None) } else { 
backend::SubscribeStreamResponse::not_found() }; info!(path = %request.path, "Subscribing to stream"); Ok(response) } type Error = StreamError; type Stream = backend::BoxRunStream<Self::Error>; async fn run_stream( &self, _request: backend::RunStreamRequest, ) -> Result<Self::Stream, Self::Error> { info!("Running stream"); let mut x = 0u32; let n = 3; let mut frame = da
async fn publish_stream( &self, _request: backend::PublishStreamRequest, ) -> Result<backend::PublishStreamResponse, Self::Error> { info!("Publishing to stream"); todo!() } } #[derive(Debug, Error)] enum ResourceError { #[error("HTTP error: {0}")] Http(#[from] http::Error), #[error("Not found")] NotFound, } impl backend::ErrIntoHttpResponse for ResourceError { fn into_http_response(self) -> Result<http::Response<Bytes>, Box<dyn std::error::Error>> { let status = match &self { Self::Http(_) => http::StatusCode::INTERNAL_SERVER_ERROR, Self::NotFound => http::StatusCode::NOT_FOUND, }; Ok(Response::builder() .status(status) .header(http::header::CONTENT_TYPE, "application/json") .body(Bytes::from(serde_json::to_vec( &serde_json::json!({"error": self.to_string()}), )?))?) } } #[tonic::async_trait] impl backend::ResourceService for MyPluginService { type Error = ResourceError; type InitialResponse = http::Response<Bytes>; type Stream = backend::BoxResourceStream<Self::Error>; async fn call_resource( &self, r: backend::CallResourceRequest, ) -> Result<(Self::InitialResponse, Self::Stream), Self::Error> { let count = Arc::clone(&self.0); let response_and_stream = match r.request.uri().path() { "/echo" => Ok(( Response::new(r.request.into_body()), Box::pin(futures::stream::empty()) as Self::Stream, )), "/count" => Ok(( Response::new( count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(), ), Box::pin(async_stream::try_stream! { loop { let body = count .fetch_add(1, Ordering::SeqCst) .to_string() .into_bytes() .into(); yield body; } }) as Self::Stream, )), _ => return Err(ResourceError::NotFound), }; response_and_stream } } #[grafana_plugin_sdk::main( services(data, resource, stream), init_subscriber = true, shutdown_handler = "0.0.0.0:10001" )] async fn plugin() -> MyPluginService { MyPluginService::new() }
ta::Frame::new("foo").with_field((x..x + n).into_field("x")); Ok(Box::pin( async_stream::try_stream! { loop { frame.fields_mut()[0].set_values( (x..x+n) )?; let packet = backend::StreamPacket::from_frame(frame.check()?)?; debug!("Yielding frame from {} to {}", x, x+n); yield packet; x += n; } } .throttle(Duration::from_secs(1)), )) }
function_block-function_prefixed
[ { "content": "/// Error supertrait used in [`DataService::query_data`].\n\npub trait DataQueryError: std::error::Error {\n\n /// Return the `ref_id` of the incoming query to which this error corresponds.\n\n ///\n\n /// This allows the SDK to align queries up with any failed requests.\n\n fn ref_id(...
Rust
src/command/last.rs
wojexe/baca-cli
c024bd3d95a2e54dfc25bdc0dec352551a3b4e7c
use crate::api::baca_api::BacaApi; use crate::command::details::Details; use crate::command::Command; use crate::error::{Error, Result}; use crate::model::Submit; use crate::workspace::{ConfigObject, ConnectionConfig, Workspace}; use clap::ArgMatches; pub struct Last { task_id: Option<String>, } impl Last { pub fn new() -> Self { Self { task_id: None } } pub fn with_filter(task_id: &str) -> Self { Self { task_id: Some(task_id.to_string()), } } fn get_last_submit<A>(&self, connection_config: &ConnectionConfig, api: &A) -> Result<Submit> where A: BacaApi, { let results = if let Some(task_id) = &self.task_id { api.get_results_by_task(connection_config, task_id)? } else { api.get_results(connection_config)? }; Ok(results.submits.first().ok_or(Error::NoSubmitsYet)?.clone()) } } impl Command for Last { fn execute<W, A>(self, workspace: &W, api: &A) -> Result<()> where W: Workspace, A: BacaApi, { let connection_config = ConnectionConfig::read_config(workspace)?; let last = self.get_last_submit(&connection_config, api)?; Details::new(&last.id).execute(workspace, api) } } impl From<&ArgMatches<'_>> for Last { fn from(args: &ArgMatches) -> Self { if let Some(task_id) = args.value_of("task") { return Last::with_filter(task_id); } Last::new() } } #[cfg(test)] mod tests { use super::*; use crate::api::baca_api::MockBacaApi; use crate::model::SubmitStatus; use crate::model::{Results, Submit}; use crate::workspace::{ConnectionConfig, MockWorkspace}; #[test] fn no_submits() { let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(|_| Ok(Results { submits: vec![] })); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_err()); assert!(matches!(result.err().unwrap(), Error::NoSubmitsYet)); } #[test] fn one_submit() { let expected = 
Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: vec![expected.clone()], }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = expected; mock_api .expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "3") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } #[test] fn three_submits() { let submit1 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "1".to_string(), max_points: None, problem_name: "Test Problem 1".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit2 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "2".to_string(), max_points: None, problem_name: "Test Problem 2".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit3 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem 3".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let all_submits = vec![submit1.clone(), submit2, submit3]; let mut mock_workspace = 
MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: all_submits, }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = submit1; mock_api .expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "1") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } }
use crate::api::baca_api::BacaApi; use crate::command::details::Details; use crate::command::Command; use crate::error::{Error, Result}; use crate::model::Submit; use crate::workspace::{ConfigObject, ConnectionConfig, Workspace}; use clap::ArgMatches; pub struct Last { task_id: Option<String>, } impl Last { pub fn new() -> Self { Self { task_id: None } } pub fn with_filter(task_id: &str) -> Self { Self { task_id: Some(task_id.to_string()), } } fn get_last_submit<A>(&self, connection_config: &ConnectionConfig, api: &A) -> Result<Submit> where A: BacaApi, { let results =
; Ok(results.submits.first().ok_or(Error::NoSubmitsYet)?.clone()) } } impl Command for Last { fn execute<W, A>(self, workspace: &W, api: &A) -> Result<()> where W: Workspace, A: BacaApi, { let connection_config = ConnectionConfig::read_config(workspace)?; let last = self.get_last_submit(&connection_config, api)?; Details::new(&last.id).execute(workspace, api) } } impl From<&ArgMatches<'_>> for Last { fn from(args: &ArgMatches) -> Self { if let Some(task_id) = args.value_of("task") { return Last::with_filter(task_id); } Last::new() } } #[cfg(test)] mod tests { use super::*; use crate::api::baca_api::MockBacaApi; use crate::model::SubmitStatus; use crate::model::{Results, Submit}; use crate::workspace::{ConnectionConfig, MockWorkspace}; #[test] fn no_submits() { let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(|_| Ok(Results { submits: vec![] })); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_err()); assert!(matches!(result.err().unwrap(), Error::NoSubmitsYet)); } #[test] fn one_submit() { let expected = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: vec![expected.clone()], }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = expected; mock_api 
.expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "3") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } #[test] fn three_submits() { let submit1 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "1".to_string(), max_points: None, problem_name: "Test Problem 1".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit2 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "2".to_string(), max_points: None, problem_name: "Test Problem 2".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let submit3 = Submit { status: SubmitStatus::InternalError, points: 0.0, lateness: None, accepted: 0, size: 123, timestamp: "2002".to_string(), language: "Java".to_string(), id: "3".to_string(), max_points: None, problem_name: "Test Problem 3".to_string(), link: "www.baca.pl".to_string(), test_results: None, }; let all_submits = vec![submit1.clone(), submit2, submit3]; let mut mock_workspace = MockWorkspace::new(); mock_workspace .expect_read_config_object() .returning(|| Ok(ConnectionConfig::default())); let mut mock_api = MockBacaApi::new(); let results = Results { submits: all_submits, }; mock_api .expect_get_results() .withf(|x| *x == ConnectionConfig::default()) .returning(move |_| Ok(results.clone())); let submit = submit1; mock_api .expect_get_submit_details() .withf(|x, id| *x == ConnectionConfig::default() && id == "1") .returning(move |_, _| Ok(submit.clone())); let last = Last::new(); let result = last.execute(&mock_workspace, &mock_api); assert!(result.is_ok()); } }
if let Some(task_id) = &self.task_id { api.get_results_by_task(connection_config, task_id)? } else { api.get_results(connection_config)? }
if_condition
[ { "content": "pub fn assert_contains_pattern(command: &[&str], pattern: &str) -> Result<(), Box<dyn Error>> {\n\n let (dir, mut cmd) = set_up_with_dir()?;\n\n\n\n cmd.args(command);\n\n cmd.assert()\n\n // .failure() // todo: exit codes\n\n .stdout(predicate::str::contains(pattern));\n\n\...
Rust
src/cmus_status/output/mod.rs
Noah2610/cmus-status-line
04727c8994b3e53c3d715f01500a226711e030d0
mod builder; mod format; pub use format::prelude::*; use super::data::prelude::*; use crate::error::prelude::*; use std::fmt; use builder::StatusOutputBuilder; const OVERFLOW_STR: &str = "..."; pub struct StatusOutput { data: CmusData, format: Format, } impl StatusOutput { pub fn builder() -> StatusOutputBuilder { StatusOutputBuilder::default() } fn get_format_text_for_parts<'a>( &self, parts: Vec<&'a FormatPart>, ) -> String { parts .iter() .filter_map(|part| self.get_format_text(part)) .collect::<Vec<String>>() .join("") } fn get_format_text(&self, part: &FormatPart) -> Option<String> { match part { FormatPart::Text(text) => Some(text.to_string()), FormatPart::Title => self.data.get_title(), FormatPart::Status => Some(self.data.get_status().to_string()), FormatPart::Tag(tag_name) => self.data.get_tag(tag_name), FormatPart::MatchStatus(playback_status, text) => { if self.data.is_status(playback_status) { Some(text.to_string()) } else { None } } FormatPart::Truncate(format_part_inner, max) => { let max = *max; self.get_format_text(format_part_inner.as_ref()) .map(|text| { let mut text = text.to_string(); if text.len() > max { let overflow_str_len = OVERFLOW_STR.len(); if max >= overflow_str_len * 2 { text.truncate(max - overflow_str_len); text.push_str(OVERFLOW_STR); } else { text.truncate(max); } } text }) } FormatPart::HtmlEscape(format_part_inner) => self .get_format_text(format_part_inner.as_ref()) .map(|text| htmlescape::encode_minimal(text.as_str())), FormatPart::ProgressBar(bar_config) => { if let Some(time) = self.data.get_time() { let width = bar_config.inner_width(); let percent_complete = time.completion_percentage(); let characters = (width as f32 * percent_complete).round() as usize; Some(bar_config.text_with_filled(characters)) } else { None } } FormatPart::Container(format_parts_inner) => Some( self.get_format_text_for_parts( format_parts_inner .iter() .map(std::ops::Deref::deref) .collect(), ), ), FormatPart::If(expression, format_part_inner) => { 
if self.is_expression_true(expression) { self.get_format_text(format_part_inner) } else { None } } FormatPart::IfElse( expression, format_part_true, format_part_false, ) => { if self.is_expression_true(expression) { self.get_format_text(format_part_true) } else { self.get_format_text(format_part_false) } } } } fn is_expression_true(&self, expression: &FormatExpression) -> bool { match expression { FormatExpression::True => true, FormatExpression::False => false, FormatExpression::And(expr_one, expr_two) => { self.is_expression_true(expr_one) && self.is_expression_true(expr_two) } FormatExpression::Or(expr_one, expr_two) => { self.is_expression_true(expr_one) || self.is_expression_true(expr_two) } FormatExpression::Not(expr) => !self.is_expression_true(expr), FormatExpression::IsStatus(playback_status) => { self.data.is_status(&playback_status) } FormatExpression::HasTag(tag_name) => self.data.has_tag(&tag_name), } } } impl fmt::Display for StatusOutput { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", self.get_format_text_for_parts(self.format.iter().collect()) ) } }
mod builder; mod format; pub use format::prelude::*; use super::data::prelude::*; use crate::error::prelude::*; use std::fmt; use builder::StatusOutputBuilder; const OVERFLOW_STR: &str = "..."; pub struct StatusOutput { data: CmusData, format: Format, } impl StatusOutput { pub fn builder() -> StatusOutputBuilder { StatusOutputBuilder::default() } fn get_format_text_for_parts<'a>( &self, parts: Vec<&'a FormatPart>, ) -> String { parts .iter() .filter_map(|part| self.get_format_text(part)) .collect::<Vec<String>>() .join("") } fn get_format_text(&self, part: &FormatPart) -> Option<String> { match part { FormatPart::Text(text) => Some(text.to_string()), FormatPart::Title => self.data.get_title(), FormatPart::Status => Some(self.data.get_status().to_string()), FormatPart::Tag(tag_name) => self.data.get_tag(tag_name), FormatPart::MatchStatus(playback_status, text) => { if self.data.is_status(playback_status) { Some(text.to_string()) } else { None } } FormatPart::Truncate(format_part_inner, max) => { let max = *max; self.get_format_text(format_part_inner.as_ref()) .map(|text| { let mut text = text.to_string(); if text.len() > max { let overflow_str_len = OVERFLOW_STR.len(); if max >= overflow_str_len * 2 { text.truncate(max - overflow_str_len); text.push_str(OVERFLOW_STR); } else { text.truncate(max); } } text }) } FormatPart::HtmlEscape(format_part_inner) => self .get_format_text(format_part_inner.as_ref()) .map(|text| htmlescape::encode_minimal(text.as_str())), FormatPart::ProgressBar(bar_config) => {
} FormatPart::Container(format_parts_inner) => Some( self.get_format_text_for_parts( format_parts_inner .iter() .map(std::ops::Deref::deref) .collect(), ), ), FormatPart::If(expression, format_part_inner) => { if self.is_expression_true(expression) { self.get_format_text(format_part_inner) } else { None } } FormatPart::IfElse( expression, format_part_true, format_part_false, ) => { if self.is_expression_true(expression) { self.get_format_text(format_part_true) } else { self.get_format_text(format_part_false) } } } } fn is_expression_true(&self, expression: &FormatExpression) -> bool { match expression { FormatExpression::True => true, FormatExpression::False => false, FormatExpression::And(expr_one, expr_two) => { self.is_expression_true(expr_one) && self.is_expression_true(expr_two) } FormatExpression::Or(expr_one, expr_two) => { self.is_expression_true(expr_one) || self.is_expression_true(expr_two) } FormatExpression::Not(expr) => !self.is_expression_true(expr), FormatExpression::IsStatus(playback_status) => { self.data.is_status(&playback_status) } FormatExpression::HasTag(tag_name) => self.data.has_tag(&tag_name), } } } impl fmt::Display for StatusOutput { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", self.get_format_text_for_parts(self.format.iter().collect()) ) } }
if let Some(time) = self.data.get_time() { let width = bar_config.inner_width(); let percent_complete = time.completion_percentage(); let characters = (width as f32 * percent_complete).round() as usize; Some(bar_config.text_with_filled(characters)) } else { None }
if_condition
[ { "content": "pub fn dump_config() {\n\n print!(\n\n r#\"# DEFAULT CONFIG FOR {name}\n\n# To write this config to the proper config file, run something like:\n\n# mkdir -p ~/.config/{name}\n\n# {name} {cmd_dump_config} > ~/.config/{name}/config.toml\n\n\n\n{config}\"#,\n\n name = crate:...
Rust
risc0/zkvm/sdk/rust/serde/src/deserializer.rs
risc0/risc0
2e8a4959a4b4247a1d4b35678af20ab184317931
use serde::de::{Deserialize, DeserializeSeed, IntoDeserializer, Visitor}; use crate::{ align_up, err::{Error, Result}, }; pub fn from_slice<'a, T: Deserialize<'a>>(slice: &'a [u32]) -> Result<T> { let mut deserializer = Deserializer::new(slice); T::deserialize(&mut deserializer) } pub struct Deserializer<'de> { slice: &'de [u32], } struct SeqAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'de, 'a> serde::de::SeqAccess<'de> for SeqAccess<'a, 'de> { type Error = Error; fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>> where T: DeserializeSeed<'de>, { if self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de, 'a> serde::de::VariantAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; fn unit_variant(self) -> Result<()> { Ok(()) } fn newtype_variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, self) } fn tuple_variant<V: Visitor<'de>>(self, len: usize, visitor: V) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, len, visitor) } fn struct_variant<V: Visitor<'de>>( self, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, fields.len(), visitor) } } impl<'de, 'a> serde::de::EnumAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; type Variant = Self; fn variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<(V::Value, Self)> { let tag = self.try_take_word()?; let val = DeserializeSeed::deserialize(seed, tag.into_deserializer())?; Ok((val, self)) } } struct MapAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'a, 'de: 'a> serde::de::MapAccess<'de> for MapAccess<'a, 'de> { type Error = Error; fn next_key_seed<K: DeserializeSeed<'de>>(&mut self, seed: K) -> Result<Option<K::Value>> { if 
self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn next_value_seed<V: DeserializeSeed<'de>>(&mut self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, &mut *self.deserializer) } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de> Deserializer<'de> { pub fn new(slice: &'de [u32]) -> Self { Deserializer { slice } } fn try_take_word(&mut self) -> Result<u32> { if self.slice.len() >= 1 { let (head, tail) = self.slice.split_first().unwrap(); self.slice = tail; Ok(*head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_dword(&mut self) -> Result<u64> { if self.slice.len() >= 2 { let (head, tail) = self.slice.split_at(2); self.slice = tail; let low: u64 = head[0].into(); let high: u64 = head[1].into(); Ok(low | high << 32) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n(&mut self, len: usize) -> Result<&'de [u32]> { if self.slice.len() >= len { let (head, tail) = self.slice.split_at(len); self.slice = tail; Ok(head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n_bytes(&mut self, len: usize) -> Result<&'de [u8]> { let len_words = align_up(len, 4) / 4; let words: &'de [u32] = self.try_take_n(len_words)?; Ok(&bytemuck::cast_slice(words)[..len]) } } impl<'de, 'a> serde::Deserializer<'de> for &'a mut Deserializer<'de> { type Error = Error; fn is_human_readable(&self) -> bool { false } fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let val = match self.try_take_word()? { 0 => false, 1 => true, _ => return Err(Error::DeserializeBadBool), }; visitor.visit_bool(val) } fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? 
as i32) } fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i64(self.try_take_dword()? as i64) } fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u64(self.try_take_dword()?) } fn deserialize_f32<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_f64<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_char<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; if len_bytes > 4 { return Err(Error::DeserializeBadChar); } let bytes: &'de [u8] = self.try_take_n_bytes(len_bytes)?; let character = core::str::from_utf8(&bytes) .map_err(|_| Error::DeserializeBadChar)? .chars() .next() .ok_or(Error::DeserializeBadChar)?; visitor.visit_char(character) } fn deserialize_str<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? 
as usize; let bytes = self.try_take_n_bytes(len_bytes)?; let str = core::str::from_utf8(bytes).map_err(|_| Error::DeserializeBadUtf8)?; visitor.visit_borrowed_str(str) } fn deserialize_string<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_str(visitor) } fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; let bytes = self.try_take_n_bytes(len_bytes)?; visitor.visit_borrowed_bytes(bytes) } fn deserialize_byte_buf<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_bytes(visitor) } fn deserialize_option<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { match self.try_take_word()? { 0 => visitor.visit_none(), 1 => visitor.visit_some(self), _ => Err(Error::DeserializeBadOption), } } fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_unit() } fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_unit(visitor) } fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_newtype_struct(self) } fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? as usize; visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple_struct<V>( self, _name: &'static str, len: usize, visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(len, visitor) } fn deserialize_map<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? 
as usize; visitor.visit_map(MapAccess { deserializer: self, len, }) } fn deserialize_struct<V>( self, _name: &'static str, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(fields.len(), visitor) } fn deserialize_enum<V>( self, _name: &'static str, _variants: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_enum(self) } fn deserialize_identifier<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_ignored_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } } #[cfg(test)] mod test { use alloc::string::String; use super::*; #[test] fn test_struct() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { bool: bool, i8: i8, u8: u8, i16: i16, u16: u16, i32: i32, u32: u32, i64: i64, u64: u64, } let words = [ 1, -4_i32 as u32, 4, -5_i32 as u32, 5, -6_i32 as u32, 6, -7_i32 as u32, 0xffffffff, 7, 0x00000000, ]; let expected = Test { bool: true, i8: -4, u8: 4, i16: -5, u16: 5, i32: -6, u32: 6, i64: -7, u64: 7, }; assert_eq!(expected, from_slice(&words).unwrap()); } #[test] fn test_str() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { first: String, second: String, } let words = [1, 0x00000061, 3, 0x00636261]; let expected = Test { first: "a".into(), second: "abc".into(), }; assert_eq!(expected, from_slice(&words).unwrap()); } }
use serde::de::{Deserialize, DeserializeSeed, IntoDeserializer, Visitor}; use crate::{ align_up, err::{Error, Result}, }; pub fn from_slice<'a, T: Deserialize<'a>>(slice: &'a [u32]) -> Result<T> { let mut deserializer = Deserializer::new(slice); T::deserialize(&mut deserializer) } pub struct Deserializer<'de> { slice: &'de [u32], } struct SeqAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'de, 'a> serde::de::SeqAccess<'de> for SeqAccess<'a, 'de> { type Error = Error; fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>> where T: DeserializeSeed<'de>, { if self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de, 'a> serde::de::VariantAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; fn unit_variant(self) -> Result<()> { Ok(()) } fn newtype_variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, self) } fn tuple_variant<V: Visitor<'de>>(self, len: usize, visitor: V) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, len, visitor) } fn struct_variant<V: Visitor<'de>>( self, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> { serde::de::Deserializer::deserialize_tuple(self, fields.len(), visitor) } } impl<'de, 'a> serde::de::EnumAccess<'de> for &'a mut Deserializer<'de> { type Error = Error; type Variant = Self; fn variant_seed<V: DeserializeSeed<'de>>(self, seed: V) -> Result<(V::Value, Self)> { let tag = self.try_take_word()?; let val = DeserializeSeed::deserialize(seed, tag.into_deserializer())?; Ok((val, self)) } } struct MapAccess<'a, 'de> { deserializer: &'a mut Deserializer<'de>, len: usize, } impl<'a, 'de: 'a> serde::de::MapAccess<'de> for MapAccess<'a, 'de> { type Error = Error; fn next_key_seed<K: DeserializeSeed<'de>>(&mut self, seed: K) -> Result<Option<K::Value>> { if 
self.len > 0 { self.len -= 1; Ok(Some(DeserializeSeed::deserialize( seed, &mut *self.deserializer, )?)) } else { Ok(None) } } fn next_value_seed<V: DeserializeSeed<'de>>(&mut self, seed: V) -> Result<V::Value> { DeserializeSeed::deserialize(seed, &mut *self.deserializer) } fn size_hint(&self) -> Option<usize> { Some(self.len) } } impl<'de> Deserializer<'de> { pub fn new(slice: &'de [u32]) -> Self { Deserializer { slice } } fn try_take_word(&mut self) -> Result<u32> { if self.slice.len() >= 1 { let (head, tail) = self.slice.split_first().unwrap(); self.slice = tail; Ok(*head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_dword(&mut self) -> Result<u64> { if self.slice.len() >= 2 { let (head, tail) = self.slice.split_at(2); self.slice = tail; let low: u64 = head[0].into(); let high: u64 = head[1].into(); Ok(low | high << 32) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n(&mut self, len: usize) -> Result<&'de [u32]> { if self.slice.len() >= len { let (head, tail) = self.slice.split_at(len); self.slice = tail; Ok(head) } else { Err(Error::DeserializeUnexpectedEnd) } } fn try_take_n_bytes(&mut self, len: usize) -> Result<&'de [u8]> { let len_words = align_up(len, 4) / 4; let words: &'de [u32] = self.try_take_n(len_words)?; Ok(&bytemuck::cast_slice(words)[..len]) } } impl<'de, 'a> serde::Deserializer<'de> for &'a mut Deserializer<'de> { type Error = Error; fn is_human_readable(&self) -> bool { false } fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let val = match self.try_take_word()? { 0 => false, 1 => true, _ => return Err(Error::DeserializeBadBool), }; visitor.visit_bool(val) } fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? 
as i32) } fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i32(self.try_take_word()? as i32) } fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_i64(self.try_take_dword()? as i64) } fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u32(self.try_take_word()?) } fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_u64(self.try_take_dword()?) } fn deserialize_f32<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_f64<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_char<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; if len_bytes > 4 { return Err(Error::DeserializeBadChar); } let bytes: &'de [u8] = self.try_take_n_bytes(len_bytes)?;
visitor.visit_char(character) } fn deserialize_str<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; let bytes = self.try_take_n_bytes(len_bytes)?; let str = core::str::from_utf8(bytes).map_err(|_| Error::DeserializeBadUtf8)?; visitor.visit_borrowed_str(str) } fn deserialize_string<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_str(visitor) } fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len_bytes = self.try_take_word()? as usize; let bytes = self.try_take_n_bytes(len_bytes)?; visitor.visit_borrowed_bytes(bytes) } fn deserialize_byte_buf<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_bytes(visitor) } fn deserialize_option<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { match self.try_take_word()? { 0 => visitor.visit_none(), 1 => visitor.visit_some(self), _ => Err(Error::DeserializeBadOption), } } fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_unit() } fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_unit(visitor) } fn deserialize_newtype_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_newtype_struct(self) } fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? 
as usize; visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_seq(SeqAccess { deserializer: self, len, }) } fn deserialize_tuple_struct<V>( self, _name: &'static str, len: usize, visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(len, visitor) } fn deserialize_map<V>(self, visitor: V) -> Result<V::Value> where V: Visitor<'de>, { let len = self.try_take_word()? as usize; visitor.visit_map(MapAccess { deserializer: self, len, }) } fn deserialize_struct<V>( self, _name: &'static str, fields: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { self.deserialize_tuple(fields.len(), visitor) } fn deserialize_enum<V>( self, _name: &'static str, _variants: &'static [&'static str], visitor: V, ) -> Result<V::Value> where V: Visitor<'de>, { visitor.visit_enum(self) } fn deserialize_identifier<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } fn deserialize_ignored_any<V>(self, _visitor: V) -> Result<V::Value> where V: Visitor<'de>, { Err(Error::NotSupported) } } #[cfg(test)] mod test { use alloc::string::String; use super::*; #[test] fn test_struct() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { bool: bool, i8: i8, u8: u8, i16: i16, u16: u16, i32: i32, u32: u32, i64: i64, u64: u64, } let words = [ 1, -4_i32 as u32, 4, -5_i32 as u32, 5, -6_i32 as u32, 6, -7_i32 as u32, 0xffffffff, 7, 0x00000000, ]; let expected = Test { bool: true, i8: -4, u8: 4, i16: -5, u16: 5, i32: -6, u32: 6, i64: -7, u64: 7, }; assert_eq!(expected, from_slice(&words).unwrap()); } #[test] fn test_str() { use serde::Deserialize; #[derive(Deserialize, PartialEq, Debug)] struct Test { first: String, second: String, } let words = [1, 0x00000061, 3, 0x00636261]; let expected = Test { first: "a".into(), second: "abc".into(), }; assert_eq!(expected, 
from_slice(&words).unwrap()); } }
let character = core::str::from_utf8(&bytes) .map_err(|_| Error::DeserializeBadChar)? .chars() .next() .ok_or(Error::DeserializeBadChar)?;
assignment_statement
[ { "content": "fn into_words(slice: &[u8]) -> Result<Vec<u32>> {\n\n let mut vec = Vec::new();\n\n let chunks = slice.chunks_exact(4);\n\n assert!(chunks.remainder().len() == 0);\n\n for chunk in chunks {\n\n let word = chunk[0] as u32\n\n | (chunk[1] as u32) << 8\n\n | (...
Rust
botan/src/rng.rs
chux0519/botan-rs
4fc7560f2fa29a0ced584027fe2c791393d24773
use botan_sys::*; use utils::*; #[derive(Debug)] pub struct RandomNumberGenerator { obj: botan_rng_t } impl Drop for RandomNumberGenerator { fn drop(&mut self) { unsafe { botan_rng_destroy(self.obj); } } } impl RandomNumberGenerator { fn new_of_type(typ: &str) -> Result<RandomNumberGenerator> { let mut obj = ptr::null_mut(); let typ = make_cstr(typ)?; call_botan! { botan_rng_init(&mut obj, typ.as_ptr()) } Ok(RandomNumberGenerator { obj }) } pub(crate) fn handle(&self) -> botan_rng_t { self.obj } pub fn new_userspace() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("user") } pub fn new_system() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("system") } pub fn new() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_userspace() } pub fn read(&self, len: usize) -> Result<Vec<u8>> { let mut result = vec![0; len]; self.fill(&mut result)?; Ok(result) } pub fn fill(&self, out: &mut [u8]) -> Result<()> { call_botan! { botan_rng_get(self.obj, out.as_mut_ptr(), out.len()) } Ok(()) } pub fn reseed(&self, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed(self.obj, bits) } Ok(()) } pub fn reseed_from_rng(&self, source: &RandomNumberGenerator, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed_from_rng(self.obj, source.handle(), bits) } Ok(()) } pub fn add_entropy(&self, seed: &[u8]) -> Result<()> { call_botan! { botan_rng_add_entropy(self.obj, seed.as_ptr(), seed.len()) } Ok(()) } }
use botan_sys::*; use utils::*; #[derive(Debug)] pub struct RandomNumberGenerator { obj: botan_rng_t } impl Drop for RandomNumberGenerator { fn drop(&mut self) { unsafe { botan_rng_destroy(self.obj); } } } impl RandomNumberGenerator { fn new_of_type(typ: &str) -> Result<RandomNumberGe
} pub fn new() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_userspace() } pub fn read(&self, len: usize) -> Result<Vec<u8>> { let mut result = vec![0; len]; self.fill(&mut result)?; Ok(result) } pub fn fill(&self, out: &mut [u8]) -> Result<()> { call_botan! { botan_rng_get(self.obj, out.as_mut_ptr(), out.len()) } Ok(()) } pub fn reseed(&self, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed(self.obj, bits) } Ok(()) } pub fn reseed_from_rng(&self, source: &RandomNumberGenerator, bits: usize) -> Result<()> { call_botan! { botan_rng_reseed_from_rng(self.obj, source.handle(), bits) } Ok(()) } pub fn add_entropy(&self, seed: &[u8]) -> Result<()> { call_botan! { botan_rng_add_entropy(self.obj, seed.as_ptr(), seed.len()) } Ok(()) } }
nerator> { let mut obj = ptr::null_mut(); let typ = make_cstr(typ)?; call_botan! { botan_rng_init(&mut obj, typ.as_ptr()) } Ok(RandomNumberGenerator { obj }) } pub(crate) fn handle(&self) -> botan_rng_t { self.obj } pub fn new_userspace() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("user") } pub fn new_system() -> Result<RandomNumberGenerator> { RandomNumberGenerator::new_of_type("system")
random
[ { "content": "/// Password based key derivation function\n\n///\n\n/// Note currently only PBKDF2 is supported by this interface.\n\n/// For PBKDF2, iterations >= 100000 is recommended.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// let rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let salt = rng.read(...
Rust
src/tests.rs
metal4people/merkletree
e2d3d22c51eb51a90da3b629ef71fc0423e61018
#![cfg(test)] use crate::merkletree::MerkleTree; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; fn hash_value<T>(value: T) -> u64 where T: Hash, { let mut hasher = DefaultHasher::new(); value.hash(&mut hasher); return hasher.finish(); } #[test] fn test_from_str_vec() { let values = vec!["one", "two", "three", "four"]; let hashes = vec![ hash_value(&values[0]), hash_value(&values[1]), hash_value(&values[2]), hash_value(&values[3]), ]; let count = values.len(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((hashes[0], hashes[1])); let h23 = hash_value((hashes[2], hashes[3])); let root_hash = hash_value((h01, h23)); assert_eq!(tree.count(), count); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree_empty() { let values: Vec<Vec<u8>> = vec![]; let tree = MerkleTree::build_tree(values); let mut hasher = DefaultHasher::new(); "".hash(&mut hasher); let empty_hash = hasher.finish(); let root_hash = tree.root_hash().clone(); assert_eq!(root_hash, empty_hash); } #[test] fn test_build_tree1() { let values = vec!["hello, world".to_string()]; let root_hash = hash_value(&values[0]); let tree = MerkleTree::build_tree(values); assert_eq!(tree.count(), 1); assert_eq!(tree.height(), 0); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree3() { let values = vec![vec![1], vec![2], vec![3]]; let tree = MerkleTree::build_tree(values); let hashes = vec![ hash_value(&vec![1]), hash_value(&vec![2]), hash_value(&vec![3]), ]; let h01 = hash_value((&hashes[0], &hashes[1])); let h2 = &hashes[2]; let root_hash = hash_value((&h01, h2)); assert_eq!(tree.count(), 3); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree9() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let hashes = values.iter().map(|v| hash_value(v)).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((&hashes[0], 
&hashes[1])); let h23 = hash_value((&hashes[2], &hashes[3])); let h45 = hash_value((&hashes[4], &hashes[5])); let h67 = hash_value((&hashes[6], &hashes[7])); let h8 = &hashes[8]; let h0123 = hash_value((&h01, &h23)); let h4567 = hash_value((&h45, &h67)); let h1to7 = hash_value((&h0123, &h4567)); let root_hash = hash_value((&h1to7, h8)); assert_eq!(tree.count(), 9); assert_eq!(tree.height(), 4); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_valid_proof() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for value in values { let proof = tree.gen_proof(value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } } #[test] fn test_valid_proof_str() { let values = vec!["Hello", "my", "name", "is", "Rusty"]; let tree = MerkleTree::build_tree(values); let value = "Rusty"; let proof = tree.gen_proof(&value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } #[test] fn test_wrong_proof() { let values1 = vec![vec![1], vec![2], vec![3], vec![4]]; let tree1 = MerkleTree::build_tree(values1.clone()); let values2 = vec![vec![4], vec![5], vec![6], vec![7]]; let tree2 = MerkleTree::build_tree(values2); for value in values1 { let proof = tree1.gen_proof(value); assert!(proof.is_some()); let is_valid = tree2.validate_proof(&proof.unwrap()); assert_eq!(is_valid, false); } } #[test] fn test_nth_proof() { for &count in &[1, 2, 3, 10, 15, 16, 17, 22] { let values = (1..=count).map(|x| vec![x as u8]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for i in 0..count { let proof = tree.gen_nth_proof(i); assert!(proof.is_some()); assert_eq!(vec![i as u8 + 1], proof.as_ref().unwrap().value); assert!(tree.validate_proof(&proof.unwrap())); } assert!(tree.gen_nth_proof(count).is_none()); assert!(tree.gen_nth_proof(count + 1000).is_none()); } }
#![cfg(test)] use crate::merkletree::MerkleTree; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; fn hash_value<T>(value: T) -> u64 where T: Hash, { let mut hasher = DefaultHasher::new(); value.hash(&mut hasher); return hasher.finish(); } #[test] fn test_from_str_vec() { let values = vec!["one", "two", "three", "four"]; let hashes = vec![ hash_value(&values[0]), hash_value(&values[1]), hash_value(&values[2]), hash_value(&values[3]), ]; let count = values.len(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((hashes[0], hashes[1])); let h23 = hash_value((hashes[2], hashes[3])); let root_hash = hash_value((h01, h23)); assert_eq!(tree.count(), count); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree_empty() { let values: Vec<Vec<u8>> = vec![]; let tree = MerkleTree::build_tree(values); let mut hasher = DefaultHasher::new(); "".hash(&mut hasher); let empty_hash = hasher.finish(); let root_hash = tree.root_hash().clone(); assert_eq!(root_hash, empty_hash); } #[test] fn test_build_tree1() { let values = vec!["hello, world".to_string()]; let root_hash = hash_value(&values[0]); let tree = MerkleTree::build_tree(values); assert_eq!(tree.count(), 1); assert_eq!(tree.height(), 0); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree3() { let values = vec![vec![1], vec![2], vec![3]]; let tree = MerkleTree::build_tree(values); let hashes = vec![ hash_value(&vec![1]), hash_value(&vec![2]), hash_value(&vec![3]), ]; let h01 = hash_value((&hashes[0], &hashes[1])); let h2 = &hashes[2]; let root_hash = hash_value((&h01, h2)); assert_eq!(tree.count(), 3); assert_eq!(tree.height(), 2); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_build_tree9() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let hashes = values.iter().map(|v| hash_value(v)).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values); let h01 = hash_value((&hashes[0], 
&hashes[1])); let h23 = hash_value((&hashes[2], &hashes[3])); let h45 = hash_value((&hashes[4], &hashes[5])); let h67 = hash_value((&hashes[6], &hashes[7])); let h8 = &hashes[8]; let h0123 = hash_value((&h01, &h23)); let h4567 = hash_value((&h45, &h67)); let h1to7 = hash_value((&h0123, &h4567)); let root_hash = hash_value((&h1to7, h8)); assert_eq!(tree.count(), 9); assert_eq!(tree.height(), 4); assert_eq!(tree.root_hash(), root_hash); } #[test] fn test_va
#[test] fn test_valid_proof_str() { let values = vec!["Hello", "my", "name", "is", "Rusty"]; let tree = MerkleTree::build_tree(values); let value = "Rusty"; let proof = tree.gen_proof(&value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } #[test] fn test_wrong_proof() { let values1 = vec![vec![1], vec![2], vec![3], vec![4]]; let tree1 = MerkleTree::build_tree(values1.clone()); let values2 = vec![vec![4], vec![5], vec![6], vec![7]]; let tree2 = MerkleTree::build_tree(values2); for value in values1 { let proof = tree1.gen_proof(value); assert!(proof.is_some()); let is_valid = tree2.validate_proof(&proof.unwrap()); assert_eq!(is_valid, false); } } #[test] fn test_nth_proof() { for &count in &[1, 2, 3, 10, 15, 16, 17, 22] { let values = (1..=count).map(|x| vec![x as u8]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for i in 0..count { let proof = tree.gen_nth_proof(i); assert!(proof.is_some()); assert_eq!(vec![i as u8 + 1], proof.as_ref().unwrap().value); assert!(tree.validate_proof(&proof.unwrap())); } assert!(tree.gen_nth_proof(count).is_none()); assert!(tree.gen_nth_proof(count + 1000).is_none()); } }
lid_proof() { let values = (1..10).map(|x| vec![x]).collect::<Vec<_>>(); let tree = MerkleTree::build_tree(values.clone()); for value in values { let proof = tree.gen_proof(value); assert!(proof.is_some()); let is_valid = tree.validate_proof(&proof.unwrap()); assert!(is_valid); } }
function_block-function_prefixed
[ { "content": "fn bench_big_rnd_tree(c: &mut Criterion) {\n\n c.bench_function(\"MerkleTree::build_tree - big\", |b| {\n\n let mut values = vec![vec![0u8; 256]; 160];\n\n let mut rng = rand::thread_rng();\n\n\n\n for mut v in &mut values {\n\n rng.fill_bytes(&mut v);\n\n ...
Rust
src/routes/balances/handlers_v2.rs
tharsis/safe-client-gateway
98206c5bfcd89f10c3429edccdc108017b179d49
use std::cmp::Ordering; use std::str::FromStr; use bigdecimal::BigDecimal; use rocket::futures::{stream, StreamExt}; use crate::cache::cache_operations::RequestCached; use crate::common::models::backend::balances_v2::Balance as BalanceDto; use crate::common::models::backend::balances_v2::TokenPrice as BackendTokenPrice; use crate::common::models::backend::chains::NativeCurrency; use crate::config::{ balances_core_request_cache_duration, balances_request_timeout, concurrent_balance_token_requests, token_price_cache_duration, }; use crate::providers::fiat::FiatInfoProvider; use crate::providers::info::{DefaultInfoProvider, InfoProvider}; use crate::routes::balances::models::{Balance, Balances, TokenPrice}; use crate::utils::context::RequestContext; use crate::utils::errors::ApiResult; pub async fn balances( context: &RequestContext, chain_id: &str, safe_address: &str, fiat: &str, trusted: bool, exclude_spam: bool, ) -> ApiResult<Balances> { let info_provider = DefaultInfoProvider::new(chain_id, context); let fiat_info_provider = FiatInfoProvider::new(context); let url = core_uri!( info_provider, "/v1/safes/{}/balances/?trusted={}&exclude_spam={}", safe_address, trusted, exclude_spam )?; let body = RequestCached::new_from_context(url, context) .cache_duration(balances_core_request_cache_duration()) .request_timeout(balances_request_timeout()) .execute() .await?; let backend_balances: Vec<BalanceDto> = serde_json::from_str(&body)?; let usd_to_fiat = fiat_info_provider .exchange_usd_to(fiat) .await .unwrap_or(BigDecimal::from(0)); let native_currency: NativeCurrency = info_provider.chain_info().await?.native_currency; let mut total_fiat = 0.0; let token_prices: Vec<TokenPrice> = get_token_prices(context, &info_provider, &backend_balances).await; let mut service_balances: Vec<Balance> = backend_balances .iter() .map(|it| { let token_address: String = it .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()); let token_price: 
Option<&TokenPrice> = token_prices .iter() .find(|&token_price| token_price.address == token_address); let token_to_usd: BigDecimal = token_price .and_then(|t| Some(t.fiat_price.to_owned())) .unwrap_or(BigDecimal::from(0)); let balance = it.to_balance_v2(&token_to_usd, &usd_to_fiat, &native_currency); total_fiat += balance.fiat_balance.parse::<f64>().unwrap_or(0.0); balance }) .collect::<Vec<Balance>>(); service_balances.sort_by(|b1, b2| { BigDecimal::from_str(&b2.fiat_balance) .unwrap() .partial_cmp(&BigDecimal::from_str(&b1.fiat_balance).unwrap()) .unwrap_or(Ordering::Equal) }); Ok(Balances { fiat_total: total_fiat.to_string(), items: service_balances, }) } async fn get_token_prices( context: &RequestContext, info_provider: &impl InfoProvider, backend_balances: &Vec<BalanceDto>, ) -> Vec<TokenPrice> { let token_addresses: Vec<String> = backend_balances .iter() .map(|balance| { balance .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()) }) .collect(); return stream::iter(token_addresses) .map(|token_address| get_token_usd_rate(context, token_address, info_provider)) .buffer_unordered(concurrent_balance_token_requests()) .filter_map(|t| async move { match t { Ok(token_price) => Some(token_price), Err(_) => None, } }) .collect() .await; } async fn get_token_usd_rate( context: &RequestContext, token_address: String, info_provider: &impl InfoProvider, ) -> ApiResult<TokenPrice> { let url = core_uri!(info_provider, "/v1/tokens/{}/prices/usd/", token_address)?; let body = RequestCached::new_from_context(url, context) .cache_duration(token_price_cache_duration()) .execute() .await?; let response: BackendTokenPrice = serde_json::from_str(&body)?; return Ok(TokenPrice { address: token_address.to_string(), fiat_code: response.fiat_code, fiat_price: response.fiat_price, timestamp: response.timestamp, }); }
use std::cmp::Ordering; use std::str::FromStr; use bigdecimal::BigDecimal; use rocket::futures::{stream, StreamExt}; use crate::cache::cache_operations::RequestCached; use crate::common::models::backend::balances_v2::Balance as BalanceDto; use crate::common::models::backend::balances_v2::TokenPrice as BackendTokenPrice; use crate::common::models::backend::chains::NativeCurrency; use crate::config::{ balances_core_request_cache_duration, balances_request_timeout, concurrent_balance_token_requests, token_price_cache_duration, }; use crate::providers::fiat::FiatInfoProvider; use crate::providers::info::{DefaultInfoProvider, InfoProvider}; use crate::routes::balances::models::{Balance, Balances, TokenPrice}; use crate::utils::context::RequestContext; use crate::utils::errors::ApiResult; pub async fn balances( context: &RequestContext, chain_id: &str, safe_address: &str, fiat: &str, trusted: bool, exclude_spam: bool, ) -> ApiResult<Balances> { let info_provider = DefaultInfoProvider::new(chain_id, context); let fiat_info_provider = FiatInfoProvider::new(context); let url = core_uri!( info_provider, "/v1/safes/{}/balances/?trusted={}&exclude_spam={}", safe_address, trusted, exclude_spam )?; let body = RequestCached::new_from_context(url, context) .cache_duration(balances_core_request_cache_duration()) .request_timeout(balances_request_timeout()) .execute() .await?; let backend_balances: Vec<BalanceDto> = serde_json::from_str(&body)?; let usd_to_fiat = fiat_info_provider .exchange_usd_to(fiat) .await .unwrap_or(BigDecimal::from(0)); let native_currency: NativeCurrency = info_provider.chain_info().await?.native_currency; let mut total_fiat = 0.0; let token_prices: Vec<TokenPrice> = get_token_prices(context, &info_provider, &backend_balances).await; let mut service_balances: Vec<Balance> = backend_balances .iter() .map(|it| { let token_address: String = it .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()); let token_price: 
Option<&TokenPrice> = token_prices .iter() .find(|&token_price| token_price.address == token_address); let token_to_usd: BigDecimal = token_price .and_then(|t| Some(t.fiat_price.to_owned())) .unwrap_or(BigDecimal::from(0)); let balance = it.to_balance_v2(&token_to_usd, &usd_to_fiat, &native_currency); total_fiat += balance.fiat_balance.parse::<f64>().unwrap_or(0.0); balance }) .collect::<Vec<Balance>>(); service_balances.sort_by(|b1, b2| { BigDecimal::from_str(&b2.fiat_balance) .unwrap() .partial_cmp(&BigDecimal::from_str(&b1.fiat_balance).unwrap()) .unwrap_or(Ordering::Equal) }); Ok(Balances { fiat_total: total_fiat.to_string(), items: service_balances, }) } async fn get_token_prices( context: &RequestContext, info_provider: &impl InfoProvider, backend_balances: &Vec<BalanceDto>, ) -> Vec<TokenPrice> { let token_addresses: Vec<String> = backend_balances .iter() .map(|balance| { balance .token_address .to_owned() .unwrap_or("0x0000000000000000000000000000000000000000".to_string()) }) .collect(); return stream::iter(token_addresses) .map(|token_address| get_token_usd_rate(context, token_address, info_provider)) .buffer_unordered(concurrent_balance_token_requests()) .filter_map(|t| async move { match t { Ok(token_price) => Some(token_price), Err(_) => None, } }) .collect() .await; } async fn get_token_usd_rate( context: &RequestContext, token_address: String, info_provider: &impl InfoProvider, ) -> ApiResult<TokenPrice> { let url = core_uri!(info_provider, "/v1/tokens/{}/prices/usd/", token_address)?; let body = RequestCached::new_from_context(url, context) .cache_duration(token_price_cache_duration()) .execute() .await?; let response: BackendTokenPrice = serde_json::from_str(&body)?; return
; }
Ok(TokenPrice { address: token_address.to_string(), fiat_code: response.fiat_code, fiat_price: response.fiat_price, timestamp: response.timestamp, })
call_expression
[ { "content": "pub fn build_manifest_url(url: &str) -> ApiResult<String> {\n\n let mut url_parts = Url::parse(url).or(Err(api_error!(\"Not a valid Url\")))?;\n\n\n\n if !url_parts.scheme().starts_with(\"http\") {\n\n Err(api_error!(\"Invalid scheme\"))\n\n } else if url_parts.host_str().is_none()...
Rust
editor/src/inspector/handlers/collider.rs
thomasmatecki/rg3d
e1958b6615ae0a826e83614e8df45a1a6f821f82
use crate::{make_command, physics::Collider, scene::commands::physics::*, SceneCommand}; use rg3d::{ core::pool::Handle, gui::inspector::{FieldKind, PropertyChanged}, physics3d::desc::*, }; use std::any::TypeId; pub fn handle_collider_property_changed( args: &PropertyChanged, handle: Handle<Collider>, collider: &Collider, ) -> Option<SceneCommand> { match args.value { FieldKind::Object(ref value) => match args.name.as_ref() { Collider::FRICTION => { make_command!(SetColliderFrictionCommand, handle, value) } Collider::RESTITUTION => { make_command!(SetColliderRestitutionCommand, handle, value) } Collider::IS_SENSOR => { make_command!(SetColliderIsSensorCommand, handle, value) } Collider::DENSITY => { make_command!(SetColliderDensityCommand, handle, value) } Collider::TRANSLATION => { make_command!(SetColliderPositionCommand, handle, value) } Collider::ROTATION => { make_command!(SetColliderRotationCommand, handle, value) } _ => None, }, FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() { Collider::COLLISION_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderCollisionGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderCollisionGroupsFilterCommand, handle, value) } _ => None, }, _ => None, }, Collider::SOLVER_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderSolverGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderSolverGroupsFilterCommand, handle, value) } _ => None, }, _ => None, }, Collider::SHAPE => { if inner_property.owner_type_id == TypeId::of::<CuboidDesc>() { handle_cuboid_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<BallDesc>() { 
handle_ball_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CylinderDesc>() { handle_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<RoundCylinderDesc>() { handle_round_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<ConeDesc>() { handle_cone_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CapsuleDesc>() { handle_capsule_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<SegmentDesc>() { handle_segment_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<TriangleDesc>() { handle_triangle_desc_property_changed(handle, collider, inner_property) } else { None } } _ => None, }, _ => None, } } fn handle_ball_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Ball(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { BallDesc::RADIUS => make_command!(SetBallRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_cuboid_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cuboid(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CuboidDesc::HALF_EXTENTS => { make_command!(SetCuboidHalfExtentsCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> 
{ if let ColliderShapeDesc::Cylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CylinderDesc::HALF_HEIGHT => { make_command!(SetCylinderHalfHeightCommand, handle, value) } CylinderDesc::RADIUS => { make_command!(SetCylinderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_round_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::RoundCylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { RoundCylinderDesc::HALF_HEIGHT => { make_command!(SetRoundCylinderHalfHeightCommand, handle, value) } RoundCylinderDesc::RADIUS => { make_command!(SetRoundCylinderRadiusCommand, handle, value) } RoundCylinderDesc::BORDER_RADIUS => { make_command!(SetRoundCylinderBorderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cone_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cone(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { ConeDesc::HALF_HEIGHT => { make_command!(SetConeHalfHeightCommand, handle, value) } ConeDesc::RADIUS => make_command!(SetConeRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_capsule_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Capsule(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CapsuleDesc::BEGIN => make_command!(SetCapsuleBeginCommand, handle, value), CapsuleDesc::END => make_command!(SetCapsuleEndCommand, 
handle, value), CapsuleDesc::RADIUS => { make_command!(SetCapsuleRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_segment_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Segment(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { SegmentDesc::BEGIN => make_command!(SetSegmentBeginCommand, handle, value), SegmentDesc::END => make_command!(SetSegmentEndCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_triangle_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Triangle(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { TriangleDesc::A => make_command!(SetTriangleACommand, handle, value), TriangleDesc::B => make_command!(SetTriangleBCommand, handle, value), TriangleDesc::C => make_command!(SetTriangleCCommand, handle, value), _ => None, }, _ => None, } } else { None } }
use crate::{make_command, physics::Collider, scene::commands::physics::*, SceneCommand}; use rg3d::{ core::pool::Handle, gui::inspector::{FieldKind, PropertyChanged}, physics3d::desc::*, }; use std::any::TypeId; pub fn handle_collider_property_changed( args: &PropertyChanged, handle: Handle<Collider>, collider: &Collider, ) -> Option<SceneCommand> { match args.value { FieldKind::Object(ref value) => match args.name.as_ref() { Collider::FRICTION => { make_command!(SetColliderFrictionCommand, handle, value) } Collider::RESTITUTION => { make_command!(SetColliderRestitutionCommand, handle, value) } Collider::IS_SENSOR => { make_command!(SetColliderIsSensorCommand, handle, value) } Collider::DENSITY => { make_command!(SetColliderDensityCommand, handle, value) } Collider::TRANSLATION => { make_command!(SetColliderPositionCommand, handle, value) } Collider::ROTATION => { make_command!(SetColliderRotationCommand, handle, value) } _ => None, }, FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() { Collider::COLLISION_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderCollisionGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderCollisionGroupsFilterCommand, handle, value) } _ => None, }, _ => None, }, Collider::SOLVER_GROUPS => match inner_property.value { FieldKind::Object(ref value) => match inner_property.name.as_ref() { InteractionGroupsDesc::MEMBERSHIPS => { make_command!(SetColliderSolverGroupsMembershipsCommand, handle, value) } InteractionGroupsDesc::FILTER => { make_command!(SetColliderSolverGroupsFilterCommand, handle, value) } _ =>
fn handle_ball_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Ball(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { BallDesc::RADIUS => make_command!(SetBallRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_cuboid_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cuboid(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CuboidDesc::HALF_EXTENTS => { make_command!(SetCuboidHalfExtentsCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CylinderDesc::HALF_HEIGHT => { make_command!(SetCylinderHalfHeightCommand, handle, value) } CylinderDesc::RADIUS => { make_command!(SetCylinderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_round_cylinder_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::RoundCylinder(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { RoundCylinderDesc::HALF_HEIGHT => { make_command!(SetRoundCylinderHalfHeightCommand, handle, value) } RoundCylinderDesc::RADIUS => { make_command!(SetRoundCylinderRadiusCommand, handle, value) } RoundCylinderDesc::BORDER_RADIUS => { 
make_command!(SetRoundCylinderBorderRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_cone_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Cone(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { ConeDesc::HALF_HEIGHT => { make_command!(SetConeHalfHeightCommand, handle, value) } ConeDesc::RADIUS => make_command!(SetConeRadiusCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_capsule_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Capsule(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { CapsuleDesc::BEGIN => make_command!(SetCapsuleBeginCommand, handle, value), CapsuleDesc::END => make_command!(SetCapsuleEndCommand, handle, value), CapsuleDesc::RADIUS => { make_command!(SetCapsuleRadiusCommand, handle, value) } _ => None, }, _ => None, } } else { None } } fn handle_segment_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Segment(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match property_changed.name.as_ref() { SegmentDesc::BEGIN => make_command!(SetSegmentBeginCommand, handle, value), SegmentDesc::END => make_command!(SetSegmentEndCommand, handle, value), _ => None, }, _ => None, } } else { None } } fn handle_triangle_desc_property_changed( handle: Handle<Collider>, collider: &Collider, property_changed: &PropertyChanged, ) -> Option<SceneCommand> { if let ColliderShapeDesc::Triangle(_) = collider.shape { match property_changed.value { FieldKind::Object(ref value) => match 
property_changed.name.as_ref() { TriangleDesc::A => make_command!(SetTriangleACommand, handle, value), TriangleDesc::B => make_command!(SetTriangleBCommand, handle, value), TriangleDesc::C => make_command!(SetTriangleCCommand, handle, value), _ => None, }, _ => None, } } else { None } }
None, }, _ => None, }, Collider::SHAPE => { if inner_property.owner_type_id == TypeId::of::<CuboidDesc>() { handle_cuboid_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<BallDesc>() { handle_ball_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CylinderDesc>() { handle_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<RoundCylinderDesc>() { handle_round_cylinder_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<ConeDesc>() { handle_cone_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<CapsuleDesc>() { handle_capsule_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<SegmentDesc>() { handle_segment_desc_property_changed(handle, collider, inner_property) } else if inner_property.owner_type_id == TypeId::of::<TriangleDesc>() { handle_triangle_desc_property_changed(handle, collider, inner_property) } else { None } } _ => None, }, _ => None, } }
function_block-function_prefix_line
[ { "content": "/// Returns a list of `pub const [VARIANT_]FIELD: &'static str = \"key_value\"`;\n\npub fn quote_prop_keys(ty_args: &args::TypeArgs) -> TokenStream2 {\n\n let mut prop_idents = Vec::new();\n\n let mut prop_names = Vec::new();\n\n\n\n match &ty_args.data {\n\n ast::Data::Struct(fiel...
Rust
der/src/tag.rs
cipepser/utils
e96656b6b9d4e9313816316f4f287a1d2c04efcc
use crate::{Decodable, Decoder, Encodable, Encoder, Error, ErrorKind, Length, Result}; use core::{convert::TryFrom, fmt}; const CONSTRUCTED_FLAG: u8 = 0b100000; const CONTEXT_SPECIFIC_FLAG: u8 = 0b10000000; #[derive(Copy, Clone, Eq, PartialEq)] #[allow(clippy::identity_op)] #[non_exhaustive] #[repr(u8)] pub enum Tag { Boolean = 0x01, Integer = 0x02, BitString = 0x03, OctetString = 0x04, Null = 0x05, ObjectIdentifier = 0x06, Sequence = 0x10 | CONSTRUCTED_FLAG, ContextSpecific0 = 0 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific1 = 1 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific2 = 2 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific3 = 3 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, } impl TryFrom<u8> for Tag { type Error = Error; fn try_from(byte: u8) -> Result<Tag> { match byte { 0x01 => Ok(Tag::Boolean), 0x02 => Ok(Tag::Integer), 0x03 => Ok(Tag::BitString), 0x04 => Ok(Tag::OctetString), 0x05 => Ok(Tag::Null), 0x06 => Ok(Tag::ObjectIdentifier), 0x30 => Ok(Tag::Sequence), 0xA0 => Ok(Tag::ContextSpecific0), 0xA1 => Ok(Tag::ContextSpecific1), 0xA2 => Ok(Tag::ContextSpecific2), 0xA3 => Ok(Tag::ContextSpecific3), _ => Err(ErrorKind::UnknownTag { byte }.into()), } } } impl Tag { pub fn assert_eq(self, expected: Tag) -> Result<Tag> { if self == expected { Ok(self) } else { Err(ErrorKind::UnexpectedTag { expected: Some(expected), actual: self, } .into()) } } pub fn type_name(self) -> &'static str { match self { Self::Boolean => "BOOLEAN", Self::Integer => "INTEGER", Self::BitString => "BIT STRING", Self::OctetString => "OCTET STRING", Self::Null => "NULL", Self::ObjectIdentifier => "OBJECT IDENTIFIER", Self::Sequence => "SEQUENCE", Self::ContextSpecific0 => "Context Specific 0", Self::ContextSpecific1 => "Context Specific 1", Self::ContextSpecific2 => "Context Specific 2", Self::ContextSpecific3 => "Context Specific 3", } } } impl Decodable<'_> for Tag { fn decode(decoder: &mut Decoder<'_>) -> Result<Self> { 
decoder.byte().and_then(Self::try_from) } } impl Encodable for Tag { fn encoded_len(&self) -> Result<Length> { Ok(1u8.into()) } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { encoder.byte(*self as u8) } } impl fmt::Display for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.type_name()) } } impl fmt::Debug for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Tag(0x{:02x}: {})", *self as u8, self.type_name()) } }
use crate::{Decodable, Decoder, Encodable, Encoder, Error, ErrorKind, Length, Result}; use core::{convert::TryFrom, fmt}; const CONSTRUCTED_FLAG: u8 = 0b100000; const CONTEXT_SPECIFIC_FLAG: u8 = 0b10000000; #[derive(Copy, Clone, Eq, PartialEq)] #[allow(clippy::identity_op)] #[non_exhaustive] #[repr(u8)] pub enum Tag { Boolean = 0x01, Integer = 0x02, BitString = 0x03, OctetString = 0x04, Null = 0x05, ObjectIdentifier = 0x06, Sequence = 0x10 | CONSTRUCTED_FLAG, ContextSpecific0 = 0 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific1 = 1 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific2 = 2 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, ContextSpecific3 = 3 | CONTEXT_SPECIFIC_FLAG | CONSTRUCTED_FLAG, } impl TryFrom<u8> for Tag { type Error = Error; fn try_from(byte: u8) -> Result<Tag> { match byte { 0x01 => Ok(Tag::Boolean), 0x02 => Ok(Tag::Integer), 0x03 => Ok(Tag::BitString), 0x04 => Ok(Tag::OctetString), 0x05 => Ok(Tag::Null), 0x06 => Ok(Tag::ObjectIdentifier), 0x30 => Ok(Tag::Sequence), 0xA0 => Ok(Tag::ContextSpecific0), 0xA1 => Ok(Tag::ContextSpecific1), 0xA2 => Ok(Tag::ContextSpecific2), 0xA3 => Ok(Tag::ContextSpecific3), _ => Err(ErrorKind::UnknownTag { byte }.into()), } } } impl Tag {
pub fn type_name(self) -> &'static str { match self { Self::Boolean => "BOOLEAN", Self::Integer => "INTEGER", Self::BitString => "BIT STRING", Self::OctetString => "OCTET STRING", Self::Null => "NULL", Self::ObjectIdentifier => "OBJECT IDENTIFIER", Self::Sequence => "SEQUENCE", Self::ContextSpecific0 => "Context Specific 0", Self::ContextSpecific1 => "Context Specific 1", Self::ContextSpecific2 => "Context Specific 2", Self::ContextSpecific3 => "Context Specific 3", } } } impl Decodable<'_> for Tag { fn decode(decoder: &mut Decoder<'_>) -> Result<Self> { decoder.byte().and_then(Self::try_from) } } impl Encodable for Tag { fn encoded_len(&self) -> Result<Length> { Ok(1u8.into()) } fn encode(&self, encoder: &mut Encoder<'_>) -> Result<()> { encoder.byte(*self as u8) } } impl fmt::Display for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.type_name()) } } impl fmt::Debug for Tag { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Tag(0x{:02x}: {})", *self as u8, self.type_name()) } }
pub fn assert_eq(self, expected: Tag) -> Result<Tag> { if self == expected { Ok(self) } else { Err(ErrorKind::UnexpectedTag { expected: Some(expected), actual: self, } .into()) } }
function_block-full_function
[ { "content": "/// Obtain the length of an ASN.1 `SEQUENCE` of [`Encodable`] values when\n\n/// serialized as ASN.1 DER, including the `SEQUENCE` tag and length prefix.\n\npub fn encoded_len(encodables: &[&dyn Encodable]) -> Result<Length> {\n\n let inner_len = encoded_len_inner(encodables)?;\n\n Header::n...
Rust
src/xpath/tokenizer/mod.rs
James-LG/Skyscraper
e6fcecc20762d5c4e5d6d22d77cb1da083dd5871
mod helpers; mod tokens; use crate::vecpointer::VecPointerRef; pub use tokens::Token; use thiserror::Error; #[derive(Error, Debug)] pub enum LexError {} pub fn lex(text: &str) -> Result<Vec<Token>, LexError> { let mut symbols: Vec<Token> = Vec::new(); let chars: Vec<char> = text.chars().collect(); let mut pointer = VecPointerRef::new(&chars); while pointer.has_next() { if let Some(s) = helpers::is_double_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_number(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_wildcard(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_assignment_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_at_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_add_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_minus_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_greater_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_less_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_colon(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_identifier(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_text(&mut pointer) { symbols.push(s); } else { if let Some(c) = pointer.current() { if !c.is_whitespace() { eprintln!("Unknown XPath 
symbol {}", c); } } pointer.next(); } } Ok(symbols) } #[cfg(test)] mod tests { use super::*; #[test] fn lex_works1() { let text = "//bookstore/book[1]/page[last()-1]"; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket, Token::Number(1.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("page")), Token::OpenSquareBracket, Token::Identifier(String::from("last")), Token::OpenBracket, Token::CloseBracket, Token::MinusSign, Token::Number(1.0), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works2() { let text = "/bookstore/book[price>35]/price"; let result = lex(text).unwrap(); let expected = vec![ Token::Slash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket, Token::Identifier(String::from("price")), Token::GreaterThanSign, Token::Number(35.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("price")), ]; assert_eq!(expected, result); } #[test] fn lex_works3() { let text = r###"//a[@hello="world"]"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("a")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works_alphanumeric_identifier() { let text = r###"//h1[@hello="world"]/h2"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("h2")), ]; assert_eq!(expected, result); } 
#[test] fn lex_works_double_colon() { let text = r###"//h1/parent::div"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::Slash, Token::Identifier(String::from("parent")), Token::DoubleColon, Token::Identifier(String::from("div")), ]; assert_eq!(expected, result); } }
mod helpers; mod tokens; use crate::vecpointer::VecPointerRef; pub use tokens::Token; use thiserror::Error; #[derive(Error, Debug)] pub enum LexError {} pub fn lex(text: &str) -> Result<Vec<Token>, LexError> { let mut symbols: Vec<Token> = Vec::new(); let chars: Vec<char> = text.chars().collect(); let mut pointer = VecPointerRef::new(&chars); while pointer.has_next() { if let Some(s) = helpers::is_double_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_slash(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_open_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_close_square_bracket(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_number(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_wildcard(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_dot(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_assignment_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_at_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_add_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_minus_sign(&mut pointer) { s
Token::Number(1.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("page")), Token::OpenSquareBracket, Token::Identifier(String::from("last")), Token::OpenBracket, Token::CloseBracket, Token::MinusSign, Token::Number(1.0), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works2() { let text = "/bookstore/book[price>35]/price"; let result = lex(text).unwrap(); let expected = vec![ Token::Slash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket, Token::Identifier(String::from("price")), Token::GreaterThanSign, Token::Number(35.0), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("price")), ]; assert_eq!(expected, result); } #[test] fn lex_works3() { let text = r###"//a[@hello="world"]"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("a")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, ]; assert_eq!(expected, result); } #[test] fn lex_works_alphanumeric_identifier() { let text = r###"//h1[@hello="world"]/h2"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::OpenSquareBracket, Token::AtSign, Token::Identifier(String::from("hello")), Token::AssignmentSign, Token::Text(String::from("world")), Token::CloseSquareBracket, Token::Slash, Token::Identifier(String::from("h2")), ]; assert_eq!(expected, result); } #[test] fn lex_works_double_colon() { let text = r###"//h1/parent::div"###; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("h1")), Token::Slash, Token::Identifier(String::from("parent")), Token::DoubleColon, Token::Identifier(String::from("div")), ]; assert_eq!(expected, result); } }
ymbols.push(s); } else if let Some(s) = helpers::is_greater_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_less_than_sign(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_double_colon(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_identifier(&mut pointer) { symbols.push(s); } else if let Some(s) = helpers::is_text(&mut pointer) { symbols.push(s); } else { if let Some(c) = pointer.current() { if !c.is_whitespace() { eprintln!("Unknown XPath symbol {}", c); } } pointer.next(); } } Ok(symbols) } #[cfg(test)] mod tests { use super::*; #[test] fn lex_works1() { let text = "//bookstore/book[1]/page[last()-1]"; let result = lex(text).unwrap(); let expected = vec![ Token::DoubleSlash, Token::Identifier(String::from("bookstore")), Token::Slash, Token::Identifier(String::from("book")), Token::OpenSquareBracket,
random
[ { "content": "/// Checks if the [TextPointer](TextPointer) is currently pointing to a Number [Symbol](Symbol).\n\n/// If true it will move the text pointer to the next symbol, otherwise it will not change the pointer.\n\npub fn is_number(pointer: &mut VecPointerRef<char>) -> Option<Token> {\n\n if let Some(c...
Rust
src/graphics.rs
catid/rust_webgl_demo
b09490c0baba251009b8a9b9bfbf6040b4d43785
use stdweb::unstable::TryInto; use webgl_rendering_context::{ WebGLRenderingContext as WebGL, WebGLUniformLocation, WebGLBuffer, WebGLShader, WebGLProgram, }; use stdweb::web::{ IHtmlElement, IParentNode, document, TypedArray, }; use stdweb::web::html_element::CanvasElement; use glm::{Vec2, Vec3, Quat, Mat4}; use tools::js_log; /* WebGL Context with Right-Handed Projection Matrix */ pub struct Context { canvas: CanvasElement, webgl: WebGL, width: i32, height: i32, projection_matrix: Mat4, } impl Context { pub fn new(element_id: &str) -> Self { let canvas : CanvasElement = document().query_selector(&element_id).unwrap().unwrap().try_into().unwrap(); let webgl : WebGL = canvas.get_context().unwrap(); webgl.enable(WebGL::CULL_FACE); webgl.front_face(WebGL::CCW); webgl.cull_face(WebGL::BACK); webgl.enable(WebGL::DEPTH_TEST); webgl.depth_func(WebGL::LESS); Self { canvas: canvas, webgl: webgl, width: 0, height: 0, projection_matrix: Mat4::identity(), } } pub fn UpdateViewport(&mut self) { let width = self.canvas.offset_width(); let height = self.canvas.offset_height(); if width != self.width || height != self.height { self.canvas.set_width(width as u32); self.canvas.set_height(height as u32); self.webgl.viewport(0, 0, width, height); const fov : f32 = 60.; const near : f32 = 2.; const far : f32 = 20.; self.projection_matrix = glm::perspective_fov_rh_zo( fov.to_radians(), width as f32, height as f32, near, far, ); self.width = width; self.height = height; } } pub fn Clear(&self) { self.webgl.clear_color(0.0, 0.0, 0.0, 1.0); self.webgl.clear_depth(1.0); self.webgl.clear(WebGL::COLOR_BUFFER_BIT | WebGL::DEPTH_BUFFER_BIT); } } /* WebGL Shader Program */ pub struct ShaderProgram { fs: WebGLShader, vs: WebGLShader, webGlProgram: WebGLProgram, } impl ShaderProgram { pub fn new(context: &Context, vsCode: &str, fsCode: &str) -> Self { let webgl = &context.webgl; let vs = webgl.create_shader(WebGL::VERTEX_SHADER).unwrap(); webgl.shader_source(&vs, &vsCode); 
webgl.compile_shader(&vs); let vs_success : bool = webgl.get_shader_parameter(&vs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !vs_success { let info = webgl.get_shader_info_log(&vs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let fs = webgl.create_shader(WebGL::FRAGMENT_SHADER).unwrap(); webgl.shader_source(&fs, &fsCode); webgl.compile_shader(&fs); let fs_success : bool = webgl.get_shader_parameter(&fs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !fs_success { let info = webgl.get_shader_info_log(&fs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let program = webgl.create_program().unwrap(); webgl.attach_shader(&program, &vs); webgl.attach_shader(&program, &fs); webgl.link_program(&program); let success : bool = webgl.get_program_parameter(&program, WebGL::LINK_STATUS).try_into().unwrap(); if !success { let info = webgl.get_program_info_log(&program); js_log(format!("LinkProgram failed: {}", info.unwrap())); } Self { fs: fs, vs: vs, webGlProgram: program, } } fn GetUniform(&self, context: &Context, name: &str) -> WebGLUniformLocation { context.webgl.get_uniform_location(&self.webGlProgram, name).unwrap() } fn GetAttrib(&self, context: &Context, name: &str) -> u32 { context.webgl.get_attrib_location(&self.webGlProgram, name) as u32 } } /* Cube Renderer */ pub struct Cube { program: ShaderProgram, unifMvpMatrix: WebGLUniformLocation, attrVertexPosition: u32, attrVertexColor: u32, attrVertexNormal: u32, positionVbo: WebGLBuffer, colorVbo: WebGLBuffer, normalVbo: WebGLBuffer, tri_count: i32, } impl Cube { pub fn new(context: &Context) -> Self { let webgl = &context.webgl; let vsCode = include_str!("shaders/flat_vs.glsl"); let fsCode = include_str!("shaders/flat_fs.glsl"); let program = ShaderProgram::new(context, vsCode, fsCode); /* Corner vertices of a cube, oriented x+right, y+top, z+up, centered at 0,0,0, scaled to span from -1 to +1 on each axis. Vertex and side names are based on a perspective looking down. 
*/ let corners = vec![ /* Down-z side of cube */ -1.,-1.,-1., /* LL */ 1.,-1.,-1., /* LR */ 1., 1.,-1., /* UR */ -1., 1.,-1., /* UL */ /* Up+z side of cube */ -1.,-1., 1., /* LL */ 1.,-1., 1., /* LR */ 1., 1., 1., /* UR */ -1., 1., 1., /* UL */ ]; let triIndices : Vec<u8> = vec![ /* Down-z */ 2, 1, 0, 0, 3, 2, /* Up+z */ 4, 5, 6, 6, 7, 4, /* Bottom-y */ 0, 5, 4, 0, 1, 5, /* Top+y */ 3, 7, 6, 3, 6, 2, /* Left-x */ 0, 4, 3, 4, 7, 3, /* Right+x */ 2, 6, 5, 2, 5, 1, ]; let triColors : Vec<u8> = vec![ /* Down-z */ 255,0,200, 255,0,255, /* Up+z */ 200,200,200, 200,200,255, /* Bottom-y */ 100,200,100, 100,255,100, /* Top+y */ 200,200,100, 200,255,100, /* Left-x */ 200,0,0, 255,0,0, /* Right+x */ 0,200,0, 0,255,0, ]; let tri_count = triIndices.len() / 3; let mut vertices = Vec::with_capacity(tri_count * 3); let mut colors = Vec::with_capacity(tri_count * 3); let mut normals = Vec::with_capacity(tri_count * 3); for i in 0..tri_count { let triIndicesOffset = i * 3; let mut triVertices : [Vec3; 3] = unsafe { std::mem::uninitialized() }; for j in 0..3 { let vertexIndex = triIndices[triIndicesOffset + j]; let cornersOffset = vertexIndex as usize * 3; let scale = 1.0f32; let x = corners[cornersOffset] * scale; let y = corners[cornersOffset + 1] * scale; let z = corners[cornersOffset + 2] * scale; triVertices[j] = glm::vec3(x, y, z); vertices.push(x); vertices.push(y); vertices.push(z); } let normal = glm::triangle_normal( &triVertices[0], &triVertices[1], &triVertices[2] ); for _j in 0..3 { normals.push(normal.x); normals.push(normal.y); normals.push(normal.z); } let colorOffset = i as usize * 3; let r = triColors[colorOffset]; let g = triColors[colorOffset + 1]; let b = triColors[colorOffset + 2]; for _j in 0..3 { colors.push(r); colors.push(g); colors.push(b); } } let webVertices = TypedArray::<f32>::from(vertices.as_slice()).buffer(); let webColors = TypedArray::<u8>::from(colors.as_slice()).buffer(); let webNormals = TypedArray::<f32>::from(normals.as_slice()).buffer(); let 
positionVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&positionVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webVertices), WebGL::STATIC_DRAW); let colorVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&colorVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webColors), WebGL::STATIC_DRAW); let normalVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&normalVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webNormals), WebGL::STATIC_DRAW); let unifMvpMatrix = program.GetUniform(&context, "MVPMatrix"); let attrVertexPosition = program.GetAttrib(&context, "VertexPosition"); let attrVertexColor = program.GetAttrib(&context, "VertexColor"); let attrVertexNormal = program.GetAttrib(&context, "VertexNormal"); Self { program: program, unifMvpMatrix: unifMvpMatrix, attrVertexPosition: attrVertexPosition, attrVertexColor: attrVertexColor, attrVertexNormal: attrVertexNormal, positionVbo: positionVbo, colorVbo: colorVbo, normalVbo: normalVbo, tri_count: tri_count as i32, } } pub fn DrawMultiple(&mut self, context: &Context, mvp_matrices: &Vec<Mat4>) { let webgl = &context.webgl; webgl.use_program(Some(&self.program.webGlProgram)); if self.attrVertexPosition != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.positionVbo)); webgl.vertex_attrib_pointer(self.attrVertexPosition, 3, WebGL::FLOAT, false, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexPosition); } if self.attrVertexColor != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.colorVbo)); webgl.vertex_attrib_pointer(self.attrVertexColor, 3, WebGL::UNSIGNED_BYTE, true, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexColor); } if self.attrVertexNormal != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.normalVbo)); webgl.vertex_attrib_pointer(self.attrVertexNormal, 3, WebGL::FLOAT, false, 0, 0) ; 
webgl.enable_vertex_attrib_array(self.attrVertexNormal); } for mat in mvp_matrices { webgl.uniform_matrix4fv(Some(&self.unifMvpMatrix), false, mat.as_slice()); webgl.draw_arrays(WebGL::TRIANGLES, 0, self.tri_count * 3); } } pub fn Draw(&mut self, context: &Context, mvp_matrix: Mat4) { let matrices = vec![ mvp_matrix ]; self.DrawMultiple(context, &matrices); } } /* Graphics Subsystem State */ pub struct GraphicsState { context: Context, cube: Cube, positions: Vec<Vec2>, } impl GraphicsState { pub fn new() -> Self { let context = Context::new("#canvas"); let cube = Cube::new(&context); let positions = vec![ glm::vec2(-2.0f32, 0.0f32), glm::vec2(0.0f32, 0.0f32), glm::vec2(2.0f32, 0.0f32), glm::vec2(0.0f32, -2.0f32), glm::vec2(0.0f32, 2.0f32), ]; Self { context: context, cube: cube, positions: positions, } } pub fn RenderScene(&mut self, nowSeconds: f64) { self.context.UpdateViewport(); self.context.Clear(); let eye = glm::vec3(0.0, 0.0, -10.0); let center = glm::vec3(0.0, 0.0, 0.0); let up = glm::vec3(0.0, 1.0, 0.0); let view_matrix = glm::look_at_rh( &eye, &center, &up, ); let proj_view_matrix = self.context.projection_matrix * view_matrix; let angle = glm::modf(nowSeconds / 1000.0f64, glm::two_pi()) as f32; let mut mvp_matrices = vec![]; for position in &self.positions { /* This will scale the whole object, rotate the whole object. Translation is applied to each render within the object frame. mvp = view * translate(rm * sm) Translating the scale matrix will scale the whole thing down. Scaling the translate matrix will scale each object down. 
*/ let translate_matrix = glm::translate(&glm::identity(), &glm::vec3(position.x, position.y, 0.0f32) ); let scale_matrix = glm::scale( &translate_matrix, &glm::vec3(0.5, 0.5, 0.5) ); let quat_angle = &nalgebra_glm::quat_angle_axis( angle, &glm::vec3(1.0, 1.0, 1.0) ); let rotate_matrix = scale_matrix * glm::quat_to_mat4(quat_angle); let mvp = proj_view_matrix * rotate_matrix; mvp_matrices.push(mvp); } self.cube.DrawMultiple(&self.context, &mvp_matrices); } }
use stdweb::unstable::TryInto; use webgl_rendering_context::{ WebGLRenderingContext as WebGL, WebGLUniformLocation, WebGLBuffer, WebGLShader, WebGLProgram, }; use stdweb::web::{ IHtmlElement, IParentNode, document, TypedArray, }; use stdweb::web::html_element::CanvasElement; use glm::{Vec2, Vec3, Quat, Mat4}; use tools::js_log; /* WebGL Context with Right-Handed Projection Matrix */ pub struct Context { canvas: CanvasElement, webgl: WebGL, width: i32, height: i32, projection_matrix: Mat4, } impl Context { pub fn new(element_id: &str) -> Self { let canvas : CanvasElement = document().query_selector(&element_id).unwrap().unwrap().try_into().unwrap(); let webgl : WebGL = canvas.get_context().unwrap(); webgl.enable(WebGL::CULL_FACE); webgl.front_face(WebGL::CCW); webgl.cull_face(WebGL::BACK); webgl.enable(WebGL::DEPTH_TEST); webgl.depth_func(WebGL::LESS); Self { canvas: canvas, webgl: webgl, width: 0, height: 0, projection_matrix: Mat4::identity(), } } pub fn UpdateViewport(&mut self) { let width = self.canvas.offset_width(); let height = self.canvas.offset_height(); if width != self.width || height != self.height { self.canvas.set_width(width as u32); self.canvas.set_height(height as u32); self.webgl.viewport(0, 0, width, height); const fov : f32 = 60.; const near : f32 = 2.; const far : f32 = 20.; self.projection_matrix = glm::perspective_fov_rh_zo( fov.to_radians(), width as f32, height as f32, near, far, ); self.width = width; self.height = height; } } pub fn Clear(&self) { self.webgl.clear_color(0.0, 0.0, 0.0, 1.0); self.webgl.clear_depth(1.0); self.webgl.clear(WebGL::COLOR_BUFFER_BIT | WebGL::DEPTH_BUFFER_BIT); } } /* WebGL Shader Program */ pub struct ShaderProgram { fs: WebGLShader, vs: WebGLShader, webGlProgram: WebGLProgram, } impl ShaderProgram { pub fn new(context: &Context, vsCode: &str, fsCode: &str) -> Self { let webgl = &context.webgl; let vs = webgl.create_shader(WebGL::VERTEX_SHADER).unwrap(); webgl.shader_source(&vs, &vsCode); 
webgl.compile_shader(&vs); let vs_success : bool = webgl.get_shader_parameter(&vs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !vs_success { let info = webgl.get_shader_info_log(&vs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let fs = webgl.create_shader(WebGL::FRAGMENT_SHADER).unwrap(); webgl.shader_source(&fs, &fsCode); webgl.compile_shader(&fs); let fs_success : bool = webgl.get_shader_parameter(&fs, WebGL::COMPILE_STATUS).try_into().unwrap(); if !fs_success { let info = webgl.get_shader_info_log(&fs); js_log(format!("CompileShader failed: {}", info.unwrap())); } let program = webgl.create_program().unwrap(); webgl.attach_shader(&program, &vs); webgl.attach_shader(&program, &fs); webgl.link_program(&program); let success : bool = webgl.get_program_parameter(&program, WebGL::LINK_STATUS).try_into().unwrap(); if !success { let info = webgl.get_program_info_log(&program); js_log(format!("LinkProgram failed: {}", info.unwrap())); } Self { fs: fs, vs: vs, webGlProgram: program, } } fn GetUniform(&self, context: &Context, name: &str) -> WebGLUniformLocation { context.webgl.get_uniform_location(&self.webGlProgram, name).unwrap() } fn GetAttrib(&self, context: &Context, name: &str) -> u32 { context.webgl.get_attrib_location(&self.webGlProgram, name) as u32 } } /* Cube Renderer */ pub struct Cube { program: ShaderProgram, unifMvpMatrix: WebGLUniformLocation, attrVertexPosition: u32, attrVertexColor: u32, attrVertexNormal: u32, positionVbo: WebGLBuffer, colorVbo: WebGLBuffer, normalVbo: WebGLBuffer, tri_count: i32, } impl Cube { pub fn new(context: &Context) -> Self { let webgl = &context.webgl; let vsCode = include_str!("shaders/flat_vs.glsl"); let fsCode = include_str!("shaders/flat_fs.glsl"); let program = ShaderProgram::new(context, vsCode, fsCode); /* Corner vertices of a cube, oriented x+right, y+top, z+up, centered at 0,0,0, scaled to span from -1 to +1 on each axis. Vertex and side names are based on a perspective looking down. 
*/ let corners = vec![ /* Down-z side of cube */ -1.,-1.,-1., /* LL */ 1.,-1.,-1., /* LR */ 1., 1.,-1., /* UR */ -1., 1.,-1., /* UL */ /* Up+z side of cube */ -1.,-1., 1., /* LL */ 1.,-1., 1., /* LR */ 1., 1., 1., /* UR */ -1., 1., 1., /* UL */ ]; let triIndices : Vec<u8> = vec![ /* Down-z */ 2, 1, 0, 0, 3, 2, /* Up+z */ 4, 5, 6, 6, 7, 4, /* Bottom-y */ 0, 5, 4, 0, 1, 5, /* Top+y */ 3, 7, 6, 3, 6, 2, /* Left-x */ 0, 4, 3, 4, 7, 3, /* Right+x */ 2, 6, 5, 2, 5, 1, ]; let triColors : Vec<u8> = vec![ /* Down-z */ 255,0,200, 255,0,255, /* Up+z */ 200,200,200, 200,200,255, /* Bottom-y */ 100,200,100, 100,255,100, /* Top+y */ 200,200,100, 200,255,100, /* Left-x */ 200,0,0, 255,0,0, /* Right+x */ 0,200,0, 0,255,0, ]; let tri_count = triIndices.len() / 3; let mut vertices = Vec::with_capacity(tri_count * 3); let mut colors = Vec::with_capacity(tri_count * 3); let mut normals = Vec::with_capacity(tri_count * 3); for i in 0..tri_count { let triIndicesOffset = i * 3; let mut triVertices : [Vec3; 3] = unsafe { std::mem::uninitialized() }; for j in 0..3 { let vertexIndex = triIndices[triIndicesOffset + j]; let cornersOffset = vertexIndex as usize * 3; let scale = 1.0f32; let x = corners[cornersOffset] * scale; let y = corners[cornersOffset + 1] * scale; let z = corners[cornersOffset + 2] * scale; triVertices[j] = glm::vec3(x, y, z); vertices.push(x); vertices.push(y); vertices.push(z); } let normal = glm::triangle_normal( &triVertices[0], &triVertices[1], &triVertices[2] ); for _j in 0..3 { normals.push(normal.x); normals.push(normal.y); normals.push(normal.z); } let colorOffset = i as usize * 3; let r = triColors[colorOffset]; let g = triColors[colorOffset + 1]; let b = triColors[colorOffset + 2]; for _j in 0..3 { colors.push(r); colors.push(g); colors.push(b); } } let webVertices = TypedArray::<f32>::from(vertices.as_slice()).buffer(); let webColors = TypedArray::<u8>::from(colors.as_slice()).buffer(); let webNormals = TypedArray::<f32>::from(normals.as_slice()).buffer(); let 
positionVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&positionVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webVertices), WebGL::STATIC_DRAW); let colorVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&colorVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webColors), WebGL::STATIC_DRAW); let normalVbo = webgl.create_buffer().unwrap(); webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&normalVbo)); webgl.buffer_data_1(WebGL::ARRAY_BUFFER, Some(&webNormals), WebGL::STATIC_DRAW); let unifMvpMatrix = program.GetUniform(&context, "MVPMatrix"); let attrVertexPosition = program.GetAttrib(&context, "VertexPosition"); let attrVertexColor = program.GetAttrib(&context, "VertexColor"); let attrVertexNormal = program.GetAttrib(&context, "VertexNormal"); Self { program: program, unifMvpMatrix: unifMvpMatrix, attrVertexPosition: attrVertexPosition, attrVertexColor: attrVertexColor, attrVertexNormal: attrVertexNormal, positionVbo: positionVbo, colorVbo: colorVbo, normalVbo: normalVbo, tri_count: tri_count as i32, } } pub fn DrawMultiple(&mut self, context: &Context, mvp_matrices: &Vec<Mat4>) { let webgl = &context.webgl; webgl.use_program(Some(&self.program.webGlProgram)); if self.attrVertexPosition != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.positionVbo)); webgl.vertex_attrib_pointer(self.attrVertexPosition, 3, WebGL::FLOAT, false, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexPosition); } if self.attrVertexColor != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.colorVbo)); webgl.vertex_attrib_pointer(self.attrVertexColor, 3, WebGL::UNSIGNED_BYTE, true, 0, 0) ; webgl.enable_vertex_attrib_array(self.attrVertexColor); } if self.attrVertexNormal != u32::max_value() { webgl.bind_buffer(WebGL::ARRAY_BUFFER, Some(&self.normalVbo)); webgl.vertex_attrib_pointer(self.attrVertexNormal, 3, WebGL::FLOAT, false, 0, 0) ; 
webgl.enable_vertex_attrib_array(self.attrVertexNormal); } for mat in mvp_matrices { webgl.uniform_matrix4fv(Some(&self.unifMvpMatrix), false, mat.as_slice()); webgl.draw_arrays(WebGL::TRIANGLES, 0, self.tri_count * 3); } } pub fn Draw(&mut self, context: &Context, mvp_matrix: Mat4) { let matrices = vec![ mvp_matrix ]; self.DrawMultiple(context, &matrices); } } /* Graphics Subsystem State */ pub struct GraphicsState { context: Context, cube: Cube, positions: Vec<Vec2>, } impl GraphicsState { pub fn new() -> Self { let context = Context::new("#canvas"); let cube = Cube::new(&context); let positions = vec![ glm::vec2(-2.0f32, 0.0f32), glm::vec2(0.0f32, 0.0f32), glm::vec2(2.0f32, 0.0f32), glm::vec2(0.0f32, -2.0f32), glm::vec2(0.0f32, 2.0f32), ]; Self { context: context, cube: cube, positions: positions, } }
}
pub fn RenderScene(&mut self, nowSeconds: f64) { self.context.UpdateViewport(); self.context.Clear(); let eye = glm::vec3(0.0, 0.0, -10.0); let center = glm::vec3(0.0, 0.0, 0.0); let up = glm::vec3(0.0, 1.0, 0.0); let view_matrix = glm::look_at_rh( &eye, &center, &up, ); let proj_view_matrix = self.context.projection_matrix * view_matrix; let angle = glm::modf(nowSeconds / 1000.0f64, glm::two_pi()) as f32; let mut mvp_matrices = vec![]; for position in &self.positions { /* This will scale the whole object, rotate the whole object. Translation is applied to each render within the object frame. mvp = view * translate(rm * sm) Translating the scale matrix will scale the whole thing down. Scaling the translate matrix will scale each object down. */ let translate_matrix = glm::translate(&glm::identity(), &glm::vec3(position.x, position.y, 0.0f32) ); let scale_matrix = glm::scale( &translate_matrix, &glm::vec3(0.5, 0.5, 0.5) ); let quat_angle = &nalgebra_glm::quat_angle_axis( angle, &glm::vec3(1.0, 1.0, 1.0) ); let rotate_matrix = scale_matrix * glm::quat_to_mat4(quat_angle); let mvp = proj_view_matrix * rotate_matrix; mvp_matrices.push(mvp); } self.cube.DrawMultiple(&self.context, &mvp_matrices); }
function_block-full_function
[ { "content": "#[js_export]\n\nfn js_ontouch(x: i32, y: i32, w: i32, h: i32) {\n\n let norm_x = x as f32 / w as f32;\n\n let norm_y = y as f32 / h as f32;\n\n // FIXME: Convert x, y to normalized coordinates between -1..1\n\n js_log(format!(\"Tap at {}, {}\", norm_x, norm_y));\n\n}\n\n\n\nimpl InputS...
Rust
crate/divvunspell/src/ffi/fbs/tokenizer.rs
divvun/divvunspell-swift
9231ffb655752c3c3ad646216abd9aff3ec02ef0
#![allow(dead_code, unused_imports)] use std::cmp::Ordering; use std::mem; extern crate flatbuffers; use self::flatbuffers::EndianScalar; pub enum IndexedWordOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct IndexedWord<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for IndexedWord<'a> { type Inner = IndexedWord<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> IndexedWord<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { IndexedWord { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args IndexedWordArgs<'args>, ) -> flatbuffers::WIPOffset<IndexedWord<'bldr>> { let mut builder = IndexedWordBuilder::new(_fbb); builder.add_index(args.index); if let Some(x) = args.value { builder.add_value(x); } builder.finish() } pub const VT_INDEX: flatbuffers::VOffsetT = 4; pub const VT_VALUE: flatbuffers::VOffsetT = 6; #[inline] pub fn index(&self) -> u64 { self._tab .get::<u64>(IndexedWord::VT_INDEX, Some(0)) .unwrap() } #[inline] pub fn value(&self) -> Option<&'a str> { self._tab .get::<flatbuffers::ForwardsUOffset<&str>>(IndexedWord::VT_VALUE, None) } } pub struct IndexedWordArgs<'a> { pub index: u64, pub value: Option<flatbuffers::WIPOffset<&'a str>>, } impl<'a> Default for IndexedWordArgs<'a> { #[inline] fn default() -> Self { IndexedWordArgs { index: 0, value: None, } } } pub struct IndexedWordBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> IndexedWordBuilder<'a, 'b> { #[inline] pub fn add_index(&mut self, index: u64) { self.fbb_.push_slot::<u64>(IndexedWord::VT_INDEX, index, 0); } #[inline] pub fn add_value(&mut self, value: flatbuffers::WIPOffset<&'b str>) { self.fbb_ 
.push_slot_always::<flatbuffers::WIPOffset<_>>(IndexedWord::VT_VALUE, value); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> IndexedWordBuilder<'a, 'b> { let start = _fbb.start_table(); IndexedWordBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<IndexedWord<'a>> { let o = self.fbb_.end_table(self.start_); flatbuffers::WIPOffset::new(o.value()) } } pub enum WordContextOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct WordContext<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for WordContext<'a> { type Inner = WordContext<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> WordContext<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { WordContext { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args WordContextArgs<'args>, ) -> flatbuffers::WIPOffset<WordContext<'bldr>> { let mut builder = WordContextBuilder::new(_fbb); if let Some(x) = args.second_after { builder.add_second_after(x); } if let Some(x) = args.first_after { builder.add_first_after(x); } if let Some(x) = args.second_before { builder.add_second_before(x); } if let Some(x) = args.first_before { builder.add_first_before(x); } if let Some(x) = args.current { builder.add_current(x); } builder.finish() } pub const VT_CURRENT: flatbuffers::VOffsetT = 4; pub const VT_FIRST_BEFORE: flatbuffers::VOffsetT = 6; pub const VT_SECOND_BEFORE: flatbuffers::VOffsetT = 8; pub const VT_FIRST_AFTER: flatbuffers::VOffsetT = 10; pub const VT_SECOND_AFTER: flatbuffers::VOffsetT = 12; #[inline] pub fn current(&self) -> IndexedWord<'a> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_CURRENT, None) .unwrap() } #[inline] pub fn first_before(&self) -> 
Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_FIRST_BEFORE, None, ) } #[inline] pub fn second_before(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_BEFORE, None, ) } #[inline] pub fn first_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_FIRST_AFTER, None) } #[inline] pub fn second_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_AFTER, None, ) } } pub struct WordContextArgs<'a> { pub current: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, } impl<'a> Default for WordContextArgs<'a> { #[inline] fn default() -> Self { WordContextArgs { current: None, first_before: None, second_before: None, first_after: None, second_after: None, } } } pub struct WordContextBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> WordContextBuilder<'a, 'b> { #[inline] pub fn add_current(&mut self, current: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_CURRENT, current, ); } #[inline] pub fn add_first_before(&mut self, first_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_BEFORE, first_before, ); } #[inline] pub fn add_second_before(&mut self, second_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ 
.push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_BEFORE, second_before, ); } #[inline] pub fn add_first_after(&mut self, first_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_AFTER, first_after, ); } #[inline] pub fn add_second_after(&mut self, second_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_AFTER, second_after, ); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WordContextBuilder<'a, 'b> { let start = _fbb.start_table(); WordContextBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<WordContext<'a>> { let o = self.fbb_.end_table(self.start_); self.fbb_.required(o, WordContext::VT_CURRENT, "current"); flatbuffers::WIPOffset::new(o.value()) } } #[inline] pub fn get_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_root::<WordContext<'a>>(buf) } #[inline] pub fn get_size_prefixed_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_size_prefixed_root::<WordContext<'a>>(buf) } #[inline] pub fn finish_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish(root, None); } #[inline] pub fn finish_size_prefixed_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish_size_prefixed(root, None); }
#![allow(dead_code, unused_imports)] use std::cmp::Ordering; use std::mem; extern crate flatbuffers; use self::flatbuffers::EndianScalar; pub enum IndexedWordOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct IndexedWord<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for IndexedWord<'a> { type Inner = IndexedWord<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> IndexedWord<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { IndexedWord { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args IndexedWordArgs<'args>, ) -> flatbuffers::WIPOffset<IndexedWord<'bldr>> { let mut builder = IndexedWordBuilder::new(_fbb); builder.add_index(args.index); if let Some(x) = args.value { builder.add_value(x); } builder.finish() } pub const VT_INDEX: flatbuffers::VOffsetT = 4; pub const VT_VALUE: flatbuffers::VOffsetT = 6; #[inline] pub fn index(&self) -> u64 { self._tab .get::<u64>(IndexedWord::VT_INDEX, Some(0)) .unwrap() } #[inline] pub fn value(&self) -> Option<&'a str> { self._tab .get::<flatbuffers::ForwardsUOffset<&str>>(IndexedWord::VT_VALUE, None) } } pub struct IndexedWordArgs<'a> { pub index: u64, pub value: Option<flatbuffers::WIPOffset<&'a str>>, } impl<'a> Default for IndexedWordArgs<'a> { #[inline] fn default() -> Self { IndexedWordArgs { index: 0, value: None, } } } pub struct IndexedWordBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> IndexedWordBuilder<'a, 'b> { #[inline] pub fn add_index(&mut self, index: u64) { self.fbb_.push_slot::<u64>(IndexedWord::VT_INDEX, index, 0); } #[inline] pub fn add_value(&mut self, value: flatbuffers::WIPOffset<&'b str>) { self.fbb_ 
.push_slot_always::<flatbuffers::WIPOffset<_>>(IndexedWord::VT_VALUE, value); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> IndexedWordBuilder<'a, 'b> { let start = _fbb.start_table(); IndexedWordBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<IndexedWord<'a>> { let o = self.fbb_.end_table(self.start_); flatbuffers::WIPOffset::new(o.value()) } } pub enum WordContextOffset {} #[derive(Copy, Clone, Debug, PartialEq)] pub struct WordContext<'a> { pub _tab: flatbuffers::Table<'a>, } impl<'a> flatbuffers::Follow<'a> for WordContext<'a> { type Inner = WordContext<'a>; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { Self { _tab: flatbuffers::Table { buf: buf, loc: loc }, } } } impl<'a> WordContext<'a> { #[inline] pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self { WordContext { _tab: table } } #[allow(unused_mut)] pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>( _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>, args: &'args WordContextArgs<'args>, ) -> flatbuffers::WIPOffset<WordContext<'bldr>> { let mut builder = WordContextBuilder::new(_fbb); if let Some(x) = args.second_after { builder.add_second_after(x); } if let Some(x) = args.first_after { builder.add_first_after(x); } if let Some(x) = args.second_before { builder.add_second_before(x); } if let Some(x) = args.first_before { builder.add_first_before(x); } if let Some(x) = args.current { builder.add_current(x); } builder.finish() } pub const VT_CURRENT: flatbuffers::VOffsetT = 4; pub const VT_FIRST_BEFORE: flatbuffers::VOffsetT = 6; pub const VT_SECOND_BEFORE: flatbuffers::VOffsetT = 8; pub const VT_FIRST_AFTER: flatbuffers::VOffsetT = 10; pub const VT_SECOND_AFTER: flatbuffers::VOffsetT = 12; #[inline] pub fn current(&self) -> IndexedWord<'a> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_CURRENT, None) .unwrap() } #[inline] pub fn first_before(&self) -> 
Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_FIRST_BEFORE, None, ) } #[inline] pub fn second_before(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_BEFORE, None, ) } #[inline] pub fn first_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>(WordContext::VT_FIRST_AFTER, None) } #[inline] pub fn second_after(&self) -> Option<IndexedWord<'a>> { self._tab .get::<flatbuffers::ForwardsUOffset<IndexedWord<'a>>>( WordContext::VT_SECOND_AFTER, None, ) } } pub struct WordContextArgs<'a> { pub current: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_before: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub first_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, pub second_after: Option<flatbuffers::WIPOffset<IndexedWord<'a>>>, } impl<'a> Default for WordContextArgs<'a> { #[inline] fn default() -> Self { WordContextArgs { current: None, first_before: None, second_before: None, first_after: None, second_after: None, } } } pub struct WordContextBuilder<'a: 'b, 'b> { fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>, start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>, } impl<'a: 'b, 'b> WordContextBuilder<'a, 'b> { #[inline] pub fn add_current(&mut self, current: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_CURRENT, current, ); } #[inline] pub fn add_first_before(&mut self, first_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_BEFORE, first_before, ); } #[inline] pub fn add_second_before(&mut self, second_before: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .
#[inline] pub fn add_first_after(&mut self, first_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_FIRST_AFTER, first_after, ); } #[inline] pub fn add_second_after(&mut self, second_after: flatbuffers::WIPOffset<IndexedWord<'b>>) { self.fbb_ .push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_AFTER, second_after, ); } #[inline] pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WordContextBuilder<'a, 'b> { let start = _fbb.start_table(); WordContextBuilder { fbb_: _fbb, start_: start, } } #[inline] pub fn finish(self) -> flatbuffers::WIPOffset<WordContext<'a>> { let o = self.fbb_.end_table(self.start_); self.fbb_.required(o, WordContext::VT_CURRENT, "current"); flatbuffers::WIPOffset::new(o.value()) } } #[inline] pub fn get_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_root::<WordContext<'a>>(buf) } #[inline] pub fn get_size_prefixed_root_as_word_context<'a>(buf: &'a [u8]) -> WordContext<'a> { flatbuffers::get_size_prefixed_root::<WordContext<'a>>(buf) } #[inline] pub fn finish_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish(root, None); } #[inline] pub fn finish_size_prefixed_word_context_buffer<'a, 'b>( fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<WordContext<'a>>, ) { fbb.finish_size_prefixed(root, None); }
push_slot_always::<flatbuffers::WIPOffset<IndexedWord>>( WordContext::VT_SECOND_BEFORE, second_before, ); }
function_block-function_prefix_line
[ { "content": "pub fn cursor_context(first_half: &str, second_half: &str) -> WordContext {\n\n // Find the point in the first half where the first \"word\" happens\n\n let mut first_half_iter = first_half.word_bound_indices().rev();\n\n let mut second_half_iter = second_half.word_bound_indices();\n\n\n\...
Rust
native/src/lib.rs
zhangxh1023/node-image-search
2a66956f1fbe295208c6bc1a49b008d02860d4c6
extern crate neon; use neon::prelude::*; mod image; mod utils; struct ImageSearchTask { parent_image_path: String, child_image_path: String, out: String, result_level: u32, } impl Task for ImageSearchTask { type Output = Vec<Vec<image::ResultPoint>>; type Error = String; type JsEvent = JsArray; fn perform(&self) -> Result<Self::Output, Self::Error> { let parent_image = image::Image::new(self.parent_image_path.clone()); let child_image = image::Image::new(self.child_image_path.clone()); let result = parent_image.search_child_image_point_from_parent_image(&child_image, self.result_level); if &self.out != "" { parent_image.mark_child_image_border_with_new_image(&child_image, &self.out, &result); } Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); let result_array = JsArray::new(&mut cx, result.len() as u32); for (i, v) in result.iter().enumerate() { let temp_array = JsArray::new(&mut cx, v.len() as u32); for (index, object) in v.iter().enumerate() { let result_object = JsObject::new(&mut cx); let x = object.x; let y = object.y; let x = cx.number(x); let y = cx.number(y); let hash_string = cx.string(object.hash_string.clone()); let hamming_distance = cx.number(object.hamming_distance); result_object.set(&mut cx, "x", x).unwrap(); result_object.set(&mut cx, "y", y).unwrap(); result_object .set(&mut cx, "hash_string", hash_string) .unwrap(); result_object .set(&mut cx, "hamming_distance", hamming_distance) .unwrap(); temp_array .set(&mut cx, index as u32, result_object) .unwrap(); } result_array.set(&mut cx, i as u32, temp_array).unwrap(); } Ok(result_array) } } fn image_search(mut cx: FunctionContext) -> JsResult<JsUndefined> { let parent_image_path = cx.argument::<JsString>(0)?.value(); let child_image_path = cx.argument::<JsString>(1)?.value(); let options = cx.argument::<JsObject>(2)?; let out = options .get(&mut cx, "out")? 
.downcast::<JsString>() .or_throw(&mut cx)? .value(); let result_level = options .get(&mut cx, "result_level")? .downcast::<JsNumber>() .or_throw(&mut cx)? .value(); let f = cx.argument::<JsFunction>(3)?; let image_search_task = ImageSearchTask { parent_image_path, child_image_path, out, result_level: result_level as u32, }; image_search_task.schedule(f); Ok(cx.undefined()) } struct GetDHashTask { image_path: String, } impl Task for GetDHashTask { type Output = String; type Error = String; type JsEvent = JsString; fn perform(&self) -> Result<Self::Output, Self::Error> { let image = image::Image::new(self.image_path.clone()); let result = image.get_d_hash(); Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); Ok(cx.string(result)) } } fn get_d_hash(mut cx: FunctionContext) -> JsResult<JsUndefined> { let image_path = cx.argument::<JsString>(0)?.value(); let f = cx.argument::<JsFunction>(1)?; let get_d_hash_task = GetDHashTask { image_path }; get_d_hash_task.schedule(f); Ok(cx.undefined()) } fn get_hamming_distance_by_hex_hash(mut cx: FunctionContext) -> JsResult<JsNumber> { let hash_1 = cx.argument::<JsString>(0)?.value(); let hash_2 = cx.argument::<JsString>(1)?.value(); let result = utils::get_hamming_distance_by_hex_hash(&hash_1, &hash_2); Ok(cx.number(result)) } register_module!(mut m, { m.export_function("image_search", image_search)?; m.export_function("get_d_hash", get_d_hash)?; m.export_function( "get_hamming_distance_by_hex_hash", get_hamming_distance_by_hex_hash, )?; Ok(()) });
extern crate neon; use neon::prelude::*; mod image; mod utils; struct ImageSearchTask { parent_image_path: String, child_image_path: String, out: String, result_level: u32, } impl Task for ImageSearchTask { type Output = Vec<Vec<image::ResultPoint>>; type Error = String; type JsEvent = JsArray; fn perform(&self) -> Result<Self::Output, Self::Error> { let parent_image = image::Image::new(self.parent_image_path.clone()); let child_image = image::Image::new(self.child_image_path.clone()); let result = parent_image.search_child_image_point_from_parent_image(&child_image, self.result_level); if &self.out != "" { parent_image.mark_child_image_border_with_new_image(&child_image, &self.out, &result); } Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); let result_array = JsArray::new(&mut cx, result.len() as u32); for (i, v) in result.iter().enumerate() { let temp_array = JsArray::new(&mut cx, v.len() as u32); for (index, object) in v.iter().enumerate() { let result_object = JsObject::new(&mut cx); let x = object.x; let y = object.y; let x = cx.number(x); let y = cx.number(y); let hash_string = cx.string(object.hash_string.clone()); let hamming_distance = cx.number(object.hamming_distance); result_object.set(&mut cx, "x", x).unwrap(); result_object.set(&mut cx, "y", y).unwrap(); result_object .set(&mut cx, "hash_string", hash_string) .unwrap(); result_object .set(&mut cx, "hamming_distance", hamming_distance) .unwrap(); temp_array .set(&mut cx, index as u32, result_object) .unwrap(); } result_array.set(&mut cx, i as u32, temp_array).unwrap(); } Ok(result_array) } }
struct GetDHashTask { image_path: String, } impl Task for GetDHashTask { type Output = String; type Error = String; type JsEvent = JsString; fn perform(&self) -> Result<Self::Output, Self::Error> { let image = image::Image::new(self.image_path.clone()); let result = image.get_d_hash(); Ok(result) } fn complete( self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>, ) -> JsResult<Self::JsEvent> { let result = result.unwrap(); Ok(cx.string(result)) } } fn get_d_hash(mut cx: FunctionContext) -> JsResult<JsUndefined> { let image_path = cx.argument::<JsString>(0)?.value(); let f = cx.argument::<JsFunction>(1)?; let get_d_hash_task = GetDHashTask { image_path }; get_d_hash_task.schedule(f); Ok(cx.undefined()) } fn get_hamming_distance_by_hex_hash(mut cx: FunctionContext) -> JsResult<JsNumber> { let hash_1 = cx.argument::<JsString>(0)?.value(); let hash_2 = cx.argument::<JsString>(1)?.value(); let result = utils::get_hamming_distance_by_hex_hash(&hash_1, &hash_2); Ok(cx.number(result)) } register_module!(mut m, { m.export_function("image_search", image_search)?; m.export_function("get_d_hash", get_d_hash)?; m.export_function( "get_hamming_distance_by_hex_hash", get_hamming_distance_by_hex_hash, )?; Ok(()) });
fn image_search(mut cx: FunctionContext) -> JsResult<JsUndefined> { let parent_image_path = cx.argument::<JsString>(0)?.value(); let child_image_path = cx.argument::<JsString>(1)?.value(); let options = cx.argument::<JsObject>(2)?; let out = options .get(&mut cx, "out")? .downcast::<JsString>() .or_throw(&mut cx)? .value(); let result_level = options .get(&mut cx, "result_level")? .downcast::<JsNumber>() .or_throw(&mut cx)? .value(); let f = cx.argument::<JsFunction>(3)?; let image_search_task = ImageSearchTask { parent_image_path, child_image_path, out, result_level: result_level as u32, }; image_search_task.schedule(f); Ok(cx.undefined()) }
function_block-full_function
[ { "content": "pub fn convert_to_binary_from_hex(hex: &str) -> String {\n\n hex.chars().map(to_binary).collect()\n\n}\n\n\n", "file_path": "native/src/utils.rs", "rank": 4, "score": 67880.1477159762 }, { "content": "pub fn get_hamming_distance_by_hex_hash(hash_1: &str, hash_2: &str) -> u32 {...
Rust
services/headless-lms/server/src/ts_binding_generator.rs
rage/secret-project-331
3c78c02f2f1d2e4539522e73c3065ae8866604e3
use crate::controllers::{ auth::Login, course_material::{ exams::{ExamData, ExamEnrollmentData}, submissions::PreviousSubmission, }, main_frontend::{ courses::GetFeedbackQuery, exams::ExamCourseInfo, exercises::ExerciseSubmissions, feedback::MarkAsRead, proposed_edits::GetEditProposalsQuery, }, ErrorResponse, UploadResult, }; use headless_lms_models::*; use headless_lms_utils::pagination::Pagination; macro_rules! export { ($target:expr, $($types:ty),*) => { { let target = $target; fn _export(target: &mut impl ::std::io::Write) -> ::std::result::Result<(), ::std::io::Error> { $( writeln!(target, "export {}\n", <$types as ::ts_rs::TS>::decl())?; )* Ok(()) } _export(target) } }; } #[test] fn ts_binding_generator() { let mut target = std::fs::File::create("../../../shared-module/src/bindings.ts").unwrap(); let res = export! { &mut target, chapters::Chapter, chapters::ChapterStatus, chapters::ChapterUpdate, chapters::ChapterWithStatus, chapters::NewChapter, chapters::UserCourseInstanceChapterProgress, course_instance_enrollments::CourseInstanceEnrollment, course_instances::ChapterScore, course_instances::CourseInstance, course_instances::CourseInstanceForm, course_instances::PointMap, course_instances::Points, course_instances::VariantStatus, courses::Course, courses::CourseStructure, courses::CourseUpdate, courses::NewCourse, courses::CourseCount, email_templates::EmailTemplate, email_templates::EmailTemplateNew, email_templates::EmailTemplateUpdate, exams::CourseExam, exams::Exam, exams::ExamEnrollment, exercise_service_info::CourseMaterialExerciseServiceInfo, exercise_service_info::ExerciseServiceInfoApi, exercise_services::ExerciseService, exercise_services::ExerciseServiceNewOrUpdate, exercise_slides::ExerciseSlide, exercise_tasks::CourseMaterialExerciseTask, exercise_tasks::ExerciseTask, exercises::ActivityProgress, exercises::CourseMaterialExercise, exercises::Exercise, exercises::ExerciseStatus, exercises::GradingProgress, feedback::Feedback, 
feedback::FeedbackBlock, feedback::FeedbackCount, feedback::NewFeedback, gradings::Grading, gradings::UserPointsUpdateStrategy, organizations::Organization, page_history::PageHistory, page_history::HistoryChangeReason, pages::CmsPageExercise, pages::CmsPageExerciseSlide, pages::CmsPageExerciseTask, pages::CmsPageUpdate, pages::ContentManagementPage, pages::CoursePageWithUserData, pages::ExerciseWithExerciseTasks, pages::HistoryRestoreData, pages::Page, pages::PageRoutingDataWithChapterStatus, pages::PageSearchRequest, pages::PageSearchResult, pages::PageWithExercises, pages::NewPage, playground_examples::PlaygroundExample, playground_examples::PlaygroundExampleData, proposed_block_edits::BlockProposal, proposed_block_edits::BlockProposalAction, proposed_block_edits::BlockProposalInfo, proposed_block_edits::NewProposedBlockEdit, proposed_block_edits::ProposalStatus, proposed_page_edits::EditProposalInfo, proposed_page_edits::NewProposedPageEdits, proposed_page_edits::PageProposal, proposed_page_edits::ProposalCount, submissions::Submission, submissions::SubmissionCount, submissions::SubmissionCountByWeekAndHour, submissions::SubmissionCountByExercise, submissions::SubmissionInfo, submissions::SubmissionResult, submissions::NewSubmission, submissions::GradingResult, user_course_settings::UserCourseSettings, user_exercise_states::UserCourseInstanceChapterExerciseProgress, user_exercise_states::UserCourseInstanceProgress, users::User, PreviousSubmission, ExamData, ExamEnrollmentData, ExamCourseInfo, Login, UploadResult, ExerciseSubmissions, MarkAsRead, GetFeedbackQuery, GetEditProposalsQuery, ErrorResponse, Pagination }; res.unwrap(); }
use crate::controllers::{ auth::Login, course_material::{ exams::{ExamData, ExamEnrollmentData}, submissions::PreviousSubmission, }, main_frontend::{ courses::GetFeedbackQuery, exams::ExamCourseInfo, exercises::ExerciseSubmissions, feedback::MarkAsRead, proposed_edits::GetEditProposalsQuery, }, ErrorResponse, UploadResult, }; use headless_lms_models::*; use headless_lms_utils::pagination::Pagination; macro_rules! export { ($target:expr, $($types:ty),*) => { { let target = $target; fn _export(target: &mut impl ::std::io::Write) -> ::std::result::Result<(), ::std::io::Error> { $( writeln!(target, "export {}\n", <$types as ::ts_rs::TS>::decl())?; )* Ok(()) } _export(target) } }; } #[test] fn ts_binding_generator() { let mut target = std::fs::File::create("../../../shared-module/src/bindings.ts").unwrap(); let res = export! { &mut target, chapters::Chapter, chapters::ChapterStatus, chapters::ChapterUpdate, chapters::ChapterWithStatus, chapters::NewChapter, chapters::UserCourseInstanceChapterProgress, course_instance_enrollments::CourseInstanceEnrollment, course_instances::ChapterScore, course_instances::CourseInstance, course_instances::CourseInstanceForm, course_instances::PointMap, course_instances::Points, course_instances::VariantStatus, courses::Course, courses::CourseStructure, courses::CourseUpdate, courses::NewCourse, courses::CourseCount, email_templates::EmailTemplate, email_templates::EmailTemplateNew, email_templates::EmailTemplateUpdate, exams::CourseExam, exams::Exam, exams::ExamEnrollment, exercise_service_info::CourseMaterialExerciseServiceInfo, exercise_service_info::ExerciseServiceInfoApi, exercise_services::ExerciseService, exercise_services::ExerciseServiceNewOrUpdate, exercise_slides::ExerciseSlide, exercise_tasks::CourseMaterialExerciseTask, exercise_tasks::ExerciseTask, exercises::ActivityProgress, exercises::CourseMaterialExercise, exercises::Exercise, exercises::ExerciseStatus, exercises::GradingProgress, feedback::Feedback, 
feedback::FeedbackBlock, feedback::FeedbackCount, feedback::NewFeedback, gradings::Grading, gradings::UserPointsUpdateStrategy,
organizations::Organization, page_history::PageHistory, page_history::HistoryChangeReason, pages::CmsPageExercise, pages::CmsPageExerciseSlide, pages::CmsPageExerciseTask, pages::CmsPageUpdate, pages::ContentManagementPage, pages::CoursePageWithUserData, pages::ExerciseWithExerciseTasks, pages::HistoryRestoreData, pages::Page, pages::PageRoutingDataWithChapterStatus, pages::PageSearchRequest, pages::PageSearchResult, pages::PageWithExercises, pages::NewPage, playground_examples::PlaygroundExample, playground_examples::PlaygroundExampleData, proposed_block_edits::BlockProposal, proposed_block_edits::BlockProposalAction, proposed_block_edits::BlockProposalInfo, proposed_block_edits::NewProposedBlockEdit, proposed_block_edits::ProposalStatus, proposed_page_edits::EditProposalInfo, proposed_page_edits::NewProposedPageEdits, proposed_page_edits::PageProposal, proposed_page_edits::ProposalCount, submissions::Submission, submissions::SubmissionCount, submissions::SubmissionCountByWeekAndHour, submissions::SubmissionCountByExercise, submissions::SubmissionInfo, submissions::SubmissionResult, submissions::NewSubmission, submissions::GradingResult, user_course_settings::UserCourseSettings, user_exercise_states::UserCourseInstanceChapterExerciseProgress, user_exercise_states::UserCourseInstanceProgress, users::User, PreviousSubmission, ExamData, ExamEnrollmentData, ExamCourseInfo, Login, UploadResult, ExerciseSubmissions, MarkAsRead, GetFeedbackQuery, GetEditProposalsQuery, ErrorResponse, Pagination }; res.unwrap(); }
function_block-function_prefix_line
[ { "content": "pub fn _add_routes(cfg: &mut ServiceConfig) {\n\n cfg.route(\"/uploads/{tail:.*}\", web::get().to(serve_upload))\n\n .route(\"{tail:.*}\", web::get().to(redirect_to_storage_service));\n\n}\n", "file_path": "services/headless-lms/server/src/controllers/files.rs", "rank": 0, "s...
Rust
src/lib.rs
Elinvynia/schwifty
e0233bf836af6382e5565801df452baf7c9f935b
#![forbid(unsafe_code)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] pub use crate::country::Country; pub use crate::error::ValidationError; use std::str::FromStr; pub mod country; pub(crate) mod country_specific; pub mod error; #[allow(clippy::all)] pub(crate) mod u256 { uint::construct_uint! { pub(crate) struct U256(4); } } #[derive(Debug)] #[non_exhaustive] pub struct Iban { pub country: Country, pub(crate) raw: String, } impl Iban { pub fn account_number(&self) -> String { self.country.account_number(&self.raw) } pub fn bank_code(&self) -> String { self.country.bank_code(&self.raw) } pub fn country_code(&self) -> String { self.country.to_string() } pub fn raw(&self) -> &str { &self.raw } } pub fn validate<I: AsRef<str>>(input: I) -> Result<Iban, ValidationError> { let input = input.as_ref(); let input: String = input.split_whitespace().collect(); if input.len() > 34 { return Err(ValidationError::TooLong); }; if !input.chars().all(|ch| ch.is_alphanumeric()) { return Err(ValidationError::InvalidChar); }; if input.len() < 2 { return Err(ValidationError::InvalidCountryCode); }; let country_code = &input[0..2]; let country = match Country::from_str(country_code) { Ok(c) => c, Err(_) => return Err(ValidationError::InvalidCountryCode), }; if input.len() != country.length() { return Err(ValidationError::InvalidLength); } if !country.format().is_match(&input) { return Err(ValidationError::InvalidFormat); } if !country.custom_validation(&input) { return Err(ValidationError::CountryCheckFailed); } let (start, rest) = input.split_at(4); let mut rearranged = String::with_capacity(34); rearranged.push_str(rest); rearranged.push_str(start); let mut integer_string = String::with_capacity(34); for ch in rearranged.chars() { if ch.is_numeric() { integer_string.push(ch); } else { let x = ch.to_digit(36).unwrap().to_string(); integer_string.push_str(&x) } } let integer = u256::U256::from_dec_str(&integer_string).unwrap(); if integer % 97 != 1.into() { return 
Err(ValidationError::InvalidIban); } Ok(Iban { country, raw: input, }) }
#![forbid(unsafe_code)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] pub use crate::country::Country; pub use crate::error::ValidationError; use std::str::FromStr; pub mod country; pub(crate) mod country_specific; pub mod error; #[allow(clippy::all)] pub(crate) mod u256 { uint::construct_uint! { pub(crate) struct U256(4); } } #[derive(Debug)] #[non_exhaustive] pub struct Iban { pub country: Country, pub(crate) raw: String, } impl Iban { pub fn account_number(&self) -> String { self.country.account_number(&self.raw) } pub fn bank_code(&self) -> String { self.country.bank_code(&self.raw) } pub fn country_code(&self) -> String { self.country.to_string() } pub fn raw(&self) -> &str { &self.raw } } pub fn validate<I: AsRef<str>>(input: I) -> Result<Iban, ValidationError> { let input = input.as_ref(); let input: String = input.split_whitespace().collect(); if input.len() > 34 { return Err(ValidationError::TooLong); }; if !input.chars().all(|ch| ch.is_alphanumeric()) { return Err(ValidationError::InvalidChar); }; if input.len() < 2 { return Err(ValidationError::InvalidCountryCode); }; let country_code = &input[0..2]; let country = match Country::from_str(country_code) { Ok(c) => c, Err(_) => return Err(ValidationError::InvalidCountryCode), }; if input.len() != country.length() { return Err(ValidationError::InvalidLength); } if !country.format().is_match(&input) { return Err(ValidationError::InvalidFormat); } if !country.custom_validation(&input) { return Err(ValidationError::CountryCheckFailed); } let (start, rest) = input.split_at(4); let mut rearranged = String::with_capacity(34); rearranged.push_str(rest); rearranged.push_str(start); let mut integer_string = String::with_capacity(34); for ch in rearranged.chars() {
} let integer = u256::U256::from_dec_str(&integer_string).unwrap(); if integer % 97 != 1.into() { return Err(ValidationError::InvalidIban); } Ok(Iban { country, raw: input, }) }
if ch.is_numeric() { integer_string.push(ch); } else { let x = ch.to_digit(36).unwrap().to_string(); integer_string.push_str(&x) }
if_condition
[ { "content": "fn c(num: usize) -> String {\n\n format!(\"[a-zA-Z0-9]{{{}}}\", num)\n\n}\n\n\n", "file_path": "src/country.rs", "rank": 1, "score": 59182.76963598044 }, { "content": "fn a(num: usize) -> String {\n\n format!(\"[A-Z]{{{}}}\", num)\n\n}\n\n\n\n// Almost-DSL macro for comfo...
Rust
examples/max11619-adc.rs
us-irs/vorago-reb1-rs
742a53d63a8d531d3e36068ee09987c2f38ef77e
#![no_main] #![no_std] use cortex_m_rt::entry; use embedded_hal::{blocking::delay::DelayUs, spi}; use max116xx_10bit::VoltageRefMode; use max116xx_10bit::{AveragingConversions, AveragingResults}; use panic_rtt_target as _; use rtt_target::{rprintln, rtt_init_print}; use va108xx_hal::timer::CountDownTimer; use va108xx_hal::{ gpio::PinsA, pac::{self, interrupt, SPIB}, prelude::*, spi::{Spi, SpiBase, SpiConfig, TransferConfig}, timer::{default_ms_irq_handler, set_up_ms_timer, Delay, IrqCfg}, utility::{port_mux, Funsel, PortSel}, }; use vorago_reb1::max11619::{ max11619_externally_clocked_no_wakeup, max11619_externally_clocked_with_wakeup, max11619_internally_clocked, EocPin, AN2_CHANNEL, POTENTIOMETER_CHANNEL, }; #[derive(Debug, PartialEq, Copy, Clone)] pub enum ExampleMode { UsingEoc, NotUsingEoc, NotUsingEocWithDelay, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum ReadMode { Single, Multiple, MultipleNToHighest, AverageN, } #[derive(Debug, PartialEq, Copy, Clone)] pub enum MuxMode { None, PortB19to17, } const EXAMPLE_MODE: ExampleMode = ExampleMode::NotUsingEoc; const READ_MODE: ReadMode = ReadMode::Multiple; const MUX_MODE: MuxMode = MuxMode::None; #[entry] fn main() -> ! 
{ rtt_init_print!(); rprintln!("-- Vorago ADC Example --"); let mut dp = pac::Peripherals::take().unwrap(); let tim0 = set_up_ms_timer( IrqCfg::new(pac::Interrupt::OC0, true, true), &mut dp.SYSCONFIG, Some(&mut dp.IRQSEL), 50.mhz(), dp.TIM0, ); let delay = Delay::new(tim0); unsafe { cortex_m::peripheral::NVIC::unmask(pac::Interrupt::OC0); } let pinsa = PinsA::new(&mut dp.SYSCONFIG, None, dp.PORTA); let spi_cfg = SpiConfig::default(); let (sck, mosi, miso) = ( pinsa.pa20.into_funsel_2(), pinsa.pa19.into_funsel_2(), pinsa.pa18.into_funsel_2(), ); if MUX_MODE == MuxMode::PortB19to17 { port_mux(&mut dp.IOCONFIG, PortSel::PortB, 19, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 18, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 17, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 16, Funsel::Funsel1).ok(); } let mut accel_cs = pinsa.pa16.into_push_pull_output(); accel_cs .set_high() .expect("Setting accelerometer chip select high failed"); let transfer_cfg = TransferConfig::new( 3.mhz(), spi::MODE_0, Some(pinsa.pa17.into_funsel_2()), true, false, ); let spi = Spi::spib( dp.SPIB, (sck, miso, mosi), 50.mhz(), spi_cfg, Some(&mut dp.SYSCONFIG), Some(&transfer_cfg.downgrade()), ) .downgrade(); match EXAMPLE_MODE { ExampleMode::NotUsingEoc => spi_example_externally_clocked(spi, delay), ExampleMode::UsingEoc => { spi_example_internally_clocked(spi, delay, pinsa.pa14.into_floating_input()); } ExampleMode::NotUsingEocWithDelay => { let delay_us = CountDownTimer::new(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM2); spi_example_externally_clocked_with_delay(spi, delay, delay_us); } } } #[interrupt] fn OC0() { default_ms_irq_handler(); } fn spi_example_externally_clocked(spi: SpiBase<SPIB>, mut delay: Delay) -> ! 
{ let mut adc = max11619_externally_clocked_no_wakeup(spi) .expect("Creating externally clocked MAX11619 device failed"); if READ_MODE == ReadMode::AverageN { adc.averaging( AveragingConversions::FourConversions, AveragingResults::FourResults, ) .expect("Error setting up averaging register"); } let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } } fn spi_example_externally_clocked_with_delay( spi: SpiBase<SPIB>, mut delay: Delay, mut delay_us: impl DelayUs<u8>, ) -> ! 
{ let mut adc = max11619_externally_clocked_with_wakeup(spi).expect("Creating MAX116xx device failed"); let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL, &mut delay_us) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, &mut delay_us, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, &mut delay_us, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } } fn spi_example_internally_clocked(spi: SpiBase<SPIB>, mut delay: Delay, eoc_pin: EocPin) -> ! 
{ let mut adc = max11619_internally_clocked( spi, eoc_pin, VoltageRefMode::ExternalSingleEndedNoWakeupDelay, ) .expect("Creating MAX116xx device failed"); let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { adc.request_single_channel(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let pot_val = nb::block!(adc.get_single_channel()) .expect("Reading single channel value failed"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { adc.request_multiple_channels_0_to_n(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { adc.request_multiple_channels_n_to_highest(AN2_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { adc.request_channel_n_repeatedly(POTENTIOMETER_CHANNEL) .expect("Reading channel multiple times failed"); let mut res_buf: [u16; 16] = [0; 16]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Reading potentiometer 4 times"); rprintln!("\tValue 0: {}", res_buf[0]); rprintln!("\tValue 1: {}", res_buf[1]); rprintln!("\tValue 2: {}", res_buf[2]); rprintln!("\tValue 3: {}", res_buf[3]); } } counter += 1; delay.delay_ms(500); } } fn print_res_buf(buf: &[u16; 4]) { rprintln!("Multi channel read from 0 to 3:"); rprintln!("\tAN0 value: {}", buf[0]); rprintln!("\tAN1 value: {}", buf[1]); rprintln!("\tAN2 value: {}", buf[2]); 
rprintln!("\tAN3 / Potentiometer value: {}", buf[3]); }
//! Example application demonstrating the MAX11619 ADC on the Vorago REB1
//! board over SPI, in several clocking / channel-read configurations.
#![no_main]
#![no_std]

use cortex_m_rt::entry;
use embedded_hal::{blocking::delay::DelayUs, spi};
use max116xx_10bit::VoltageRefMode;
use max116xx_10bit::{AveragingConversions, AveragingResults};
use panic_rtt_target as _;
use rtt_target::{rprintln, rtt_init_print};
use va108xx_hal::timer::CountDownTimer;
use va108xx_hal::{
    gpio::PinsA,
    pac::{self, interrupt, SPIB},
    prelude::*,
    spi::{Spi, SpiBase, SpiConfig, TransferConfig},
    timer::{default_ms_irq_handler, set_up_ms_timer, Delay, IrqCfg},
    utility::{port_mux, Funsel, PortSel},
};
use vorago_reb1::max11619::{
    max11619_externally_clocked_no_wakeup, max11619_externally_clocked_with_wakeup,
    max11619_internally_clocked, EocPin, AN2_CHANNEL, POTENTIOMETER_CHANNEL,
};

/// How ADC conversions are clocked / completion is detected.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum ExampleMode {
    /// Internally clocked conversions, completion signalled on the EOC pin.
    UsingEoc,
    /// Externally clocked conversions, no EOC pin, no extra wakeup delay.
    NotUsingEoc,
    /// Externally clocked conversions with an explicit microsecond delay
    /// provider (used for the wakeup delay).
    NotUsingEocWithDelay,
}

/// Which channel-read pattern each measurement iteration exercises.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum ReadMode {
    Single,
    Multiple,
    MultipleNToHighest,
    AverageN,
}

/// Optional pin multiplexing of PORTB pins (19..=16) for the ADC signals.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MuxMode {
    None,
    PortB19to17,
}

// Compile-time configuration of the example; edit and rebuild to try variants.
const EXAMPLE_MODE: ExampleMode = ExampleMode::NotUsingEoc;
const READ_MODE: ReadMode = ReadMode::Multiple;
const MUX_MODE: MuxMode = MuxMode::None;

/// Entry point. Never returns (`-> !`); body follows on the next line.
#[entry]
fn main() -> !
{ rtt_init_print!(); rprintln!("-- Vorago ADC Example --"); let mut dp = pac::Peripherals::take().unwrap(); let tim0 = set_up_ms_timer( IrqCfg::new(pac::Interrupt::OC0, true, true), &mut dp.SYSCONFIG, Some(&mut dp.IRQSEL), 50.mhz(), dp.TIM0, ); let delay = Delay::new(tim0); unsafe { cortex_m::peripheral::NVIC::unmask(pac::Interrupt::OC0); } let pinsa = PinsA::new(&mut dp.SYSCONFIG, None, dp.PORTA); let spi_cfg = SpiConfig::default(); let (sck, mosi, miso) = ( pinsa.pa20.into_funsel_2(), pinsa.pa19.into_funsel_2(), pinsa.pa18.into_funsel_2(), ); if MUX_MODE == MuxMode::PortB19to17 { port_mux(&mut dp.IOCONFIG, PortSel::PortB, 19, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 18, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 17, Funsel::Funsel1).ok(); port_mux(&mut dp.IOCONFIG, PortSel::PortB, 16, Funsel::Funsel1).ok(); } let mut accel_cs = pinsa.pa16.into_push_pull_output(); accel_cs .set_high() .expect("Setting accelerometer chip select high failed"); let transfer_cfg = TransferConfig::new( 3.mhz(), spi::MODE_0, Some(pinsa.pa17.into_funsel_2()), true, false, ); let spi = Spi::spib( dp.SPIB, (sck, miso, mosi), 50.mhz(), spi_cfg, Some(&mut dp.SYSCONFIG), Some(&transfer_cfg.downgrade()), ) .downgrade(); match EXAMPLE_MODE { ExampleMode::NotUsingEoc => spi_example_externally_clocked(spi, delay), ExampleMode::UsingEoc => { spi_example_internally_clocked(spi, delay, pinsa.pa14.into_floating_input()); } ExampleMode::NotUsingEocWithDelay => { let delay_us = CountDownTimer::new(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM2); spi_example_externally_clocked_with_delay(spi, delay, delay_us); } } } #[interrupt] fn OC0() { default_ms_irq_handler(); } fn spi_example_externally_clocked(spi: SpiBase<SPIB>, mut delay: Delay) -> ! { let mut adc = max11619_externally_clocked_no_wake
/// Externally clocked read loop using an explicit microsecond delay provider
/// (needed for the MAX11619 wakeup delay in this reference mode).
///
/// Never returns: performs one measurement per `READ_MODE` every 500 ms and
/// logs results over RTT. `AverageN` is not supported in externally clocked
/// mode and only prints a notice.
fn spi_example_externally_clocked_with_delay(
    spi: SpiBase<SPIB>,
    mut delay: Delay,
    mut delay_us: impl DelayUs<u8>,
) -> ! {
    let mut adc =
        max11619_externally_clocked_with_wakeup(spi).expect("Creating MAX116xx device failed");
    // Command buffer reused for every SPI transfer.
    let mut cmd_buf: [u8; 32] = [0; 32];
    let mut counter = 0;
    loop {
        rprintln!("-- Measurement {} --", counter);
        match READ_MODE {
            ReadMode::Single => {
                rprintln!("Reading single potentiometer channel");
                let pot_val = adc
                    .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL, &mut delay_us)
                    // Fixed: previous message wrongly claimed device *creation* failed.
                    .expect("Reading single channel failed");
                rprintln!("Single channel read:");
                rprintln!("\tPotentiometer value: {}", pot_val);
            }
            ReadMode::Multiple => {
                let mut res_buf: [u16; 4] = [0; 4];
                adc.read_multiple_channels_0_to_n(
                    &mut cmd_buf,
                    &mut res_buf.iter_mut(),
                    POTENTIOMETER_CHANNEL,
                    &mut delay_us,
                )
                .expect("Multi-Channel read failed");
                print_res_buf(&res_buf);
            }
            ReadMode::MultipleNToHighest => {
                let mut res_buf: [u16; 2] = [0; 2];
                adc.read_multiple_channels_n_to_highest(
                    &mut cmd_buf,
                    &mut res_buf.iter_mut(),
                    AN2_CHANNEL,
                    &mut delay_us,
                )
                .expect("Multi-Channel read failed");
                rprintln!("Multi channel read from 2 to 3:");
                rprintln!("\tAN2 value: {}", res_buf[0]);
                rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]);
            }
            ReadMode::AverageN => {
                rprintln!("Scanning and averaging not possible for externally clocked mode");
            }
        }
        counter += 1;
        delay.delay_ms(500);
    }
}

fn spi_example_internally_clocked(spi: SpiBase<SPIB>, mut delay: Delay, eoc_pin: EocPin) -> !
{ let mut adc = max11619_internally_clocked( spi, eoc_pin, VoltageRefMode::ExternalSingleEndedNoWakeupDelay, ) .expect("Creating MAX116xx device failed"); let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { adc.request_single_channel(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let pot_val = nb::block!(adc.get_single_channel()) .expect("Reading single channel value failed"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { adc.request_multiple_channels_0_to_n(POTENTIOMETER_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { adc.request_multiple_channels_n_to_highest(AN2_CHANNEL) .expect("Requesting single channel value failed"); let mut res_buf: [u16; 4] = [0; 4]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { adc.request_channel_n_repeatedly(POTENTIOMETER_CHANNEL) .expect("Reading channel multiple times failed"); let mut res_buf: [u16; 16] = [0; 16]; nb::block!(adc.get_multi_channel(&mut res_buf.iter_mut())) .expect("Requesting multiple channel values failed"); rprintln!("Reading potentiometer 4 times"); rprintln!("\tValue 0: {}", res_buf[0]); rprintln!("\tValue 1: {}", res_buf[1]); rprintln!("\tValue 2: {}", res_buf[2]); rprintln!("\tValue 3: {}", res_buf[3]); } } counter += 1; delay.delay_ms(500); } } fn print_res_buf(buf: &[u16; 4]) { rprintln!("Multi channel read from 0 to 3:"); rprintln!("\tAN0 value: {}", buf[0]); rprintln!("\tAN1 value: {}", buf[1]); rprintln!("\tAN2 value: {}", buf[2]); 
rprintln!("\tAN3 / Potentiometer value: {}", buf[3]); }
up(spi) .expect("Creating externally clocked MAX11619 device failed"); if READ_MODE == ReadMode::AverageN { adc.averaging( AveragingConversions::FourConversions, AveragingResults::FourResults, ) .expect("Error setting up averaging register"); } let mut cmd_buf: [u8; 32] = [0; 32]; let mut counter = 0; loop { rprintln!("-- Measurement {} --", counter); match READ_MODE { ReadMode::Single => { rprintln!("Reading single potentiometer channel"); let pot_val = adc .read_single_channel(&mut cmd_buf, POTENTIOMETER_CHANNEL) .expect("Creating externally clocked MAX11619 ADC failed"); rprintln!("Single channel read:"); rprintln!("\tPotentiometer value: {}", pot_val); } ReadMode::Multiple => { let mut res_buf: [u16; 4] = [0; 4]; adc.read_multiple_channels_0_to_n( &mut cmd_buf, &mut res_buf.iter_mut(), POTENTIOMETER_CHANNEL, ) .expect("Multi-Channel read failed"); print_res_buf(&res_buf); } ReadMode::MultipleNToHighest => { let mut res_buf: [u16; 2] = [0; 2]; adc.read_multiple_channels_n_to_highest( &mut cmd_buf, &mut res_buf.iter_mut(), AN2_CHANNEL, ) .expect("Multi-Channel read failed"); rprintln!("Multi channel read from 2 to 3:"); rprintln!("\tAN2 value: {}", res_buf[0]); rprintln!("\tAN3 / Potentiometer value: {}", res_buf[1]); } ReadMode::AverageN => { rprintln!("Scanning and averaging not possible for externally clocked mode"); } } counter += 1; delay.delay_ms(500); } }
function_block-function_prefixed
[ { "content": "#[entry]\n\nfn main() -> ! {\n\n rtt_init_print!();\n\n rprintln!(\"-- Vorago Accelerometer Example --\");\n\n let mut dp = pac::Peripherals::take().unwrap();\n\n let mut delay = set_up_ms_delay_provider(&mut dp.SYSCONFIG, 50.mhz(), dp.TIM0);\n\n let pinsa = PinsA::new(&mut dp.SYSCO...
Rust
violetabftstore/src/store/worker/split_check.rs
whtcorpsinc/einsteindb-prod
3b1c63a421ff37f1ca6bef4d67ef7f4c4c78a6d7
use std::cmp::Ordering; use std::collections::BinaryHeap; use std::fmt::{self, Display, Formatter}; use std::mem; use edb::{CfName, IterOptions, Iterable, Iteron, CausetEngine, Causet_WRITE, LARGE_CausetS}; use ekvproto::meta_timeshare::Brane; use ekvproto::meta_timeshare::BraneEpoch; use ekvproto::fidel_timeshare::CheckPolicy; use crate::interlock::Config; use crate::interlock::InterlockHost; use crate::interlock::SplitCheckerHost; use crate::store::{Callback, CasualMessage, CasualRouter}; use crate::Result; use configuration::{ConfigChange, Configuration}; use violetabftstore::interlock::::CausetLearnedKey::CausetLearnedKey; use violetabftstore::interlock::::worker::Runnable; use super::metrics::*; #[derive(PartialEq, Eq)] pub struct KeyEntry { key: Vec<u8>, pos: usize, value_size: usize, causet: CfName, } impl KeyEntry { pub fn new(key: Vec<u8>, pos: usize, value_size: usize, causet: CfName) -> KeyEntry { KeyEntry { key, pos, value_size, causet, } } pub fn key(&self) -> &[u8] { self.key.as_ref() } pub fn is_commit_version(&self) -> bool { self.causet == Causet_WRITE } pub fn entry_size(&self) -> usize { self.value_size + self.key.len() } } impl PartialOrd for KeyEntry { fn partial_cmp(&self, rhs: &KeyEntry) -> Option<Ordering> { Some(self.key.cmp(&rhs.key).reverse()) } } impl Ord for KeyEntry { fn cmp(&self, rhs: &KeyEntry) -> Ordering { self.partial_cmp(rhs).unwrap() } } struct MergedIterator<I> { iters: Vec<(CfName, I)>, heap: BinaryHeap<KeyEntry>, } impl<I> MergedIterator<I> where I: Iteron, { fn new<E: CausetEngine>( db: &E, causets: &[CfName], spacelike_key: &[u8], lightlike_key: &[u8], fill_cache: bool, ) -> Result<MergedIterator<E::Iteron>> { let mut iters = Vec::with_capacity(causets.len()); let mut heap = BinaryHeap::with_capacity(causets.len()); for (pos, causet) in causets.iter().enumerate() { let iter_opt = IterOptions::new( Some(CausetLearnedKey::from_slice(spacelike_key, 0, 0)), Some(CausetLearnedKey::from_slice(lightlike_key, 0, 0)), fill_cache, 
); let mut iter = db.Iteron_causet_opt(causet, iter_opt)?; let found: Result<bool> = iter.seek(spacelike_key.into()).map_err(|e| box_err!(e)); if found? { heap.push(KeyEntry::new( iter.key().to_vec(), pos, iter.value().len(), *causet, )); } iters.push((*causet, iter)); } Ok(MergedIterator { iters, heap }) } fn next(&mut self) -> Option<KeyEntry> { let pos = match self.heap.peek() { None => return None, Some(e) => e.pos, }; let (causet, iter) = &mut self.iters[pos]; if iter.next().unwrap() { let mut e = KeyEntry::new(iter.key().to_vec(), pos, iter.value().len(), causet); let mut front = self.heap.peek_mut().unwrap(); mem::swap(&mut e, &mut front); Some(e) } else { self.heap.pop() } } } pub enum Task { SplitCheckTask { brane: Brane, auto_split: bool, policy: CheckPolicy, }, ChangeConfig(ConfigChange), #[causet(any(test, feature = "testexport"))] Validate(Box<dyn FnOnce(&Config) + lightlike>), } impl Task { pub fn split_check(brane: Brane, auto_split: bool, policy: CheckPolicy) -> Task { Task::SplitCheckTask { brane, auto_split, policy, } } } impl Display for Task { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Task::SplitCheckTask { brane, auto_split, .. 
} => write!( f, "[split check worker] Split Check Task for {}, auto_split: {:?}", brane.get_id(), auto_split ), Task::ChangeConfig(_) => write!(f, "[split check worker] Change Config Task"), #[causet(any(test, feature = "testexport"))] Task::Validate(_) => write!(f, "[split check worker] Validate config"), } } } pub struct Runner<E, S> where E: CausetEngine, { engine: E, router: S, interlock: InterlockHost<E>, causet: Config, } impl<E, S> Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { pub fn new(engine: E, router: S, interlock: InterlockHost<E>, causet: Config) -> Runner<E, S> { Runner { engine, router, interlock, causet, } } fn check_split(&mut self, brane: &Brane, auto_split: bool, policy: CheckPolicy) { let brane_id = brane.get_id(); let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane); let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane); debug!( "executing task"; "brane_id" => brane_id, "spacelike_key" => log_wrappers::Key(&spacelike_key), "lightlike_key" => log_wrappers::Key(&lightlike_key), ); CHECK_SPILT_COUNTER.all.inc(); let mut host = self.interlock.new_split_checker_host( &self.causet, brane, &self.engine, auto_split, policy, ); if host.skip() { debug!("skip split check"; "brane_id" => brane.get_id()); return; } let split_tuplespaceInstanton = match host.policy() { CheckPolicy::Scan => { match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, &lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } CheckPolicy::Approximate => match host.approximate_split_tuplespaceInstanton(brane, &self.engine) { Ok(tuplespaceInstanton) => tuplespaceInstanton .into_iter() .map(|k| tuplespaceInstanton::origin_key(&k).to_vec()) .collect(), Err(e) => { error!(%e; "failed to get approximate split key, try scan way"; "brane_id" => brane_id, ); match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, 
&lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } }, CheckPolicy::Usekey => vec![], }; if !split_tuplespaceInstanton.is_empty() { let brane_epoch = brane.get_brane_epoch().clone(); let msg = new_split_brane(brane_epoch, split_tuplespaceInstanton); let res = self.router.lightlike(brane_id, msg); if let Err(e) = res { warn!("failed to lightlike check result"; "brane_id" => brane_id, "err" => %e); } CHECK_SPILT_COUNTER.success.inc(); } else { debug!( "no need to lightlike, split key not found"; "brane_id" => brane_id, ); CHECK_SPILT_COUNTER.ignore.inc(); } } fn scan_split_tuplespaceInstanton( &self, host: &mut SplitCheckerHost<'_, E>, brane: &Brane, spacelike_key: &[u8], lightlike_key: &[u8], ) -> Result<Vec<Vec<u8>>> { let timer = CHECK_SPILT_HISTOGRAM.spacelike_coarse_timer(); MergedIterator::<<E as Iterable>::Iteron>::new( &self.engine, LARGE_CausetS, spacelike_key, lightlike_key, false, ) .map(|mut iter| { let mut size = 0; let mut tuplespaceInstanton = 0; while let Some(e) = iter.next() { if host.on_kv(brane, &e) { return; } size += e.entry_size() as u64; tuplespaceInstanton += 1; } info!( "fidelio approximate size and tuplespaceInstanton with accurate value"; "brane_id" => brane.get_id(), "size" => size, "tuplespaceInstanton" => tuplespaceInstanton, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateSize { size }, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateTuplespaceInstanton { tuplespaceInstanton }, ); })?; timer.observe_duration(); Ok(host.split_tuplespaceInstanton()) } fn change_causet(&mut self, change: ConfigChange) { info!( "split check config fideliod"; "change" => ?change ); self.causet.fidelio(change); } } impl<E, S> Runnable for Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { type Task = Task; fn run(&mut self, task: Task) { match task { Task::SplitCheckTask { brane, 
auto_split, policy, } => self.check_split(&brane, auto_split, policy), Task::ChangeConfig(c) => self.change_causet(c), #[causet(any(test, feature = "testexport"))] Task::Validate(f) => f(&self.causet), } } } fn new_split_brane<E>(brane_epoch: BraneEpoch, split_tuplespaceInstanton: Vec<Vec<u8>>) -> CasualMessage<E> where E: CausetEngine, { CasualMessage::SplitBrane { brane_epoch, split_tuplespaceInstanton, callback: Callback::None, } }
use std::cmp::Ordering; use std::collections::BinaryHeap; use std::fmt::{self, Display, Formatter}; use std::mem; use edb::{CfName, IterOptions, Iterable, Iteron, CausetEngine, Causet_WRITE, LARGE_CausetS}; use ekvproto::meta_timeshare::Brane; use ekvproto::meta_timeshare::BraneEpoch; use ekvproto::fidel_timeshare::CheckPolicy; use crate::interlock::Config; use crate::interlock::InterlockHost; use crate::interlock::SplitCheckerHost; use crate::store::{Callback, CasualMessage, CasualRouter}; use crate::Result; use configuration::{ConfigChange, Configuration}; use violetabftstore::interlock::::CausetLearnedKey::CausetLearnedKey; use violetabftstore::interlock::::worker::Runnable; use super::metrics::*; #[derive(PartialEq, Eq)] pub struct KeyEntry { key: Vec<u8>, pos: usize, value_size: usize, causet: CfName, } impl KeyEntry { pub fn new(key: Vec<u8>, pos: usize, value_size: usize, causet: CfName) -> KeyEntry { KeyEntry { key, pos, value_size, causet, } } pub fn key(&self) -> &[u8] { self.key.as_ref() } pub fn is_commit_version(&self) -> bool { self.causet == Causet_WRITE } pub fn entry_size(&self) -> usize { self.value_size + self.key.len() } } impl PartialOrd for KeyEntry { fn partial_cmp(&self, rhs: &KeyEntry) -> Option<Ordering> { Some(self.key.cmp(&rhs.key).reverse()) } } impl Ord for KeyEntry { fn cmp(&self, rhs: &KeyEntry) -> Ordering { self.partial_cmp(rhs).unwrap() } } struct MergedIterator<I> { iters: Vec<(CfName, I)>, heap: BinaryHeap<KeyEntry>, } impl<I> MergedIterator<I> where I: Iteron, { fn new<E: CausetEngine>( db: &E, causets: &[CfName], spacelike_key: &[u8], lightlike_key: &[u8], fill_cache: bool, ) -> Result<MergedIterator<E::Iteron>> { let mut iters = Vec::with_capacity(causets.len()); let mut heap = BinaryHeap::with_capacity(causets.len()); for (pos, causet) in causets.iter().enumerate() { let iter_opt = IterOptions::new( Some(CausetLearnedKey::from_slice(spacelike_key, 0, 0)), Some(CausetLearnedKey::from_slice(lightlike_key, 0, 0)), fill_cache, 
); let mut iter = db.Iteron_causet_opt(causet, iter_opt)?; let found: Result<bool> = iter.seek(spacelike_key.into()).map_err(|e| box_err!(e)); if found? { heap.push(KeyEntry::new( iter.key().to_vec(), pos, iter.value().len(), *causet, )); } iters.push((*causet, iter)); } Ok(MergedIterator { iters, heap }) } fn next(&mut self) -> Option<KeyEntry> { let pos = match self.heap.peek() { None => return None, Some(e) => e.pos, }; let (causet, iter) = &mut self.iters[pos]; if iter.next().unwrap() { let mut e = KeyEntry::new(iter.key().to_vec(), pos, iter.value().len(), causet); let mut front = self.heap.peek_mut().unwrap(); mem::swap(&mut e, &mut front); Some(e) } else { self.heap.pop() } } } pub enum Task { SplitCheckTask { brane: Brane, auto_split: bool, policy: CheckPolicy, }, ChangeConfig(ConfigChange), #[causet(any(test, feature = "testexport"))] Validate(Box<dyn FnOnce(&Config) + lightlike>), } impl Task { pub fn split_check(brane: Brane, auto_split: bool, policy: CheckPolicy) -> Task { Task::SplitCheckTask { brane, auto_split, policy, } } } impl Display for Task { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { Task::SplitCheckTask { brane, auto_split, .. 
} => write!( f, "[split check worker] Split Check Task for {}, auto_split: {:?}", brane.get_id(), auto_split ), Task::ChangeConfig(_) => write!(f, "[split check worker] Change Config Task"), #[causet(any(test, feature = "testexport"))] Task::Validate(_) => write!(f, "[split check worker] Validate config"), } } } pub struct Runner<E, S> where E: CausetEngine, { engine: E, router: S, interlock: InterlockHost<E>, causet: Config, } impl<E, S> Runner<E, S> where E: CausetEngine, S: CasualRouter<E>, { pub fn new(engine: E, router: S, interlock: InterlockHost<E>, causet: Config) -> Runner<E, S> { Runner { engine, router, interlock, causet, } } fn check_split(&mut self, brane: &Brane, auto_split: bool, policy: CheckPolicy) { let brane_id = brane.get_id(); let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane); let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane); debug!( "executing task"; "brane_id" => brane_id, "spacelike_key" => log_wrappers::Key(&spacelike_key), "lightlike_key" => log_wrappers::Key(&lightlike_key), ); CHECK_SPILT_COUNTER.all.inc(); let mut host = self.interlock.new_split_checker_host( &self.causet, brane, &self.engine, auto_split, policy, ); if host.skip() { debug!("skip split check"; "brane_id" => brane.get_id()); return; } let split_tuplespaceInstanton = match host.policy() { CheckPolicy::Scan => { match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, &lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } CheckPolicy::Approximate => match host.approximate_split_tuplespaceInstanton(brane, &self.engine) { Ok(tuplespaceInstanton) => tuplespaceInstanton .into_iter() .map(|k| tuplespaceInstanton::origin_key(&k).to_vec()) .collect(), Err(e) => { error!(%e; "failed to get approximate split key, try scan way"; "brane_id" => brane_id, ); match self.scan_split_tuplespaceInstanton(&mut host, brane, &spacelike_key, 
&lightlike_key) { Ok(tuplespaceInstanton) => tuplespaceInstanton, Err(e) => { error!(%e; "failed to scan split key"; "brane_id" => brane_id,); return; } } } }, CheckPolicy::Usekey => vec![], }; if !split_tuplespaceInstanton.is_empty() { let brane_epoch = brane.get_brane_epoch().clone(); let msg = new_split_brane(brane_epoch, split_tuplespaceInstanton); let res = self.router.lightlike(brane_id, msg); if let Err(e) = res { warn!("failed to lightlike check result"; "brane_id" => brane_id, "err" => %e); } CHECK_SPILT_COUNTER.success.inc(); } else { debug!( "no need to lightlike, split key not found"; "brane_id" => brane_id, ); CHECK_SPILT_COUNTER.ignore.inc(); } } fn scan_split_tuplespaceInstanton( &self, host: &mut SplitCheckerHost<'_, E>, brane: &Brane, spacelike_key: &[u8], lightlike_key: &[u8], ) -> Result<Vec<Vec<u8>>> { let timer = CHECK_SPILT_HISTOGRAM.spacelike_coarse_timer(); MergedIterator::<<E as Iterable>::Iteron>::new( &self.engine, LARGE_CausetS, spacelike_key, lightlike_key, false, ) .map(|mut iter| { let mut size = 0; let mut tuplespaceInstanton = 0; while let Some(e) = iter.next() { if host.on_kv(brane, &e) { return; } size += e.entry_size() as u64; tuplespaceInstanton += 1; } info!( "fidelio approximate size and tuplespaceInstanton with accurate value"; "brane_id" => brane.get_id(), "size" => size, "tuplespaceInstanton" => tuplespaceInstanton, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateSize { size }, ); let _ = self.router.lightlike( brane.get_id(), CasualMessage::BraneApproximateTuplespaceInstanton { tuplespaceInstanton }, ); })?; timer.observe_duration(); Ok(host.split_tuplespaceInstanton()) }
// Closes the preceding `impl Runner` block.
}

/// Worker-thread entry point: dispatches each queued `Task` to the matching
/// `Runner` handler.
impl<E, S> Runnable for Runner<E, S>
where
    E: CausetEngine,
    S: CasualRouter<E>,
{
    type Task = Task;

    fn run(&mut self, task: Task) {
        match task {
            // A split-check request for one brane.
            Task::SplitCheckTask {
                brane,
                auto_split,
                policy,
            } => self.check_split(&brane, auto_split, policy),
            // Dynamic configuration change delivered at runtime.
            Task::ChangeConfig(c) => self.change_causet(c),
            // Test-only hook: lets tests inspect the live config.
            #[causet(any(test, feature = "testexport"))]
            Task::Validate(f) => f(&self.causet),
        }
    }
}

/// Builds the casual message that asks the store to split a brane at the
/// given keys. No completion callback is attached (`Callback::None`).
fn new_split_brane<E>(brane_epoch: BraneEpoch, split_tuplespaceInstanton: Vec<Vec<u8>>) -> CasualMessage<E>
where
    E: CausetEngine,
{
    CasualMessage::SplitBrane {
        brane_epoch,
        split_tuplespaceInstanton,
        callback: Callback::None,
    }
}
/// Merges a runtime configuration change into the live split-check config.
///
/// The incoming change is logged before it is applied, so the log always
/// records what was about to take effect.
fn change_causet(&mut self, change: ConfigChange) {
    info!("split check config fideliod"; "change" => ?change);
    self.causet.fidelio(change);
}
function_block-full_function
[ { "content": "fn last_key_of_brane(db: &impl CausetEngine, brane: &Brane) -> Result<Option<Vec<u8>>> {\n\n let spacelike_key = tuplespaceInstanton::enc_spacelike_key(brane);\n\n let lightlike_key = tuplespaceInstanton::enc_lightlike_key(brane);\n\n let mut last_key = None;\n\n\n\n let iter_opt = Ite...
Rust
src/senet/renderer.rs
raybritton/rust_games_proto
5268bf7d37836d7e7bc1038ebbf49abdee1c649e
use crate::boards::idx_coord::BoardCoord; use crate::boards::{board_cols, board_rows}; use crate::constants::colors::{ BROWN, CREAM, LIGHT_BLUE, LIGHT_GRAY, PIECE_COMPUTER, PIECE_HUMAN, RED, WHITE, }; use crate::constants::Direction; use crate::senet::rules::{HOUSE_BEAUTY, HOUSE_HAPPINESS, HOUSE_REBIRTH, HOUSE_WATER}; use crate::senet::{Move, Square, State}; use crate::system::letter_mesh::make_letter_mesh; use crate::system::math::{pt, Offset, OffsetTuple, Point}; use crate::system::mesh_helper::MeshHelper; use crate::system::TurnState::{SelectingMove, SelectingPiece}; use ggez::graphics::DrawMode; use ggez::{Context, GameResult}; pub(super) fn render( ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State, ) -> GameResult<()> { let cell_size = mesh_helper.calc_width(0.09); let stick_width = mesh_helper.calc_height(0.015); let stick_height = mesh_helper.calc_height(0.1); let msg_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.45)); let stick_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.5)); let board_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_width(0.05)); let human = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.3, DrawMode::fill())?; let computer = mesh_helper.make_triangle(ctx, cell_size * 0.6, cell_size * 0.6, Direction::Up)?; let grid = mesh_helper.make_grid( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, board_cols(), board_rows(), 2., LIGHT_GRAY, None, )?; let rect = mesh_helper.make_rect( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, DrawMode::stroke(2.), )?; mesh_helper.draw_mesh(ctx, grid.as_ref(), board_start); mesh_helper.draw_mesh(ctx, rect.as_ref(), board_start); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_WATER, "WATER", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_HAPPINESS, "HAPPINESS", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_BEAUTY, "BEAUTY", ); 
draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_REBIRTH, "REBIRTH", ); mesh_helper.draw_white_text( ctx, "WATER", Point::from(BoardCoord::from(HOUSE_WATER)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, ); state.board.iter().enumerate().for_each(|(idx, square)| { let result = match square { Square::Empty => None, Square::Human => Some((human.as_ref(), PIECE_HUMAN, board_start)), Square::Computer => Some(( computer.as_ref(), PIECE_COMPUTER, board_start.offset(cell_size * 0.16, cell_size * 0.15), )), }; if let Some((mesh, colour, offset)) = result { let pos = Point::from(BoardCoord::from(idx)) .multiply(cell_size, cell_size) .offset_point(offset); mesh_helper.draw_coloured_mesh(ctx, mesh, pos, colour); } }); if state.play_state.is_either(SelectingPiece) && state.roll.is_some() { state .cursor .render(ctx, mesh_helper, board_start, cell_size)?; for mov in state.get_moves_for_selected_piece() { draw_move(ctx, mesh_helper, cell_size, board_start, &mov, false)?; } } else if state.play_state.is_either(SelectingMove) { state .cursor .render_dark(ctx, mesh_helper, board_start, cell_size)?; draw_move( ctx, mesh_helper, cell_size, board_start, &state.get_selected_move(), true, )?; } if let Some(roll) = state.roll { let stick = mesh_helper.make_rect(ctx, stick_width, stick_height, DrawMode::fill())?; for i in 0..4 { let colour = if i < roll && roll != 5 { CREAM } else { BROWN }; mesh_helper.draw_coloured_mesh( ctx, stick.as_ref(), stick_start.offset(stick_width * 1.5 * i as f32, 0.), colour, ); } } if let Some(msg) = &state.msg { mesh_helper.draw_white_text(ctx, msg, msg_start, 20., false); } Ok(()) } fn draw_cell_text( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, index: usize, text: &str, ) { mesh_helper.draw_white_text( ctx, text, Point::from(BoardCoord::from(index)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, 
); } fn draw_move( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, mov: &Move, highlight: bool, ) -> GameResult<()> { let move_mesh = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.1, DrawMode::stroke(1.))?; let capture_mesh = make_letter_mesh(ctx, mesh_helper, cell_size * 0.3, 'x')?; let point = Point::from(BoardCoord::from(mov.dest)) .multiply(cell_size, cell_size) .offset_point(board_start); let (mesh, colour, pt) = if mov.exchange { ( capture_mesh.as_ref(), RED, point.offset(cell_size * 0.35, cell_size * 0.35), ) } else { (move_mesh.as_ref(), WHITE, point) }; mesh_helper.draw_coloured_mesh(ctx, mesh, pt, if highlight { LIGHT_BLUE } else { colour }); Ok(()) }
use crate::boards::idx_coord::BoardCoord; use crate::boards::{board_cols, board_rows}; use crate::constants::colors::{ BROWN, CREAM, LIGHT_BLUE, LIGHT_GRAY, PIECE_COMPUTER, PIECE_HUMAN, RED, WHITE, }; use crate::constants::Direction; use crate::senet::rules::{HOUSE_BEAUTY, HOUSE_HAPPINESS, HOUSE_REBIRTH, HOUSE_WATER}; use crate::senet::{Move, Square, State}; use crate::system::letter_mesh::make_letter_mesh; use crate::system::math::{pt, Offset, OffsetTuple, Point}; use crate::system::mesh_helper::MeshHelper; use crate::system::TurnState::{SelectingMove, SelectingPiece}; use ggez::graphics::DrawMode; use ggez::{Context, GameResult}; pub(super) fn render( ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State, ) -> GameResult<()> { let cell_size = mesh_helper.calc_width(0.09); let stick_width = mesh_helper.calc_height(0.015); let stick_height = mesh_helper.calc_height(0.1); let msg_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.45)); let stick_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_height(0.5)); let board_start = pt(mesh_helper.calc_width(0.05), mesh_helper.calc_width(0.05)); let human = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.3, DrawMode::fill())?; let computer = mesh_helper.make_triangle(ctx, cell_size * 0.6, cell_size * 0.6, Direction::Up)?; let grid = mesh_helper.make_grid( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, board_cols(), board_rows(), 2., LIGHT_GRAY, None, )?; let rect = mesh_helper.make_rect( ctx, cell_size * board_cols() as f32, cell_size * board_rows() as f32, DrawMode::stroke(2.), )?; mesh_helper.draw_mesh(ctx, grid.as_ref(), board_start); mesh_helper.draw_mesh(ctx, rect.as_ref(), board_start); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_WATER, "WATER", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_HAPPINESS, "HAPPINESS", ); draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_BEAUTY, "BEAUTY", ); 
draw_cell_text( ctx, mesh_helper, cell_size, board_start, HOUSE_REBIRTH, "REBIRTH", ); mesh_helper.draw_white_text( ctx, "WATER", Point::from(BoardCoord::from(HOUSE_WATER)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, ); state.board.iter().enumerate().for_each(|(idx, square)| { let result =
; if let Some((mesh, colour, offset)) = result { let pos = Point::from(BoardCoord::from(idx)) .multiply(cell_size, cell_size) .offset_point(offset); mesh_helper.draw_coloured_mesh(ctx, mesh, pos, colour); } }); if state.play_state.is_either(SelectingPiece) && state.roll.is_some() { state .cursor .render(ctx, mesh_helper, board_start, cell_size)?; for mov in state.get_moves_for_selected_piece() { draw_move(ctx, mesh_helper, cell_size, board_start, &mov, false)?; } } else if state.play_state.is_either(SelectingMove) { state .cursor .render_dark(ctx, mesh_helper, board_start, cell_size)?; draw_move( ctx, mesh_helper, cell_size, board_start, &state.get_selected_move(), true, )?; } if let Some(roll) = state.roll { let stick = mesh_helper.make_rect(ctx, stick_width, stick_height, DrawMode::fill())?; for i in 0..4 { let colour = if i < roll && roll != 5 { CREAM } else { BROWN }; mesh_helper.draw_coloured_mesh( ctx, stick.as_ref(), stick_start.offset(stick_width * 1.5 * i as f32, 0.), colour, ); } } if let Some(msg) = &state.msg { mesh_helper.draw_white_text(ctx, msg, msg_start, 20., false); } Ok(()) } fn draw_cell_text( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, index: usize, text: &str, ) { mesh_helper.draw_white_text( ctx, text, Point::from(BoardCoord::from(index)) .multiply(cell_size, cell_size) .offset_point(board_start) .offset(cell_size * 0.5, 8.), 12., true, ); } fn draw_move( ctx: &mut Context, mesh_helper: &mut MeshHelper, cell_size: f32, board_start: Point, mov: &Move, highlight: bool, ) -> GameResult<()> { let move_mesh = mesh_helper.make_circle(ctx, cell_size, cell_size * 0.1, DrawMode::stroke(1.))?; let capture_mesh = make_letter_mesh(ctx, mesh_helper, cell_size * 0.3, 'x')?; let point = Point::from(BoardCoord::from(mov.dest)) .multiply(cell_size, cell_size) .offset_point(board_start); let (mesh, colour, pt) = if mov.exchange { ( capture_mesh.as_ref(), RED, point.offset(cell_size * 0.35, cell_size * 0.35), ) } else { 
(move_mesh.as_ref(), WHITE, point) }; mesh_helper.draw_coloured_mesh(ctx, mesh, pt, if highlight { LIGHT_BLUE } else { colour }); Ok(()) }
match square { Square::Empty => None, Square::Human => Some((human.as_ref(), PIECE_HUMAN, board_start)), Square::Computer => Some(( computer.as_ref(), PIECE_COMPUTER, board_start.offset(cell_size * 0.16, cell_size * 0.15), )), }
if_condition
[ { "content": "fn render(ctx: &mut Context, mesh_helper: &mut MeshHelper, state: &State) -> GameResult<()> {\n\n let menu_start = pt(34., 100.);\n\n let cursor_start = pt(16., 100.);\n\n let cursor = mesh_helper.make_triangle(ctx, 12., 12., Direction::Right)?;\n\n\n\n mesh_helper.draw_mesh(\n\n ...
Rust
mmids-core/src/endpoints/rtmp_server/actor/tests/rtmp_client.rs
AircastDev/mmids
c304d67d1498f7526e5186d315f07986aade1984
use crate::net::tcp::{OutboundPacket, RequestFailureReason, TcpSocketRequest, TcpSocketResponse}; use crate::net::ConnectionId; use crate::test_utils; use bytes::Bytes; use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType}; use rml_rtmp::sessions::{ ClientSession, ClientSessionConfig, ClientSessionError, ClientSessionEvent, ClientSessionResult, PublishRequestType, StreamMetadata, }; use rml_rtmp::time::RtmpTimestamp; use std::net::{SocketAddr, SocketAddrV4}; use std::time::Duration; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio::time::timeout; pub const CONNECTION_ID: &'static str = "test-1234"; pub struct RtmpTestClient { socket_manager_receiver: UnboundedReceiver<TcpSocketRequest>, socket_manager_response_sender: Option<UnboundedSender<TcpSocketResponse>>, port: Option<u16>, connection: Option<Connection>, } struct Connection { incoming_bytes: UnboundedSender<Bytes>, outgoing_bytes: UnboundedReceiver<OutboundPacket>, session: ClientSession, } impl RtmpTestClient { pub fn new() -> (Self, UnboundedSender<TcpSocketRequest>) { let (sender, receiver) = unbounded_channel(); let client = RtmpTestClient { socket_manager_receiver: receiver, socket_manager_response_sender: None, port: None, connection: None, }; (client, sender) } pub async fn accept_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestAccepted {}); 
self.socket_manager_response_sender = Some(response_channel); self.port = Some(port); } } } pub async fn deny_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestDenied { reason: RequestFailureReason::PortInUse, }); } } } pub async fn expect_empty_request_channel(&mut self) { test_utils::expect_mpsc_timeout(&mut self.socket_manager_receiver).await; } pub async fn assert_connection_sender_closed(&mut self) { let connection = self .connection .as_mut() .expect("Connection not established yet"); match timeout( Duration::from_millis(10), connection.incoming_bytes.closed(), ) .await { Ok(()) => return, Err(_) => panic!("Response sender not closed as expected (not disconnected"), } } pub async fn perform_handshake(&mut self) { if self.connection.is_some() { panic!("Only one connection is supported at a time"); } let connection_id = ConnectionId(CONNECTION_ID.to_string()); let (incoming_sender, incoming_receiver) = unbounded_channel(); let (outgoing_sender, mut outgoing_receiver) = unbounded_channel(); self.socket_manager_response_sender .as_ref() .unwrap() .send(TcpSocketResponse::NewConnection { port: self.port.unwrap(), connection_id: connection_id.clone(), incoming_bytes: incoming_receiver, outgoing_bytes: outgoing_sender, socket_address: SocketAddr::V4(SocketAddrV4::new([127, 0, 0, 1].into(), 1234)), }) .expect("Failed to send new connection signal"); let mut handshake = 
Handshake::new(PeerType::Client); let p0_and_p1 = handshake .generate_outbound_p0_and_p1() .expect("Failed to generate p0 and p1"); incoming_sender .send(Bytes::from(p0_and_p1)) .expect("incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process received p0 and p1 packet"); let response_bytes = match result { HandshakeProcessResult::InProgress { response_bytes } => response_bytes, HandshakeProcessResult::Completed { .. } => { panic!("Did not expect to be completed after first packet") } }; incoming_sender .send(Bytes::from(response_bytes)) .expect("Incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process p2 packet"); match result { HandshakeProcessResult::InProgress { .. } => { panic!("Did not expect to still be in progress after 2nd packet") } HandshakeProcessResult::Completed { remaining_bytes, .. 
} => { if remaining_bytes.len() > 0 { panic!("Expected no leftover bytes after handshake completed"); } } } let (mut session, client_results) = ClientSession::new(ClientSessionConfig::new()) .expect("Failed to generate client session"); for result in client_results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } x => panic!("Unexpected session result of {:?}", x), } } loop { let packet = match timeout(Duration::from_millis(10), outgoing_receiver.recv()).await { Ok(Some(packet)) => packet, Ok(None) => panic!("outgoing receiver sender closed"), Err(_) => break, }; let results = session .handle_input(&packet.bytes) .expect("Error processing bytes"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } _ => (), } } } self.connection = Some(Connection { session, incoming_bytes: incoming_sender, outgoing_bytes: outgoing_receiver, }) } pub async fn connect_to_app(&mut self, app: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_connection(app)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let response = test_utils::expect_mpsc_response(&mut connection.outgoing_bytes).await; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); let mut event_raised = false; for result in results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::ConnectionRequestAccepted, ) => event_raised = true, _ => (), } } if !event_raised { panic!("No connection request accepted event raised"); } } } pub async fn publish_to_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| { session.request_publishing(stream_key, PublishRequestType::Live) }); let 
receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } assert_eq!(all_results.len(), 1, "Only one result expected"); match all_results.remove(0) { ClientSessionResult::RaisedEvent(ClientSessionEvent::PublishRequestAccepted) => (), result => panic!("Unexpected result seen: {:?}", result), } } } pub async fn watch_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_playback(stream_key)); let receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } let mut accepted_event_received = false; for result in all_results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::PlaybackRequestAccepted, ) => accepted_event_received = true, _ => (), } } assert!( 
accepted_event_received, "PlaybackRequestAccepted event not raised" ); } } pub async fn stop_watching(&mut self) { self.execute_session_method_vec_result(|session| session.stop_playback()); } pub fn disconnect(&mut self) { self.connection = None; } pub async fn stop_publishing(&mut self) { self.execute_session_method_vec_result(|session| session.stop_publishing()); } pub fn publish_metadata(&mut self, metadata: StreamMetadata) { self.execute_session_method_single_result(|session| session.publish_metadata(&metadata)); } pub fn publish_video(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_video_data(data, timestamp, false) }); } pub fn publish_audio(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_audio_data(data, timestamp, false) }); } pub fn execute_session_method_single_result( &mut self, function: impl FnOnce(&mut ClientSession) -> Result<ClientSessionResult, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let result = function(&mut connection.session).expect("Client session returned error"); match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send stop publishing command"), x => panic!("Unexpected session result: {:?}", x), } } fn execute_session_method_vec_result( &mut self, function: impl FnOnce( &mut ClientSession, ) -> Result<Vec<ClientSessionResult>, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let results = function(&mut connection.session).expect("Client session returned error"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send packet"), x => panic!("Unexpected session 
result: {:?}", x), } } } pub async fn get_next_event(&mut self) -> Option<ClientSessionEvent> { let connection = self .connection .as_mut() .expect("Connection not established yet"); loop { let packet = match timeout(Duration::from_millis(10), connection.outgoing_bytes.recv()).await { Ok(Some(packet)) => packet, _ => break, }; let results = connection .session .handle_input(&packet.bytes) .expect("Failed to handle packet"); for result in results { match result { ClientSessionResult::RaisedEvent(event) => return Some(event), _ => (), } } } return None; } }
use crate::net::tcp::{OutboundPacket, RequestFailureReason, TcpSocketRequest, TcpSocketResponse}; use crate::net::ConnectionId; use crate::test_utils; use bytes::Bytes; use rml_rtmp::handshake::{Handshake, HandshakeProcessResult, PeerType}; use rml_rtmp::sessions::{ ClientSession, ClientSessionConfig, ClientSessionError, ClientSessionEvent, ClientSessionResult, PublishRequestType, StreamMetadata, }; use rml_rtmp::time::RtmpTimestamp; use std::net::{SocketAddr, SocketAddrV4}; use std::time::Duration; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; use tokio::time::timeout; pub const CONNECTION_ID: &'static str = "test-1234"; pub struct RtmpTestClient { socket_manager_receiver: UnboundedReceiver<TcpSocketRequest>, socket_manager_response_sender: Option<UnboundedSender<TcpSocketResponse>>, port: Option<u16>, connection: Option<Connection>, } struct Connection { incoming_bytes: UnboundedSender<Bytes>, outgoing_bytes: UnboundedReceiver<OutboundPacket>, session: ClientSession, } impl RtmpTestClient { pub fn new() -> (Self, UnboundedSender<TcpSocketRequest>) { le
pub async fn accept_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestAccepted {}); self.socket_manager_response_sender = Some(response_channel); self.port = Some(port); } } } pub async fn deny_port_request(&mut self, port: u16, use_tls: bool) { let request = test_utils::expect_mpsc_response(&mut self.socket_manager_receiver).await; match request { TcpSocketRequest::OpenPort { port: requested_port, use_tls: requested_tls, response_channel, } => { assert_eq!( requested_port, port, "Requested port was not the expected port" ); assert_eq!( requested_tls, use_tls, "Requested TLS flag was not expected" ); if response_channel.is_closed() { panic!("Response channel was closed"); } if self.socket_manager_response_sender.is_some() { panic!("Port already registered"); } let _ = response_channel.send(TcpSocketResponse::RequestDenied { reason: RequestFailureReason::PortInUse, }); } } } pub async fn expect_empty_request_channel(&mut self) { test_utils::expect_mpsc_timeout(&mut self.socket_manager_receiver).await; } pub async fn assert_connection_sender_closed(&mut self) { let connection = self .connection .as_mut() .expect("Connection not established yet"); match timeout( Duration::from_millis(10), connection.incoming_bytes.closed(), ) .await { Ok(()) => return, Err(_) => panic!("Response sender not closed as expected (not disconnected"), } } pub async fn perform_handshake(&mut self) { if 
self.connection.is_some() { panic!("Only one connection is supported at a time"); } let connection_id = ConnectionId(CONNECTION_ID.to_string()); let (incoming_sender, incoming_receiver) = unbounded_channel(); let (outgoing_sender, mut outgoing_receiver) = unbounded_channel(); self.socket_manager_response_sender .as_ref() .unwrap() .send(TcpSocketResponse::NewConnection { port: self.port.unwrap(), connection_id: connection_id.clone(), incoming_bytes: incoming_receiver, outgoing_bytes: outgoing_sender, socket_address: SocketAddr::V4(SocketAddrV4::new([127, 0, 0, 1].into(), 1234)), }) .expect("Failed to send new connection signal"); let mut handshake = Handshake::new(PeerType::Client); let p0_and_p1 = handshake .generate_outbound_p0_and_p1() .expect("Failed to generate p0 and p1"); incoming_sender .send(Bytes::from(p0_and_p1)) .expect("incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process received p0 and p1 packet"); let response_bytes = match result { HandshakeProcessResult::InProgress { response_bytes } => response_bytes, HandshakeProcessResult::Completed { .. } => { panic!("Did not expect to be completed after first packet") } }; incoming_sender .send(Bytes::from(response_bytes)) .expect("Incoming bytes channel closed"); let response = test_utils::expect_mpsc_response(&mut outgoing_receiver).await; let result = handshake .process_bytes(&response.bytes) .expect("Failed to process p2 packet"); match result { HandshakeProcessResult::InProgress { .. } => { panic!("Did not expect to still be in progress after 2nd packet") } HandshakeProcessResult::Completed { remaining_bytes, .. 
} => { if remaining_bytes.len() > 0 { panic!("Expected no leftover bytes after handshake completed"); } } } let (mut session, client_results) = ClientSession::new(ClientSessionConfig::new()) .expect("Failed to generate client session"); for result in client_results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } x => panic!("Unexpected session result of {:?}", x), } } loop { let packet = match timeout(Duration::from_millis(10), outgoing_receiver.recv()).await { Ok(Some(packet)) => packet, Ok(None) => panic!("outgoing receiver sender closed"), Err(_) => break, }; let results = session .handle_input(&packet.bytes) .expect("Error processing bytes"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => { incoming_sender .send(Bytes::from(packet.bytes)) .expect("Incoming bytes channel closed"); } _ => (), } } } self.connection = Some(Connection { session, incoming_bytes: incoming_sender, outgoing_bytes: outgoing_receiver, }) } pub async fn connect_to_app(&mut self, app: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_connection(app)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let response = test_utils::expect_mpsc_response(&mut connection.outgoing_bytes).await; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); let mut event_raised = false; for result in results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::ConnectionRequestAccepted, ) => event_raised = true, _ => (), } } if !event_raised { panic!("No connection request accepted event raised"); } } } pub async fn publish_to_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| { session.request_publishing(stream_key, PublishRequestType::Live) }); let 
receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } assert_eq!(all_results.len(), 1, "Only one result expected"); match all_results.remove(0) { ClientSessionResult::RaisedEvent(ClientSessionEvent::PublishRequestAccepted) => (), result => panic!("Unexpected result seen: {:?}", result), } } } pub async fn watch_stream_key(&mut self, stream_key: String, should_succeed: bool) { self.execute_session_method_single_result(|session| session.request_playback(stream_key)); let receiver = &mut self.connection.as_mut().unwrap().outgoing_bytes; let response = test_utils::expect_mpsc_response(receiver).await; self.execute_session_method_vec_result(|session| session.handle_input(&response.bytes)); if should_succeed { let connection = self.connection.as_mut().unwrap(); let mut all_results = Vec::new(); loop { let response = match timeout( Duration::from_millis(10), connection.outgoing_bytes.recv(), ) .await { Ok(Some(response)) => response, Ok(None) => panic!("Outgoing bytes channel closed"), Err(_) => break, }; let results = connection .session .handle_input(&response.bytes) .expect("Failed to process results"); all_results.extend(results); } let mut accepted_event_received = false; for result in all_results { match result { ClientSessionResult::RaisedEvent( ClientSessionEvent::PlaybackRequestAccepted, ) => accepted_event_received = true, _ => (), } } assert!( 
accepted_event_received, "PlaybackRequestAccepted event not raised" ); } } pub async fn stop_watching(&mut self) { self.execute_session_method_vec_result(|session| session.stop_playback()); } pub fn disconnect(&mut self) { self.connection = None; } pub async fn stop_publishing(&mut self) { self.execute_session_method_vec_result(|session| session.stop_publishing()); } pub fn publish_metadata(&mut self, metadata: StreamMetadata) { self.execute_session_method_single_result(|session| session.publish_metadata(&metadata)); } pub fn publish_video(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_video_data(data, timestamp, false) }); } pub fn publish_audio(&mut self, data: Bytes, timestamp: RtmpTimestamp) { self.execute_session_method_single_result(|session| { session.publish_audio_data(data, timestamp, false) }); } pub fn execute_session_method_single_result( &mut self, function: impl FnOnce(&mut ClientSession) -> Result<ClientSessionResult, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let result = function(&mut connection.session).expect("Client session returned error"); match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send stop publishing command"), x => panic!("Unexpected session result: {:?}", x), } } fn execute_session_method_vec_result( &mut self, function: impl FnOnce( &mut ClientSession, ) -> Result<Vec<ClientSessionResult>, ClientSessionError>, ) { let connection = self .connection .as_mut() .expect("Connection not established yet"); let results = function(&mut connection.session).expect("Client session returned error"); for result in results { match result { ClientSessionResult::OutboundResponse(packet) => connection .incoming_bytes .send(Bytes::from(packet.bytes)) .expect("Failed to send packet"), x => panic!("Unexpected session 
result: {:?}", x), } } } pub async fn get_next_event(&mut self) -> Option<ClientSessionEvent> { let connection = self .connection .as_mut() .expect("Connection not established yet"); loop { let packet = match timeout(Duration::from_millis(10), connection.outgoing_bytes.recv()).await { Ok(Some(packet)) => packet, _ => break, }; let results = connection .session .handle_input(&packet.bytes) .expect("Failed to handle packet"); for result in results { match result { ClientSessionResult::RaisedEvent(event) => return Some(event), _ => (), } } } return None; } }
t (sender, receiver) = unbounded_channel(); let client = RtmpTestClient { socket_manager_receiver: receiver, socket_manager_response_sender: None, port: None, connection: None, }; (client, sender) }
function_block-function_prefixed
[ { "content": "/// Quick function to create an un-named gstreamer element, while providing a consumable error\n\n/// if that fails.\n\npub fn create_gst_element(name: &str) -> Result<Element> {\n\n ElementFactory::make(name, None).with_context(|| format!(\"Failed to create element '{}'\", name))\n\n}\n\n\n", ...
Rust
rafx-framework/src/visibility/visibility_object_arc.rs
aclysma/renderer_prototype
a274b82c873c0ec7f9d6c3376cd054bfccfe3895
use crate::render_features::RenderObjectHandle; use crate::visibility::visibility_object_allocator::VisibilityObjectId; use crate::visibility::ObjectId; use crossbeam_channel::Sender; use glam::{Quat, Vec3}; use rafx_visibility::geometry::Transform; use rafx_visibility::{ AsyncCommand, ModelHandle, PolygonSoup, VisibilityObjectHandle, VisibleBounds, ZoneHandle, }; use slotmap::Key; use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::{Arc, Weak}; pub enum CullModel { Mesh(PolygonSoup), VisibleBounds(VisibleBounds), Sphere(f32), Quad(f32, f32), None, } impl CullModel { pub fn mesh(polygon_soup: PolygonSoup) -> CullModel { CullModel::Mesh(polygon_soup) } pub fn visible_bounds(model: VisibleBounds) -> CullModel { CullModel::VisibleBounds(model) } pub fn sphere(radius: f32) -> CullModel { CullModel::Sphere(radius) } pub fn quad( width: f32, height: f32, ) -> CullModel { CullModel::Quad(width, height) } pub fn none() -> CullModel { CullModel::None } } pub struct VisibilityObjectArcInner { object: VisibilityObjectRaii, visibility_object_id: AtomicU64, drop_tx: Sender<VisibilityObjectId>, } impl Drop for VisibilityObjectArcInner { fn drop(&mut self) { let _ = self .drop_tx .send(VisibilityObjectId::from(slotmap::KeyData::from_ffi( self.visibility_object_id.load(Ordering::Relaxed), ))); } } pub struct VisibilityObjectWeakArcInner { inner: Weak<VisibilityObjectArcInner>, } impl VisibilityObjectWeakArcInner { pub fn upgrade(&self) -> Option<VisibilityObjectArc> { self.inner .upgrade() .map(|inner| VisibilityObjectArc { inner }) } } #[derive(Clone)] pub struct VisibilityObjectArc { inner: Arc<VisibilityObjectArcInner>, } impl VisibilityObjectArc { pub(crate) fn new( object: VisibilityObjectRaii, drop_tx: Sender<VisibilityObjectId>, ) -> Self { Self { inner: Arc::new(VisibilityObjectArcInner { object, visibility_object_id: AtomicU64::default(), drop_tx, }), } } pub fn downgrade(&self) -> VisibilityObjectWeakArcInner { VisibilityObjectWeakArcInner { inner: 
Arc::downgrade(&self.inner), } } pub(super) fn set_visibility_object_id( &self, visibility_object_id: VisibilityObjectId, ) { self.inner .visibility_object_id .store(visibility_object_id.data().as_ffi(), Ordering::Relaxed); } #[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.inner.object.set_zone(zone); self } pub fn object_id(&self) -> ObjectId { self.inner.object.object_id() } pub fn visibility_object_handle(&self) -> VisibilityObjectHandle { self.inner.object.handle } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.inner.object.render_objects() } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.inner.object.set_cull_model(cull_model); self } pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.inner .object .set_transform(translation, rotation, scale); self } } pub struct VisibilityObjectRaii { commands: Sender<AsyncCommand>, handle: VisibilityObjectHandle, object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, } impl Drop for VisibilityObjectRaii { fn drop(&mut self) { let _ = self.commands.send(AsyncCommand::DestroyObject(self.handle)); } } impl VisibilityObjectRaii { pub fn new( object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, handle: VisibilityObjectHandle, commands: Sender<AsyncCommand>, ) -> Self { Self { commands, handle, object_id, render_objects, } } #[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectZone(self.handle, zone)) .expect("Unable to send SetObjectZone command."); self } pub fn object_id(&self) -> ObjectId { self.object_id } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.render_objects } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectCullModel(self.handle, cull_model)) .expect("Unable to send SetObjectCullModel command."); self 
} pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.commands .send(AsyncCommand::SetObjectTransform( self.handle, Transform { translation, rotation, scale, }, )) .expect("Unable to send SetObjectPosition command."); self } }
use crate::render_features::RenderObjectHandle; use crate::visibility::visibility_object_allocator::VisibilityObjectId; use crate::visibility::ObjectId; use crossbeam_channel::Sender; use glam::{Quat, Vec3}; use rafx_visibility::geometry::Transform; use rafx_visibility::{ AsyncCommand, ModelHandle, PolygonSoup, VisibilityObjectHandle, VisibleBounds, ZoneHandle, }; use slotmap::Key; use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::{Arc, Weak}; pub enum CullModel { Mesh(PolygonSoup), VisibleBounds(VisibleBounds), Sphere(f32), Quad(f32, f32), None, } impl CullModel { pub fn mesh(polygon_soup: PolygonSoup) -> CullModel { CullModel::Mesh(polygon_soup) } pub fn visible_bounds(model: VisibleBounds) -> CullModel { CullModel::VisibleBounds(model) } pub fn sphere(radius: f32) -> CullModel { CullModel::Sphere(radius) } pub fn quad( width: f32, height: f32, ) -> CullModel { CullModel::Quad(width, height) } pub fn none() -> CullModel { CullModel::None } } pub struct VisibilityObjectArcInner { object: VisibilityObjectRaii, visibility_object_id: AtomicU64, drop_tx: Sender<VisibilityObjectId>, } impl Drop for VisibilityObjectArcInner { fn drop(&mut self) { let _ = self .drop_tx .send(VisibilityObjectId::from(slotmap::KeyData::from_ffi( self.visibility_object_id.load(Ordering::Relaxed), ))); } } pub struct VisibilityObjectWeakArcInner { inner: Weak<VisibilityObjectArcInner>, } impl VisibilityObjectWeakArcInner { pub fn upgrade(&self) -> Option<VisibilityObjectArc> { self.inner .upgrade() .map(|inner| VisibilityObjectArc { inner }) } } #[derive(Clone)] pub struct VisibilityObjectArc { inner: Arc<VisibilityObjectArcInner>, } impl VisibilityObjectArc { pub(crate) fn new( object: VisibilityObjectRaii, drop_tx: Sender<VisibilityObjectId>, ) -> Self { Self { inner: Arc::new(VisibilityObjectArcInner { object, visibility_object_id: AtomicU64::default(), drop_tx, }), } } pub fn downgrade(&self) -> VisibilityObjectWeakArcInner { VisibilityObjectWeakArcInner { inner: 
Arc::downgrade(&self.inner), } } pub(super) fn set_visibility_object_id( &self, visibility_object_id: VisibilityObjectId, ) { self.inner .visibility_object_id .store(visibility_object_id.data().as_ffi(), Ordering::Relaxed); } #[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.inner.object.set_zone(zone); self } pub fn object_id(&self) -> ObjectId { self.inner.object.object_id() } pub fn visibility_object_handle(&self) -> VisibilityObjectHandle { self.inner.object.handle } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.inner.object.render_objects() } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.inner.object.set_cull_model(cull_model); self } pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.inner .object .set_transform(translation, rotation, scale); self } } pub struct VisibilityObjectRaii { commands: Sender<AsyncCommand>, handle: VisibilityObjectHandle, object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, } impl Drop for VisibilityObjectRaii { fn drop(&mut self) { let _ = self.commands.send(AsyncCommand::DestroyObject(self.handle)); } } impl VisibilityObjectRaii {
#[allow(dead_code)] pub(super) fn set_zone( &self, zone: Option<ZoneHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectZone(self.handle, zone)) .expect("Unable to send SetObjectZone command."); self } pub fn object_id(&self) -> ObjectId { self.object_id } pub fn render_objects(&self) -> &[RenderObjectHandle] { &self.render_objects } pub fn set_cull_model( &self, cull_model: Option<ModelHandle>, ) -> &Self { self.commands .send(AsyncCommand::SetObjectCullModel(self.handle, cull_model)) .expect("Unable to send SetObjectCullModel command."); self } pub fn set_transform( &self, translation: Vec3, rotation: Quat, scale: Vec3, ) -> &Self { self.commands .send(AsyncCommand::SetObjectTransform( self.handle, Transform { translation, rotation, scale, }, )) .expect("Unable to send SetObjectPosition command."); self } }
pub fn new( object_id: ObjectId, render_objects: Vec<RenderObjectHandle>, handle: VisibilityObjectHandle, commands: Sender<AsyncCommand>, ) -> Self { Self { commands, handle, object_id, render_objects, } }
function_block-full_function
[ { "content": "/// Call when winit sends an event\n\npub fn handle_sdl2_event(\n\n event: &Event,\n\n input_state: &mut InputState,\n\n) {\n\n let _is_close_requested = false;\n\n\n\n match event {\n\n Event::KeyDown {\n\n keycode, repeat: _, ..\n\n } => handle_keyboard_event...
Rust
core/bin/zksync_api/src/api_server/rest/v02/block.rs
w2k-star-forks/zksync
28b7402198a0eb5201f15e35183a73d4f6b983ca
use std::str::FromStr; use std::time::Instant; use actix_web::{web, Scope}; use zksync_api_types::v02::{ block::{BlockInfo, BlockStatus}, pagination::{parse_query, ApiEither, BlockAndTxHash, Paginated, PaginationQuery}, transaction::{Transaction, TxData, TxHashSerializeWrapper}, }; use zksync_crypto::{convert::FeConvert, Fr}; use zksync_storage::{chain::block::records::StorageBlockDetails, ConnectionPool, QueryResult}; use zksync_types::{tx::TxHash, BlockNumber, H256}; use super::{ error::{Error, InvalidDataError}, paginate_trait::Paginate, response::ApiResult, }; use crate::{api_try, utils::block_details_cache::BlockDetailsCache}; pub fn block_info_from_details(details: StorageBlockDetails) -> BlockInfo { let status = if details.is_verified() { BlockStatus::Finalized } else { BlockStatus::Committed }; BlockInfo { block_number: BlockNumber(details.block_number as u32), new_state_root: Fr::from_bytes(&details.new_state_root).unwrap_or_else(|err| { panic!( "Database provided an incorrect new_state_root field: {:?}, an error occurred {}", details.new_state_root, err ) }), block_size: details.block_size as u64, commit_tx_hash: details.commit_tx_hash.map(|bytes| H256::from_slice(&bytes)), verify_tx_hash: details.verify_tx_hash.map(|bytes| H256::from_slice(&bytes)), committed_at: details.committed_at, finalized_at: details.verified_at, status, } } #[derive(Debug, Clone)] struct ApiBlockData { pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache, } impl ApiBlockData { fn new(pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache) -> Self { Self { pool, verified_blocks_cache, } } async fn block_info(&self, block_number: BlockNumber) -> Result<Option<BlockInfo>, Error> { let details = self .verified_blocks_cache .get(&self.pool, block_number) .await .map_err(Error::storage)?; if let Some(details) = details { Ok(Some(block_info_from_details(details))) } else { Ok(None) } } async fn get_block_number_by_position( &self, block_position: &str, ) -> 
Result<BlockNumber, Error> { if let Ok(number) = u32::from_str(block_position) { Ok(BlockNumber(number)) } else { match block_position { "lastCommitted" => self .get_last_committed_block_number() .await .map_err(Error::storage), "lastFinalized" => self .get_last_finalized_block_number() .await .map_err(Error::storage), _ => Err(Error::from(InvalidDataError::InvalidBlockPosition)), } } } async fn block_page( &self, query: PaginationQuery<ApiEither<BlockNumber>>, ) -> Result<Paginated<BlockInfo, BlockNumber>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; storage.paginate_checked(&query).await } async fn transaction_page( &self, block_number: BlockNumber, query: PaginationQuery<ApiEither<TxHash>>, ) -> Result<Paginated<Transaction, TxHashSerializeWrapper>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; let new_query = PaginationQuery { from: BlockAndTxHash { block_number, tx_hash: query.from, }, limit: query.limit, direction: query.direction, }; storage.paginate_checked(&new_query).await } async fn tx_data( &self, block_number: BlockNumber, block_index: u64, ) -> Result<Option<TxData>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; Ok(storage .chain() .operations_ext_schema() .tx_data_by_block_and_index_api_v02(block_number, block_index) .await .map_err(Error::storage)?) 
} async fn get_last_committed_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_committed_confirmed_block() .await } async fn get_last_finalized_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_verified_confirmed_block() .await } } async fn block_pagination( data: web::Data<ApiBlockData>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<BlockInfo, BlockNumber>> { let start = Instant::now(); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.block_page(query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_pagination"); res } async fn block_by_position( data: web::Data<ApiBlockData>, block_position: web::Path<String>, ) -> ApiResult<Option<BlockInfo>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let res = data.block_info(block_number).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_by_position"); res } async fn block_transactions( data: web::Data<ApiBlockData>, block_position: web::Path<String>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<Transaction, TxHashSerializeWrapper>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.transaction_page(block_number, query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_transactions"); res } async fn transaction_in_block( data: web::Data<ApiBlockData>, path: web::Path<(BlockNumber, u64)>, ) -> ApiResult<Option<TxData>> { let start = Instant::now(); let (block_number, 
block_index) = *path; let res = api_try!(data.tx_data(block_number, block_index).await); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "transaction_in_block"); ApiResult::Ok(res) } pub fn api_scope(pool: ConnectionPool, cache: BlockDetailsCache) -> Scope { let data = ApiBlockData::new(pool, cache); web::scope("blocks") .app_data(web::Data::new(data)) .route("", web::get().to(block_pagination)) .route("{block_position}", web::get().to(block_by_position)) .route( "{block_position}/transactions", web::get().to(block_transactions), ) .route( "{block_position}/transactions/{block_index}", web::get().to(transaction_in_block), ) } #[cfg(test)] mod tests { use super::*; use crate::api_server::rest::v02::{ test_utils::{deserialize_response_result, TestServerConfig}, SharedData, }; use zksync_api_types::v02::{ pagination::PaginationDirection, transaction::TransactionData, ApiVersion, }; #[actix_rt::test] #[cfg_attr( not(feature = "api_test"), ignore = "Use `zk test rust-api` command to perform this test" )] async fn blocks_scope() -> anyhow::Result<()> { let cfg = TestServerConfig::default(); cfg.fill_database().await?; let shared_data = SharedData { net: cfg.config.chain.eth.network, api_version: ApiVersion::V02, }; let (client, server) = cfg.start_server( |cfg: &TestServerConfig| api_scope(cfg.pool.clone(), BlockDetailsCache::new(10)), Some(shared_data), ); let query = PaginationQuery { from: ApiEither::from(BlockNumber(1)), limit: 3, direction: PaginationDirection::Newer, }; let expected_blocks: Paginated<BlockInfo, BlockNumber> = { let mut storage = cfg.pool.access_storage().await?; storage .paginate_checked(&query) .await .map_err(|err| anyhow::anyhow!(err.message))? 
}; let response = client.block_by_position("2").await?; let block: BlockInfo = deserialize_response_result(response)?; assert_eq!(block, expected_blocks.list[1]); let response = client.block_pagination(&query).await?; let paginated: Paginated<BlockInfo, BlockNumber> = deserialize_response_result(response)?; assert_eq!(paginated, expected_blocks); let block_number = BlockNumber(3); let expected_txs = { let mut storage = cfg.pool.access_storage().await?; storage .chain() .block_schema() .get_block_transactions(block_number) .await? }; assert!(expected_txs.len() >= 3); let tx_hash_str = expected_txs.first().unwrap().tx_hash.as_str(); let tx_hash = TxHash::from_str(tx_hash_str).unwrap(); let query = PaginationQuery { from: ApiEither::from(tx_hash), limit: 2, direction: PaginationDirection::Older, }; let response = client .block_transactions(&query, &*block_number.to_string()) .await?; let paginated: Paginated<Transaction, TxHash> = deserialize_response_result(response)?; assert_eq!(paginated.pagination.count as usize, expected_txs.len()); assert_eq!(paginated.pagination.limit, query.limit); assert_eq!(paginated.list.len(), query.limit as usize); assert_eq!(paginated.pagination.direction, PaginationDirection::Older); assert_eq!(paginated.pagination.from, tx_hash); for (tx, expected_tx) in paginated.list.into_iter().zip(expected_txs.clone()) { assert_eq!( tx.tx_hash.to_string().replace("sync-tx:", "0x"), expected_tx.tx_hash ); assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } for expected_tx in expected_txs { if !expected_tx.success { continue; } let response = client .transaction_in_block( expected_tx.block_number as u32, expected_tx.block_index.unwrap() as u32, ) .await?; let tx: Option<TxData> = 
deserialize_response_result(response)?; let tx = tx.unwrap().tx; assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } server.stop().await; Ok(()) } }
use std::str::FromStr; use std::time::Instant; use actix_web::{web, Scope}; use zksync_api_types::v02::{ block::{BlockInfo, BlockStatus}, pagination::{parse_query, ApiEither, BlockAndTxHash, Paginated, PaginationQuery}, transaction::{Transaction, TxData, TxHashSerializeWrapper}, }; use zksync_crypto::{convert::FeConvert, Fr}; use zksync_storage::{chain::block::records::StorageBlockDetails, ConnectionPool, QueryResult}; use zksync_types::{tx::TxHash, BlockNumber, H256}; use super::{ error::{Error, InvalidDataError}, paginate_trait::Paginate, response::ApiResult, }; use crate::{api_try, utils::block_details_cache::BlockDetailsCache}; pub fn block_info_from_details(details: StorageBlockDetails) -> BlockInfo { let status = if details.is_verified() { BlockStatus::Finalized } else { BlockStatus::Committed }; BlockInfo { block_number: BlockNumber(details.block_number as u32), new_state_root: Fr::from_bytes(&details.new_state_root).unwrap_or_else(|err| { panic!( "Database provided an incorrect new_state_root field: {:?}, an error occurred {}", details.new_state_root, err ) }), block_size: details.block_size as u64, commit_tx_hash: details.commit_tx_hash.map(|bytes| H256::from_slice(&bytes)), verify_tx_hash: details.verify_tx_hash.map(|bytes| H256::from_slice(&bytes)), committed_at: details.committed_at, finalized_at: details.verified_at, status, } } #[derive(Debug, Clone)] struct ApiBlockData { pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache, } impl ApiBlockData { fn new(pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache) -> Self { Self { pool, verified_blocks_cache, } } async fn block_info(&self, block_number: BlockNumber) -> Result<Option<BlockInfo>, Error> { let details = self .verified_blocks_cache .get(&self.pool, block_number) .await .map_err(Error::storage)?; if let Some(details) = details { Ok(Some(block_info_from_details(details))) } else { Ok(None) } } async fn get_block_number_by_position( &self, block_position: &str, ) -> 
Result<BlockNumber, Error> { if let Ok(number) = u32::from_str(block_position) { Ok(BlockNumber(number)) } else { match block_positio
async fn block_page( &self, query: PaginationQuery<ApiEither<BlockNumber>>, ) -> Result<Paginated<BlockInfo, BlockNumber>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; storage.paginate_checked(&query).await } async fn transaction_page( &self, block_number: BlockNumber, query: PaginationQuery<ApiEither<TxHash>>, ) -> Result<Paginated<Transaction, TxHashSerializeWrapper>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; let new_query = PaginationQuery { from: BlockAndTxHash { block_number, tx_hash: query.from, }, limit: query.limit, direction: query.direction, }; storage.paginate_checked(&new_query).await } async fn tx_data( &self, block_number: BlockNumber, block_index: u64, ) -> Result<Option<TxData>, Error> { let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; Ok(storage .chain() .operations_ext_schema() .tx_data_by_block_and_index_api_v02(block_number, block_index) .await .map_err(Error::storage)?) 
} async fn get_last_committed_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_committed_confirmed_block() .await } async fn get_last_finalized_block_number(&self) -> QueryResult<BlockNumber> { let mut storage = self.pool.access_storage().await?; storage .chain() .block_schema() .get_last_verified_confirmed_block() .await } } async fn block_pagination( data: web::Data<ApiBlockData>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<BlockInfo, BlockNumber>> { let start = Instant::now(); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.block_page(query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_pagination"); res } async fn block_by_position( data: web::Data<ApiBlockData>, block_position: web::Path<String>, ) -> ApiResult<Option<BlockInfo>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let res = data.block_info(block_number).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_by_position"); res } async fn block_transactions( data: web::Data<ApiBlockData>, block_position: web::Path<String>, web::Query(query): web::Query<PaginationQuery<String>>, ) -> ApiResult<Paginated<Transaction, TxHashSerializeWrapper>> { let start = Instant::now(); let block_number = api_try!(data.get_block_number_by_position(&block_position).await); let query = api_try!(parse_query(query).map_err(Error::from)); let res = data.transaction_page(block_number, query).await.into(); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "block_transactions"); res } async fn transaction_in_block( data: web::Data<ApiBlockData>, path: web::Path<(BlockNumber, u64)>, ) -> ApiResult<Option<TxData>> { let start = Instant::now(); let (block_number, 
block_index) = *path; let res = api_try!(data.tx_data(block_number, block_index).await); metrics::histogram!("api", start.elapsed(), "type" => "v02", "endpoint_name" => "transaction_in_block"); ApiResult::Ok(res) } pub fn api_scope(pool: ConnectionPool, cache: BlockDetailsCache) -> Scope { let data = ApiBlockData::new(pool, cache); web::scope("blocks") .app_data(web::Data::new(data)) .route("", web::get().to(block_pagination)) .route("{block_position}", web::get().to(block_by_position)) .route( "{block_position}/transactions", web::get().to(block_transactions), ) .route( "{block_position}/transactions/{block_index}", web::get().to(transaction_in_block), ) } #[cfg(test)] mod tests { use super::*; use crate::api_server::rest::v02::{ test_utils::{deserialize_response_result, TestServerConfig}, SharedData, }; use zksync_api_types::v02::{ pagination::PaginationDirection, transaction::TransactionData, ApiVersion, }; #[actix_rt::test] #[cfg_attr( not(feature = "api_test"), ignore = "Use `zk test rust-api` command to perform this test" )] async fn blocks_scope() -> anyhow::Result<()> { let cfg = TestServerConfig::default(); cfg.fill_database().await?; let shared_data = SharedData { net: cfg.config.chain.eth.network, api_version: ApiVersion::V02, }; let (client, server) = cfg.start_server( |cfg: &TestServerConfig| api_scope(cfg.pool.clone(), BlockDetailsCache::new(10)), Some(shared_data), ); let query = PaginationQuery { from: ApiEither::from(BlockNumber(1)), limit: 3, direction: PaginationDirection::Newer, }; let expected_blocks: Paginated<BlockInfo, BlockNumber> = { let mut storage = cfg.pool.access_storage().await?; storage .paginate_checked(&query) .await .map_err(|err| anyhow::anyhow!(err.message))? 
}; let response = client.block_by_position("2").await?; let block: BlockInfo = deserialize_response_result(response)?; assert_eq!(block, expected_blocks.list[1]); let response = client.block_pagination(&query).await?; let paginated: Paginated<BlockInfo, BlockNumber> = deserialize_response_result(response)?; assert_eq!(paginated, expected_blocks); let block_number = BlockNumber(3); let expected_txs = { let mut storage = cfg.pool.access_storage().await?; storage .chain() .block_schema() .get_block_transactions(block_number) .await? }; assert!(expected_txs.len() >= 3); let tx_hash_str = expected_txs.first().unwrap().tx_hash.as_str(); let tx_hash = TxHash::from_str(tx_hash_str).unwrap(); let query = PaginationQuery { from: ApiEither::from(tx_hash), limit: 2, direction: PaginationDirection::Older, }; let response = client .block_transactions(&query, &*block_number.to_string()) .await?; let paginated: Paginated<Transaction, TxHash> = deserialize_response_result(response)?; assert_eq!(paginated.pagination.count as usize, expected_txs.len()); assert_eq!(paginated.pagination.limit, query.limit); assert_eq!(paginated.list.len(), query.limit as usize); assert_eq!(paginated.pagination.direction, PaginationDirection::Older); assert_eq!(paginated.pagination.from, tx_hash); for (tx, expected_tx) in paginated.list.into_iter().zip(expected_txs.clone()) { assert_eq!( tx.tx_hash.to_string().replace("sync-tx:", "0x"), expected_tx.tx_hash ); assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } for expected_tx in expected_txs { if !expected_tx.success { continue; } let response = client .transaction_in_block( expected_tx.block_number as u32, expected_tx.block_index.unwrap() as u32, ) .await?; let tx: Option<TxData> = 
deserialize_response_result(response)?; let tx = tx.unwrap().tx; assert_eq!(tx.created_at, Some(expected_tx.created_at)); assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); assert_eq!(tx.fail_reason, expected_tx.fail_reason); if matches!(tx.op, TransactionData::L2(_)) { assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); } } server.stop().await; Ok(()) } }
n { "lastCommitted" => self .get_last_committed_block_number() .await .map_err(Error::storage), "lastFinalized" => self .get_last_finalized_block_number() .await .map_err(Error::storage), _ => Err(Error::from(InvalidDataError::InvalidBlockPosition)), } } }
function_block-function_prefixed
[ { "content": "pub fn fr_into_u32_low(value: Fr) -> u32 {\n\n let mut be_bytes = [0u8; 32];\n\n value\n\n .into_repr()\n\n .write_be(be_bytes.as_mut())\n\n .expect(\"Write value bytes\");\n\n u32::from_be_bytes([be_bytes[28], be_bytes[29], be_bytes[30], be_bytes[31]])\n\n}\n\n\n\n//...
Rust
lib/src/nyengine_audio/src/lib.rs
NyantasticUwU/nyengine
b6a47d2bfb101366eeda1b318e66f09d37317688
mod stream; use crate::stream::OStream; use rodio::{Decoder, OutputStream, OutputStreamHandle, Sample, Sink, Source}; use std::{ ffi::CStr, fmt::Debug, fs::File, io::BufReader, os::raw::{c_char, c_float, c_int, c_void}, ptr, }; #[no_mangle] pub unsafe extern "C" fn na_new_default_ostream() -> *mut c_void { if let Ok((stream, handle)) = OutputStream::try_default() { return Box::into_raw(Box::new(OStream::new(stream, handle))) as *mut c_void; } ptr::null_mut::<c_void>() } #[no_mangle] pub unsafe extern "C" fn na_get_ostream_handle(stream: *mut c_void) -> *mut c_void { if !stream.is_null() { &mut (*(stream as *mut OStream)).handle as *mut OutputStreamHandle as *mut c_void } else { ptr::null_mut::<c_void>() } } #[no_mangle] pub unsafe extern "C" fn na_free_stream(stream: *mut c_void) { Box::<OStream>::from_raw(stream as *mut OStream); } #[no_mangle] pub unsafe extern "C" fn na_new_sink(ostream_handle: *mut c_void) -> *mut c_void { if !ostream_handle.is_null() { if let Ok(sink) = Sink::try_new(&*(ostream_handle as *mut OutputStreamHandle)) { return Box::into_raw(Box::new(sink)) as *mut c_void; } } ptr::null_mut::<c_void>() } unsafe fn append_to_sink<D>(sink: *mut c_void, decoder: D) -> bool where D: Source + Send + 'static, D::Item: Debug + Sample + Send, { if !sink.is_null() { let sink: *mut Sink = sink as *mut Sink; (*sink).append(decoder); return true; } false } #[no_mangle] pub unsafe extern "C" fn na_add_audio_to_sink( file_name: *const c_char, sink: *mut c_void, should_loop: c_int, ) -> c_int { if let Ok(file_name_str) = CStr::from_ptr(file_name).to_str() { if let Ok(file) = File::open(file_name_str) { if should_loop == 0 { if let Ok(decoder) = Decoder::new(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } else { if let Ok(decoder) = Decoder::new_looped(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } } } 1 } #[no_mangle] pub unsafe extern "C" fn na_play_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut 
Sink; (*sink).play(); } #[no_mangle] pub unsafe extern "C" fn na_pause_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).pause(); } #[no_mangle] pub unsafe extern "C" fn na_stop_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).stop(); } #[no_mangle] pub unsafe extern "C" fn na_sleep_sink_until_end(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).sleep_until_end(); } #[no_mangle] pub unsafe extern "C" fn na_is_sink_paused(sink: *mut c_void) -> c_int { let sink: *mut Sink = sink as *mut Sink; (*sink).is_paused() as c_int } #[no_mangle] pub unsafe extern "C" fn na_set_sink_volume(sink: *mut c_void, volume: c_float) { let sink: *mut Sink = sink as *mut Sink; (*sink).set_volume(volume); } #[no_mangle] pub unsafe extern "C" fn na_get_sink_volume(sink: *mut c_void) -> c_float { let sink: *mut Sink = sink as *mut Sink; (*sink).volume() } #[no_mangle] pub unsafe extern "C" fn na_free_sink(sink: *mut c_void) { Box::<Sink>::from_raw(sink as *mut Sink); }
mod stream; use crate::stream::OStream; use rodio::{Decoder, OutputStream, OutputStreamHandle, Sample, Sink, Source}; use std::{ ffi::CStr, fmt::Debug, fs::File, io::BufReader, os::raw::{c_char, c_float, c_int, c_void}, ptr, }; #[no_mangle] pub unsafe extern "C" fn na_new_default_ostream() -> *mut c_void { if let Ok((stream, handle)) = OutputStream::try_default() { return Box::into_raw(Box::new(OStream::new(stream, handle))) as *mut c_void; } ptr::null_mut::<c_void>() } #[no_mangle] pub unsafe extern "C" fn na_get_ostream_handle(stream: *mut c_void) -> *mut c_void { if !stream.is_null() { &mut (*(stream as *mut OStream)).handle as *mut OutputStreamHandle as *mut c_void } else { ptr::null_mut::<c_void>() } } #[no_mangle] pub unsafe extern "C" fn na_free_stream(stream: *mut c_void) { Box::<OStream>::from_raw(stream as *mut OStream); } #[no_mangle] pub unsafe extern "C" fn na_new_sink(ostream_handle: *mut c_void) -> *mut c_void { if !ostream_handle.is_null() { if let Ok(sink) = Sink::try_new(&*(ostream_handle as *mut OutputStream
false } #[no_mangle] pub unsafe extern "C" fn na_add_audio_to_sink( file_name: *const c_char, sink: *mut c_void, should_loop: c_int, ) -> c_int { if let Ok(file_name_str) = CStr::from_ptr(file_name).to_str() { if let Ok(file) = File::open(file_name_str) { if should_loop == 0 { if let Ok(decoder) = Decoder::new(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } else { if let Ok(decoder) = Decoder::new_looped(BufReader::new(file)) { if append_to_sink(sink, decoder) { return 0; } } } } } 1 } #[no_mangle] pub unsafe extern "C" fn na_play_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).play(); } #[no_mangle] pub unsafe extern "C" fn na_pause_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).pause(); } #[no_mangle] pub unsafe extern "C" fn na_stop_sink(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).stop(); } #[no_mangle] pub unsafe extern "C" fn na_sleep_sink_until_end(sink: *mut c_void) { let sink: *mut Sink = sink as *mut Sink; (*sink).sleep_until_end(); } #[no_mangle] pub unsafe extern "C" fn na_is_sink_paused(sink: *mut c_void) -> c_int { let sink: *mut Sink = sink as *mut Sink; (*sink).is_paused() as c_int } #[no_mangle] pub unsafe extern "C" fn na_set_sink_volume(sink: *mut c_void, volume: c_float) { let sink: *mut Sink = sink as *mut Sink; (*sink).set_volume(volume); } #[no_mangle] pub unsafe extern "C" fn na_get_sink_volume(sink: *mut c_void) -> c_float { let sink: *mut Sink = sink as *mut Sink; (*sink).volume() } #[no_mangle] pub unsafe extern "C" fn na_free_sink(sink: *mut c_void) { Box::<Sink>::from_raw(sink as *mut Sink); }
Handle)) { return Box::into_raw(Box::new(sink)) as *mut c_void; } } ptr::null_mut::<c_void>() } unsafe fn append_to_sink<D>(sink: *mut c_void, decoder: D) -> bool where D: Source + Send + 'static, D::Item: Debug + Sample + Send, { if !sink.is_null() { let sink: *mut Sink = sink as *mut Sink; (*sink).append(decoder); return true; }
random
[]
Rust
src/runner/command.rs
doy/nbsh
4151ab7aab939a12721a0f4207c87b5c09ace339
use crate::runner::prelude::*; pub struct Command { inner: Inner, exe: std::path::PathBuf, redirects: Vec<crate::parse::Redirect>, pre_exec: Option< Box<dyn FnMut() -> std::io::Result<()> + Send + Sync + 'static>, >, } impl Command { pub fn new(exe: crate::parse::Exe, io: super::builtins::Io) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io).map_or_else( |exe| Self::new_binary(&exe).inner, Inner::Builtin, ), exe: exe_path, redirects, pre_exec: None, } } pub fn new_binary(exe: &crate::parse::Exe) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); let mut cmd = tokio::process::Command::new(exe.exe()); cmd.args(exe.args()); Self { inner: Inner::Binary(cmd), exe: exe_path, redirects, pre_exec: None, } } pub fn new_builtin( exe: crate::parse::Exe, io: super::builtins::Io, ) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io) .map_or_else(|_| todo!(), Inner::Builtin), exe: exe_path, redirects, pre_exec: None, } } pub fn stdin(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdin(fh); } Inner::Builtin(cmd) => { cmd.stdin(fh); } } } pub fn stdout(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdout(fh); } Inner::Builtin(cmd) => { cmd.stdout(fh); } } } pub fn stderr(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stderr(fh); } Inner::Builtin(cmd) => { cmd.stderr(fh); } } } pub unsafe fn pre_exec<F>(&mut self, f: F) where F: 'static + FnMut() -> std::io::Result<()> + Send + Sync, { self.pre_exec = Some(Box::new(f)); } pub fn spawn(self, env: &Env) -> Result<Child> { let Self { inner, exe, redirects, pre_exec, } = self; #[allow(clippy::as_conversions)] let pre_exec = pre_exec.map_or_else( || { let redirects = redirects.clone(); Box::new(move 
|| { apply_redirects(&redirects)?; Ok(()) }) as Box<dyn FnMut() -> std::io::Result<()> + Send + Sync> }, |mut pre_exec| { let redirects = redirects.clone(); Box::new(move || { apply_redirects(&redirects)?; pre_exec()?; Ok(()) }) }, ); match inner { Inner::Binary(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; Ok(Child::Binary(cmd.spawn().map_err(|e| { anyhow!( "{}: {}", crate::format::io_error(&e), exe.display() ) })?)) } Inner::Builtin(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; cmd.apply_redirects(&redirects); Ok(Child::Builtin(cmd.spawn(env)?)) } } } } pub enum Inner { Binary(tokio::process::Command), Builtin(super::builtins::Command), } pub enum Child { Binary(tokio::process::Child), Builtin(super::builtins::Child), } impl Child { pub fn id(&self) -> Option<u32> { match self { Self::Binary(child) => child.id(), Self::Builtin(child) => child.id(), } } pub fn status( self, ) -> std::pin::Pin< Box< dyn std::future::Future<Output = Result<std::process::ExitStatus>> + Send + Sync, >, > { Box::pin(async move { match self { Self::Binary(_) => unreachable!(), Self::Builtin(child) => Ok(child.status().await?), } }) } } fn apply_redirects( redirects: &[crate::parse::Redirect], ) -> std::io::Result<()> { for redirect in redirects { match &redirect.to { crate::parse::RedirectTarget::Fd(fd) => { nix::unistd::dup2(*fd, redirect.from)?; } crate::parse::RedirectTarget::File(path) => { let fd = redirect.dir.open(path)?; if fd != redirect.from { nix::unistd::dup2(fd, redirect.from)?; nix::unistd::close(fd)?; } } } } Ok(()) }
use crate::runner::prelude::*; pub struct Command { inner: Inner, exe: std::path::PathBuf, redirects: Vec<crate::parse::Redirect>, pre_exec: Option< Box<dyn FnMut() -> std::io::Result<()> + Send + Sync + 'static>, >, } impl Command { pub fn new(exe: crate::parse::Exe, io: super::builtins::Io) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io).map_or_else( |exe| Self::new_binary(&exe).inner, Inner::Builtin, ), exe: exe_path, redirects, pre_exec: None, } } pub fn new_binary(exe: &crate::parse::Exe) -> Self { let exe_path = exe.exe().to_pa
F: 'static + FnMut() -> std::io::Result<()> + Send + Sync, { self.pre_exec = Some(Box::new(f)); } pub fn spawn(self, env: &Env) -> Result<Child> { let Self { inner, exe, redirects, pre_exec, } = self; #[allow(clippy::as_conversions)] let pre_exec = pre_exec.map_or_else( || { let redirects = redirects.clone(); Box::new(move || { apply_redirects(&redirects)?; Ok(()) }) as Box<dyn FnMut() -> std::io::Result<()> + Send + Sync> }, |mut pre_exec| { let redirects = redirects.clone(); Box::new(move || { apply_redirects(&redirects)?; pre_exec()?; Ok(()) }) }, ); match inner { Inner::Binary(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; Ok(Child::Binary(cmd.spawn().map_err(|e| { anyhow!( "{}: {}", crate::format::io_error(&e), exe.display() ) })?)) } Inner::Builtin(mut cmd) => { unsafe { cmd.pre_exec(pre_exec) }; cmd.apply_redirects(&redirects); Ok(Child::Builtin(cmd.spawn(env)?)) } } } } pub enum Inner { Binary(tokio::process::Command), Builtin(super::builtins::Command), } pub enum Child { Binary(tokio::process::Child), Builtin(super::builtins::Child), } impl Child { pub fn id(&self) -> Option<u32> { match self { Self::Binary(child) => child.id(), Self::Builtin(child) => child.id(), } } pub fn status( self, ) -> std::pin::Pin< Box< dyn std::future::Future<Output = Result<std::process::ExitStatus>> + Send + Sync, >, > { Box::pin(async move { match self { Self::Binary(_) => unreachable!(), Self::Builtin(child) => Ok(child.status().await?), } }) } } fn apply_redirects( redirects: &[crate::parse::Redirect], ) -> std::io::Result<()> { for redirect in redirects { match &redirect.to { crate::parse::RedirectTarget::Fd(fd) => { nix::unistd::dup2(*fd, redirect.from)?; } crate::parse::RedirectTarget::File(path) => { let fd = redirect.dir.open(path)?; if fd != redirect.from { nix::unistd::dup2(fd, redirect.from)?; nix::unistd::close(fd)?; } } } } Ok(()) }
th_buf(); let redirects = exe.redirects().to_vec(); let mut cmd = tokio::process::Command::new(exe.exe()); cmd.args(exe.args()); Self { inner: Inner::Binary(cmd), exe: exe_path, redirects, pre_exec: None, } } pub fn new_builtin( exe: crate::parse::Exe, io: super::builtins::Io, ) -> Self { let exe_path = exe.exe().to_path_buf(); let redirects = exe.redirects().to_vec(); Self { inner: super::builtins::Command::new(exe, io) .map_or_else(|_| todo!(), Inner::Builtin), exe: exe_path, redirects, pre_exec: None, } } pub fn stdin(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdin(fh); } Inner::Builtin(cmd) => { cmd.stdin(fh); } } } pub fn stdout(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stdout(fh); } Inner::Builtin(cmd) => { cmd.stdout(fh); } } } pub fn stderr(&mut self, fh: std::fs::File) { match &mut self.inner { Inner::Binary(cmd) => { cmd.stderr(fh); } Inner::Builtin(cmd) => { cmd.stderr(fh); } } } pub unsafe fn pre_exec<F>(&mut self, f: F) where
random
[ { "content": "pub fn setpgid_child(pg: Option<nix::unistd::Pid>) -> std::io::Result<()> {\n\n nix::unistd::setpgid(PID0, pg.unwrap_or(PID0))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 0, "score": 130883.43219710211 }, { "content": "pub fn io_error(e: &std::io::...