lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/stages/how_to_play.rs
rdrmic/color-columns
197a7a46ba314b93401a7882e764c85c844ec967
use ggez::{ graphics::{self, Align, DrawParam, PxScale, Text, TextFragment}, mint::Point2, Context, GameResult, }; use glam::Vec2; use crate::{ blocks::Block, constants::{ BLOCK_COLOR_BLUE, BLOCK_COLOR_GREEN, BLOCK_COLOR_MAGENTA, BLOCK_COLOR_ORANGE, BLOCK_COLOR_RED, BLOCK_COLOR_YELLOW, COLOR_GREEN, COLOR_ORANGE, COLOR_YELLOW, GO_BACK_LABEL_POSITION, HOWTOPLAY_AND_ABOUT_AREA_WIDTH, HOWTOPLAY_CONTROLS_CHAR_SCALE, HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, HOWTOPLAY_HEADER_BLOCK_SIZE, HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, HOWTOPLAY_HEADER_POSITION_X, HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, HOWTOPLAY_SCORING_RULES_CHAR_SCALE, HOWTOPLAY_SCORING_RULES_TEXT_POSITION, }, input::Event, resources::Resources, }; use super::{Stage, StageTrait}; pub struct HowToPlay { blocks_header_controls: [Block; 3], blocks_header_scoring_rules: [Block; 3], go_back_instruction: Text, controls_leftside: Text, controls_rightside: Text, scoring_rules: Text, } impl HowToPlay { pub fn new(resources: &Resources) -> Self { let font = resources.get_fonts().semi_bold; let controls_leftside_str = "\n\ Right:\n\ Left:\n\ Shuffle up:\n\ Shuffle down:\n\ Drop: "; let mut controls_leftside = Text::new(TextFragment { text: controls_leftside_str.to_string(), color: Some(COLOR_ORANGE), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_leftside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let controls_rightside_str = "\n\ RIGHT / D\n\ LEFT / A\n\ UP / W\n\ DOWN / S\n\ SPACE "; let mut controls_rightside = Text::new(TextFragment { text: controls_rightside_str.to_string(), color: Some(COLOR_YELLOW), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_rightside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let scoring_rules_str = "\n\ Points are gained by matching\n\ 
same-colored blocks in all 4\n\ directions.\n\n\ The more matched blocks in a\n\ line - the more points gained.\n\n\ Also, the points are\n\ multiplicated by the number of\n\ sequential cascading matchings. "; let mut scoring_rules = Text::new(TextFragment { text: scoring_rules_str.to_string(), color: Some(COLOR_GREEN), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_SCORING_RULES_CHAR_SCALE)), }); scoring_rules.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); Self { blocks_header_controls: Self::create_header_for_controls(), blocks_header_scoring_rules: Self::create_header_for_scoring_rules(), go_back_instruction: resources .get_navigation_instructions() .get_go_back() .clone(), controls_leftside, controls_rightside, scoring_rules, } } fn create_header_for_controls() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_BLUE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_YELLOW, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_GREEN, ), ] } fn create_header_for_scoring_rules() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_RED, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_ORANGE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_MAGENTA, ), ] } } impl StageTrait for HowToPlay { fn update(&mut 
self, _ctx: &Context, input_event: Event) -> GameResult<Option<Stage>> { if let Event::Escape = input_event { return Ok(Some(Stage::MainMenu)); } Ok(Some(Stage::HowToPlay)) } fn draw(&mut self, ctx: &mut Context) -> GameResult<()> { for mut block in self.blocks_header_controls { block.draw(ctx)?; } for mut block in self.blocks_header_scoring_rules { block.draw(ctx)?; } graphics::queue_text( ctx, &self.go_back_instruction, Vec2::new(GO_BACK_LABEL_POSITION[0], GO_BACK_LABEL_POSITION[1]), None, ); graphics::queue_text( ctx, &self.controls_leftside, Vec2::new( HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.controls_rightside, Vec2::new( HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.scoring_rules, Vec2::new( HOWTOPLAY_SCORING_RULES_TEXT_POSITION[0], HOWTOPLAY_SCORING_RULES_TEXT_POSITION[1], ), None, ); graphics::draw_queued_text( ctx, DrawParam::default(), None, graphics::FilterMode::Linear, )?; Ok(()) } }
use ggez::{ graphics::{self, Align, DrawParam, PxScale, Text, TextFragment}, mint::Point2, Context, GameResult, }; use glam::Vec2; use crate::{ blocks::Block, constants::{ BLOCK_COLOR_BLUE, BLOCK_COLOR_GREEN, BLOCK_COLOR_MAGENTA, BLOCK_COLOR_ORANGE, BLOCK_COLOR_RED, BLOCK_COLOR_YELLOW, COLOR_GREEN, COLOR_ORANGE, COLOR_YELLOW, GO_BACK_LABEL_POSITION, HOWTOPLAY_AND_ABOUT_AREA_WIDTH, HOWTOPLAY_CONTROLS_CHAR_SCALE, HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, HOWTOPLAY_HEADER_BLOCK_SIZE, HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, HOWTOPLAY_HEADER_POSITION_X, HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, HOWTOPLAY_SCORING_RULES_CHAR_SCALE, HOWTOPLAY_SCORING_RULES_TEXT_POSITION, }, input::Event, resources::Resources, }; use super::{Stage, StageTrait}; pub struct HowToPlay { blocks_header_controls: [Block; 3], blocks_header_scoring_rules: [Block; 3], go_back_instruction: Text, controls_leftside: Text, controls_rightside: Text, scoring_rules: Text, } impl HowToPlay { pub fn new(resources: &Resources) -> Self { let font = resources.get_fonts().semi_bold; let controls_leftside_str = "\n\ Right:\n\ Left:\n\ Shuffle up:\n\ Shuffle down:\n\ Drop: "; let mut controls_leftside = Text::new(TextFragment { text: controls_leftside_str.to_string(), color: Some(COLOR_ORANGE), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_leftside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let controls_rightside_str = "\n\ RIGHT / D\n\ LEFT / A\n\ UP / W\n\ DOWN / S\n\ SPACE "; let mut controls_rightside = Text::new(TextFragment { text: controls_rightside_str.to_string(), color: Some(COLOR_YELLOW), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_rightside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let scoring_rules_str = "\n\ Points are gained by matching\n\ 
same-colored blocks in all 4\n\ directions.\n\n\ The more matched blocks in a\n\ line - the more points gained.\n\n\ Also, the points are\n\ multiplicated by the number of\n\ sequential cascading matchings. "; let mut scoring_rules = Text::new(TextFragment { text: scoring_rules_str.to_string(), color: Some(COLOR_GREEN), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_SCORING_RULES_CHAR_SCALE)), }); scoring_rules.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); Self { blocks_header_controls: Self::create_header_for_controls(), blocks_header_scoring_rules: Self::create_header_for_scoring_rules(), go_back_instruction: resources .get_navigation_instructions() .get_go_back() .clone(), controls_leftside, controls_rightside, scoring_rules, } } fn create_header_for_controls() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_BLUE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_YELLOW, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_GREEN, ), ] } fn create_header_for_scoring_rules() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_RED, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_ORANGE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_MAGENTA, ), ] } } impl StageTrait for HowToPlay {
fn draw(&mut self, ctx: &mut Context) -> GameResult<()> { for mut block in self.blocks_header_controls { block.draw(ctx)?; } for mut block in self.blocks_header_scoring_rules { block.draw(ctx)?; } graphics::queue_text( ctx, &self.go_back_instruction, Vec2::new(GO_BACK_LABEL_POSITION[0], GO_BACK_LABEL_POSITION[1]), None, ); graphics::queue_text( ctx, &self.controls_leftside, Vec2::new( HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.controls_rightside, Vec2::new( HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.scoring_rules, Vec2::new( HOWTOPLAY_SCORING_RULES_TEXT_POSITION[0], HOWTOPLAY_SCORING_RULES_TEXT_POSITION[1], ), None, ); graphics::draw_queued_text( ctx, DrawParam::default(), None, graphics::FilterMode::Linear, )?; Ok(()) } }
fn update(&mut self, _ctx: &Context, input_event: Event) -> GameResult<Option<Stage>> { if let Event::Escape = input_event { return Ok(Some(Stage::MainMenu)); } Ok(Some(Stage::HowToPlay)) }
function_block-full_function
[ { "content": "pub fn idx_pair_to_center_point_of_block(idxs: &[usize; 2]) -> Point2<f32> {\n\n Point2 {\n\n x: BLOCK_SIZE.mul_add(idxs[0] as f32, GAME_ARENA_RECT.left()) + BLOCK_SIZE / 2.0,\n\n y: (GAME_ARENA_RECT.bottom() - BLOCK_SIZE * idxs[1] as f32) - BLOCK_SIZE + BLOCK_SIZE / 2.0,\n\n }...
Rust
src/model/yaml/null.rs
dnsl48/yamlette
593f172622d393086450023433549bf23843933f
extern crate skimmer; use model::{ model_issue_rope, EncodedString, Model, Node, Rope, Renderer, Tagged, TaggedValue }; use model::style::CommonStyles; use std::any::Any; use std::borrow::Cow; use std::default::Default; use std::iter::Iterator; pub static TAG: &'static str = "tag:yaml.org,2002:null"; #[derive (Copy, Clone, Debug)] pub struct Null; impl Null { pub fn get_tag () -> Cow<'static, str> { Cow::from (TAG) } fn read_null (&self, value: &[u8], ptr: usize) -> usize { match value.get (ptr).map (|b| *b) { Some (b'~') => 1, Some (b'n') => if value[ptr .. ].starts_with ("null".as_bytes ()) { 4 } else { 0 }, Some (b'N') => if value[ptr .. ].starts_with ("Null".as_bytes ()) || value[ptr .. ].starts_with ("NULL".as_bytes ()) { 4 } else { 0 }, _ => 0 } } } impl Model for Null { fn get_tag (&self) -> Cow<'static, str> { Self::get_tag () } fn as_any (&self) -> &Any { self } fn as_mut_any (&mut self) -> &mut Any { self } fn is_decodable (&self) -> bool { true } fn is_encodable (&self) -> bool { true } fn has_default (&self) -> bool { true } fn get_default (&self) -> TaggedValue { TaggedValue::from (NullValue::default ()) } fn encode (&self, _renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Result<Rope, TaggedValue> { let mut val: NullValue = match <TaggedValue as Into<Result<NullValue, TaggedValue>>>::into (value) { Ok (value) => value, Err (value) => return Err (value) }; let issue_tag = val.issue_tag (); let alias = val.take_alias (); let node = Node::String (EncodedString::from ("~".as_bytes ())); Ok (model_issue_rope (self, node, issue_tag, alias, tags)) } fn decode (&self, explicit: bool, value: &[u8]) -> Result<TaggedValue, ()> { if value.len () == 0 { return Ok ( TaggedValue::from (NullValue::default ()) ) } let mut ptr: usize = 0; let mut quote_state: u8 = 0; if explicit { match value.get (ptr).map (|b| *b) { Some (b'\'') => { ptr += 1; quote_state = 1; } Some (b'"') => { ptr += 1; quote_state = 2; } 
_ => () }; /* if self.s_quote.contained_at_start (value) { ptr += self.s_quote.len (); quote_state = 1; } else if self.d_quote.contained_at_start (value) { ptr += self.d_quote.len (); quote_state = 2; } */ } let maybe_null = self.read_null (value, ptr); if maybe_null > 0 { ptr += maybe_null; if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => (), Some (b'"') if quote_state == 2 => (), _ => return Err ( () ) }; /* if quote_state == 1 { if self.s_quote.contained_at (value, ptr) { // ptr += self.s_quote.len (); ?? } else { return Err ( () ) } } else if quote_state == 2 { if self.d_quote.contained_at (value, ptr) { // ptr += self.d_quote.len (); ?? } else { return Err ( () ) } } */ } return Ok ( TaggedValue::from (NullValue::default ()) ) } if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => Ok ( TaggedValue::from (NullValue::default ()) ), Some (b'"') if quote_state == 2 => Ok ( TaggedValue::from (NullValue::default ()) ), _ => Err ( () ) } /* if quote_state == 1 && ptr == self.s_quote.len () { if self.s_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } else if quote_state == 2 && ptr == self.d_quote.len () { if self.d_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } */ } else { Err ( () ) } } } #[derive (Clone, Debug)] pub struct NullValue { style: u8, alias: Option<Cow<'static, str>> } impl NullValue { pub fn new (styles: CommonStyles, alias: Option<Cow<'static, str>>) -> NullValue { NullValue { style: if styles.issue_tag () { 1 } else { 0 }, alias: alias } } pub fn take_alias (&mut self) -> Option<Cow<'static, str>> { self.alias.take () } pub fn issue_tag (&self) -> bool { self.style & 1 == 1 } pub fn set_issue_tag (&mut self, val: bool) { if val { self.style |= 1; } else { self.style &= !1; } } } impl Default for NullValue { fn default () -> NullValue { NullValue { style: 0, alias: None } } } 
impl Tagged for NullValue { fn get_tag (&self) -> Cow<'static, str> { Cow::from (TAG) } fn as_any (&self) -> &Any { self as &Any } fn as_mut_any (&mut self) -> &mut Any { self as &mut Any } } impl AsRef<str> for NullValue { fn as_ref (&self) -> &'static str { "~" } } #[cfg (all (test, not (feature = "dev")))] mod tests { use super::*; use model::{ Tagged, Renderer }; use std::iter; #[test] fn tag () { let null = Null; assert_eq! (null.get_tag (), TAG); } #[test] fn encode () { let renderer = Renderer; let null = Null; if let Ok (rope) = null.encode (&renderer, TaggedValue::from (NullValue::default ()), &mut iter::empty ()) { let encode = rope.render (&renderer); assert_eq! (encode, "~".as_bytes ()); } else { assert! (false) } } #[test] fn decode () { let null = Null; let options = ["", "~", "null", "Null", "NULL"]; for i in 0 .. options.len () { if let Ok (tagged) = null.decode (true, options[i].as_bytes ()) { assert_eq! (tagged.get_tag (), Cow::from (TAG)); if let None = tagged.as_any ().downcast_ref::<NullValue> () { assert! (false) } } else { assert! (false) } } let decode = null.decode (true, "nil".as_bytes ()); assert! (decode.is_err ()); } }
extern crate skimmer; use model::{ model_issue_rope, EncodedString, Model, Node, Rope, Renderer, Tagged, TaggedValue }; use model::style::CommonStyles; use std::any::Any; use std::borrow::Cow; use std::default::Default; use std::iter::Iterator; pub static TAG: &'static str = "tag:yaml.org,2002:null"; #[derive (Copy, Clone, Debug)] pub struct Null; impl Null { pub fn get_tag () -> Cow<'static, str> { Cow::from (TAG) } fn read_null (&self, value: &[u8], ptr: usize) -> usize { match value.get (ptr).map (|b| *b) { Some (b'~') => 1, Some (b'n') => if value[ptr .. ].starts_with ("null".as_bytes ()) { 4 } else { 0 }, Some (b'N') => if value[ptr .. ].starts_with ("Null".as_bytes ()) || value[ptr .. ].starts_with ("NULL".as_bytes ()) { 4 } else { 0 }, _ => 0 } } } impl Model for Null { fn get_tag (&self) -> Cow<'static, str> { Self::get_tag () } fn as_any (&self) -> &Any { self } fn as_mut_any (&mut self) -> &mut Any { self } fn is_decodable (&self) -> bool { true } fn is_encodable (&self) -> bool { true } fn has_default (&self) -> bool { true } fn get_default (&self) -> TaggedValue { TaggedValue::from (NullValue::default ()) } fn encode (&self, _renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Result<Rope, TaggedValue> { let mut val: NullValue = match <TaggedValue as Into<Result<NullValue, TaggedValue>>>::into (value) { Ok (value) => value, Err (value) => return Err (value) }; let issue_tag = val.issue_tag (); let alias = val.take_alias (); let node = Node::String (EncodedString::from ("~".as_bytes ())); Ok (model_issue_rope (self, node, issue_tag, alias, tags)) } fn decode (&self, explicit: bool, value: &[u8]) -> Result<TaggedValue, ()> { if value.len () == 0 { return Ok ( TaggedValue::from (NullValue::default ()) ) } let mut ptr: usize = 0; let mut quote_state: u8 = 0; if explicit { match value.get (ptr).map (|b| *b) { Some (b'\'') => { ptr += 1; quote_state = 1; } Some (b'"') => { ptr += 1; quote_state = 2; } 
_ => () }; /* if self.s_quote.contained_at_start (value) { ptr += self.s_quote.len (); quote_state = 1; } else if self.d_quote.contained_at_start (value) { ptr += self.d_quote.len (); quote_state = 2; } */ } let maybe_null = self.read_null (value, ptr); if maybe_null > 0 { ptr += maybe_null; if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => (), Some (b'"') if quote_state == 2 => (), _ => return Err ( () ) }; /* if quote_state == 1 { if self.s_quote.contained_at (value, ptr) { // ptr += self.s_quote.len (); ?? } else { return Err ( () ) } } else if quote_state == 2 { if self.d_quote.contained_at (value, ptr) { // ptr += self.d_quote.len (); ?? } else { return Err ( () ) } } */ } return Ok ( Tagg
} #[derive (Clone, Debug)] pub struct NullValue { style: u8, alias: Option<Cow<'static, str>> } impl NullValue { pub fn new (styles: CommonStyles, alias: Option<Cow<'static, str>>) -> NullValue { NullValue { style: if styles.issue_tag () { 1 } else { 0 }, alias: alias } } pub fn take_alias (&mut self) -> Option<Cow<'static, str>> { self.alias.take () } pub fn issue_tag (&self) -> bool { self.style & 1 == 1 } pub fn set_issue_tag (&mut self, val: bool) { if val { self.style |= 1; } else { self.style &= !1; } } } impl Default for NullValue { fn default () -> NullValue { NullValue { style: 0, alias: None } } } impl Tagged for NullValue { fn get_tag (&self) -> Cow<'static, str> { Cow::from (TAG) } fn as_any (&self) -> &Any { self as &Any } fn as_mut_any (&mut self) -> &mut Any { self as &mut Any } } impl AsRef<str> for NullValue { fn as_ref (&self) -> &'static str { "~" } } #[cfg (all (test, not (feature = "dev")))] mod tests { use super::*; use model::{ Tagged, Renderer }; use std::iter; #[test] fn tag () { let null = Null; assert_eq! (null.get_tag (), TAG); } #[test] fn encode () { let renderer = Renderer; let null = Null; if let Ok (rope) = null.encode (&renderer, TaggedValue::from (NullValue::default ()), &mut iter::empty ()) { let encode = rope.render (&renderer); assert_eq! (encode, "~".as_bytes ()); } else { assert! (false) } } #[test] fn decode () { let null = Null; let options = ["", "~", "null", "Null", "NULL"]; for i in 0 .. options.len () { if let Ok (tagged) = null.decode (true, options[i].as_bytes ()) { assert_eq! (tagged.get_tag (), Cow::from (TAG)); if let None = tagged.as_any ().downcast_ref::<NullValue> () { assert! (false) } } else { assert! (false) } } let decode = null.decode (true, "nil".as_bytes ()); assert! (decode.is_err ()); } }
edValue::from (NullValue::default ()) ) } if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => Ok ( TaggedValue::from (NullValue::default ()) ), Some (b'"') if quote_state == 2 => Ok ( TaggedValue::from (NullValue::default ()) ), _ => Err ( () ) } /* if quote_state == 1 && ptr == self.s_quote.len () { if self.s_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } else if quote_state == 2 && ptr == self.d_quote.len () { if self.d_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } */ } else { Err ( () ) } }
function_block-function_prefixed
[ { "content": "pub fn model_issue_rope (model: &Model, node: Node, issue_tag: bool, alias: Option<Cow<'static, str>>, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Rope {\n\n if let Some (alias) = alias {\n\n if issue_tag {\n\n Rope::from (vec! [model_tag (model, tags...
Rust
tss-esapi/tests/integration_tests/context_tests/tpm_commands/object_commands_tests.rs
Superhepper/rust-tss-esapi
a6ae84793e73b10dd672b613ada820566c84fe85
mod test_create { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_create() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context .create( prim_key_handle, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); } } mod test_load { use crate::common::{create_ctx_with_session, decryption_key_pub, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_load() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let result = context .create( prim_key_handle, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); let _ = context .load(prim_key_handle, result.out_private, &result.out_public) .unwrap(); } } mod test_load_external_public { use crate::common::{create_ctx_with_session, KEY}; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) 
.build() .expect("Failed to build object attributes"); PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..256]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external_public() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); context .load_external_public(&pub_key, Hierarchy::Owner) .unwrap(); } } mod test_load_external { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; use tss_esapi_sys::{ TPM2B_PRIVATE_KEY_RSA, TPM2B_SENSITIVE, TPMT_SENSITIVE, TPMU_SENSITIVE_COMPOSITE, }; const KEY: [u8; 256] = [ 0xc9, 0x75, 0xf8, 0xb2, 0x30, 0xf4, 0x24, 0x6e, 0x95, 0xb1, 0x3c, 0x55, 0x0f, 0xe4, 0x48, 0xe9, 0xac, 0x06, 0x1f, 0xa8, 0xbe, 0xa4, 0xd7, 0x1c, 0xa5, 0x5e, 0x2a, 0xbf, 0x60, 0xc2, 0x98, 0x63, 0x6c, 0xb4, 0xe2, 0x61, 0x54, 0x31, 0xc3, 0x3e, 0x9d, 0x1a, 0x83, 0x84, 0x18, 0x51, 0xe9, 0x8c, 0x24, 0xcf, 0xac, 0xc6, 0x0d, 0x26, 0x2c, 0x9f, 0x2b, 0xd5, 0x91, 0x98, 0x89, 0xe3, 0x68, 0x97, 0x36, 0x02, 0xec, 0x16, 0x37, 0x24, 0x08, 0xb4, 0x77, 0xd1, 0x56, 0x10, 0x3e, 0xf0, 0x64, 0xf6, 0x68, 0x50, 0x68, 0x31, 0xf8, 0x9b, 0x88, 0xf2, 0xc5, 0xfb, 0xc9, 0x21, 0xd2, 0xdf, 0x93, 0x6f, 
0x98, 0x94, 0x53, 0x68, 0xe5, 0x25, 0x8d, 0x8a, 0xf1, 0xd7, 0x5b, 0xf3, 0xf9, 0xdf, 0x8c, 0x77, 0x24, 0x9e, 0x28, 0x09, 0x36, 0xf0, 0xa2, 0x93, 0x17, 0xad, 0xbb, 0x1a, 0xd7, 0x6f, 0x25, 0x6b, 0x0c, 0xd3, 0x76, 0x7f, 0xcf, 0x3a, 0xe3, 0x1a, 0x84, 0x57, 0x62, 0x71, 0x8a, 0x6a, 0x42, 0x94, 0x71, 0x21, 0x6a, 0x13, 0x73, 0x17, 0x56, 0xa2, 0x38, 0xc1, 0x5e, 0x76, 0x0b, 0x67, 0x6b, 0x6e, 0xcd, 0xd3, 0xe2, 0x8a, 0x80, 0x61, 0x6c, 0x1c, 0x60, 0x9d, 0x65, 0xbd, 0x5a, 0x4e, 0xeb, 0xa2, 0x06, 0xd6, 0xbe, 0xf5, 0x49, 0xc1, 0x7d, 0xd9, 0x46, 0x3e, 0x9f, 0x2f, 0x92, 0xa4, 0x1a, 0x14, 0x2c, 0x1e, 0xb7, 0x6d, 0x71, 0x29, 0x92, 0x43, 0x7b, 0x76, 0xa4, 0x8b, 0x33, 0xf3, 0xd0, 0xda, 0x7c, 0x7f, 0x73, 0x50, 0xe2, 0xc5, 0x30, 0xad, 0x9e, 0x0f, 0x61, 0x73, 0xa0, 0xbb, 0x87, 0x1f, 0x0b, 0x70, 0xa9, 0xa6, 0xaa, 0x31, 0x2d, 0x62, 0x2c, 0xaf, 0xea, 0x49, 0xb2, 0xce, 0x6c, 0x23, 0x90, 0xdd, 0x29, 0x37, 0x67, 0xb1, 0xc9, 0x99, 0x3a, 0x3f, 0xa6, 0x69, 0xc9, 0x0d, 0x24, 0x3f, ]; const PRIV_KEY: [u8; 256] = [ 0xcf, 0x7c, 0xe8, 0xa1, 0x9c, 0x47, 0xe1, 0x70, 0xbd, 0x38, 0x0a, 0xaf, 0x26, 0x5c, 0x48, 0x94, 0x48, 0x54, 0x98, 0x07, 0xae, 0xb9, 0x5c, 0x46, 0xaf, 0x8f, 0x59, 0xc8, 0x30, 0x1b, 0x98, 0xe3, 0x2a, 0x93, 0xb2, 0xdb, 0xab, 0x81, 0xbf, 0xd2, 0xad, 0x0d, 0xb6, 0x5b, 0x57, 0xbf, 0x98, 0xcb, 0xbc, 0x97, 0xb8, 0xc3, 0xa4, 0xb0, 0xc9, 0xf1, 0x05, 0x46, 0xed, 0x06, 0xdf, 0xdc, 0x58, 0xf4, 0xe0, 0x23, 0x15, 0x77, 0x25, 0x7b, 0x46, 0x6f, 0xea, 0x0c, 0xeb, 0xa5, 0x49, 0x53, 0x1d, 0xa0, 0x2e, 0x3a, 0x7e, 0x8e, 0x8d, 0xec, 0xdd, 0xa6, 0x07, 0x95, 0x40, 0xab, 0x3e, 0x10, 0x9b, 0x07, 0xce, 0xe9, 0xf3, 0xdb, 0x99, 0xb7, 0x52, 0xab, 0xa6, 0x22, 0x43, 0x70, 0xc2, 0x2c, 0xdc, 0x98, 0x4e, 0x05, 0x62, 0xdf, 0xe4, 0x6a, 0xba, 0xbd, 0x28, 0x4c, 0xbe, 0xbd, 0xb9, 0x80, 0x54, 0xed, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; pub fn get_ext_rsa_priv() -> TPM2B_SENSITIVE { TPM2B_SENSITIVE { size: std::mem::size_of::<TPM2B_SENSITIVE>() as u16, sensitiveArea: TPMT_SENSITIVE { sensitiveType: PublicAlgorithm::Rsa.into(), sensitive: TPMU_SENSITIVE_COMPOSITE { rsa: TPM2B_PRIVATE_KEY_RSA { size: 128, buffer: PRIV_KEY, }, }, ..Default::default() }, } } pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) .build() .expect("Failed to build object attributes"); PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); let priv_key = get_ext_rsa_priv(); let key_handle = context .load_external(&priv_key, &pub_key, Hierarchy::Null) .unwrap(); 
context.flush_context(key_handle.into()).unwrap(); } } mod test_read_public { use crate::common::{create_ctx_with_session, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_read_public() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let key_handle = context .create_primary( Hierarchy::Owner, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context.read_public(key_handle).unwrap(); } } mod test_make_credential { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryInto; use tss_esapi::interface_types::resource_handles::Hierarchy; #[test] fn test_make_credential() { let mut context = create_ctx_with_session(); let key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let (_, key_name, _) = context.read_public(key_handle).unwrap(); let cred = vec![1, 2, 3, 4, 5]; context .execute_without_session(|ctx| { ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name) }) .unwrap(); } } mod test_activate_credential { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::{TryFrom, TryInto}; use tss_esapi::{ attributes::SessionAttributesBuilder, constants::SessionType, interface_types::{algorithm::HashingAlgorithm, resource_handles::Hierarchy}, structures::{Digest, SymmetricDefinition}, }; #[test] fn test_make_activate_credential() { let mut context = create_ctx_with_session(); let (session_attributes, session_attributes_mask) = SessionAttributesBuilder::new().build(); let session_1 = context .execute_without_session(|ctx| { ctx.start_auth_session( None, None, None, SessionType::Hmac, SymmetricDefinition::AES_256_CFB, HashingAlgorithm::Sha256, ) }) .expect("session_1: Call to 
start_auth_session failed.") .expect("session_1: The auth session returned was NONE"); context .tr_sess_set_attributes(session_1, session_attributes, session_attributes_mask) .expect("Call to tr_sess_set_attributes failed"); let session_2 = context .execute_without_session(|ctx| { ctx.start_auth_session( None, None, None, SessionType::Hmac, SymmetricDefinition::AES_256_CFB, HashingAlgorithm::Sha256, ) }) .expect("session_2: Call to start_auth_session failed.") .expect("session_2: The auth session returned was NONE"); context .tr_sess_set_attributes(session_2, session_attributes, session_attributes_mask) .unwrap(); let key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let (_, key_name, _) = context .read_public(key_handle) .expect("Call to read_public failed"); let cred = vec![1, 2, 3, 4, 5]; let (credential_blob, secret) = context .execute_without_session(|ctx| { ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name) }) .expect("Call to make_credential failed"); context.set_sessions((Some(session_1), Some(session_2), None)); let decrypted = context .activate_credential(key_handle, key_handle, credential_blob, secret) .expect("Call to active_credential failed"); let expected = Digest::try_from(vec![1, 2, 3, 4, 5]).expect("Failed to create digest for expected"); assert_eq!(expected, decrypted); } } mod test_unseal { use crate::common::{create_ctx_with_session, create_public_sealed_object, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::SensitiveData}; #[test] fn unseal() { let testbytes: [u8; 5] = [0x01, 0x02, 0x03, 0x04, 0x42]; let mut context = create_ctx_with_session(); let key_handle_seal = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let key_handle_unseal = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, 
None, None, None, ) .unwrap() .key_handle; let key_pub = create_public_sealed_object(); let result = context .create( key_handle_seal, &key_pub, None, Some(SensitiveData::try_from(testbytes.to_vec()).unwrap()).as_ref(), None, None, ) .unwrap(); let loaded_key = context .load(key_handle_unseal, result.out_private, &result.out_public) .unwrap(); let unsealed = context.unseal(loaded_key.into()).unwrap(); let unsealed = unsealed.value(); assert!(unsealed == testbytes); } }
mod test_create { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_create() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context .create( prim_key_handle, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); } } mod test_load { use crate::common::{create_ctx_with_session, decryption_key_pub, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_load() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let result = context .create( prim_key_handle, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); let _ = context .load(prim_key_handle, result.out_private, &result.out_public) .unwrap(); } } mod test_load_external_public { use crate::common::{create_ctx_with_session, KEY}; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) 
.build() .expect("Failed to build object attributes"); PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..256]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external_public() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); context .load_external_public(&pub_key, Hierarchy::Owner) .unwrap(); } } mod test_load_external { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; use tss_esapi_sys::{ TPM2B_PRIVATE_KEY_RSA, TPM2B_SENSITIVE, TPMT_SENSITIVE, TPMU_SENSITIVE_COMPOSITE, }; const KEY: [u8; 256] = [ 0xc9, 0x75, 0xf8, 0xb2, 0x30, 0xf4, 0x24, 0x6e, 0x95, 0xb1, 0x3c, 0x55, 0x0f, 0xe4, 0x48, 0xe9, 0xac, 0x06, 0x1f, 0xa8, 0xbe, 0xa4, 0xd7, 0x1c, 0xa5, 0x5e, 0x2a, 0xbf, 0x60, 0xc2, 0x98, 0x63, 0x6c, 0xb4, 0xe2, 0x61, 0x54, 0x31, 0xc3, 0x3e, 0x9d, 0x1a, 0x83, 0x84, 0x18, 0x51, 0xe9, 0x8c, 0x24, 0xcf, 0xac, 0xc6, 0x0d, 0x26, 0x2c, 0x9f, 0x2b, 0xd5, 0x91, 0x98, 0x89, 0xe3, 0x68, 0x97, 0x36, 0x02, 0xec, 0x16, 0x37, 0x24, 0x08, 0xb4, 0x77, 0xd1, 0x56, 0x10, 0x3e, 0xf0, 0x64, 0xf6, 0x68, 0x50, 0x68, 0x31, 0xf8, 0x9b, 0x88, 0xf2, 0xc5, 0xfb, 0xc9, 0x21, 0xd2, 0xdf, 0x93, 0x6f, 
0x98, 0x94, 0x53, 0x68, 0xe5, 0x25, 0x8d, 0x8a, 0xf1, 0xd7, 0x5b, 0xf3, 0xf9, 0xdf, 0x8c, 0x77, 0x24, 0x9e, 0x28, 0x09, 0x36, 0xf0, 0xa2, 0x93, 0x17, 0xad, 0xbb, 0x1a, 0xd7, 0x6f, 0x25, 0x6b, 0x0c, 0xd3, 0x76, 0x7f, 0xcf, 0x3a, 0xe3, 0x1a, 0x84, 0x57, 0x62, 0x71, 0x8a, 0x6a, 0x42, 0x94, 0x71, 0x21, 0x6a, 0x13, 0x73, 0x17, 0x56, 0xa2, 0x38, 0xc1, 0x5e, 0x76, 0x0b, 0x67, 0x6b, 0x6e, 0xcd, 0xd3, 0xe2, 0x8a, 0x80, 0x61, 0x6c, 0x1c, 0x60, 0x9d, 0x65, 0xbd, 0x5a, 0x4e, 0xeb, 0xa2, 0x06, 0xd6, 0xbe, 0xf5, 0x49, 0xc1, 0x7d, 0xd9, 0x46, 0x3e, 0x9f, 0x2f, 0x92, 0xa4, 0x1a, 0x14, 0x2c, 0x1e, 0xb7, 0x6d, 0x71, 0x29, 0x92, 0x43, 0x7b, 0x76, 0xa4, 0x8b, 0x33, 0xf3, 0xd0, 0xda, 0x7c, 0x7f, 0x73, 0x50, 0xe2, 0xc5, 0x30, 0xad, 0x9e, 0x0f, 0x61, 0x73, 0xa0, 0xbb, 0x87, 0x1f, 0x0b, 0x70, 0xa9, 0xa6, 0xaa, 0x31, 0x2d, 0x62, 0x2c, 0xaf, 0xea, 0x49, 0xb2, 0xce, 0x6c, 0x23, 0x90, 0xdd, 0x29, 0x37, 0x67, 0xb1, 0xc9, 0x99, 0x3a, 0x3f, 0xa6, 0x69, 0xc9, 0x0d, 0x24, 0x3f, ]; const PRIV_KEY: [u8; 256] = [ 0xcf, 0x7c, 0xe8, 0xa1, 0x9c, 0x47, 0xe1, 0x70, 0xbd, 0x38, 0x0a, 0xaf, 0x26, 0x5c, 0x48, 0x94, 0x48, 0x54, 0x98, 0x07, 0xae, 0xb9, 0x5c, 0x46, 0xaf, 0x8f, 0x59, 0xc8, 0x30, 0x1b, 0x98, 0xe3, 0x2a, 0x93, 0xb2, 0xdb, 0xab, 0x81, 0xbf, 0xd2, 0xad, 0x0d, 0xb6, 0x5b, 0x57, 0xbf, 0x98, 0xcb, 0xbc, 0x97, 0xb8, 0xc3, 0xa4, 0xb0, 0xc9, 0xf1, 0x05, 0x46, 0xed, 0x06, 0xdf, 0xdc, 0x58, 0xf4, 0xe0, 0x23, 0x15, 0x77, 0x25, 0x7b, 0x46, 0x6f, 0xea, 0x0c, 0xeb, 0xa5, 0x49, 0x53, 0x1d, 0xa0, 0x2e, 0x3a, 0x7e, 0x8e, 0x8d, 0xec, 0xdd, 0xa6, 0x07, 0x95, 0x40, 0xab, 0x3e, 0x10, 0x9b, 0x07, 0xce, 0xe9, 0xf3, 0xdb, 0x99, 0xb7, 0x52, 0xab, 0xa6, 0x22, 0x43, 0x70, 0xc2, 0x2c, 0xdc, 0x98, 0x4e, 0x05, 0x62, 0xdf, 0xe4, 0x6a, 0xba, 0xbd, 0x28, 0x4c, 0xbe, 0xbd, 0xb9, 0x80, 0x54, 0xed, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; pub fn get_ext_rsa_priv() -> TPM2B_SENSITIVE { TPM2B_SENSITIVE { size: std::mem::size_of::<TPM2B_SENSITIVE>() as u16, sensitiveArea: TPMT_SENSITIVE { sensitiveType: PublicAlgorithm::Rsa.into(), sensitive: TPMU_SENSITIVE_COMPOSITE { rsa: TPM2B_PRIVATE_KEY_RSA { size: 128, buffer: PRIV_KEY, }, }, ..Default::default() }, } } pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) .build() .expect("Failed to build object attributes"); PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); let priv_key = get_ext_rsa_priv(); let key_handle = context .load_external(&priv_key, &pub_key, Hierarchy::Null) .unwrap(); 
context.flush_context(key_handle.into()).unwrap(); } } mod test_read_public { use crate::common::{create_ctx_with_session, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_read_public() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let ke
} mod test_make_credential { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryInto; use tss_esapi::interface_types::resource_handles::Hierarchy; #[test] fn test_make_credential() { let mut context = create_ctx_with_session(); let key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let (_, key_name, _) = context.read_public(key_handle).unwrap(); let cred = vec![1, 2, 3, 4, 5]; context .execute_without_session(|ctx| { ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name) }) .unwrap(); } } mod test_activate_credential { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::{TryFrom, TryInto}; use tss_esapi::{ attributes::SessionAttributesBuilder, constants::SessionType, interface_types::{algorithm::HashingAlgorithm, resource_handles::Hierarchy}, structures::{Digest, SymmetricDefinition}, }; #[test] fn test_make_activate_credential() { let mut context = create_ctx_with_session(); let (session_attributes, session_attributes_mask) = SessionAttributesBuilder::new().build(); let session_1 = context .execute_without_session(|ctx| { ctx.start_auth_session( None, None, None, SessionType::Hmac, SymmetricDefinition::AES_256_CFB, HashingAlgorithm::Sha256, ) }) .expect("session_1: Call to start_auth_session failed.") .expect("session_1: The auth session returned was NONE"); context .tr_sess_set_attributes(session_1, session_attributes, session_attributes_mask) .expect("Call to tr_sess_set_attributes failed"); let session_2 = context .execute_without_session(|ctx| { ctx.start_auth_session( None, None, None, SessionType::Hmac, SymmetricDefinition::AES_256_CFB, HashingAlgorithm::Sha256, ) }) .expect("session_2: Call to start_auth_session failed.") .expect("session_2: The auth session returned was NONE"); context .tr_sess_set_attributes(session_2, session_attributes, session_attributes_mask) .unwrap(); let key_handle = context 
.create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let (_, key_name, _) = context .read_public(key_handle) .expect("Call to read_public failed"); let cred = vec![1, 2, 3, 4, 5]; let (credential_blob, secret) = context .execute_without_session(|ctx| { ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name) }) .expect("Call to make_credential failed"); context.set_sessions((Some(session_1), Some(session_2), None)); let decrypted = context .activate_credential(key_handle, key_handle, credential_blob, secret) .expect("Call to active_credential failed"); let expected = Digest::try_from(vec![1, 2, 3, 4, 5]).expect("Failed to create digest for expected"); assert_eq!(expected, decrypted); } } mod test_unseal { use crate::common::{create_ctx_with_session, create_public_sealed_object, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::SensitiveData}; #[test] fn unseal() { let testbytes: [u8; 5] = [0x01, 0x02, 0x03, 0x04, 0x42]; let mut context = create_ctx_with_session(); let key_handle_seal = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let key_handle_unseal = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let key_pub = create_public_sealed_object(); let result = context .create( key_handle_seal, &key_pub, None, Some(SensitiveData::try_from(testbytes.to_vec()).unwrap()).as_ref(), None, None, ) .unwrap(); let loaded_key = context .load(key_handle_unseal, result.out_private, &result.out_public) .unwrap(); let unsealed = context.unseal(loaded_key.into()).unwrap(); let unsealed = unsealed.value(); assert!(unsealed == testbytes); } }
y_handle = context .create_primary( Hierarchy::Owner, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context.read_public(key_handle).unwrap(); }
function_block-function_prefixed
[ { "content": "#[allow(dead_code)]\n\npub fn create_public_sealed_object() -> Public {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_no_da(true)\n\n .with_admin_with_policy(true)\n\n .with_user_wit...
Rust
src/tool.rs
jacrgrady/farnans-farming-game
a9c168b13e3cfe5fa07c97aca02ee686206c4ba6
use crate::crop::CropType; use crate::genes; use crate::population::Population; use crate::InventoryItemTrait; use sdl2::rect::Rect; use sdl2::render::Texture; pub enum ToolType { Hand, Hoe, WateringCan, } pub struct Tool<'a> { src: Rect, texture: Texture<'a>, current_type: ToolType, } impl<'a> Tool<'a> { pub fn new(src: Rect, texture: Texture<'a>, t: ToolType) -> Tool<'a> { Tool { src, texture, current_type: t, } } } impl InventoryItemTrait for Tool<'_> { fn get_value(&self) -> i32 { 1 } fn texture(&self) -> &Texture { &self.texture } fn src(&self) -> Rect { self.src } fn to_save_string(&self) -> Option<String> { None } fn inventory_input( &self, square: (i32, i32), pop: &mut Population, ) -> Option<(Option<CropType>, Option<genes::Genes>, Option<genes::Genes>)> { let (x, y) = square; match self.current_type { ToolType::Hand => { if let Some(_i) = pop .get_crop_with_index(x as u32, y as u32) .get_gene(crate::genes::GeneType::GrowthRate) { println!( "{}", pop.get_crop_with_index(x as u32, y as u32) .get_all_genes() .as_ref() .unwrap() ); if let Some(p) = pop .get_crop_with_index(x as u32, y as u32) .get_child() .as_ref() { println!("{}", p); } else { println!("None"); } } if pop.get_crop_with_index(x as u32, y as u32).rotten() { let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32); _c.set_stage(0); _c.set_rotten(false); _c.set_crop_type_enum(CropType::None); _c.set_water(false); _c.set_genes(None); _c.set_child(None); return None; } if pop.get_crop_with_index(x as u32, y as u32).get_stage() == 3 { let _g = pop .get_crop_with_index(x as u32, y as u32) .get_all_genes() .as_ref() .unwrap() .clone(); let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32); let return_crop_type = _c.get_crop_type_enum(); _c.set_crop_type_enum(CropType::None); _c.set_stage(0); _c.set_water(false); _c.set_genes(None); _c.set_child(None); let child = _c.get_child().clone(); return Some((Some(return_crop_type), Some(_g), child)); } } ToolType::Hoe => { if pop 
.get_crop_with_index(x as u32, y as u32) .get_crop_type() .to_owned() == "None" && !pop.get_tile_with_index(x as u32, y as u32).tilled() { let mut _tile = pop.get_tile_with_index_mut(x as u32, y as u32); _tile.set_tilled(true); } } ToolType::WateringCan => { if !pop.get_crop_with_index(x as u32, y as u32).get_watered() { pop.get_crop_with_index_mut(x as u32, y as u32) .set_water(true); } if pop.get_tile_with_index(x as u32, y as u32).tilled() { pop.get_tile_with_index_mut(x as u32, y as u32) .set_water(true); } } } return None; } }
use crate::crop::CropType; use crate::genes; use crate::population::Population; use crate::InventoryItemTrait; use sdl2::rect::Rect; use sdl2::render::Texture; pub enum ToolType { Hand, Hoe, WateringCan, } pub struct Tool<'a> { src: Rect, texture: Texture<'a>, current_type: ToolType, } impl<'a> Tool<'a> { pub fn new(src: Rect, texture: Texture<'a>, t: ToolType) -> Tool<'a> { Tool { src, texture, current_type: t, } } } impl InventoryItemTrait for Tool<'_> { fn get_value(&self) -> i32 { 1 } fn texture(&self) -> &Texture { &self.texture } fn src(&self) -> Rect { self.src } fn to_save_string(&self) -> Option<String> { None } fn inventory_input( &self, square: (i32, i32), pop: &mut Population, ) -> Option<(Option<CropType>, Option<genes::Genes>, Option<genes::Genes>)> { let (x, y) = square; match self.current_type { ToolType::Hand => { if let Some(_i) = pop .get_crop_with_index(x as u32, y as u32) .get_gene(crate::genes::GeneType::GrowthRate) { println!( "{}", pop.get_crop_with_index(x as u32, y as u32) .get_all_genes() .as_ref() .unwrap() ); if let Some(p) = pop .get_crop_with_index(x as u32, y as u32) .get_child() .as_ref() { println!("{}", p); } else { println!("None"); } } if pop.get_crop_with_index(x as u32, y as u32).rotten() { let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32); _c.set_stage(0); _c.set_rotten(false); _c.set_crop_type_enum(CropType::None); _c.set_water(false); _c.set_genes(None); _c.set_child(None); return None; } if pop.get_crop_with_index(x as u32, y as u32).get_stage() == 3 { let _g = pop .get_crop_with_index(x as u32, y as u32) .get_all_genes() .as_ref() .unwra
}
p() .clone(); let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32); let return_crop_type = _c.get_crop_type_enum(); _c.set_crop_type_enum(CropType::None); _c.set_stage(0); _c.set_water(false); _c.set_genes(None); _c.set_child(None); let child = _c.get_child().clone(); return Some((Some(return_crop_type), Some(_g), child)); } } ToolType::Hoe => { if pop .get_crop_with_index(x as u32, y as u32) .get_crop_type() .to_owned() == "None" && !pop.get_tile_with_index(x as u32, y as u32).tilled() { let mut _tile = pop.get_tile_with_index_mut(x as u32, y as u32); _tile.set_tilled(true); } } ToolType::WateringCan => { if !pop.get_crop_with_index(x as u32, y as u32).get_watered() { pop.get_crop_with_index_mut(x as u32, y as u32) .set_water(true); } if pop.get_tile_with_index(x as u32, y as u32).tilled() { pop.get_tile_with_index_mut(x as u32, y as u32) .set_water(true); } } } return None; }
function_block-function_prefixed
[ { "content": "pub fn background_to_draw(p: &Player) -> Rect {\n\n Rect::new(\n\n ((p.x() + ((p.width() / 2) as i32)) - ((CAM_W / 2) as i32)).clamp(0, (BG_W - CAM_W) as i32),\n\n ((p.y() + ((p.height() / 2) as i32)) - ((CAM_H / 2) as i32))\n\n .clamp(0, (BG_H - CAM_H) as i32),\n\n ...
Rust
src/sync.rs
naftulikay/phatnoise.rs
ed6d4217bcd29d58738224c1314f16020e73f228
use log::{debug, error, info}; use crate::dms; use crate::library::get_dms_media_library; use crate::library::get_local_media_library; use crate::library::LibraryFile; use crate::library::LibrarySource; use crate::utils::crypto::sha256sum; use crate::utils::fs::copy_mtime; use rayon::prelude::*; use std::collections::{BTreeSet, HashSet}; use std::env; use std::fs; use std::path::Path; use std::process; pub fn synchronize() { if !dms::is_dms_present() { error!("No DMS device detected."); process::exit(1); } if !dms::is_dms_mounted() { error!("DMS device is present but not mounted."); process::exit(1); } synchronize_media_files(); } pub fn synchronize_media_files() { info!("Synchronizing media files with DMS..."); let local_dir = Path::join( Path::new(&match env::var("HOME") { Ok(value) => value, Err(e) => { error!("Unable to detect home directory: {}", e); process::exit(1); } }), Path::new("Music"), ); let dms_dir = dms::get_dms_mount_point().expect("DMS not present or not mounted."); debug!("Music directory: {}", local_dir.display()); let (local, dms) = (get_local_media_library(&local_dir), get_dms_media_library()); let (added, deleted, changed) = ( added_files(&local, &dms), deleted_files(&local, &dms), changed_files(&local, &dms), ); info!("Copying {} new files to the DMS...", added.len()); copy_files(&added, &local, &dms_dir); info!("Copying {} changed files to the DMS...", changed.len()); copy_files(&changed, &local, &dms_dir); info!("Deleting {} orphaned files from the DMS...", deleted.len()); delete_files(&deleted); } fn copy_files(files: &Vec<&LibraryFile>, local: &BTreeSet<LibraryFile>, dms_dir: &Path) { for file in files { let (source, dest) = ( &local.get(file).unwrap().path, Path::join(&dms_dir, Path::new(&file.debase())), ); debug!( "Copying local file {} to DMS at {}...", source.display(), dest.display() ); let dest_dir = &dest .parent() .expect(format!("Unable to get parent directory for {}", dest.display()).as_str()); if !dest_dir.is_dir() { 
debug!("Creating parent directory {}", dest_dir.display()); fs::create_dir_all(&dest_dir).expect( format!("Unable to create parent directory for {}", dest.display()).as_str(), ); } fs::copy(&source, &dest) .expect(format!("Unable to copy file {} to DMS", source.display()).as_str()); copy_mtime(&source, &dest).expect( format!( "Unable to copy modification time from source to destination {}", dest.display() ) .as_str(), ); } } fn delete_files(files: &Vec<&LibraryFile>) { for file in files .iter() .filter(|f| f.source == LibrarySource::DMS) .map(|f| &f.path) { debug!("Deleting orphaned file from DMS {}", file.display()); fs::remove_file(file) .expect(format!("Unable to remove file from DMS: {}", file.display()).as_str()); } } pub fn added_files<'a>( local: &'a BTreeSet<LibraryFile>, dms: &'a BTreeSet<LibraryFile>, ) -> Vec<&'a LibraryFile> { local.difference(dms).collect() } pub fn deleted_files<'a>( local: &'a BTreeSet<LibraryFile>, dms: &'a BTreeSet<LibraryFile>, ) -> Vec<&'a LibraryFile> { dms.difference(local).collect() } pub fn changed_files<'a>( local: &'a BTreeSet<LibraryFile>, dms: &'a BTreeSet<LibraryFile>, ) -> Vec<&'a LibraryFile> { local .into_par_iter() .filter(|p| { if !dms.contains(p) { return false; } let (local, remote) = (p, dms.get(*p).unwrap()); let (lmeta, rmeta) = ( fs::metadata(&local.path).unwrap(), fs::metadata(&remote.path).unwrap(), ); let (llen, rlen) = (lmeta.len(), rmeta.len()); let (lmod, rmod) = (lmeta.modified().unwrap(), rmeta.modified().unwrap()); if llen != rlen { debug!("{}: changed - size not equal", local.debase()); return true; } let (first, last) = (lmod.min(rmod), lmod.max(rmod)); let diff = last.duration_since(first).unwrap(); if diff.as_secs() <= 3 { return false; } let (source, destination) = ( sha256sum(&local.path).expect("unable to compute checksum for local file"), sha256sum(&remote.path).expect("unable to compute checksum for remote file"), ); if source == destination { debug!("{}: unchanged - checksums match", 
local.debase()); copy_mtime(&local.path, &remote.path).ok(); false } else { debug!("{}: changed - checksums differ", local.debase()); true } }) .collect() }
use log::{debug, error, info}; use crate::dms; use crate::library::get_dms_media_library; use crate::library::get_local_media_library; use crate::library::LibraryFile; use crate::library::LibrarySource; use crate::utils::crypto::sha256sum; use crate::utils::fs::copy_mtime; use rayon::prelude::*; use std::collections::{BTreeSet, HashSet}; use std::env; use std::fs; use std::path::Path; use std::process; pub fn synchronize() { if !dms::is_dms_present() { error!("No DMS device detected."); process::exit(1); } if !dms::is_dms_mounted() { error!("DMS device is present but not mounted."); process::exit(1); } synchronize_media_files(); }
fn copy_files(files: &Vec<&LibraryFile>, local: &BTreeSet<LibraryFile>, dms_dir: &Path) { for file in files { let (source, dest) = ( &local.get(file).unwrap().path, Path::join(&dms_dir, Path::new(&file.debase())), ); debug!( "Copying local file {} to DMS at {}...", source.display(), dest.display() ); let dest_dir = &dest .parent() .expect(format!("Unable to get parent directory for {}", dest.display()).as_str()); if !dest_dir.is_dir() { debug!("Creating parent directory {}", dest_dir.display()); fs::create_dir_all(&dest_dir).expect( format!("Unable to create parent directory for {}", dest.display()).as_str(), ); } fs::copy(&source, &dest) .expect(format!("Unable to copy file {} to DMS", source.display()).as_str()); copy_mtime(&source, &dest).expect( format!( "Unable to copy modification time from source to destination {}", dest.display() ) .as_str(), ); } } fn delete_files(files: &Vec<&LibraryFile>) { for file in files .iter() .filter(|f| f.source == LibrarySource::DMS) .map(|f| &f.path) { debug!("Deleting orphaned file from DMS {}", file.display()); fs::remove_file(file) .expect(format!("Unable to remove file from DMS: {}", file.display()).as_str()); } } pub fn added_files<'a>( local: &'a BTreeSet<LibraryFile>, dms: &'a BTreeSet<LibraryFile>, ) -> Vec<&'a LibraryFile> { local.difference(dms).collect() } pub fn deleted_files<'a>( local: &'a BTreeSet<LibraryFile>, dms: &'a BTreeSet<LibraryFile>, ) -> Vec<&'a LibraryFile> { dms.difference(local).collect() } pub fn changed_files<'a>( local: &'a BTreeSet<LibraryFile>, dms: &'a BTreeSet<LibraryFile>, ) -> Vec<&'a LibraryFile> { local .into_par_iter() .filter(|p| { if !dms.contains(p) { return false; } let (local, remote) = (p, dms.get(*p).unwrap()); let (lmeta, rmeta) = ( fs::metadata(&local.path).unwrap(), fs::metadata(&remote.path).unwrap(), ); let (llen, rlen) = (lmeta.len(), rmeta.len()); let (lmod, rmod) = (lmeta.modified().unwrap(), rmeta.modified().unwrap()); if llen != rlen { debug!("{}: changed - size not 
equal", local.debase()); return true; } let (first, last) = (lmod.min(rmod), lmod.max(rmod)); let diff = last.duration_since(first).unwrap(); if diff.as_secs() <= 3 { return false; } let (source, destination) = ( sha256sum(&local.path).expect("unable to compute checksum for local file"), sha256sum(&remote.path).expect("unable to compute checksum for remote file"), ); if source == destination { debug!("{}: unchanged - checksums match", local.debase()); copy_mtime(&local.path, &remote.path).ok(); false } else { debug!("{}: changed - checksums differ", local.debase()); true } }) .collect() }
pub fn synchronize_media_files() { info!("Synchronizing media files with DMS..."); let local_dir = Path::join( Path::new(&match env::var("HOME") { Ok(value) => value, Err(e) => { error!("Unable to detect home directory: {}", e); process::exit(1); } }), Path::new("Music"), ); let dms_dir = dms::get_dms_mount_point().expect("DMS not present or not mounted."); debug!("Music directory: {}", local_dir.display()); let (local, dms) = (get_local_media_library(&local_dir), get_dms_media_library()); let (added, deleted, changed) = ( added_files(&local, &dms), deleted_files(&local, &dms), changed_files(&local, &dms), ); info!("Copying {} new files to the DMS...", added.len()); copy_files(&added, &local, &dms_dir); info!("Copying {} changed files to the DMS...", changed.len()); copy_files(&changed, &local, &dms_dir); info!("Deleting {} orphaned files from the DMS...", deleted.len()); delete_files(&deleted); }
function_block-full_function
[ { "content": "pub fn is_dms_present() -> bool {\n\n get_dms_device().is_some()\n\n}\n\n\n", "file_path": "src/dms.rs", "rank": 2, "score": 86054.32286933367 }, { "content": "pub fn is_dms_mounted() -> bool {\n\n get_dms_mount_point().is_some()\n\n}\n\n\n", "file_path": "src/dms.rs"...
Rust
crates/fefix/src/session/config.rs
ferrumfix/ferrumfix
36b40f3d34cc9af46acc7f60d2f3ae517fa8c0ee
use super::{Environment, MsgSeqNumCounter, SeqNumbers}; use std::marker::PhantomData; use std::num::NonZeroU64; use std::time::Duration; pub trait Configure: Clone + Default { fn verify_test_indicator(&self) -> bool { true } fn max_allowed_latency(&self) -> Duration { Duration::from_secs(3) } fn begin_string(&self) -> &[u8] { b"FIX.4.4" } fn sender_comp_id(&self) -> &[u8] { b"SENDER_COMP" } fn target_comp_id(&self) -> &[u8] { b"TARGET_COMP" } fn environment(&self) -> Environment { Environment::Production { allow_test: true } } fn heartbeat(&self) -> Duration { Duration::from_secs(30) } } #[derive(Debug, Clone)] #[allow(missing_docs)] pub struct Config { phantom: PhantomData<()>, pub verify_test_indicator: bool, pub max_allowed_latency: Duration, pub begin_string: String, pub environment: Environment, pub heartbeat: Duration, pub seq_numbers: SeqNumbers, pub msg_seq_num_inbound: MsgSeqNumCounter, pub msg_seq_num_outbound: MsgSeqNumCounter, pub sender_comp_id: String, pub target_comp_id: String, } impl Configure for Config { fn verify_test_indicator(&self) -> bool { self.verify_test_indicator } fn max_allowed_latency(&self) -> Duration { self.max_allowed_latency } fn sender_comp_id(&self) -> &[u8] { self.sender_comp_id.as_bytes() } fn target_comp_id(&self) -> &[u8] { self.target_comp_id.as_bytes() } fn begin_string(&self) -> &[u8] { self.begin_string.as_bytes() } fn environment(&self) -> Environment { self.environment } fn heartbeat(&self) -> Duration { self.heartbeat } } impl Default for Config { fn default() -> Self { Self { phantom: PhantomData::default(), verify_test_indicator: true, max_allowed_latency: Duration::from_secs(3), begin_string: "FIX.4.4".to_string(), environment: Environment::Production { allow_test: true }, heartbeat: Duration::from_secs(30), seq_numbers: SeqNumbers::new(NonZeroU64::new(1).unwrap(), NonZeroU64::new(1).unwrap()), msg_seq_num_inbound: MsgSeqNumCounter::START, msg_seq_num_outbound: MsgSeqNumCounter::START, sender_comp_id: 
"SENDER_COMP".to_string(), target_comp_id: "TARGET_COMP".to_string(), } } } #[cfg(test)] mod test { use super::*; use quickcheck_macros::quickcheck; #[derive(Default, Clone)] struct ConfigDefault; impl Configure for ConfigDefault {} #[test] fn config_defaults() { let config = Config::default(); assert_eq!( config.max_allowed_latency(), ConfigDefault.max_allowed_latency() ); assert_eq!( config.verify_test_indicator(), ConfigDefault.verify_test_indicator() ); } #[quickcheck] fn config_set_max_allowed_latency(latency: Duration) -> bool { let mut config = Config::default(); config.max_allowed_latency = latency; config.max_allowed_latency() == latency } #[quickcheck] fn config_set_verify_test_indicator(verify: bool) -> bool { let mut config = Config::default(); config.verify_test_indicator = verify; config.verify_test_indicator() == verify } }
use super::{Environment, MsgSeqNumCounter, SeqNumbers}; use std::marker::PhantomData; use std::num::NonZeroU64; use std::time::Duration; pub trait Configure: Clone + Default { fn verify_test_indicator(&self) -> bool { true } fn max_allowed_latency(&self) -> Duration { Duration::from_secs(3) } fn begin_string(&self) -> &[u8] { b"FIX.4.4" } fn sender_comp_id(&self) -> &[u8] { b"SENDER_COMP" } fn target_comp_id(&self) -> &[u8] { b"TARGET_COMP" } fn environment(&self) -> Environment { Environment::Production { allow_test: true } } fn heartbeat(&self) -> Duration { Duration::from_secs(30) } } #[derive(Debug, Clone)] #[allow(missing_docs)] pub struct Config { phantom: PhantomData<()>, pub verify_test_indicato
wrap()), msg_seq_num_inbound: MsgSeqNumCounter::START, msg_seq_num_outbound: MsgSeqNumCounter::START, sender_comp_id: "SENDER_COMP".to_string(), target_comp_id: "TARGET_COMP".to_string(), } } } #[cfg(test)] mod test { use super::*; use quickcheck_macros::quickcheck; #[derive(Default, Clone)] struct ConfigDefault; impl Configure for ConfigDefault {} #[test] fn config_defaults() { let config = Config::default(); assert_eq!( config.max_allowed_latency(), ConfigDefault.max_allowed_latency() ); assert_eq!( config.verify_test_indicator(), ConfigDefault.verify_test_indicator() ); } #[quickcheck] fn config_set_max_allowed_latency(latency: Duration) -> bool { let mut config = Config::default(); config.max_allowed_latency = latency; config.max_allowed_latency() == latency } #[quickcheck] fn config_set_verify_test_indicator(verify: bool) -> bool { let mut config = Config::default(); config.verify_test_indicator = verify; config.verify_test_indicator() == verify } }
r: bool, pub max_allowed_latency: Duration, pub begin_string: String, pub environment: Environment, pub heartbeat: Duration, pub seq_numbers: SeqNumbers, pub msg_seq_num_inbound: MsgSeqNumCounter, pub msg_seq_num_outbound: MsgSeqNumCounter, pub sender_comp_id: String, pub target_comp_id: String, } impl Configure for Config { fn verify_test_indicator(&self) -> bool { self.verify_test_indicator } fn max_allowed_latency(&self) -> Duration { self.max_allowed_latency } fn sender_comp_id(&self) -> &[u8] { self.sender_comp_id.as_bytes() } fn target_comp_id(&self) -> &[u8] { self.target_comp_id.as_bytes() } fn begin_string(&self) -> &[u8] { self.begin_string.as_bytes() } fn environment(&self) -> Environment { self.environment } fn heartbeat(&self) -> Duration { self.heartbeat } } impl Default for Config { fn default() -> Self { Self { phantom: PhantomData::default(), verify_test_indicator: true, max_allowed_latency: Duration::from_secs(3), begin_string: "FIX.4.4".to_string(), environment: Environment::Production { allow_test: true }, heartbeat: Duration::from_secs(30), seq_numbers: SeqNumbers::new(NonZeroU64::new(1).unwrap(), NonZeroU64::new(1).un
random
[ { "content": "/// A provider of configuration options related to FIX encoding and decoding.\n\n///\n\n/// # Implementing this trait\n\n///\n\n/// Before implementing this trait, you should look into [`Config`], which is\n\n/// adequate for most uses. The only benefit of writing your own [`Configure`]\n\n/// imp...
Rust
ivy-graphics/build.rs
ten3roberts/ivy
fb5a7645c9f699c2aebf3d1b90c1d1f9e78355fa
use anyhow::{Context, Result}; use shaderc::ShaderKind; use std::{ env, error::Error, ffi::OsString, fs, path::{Path, PathBuf}, slice, }; #[derive(Debug)] struct CompilationFailure(PathBuf); impl Error for CompilationFailure {} impl std::fmt::Display for CompilationFailure { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Failed to compile resource: {}", self.0.display()) } } fn rerun_if_changed<P: AsRef<Path>>(path: P) { let path = path.as_ref(); println!( "cargo:rerun-if-changed={}", path.canonicalize().unwrap().display() ); } fn compile_dir<A, B, F, C>(src: A, dst: B, rename_func: F, compile_func: C) -> Result<()> where A: AsRef<Path>, B: AsRef<Path>, F: Fn(&mut OsString), C: Fn(&Path, &Path) -> Result<()>, { let src = src.as_ref(); let dst = dst.as_ref(); walkdir::WalkDir::new(src) .follow_links(true) .into_iter() .flat_map(Result::ok) .map(|entry| -> Result<Option<_>> { let path = entry.path(); rerun_if_changed(path); let metadata = entry.metadata()?; if metadata.is_dir() { return Ok(None); } let mut fname = entry.file_name().to_os_string(); rename_func(&mut fname); let base = path .strip_prefix(src)? .parent() .context("No parent for path")?; let mut dst_path = PathBuf::new(); dst_path.push(dst); dst_path.push(base); fs::create_dir_all(&dst_path)?; dst_path.push(fname); let dst_metadata = dst_path.metadata().ok(); if let Some(dst_metadata) = dst_metadata { if dst_metadata.modified()? > metadata.modified()? 
{ return Ok(None); } } eprintln!("{:?} => {:?}", path, dst_path); compile_func(path, &dst_path) .with_context(|| format!("Failed to compile {:?}", path))?; Ok(Some(())) }) .flat_map(|val| val.transpose()) .collect() } fn glslc(src: &Path, dst: &Path) -> Result<()> { let mut compiler = shaderc::Compiler::new().unwrap(); let mut options = shaderc::CompileOptions::new().unwrap(); let source = fs::read_to_string(src)?; let ext = src.extension().unwrap_or_default(); let kind = match ext.to_string_lossy().as_ref() { "vert" => ShaderKind::Vertex, "frag" => ShaderKind::Fragment, "geom" => ShaderKind::Geometry, "comp" => ShaderKind::Compute, _ => ShaderKind::InferFromSource, }; options.add_macro_definition("EP", Some("main")); let binary_result = compiler.compile_into_spirv( &source, kind, &src.to_string_lossy(), "main", Some(&options), )?; assert_eq!(Some(&0x07230203), binary_result.as_binary().first()); let bin = binary_result.as_binary(); let data = bin.as_ptr() as *const u8; let bin = unsafe { slice::from_raw_parts(data, bin.len() * 4) }; fs::write(dst, bin)?; Ok(()) } fn main() -> Result<()> { let out_dir = env::var("OUT_DIR")?; let mut dst = PathBuf::new(); dst.push(out_dir); dst.push("shaders"); compile_dir( "./shaders/", &dst, |path| path.push(".spv"), |src, dst| glslc(src, dst), )?; Ok(()) }
use anyhow::{Context, Result}; use shaderc::ShaderKind; use std::{ env, error::Error, ffi::OsString, fs, path::{Path, PathBuf}, slice, }; #[derive(Debug)] struct CompilationFailure(PathBuf); impl Error for CompilationFailure {} impl std::fmt::Display for CompilationFailure { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Failed to compile resource: {}", self.0.display()) } } fn rerun_if_changed<P: AsRef<Path>>(path: P) { let path = path.as_ref(); println!( "cargo:rerun-if-changed={}", path.canonicalize().unwrap().display() ); } fn compile_dir<A, B, F, C>(src: A, dst: B, rename_func: F, compile_func: C) -> Result<()> where A: AsRef<Path>, B: AsRef<Path>, F: Fn(&mut OsString), C: Fn(&Path, &Path) -> Result<()>, { let src = src.as_ref(); let dst = dst.as_ref(); walkdir::WalkDir::new(src) .follow_links(true) .into_iter() .flat_map(Result::ok) .map(|entry| -> Result<Option<_>> { let path = entry.path(); rerun_if_changed(path); let metadata = entry.metadata()?; if metadata.is_dir() { return Ok(None); } let mut fname = entry.file_name().to_os_string(); rename_func(&mut fname); let base = path .strip_prefix(src)? .parent() .context("No parent for path")?; let mut dst_path = PathBuf::new(); dst_path.push(dst); dst_path.push(base); fs::create_dir_all(&dst_path)?; dst_path.push(fname); let dst_metadata = dst_path.metadata().ok(); if let Some(dst_metadata) = dst_metadata { if dst_metadata.modified()? > metadata.modified()? 
{ return Ok(None); } } eprintln!("{:?} => {:?}", path, dst_path); compile_func(path, &dst_path) .with_context(|| format!("Failed to compile {:?}", path))?; Ok(Some(())) }) .flat_map(|val| val.transpose()) .collect() } fn glslc(src: &Path, dst: &Path) -> Result<()> { let mut compiler = shaderc::Compiler::new().unwrap(); let mut options = shaderc::CompileOptions::new().unwrap(); let source = fs::read_to_string(src)?; let ext = src.extension().unwrap_or_default(); let kind = match ext.to_string_lossy().as_ref() { "vert" => ShaderKind::Vertex, "frag" => ShaderKind::Fragment, "geom" => ShaderKind::Geometry, "comp" => ShaderKind::Compute, _ => ShaderKind::InferFromSource, }; options.add_macro_definition("EP", Some("main")); let binary_result = compiler.compile_into_spirv( &source, kind, &src.to_str
; let bin = binary_result.as_binary(); let data = bin.as_ptr() as *const u8; let bin = unsafe { slice::from_raw_parts(data, bin.len() * 4) }; fs::write(dst, bin)?; Ok(()) } fn main() -> Result<()> { let out_dir = env::var("OUT_DIR")?; let mut dst = PathBuf::new(); dst.push(out_dir); dst.push("shaders"); compile_dir( "./shaders/", &dst, |path| path.push(".spv"), |src, dst| glslc(src, dst), )?; Ok(()) }
ing_lossy(), "main", Some(&options), )?; assert_eq!(Some(&0x07230203), binary_result.as_binary().first())
random
[ { "content": "fn read_to_end<P>(path: P) -> Result<Vec<u8>>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let file = fs::File::open(path.as_ref()).map_err(Error::Io)?;\n\n // Allocate one extra byte so the buffer doesn't need to grow before the\n\n // final `read` call at the end of the file. Don't worry a...
Rust
crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/write.rs
jamesray1/sim1h
a3c7f43bd1fd45647d23667d451fecd07e144ce3
use crate::dht::bbdht::dynamodb::api::item::write::should_put_item_retry; use crate::dht::bbdht::dynamodb::client::Client; use crate::dht::bbdht::dynamodb::schema::blob_attribute_value; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_PUBLISH_TS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_TYPE_HINT_KEY; use crate::dht::bbdht::dynamodb::schema::number_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_set_attribute_value; use crate::dht::bbdht::dynamodb::schema::TableName; use crate::dht::bbdht::error::BbDhtResult; use crate::trace::tracer; use crate::trace::LogContext; use holochain_persistence_api::cas::content::Address; use lib3h_protocol::data_types::EntryAspectData; use rusoto_dynamodb::AttributeValue; use rusoto_dynamodb::DynamoDb; use rusoto_dynamodb::PutItemInput; use rusoto_dynamodb::UpdateItemInput; use std::collections::HashMap; pub fn aspect_list_to_attribute(aspect_list: &Vec<EntryAspectData>) -> AttributeValue { string_set_attribute_value( aspect_list .iter() .map(|aspect| aspect.aspect_address.to_string()) .collect(), ) } pub fn put_aspect( log_context: &LogContext, client: &Client, table_name: &TableName, aspect: &EntryAspectData, ) -> BbDhtResult<()> { tracer(&log_context, "put_aspect"); let mut aspect_item = HashMap::new(); aspect_item.insert( String::from(ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_TYPE_HINT_KEY), string_attribute_value(&aspect.type_hint), ); aspect_item.insert( String::from(ASPECT_KEY), 
blob_attribute_value(&aspect.aspect), ); aspect_item.insert( String::from(ASPECT_PUBLISH_TS_KEY), number_attribute_value(&aspect.publish_ts), ); if should_put_item_retry( log_context, client .put_item(PutItemInput { table_name: table_name.to_string(), item: aspect_item, ..Default::default() }) .sync(), )? { put_aspect(log_context, client, table_name, aspect) } else { Ok(()) } } pub fn append_aspect_list_to_entry( log_context: &LogContext, client: &Client, table_name: &TableName, entry_address: &Address, aspect_list: &Vec<EntryAspectData>, ) -> BbDhtResult<()> { tracer(&log_context, "append_aspects"); for aspect in aspect_list { put_aspect(&log_context, &client, &table_name, &aspect)?; } let mut aspect_addresses_key = HashMap::new(); aspect_addresses_key.insert( String::from(ADDRESS_KEY), string_attribute_value(&String::from(entry_address.to_owned())), ); let mut expression_attribute_values = HashMap::new(); expression_attribute_values.insert( ":aspects".to_string(), aspect_list_to_attribute(&aspect_list), ); let mut expression_attribute_names = HashMap::new(); expression_attribute_names.insert("#aspect_list".to_string(), ASPECT_LIST_KEY.to_string()); let update_expression = "ADD #aspect_list :aspects"; let aspect_list_update = UpdateItemInput { table_name: table_name.to_string(), key: aspect_addresses_key, update_expression: Some(update_expression.to_string()), expression_attribute_names: Some(expression_attribute_names), expression_attribute_values: Some(expression_attribute_values), ..Default::default() }; client.update_item(aspect_list_update).sync()?; Ok(()) } #[cfg(test)] pub mod tests { use crate::aspect::fixture::aspect_list_fresh; use crate::aspect::fixture::entry_aspect_data_fresh; use crate::dht::bbdht::dynamodb::api::aspect::write::append_aspect_list_to_entry; use crate::dht::bbdht::dynamodb::api::aspect::write::aspect_list_to_attribute; use crate::dht::bbdht::dynamodb::api::aspect::write::put_aspect; use 
crate::dht::bbdht::dynamodb::api::item::read::get_item_by_address; use crate::dht::bbdht::dynamodb::api::table::create::ensure_cas_table; use crate::dht::bbdht::dynamodb::api::table::exist::table_exists; use crate::dht::bbdht::dynamodb::api::table::fixture::table_name_fresh; use crate::dht::bbdht::dynamodb::client::local::local_client; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::entry::fixture::entry_hash_fresh; use crate::trace::tracer; use std::collections::HashMap; #[test] fn put_aspect_test() { let log_context = "put_aspect_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_aspect = entry_aspect_data_fresh(); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); assert!(put_aspect(&log_context, &local_client, &table_name, &entry_aspect).is_ok()); } #[test] fn append_aspects_to_entry_test() { let log_context = "append_aspects_to_entry_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_address = entry_hash_fresh(); let aspect_list = aspect_list_fresh(); let mut expected = HashMap::new(); expected.insert( ASPECT_LIST_KEY.to_string(), aspect_list_to_attribute(&aspect_list), ); expected.insert( ADDRESS_KEY.to_string(), string_attribute_value(&String::from(entry_address.clone())), ); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); for _ in 0..3 { assert!(append_aspect_list_to_entry( &log_context, &local_client, &table_name, &entry_address, &aspect_list ) .is_ok()); match get_item_by_address(&log_context, &local_client, &table_name, &entry_address) { Ok(get_item_output) => match 
get_item_output { Some(item) => { assert_eq!(expected["address"], item["address"],); assert_eq!( expected["aspect_list"].ss.iter().count(), item["aspect_list"].ss.iter().count(), ); } None => { tracer(&log_context, "get matches None"); panic!("None"); } }, Err(err) => { tracer(&log_context, "get matches err"); panic!("{:?}", err); } } } } }
use crate::dht::bbdht::dynamodb::api::item::write::should_put_item_retry; use crate::dht::bbdht::dynamodb::client::Client; use crate::dht::bbdht::dynamodb::schema::blob_attribute_value; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_PUBLISH_TS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_TYPE_HINT_KEY; use crate::dht::bbdht::dynamodb::schema::number_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_set_attribute_value; use crate::dht::bbdht::dynamodb::schema::TableName; use crate::dht::bbdht::error::BbDhtResult; use crate::trace::tracer; use crate::trace::LogContext; use holochain_persistence_api::cas::content::Address; use lib3h_protocol::data_types::EntryAspectData; use rusoto_dynamodb::AttributeValue; use rusoto_dynamodb::DynamoDb; use rusoto_dynamodb::PutItemInput; use rusoto_dynamodb::UpdateItemInput; use std::collections::HashMap; pub fn aspect_list_to_attribute(aspect_list: &Vec<EntryAspectData>) -> AttributeValue { string_set_attribute_value( aspect_list .iter() .map(|aspect| aspect.aspect_address.to_string()) .collect(), ) } pub fn put_aspect( log_context: &LogContext, client: &Client, table_name: &TableName, aspect: &EntryAspectData, ) -> BbDhtResult<()> { tracer(&log_context, "put_aspect"); let mut aspect_item = HashMap::new(); aspect_item.insert( String::from(ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_TYPE_HINT_KEY), string_attribute_value(&aspect.type_hint), ); aspect_item.insert( String::from(ASPECT_KEY), 
blob_attribute_value(&aspect.aspect), ); aspect_item.insert( String::from(ASPECT_PUBLISH_TS_KEY), number_attribute_value(&aspect.publish_ts), ); if should_put_item_retry( log_context, client .put_item(PutItemInput { table_name: table_name.to_string(), item: aspect_item, ..Default::default() }) .sync(), )? { put_aspect(log_context, client, table_name, aspect) } else { Ok(()) } } pub fn append_aspect_list_to_entry( log_context: &LogContext, client: &Client, table_name: &TableName, entry_address: &Address, aspect_list: &Vec<EntryAspectData>, ) -> BbDhtResult<()> { tracer(&log_context, "append_aspects"); for aspect in aspect_list { put_aspect(&log_context, &client, &table_name, &aspect)?; } let mut aspect_addresses_key = HashMap::new(); aspect_addresses_key.insert( String::from(ADDRESS_KEY), string_attribute_value(&String::from(entry_address.to_owned())), ); let mut expression_attribute_values = HashMap::new(); expression_attribute_values.insert( ":aspects".to_string(), aspect_list_to_attribute(&aspect_list), ); let mut expression_attribute_names = HashMap::new(); expression_attribute_names.insert("#aspect_list".to_string(), ASPECT_LIST_KEY.to_string()); let update_expression = "ADD #aspect_list :aspects"; let aspect_list_update = UpdateItemInput { table_name: table_name.to_string(), key: aspect_addresses_key, update_expression: Some(update_expression.to_string()), expression_attribute_names: Some(expression_attribute_names), expression_attribute_values: Some(expression_attribute_values), ..Default::default() }; client.update_item(aspect_list_update).sync()?; Ok(()) } #[cfg(test)] pub mod tests { use crate::aspect::fixture::aspect_list_fresh; use crate::aspect::fixture::entry_aspect_data_fresh; use crate::dht::bbdht::dynamodb::api::aspect::write::append_aspect_list_to_entry; use crate::dht::bbdht::dynamodb::api::aspect::write::aspect_list_to_attribute; use crate::dht::bbdht::dynamodb::api::aspect::write::put_aspect; use 
crate::dht::bbdht::dynamodb::api::item::read::get_item_by_address; use crate::dht::bbdht::dynamodb::api::table::create::ensure_cas_table; use crate::dht::bbdht::dynamodb::api::table::exist::table_exists; use crate::dht::bbdht::dynamodb::api::table::fixture::table_name_fresh; use crate::dht::bbdht::dynamodb::client::local::local_client; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::entry::fixture::entry_hash_fresh; use crate::trace::tracer; use std::collections::HashMap; #[test] fn put_aspect_test() { let log_context = "put_aspect_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_aspect = entry_aspect_data_fresh(); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); assert!(put_aspect(&log_context, &local_client, &table_name, &entry_aspect).is_ok()); } #[test] fn append_aspects_to_entry_test() { let log_context = "append_aspects_to_entry_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_address = entry_hash_fresh(); let aspect_list = aspect_list_fresh(); let mut expected = HashMap::new(); expected.insert( ASPECT_LIST_KEY.to_string(), aspect_list_to_attribute(&aspect_list), ); expected.insert( ADDRESS_KEY.to_string(), string_attribute_value(&String::from(entry_address.clone())),
&log_context, &local_client, &table_name, &entry_address, &aspect_list ) .is_ok()); match get_item_by_address(&log_context, &local_client, &table_name, &entry_address) { Ok(get_item_output) => match get_item_output { Some(item) => { assert_eq!(expected["address"], item["address"],); assert_eq!( expected["aspect_list"].ss.iter().count(), item["aspect_list"].ss.iter().count(), ); } None => { tracer(&log_context, "get matches None"); panic!("None"); } }, Err(err) => { tracer(&log_context, "get matches err"); panic!("{:?}", err); } } } } }
); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); for _ in 0..3 { assert!(append_aspect_list_to_entry(
function_block-random_span
[ { "content": "pub fn try_aspect_list_from_item(item: Item) -> BbDhtResult<Vec<Address>> {\n\n let addresses = match get_or_err(&item, ASPECT_LIST_KEY)?.ss.clone() {\n\n Some(addresses) => addresses.iter().map(|s| Address::from(s.clone())).collect(),\n\n None => {\n\n return Err(BbDht...
Rust
src/process.rs
berkus/fectl
bcd4137848f1c4fdbf24753e23f6030be79cce9c
#![allow(dead_code)] use std; use std::io; use std::error::Error; use std::os::unix::io::RawFd; use std::time::{Duration, Instant}; use serde_json as json; use byteorder::{ByteOrder, BigEndian}; use bytes::{BytesMut, BufMut}; use tokio_io::AsyncRead; use tokio_io::io::WriteHalf; use tokio_io::codec::{FramedRead, Encoder, Decoder}; use nix::sys::signal::{kill, Signal}; use nix::unistd::{close, pipe, fork, ForkResult, Pid}; use actix::prelude::*; use config::ServiceConfig; use io::PipeFile; use worker::{WorkerMessage, WorkerCommand}; use event::Reason; use exec::exec_worker; use service::{self, FeService}; const HEARTBEAT: u64 = 2; const WORKER_TIMEOUT: i32 = 98; pub const WORKER_INIT_FAILED: i32 = 99; pub const WORKER_BOOT_FAILED: i32 = 100; pub struct Process { idx: usize, pid: Pid, state: ProcessState, hb: Instant, addr: Addr<Unsync, FeService>, timeout: Duration, startup_timeout: u64, shutdown_timeout: u64, framed: actix::io::FramedWrite<WriteHalf<PipeFile>, TransportCodec>, } impl Actor for Process { type Context = Context<Self>; fn stopping(&mut self, ctx: &mut Context<Self>) -> Running { self.kill(ctx, false); Running::Stop } } impl StreamHandler<ProcessMessage, io::Error> for Process { fn finished(&mut self, ctx: &mut Context<Self>) { self.kill(ctx, false); ctx.stop(); } fn handle(&mut self, msg: ProcessMessage, ctx: &mut Self::Context) { ctx.notify(msg); } } #[derive(Debug)] enum ProcessState { Starting, Failed, Running, Stopping, } #[derive(PartialEq, Debug, Message)] pub enum ProcessMessage { Message(WorkerMessage), StartupTimeout, StopTimeout, Heartbeat, Kill, } #[derive(Debug, Clone)] pub enum ProcessError { Heartbeat, FailedToStart(Option<String>), StartupTimeout, StopTimeout, ConfigError(String), InitFailed, BootFailed, Signal(usize), ExitCode(i8), } impl ProcessError { pub fn from(code: i8) -> ProcessError { match code as i32 { WORKER_TIMEOUT => ProcessError::StartupTimeout, WORKER_INIT_FAILED => ProcessError::InitFailed, WORKER_BOOT_FAILED => 
ProcessError::BootFailed, code => ProcessError::ExitCode(code as i8), } } } impl<'a> std::convert::From<&'a ProcessError> for Reason { fn from(ob: &'a ProcessError) -> Self { match *ob { ProcessError::Heartbeat => Reason::HeartbeatFailed, ProcessError::FailedToStart(ref err) => Reason::FailedToStart( if let &Some(ref e) = err { Some(format!("{}", e))} else {None}), ProcessError::StartupTimeout => Reason::StartupTimeout, ProcessError::StopTimeout => Reason::StopTimeout, ProcessError::ConfigError(ref err) => Reason::WorkerError(err.clone()), ProcessError::InitFailed => Reason::InitFailed, ProcessError::BootFailed => Reason::BootFailed, ProcessError::Signal(sig) => Reason::Signal(sig), ProcessError::ExitCode(code) => Reason::ExitCode(code), } } } impl Process { pub fn start(idx: usize, cfg: &ServiceConfig, addr: Addr<Unsync, FeService>) -> (Pid, Option<Addr<Unsync, Process>>) { let (pid, pipe) = match Process::fork(idx, cfg) { Ok(res) => res, Err(err) => { let pid = Pid::from_raw(-1); addr.do_send( service::ProcessFailed( idx, pid, ProcessError::FailedToStart(Some(format!("{}", err))))); return (pid, None) } }; let timeout = Duration::new(u64::from(cfg.timeout), 0); let startup_timeout = u64::from(cfg.startup_timeout); let shutdown_timeout = u64::from(cfg.shutdown_timeout); let addr = Process::create(move |ctx| { let (r, w) = pipe.split(); ctx.add_stream(FramedRead::new(r, TransportCodec)); ctx.notify_later(ProcessMessage::StartupTimeout, Duration::new(startup_timeout as u64, 0)); Process { idx, pid, addr, timeout, startup_timeout, shutdown_timeout, state: ProcessState::Starting, hb: Instant::now(), framed: actix::io::FramedWrite::new(w, TransportCodec, ctx) }}); (pid, Some(addr)) } fn fork(idx: usize, cfg: &ServiceConfig) -> Result<(Pid, PipeFile), io::Error> { let (p_read, p_write, ch_read, ch_write) = Process::create_pipes()?; let pid = match fork() { Ok(ForkResult::Parent{ child }) => child, Ok(ForkResult::Child) => { let _ = close(p_write); let _ = 
close(ch_read); exec_worker(idx, cfg, p_read, ch_write); unreachable!(); }, Err(err) => { error!("Fork failed: {}", err.description()); return Err(io::Error::new(io::ErrorKind::Other, err.description())) } }; let _ = close(p_read); let _ = close(ch_write); let pipe = PipeFile::new(ch_read, p_write, Arbiter::handle()); Ok((pid, pipe)) } fn create_pipes() -> Result<(RawFd, RawFd, RawFd, RawFd), io::Error> { let (p_read, p_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; let (ch_read, ch_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; Ok((p_read, p_write, ch_read, ch_write)) } fn kill(&self, ctx: &mut Context<Self>, graceful: bool) { if graceful { ctx.notify_later(ProcessMessage::Kill, Duration::new(1, 0)); } else { let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } } } impl Drop for Process { fn drop(&mut self) { let _ = kill(self.pid, Signal::SIGKILL); } } impl actix::io::WriteHandler<io::Error> for Process {} impl Handler<ProcessMessage> for Process { type Result = (); fn handle(&mut self, msg: ProcessMessage, ctx: &mut Context<Self>) { match msg { ProcessMessage::Message(msg) => match msg { WorkerMessage::forked => { debug!("Worker forked (pid:{})", self.pid); self.framed.write(WorkerCommand::prepare); } WorkerMessage::loaded => { match self.state { ProcessState::Starting => { debug!("Worker loaded (pid:{})", self.pid); self.addr.do_send( service::ProcessLoaded(self.idx, self.pid)); self.state = ProcessState::Running; self.hb = Instant::now(); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); }, _ => { warn!("Received `loaded` message from worker (pid:{})", self.pid); } } } WorkerMessage::hb => { self.hb = Instant::now(); } 
WorkerMessage::reload => { info!("Worker requests reload (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::reload)); } WorkerMessage::restart => { info!("Worker requests restart (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::restart)); } WorkerMessage::cfgerror(msg) => { error!("Worker config error: {} (pid:{})", msg, self.pid); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::ConfigError(msg))); } } ProcessMessage::StartupTimeout => { if let ProcessState::Starting = self.state { error!("Worker startup timeout after {} secs", self.startup_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StartupTimeout)); self.state = ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::StopTimeout => { if let ProcessState::Stopping = self.state { info!("Worker shutdown timeout aftre {} secs", self.shutdown_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StopTimeout)); self.state = ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::Heartbeat => { if let ProcessState::Running = self.state { if Instant::now().duration_since(self.hb) > self.timeout { error!("Worker heartbeat failed (pid:{}) after {:?} secs", self.pid, self.timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::Heartbeat)); } else { self.framed.write(WorkerCommand::hb); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); } } } ProcessMessage::Kill => { let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } } } #[derive(Message)] pub struct SendCommand(pub WorkerCommand); impl Handler<SendCommand> for Process { type Result = (); fn handle(&mut self, msg: SendCommand, _: &mut Context<Process>) { self.framed.write(msg.0); } } #[derive(Message)] pub struct 
StartProcess; impl Handler<StartProcess> for Process { type Result = (); fn handle(&mut self, _: StartProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::start); } } #[derive(Message)] pub struct PauseProcess; impl Handler<PauseProcess> for Process { type Result = (); fn handle(&mut self, _: PauseProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::pause); } } #[derive(Message)] pub struct ResumeProcess; impl Handler<ResumeProcess> for Process { type Result = (); fn handle(&mut self, _: ResumeProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::resume); } } #[derive(Message)] pub struct StopProcess; impl Handler<StopProcess> for Process { type Result = (); fn handle(&mut self, _: StopProcess, ctx: &mut Context<Process>) { info!("Stopping worker: (pid:{})", self.pid); match self.state { ProcessState::Running => { self.state = ProcessState::Stopping; self.framed.write(WorkerCommand::stop); ctx.notify_later( ProcessMessage::StopTimeout, Duration::new(self.shutdown_timeout, 0)); let _ = kill(self.pid, Signal::SIGTERM); }, _ => { let _ = kill(self.pid, Signal::SIGQUIT); ctx.terminate(); } } } } #[derive(Message)] pub struct QuitProcess(pub bool); impl Handler<QuitProcess> for Process { type Result = (); fn handle(&mut self, msg: QuitProcess, ctx: &mut Context<Process>) { if msg.0 { let _ = kill(self.pid, Signal::SIGQUIT); self.kill(ctx, true); } else { self.kill(ctx, false); let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } } } pub struct TransportCodec; impl Decoder for TransportCodec { type Item = ProcessMessage; type Error = io::Error; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { let size = { if src.len() < 2 { return Ok(None) } BigEndian::read_u16(src.as_ref()) as usize }; if src.len() >= size + 2 { src.split_to(2); let buf = src.split_to(size); Ok(Some(ProcessMessage::Message(json::from_slice::<WorkerMessage>(&buf)?))) } else { Ok(None) } } } impl Encoder for 
TransportCodec { type Item = WorkerCommand; type Error = io::Error; fn encode(&mut self, msg: WorkerCommand, dst: &mut BytesMut) -> Result<(), Self::Error> { let msg = json::to_string(&msg).unwrap(); let msg_ref: &[u8] = msg.as_ref(); dst.reserve(msg_ref.len() + 2); dst.put_u16::<BigEndian>(msg_ref.len() as u16); dst.put(msg_ref); Ok(()) } }
#![allow(dead_code)] use std; use std::io; use std::error::Error; use std::os::unix::io::RawFd; use std::time::{Duration, Instant}; use serde_json as json; use byteorder::{ByteOrder, BigEndian}; use bytes::{BytesMut, BufMut}; use tokio_io::AsyncRead; use tokio_io::io::WriteHalf; use tokio_io::codec::{FramedRead, Encoder, Decoder}; use nix::sys::signal::{kill, Signal}; use nix::unistd::{close, pipe, fork, ForkResult, Pid}; use actix::prelude::*; use config::ServiceConfig; use io::PipeFile; use worker::{WorkerMessage, WorkerCommand}; use event::Reason; use exec::exec_worker; use service::{self, FeService}; const HEARTBEAT: u64 = 2; const WORKER_TIMEOUT: i32 = 98; pub const WORKER_INIT_FAILED: i32 = 99; pub const WORKER_BOOT_FAILED: i32 = 100; pub struct Process { idx: usize, pid: Pid, state: ProcessState, hb: Instant, addr: Addr<Unsync, FeService>, timeout: Duration, startup_timeout: u64, shutdown_timeout: u64, framed: actix::io::FramedWrite<WriteHalf<PipeFile>, TransportCodec>, } impl Actor for Process { type Context = Context<Self>; fn stopping(&mut self, ctx: &mut Context<Self>) -> Running { self.kill(ctx, false); Running::Stop } } impl StreamHandler<ProcessMessage, io::Error> for Process { fn finished(&mut self, ctx: &mut Context<Self>) { self.kill(ctx, false); ctx.stop(); } fn handle(&mut self, msg: ProcessMessage, ctx: &mut Self::Context) { ctx.notify(msg); } } #[derive(Debug)] enum ProcessState { Starting, Failed, Running, Stopping, } #[derive(PartialEq, Debug, Message)] pub enum ProcessMessage { Message(WorkerMessage), StartupTimeout, StopTimeout, Heartbeat, Kill, } #[derive(Debug, Clone)] pub enum ProcessError { Heartbeat, FailedToStart(Option<String>), StartupTimeout, StopTimeout, ConfigError(String), InitFailed, BootFailed, Signal(usize), ExitCode(i8), } impl ProcessError { pub fn from(code: i8) -> ProcessError { match code as i32 { WORKER_TIMEOUT => ProcessError::StartupTimeout, WORKER_INIT_FAILED => ProcessError::InitFailed, WORKER_BOOT_FAILED => 
ProcessError::BootFailed, code => ProcessError::ExitCode(code as i8), } } } impl<'a> std::convert::From<&'a ProcessError> for Reason { fn from(ob: &'a ProcessError) -> Self { match *ob { ProcessError::Heartbeat => Reason::HeartbeatFailed, ProcessError::FailedToStart(ref err) => Reason::FailedToStart( if let &Some(ref e) = err { Some(format!("{}", e))} else {None}), ProcessError::StartupTimeout => Reason::StartupTimeout, ProcessError::StopTimeout => Reason::StopTimeout, ProcessError::ConfigError(ref err) => Reason::WorkerError(err.clone()), ProcessError::InitFailed => Reason::InitFailed, ProcessError::BootFailed => Reason::BootFailed, ProcessError::Signal(sig) => Reason::Signal(sig), ProcessError::ExitCode(code) => Reason::ExitCode(code), } } } impl Process { pub fn start(idx: usize, cfg: &ServiceConfig, addr: Addr<Unsync, FeService>) -> (Pid, Option<Addr<Unsync, Process>>) { let (pid, pipe) = match Process::fork(idx, cfg) { Ok(res) => res, Err(err) => { let pid = Pid::from_raw(-1); addr.do_send( service::ProcessFailed( idx, pid, ProcessError::FailedToStart(Some(format!("{}", err))))); return (pid, None) } }; let timeout = Duration::new(u64::from(cfg.timeout), 0); let startup_timeout = u64::from(cfg.startup_timeout); let shutdown_timeout = u64::from(cfg.shutdown_timeout); let addr = Process::create(move |ctx| { let (r, w) = pipe.split(); ctx.add_stream(FramedRead::new(r, TransportCodec)); ctx.notify_later(ProcessMessage::StartupTimeout, Duration::new(startup_timeout as u64, 0)); Process { idx, pid, addr, timeout, startup_timeout, shutdown_timeout, state: ProcessState::Starting, hb: Instant::now(), framed: actix::io::FramedWrite::new(w, TransportCodec, ctx) }}); (pid, Some(addr)) } fn fork(idx: usize, cfg: &ServiceConfig) -> Result<(Pid, PipeFile), io::Error> { let (p_read, p_write, ch_read, ch_write) = Process::create_pipes()?; let pid = match fork() { Ok(ForkResult::Parent{ child }) => child, Ok(ForkResult::Child) => { let _ = close(p_write); let _ = 
close(ch_read); exec_worker(idx, cfg, p_read, ch_write); unreachable!(); }, Err(err) => { error!("Fork failed: {}", err.description()); return Err(io::Error::new(io::ErrorKind::Other, err.description())) } }; let _ = close(p_read); let _ = close(ch_write); let pipe = PipeFile::new(ch_read, p_write, Arbiter::handle()); Ok((pid, pipe)) } fn create_pipes() -> Result<(RawFd, RawFd, RawFd, RawFd), io::Error> { let (p_read, p_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; let (ch_read, ch_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; Ok((p_read, p_write, ch_read, ch_write)) } fn kill(&self, ctx: &mut Context<Self>, graceful: bool) { if graceful { ctx.notify_later(ProcessMessage::Kill, Duration::new(1, 0)); } else { let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } } } impl Drop for Process { fn drop(&mut self) { let _ = kill(self.pid, Signal::SIGKILL); } } impl actix::io::WriteHandler<io::Error> for Process {} impl Handler<ProcessMessage> for Process { type Result = (); fn handle(&mut self, msg: ProcessMessage, ctx: &mut Context<Self>) { match msg { ProcessMessage::Message(msg) => match msg { WorkerMessage::forked => { debug!("Worker forked (pid:{})", self.pid); self.framed.write(WorkerCommand::prepare); } WorkerMessage::loaded => { match self.state { ProcessState::Starting => { debug!("Worker loaded (pid:{})", self.pid); self.addr.do_send( service::ProcessLoaded(self.idx, self.pid)); self.state = ProcessState::Running; self.hb = Instant::now(); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); }, _ => { warn!("Received `loaded` message from worker (pid:{})", self.pid); } } } WorkerMessage::hb => { self.hb = Instant::now(); } 
WorkerMessage::reload => { info!("Worker requests reload (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::reload)); } WorkerMessage::restart => { info!("Worker requests restart (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::restart)); } WorkerMessage::cfgerror(msg) => { error!("Worker config error: {} (pid:{})", msg, self.pid); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::ConfigError(msg))); } } ProcessMessage::StartupTimeout => { if let ProcessState::Starting = self.state { error!("Worker startup timeout after {} secs", self.startup_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StartupTimeout)); self.state = ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::StopTimeout => { if let ProcessState::Stopping = self.state { info!("Worker shutdown timeout aftre {} secs", self.shutdown_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StopTimeout)); self.state = ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::Heartbeat => { if let ProcessState::Running = self.state { if Instant::now().duration_since(self.hb) > self.timeout { error!("Worker heartbeat failed (pid:{}) after {:?} secs", self.pid, self.timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::Heartbeat)); } else { self.framed.write(WorkerCommand::hb); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); } } } ProcessMessage::Kill => { let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } } } #[derive(Message)] pub struct SendCommand(pub WorkerCommand); impl Handler<SendCommand> for Process { type Result = (); fn handle(&mut self, msg: SendCommand, _: &mut Context<Process>) { self.framed.write(msg.0); } } #[derive(Message)] pub struct 
StartProcess; impl Handler<StartProcess> for Process { type Result = (); fn handle(&mut self, _: StartProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::start); } } #[derive(Message)] pub struct PauseProcess; impl Handler<PauseProcess> for Process { type Result = (); fn handle(&mut self, _: PauseProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::pause); } } #[derive(Message)] pub struct ResumeProcess; impl Handler<ResumeProcess> for Process { type Result = (); fn handle(&mut self, _: ResumeProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::resume); } } #[derive(Message)] pub struct StopProcess; impl Handler<StopProcess> for Process { type Result = (); fn handle(&mut self, _: StopProcess, ctx: &mut Context<Process>) { info!("Stopping worker: (pid:{})", self.pid); match self.state { ProcessState::Running => { self.state = ProcessState::Stopping; self.framed.write(WorkerCommand::stop); ctx.notify_later( ProcessMessage::StopTimeout, Duration::new(self.shutdown_timeout, 0)); let _ = kill(self.pid, Signal::SIGTERM); }, _ => { let _ = kill(self.pid, Signal::SIGQUIT); ctx.terminate(); } } } } #[derive(Message)] pub struct QuitProcess(pub bool); impl Handler<QuitProcess> for Process { type Result = (); fn handle(&mut self, msg: QuitProcess, ctx: &mut Context<Process>) { if msg.0 { let _ = kill(self.pid, Signal::SIGQUIT);
} pub struct TransportCodec; impl Decoder for TransportCodec { type Item = ProcessMessage; type Error = io::Error; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { let size = { if src.len() < 2 { return Ok(None) } BigEndian::read_u16(src.as_ref()) as usize }; if src.len() >= size + 2 { src.split_to(2); let buf = src.split_to(size); Ok(Some(ProcessMessage::Message(json::from_slice::<WorkerMessage>(&buf)?))) } else { Ok(None) } } } impl Encoder for TransportCodec { type Item = WorkerCommand; type Error = io::Error; fn encode(&mut self, msg: WorkerCommand, dst: &mut BytesMut) -> Result<(), Self::Error> { let msg = json::to_string(&msg).unwrap(); let msg_ref: &[u8] = msg.as_ref(); dst.reserve(msg_ref.len() + 2); dst.put_u16::<BigEndian>(msg_ref.len() as u16); dst.put(msg_ref); Ok(()) } }
self.kill(ctx, true); } else { self.kill(ctx, false); let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } }
function_block-function_prefix_line
[ { "content": "/// Start master process\n\npub fn start(cfg: Config) -> bool {\n\n // init logging\n\n logging::init_logging(&cfg.logging);\n\n\n\n info!(\"Starting fectl process\");\n\n\n\n // change working dir\n\n if let Err(err) = nix::unistd::chdir::<OsStr>(cfg.master.directory.as_ref()) {\n\...
Rust
day18b/src/main.rs
LinAGKar/advent-of-code-2019-rust
f1de4d408f1c331f49a0928cf5abd733a3e15bb6
use std::cmp::Reverse; use std::collections::{BinaryHeap, HashSet}; use std::io::Read; #[derive(PartialEq)] enum Tile { Wall, Floor, Door(usize), Key(usize), } fn get_shortest_paths(map: &Vec<Vec<Tile>>, start: (i8, i8)) -> Vec<Vec<(u16, u32)>> { let mut queue = BinaryHeap::new(); let mut visited: Vec<Vec<_>> = map.iter().map(|row| { row.iter().map(|_| Vec::new()).collect() }).collect(); let mut shortest_paths = vec![Vec::new(); 26]; queue.push((Reverse(0), 0, 0, start)); while let Some((_, mut needed_keys, cost, pos)) = queue.pop() { let (y, x) = pos; let visited_this = &mut visited[y as usize][x as usize]; if visited_this.iter().any(|&old_needed_keys| needed_keys & old_needed_keys == old_needed_keys) { continue; } visited_this.push(needed_keys); let tile = &map[y as usize][x as usize]; match *tile { Tile::Door(door) => { needed_keys |= 1 << door; } Tile::Wall => { continue; } Tile::Floor => {} Tile::Key(key) => { shortest_paths[key as usize].push((cost, needed_keys)); needed_keys |= 1 << key; } } for (dy, dx) in &[ (-1, 0), (1, 0), (0, -1), (0, 1), ] { let (y, x) = (y + dy, x + dx); queue.push((Reverse(cost + 1), needed_keys, cost + 1, (y, x))); } } shortest_paths } fn main() { let mut input = String::new(); std::io::stdin().read_to_string(&mut input).unwrap(); let mut entrance = (0, 0); let mut keys = vec![(0, 0); 26]; let mut doors = vec![(0, 0); 26]; let mut map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| { line.chars().enumerate().map(|(x, c)| { match c { '@' => { entrance = (y as i8, x as i8); Tile::Floor } '.' 
=> Tile::Floor, '#' => Tile::Wall, _ => { let ord = c as usize; if c.is_ascii_uppercase() { let index = ord - 'A' as usize; doors[index] = (y as i8, x as i8); Tile::Door(index) } else { let index = ord - 'a' as usize; keys[index] = (y as i8, x as i8); Tile::Key(index) } } } }).collect() }).collect(); map[entrance.0 as usize][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize - 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize + 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize - 1] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize + 1] = Tile::Wall; keys.push((entrance.0 - 1, entrance.1 - 1)); keys.push((entrance.0 - 1, entrance.1 + 1)); keys.push((entrance.0 + 1, entrance.1 - 1)); keys.push((entrance.0 + 1, entrance.1 + 1)); let costs_from_key: Vec<_> = keys.iter().map(|&pos| { get_shortest_paths(&map, pos) }).collect(); let mut queue = BinaryHeap::new(); queue.push((Reverse(0), 0, [26, 27, 28, 29], 0)); let mut visited = HashSet::new(); while let Some((_, cost, poses, keys)) = queue.pop() { if visited.contains(&(poses, keys)) { continue; } visited.insert((poses, keys)); if keys == 0x3FFFFFF { println!("{}", cost); break; } queue.extend(poses.iter().enumerate().flat_map(|(n, &pos)| { costs_from_key[pos].iter().enumerate().filter_map(move |( key, possibilities, )| { possibilities.iter().find_map(|&(new_cost, needed_keys)| { if key != pos && keys & 1 << key == 0 && needed_keys & keys == needed_keys { let new_cost = new_cost + cost; let mut new_poses = poses; new_poses[n] = key; Some((Reverse(new_cost), new_cost, new_poses, keys | 1 << key)) } else { None } }) }) })); } }
use std::cmp::Reverse; use std::collections::{BinaryHeap, HashSet}; use std::io::Read; #[derive(PartialEq)] enum Tile { Wall, Floor, Door(usize), Key(usize), } fn get_shortest_paths(map: &Vec<Vec<Tile>>, start: (i8, i8)) -> Vec<Vec<(u16, u32)>> { let mut queue = BinaryHeap::new(); let mut visited: Vec<Vec<_>> = map.iter().map(|row| { row.iter().map(|_| Vec::new()).collect() }).collect(); let mut shortest_paths = vec![Vec::new(); 26]; queue.push((Reverse(0), 0, 0, s
d_keys |= 1 << door; } Tile::Wall => { continue; } Tile::Floor => {} Tile::Key(key) => { shortest_paths[key as usize].push((cost, needed_keys)); needed_keys |= 1 << key; } } for (dy, dx) in &[ (-1, 0), (1, 0), (0, -1), (0, 1), ] { let (y, x) = (y + dy, x + dx); queue.push((Reverse(cost + 1), needed_keys, cost + 1, (y, x))); } } shortest_paths } fn main() { let mut input = String::new(); std::io::stdin().read_to_string(&mut input).unwrap(); let mut entrance = (0, 0); let mut keys = vec![(0, 0); 26]; let mut doors = vec![(0, 0); 26]; let mut map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| { line.chars().enumerate().map(|(x, c)| { match c { '@' => { entrance = (y as i8, x as i8); Tile::Floor } '.' => Tile::Floor, '#' => Tile::Wall, _ => { let ord = c as usize; if c.is_ascii_uppercase() { let index = ord - 'A' as usize; doors[index] = (y as i8, x as i8); Tile::Door(index) } else { let index = ord - 'a' as usize; keys[index] = (y as i8, x as i8); Tile::Key(index) } } } }).collect() }).collect(); map[entrance.0 as usize][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize - 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize + 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize - 1] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize + 1] = Tile::Wall; keys.push((entrance.0 - 1, entrance.1 - 1)); keys.push((entrance.0 - 1, entrance.1 + 1)); keys.push((entrance.0 + 1, entrance.1 - 1)); keys.push((entrance.0 + 1, entrance.1 + 1)); let costs_from_key: Vec<_> = keys.iter().map(|&pos| { get_shortest_paths(&map, pos) }).collect(); let mut queue = BinaryHeap::new(); queue.push((Reverse(0), 0, [26, 27, 28, 29], 0)); let mut visited = HashSet::new(); while let Some((_, cost, poses, keys)) = queue.pop() { if visited.contains(&(poses, keys)) { continue; } visited.insert((poses, keys)); if keys == 0x3FFFFFF { println!("{}", cost); break; } queue.extend(poses.iter().enumerate().flat_map(|(n, &pos)| { 
costs_from_key[pos].iter().enumerate().filter_map(move |( key, possibilities, )| { possibilities.iter().find_map(|&(new_cost, needed_keys)| { if key != pos && keys & 1 << key == 0 && needed_keys & keys == needed_keys { let new_cost = new_cost + cost; let mut new_poses = poses; new_poses[n] = key; Some((Reverse(new_cost), new_cost, new_poses, keys | 1 << key)) } else { None } }) }) })); } }
tart)); while let Some((_, mut needed_keys, cost, pos)) = queue.pop() { let (y, x) = pos; let visited_this = &mut visited[y as usize][x as usize]; if visited_this.iter().any(|&old_needed_keys| needed_keys & old_needed_keys == old_needed_keys) { continue; } visited_this.push(needed_keys); let tile = &map[y as usize][x as usize]; match *tile { Tile::Door(door) => { neede
function_block-random_span
[ { "content": "fn get_shortest_paths(map: &Vec<Vec<Tile>>, start: (i8, i8)) -> Vec<Vec<(u16, u32)>> {\n\n let mut queue = BinaryHeap::new();\n\n let mut visited: Vec<Vec<_>> = map.iter().map(|row| {\n\n row.iter().map(|_| Vec::new()).collect()\n\n }).collect();\n\n let mut shortest_paths = vec...
Rust
src/server/main.rs
brennie/chat
dfeec8c61f6f7f8a603241e055a4e214195cc2b0
extern crate chat_common; extern crate failure; extern crate futures; extern crate serde; #[macro_use] extern crate slog; extern crate slog_async; extern crate slog_term; extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate tokio; extern crate tokio_io; use std::net::IpAddr; use futures::future; use slog::Drain; use structopt::StructOpt; use tokio::prelude::*; use chat_common::{join_stream, messages, split_stream, Recv, Send}; #[derive(Debug, StructOpt)] #[structopt(name = "server")] struct Options { #[structopt( short = "h", long = "host", default_value = "127.0.0.1", env = "CHAT_HOST", parse(try_from_str) )] host: IpAddr, #[structopt(short = "p", long = "port", default_value = "9999", env = "CHAT_PORT")] port: u16, #[structopt(short = "v", parse(from_occurrences))] verbosity: u8, } fn build_drain<D>( decorator: D, min_level: slog::Level, max_level: slog::Level, ) -> impl Drain<Ok = Option<()>, Err = slog::Never> where D: slog_term::Decorator, { slog_term::FullFormat::new(decorator) .use_original_order() .use_utc_timestamp() .build() .fuse() .filter(move |record: &slog::Record| { min_level <= record.level() && record.level() <= max_level }) } fn main() { let exit_code = { let options = Options::from_args(); let log_level = match options.verbosity { 0 => slog::Level::Info, 1 => slog::Level::Debug, _ => slog::Level::Trace, }; let stderr = build_drain( slog_term::TermDecorator::new().stderr().build(), slog::Level::Critical, slog::Level::Error, ); let stdout = build_drain( slog_term::TermDecorator::new().stdout().build(), slog::Level::Warning, log_level, ); let drain = slog::Duplicate::new(stdout, stderr).fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let log = slog::Logger::root(drain, o!()); info!(log, "Started server"; "options" => ?options, "version" => env!("CARGO_PKG_VERSION")); match run_server(log.clone(), options) { Ok(_) => 0, Err(e) => { crit!(log, "An nexpected error occurred"; "error" => %e); 1 } } }; 
std::process::exit(exit_code); } fn run_server(log: slog::Logger, options: Options) -> Result<(), failure::Error> { use std::net::SocketAddr; let addr = SocketAddr::new(options.host, options.port); let server = tokio::net::TcpListener::bind(&addr)? .incoming() .for_each({ let log = log.clone(); move |conn| { let peer_addr = conn.peer_addr() .expect("Could not retrieve remote address") .clone(); let peer_addr = format!("{}", peer_addr); let log = log.new(o!("peer" => peer_addr)); info!(log, "New connection."); tokio::spawn(handle_conn(log, conn)); Ok(()) } }) .map_err({ let log = log.clone(); move |e| { error!(log, "Connection error."; "error" => %e); () } }); tokio::run(server); Ok(()) } fn handle_conn( log: slog::Logger, stream: tokio::net::TcpStream, ) -> impl Future<Item = (), Error = ()> { use chat_common::messages::{client::*, handshake, server::*}; future::ok(split_stream::<handshake::AuthRequest, handshake::AuthResponse>(stream)) .and_then({ let log = log.clone(); move |(recv, send)| { do_handshake(log.clone(), recv, send) .map_err({ let log = log.clone(); move |err| { error!(log, "An error occurred during handshaking: {}", err); } }) .and_then(|(log, recv, send)| { let stream = join_stream(recv, send).unwrap(); let (recv, send) = split_stream::<ClientMessageKind, ServerMessage>(stream); future::ok((log, recv, send)) }) } }) .and_then(move |(log, recv, send)| { send.send(ServerMessage::FromServer(ServerMessageKind::Greeting( GreetingMessage { motd: "Hello, world!".into(), }, ))).map_err(|err| failure::Error::from(err)) .and_then({ let log = log.clone(); move |_| read_loop(log, recv) }) .map_err({ let log = log.clone(); move |err| { error!(log, "An unexpected error occurred: {}", err); } }) }) .and_then(|_| future::ok(())) } fn do_handshake( log: slog::Logger, recv: Recv<messages::handshake::AuthRequest>, send: Send<messages::handshake::AuthResponse>, ) -> impl Future< Item = ( slog::Logger, Recv<messages::handshake::AuthRequest>, 
Send<messages::handshake::AuthResponse>, ), Error = failure::Error, > { use messages::handshake::{AuthRequest, AuthResponse}; recv.into_future() .map_err(|(err, _)| err.into()) .and_then(move |(maybe_msg, recv)| match maybe_msg { Some(AuthRequest::AuthRequest { username }) => future::ok(((send, recv), username)), None => future::err(failure::err_msg("Connection closed unexpectedly.")), }) .and_then({ let log = log.clone(); move |((send, recv), username)| { let log = log.new(o!("username" => username.clone())); send.send(AuthResponse::AuthResponse { result: Ok(username.clone()), }).map_err(|err| err.into()) .and_then(|send| { info!(log, "Client authenticated."); future::ok((log, recv, send)) }) } }) } fn read_loop( log: slog::Logger, recv: Recv<messages::client::ClientMessageKind>, ) -> impl Future<Item = (), Error = failure::Error> { use messages::client::{ClientMessageKind::*, *}; future::loop_fn(recv.into_future(), { let log = log.clone(); move |stream_fut| { stream_fut .map_err(|(err, _)| err.into()) .and_then(|(maybe_msg, stream)| match maybe_msg { Some(msg) => future::ok((msg, stream)), None => future::err(failure::err_msg("Client unexpectedly closed connection.")), }) .and_then({ let log = log.clone(); move |(msg, _stream)| match msg { Goodbye(GoodbyeMessage { reason }) => { info!(log, "Client disconnected."; "reason" => ?reason); Ok(future::Loop::Break(())) } } }) } }) }
extern crate chat_common; extern crate failure; extern crate futures; extern crate serde; #[macro_use] extern crate slog; extern crate slog_async; extern crate slog_term; extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate tokio; extern crate tokio_io; use std::net::IpAddr; use futures::future; use slog::Drain; use structopt::StructOpt; use tokio::prelude::*; use chat_common::{join_stream, messages, split_stream, Recv, Send}; #[derive(Debug, StructOpt)] #[structopt(name = "server")] struct Options { #[structopt( short = "h", long = "host", default_value = "127.0.0.1", env = "CHAT_HOST", parse(try_from_str) )] host: IpAddr, #[structopt(short = "p", long = "port", default_value = "9999", env = "CHAT_PORT")] port: u16, #[structopt(short = "v", parse(from_occurrences))] verbosity: u8, } fn build_drain<D>( decorator: D, min_level: slog::Level, max_level: slog::Level, ) -> impl Drain<Ok = Option<()>, Err = slog::Never> where D: slog_term::Decorator, { slog_term::FullFormat::new(decorator) .use_original_order() .use_utc_timestamp() .build() .fuse() .filter(move |record: &slog::Record| { min_level <= record.level() && record.level() <= max_level }) } fn main() { let exit_code = { let options = Options::from_args(); let log_level = match options.verbosity { 0 => slog::Level::Info, 1 => slog::Level::Debug, _ => slog::Level::Trace, }; let stderr = build_drain( slog_term::TermDecorator::new().stderr().build(), slog::Level::Critical, slog::Level::Error, ); let stdout = build_drain( slog_term::TermDecorator::new().stdout().build(), slog::Level::Warning, log_level, ); let drain = slog::Duplicate::new(stdout, stderr).fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let log = slog::Logger::root(drain, o!()); info!(log, "Started server"; "options" => ?options, "version" => env!("CARGO_PKG_VERSION")); match run_server(log.clone(), options) { Ok(_) => 0, Err(e) => { crit!(log, "An nexpected error occurred"; "error" => %e); 1 } } }; 
std::process::exit(exit_code); } fn run_server(log: slog::Logger, options: Options) -> Result<(), failure::Error> { use std::net::SocketAddr; let addr = SocketAddr::new(options.host, options.port); let server = tokio::net::TcpListener::bind(&addr)? .incoming() .for_each({ let log = log.clone(); move |conn| { let peer_addr = conn.peer_addr() .expect("Could not retrieve remote address") .clone(); let peer_addr = format!("{}", peer_addr); let log = log.new(o!("peer" => peer_addr)); info!(log, "New connection."); tokio::spawn(handle_conn(log, conn)); Ok(()) } }) .map_err({ let log = log.clone(); move |e| { error!(log, "Connection error."; "error" => %e); () } }); tokio::run(server); Ok(()) } fn handle_conn( log: slog::Logger, stream: tokio::net::TcpStream, ) -> impl Future<Item = (), Error = ()> { use chat_common::messages::{client::*, handshake, server::*}; future::ok(split_stream::<handshake::AuthRequest, handshake::AuthResponse>(stream)) .and_then({ let log = log.clone(); move |(recv, send)| { do_handshake(log.clone(), recv, send) .map_err({ let log = log.clone(); move |err| { error!(log, "An error occurred during handshaking: {}", err); } }) .and_then(|(log, recv, send)| { let stream = join_stream(recv, send).unwrap(); let (recv, send) = split_stream::<ClientMessageKind, ServerMessage>(stream); future::ok((log, recv, send)) }) } }) .and_then(move |(log, recv, send)| { send.send(ServerMessage::FromServer(ServerMessageKind::Greeting( GreetingMessage { motd: "Hello, world!".into(), }, ))).map_err(|err| failure::Error::from(err)) .and_then({ let log = log.clone(); move |_| read_loop(log, recv) }) .map_err({ let log = log.clone(); move |err| { error!(log, "An unexpected error occurred: {}", err); } }) }) .and_then(|_| future::ok(())) } fn do_handshake( log: slog::Logger, recv: Recv<messages::handshake::AuthRequest>, send: Send<messages::handshake::AuthResponse>, ) -> impl Future< Item = ( slog::Logger, Recv<messages::handshake::AuthRequest>, 
Send<messages::handshake::AuthResponse>, ), Error = failure::Error, > { use messages::handshake::{AuthRequest, AuthResponse}; recv.into_future() .map_err(|(err, _)| err.into()) .and_then(move |(maybe_msg, recv)| match maybe_msg { Some(AuthRequest::AuthRequest { username }) => future::ok(((send, recv), username)), None => future::err(failure::err_msg("Connection closed unexpectedly.")), }) .and_then({ let log = log.clone(); move |((send, recv), username)| { let log = log.new(o!("username" => username.clone())); send.send(AuthResponse::AuthResponse { result: Ok(username.clone()), }).map_err(|err| err.into()) .and_then(|send| { info!(log, "Client authenticated."); future::ok((log, recv, send)) }) } }) } fn read_loop( log: slog::Logger, recv: Recv<messages::client::ClientMessageKind>, ) -> impl Future<Item = (), Error = failure::Error> { use messages::client::{ClientMessageKind::*, *}; future::loop_fn(recv.into_future(), { let log = log.clone(); move |stream_fut| { stream_fut .map_err(|(err, _)| err.into()) .and_then(|(maybe_msg, stream)|
) .and_then({ let log = log.clone(); move |(msg, _stream)| match msg { Goodbye(GoodbyeMessage { reason }) => { info!(log, "Client disconnected."; "reason" => ?reason); Ok(future::Loop::Break(())) } } }) } }) }
match maybe_msg { Some(msg) => future::ok((msg, stream)), None => future::err(failure::err_msg("Client unexpectedly closed connection.")), }
if_condition
[ { "content": "fn read_loop(recv: Recv<messages::server::ServerMessage>) -> impl Future<Item = (), Error = failure::Error>\n\n{\n\n use messages::server::{*, ServerMessage::*, ServerMessageKind::*};\n\n\n\n recv.map_err(|err| err.into())\n\n .for_each(|msg| {\n\n match msg {\n\n ...
Rust
src/config.rs
OtaK/vivid
ecaf0c95e2334971e0acc05ea572bec488d85f6f
use crate::error::VividError; use winapi::{ shared::ntdef::NULL, um::{shellapi::ShellExecuteA, winuser::SW_SHOWNORMAL}, }; pub const DEFAULT_CONFIG_FILENAME: &str = "vivid.toml"; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct VideoMode { pub width: u32, pub height: u32, pub freq: u32, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Program { pub exe_name: String, pub vibrance: u8, pub fullscreen_only: Option<bool>, pub resolution: Option<VideoMode>, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Config { desktop_vibrance: u8, resolution: Option<VideoMode>, program_settings: Vec<Program>, } impl Default for Config { fn default() -> Self { Self { desktop_vibrance: 50, program_settings: vec![], resolution: None, } } } impl Config { fn sample() -> crate::VividResult<Self> { let vibrance = unsafe { crate::GPU.as_ref()?.write().get_vibrance()? }; let mut default = Self::default(); default.desktop_vibrance = vibrance; default.program_settings.push(Program { exe_name: "sample_program.exe".into(), vibrance, fullscreen_only: Some(false), resolution: None, }); Ok(default) } fn config_path() -> crate::VividResult<std::path::PathBuf> { let mut path = std::env::current_exe()?; path.set_file_name(DEFAULT_CONFIG_FILENAME); Ok(path) } fn load_file(maybe_path: Option<String>) -> crate::VividResult<std::fs::File> { use std::io::Write as _; let path = maybe_path.map_or_else(|| Self::config_path(), |path| Ok(path.into()))?; let res = std::fs::OpenOptions::new() .write(true) .read(true) .create_new(true) .open(path.clone()); if let Ok(mut file) = res { write!(file, "{}", toml::to_string_pretty(&Self::sample()?)?)?; Ok(file) } else { let file = std::fs::OpenOptions::new() .write(true) .read(true) .truncate(false) .open(path)?; Ok(file) } } pub fn load(maybe_path: Option<String>) -> crate::VividResult<Self> { use std::io::Read as _; let mut file = Self::load_file(maybe_path)?; let mut file_contents = vec![]; 
file.read_to_end(&mut file_contents)?; toml::from_slice(&file_contents).map_err(Into::into) } pub fn edit() -> crate::VividResult<()> { let _ = Self::load_file(None)?; let file_path = std::ffi::CString::new(Self::config_path()?.to_str().unwrap().as_bytes()).unwrap(); let hwnd = unsafe { ShellExecuteA( NULL as _, NULL as _, file_path.as_ptr(), NULL as _, NULL as _, SW_SHOWNORMAL, ) }; if hwnd as u32 > 32 { Ok(()) } else { return Err(VividError::windows_error()); } } pub fn vibrance_for_program(&self, program_exe: &str) -> Option<(u8, bool)> { self.program_settings .iter() .find(|&program| program.exe_name == program_exe) .map(|program| { ( program.vibrance, program.fullscreen_only.unwrap_or_default(), ) }) } pub fn default_vibrance(&self) -> u8 { self.desktop_vibrance } }
use crate::error::VividError; use winapi::{ shared::ntdef::NULL, um::{shellapi::ShellExecuteA, winuser::SW_SHOWNORMAL}, }; pub const DEFAULT_CONFIG_FILENAME: &str = "vivid.toml"; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct VideoMode { pub width: u32, pub height: u32, pub freq: u32, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Program { pub exe_name: String, pub vibrance: u8, pub fullscreen_only: Option<bool>, pub resolution: Option<VideoMode>, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Config { desktop_vibrance: u8, resolution: Option<VideoMode>, program_settings: Vec<Program>, } impl Default for Config { fn default() -> Self { Self { desktop_vibrance: 50, program_settings: vec![], resolution: None, } } } impl Config { fn sample() -> crate::VividResult<Self> { let vibrance = unsafe { crate::GPU.as_ref()?.write().get_vibrance()? }; let mut default = Self::default(); default.desktop_vibrance = vibrance; default.program_settings.push(Program { exe_name: "sample_program.exe".into(), vibrance, fullscreen_only: Some(false), resolution: None, }); Ok(default) } fn config_path() -> crate::VividResult<std::path::PathBuf> { let mut path = std::env::current_exe()?; path.set_file_name(DEFAULT_CONFIG_FILENAME); Ok(path) } fn load_file(maybe_path: Option<String>) -> crate::VividResult<std::fs::File> { use std::io::Write as _; let path = maybe_path.map_or_else(|| Self::config_path(), |path| Ok(path.into()))?; let res = std::fs::OpenOptions::new() .write(true) .read(true) .create_new(true) .open(path.clone()); if let Ok(mut file) = res { write!(file, "{}", toml::to_string_pretty(&Self::sample()?)?)?; Ok(file) } else { let file = std::fs::OpenOptions::new() .write(true) .read(true) .truncate(false) .open(path)?; Ok(file) } } pub fn load(maybe_path: Option<String>) -> crate::VividResult<Self> { use std::io::Read as _; let mut file = Self::load_file(maybe_path)?; let mut file_contents = vec![]; 
file.read_to_end(&mut file_contents)?; toml::from_slice(&file_contents).map_err(Into::into) } pub fn edit() -> crate::VividResult<()> { let _ = Self::load_file(None)?; let file_path = std::ffi::CString::new(Self::config_path()?.to_str().unwrap().as_bytes()).unwrap(); let hwnd = unsafe { ShellExecuteA( NULL as _, NULL as _,
; } } pub fn vibrance_for_program(&self, program_exe: &str) -> Option<(u8, bool)> { self.program_settings .iter() .find(|&program| program.exe_name == program_exe) .map(|program| { ( program.vibrance, program.fullscreen_only.unwrap_or_default(), ) }) } pub fn default_vibrance(&self) -> u8 { self.desktop_vibrance } }
file_path.as_ptr(), NULL as _, NULL as _, SW_SHOWNORMAL, ) }; if hwnd as u32 > 32 { Ok(()) } else { return Err(VividError::windows_error())
function_block-random_span
[ { "content": "#[inline(always)]\n\nfn dll_exists(path: *const winapi::ctypes::c_char) -> bool {\n\n let hwnd = unsafe {\n\n winapi::um::libloaderapi::LoadLibraryExA(\n\n path,\n\n winapi::shared::ntdef::NULL,\n\n winapi::um::libloaderapi::LOAD_LIBRARY_AS_DATAFILE\n\n ...
Rust
adapter/deps/lldb/src/sb/sbdata.rs
naari3/vscode-lldb
bed54848119cb0e8846ed198c5bfd652f650922b
use super::*; use std::marker::PhantomData; cpp_class!(unsafe struct _SBData as "SBData"); unsafe impl Send for _SBData {} #[repr(transparent)] pub struct SBData<'a> { _inner: _SBData, _marker: PhantomData<&'a ()>, } pub type SBDataOwned = SBData<'static>; impl<'b> SBData<'b> { pub fn new() -> SBDataOwned { cpp!(unsafe [] -> SBData as "SBData" { return SBData(); }) } pub fn borrow_bytes<'a>(bytes: &'a [u8], endian: ByteOrder, addr_size: usize) -> SBData<'a> { let buf = bytes.as_ptr(); let size = bytes.len(); let inner = cpp!(unsafe [buf as "void*", size as "size_t", endian as "ByteOrder", addr_size as "size_t"] -> _SBData as "SBData" { SBData data; SBError error; data.SetData(error, buf, size, endian, addr_size); return data; }); SBData { _inner: inner, _marker: PhantomData, } } pub fn from_cstr(cstr: &CStr, endian: ByteOrder, addr_size: usize) -> SBDataOwned { let ptr = cstr.as_ptr(); cpp!(unsafe [ptr as "const char*", endian as "ByteOrder", addr_size as "size_t"] -> SBData as "SBData" { return SBData::CreateDataFromCString(endian, addr_size, ptr); }) } pub fn clear(&mut self) { cpp!(unsafe [self as "SBData*"] { return self->Clear(); }) } pub fn byte_order(&self) -> ByteOrder { cpp!(unsafe [self as "SBData*"] -> ByteOrder as "ByteOrder" { return self->GetByteOrder(); }) } pub fn address_byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return (size_t)self->GetAddressByteSize(); }) } pub fn byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return self->GetByteSize(); }) } pub fn read_f32(&self, offset: u64) -> Result<f32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> f32 as "float" { return self->GetFloat(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_f64(&self, offset: u64) -> Result<f64, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as 
"SBData*", mut error as "SBError", offset as "offset_t"] -> f64 as "double" { return self->GetDouble(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_address(&self, offset: u64) -> Result<Address, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> Address as "addr_t" { return self->GetAddress(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u8(&self, offset: u64) -> Result<u8, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u8 as "uint8_t" { return self->GetUnsignedInt8(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u16(&self, offset: u64) -> Result<u16, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u16 as "uint16_t" { return self->GetUnsignedInt16(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u32(&self, offset: u64) -> Result<u32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u32 as "uint32_t" { return self->GetUnsignedInt32(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u64(&self, offset: u64) -> Result<u64, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u64 as "uint64_t" { return self->GetUnsignedInt64(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_string(&self, offset: u64) -> Result<*const c_char, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> *const c_char as "const char*" { 
return self->GetString(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_raw_data(&self, offset: u64, buffer: &mut [u8]) -> Result<(), SBError> { let ptr = buffer.as_ptr(); let size = buffer.len(); let mut error = SBError::new(); cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t", ptr as "void*", size as "size_t"] -> usize as "size_t" { return self->ReadRawData(error, offset, ptr, size); }); if error.is_success() { Ok(()) } else { Err(error) } } } impl<'a> IsValid for SBData<'a> { fn is_valid(&self) -> bool { cpp!(unsafe [self as "SBData*"] -> bool as "bool" { return self->IsValid(); }) } } impl<'a> fmt::Debug for SBData<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { debug_descr(f, |descr| { cpp!(unsafe [self as "SBData*", descr as "SBStream*"] -> bool as "bool" { return self->GetDescription(*descr); }) }) } } #[derive(Clone, Copy, Eq, PartialEq, Debug)] #[repr(u32)] pub enum ByteOrder { Invalid = 0, Big = 1, PDP = 2, Little = 4, }
use super::*; use std::marker::PhantomData; cpp_class!(unsafe struct _SBData as "SBData"); unsafe impl Send for _SBData {} #[repr(transparent)] pub struct SBData<'a> { _inner: _SBData, _marker: PhantomData<&'a ()>, } pub type SBDataOwned = SBData<'static>; impl<'b> SBData<'b> { pub fn new() -> SBDataOwned { cpp!(unsafe [] -> SBData as "SBData" { return SBData(); }) } pub fn borrow_bytes<'a>(bytes: &'a [u8], endian: ByteOrder, addr_size: usize) -> SBData<'a> { let buf = bytes.as_ptr(); let size = bytes.len(); let inner = cpp!(unsafe [buf as "void*", size as "size_t", endian as "ByteOrder", addr_size as "size_t"] -> _SBData as "SBData" { SBData data; SBError error; data.SetData(error, buf, size, endian, addr_size); return data; }); SBData { _inner: inner, _marker: PhantomData, } } pub fn from_cstr(cstr: &CStr, endian: ByteOrder, addr_size: usize) -> SBDataOwned { let ptr = cstr.as_ptr(); cpp!(unsafe [ptr as "const char*", endian as "ByteOrder", addr_size as "size_t"] -> SBData as "SBData" { return SBData::CreateDataFromCString(endian, addr_size, ptr); }) } pub fn clear(&mut self) { cpp!(unsafe [self as "SBData*"] { return self->Clear(); }) } pub fn byte_order(&self) -> ByteOrder { cpp!(unsafe [self as "SBData*"] -> ByteOrder as "ByteOrder" { return self->GetByteOrder(); }) } pub fn address_byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return (size_t)self->GetAddressByteSize(); }) } pub fn byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return self->GetByteSize(); }) } pub fn read_f32(&self, offset: u64) -> Result<f32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> f32 as "float" { return self->GetFloat(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_f64(&self, offset: u64) -> Result<f64, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as 
"SBData*", mut error as "SBError", offset as "offset_t"] -> f64 as "double" { return self->GetDouble(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_address(&self, offset: u64) -> Result<Address, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> Address as "addr_t" { return self->GetAddress(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u8(&self, offset: u64) -> Result<u8, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u8 as "uint8_t" { return self->GetUnsignedInt8(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u16(&self, offset: u64) -> Result<u16, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u16 as "uint16_t" { return self->GetUnsignedInt16(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u32(&self, offset: u64) -> Result<u32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u32 as "uint32_t" { return self->GetUnsignedInt32(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u64(&self, offset: u64) -> Result<u64, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u64 as "uint64_t" { return self->GetUnsignedInt64(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_string(&self, offset: u64) -> Result<*const c_char, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> *const c_char as "const char*" { 
return self->GetString(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_raw_data(&self, offset: u64, buffer: &mut [u8]) -> Result<(), SBError> { let ptr = buffer.as_ptr(); let size = buffer.len(); let mut error = SBError::new(); cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t", ptr as "void*", size as "size_t"] -> usize as "size_t" { return self->ReadRawData(error, offset, ptr, size); }); if error.is_success() { Ok(()) } else { Err(error) } } } impl<'a> IsValid for SBData<'a> { fn is_valid(&self) -> bool { cpp!(unsafe [self as "SBData*"] -> bool as "bool" { return self->IsValid(); }) } } impl<'a> fmt::Debug for SBData<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
} } #[derive(Clone, Copy, Eq, PartialEq, Debug)] #[repr(u32)] pub enum ByteOrder { Invalid = 0, Big = 1, PDP = 2, Little = 4, }
debug_descr(f, |descr| { cpp!(unsafe [self as "SBData*", descr as "SBStream*"] -> bool as "bool" { return self->GetDescription(*descr); }) })
call_expression
[ { "content": "// The returned FILE takes ownership of file's descriptor.\n\npub fn cfile_from_file(file: File, write: bool) -> Result<*mut FILE, SBError> {\n\n #[cfg(unix)]\n\n let fd = file.into_raw_fd() as isize;\n\n #[cfg(windows)]\n\n let fd = file.into_raw_handle() as isize;\n\n\n\n let mut ...
Rust
src/once.rs
tiqwab/xv6-rust
525899393df10855a274ce0a9d4e5841aa032aeb
use core::cell::UnsafeCell; use core::fmt; use core::fmt::Formatter; use core::hint::unreachable_unchecked as unreachable; use core::sync::atomic::{spin_loop_hint as cpu_relax, AtomicUsize, Ordering}; pub(crate) struct Once<T> { state: AtomicUsize, data: UnsafeCell<Option<T>>, } impl<T: fmt::Debug> fmt::Debug for Once<T> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self.try_get() { Some(s) => write!(f, "Once {{ data: ") .and_then(|()| s.fmt(f)) .and_then(|()| write!(f, "}}")), None => write!(f, "Once {{ <uninitialized> }}"), } } } unsafe impl<T: Send + Sync> Sync for Once<T> {} unsafe impl<T: Send> Send for Once<T> {} const INCOMPLETE: usize = 0x0; const RUNNING: usize = 0x01; const COMPLETE: usize = 0x2; const PANICKED: usize = 0x3; impl<T> Once<T> { pub(crate) const INIT: Self = Once { state: AtomicUsize::new(INCOMPLETE), data: UnsafeCell::new(None), }; pub(crate) const fn new() -> Once<T> { Self::INIT } fn force_get(&self) -> &T { match unsafe { &*self.data.get() }.as_ref() { None => unsafe { unreachable() }, Some(p) => p, } } pub fn call_once<F: FnOnce() -> T>(&self, builder: F) -> &T { let mut status = self.state.load(Ordering::SeqCst); if status == INCOMPLETE { status = self .state .compare_and_swap(INCOMPLETE, RUNNING, Ordering::SeqCst); let mut finish = Finish { state: &self.state, panicked: true, }; unsafe { *self.data.get() = Some(builder()) }; finish.panicked = false; status = COMPLETE; self.state.store(status, Ordering::SeqCst); return self.force_get(); } loop { match status { INCOMPLETE => unreachable!(), RUNNING => { cpu_relax(); status = self.state.load(Ordering::SeqCst) } PANICKED => panic!("Once has panicked"), COMPLETE => return self.force_get(), _ => unreachable!(), } } } pub(crate) fn try_get(&self) -> Option<&T> { match self.state.load(Ordering::SeqCst) { COMPLETE => Some(self.force_get()), _ => None, } } pub(crate) fn wait(&self) -> Option<&T> { loop { match self.state.load(Ordering::SeqCst) { INCOMPLETE => return None, 
RUNNING => cpu_relax(), COMPLETE => return Some(self.force_get()), PANICKED => panic!("Once has panicked"), _ => unreachable!(), } } } } struct Finish<'a> { state: &'a AtomicUsize, panicked: bool, } impl<'a> Drop for Finish<'a> { fn drop(&mut self) { if self.panicked { self.state.store(PANICKED, Ordering::SeqCst); } } }
use core::cell::UnsafeCell; use core::fmt; use core::fmt::Formatter; use core::hint::unreachable_unchecked as unreachable; use core::sync::atomic::{spin_loop_hint as cpu_relax, AtomicUsize, Ordering}; pub(crate) struct Once<T> { state: AtomicUsize, data: UnsafeCell<Option<T>>, } impl<T: fmt::Debug> fmt::Debug for Once<T> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self.try_get() { Some(s) => write!(f, "Once {{ data: ") .and_then(|()| s.fmt(f)) .and_then(|()| write!(f, "}}")), None => write!(f, "Once {{ <uninitialized> }}"), } } } unsafe impl<T: Send + Sync> Sync for Once<T> {} unsafe impl<T: Send> Send for Once<T> {} const INCOMPLETE: usize = 0x0; const RUNNING: usize = 0x01; const COMPLETE: usize = 0x2; const PANICKED: usize = 0x3; impl<T> Once<T> { pub(crate) const INIT: Self = Once { state: AtomicUsize::new(INCOMPLETE), data: UnsafeCell::new(None), }; pub(crate) const fn new() -> Once<T> { Self::INIT }
pub fn call_once<F: FnOnce() -> T>(&self, builder: F) -> &T { let mut status = self.state.load(Ordering::SeqCst); if status == INCOMPLETE { status = self .state .compare_and_swap(INCOMPLETE, RUNNING, Ordering::SeqCst); let mut finish = Finish { state: &self.state, panicked: true, }; unsafe { *self.data.get() = Some(builder()) }; finish.panicked = false; status = COMPLETE; self.state.store(status, Ordering::SeqCst); return self.force_get(); } loop { match status { INCOMPLETE => unreachable!(), RUNNING => { cpu_relax(); status = self.state.load(Ordering::SeqCst) } PANICKED => panic!("Once has panicked"), COMPLETE => return self.force_get(), _ => unreachable!(), } } } pub(crate) fn try_get(&self) -> Option<&T> { match self.state.load(Ordering::SeqCst) { COMPLETE => Some(self.force_get()), _ => None, } } pub(crate) fn wait(&self) -> Option<&T> { loop { match self.state.load(Ordering::SeqCst) { INCOMPLETE => return None, RUNNING => cpu_relax(), COMPLETE => return Some(self.force_get()), PANICKED => panic!("Once has panicked"), _ => unreachable!(), } } } } struct Finish<'a> { state: &'a AtomicUsize, panicked: bool, } impl<'a> Drop for Finish<'a> { fn drop(&mut self) { if self.panicked { self.state.store(PANICKED, Ordering::SeqCst); } } }
fn force_get(&self) -> &T { match unsafe { &*self.data.get() }.as_ref() { None => unsafe { unreachable() }, Some(p) => p, } }
function_block-full_function
[ { "content": "/// Modify mappings in kern_pgdir to support SMP\n\n/// - Map the per-CPU stacks in the region [KSTACKTOP-PTSIZE, KSTACKTOP)\n\nfn mem_init_mp(kern_pgdir: &mut PageDirectory, allocator: &mut PageAllocator) {\n\n // Map per-CPU stacks starting at KSTACKTOP, for up to 'NCPU' CPUs.\n\n //\n\n...
Rust
src/server/agents.rs
bofh69/crater
29d2aebfa4723bba8e64889e728953d223418447
use chrono::Duration; use chrono::{DateTime, Utc}; use db::{Database, QueryUtils}; use errors::*; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; use std::collections::HashSet; const INACTIVE_AFTER: i64 = 300; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum AgentStatus { Working, Idle, Unreachable, } pub struct Agent { name: String, experiment: Option<Experiment>, last_heartbeat: Option<DateTime<Utc>>, git_revision: Option<String>, } impl Agent { fn with_experiment(mut self, db: &Database) -> Result<Self> { self.experiment = Experiment::run_by(db, &Assignee::Agent(self.name.clone()))?; Ok(self) } pub fn git_revision(&self) -> Option<&String> { self.git_revision.as_ref() } pub fn name(&self) -> &str { &self.name } pub fn assigned_experiment(&self) -> Option<&Experiment> { self.experiment.as_ref() } pub fn last_heartbeat(&self) -> Option<&DateTime<Utc>> { self.last_heartbeat.as_ref() } pub fn status(&self) -> AgentStatus { if let Some(ref heartbeat) = self.last_heartbeat { if Utc::now() - Duration::seconds(INACTIVE_AFTER) < *heartbeat { if self.experiment.is_some() { return AgentStatus::Working; } else { return AgentStatus::Idle; } } } AgentStatus::Unreachable } } #[derive(Clone)] pub struct Agents { db: Database, } impl Agents { pub fn new(db: Database, tokens: &Tokens) -> Result<Self> { let agents = Agents { db }; agents.synchronize(tokens)?; Ok(agents) } fn synchronize(&self, tokens: &Tokens) -> Result<()> { self.db.transaction(|trans| { let mut real = tokens.agents.values().collect::<HashSet<&String>>(); for agent in self.all()? 
{ if !real.remove(&agent.name) { trans.execute("DELETE FROM agents WHERE name = ?1;", &[&agent.name])?; } } for missing in &real { trans.execute( "INSERT INTO agents (name) VALUES (?1);", &[&missing.as_str()], )?; } Ok(()) }) } pub fn all(&self) -> Result<Vec<Agent>> { self.db .query("SELECT * FROM agents ORDER BY name;", &[], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?.into_iter() .map(|agent| agent.with_experiment(&self.db)) .collect() } #[cfg(test)] fn get(&self, name: &str) -> Result<Option<Agent>> { let row = self .db .get_row("SELECT * FROM agents WHERE name = ?1;", &[&name], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?; Ok(if let Some(agent) = row { Some(agent.with_experiment(&self.db)?) } else { None }) } pub fn record_heartbeat(&self, agent: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET last_heartbeat = ?1 WHERE name = ?2;", &[&Utc::now(), &agent], )?; assert_eq!(changes, 1); Ok(()) } pub fn set_git_revision(&self, agent: &str, revision: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET git_revision = ?1 WHERE name = ?2;", &[&revision, &agent], )?; assert_eq!(changes, 1); Ok(()) } } #[cfg(test)] mod tests { use super::{AgentStatus, Agents}; use actions::CreateExperiment; use config::Config; use db::Database; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; #[test] fn test_agents_synchronize() { let db = Database::temp().unwrap(); let agents = Agents::new(db, &Tokens::default()).unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token1".into(), "agent1".into()); tokens.agents.insert("token2".into(), "agent2".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent1".to_string(), 
"agent2".to_string()] ); tokens.agents.remove("token1"); tokens.agents.insert("token3".into(), "agent3".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent2".to_string(), "agent3".to_string()] ); } #[test] fn test_heartbeat_recording() { let db = Database::temp().unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db, &tokens).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(agent.last_heartbeat.is_none()); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); let first_heartbeat = agent.last_heartbeat.unwrap(); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(first_heartbeat < agent.last_heartbeat.unwrap()); } #[test] fn test_agent_status() { let db = Database::temp().unwrap(); let config = Config::default(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db.clone(), &tokens).unwrap(); ::crates::lists::setup_test_lists(&db, &config).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Unreachable); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Idle); CreateExperiment::dummy("dummy") .apply(&db, &config) .unwrap(); Experiment::next(&db, &Assignee::Agent("agent".to_string())).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Working); } }
use chrono::Duration; use chrono::{DateTime, Utc}; use db::{Database, QueryUtils}; use errors::*; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; use std::collections::HashSet; const INACTIVE_AFTER: i64 = 300; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum AgentStatus { Working, Idle, Unreachable, } pub struct Agent { name: String, experiment: Option<Experiment>, last_heartbeat: Option<DateTime<Utc>>, git_revision: Option<String>, } impl Agent { fn with_experiment(mut self, db: &Database) -> Result<Self> { self.experiment = Experiment::run_by(db, &Assignee::Agent(self.name.clone()))?; Ok(self) } pub fn git_revision(&self) -> Option<&String> { self.git_revision.as_ref() } pub fn name(&self) -> &str { &self.name } pub fn assigned_experiment(&self) -> Option<&Experiment> { self.experiment.as_ref() } pub fn last_heartbeat(&self) -> Option<&DateTime<Utc>> { self.last_heartbeat.as_ref() } pub fn status(&self) -> AgentStatus { if let Some(ref heartbeat) = self.last_heartbeat { if Utc::now() - Duration::seconds(INACTIVE_AFTER) < *heartbeat { if self.experiment.is_some() { return AgentStatus::Working; } else { return AgentStatus::Idle; } } } AgentStatus::Unreachable } } #[derive(Clone)] pub struct Agents { db: Database, } impl Agents { pub fn new(db: Database, tokens: &Tokens) -> Result<Self> { let agents = Agents { db }; agents.synchronize(tokens)?; Ok(agents) } fn synchronize(&self, tokens: &Tokens) -> Result<()> { self.db.transaction(|trans| { let mut real = tokens.agents.values().collect::<HashSet<&String>>(); for agent in self.all()? 
{ if !real.remove(&agent.name) { trans.execute("DELETE FROM agents WHERE name = ?1;", &[&agent.name])?; } } for missing in &real { trans.execute( "INSERT INTO agents (name) VALUES (?1);", &[&missing.as_str()], )?; } Ok(()) }) } pub fn all(&self) -> Result<Vec<Agent>> { self.db .query("SELECT * FROM agents ORDER BY name;", &[], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?.into_iter() .map(|agent| agent.with_experiment(&self.db)) .collect() } #[cfg(test)] fn get(&self, name: &str) -> Result<Option<Agent>> { let row = self .db .get_row("SELECT * FROM agents WHERE name = ?1;", &[&name], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?;
} pub fn record_heartbeat(&self, agent: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET last_heartbeat = ?1 WHERE name = ?2;", &[&Utc::now(), &agent], )?; assert_eq!(changes, 1); Ok(()) } pub fn set_git_revision(&self, agent: &str, revision: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET git_revision = ?1 WHERE name = ?2;", &[&revision, &agent], )?; assert_eq!(changes, 1); Ok(()) } } #[cfg(test)] mod tests { use super::{AgentStatus, Agents}; use actions::CreateExperiment; use config::Config; use db::Database; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; #[test] fn test_agents_synchronize() { let db = Database::temp().unwrap(); let agents = Agents::new(db, &Tokens::default()).unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token1".into(), "agent1".into()); tokens.agents.insert("token2".into(), "agent2".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent1".to_string(), "agent2".to_string()] ); tokens.agents.remove("token1"); tokens.agents.insert("token3".into(), "agent3".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent2".to_string(), "agent3".to_string()] ); } #[test] fn test_heartbeat_recording() { let db = Database::temp().unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db, &tokens).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(agent.last_heartbeat.is_none()); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); let first_heartbeat = agent.last_heartbeat.unwrap(); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(first_heartbeat < agent.last_heartbeat.unwrap()); } #[test] fn test_agent_status() 
{ let db = Database::temp().unwrap(); let config = Config::default(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db.clone(), &tokens).unwrap(); ::crates::lists::setup_test_lists(&db, &config).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Unreachable); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Idle); CreateExperiment::dummy("dummy") .apply(&db, &config) .unwrap(); Experiment::next(&db, &Assignee::Agent("agent".to_string())).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Working); } }
Ok(if let Some(agent) = row { Some(agent.with_experiment(&self.db)?) } else { None })
call_expression
[ { "content": "pub fn execute(db: &mut Connection) -> Result<()> {\n\n // If the database version is 0, create the migrations table and bump it\n\n let version: i32 = db.query_row(\"PRAGMA user_version;\", &[], |r| r.get(0))?;\n\n if version == 0 {\n\n db.execute(\"CREATE TABLE migrations (name T...
Rust
src/main.rs
museun/tinge
aee80a24bcb7ba868934f21a7c5b0eb33c4d0243
use filetime::FileTime; use std::fs::{self, File}; use std::path::Path; const USAGE: &str = " tinge. change file access and modification times Usage: tinge [-acm] [-r <file>] <file>> Options: -a Change access time -c Do not create file if it exists -m Change modification time -r <file> Use access and modification times from this file "; fn error(msg: &str) -> ! { eprintln!("Error! {}", msg); eprintln!("{}", USAGE); std::process::exit(1); } #[derive(Debug)] struct Args { access: bool, no_create: bool, modify: bool, source: Option<String>, file: String, } impl Args { pub fn parse() -> Args { let mut access = None; let mut no_create = None; let mut modify = None; let mut replacement = None; let mut source = None; let mut file = None; macro_rules! check { ($flag:expr, $data:expr, $err:expr) => {{ if $flag.is_some() { error($err) } $flag.replace($data); }}; } for arg in std::env::args().skip(1) { if arg.starts_with('-') { for ch in arg[1..].chars() { match ch { 'a' => check!(access, true, "-a flag already specified"), 'c' => check!(no_create, true, "-c flag already specified"), 'm' => check!(modify, true, "-m flag already specified"), 'r' => check!(replacement, true, "-r flag already specified"), _ => {} }; } continue; } let s = arg .chars() .skip_while(|c| c.is_whitespace()) .take_while(|c| !c.is_whitespace()); if replacement.is_some() && source.is_none() { source.replace(s.collect::<String>()); continue; } if file.is_none() { file.replace(s.collect::<String>()); } } if file.is_none() || file.as_ref().map(|d| d.len()) == Some(0) { error("a filename must be provided") } Self { access: access.unwrap_or_default(), no_create: no_create.unwrap_or_default(), modify: modify.unwrap_or_default(), source, file: file.unwrap(), } } } struct TempFile<'a>(&'a str); impl<'a> TempFile<'a> { pub fn create(p: &'a str) -> Self { let _ = File::create(p).unwrap(); TempFile(p) } } impl<'a> Drop for TempFile<'a> { fn drop(&mut self) { let _ = fs::remove_file(self.0); } } fn main() { let Args 
{ access, no_create, modify, source, file, } = Args::parse(); const TEMP: &str = "___touch"; let _temp = TempFile::create(TEMP); let path = Path::new(&file); if path.exists() && no_create { return; } if !path.exists() { let _ = File::create(&file); } let df = fs::metadata(&file).unwrap(); let dt = fs::metadata(TEMP).unwrap(); let mut tatime = FileTime::from_last_access_time(&dt); let mut tmtime = FileTime::from_last_modification_time(&dt); if let Some(source) = source { let p = Path::new(&source); if p.exists() { let fi = fs::metadata(&source).unwrap(); tatime = FileTime::from_last_access_time(&fi); tmtime = FileTime::from_last_modification_time(&fi); } else { error("cannot access reference file"); } } let (fatime, fmtime) = ( FileTime::from_last_access_time(&df), FileTime::from_last_modification_time(&df), ); let (access, modify) = match (access, modify) { (true, false) => (tatime, fmtime), (false, true) => (fatime, tmtime), (true, true) => (tatime, tmtime), _ => return, }; let _ = filetime::set_file_times(file, access, modify); }
use filetime::FileTime; use std::fs::{self, File}; use std::path::Path; const USAGE: &str = " tinge. change file access and modification times Usage: tinge [-acm] [-r <file>] <file>> Options: -a Change access time -c Do not create file if it exists -m Change modification time -r <file> Use access and modification times from this file "; fn error(msg: &str) -> ! { eprintln!("Error! {}", msg); eprintln!("{}", USAGE); std::process::exit(1); } #[derive(Debug)] struct Args { access: bool, no_create: bool, modify: bool, source: Option<String>, file: String, } impl Args { pub fn parse() -> Args { let mut access = None; let mut no_create = None; let mut modify = None; let mut replacement = None; let mut source = None; let mut file = None; macro_rules! check { ($flag:expr, $data:expr, $err:expr) => {{ if $flag.is_some() { error($err) } $flag.replace($data); }}; } for arg in std::env::args().skip(1) { if arg.starts_with('-') { for ch in arg[1..].chars() { match ch { 'a' => check!(access, true, "-a flag already specified"), 'c' => check!(no_create, true, "-c flag already specified"), 'm' => check!(modify, true, "-m flag already specified"), 'r' => check!(replacement, true, "-r flag already specified"), _ => {} }; } continue; } let s = arg .chars() .skip_while(|c| c.is_whitespace()) .take_while(|c| !c.is_whitespace()); if replacement.is_some() && source.is_none() { source.replace(s.collect::<String>()); continue; } if file.is_none() { file.replace(s.collect::<String>()); } } if file.is_none() || file.as_ref().map(|d| d.len()) == Some(0) { error("a filename must be provided") } Self { access: access.unwrap_or_default(), no_create: no_create.unwrap_or_default(), modify: modify.unwrap_or_default(), source, file: file.unwrap(), } } } struct TempFile<'a>(&'a str); impl<'a> TempFile<'a> { pub fn create(p: &'a str) -> Self { let _ = File::create(p).unwrap(); TempFile(p) } } impl<'a> Drop for TempFile<'a> { fn drop(&mut self) { let _ = fs::remove_file(self.0); } } fn main() { let Args 
{ access, no_create, modify, source, file, } = Args::parse(); const TEMP: &str = "___touch"; let _temp = TempFile::create(TEMP); let path = Path::new(&file); if path.exists() && no_create { return; } if !path.exists() { let _ = File::create(&file); } let df = fs::metadata(&file).unwrap(); let dt = fs::metadata(TEMP).unwrap(); let mut tatime = FileTime::from_last_access_time(&dt); let mut tmtime = FileTime::from_last_modification_time(&dt); if let Some(source) = source { let p = Path::new(&source); if p.exists() { let fi = fs::metadata(&source).unwrap(); tatime = FileTime::from_last_access_time(&fi); tmtime = FileTime::from_last_modification_time(&fi); } else { error("cannot access reference file"); } } let (fatime, fmtime) = ( FileTime::from_last_access_time(&df), FileTime::from_last_modification_time(&df), ); let (access, modify) = match (access, modif
y) { (true, false) => (tatime, fmtime), (false, true) => (fatime, tmtime), (true, true) => (tatime, tmtime), _ => return, }; let _ = filetime::set_file_times(file, access, modify); }
function_block-function_prefixed
[]
Rust
day16/src/main.rs
obi1kenobi/advent-of-code-2020
51f1c462642dc59eae8fadf26cc635bce063678d
use std::{
    collections::{HashMap, HashSet},
    fs,
};

/// Advent of Code 2020, day 16: parse the ticket notes, then print the
/// part 1 (error rate) and part 2 (departure product) answers.
fn main() {
    // NOTE(review): the input path is machine-specific; consider reading it
    // from std::env::args instead.
    let contents = fs::read_to_string(
        "/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day16/input.txt",
    )
    .unwrap();

    // The input has three blank-line-separated sections:
    // field rules, "your ticket", "nearby tickets".
    let groups: Vec<_> = contents.trim().split("\n\n").collect();
    let fields_info = groups[0];
    let your_ticket_info = groups[1];
    let nearby_tickets_info = groups[2];

    let fields: Vec<_> = fields_info.trim().lines().collect();

    // Flatten every "lo-hi" range of every field into one list of
    // inclusive (lo, hi) rules.
    let field_valid_ranges: Vec<_> = fields
        .iter()
        .map(|&x| x.split(": ").last().unwrap())
        .flat_map(|x| x.split(" or "))
        .collect();
    let field_valid_rules: Vec<(i64, i64)> = field_valid_ranges
        .iter()
        .map(|&x| x.split('-').map(|y| y.parse().unwrap()))
        .map(|mut y| (y.next().unwrap(), y.next().unwrap()))
        .collect();

    let your_ticket_data: Vec<_> = your_ticket_info.trim().lines().collect();
    assert!(your_ticket_data[0] == "your ticket:");
    let your_ticket_numbers: Vec<i64> = your_ticket_data[1]
        .trim()
        .split(',')
        .map(|x| x.parse().unwrap())
        .collect();

    let nearby_tickets_data: Vec<_> = nearby_tickets_info.trim().lines().collect();
    assert!(nearby_tickets_data[0] == "nearby tickets:");
    let nearby_tickets: Vec<Vec<i64>> = nearby_tickets_data[1..]
        .iter()
        .map(|&ticket| {
            ticket
                .trim()
                .split(',')
                .map(|y| y.parse().unwrap())
                .collect()
        })
        .collect();

    let (part1_soln, mut valid_tickets) = solve_part1(&field_valid_rules, &nearby_tickets);
    println!("{}", part1_soln);

    // Your own ticket is also valid evidence for part 2's field deduction.
    valid_tickets.push(your_ticket_numbers.clone());
    println!(
        "{}",
        solve_part2(&fields, &valid_tickets, &your_ticket_numbers)
    );
}

/// Part 1: sum every value on `nearby_tickets` that satisfies no rule at
/// all (the "ticket scanning error rate"), and return that sum together
/// with the tickets whose values each satisfied at least one rule.
fn solve_part1(
    field_valid_rules: &[(i64, i64)],
    nearby_tickets: &[Vec<i64>],
) -> (i64, Vec<Vec<i64>>) {
    let mut result: i64 = 0;
    let mut valid_tickets: Vec<Vec<i64>> = Vec::new();
    for nearby_ticket in nearby_tickets {
        let mut is_valid_ticket = true;
        for value in nearby_ticket {
            // A value is fine if any rule's inclusive range covers it.
            let is_valid_value = field_valid_rules
                .iter()
                .any(|(lower, upper)| lower <= value && upper >= value);
            if !is_valid_value {
                is_valid_ticket = false;
                result += value;
            }
        }
        if is_valid_ticket {
            valid_tickets.push(nearby_ticket.clone());
        }
    }
    (result, valid_tickets)
}

/// Part 2: deduce which field each ticket position holds — by
/// intersecting, per position, the set of fields whose rules every valid
/// ticket satisfies there — then multiply your ticket's values for all
/// fields whose name starts with "departure".
fn solve_part2(fields: &[&str], valid_tickets: &[Vec<i64>], your_ticket: &[i64]) -> i64 {
    let field_names: Vec<_> = fields
        .iter()
        .map(|&x| x.split(": ").next().unwrap())
        .collect();
    let field_rule_elements: Vec<Vec<&str>> = fields
        .iter()
        .map(|&x| x.split(": ").last().unwrap().split(" or ").collect())
        .collect();
    let field_rule_ranges: Vec<Vec<(i64, i64)>> = field_rule_elements
        .iter()
        .map(|ranges| {
            ranges
                .iter()
                .map(|&range| range.split('-').map(|value| value.parse::<i64>().unwrap()))
                .map(|mut iter| (iter.next().unwrap(), iter.next().unwrap()))
                .collect()
        })
        .collect();

    // Start with "any field is possible" for every ticket position...
    let mut possible_ticket_index_to_field_index: Vec<HashSet<usize>> = Vec::new();
    for _ in 0..your_ticket.len() {
        possible_ticket_index_to_field_index.push((0..fields.len()).collect());
    }

    // ...then narrow each position to the fields every valid ticket satisfies.
    for ticket in valid_tickets {
        for (ticket_index, ticket_value) in ticket.iter().enumerate() {
            let valid_mappings: HashSet<_> = field_rule_ranges
                .iter()
                .enumerate()
                .filter(|(_, rule)| {
                    rule.iter()
                        .any(|(lower, upper)| lower <= ticket_value && upper >= ticket_value)
                })
                .map(|(rule_index, _)| rule_index)
                .collect();
            let prior_options = &possible_ticket_index_to_field_index[ticket_index];
            let subsequent_options = prior_options
                .intersection(&valid_mappings)
                .copied()
                .collect();
            possible_ticket_index_to_field_index[ticket_index] = subsequent_options;
        }
    }

    // Constraint propagation: repeatedly pin positions with exactly one
    // unassigned candidate field left. Assumes the input is unambiguous;
    // otherwise this loop would not terminate.
    let mut field_index_to_ticket_index: HashMap<usize, usize> = HashMap::new();
    while field_index_to_ticket_index.len() < fields.len() {
        for (ticket_index, possible_field_indexes) in
            possible_ticket_index_to_field_index.iter().enumerate()
        {
            let assigned: HashSet<usize> =
                field_index_to_ticket_index.keys().copied().collect();
            let remaining_field_indexes: HashSet<_> = possible_field_indexes
                .difference(&assigned)
                .copied()
                .collect();
            if remaining_field_indexes.len() == 1 {
                field_index_to_ticket_index.insert(
                    *remaining_field_indexes.iter().next().unwrap(),
                    ticket_index,
                );
            }
        }
    }

    let mut result: i64 = 1;
    for (field_index, field_name) in field_names.iter().enumerate() {
        if field_name.starts_with("departure") {
            let ticket_index = field_index_to_ticket_index[&field_index];
            result *= your_ticket[ticket_index];
        }
    }
    result
}
use std::{ collections::{HashMap, HashSet}, fs, }; fn main() { let contents = fs::read_to_string( "/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day16/input.txt", ) .unwrap(); let groups: Vec<_> = contents.trim().split("\n\n").collect(); let fields_info = groups[0];
fn solve_part1( field_valid_rules: &Vec<(i64, i64)>, nearby_tickets: &Vec<Vec<i64>>, ) -> (i64, Vec<Vec<i64>>) { let mut result: i64 = 0; let mut valid_tickets: Vec<Vec<i64>> = Vec::new(); for nearby_ticket in nearby_tickets { let mut is_valid_ticket = true; for value in nearby_ticket { let is_valid_value: bool = field_valid_rules .iter() .filter(|(lower, upper)| lower <= value && upper >= value) .next() .is_some(); if !is_valid_value { is_valid_ticket = false; result += value; } } if is_valid_ticket { valid_tickets.push(nearby_ticket.clone()); } } (result, valid_tickets) } fn solve_part2(fields: &Vec<&str>, valid_tickets: &Vec<Vec<i64>>, your_ticket: &Vec<i64>) -> i64 { let field_names: Vec<_> = fields .iter() .map(|&x| x.split(": ").next().unwrap()) .collect(); let field_rule_elements: Vec<Vec<&str>> = fields .iter() .map(|&x| x.split(": ").last().unwrap().split(" or ").collect()) .collect(); let field_rule_ranges: Vec<Vec<(i64, i64)>> = field_rule_elements .iter() .map(|ranges| { ranges .iter() .map(|&range| range.split("-").map(|value| value.parse::<i64>().unwrap())) .map(|mut iter| (iter.next().unwrap(), iter.next().unwrap())) .collect() }) .collect(); let mut possible_ticket_index_to_field_index: Vec<HashSet<usize>> = Vec::new(); for _ in 0..your_ticket.len() { possible_ticket_index_to_field_index.push((0..fields.len()).collect()); } for ticket in valid_tickets { for (ticket_index, ticket_value) in ticket.iter().enumerate() { let valid_mappings: HashSet<_> = field_rule_ranges .iter() .enumerate() .filter(|(_, rule)| { rule.iter() .filter(|(lower, upper)| lower <= ticket_value && upper >= ticket_value) .next() .is_some() }) .map(|(rule_index, _)| rule_index) .collect(); let prior_options = &possible_ticket_index_to_field_index[ticket_index]; let subsequent_options = prior_options .intersection(&valid_mappings) .copied() .collect(); possible_ticket_index_to_field_index[ticket_index] = subsequent_options; } } let mut field_index_to_ticket_index: HashMap<usize, 
usize> = HashMap::new(); while field_index_to_ticket_index.len() < fields.len() { for (ticket_index, possible_field_indexes) in possible_ticket_index_to_field_index.iter().enumerate() { let remaining_field_indexes: HashSet<_> = possible_field_indexes .difference(&field_index_to_ticket_index.keys().copied().collect()) .copied() .collect(); if remaining_field_indexes.len() == 1 { field_index_to_ticket_index.insert( remaining_field_indexes.iter().next().unwrap().clone(), ticket_index, ); } } } let mut result: i64 = 1; for (field_index, field_name) in field_names.iter().enumerate() { if field_name.starts_with("departure") { let ticket_index = field_index_to_ticket_index[&field_index]; result *= your_ticket[ticket_index]; } } result }
let your_ticket_info = groups[1]; let nearby_tickets_info = groups[2]; let fields: Vec<_> = fields_info.trim().split("\n").collect(); let field_valid_ranges: Vec<_> = fields .iter() .map(|&x| x.split(": ").last().unwrap()) .flat_map(|x| x.split(" or ")) .collect(); let field_valid_rules: Vec<(i64, i64)> = field_valid_ranges .iter() .map(|&x| x.split("-").map(|y| y.parse().unwrap())) .map(|mut y| (y.next().unwrap(), y.last().unwrap())) .collect(); let your_ticket_data: Vec<_> = your_ticket_info.trim().split("\n").collect(); assert!(your_ticket_data[0] == "your ticket:"); let your_ticket_numbers: Vec<i64> = your_ticket_data[1] .trim() .split(",") .map(|x| x.parse().unwrap()) .collect(); let nearby_tickets_data: Vec<_> = nearby_tickets_info.trim().split("\n").collect(); assert!(nearby_tickets_data[0] == "nearby tickets:"); let nearby_tickets: Vec<Vec<i64>> = nearby_tickets_data[1..nearby_tickets_data.len()] .iter() .map(|&ticket| { ticket .trim() .split(",") .map(|y| y.parse().unwrap()) .collect() }) .collect(); let (part1_soln, mut valid_tickets) = solve_part1(&field_valid_rules, &nearby_tickets); println!("{}", part1_soln); valid_tickets.push(your_ticket_numbers.clone()); println!( "{}", solve_part2(&fields, &valid_tickets, &your_ticket_numbers) ); }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day24/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let all_directions: Vec<_> = contents.trim().split('\\n').map(parse_directions).collect();\n\n\n\n println!...
Rust
benches/benchmarks/bench_curve.rs
algorand/pixel
7c90f8162d6b8c879f795cf1d2d29350abeb0a96
use super::ff::Field; use super::pairing::{bls12_381::*, CurveAffine, CurveProjective, Engine, SubgroupCheck}; use super::pixel::{PixelG1, PixelG2}; use super::rand_core::*; use super::rand_xorshift::XorShiftRng; use criterion::Criterion; #[allow(dead_code)] fn bench_group_multiplication(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut r1list: Vec<Fr> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); r1list.push(Fr::random(&mut rng)); } let r2list = r1list.clone(); let mut counter = 0; c.bench_function("Pixel G1 muliplication cost", move |b| { b.iter(|| { g1list[counter].mul_assign(r1list[counter]); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 muliplication cost", move |b| { b.iter(|| { g2list[counter].mul_assign(r2list[counter]); counter = (counter + 1) % SAMPLES; }) }); } #[allow(dead_code)] fn bench_membership_testing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); } let mut counter = 0; c.bench_function("Pixel G1 membership testing cost", move |b| { b.iter(|| { g1list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 membership testing cost", move |b| { b.iter(|| { g2list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); } #[allow(dead_code)] fn bench_pairing(c: &mut Criterion) { const SAMPLES: usize = 100; let 
mut g1list1: Vec<G1> = vec![]; let mut g1list2: Vec<G1> = vec![]; let mut g1list3: Vec<G1> = vec![]; let mut g2list1: Vec<G2> = vec![]; let mut g2list2: Vec<G2> = vec![]; let mut g2list3: Vec<G2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list1.push(G1::random(&mut rng)); g1list2.push(G1::random(&mut rng)); g1list3.push(G1::random(&mut rng)); g2list1.push(G2::random(&mut rng)); g2list2.push(G2::random(&mut rng)); g2list3.push(G2::random(&mut rng)); } let mut counter = 0; let g11 = g1list1.clone(); let g21 = g2list1.clone(); c.bench_function("Single pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), )] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; let g11 = g1list1.clone(); let g12 = g1list2.clone(); let g21 = g2list1.clone(); let g22 = g2list2.clone(); c.bench_function("Simutaneously 2 pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), ), ( &(g12[counter].into_affine().prepare()), &(g22[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Simutaneously 3 pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g1list1[counter].into_affine().prepare()), &(g2list1[counter].into_affine().prepare()), ), ( &(g1list2[counter].into_affine().prepare()), &(g2list2[counter].into_affine().prepare()), ), ( &(g1list3[counter].into_affine().prepare()), &(g2list3[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); } criterion_group!( group_ops, bench_group_multiplication, bench_membership_testing, 
bench_pairing );
use super::ff::Field; use super::pairing::{bls12_381::*, CurveAffine, CurveProjective, Engine, SubgroupCheck}; use super::pixel::{PixelG1, PixelG2}; use super::rand_core::*; use super::rand_xorshift::XorShiftRng; use criterion::Criterion; #[allow(dead_code)] fn bench_group_multiplication(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut r1list: Vec<Fr> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); r1list.push(Fr::random(&mut rng)); } let r2list = r1list.clone(); let mut counter = 0; c.bench_function("Pixel G1 muliplication cost", move |b| { b.ite
#[allow(dead_code)] fn bench_membership_testing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); } let mut counter = 0; c.bench_function("Pixel G1 membership testing cost", move |b| { b.iter(|| { g1list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 membership testing cost", move |b| { b.iter(|| { g2list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); } #[allow(dead_code)] fn bench_pairing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list1: Vec<G1> = vec![]; let mut g1list2: Vec<G1> = vec![]; let mut g1list3: Vec<G1> = vec![]; let mut g2list1: Vec<G2> = vec![]; let mut g2list2: Vec<G2> = vec![]; let mut g2list3: Vec<G2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list1.push(G1::random(&mut rng)); g1list2.push(G1::random(&mut rng)); g1list3.push(G1::random(&mut rng)); g2list1.push(G2::random(&mut rng)); g2list2.push(G2::random(&mut rng)); g2list3.push(G2::random(&mut rng)); } let mut counter = 0; let g11 = g1list1.clone(); let g21 = g2list1.clone(); c.bench_function("Single pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), )] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; let g11 = g1list1.clone(); let g12 = g1list2.clone(); let g21 = g2list1.clone(); let g22 = g2list2.clone(); c.bench_function("Simutaneously 2 pairing cost", move |b| { 
b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), ), ( &(g12[counter].into_affine().prepare()), &(g22[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Simutaneously 3 pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g1list1[counter].into_affine().prepare()), &(g2list1[counter].into_affine().prepare()), ), ( &(g1list2[counter].into_affine().prepare()), &(g2list2[counter].into_affine().prepare()), ), ( &(g1list3[counter].into_affine().prepare()), &(g2list3[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); } criterion_group!( group_ops, bench_group_multiplication, bench_membership_testing, bench_pairing );
r(|| { g1list[counter].mul_assign(r1list[counter]); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 muliplication cost", move |b| { b.iter(|| { g2list[counter].mul_assign(r2list[counter]); counter = (counter + 1) % SAMPLES; }) }); }
function_block-function_prefixed
[ { "content": "// Convert a vector back to time.\n\n// Returns an error if time depth is invalid.\n\nfn vec_to_time(mut t_vec: Vec<u64>, d: usize) -> Result<u64, String> {\n\n // python code:\n\n // if tvec == []:\n\n // return 1\n\n // else:\n\n // ti = tvec.pop(0)\n\n //...
Rust
src/signal.rs
Emilgardis/fundsp
213f5ae89ac454c791c731075c323b939dcc476c
use super::math::*;
use num_complex::Complex64;
use tinyvec::TinyVec;

/// Per-channel analysis datum for a signal graph.
/// - `Unknown`: nothing is known about the channel.
/// - `Value(v)`: the channel carries the constant value `v`.
/// - `Latency(l)`: only the channel's latency `l` is known.
/// - `Response(r, l)`: complex frequency response `r` plus latency `l`.
///   (Latency presumably measured in samples — TODO confirm.)
#[derive(Clone, Copy)]
pub enum Signal {
    Unknown,
    Value(f64),
    Latency(f64),
    Response(Complex64, f64),
}

// A `Signal` defaults to `Unknown`; `TinyVec` relies on this for resizing.
impl Default for Signal {
    fn default() -> Signal {
        Signal::Unknown
    }
}

impl Signal {
    /// Pass the signal through a linear filter: applies `filter` to the
    /// frequency response (if any) and adds `latency`. Constant values do
    /// not survive filtering — they degrade to `Unknown`.
    pub fn filter(&self, latency: f64, filter: impl Fn(Complex64) -> Complex64) -> Signal {
        match self {
            Signal::Latency(l) => Signal::Latency(l + latency),
            Signal::Response(response, l) => Signal::Response(filter(*response), l + latency),
            _ => Signal::Unknown,
        }
    }
    /// Pass the signal through a nonlinear stage: any frequency response is
    /// destroyed (demoted to plain latency) and `latency` is added.
    pub fn distort(&self, latency: f64) -> Signal {
        match self {
            Signal::Latency(l) => Signal::Latency(l + latency),
            Signal::Response(_, l) => Signal::Latency(l + latency),
            _ => Signal::Unknown,
        }
    }
    /// Delay the signal: adds `latency`, keeping any response intact;
    /// `Unknown` and `Value` pass through unchanged.
    pub fn delay(&self, latency: f64) -> Signal {
        match self {
            Signal::Latency(l) => Signal::Latency(l + latency),
            Signal::Response(response, l) => Signal::Response(*response, l + latency),
            x => *x,
        }
    }
    /// Scale the signal by a constant `factor`; affects values and
    /// responses, while latency is unchanged.
    pub fn scale(&self, factor: f64) -> Signal {
        match self {
            Signal::Value(x) => Signal::Value(x * factor),
            Signal::Response(response, latency) => Signal::Response(response * factor, *latency),
            x => *x,
        }
    }
    /// Combine with `other` through a nonlinear operation: both sides are
    /// first distorted (dropping any responses), the smaller latency wins,
    /// and `latency` is added on top.
    pub fn combine_nonlinear(&self, other: Signal, latency: f64) -> Signal {
        match (self.distort(0.0), other.distort(0.0)) {
            (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency),
            (Signal::Latency(lx), _) => Signal::Latency(lx + latency),
            (_, Signal::Latency(ly)) => Signal::Latency(ly + latency),
            _ => Signal::Unknown,
        }
    }
    /// Combine with `other` through a linear operation: `value` merges two
    /// constant values, `response` merges two frequency responses, and a
    /// value meeting a response contributes a zero response. Latencies
    /// combine by minimum, plus `latency`.
    ///
    /// NOTE: the arms are order-sensitive — the specific pairs must stay
    /// above the catch-all `Latency`/`Response` arms below them.
    pub fn combine_linear(
        &self,
        other: Signal,
        latency: f64,
        value: impl Fn(f64, f64) -> f64,
        response: impl Fn(Complex64, Complex64) -> Complex64,
    ) -> Signal {
        match (*self, other) {
            (Signal::Value(vx), Signal::Value(vy)) => Signal::Value(value(vx, vy)),
            (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency),
            (Signal::Response(rx, lx), Signal::Response(ry, ly)) => {
                Signal::Response(response(rx, ry), min(lx, ly) + latency)
            }
            (Signal::Response(rx, lx), Signal::Value(_)) => {
                Signal::Response(response(rx, Complex64::new(0.0, 0.0)), lx + latency)
            }
            (Signal::Value(_), Signal::Response(ry, ly)) => {
                Signal::Response(response(Complex64::new(0.0, 0.0), ry), ly + latency)
            }
            (Signal::Response(_, lx), Signal::Latency(ly)) => {
                Signal::Latency(min(lx, ly) + latency)
            }
            (Signal::Latency(lx), Signal::Response(_, ly)) => {
                Signal::Latency(min(lx, ly) + latency)
            }
            (Signal::Latency(lx), _) => Signal::Latency(lx + latency),
            (Signal::Response(_, lx), _) => Signal::Latency(lx + latency),
            (_, Signal::Latency(ly)) => Signal::Latency(ly + latency),
            (_, Signal::Response(_, ly)) => Signal::Latency(ly + latency),
            _ => Signal::Unknown,
        }
    }
}

/// One `Signal` per channel, with inline storage for up to 32 channels.
pub type SignalFrame = TinyVec<[Signal; 32]>;

/// Create a frame of `size` channels, all initialized to `Unknown`.
pub fn new_signal_frame(size: usize) -> SignalFrame {
    let mut frame = TinyVec::with_capacity(size);
    frame.resize(size, Signal::Unknown);
    frame
}

/// Copy `n` channels starting at index `i` out of `source` into a new
/// frame. Panics if `i + n` exceeds `source.len()`.
pub fn copy_signal_frame(source: &SignalFrame, i: usize, n: usize) -> SignalFrame {
    let mut frame = new_signal_frame(n);
    frame[0..n].copy_from_slice(&source[i..i + n]);
    frame
}

/// How a node routes its input channels to its output channels.
/// - `Arbitrary`: every output may depend nonlinearly on all inputs.
/// - `Split`: inputs are repeated cyclically across the outputs.
/// - `Join`: bundles of inputs are averaged down into each output.
pub enum Routing {
    Arbitrary,
    Split,
    Join,
}

impl Routing {
    /// Propagate an input frame through this routing, producing a frame of
    /// `outputs` channels. An empty input yields an all-`Unknown` frame.
    pub fn propagate(&self, input: &SignalFrame, outputs: usize) -> SignalFrame {
        let mut output = new_signal_frame(outputs);
        if input.is_empty() {
            return output;
        }
        match self {
            Routing::Arbitrary => {
                // Fold all inputs nonlinearly; every output channel gets
                // the same combined result.
                let mut combo = input[0].distort(0.0);
                for i in 1..input.len() {
                    combo = combo.combine_nonlinear(input[i], 0.0);
                }
                output.fill(combo);
            }
            Routing::Split => {
                // Cycle the inputs across the outputs.
                for i in 0..outputs {
                    output[i] = input[i % input.len()];
                }
            }
            Routing::Join => {
                // Sum each bundle of inputs linearly, then rescale so the
                // join acts as an average rather than a plain sum.
                let bundle = input.len() / output.len();
                for i in 0..outputs {
                    let mut combo = input[i];
                    for j in 1..bundle {
                        combo = combo.combine_linear(
                            input[i + j * outputs],
                            0.0,
                            |x, y| x + y,
                            |x, y| x + y,
                        );
                    }
                    output[i] = combo.scale(output.len() as f64 / input.len() as f64);
                }
            }
        }
        output
    }
}
use super::math::*; use num_complex::Complex64; use tinyvec::TinyVec; #[derive(Clone, Copy)] pub enum Signal { Unknown, Value(f64), Latency(f64), Response(Complex64, f64), } impl Default for Signal { fn default() -> Signal { Signal::Unknown } } impl Signal { pub fn filter(&self, latency: f64, filter: impl Fn(Complex64) -> Complex64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(response, l) => Signal::Response(filter(*response), l + latency), _ => Signal::Unknown, } } pub fn distort(&self, latency: f64) -> S
pub fn delay(&self, latency: f64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(response, l) => Signal::Response(*response, l + latency), x => *x, } } pub fn scale(&self, factor: f64) -> Signal { match self { Signal::Value(x) => Signal::Value(x * factor), Signal::Response(response, latency) => Signal::Response(response * factor, *latency), x => *x, } } pub fn combine_nonlinear(&self, other: Signal, latency: f64) -> Signal { match (self.distort(0.0), other.distort(0.0)) { (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency), (Signal::Latency(lx), _) => Signal::Latency(lx + latency), (_, Signal::Latency(ly)) => Signal::Latency(ly + latency), _ => Signal::Unknown, } } pub fn combine_linear( &self, other: Signal, latency: f64, value: impl Fn(f64, f64) -> f64, response: impl Fn(Complex64, Complex64) -> Complex64, ) -> Signal { match (*self, other) { (Signal::Value(vx), Signal::Value(vy)) => Signal::Value(value(vx, vy)), (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency), (Signal::Response(rx, lx), Signal::Response(ry, ly)) => { Signal::Response(response(rx, ry), min(lx, ly) + latency) } (Signal::Response(rx, lx), Signal::Value(_)) => { Signal::Response(response(rx, Complex64::new(0.0, 0.0)), lx + latency) } (Signal::Value(_), Signal::Response(ry, ly)) => { Signal::Response(response(Complex64::new(0.0, 0.0), ry), ly + latency) } (Signal::Response(_, lx), Signal::Latency(ly)) => { Signal::Latency(min(lx, ly) + latency) } (Signal::Latency(lx), Signal::Response(_, ly)) => { Signal::Latency(min(lx, ly) + latency) } (Signal::Latency(lx), _) => Signal::Latency(lx + latency), (Signal::Response(_, lx), _) => Signal::Latency(lx + latency), (_, Signal::Latency(ly)) => Signal::Latency(ly + latency), (_, Signal::Response(_, ly)) => Signal::Latency(ly + latency), _ => Signal::Unknown, } } } pub type SignalFrame = TinyVec<[Signal; 32]>; pub fn new_signal_frame(size: usize) 
-> SignalFrame { let mut frame = TinyVec::with_capacity(size); frame.resize(size, Signal::Unknown); frame } pub fn copy_signal_frame(source: &SignalFrame, i: usize, n: usize) -> SignalFrame { let mut frame = new_signal_frame(n); frame[0..n].copy_from_slice(&source[i..i + n]); frame } pub enum Routing { Arbitrary, Split, Join, } impl Routing { pub fn propagate(&self, input: &SignalFrame, outputs: usize) -> SignalFrame { let mut output = new_signal_frame(outputs); if input.is_empty() { return output; } match self { Routing::Arbitrary => { let mut combo = input[0].distort(0.0); for i in 1..input.len() { combo = combo.combine_nonlinear(input[i], 0.0); } output.fill(combo); } Routing::Split => { for i in 0..outputs { output[i] = input[i % input.len()]; } } Routing::Join => { let bundle = input.len() / output.len(); for i in 0..outputs { let mut combo = input[i]; for j in 1..bundle { combo = combo.combine_linear( input[i + j * outputs], 0.0, |x, y| x + y, |x, y| x + y, ); } output[i] = combo.scale(output.len() as f64 / input.len() as f64); } } } output } }
ignal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(_, l) => Signal::Latency(l + latency), _ => Signal::Unknown, } }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn shape_fn<S: Fn(f64) -> f64>(f: S) -> An<ShaperFn<f64, S>> {\n\n super::prelude::shape_fn(f)\n\n}\n\n\n\n/// Shape signal.\n", "file_path": "src/hacker.rs", "rank": 0, "score": 177753.0561621905 }, { "content": "#[inline]\n\npub fn declick_s(t: f64) -> An<...
Rust
src/connection/peer_provider/k8s.rs
chenfisher/c19-1
f7f22aadb325d218ff24b754c5858aa567ec097b
use crate::connection::peer_provider::{PeerProvider, Peer}; use futures::{StreamExt, TryStreamExt}; use k8s_openapi::api::core::v1::Pod; use kube::{ api::{Api, ListParams, Meta, WatchEvent}, Client, }; use log::error; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::sync::{Arc, RwLock}; use std::error::Error as StdError; type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>; #[derive(Serialize, Deserialize, Debug)] #[serde(default)] pub struct K8s { selector: HashMap<String, String>, namespace: String, #[serde(skip_serializing, skip_deserializing)] peers: Arc<RwLock<HashMap<String, Peer>>>, } impl std::default::Default for K8s { fn default() -> Self { K8s { selector: Default::default(), namespace: "default".to_string(), peers: Default::default(), } } } impl K8s { fn selector(&self) -> String { self.selector .iter() .fold(String::new(), |s, (k, v)| format!("{},{}={}", s, k, v)) .strip_prefix(",") .unwrap_or("") .to_string() } fn ip(pod: &Pod) -> Option<Peer> { pod.status.as_ref().and_then(|status| { status .pod_ip .as_ref() .and_then(|ip| ip.parse().ok()) }) } } #[typetag::serde] impl PeerProvider for K8s { fn init(&self) -> Result<()> { let selector = self.selector(); let peers = self.peers.clone(); let namespace = self.namespace.clone(); tokio::spawn(async move { let client = Client::try_default().await?; let pods: Api<Pod> = if namespace == ":all" { Api::all(client) } else { Api::namespaced(client, namespace.as_ref()) }; let lp = ListParams::default().labels(&selector); let mut events = pods.watch(&lp, "0").await?.boxed(); while let Some(event) = events.try_next().await? 
{ let event = &event; match event { WatchEvent::Added(pod) | WatchEvent::Modified(pod) => { if let Some(ip) = K8s::ip(pod) { peers.write().unwrap().insert( Meta::meta(pod).uid.as_ref().unwrap().clone(), ip, ); } } WatchEvent::Deleted(pod) => { peers .write() .unwrap() .remove(Meta::meta(pod).uid.as_ref().unwrap()); } _ => error!("Some error occured while receiving pod event"), } } Ok::<_, kube::Error>(()) }); Ok(()) } fn get(&self) -> Vec<Peer> { self.peers .read() .unwrap() .values() .map(|value| value.clone()) .collect() } }
use crate::connection::peer_provider::{PeerProvider, Peer}; use futures::{StreamExt, TryStreamExt}; use k8s_openapi::api::core::v1::Pod; use kube::{ api::{Api, ListParams, Meta, WatchEvent}, Client, }; use log::error; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::sync::{Arc, RwLock}; use std::error::Error as StdError; type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>; #[derive(Serialize, Deserialize, Debug)] #[serde(default)] pub struct K8s { selector: HashMap<String, String>, namespace: String, #[serde(skip_serializing, skip_deserializing)] peers: Arc<RwLock<HashMap<String, Peer>>>, } impl std::default::Default for K8s { fn default() -> Self { K8s { selector: Default::default(), namespace: "default".to_string(), peers: Default::default(), } } } impl K8s { fn selector(&self) -> String { self.selector .iter() .fold(String::new(), |s, (k, v)| format!("{},{}={}", s, k, v)) .strip_prefix(",") .unwrap_or("") .to_string() } fn ip(pod: &Pod) -> Option<Peer> { pod.status.as_ref().and_then(|status| { status .pod_ip .as_ref() .and_then(|ip| ip.parse().ok()) }) } } #[typetag::serde] impl PeerProvider for K8s { fn init(&self) -> Result<()> { let selector = self.selector(); let peers = self.pee
.values() .map(|value| value.clone()) .collect() } }
rs.clone(); let namespace = self.namespace.clone(); tokio::spawn(async move { let client = Client::try_default().await?; let pods: Api<Pod> = if namespace == ":all" { Api::all(client) } else { Api::namespaced(client, namespace.as_ref()) }; let lp = ListParams::default().labels(&selector); let mut events = pods.watch(&lp, "0").await?.boxed(); while let Some(event) = events.try_next().await? { let event = &event; match event { WatchEvent::Added(pod) | WatchEvent::Modified(pod) => { if let Some(ip) = K8s::ip(pod) { peers.write().unwrap().insert( Meta::meta(pod).uid.as_ref().unwrap().clone(), ip, ); } } WatchEvent::Deleted(pod) => { peers .write() .unwrap() .remove(Meta::meta(pod).uid.as_ref().unwrap()); } _ => error!("Some error occured while receiving pod event"), } } Ok::<_, kube::Error>(()) }); Ok(()) } fn get(&self) -> Vec<Peer> { self.peers .read() .unwrap()
random
[ { "content": "/// Initializes the state and runs the connection and agent layers.\n\n///\n\n/// The state is given a chance to be initialized by running state::init\n\n/// on the instance. The connection and agent layers are then started while\n\n/// given the initialized state.\n\n///\n\n/// The instances for ...
Rust
Usermode/fileviewer/src/main.rs
ids1024/rust_os
236dfdca5660372ea6d4c3f1eea463dbd2c2945d
extern crate wtk; #[macro_use(kernel_log)] extern crate syscalls; mod hexview; mod textview; struct Viewer<'a> { dims: ::std::cell::RefCell<(u32, u32)>, file: ::std::cell::RefCell<&'a mut ::syscalls::vfs::File>, mode: ViewerMode, vscroll: ::wtk::ScrollbarV, hscroll: ::wtk::ScrollbarH, hex: ::hexview::Widget, text: ::textview::Widget, toggle_button: ::wtk::ButtonBcb<'static, ::wtk::Colour>, } enum ViewerMode { Hex, Text, } fn main() { ::wtk::initialise(); let mut file: ::syscalls::vfs::File = match ::syscalls::threads::S_THIS_PROCESS.receive_object("file") { Ok(v) => v, Err(e) => { kernel_log!("TOOD: Handle open error in fileviewer - {:?}", e); return ; }, }; for a in ::std::env::args_os() { kernel_log!("arg = {:?}", a); } let mut args = ::std::env::args_os().skip(0); let path = args.next(); let path: Option<&::std::ffi::OsStr> = path.as_ref().map(|x| x.as_ref()); let path = path.unwrap_or( ::std::ffi::OsStr::new(b"-") ); let use_hex = true; let root = Viewer::new(&mut file, use_hex); let mut window = ::wtk::Window::new_def("File viewer", &root).unwrap(); window.set_title( format!("File Viewer - {:?}", path) ); window.focus(&root); window.set_dims(root.min_width(), 150); window.set_pos(150, 100); window.show(); window.idle_loop(); } impl<'a> Viewer<'a> { fn new(file: &'a mut ::syscalls::vfs::File, init_use_hex: bool) -> Viewer<'a> { let rv = Viewer { dims: ::std::cell::RefCell::new( (0,0) ), file: ::std::cell::RefCell::new(file), mode: if init_use_hex { ViewerMode::Hex } else { ViewerMode::Text }, hex: ::hexview::Widget::new(), text: ::textview::Widget::new(), vscroll: ::wtk::ScrollbarV::new(), hscroll: ::wtk::ScrollbarH::new(), toggle_button: ::wtk::Button::new_boxfn( ::wtk::Colour::theme_body_bg(), |_,_| {} ), }; if init_use_hex { let mut file = rv.file.borrow_mut(); file.set_cursor(0); let _ = rv.hex.populate(&mut *file); } else { /* let mut file = rv.file.borrow_mut(); let mut n_lines = 0; let mut max_len = 0; for line in file.split(b'\n') { max_len = 
::std::cmp::max(max_len, line.len()); n_lines += 1; } */ } rv } pub fn min_width(&self) -> u32 { SCROLL_SIZE + self.hex.min_width() + 2*2 } } const SCROLL_SIZE: u32 = 16; impl<'a> ::wtk::Element for Viewer<'a> { fn resize(&self, width: u32, height: u32) { *self.dims.borrow_mut() = (width, height); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; self.vscroll.resize(SCROLL_SIZE, body_height); match self.mode { ViewerMode::Hex => { use std::io::Seek; self.hex.resize(body_width, body_height); let ofs = self.hex.get_start(); let mut file = self.file.borrow_mut(); let _ = file.seek(::std::io::SeekFrom::Start(ofs)).and_then(|_| self.hex.populate(&mut *file)); }, ViewerMode::Text => { self.text.resize(body_width, body_height); let mut file = self.file.borrow_mut(); let _ = self.text.populate(&mut *file); }, } self.hscroll.resize(body_width, SCROLL_SIZE); let file = self.file.borrow(); let filesize = file.get_size(); if filesize > usize::max_value() as u64 { self.vscroll.set_bar( None ); } else if filesize <= self.hex.get_capacity() as u64 { self.vscroll.set_bar( Some( (0,0) ) ); } else { self.vscroll.set_bar( Some( (filesize as usize, self.hex.get_capacity() as usize) ) ); } self.vscroll.set_pos( 0 ); self.hscroll.set_bar( None ); } fn render(&self, surface: ::wtk::surface::SurfaceView, force: bool) { use wtk::geom::Rect; let (width, height) = (surface.width(), surface.height()); assert_eq!( (width,height), *self.dims.borrow() ); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; self.vscroll.render(surface.slice(Rect::new(body_width, 0, SCROLL_SIZE, body_height)), force); let body_view = surface.slice(Rect::new(0, 0, body_width, body_height)); match self.mode { ViewerMode::Hex => self.hex.render(body_view, force), ViewerMode::Text => self.text.render(body_view, force), } self.hscroll.render(surface.slice(Rect::new(0, height - SCROLL_SIZE, body_width, SCROLL_SIZE)), force); } fn with_element_at_pos(&self, pos: 
::wtk::geom::PxPos, dims: ::wtk::geom::PxDims, f: ::wtk::WithEleAtPosCb) -> bool { let x = pos.x.0; let y = pos.y.0; let (width, height) = (dims.w.0, dims.h.0); let body_dims = ::wtk::geom::PxDims::new( width - SCROLL_SIZE, height - SCROLL_SIZE ); let vscroll_pos = ::wtk::geom::PxPos::new(body_dims.w.0, 0); let hscroll_pos = ::wtk::geom::PxPos::new(0, body_dims.h.0); if y < hscroll_pos.y.0 { if x > vscroll_pos.x.0 { self.vscroll.with_element_at_pos(pos - vscroll_pos, ::wtk::geom::PxDims::new(SCROLL_SIZE, body_dims.h.0), f) } else { match self.mode { ViewerMode::Hex => self.hex.with_element_at_pos(pos, body_dims, f), ViewerMode::Text => self.text.with_element_at_pos(pos, body_dims, f), } } } else { if x > body_dims.w.0 { self.toggle_button.with_element_at_pos(pos - body_dims.bottomright(), ::wtk::geom::PxDims::new(SCROLL_SIZE, SCROLL_SIZE), f) } else { self.hscroll.with_element_at_pos(pos - hscroll_pos, ::wtk::geom::PxDims::new(body_dims.w.0, SCROLL_SIZE), f) } } } }
extern crate wtk; #[macro_use(kernel_log)] extern crate syscalls; mod hexview; mod textview; struct Viewer<'a> { dims: ::std::cell::RefCell<(u32, u32)>, file: ::std::cell::RefCell<&'a mut ::syscalls::vfs::File>, mode: ViewerMode, vscroll: ::wtk::ScrollbarV, hscroll: ::wtk::ScrollbarH, hex: ::hexview::Widget, text: ::textview::Widget, toggle_button: ::wtk::ButtonBcb<'static, ::wtk::Colour>, } enum ViewerMode { Hex, Text, } fn main() { ::wtk::initialise(); let mut file: ::syscalls::vfs::File = match ::syscalls::threads::S_THIS_PROCESS.receive_object("file") { Ok(v) => v, Err(e) => { kernel_log!("TOOD: Handle open error in fileviewer - {:?}", e); return ; }, }; for a in ::std::env::args_os() { kernel_log!("arg = {:?}", a); } let mut args = ::std::env::args_os().skip(0); let path = args.next(); let path: Option<&::std::ffi::OsStr> = path.as_ref().map(|x| x.as_ref()); let path = path.unwrap_or( ::std::ffi::OsStr::new(b"-") ); let use_hex = true; let root = Viewer::new(&mut file, use_hex); let mut window = ::wtk::Window::new_def("File viewer", &root).unwrap(); window.set_title( format!("File Viewer - {:?}", path) ); window.focus(&root); window.set_dims(root.min_width(), 150); window.set_pos(150, 100); window.show(); window.idle_loop(); } impl<'a> Viewer<'a> { fn new(file: &'a mut ::syscalls::vfs::File, init_use_hex: bool) -> Viewer<'a> { let rv = Viewer { dims: ::std::cell::RefCell::new( (0,0) ), file: ::std::cell::RefCell::new(file), mode: if init_use_hex { ViewerMode::Hex } else { ViewerMode::Text }, hex: ::hexview::Widget::new(), text: ::textview::Widget::new(), vscroll: ::wtk::ScrollbarV::new(), hscroll: ::wtk::ScrollbarH::new(), toggle_button: ::wtk::Button::new_boxfn( ::wtk::Colour::theme_body_bg(), |_,_| {} ), }; if init_use_hex { let mut file = rv.file.borrow_mut(); file.set_cursor(0); let _ = rv.hex.populate(&mut *file); } else { /* let mut file = rv.file.borrow_mut(); let mut n_lines = 0; let mut max_len = 0; for line in file.split(b'\n') { max_len = 
::std::cmp::max(max_len, line.len()); n_lines += 1; } */ } rv } pub fn min_width(&self) -> u32 { SCROLL_SIZE + self.hex.min_width() + 2*2 } } const SCROLL_SIZE: u32 = 16; impl<'a> ::wtk::Element for Viewer<'a> { fn resize(&self, width: u32, height: u32) { *self.dims.borrow_mut() = (width, height); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; self.vscroll.resize(SCROLL_SIZE, body_height); match self.mode { ViewerMode::Hex => { use std::io::Seek; self.hex.resize(body_width, body_height); let ofs = self.hex.get_start(); let mut file = self.file.borrow_mut(); let _ = file.seek(::std::io::SeekFrom::Start(ofs)).and_then(|_| self.hex.populate(&mut *file)); }, ViewerMode::Text => { self.text.resize(body_width, body_height); let mut file = self.file.borrow_mut(); let _ = self.text.populate(&mut *file); }, } self.hscroll.resize(body_width, SCROLL_SIZE); let file = self.file.borrow(); let filesize = file.get_size(); if filesize > usize::max_value() as u64 { self.vscroll.set_bar( None ); } else if filesize <= self.hex.get_capacity() as u64 { self.vscroll.set_bar( Some( (0,0) ) ); } else { self.vscroll.set_bar( Some( (filesize as usize, self.hex.get_capacity() as usize) ) ); } self.vscroll.set_pos( 0 ); self.hscroll.set_bar( None ); } fn render(&self, surface: ::wtk::surface::SurfaceView, force: bool) { use wtk::geom::Rect; let (width, height) = (surface.width(), surface.height()); assert_eq!( (width,height), *self.dims.borrow() ); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; self.vscroll.render(surface.slice(Rect::new(body_width, 0, SCROLL_SIZE, body_height)), force); let body_view = surface.slice(Rect::new(0, 0, body_width, body_height)); match self.mode { ViewerMode::Hex => self.hex.render(body_view, force), ViewerMode::Text => self.text.render(body_view, force), } self.hscroll.render(surface.slice(Rect::new(0, height - SCROLL_SIZE, body_width, SCROLL_SIZE)), force); } fn with_element_at_pos(&self, pos: 
::wtk::geom::PxPos, dims: ::wtk::geom::PxDims, f: ::wtk::WithEleAtPosCb) -> bool { let x = pos.x.0; let y = pos.y.0; let (width, height) = (dims.w.0, dims.h.0); let body_dims = ::wtk::geom::PxDims::new( width - SCROLL_SIZE, height - SCROLL_SIZE ); let vscroll_pos = ::wtk::geom::PxPos::new(body_dims.w.0, 0); let hscroll_pos = ::wtk::geom::PxPos::new(0, body_dims.h.0); if y < hscroll_pos.y.0 {
}
if x > vscroll_pos.x.0 { self.vscroll.with_element_at_pos(pos - vscroll_pos, ::wtk::geom::PxDims::new(SCROLL_SIZE, body_dims.h.0), f) } else { match self.mode { ViewerMode::Hex => self.hex.with_element_at_pos(pos, body_dims, f), ViewerMode::Text => self.text.with_element_at_pos(pos, body_dims, f), } } } else { if x > body_dims.w.0 { self.toggle_button.with_element_at_pos(pos - body_dims.bottomright(), ::wtk::geom::PxDims::new(SCROLL_SIZE, SCROLL_SIZE), f) } else { self.hscroll.with_element_at_pos(pos - hscroll_pos, ::wtk::geom::PxDims::new(body_dims.w.0, SCROLL_SIZE), f) } } }
function_block-function_prefix_line
[ { "content": "#[inline(never)]\n\npub fn call_object_ref(handle: u32, call: u16, args: &mut Args) -> Result<u64,super::Error>\n\n{\n\n\t// Obtain reference/borrow to object (individually locked), and call the syscall on it\n\n\tget_process_local::<ProcessObjects>().with_object(handle, |obj| {\n\n\t\t//log_trace...
Rust
vchain/src/acc/mod.rs
hkbudb/vchain-demo
8e12ac2d1a3b38cb7009f400b8601b6bfc28f23d
pub mod digest_set; pub mod serde_impl; pub mod utils; pub use ark_bls12_381::{ Bls12_381 as Curve, Fq12, Fr, G1Affine, G1Projective, G2Affine, G2Projective, }; pub type DigestSet = digest_set::DigestSet<Fr>; use crate::digest::{Digest, Digestible}; use crate::set::{MultiSet, SetElement}; use anyhow::{self, bail, ensure, Context}; use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; use ark_ff::{Field, One, PrimeField, ToBytes, Zero}; use ark_poly::{univariate::DensePolynomial, Polynomial}; use core::any::Any; use core::str::FromStr; use rayon::prelude::*; use serde::{Deserialize, Serialize}; use utils::{xgcd, FixedBaseCurvePow, FixedBaseScalarPow}; #[cfg(test)] const GS_VEC_LEN: usize = 0; #[cfg(not(test))] const GS_VEC_LEN: usize = 5000; lazy_static! { static ref PUB_Q: Fr = Fr::from_str("480721077433357505777975950918924200361380912084288598463024400624539293706").unwrap(); static ref PRI_S: Fr = Fr::from_str("259535143263514268207918833918737523409").unwrap(); static ref G1_POWER: FixedBaseCurvePow<G1Projective> = FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator()); static ref G2_POWER: FixedBaseCurvePow<G2Projective> = FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator()); static ref PRI_S_POWER: FixedBaseScalarPow<Fr> = FixedBaseScalarPow::build(&PRI_S); static ref G1_S_VEC: Vec<G1Affine> = { info!("Initialize G1_S_VEC..."); let timer = howlong::ProcessCPUTimer::new(); let mut res: Vec<G1Affine> = Vec::with_capacity(GS_VEC_LEN); (0..GS_VEC_LEN) .into_par_iter() .map(|i| get_g1s(Fr::from(i as u64))) .collect_into_vec(&mut res); info!("Done in {}.", timer.elapsed()); res }; static ref G2_S_VEC: Vec<G2Affine> = { info!("Initialize G2_S_VEC..."); let timer = howlong::ProcessCPUTimer::new(); let mut res: Vec<G2Affine> = Vec::with_capacity(GS_VEC_LEN); (0..GS_VEC_LEN) .into_par_iter() .map(|i| get_g2s(Fr::from(i as u64))) .collect_into_vec(&mut res); info!("Done in {}.", timer.elapsed()); res }; static ref 
E_G_G: Fq12 = Curve::pairing( G1Affine::prime_subgroup_generator(), G2Affine::prime_subgroup_generator() ); } fn get_g1s(coeff: Fr) -> G1Affine { let si = PRI_S_POWER.apply(&coeff); G1_POWER.apply(&si).into_affine() } fn get_g2s(coeff: Fr) -> G2Affine { let si = PRI_S_POWER.apply(&coeff); G2_POWER.apply(&si).into_affine() } #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] pub enum Type { ACC1, ACC2, } pub trait Accumulator { const TYPE: Type; type Proof; fn cal_acc_g1_sk<T: SetElement>(set: &MultiSet<T>) -> G1Affine { Self::cal_acc_g1_sk_d(&DigestSet::new(set)) } fn cal_acc_g1<T: SetElement>(set: &MultiSet<T>) -> G1Affine { Self::cal_acc_g1_d(&DigestSet::new(set)) } fn cal_acc_g2_sk<T: SetElement>(set: &MultiSet<T>) -> G2Affine { Self::cal_acc_g2_sk_d(&DigestSet::new(set)) } fn cal_acc_g2<T: SetElement>(set: &MultiSet<T>) -> G2Affine { Self::cal_acc_g2_d(&DigestSet::new(set)) } fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine; fn cal_acc_g1_d(set: &DigestSet) -> G1Affine; fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine; fn cal_acc_g2_d(set: &DigestSet) -> G2Affine; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof>; } pub trait AccumulatorProof: Eq + PartialEq { const TYPE: Type; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> where Self: core::marker::Sized; fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()>; fn as_any(&self) -> &dyn Any; } pub struct Acc1; impl Acc1 { fn poly_to_g1(poly: DensePolynomial<Fr>) -> G1Affine { let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1); for (i, coeff) in poly.coeffs.iter().enumerate() { if coeff.is_zero() { continue; } idxes.push(i); } let mut bases: Vec<G1Affine> = Vec::with_capacity(idxes.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len()); (0..idxes.len()) .into_par_iter() .map(|i| { G1_S_VEC.get(i).copied().unwrap_or_else(|| { trace!("access g1 pub key at {}", i); 
get_g1s(Fr::from(i as u64)) }) }) .collect_into_vec(&mut bases); (0..idxes.len()) .into_par_iter() .map(|i| poly.coeffs[i].into_repr()) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn poly_to_g2(poly: DensePolynomial<Fr>) -> G2Affine { let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1); for (i, coeff) in poly.coeffs.iter().enumerate() { if coeff.is_zero() { continue; } idxes.push(i); } let mut bases: Vec<G2Affine> = Vec::with_capacity(idxes.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len()); (0..idxes.len()) .into_par_iter() .map(|i| { G2_S_VEC.get(i).copied().unwrap_or_else(|| { trace!("access g2 pub key at {}", i); get_g2s(Fr::from(i as u64)) }) }) .collect_into_vec(&mut bases); (0..idxes.len()) .into_par_iter() .map(|i| poly.coeffs[i].into_repr()) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Acc1Proof { #[serde(with = "serde_impl")] f1: G2Affine, #[serde(with = "serde_impl")] f2: G2Affine, } impl AccumulatorProof for Acc1Proof { const TYPE: Type = Type::ACC1; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> { Acc1::gen_proof(set1, set2) } fn combine_proof(&mut self, _other: &Self) -> anyhow::Result<()> { bail!("invalid operation"); } fn as_any(&self) -> &dyn Any { self } } impl Acc1Proof { pub fn verify(&self, acc1: &G1Affine, acc2: &G1Affine) -> bool { Curve::product_of_pairings(&[ ((*acc1).into(), self.f1.into()), ((*acc2).into(), self.f2.into()), ]) == *E_G_G } } impl Accumulator for Acc1 { const TYPE: Type = Type::ACC1; type Proof = Acc1Proof; fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { let x = set .par_iter() .map(|(v, exp)| { let s = *PRI_S + v; let exp = [*exp as u64]; s.pow(&exp) }) .reduce(Fr::one, |a, b| a * &b); G1_POWER.apply(&x).into_affine() } fn cal_acc_g1_d(set: 
&DigestSet) -> G1Affine { let poly = set.expand_to_poly(); Self::poly_to_g1(poly) } fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { let x = set .par_iter() .map(|(v, exp)| { let s = *PRI_S + v; let exp = [*exp as u64]; s.pow(&exp) }) .reduce(Fr::one, |a, b| a * &b); G2_POWER.apply(&x).into_affine() } fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { let poly = set.expand_to_poly(); Self::poly_to_g2(poly) } fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> { let poly1 = set1.expand_to_poly(); let poly2 = set2.expand_to_poly(); let (g, x, y) = xgcd(poly1, poly2).context("failed to compute xgcd")?; ensure!(g.degree() == 0, "cannot generate proof"); Ok(Acc1Proof { f1: Self::poly_to_g2(&x / &g), f2: Self::poly_to_g2(&y / &g), }) } } pub struct Acc2; #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Acc2Proof { #[serde(with = "serde_impl")] f: G1Affine, } impl AccumulatorProof for Acc2Proof { const TYPE: Type = Type::ACC2; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> { Acc2::gen_proof(set1, set2) } fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()> { let mut f = self.f.into_projective(); f.add_assign_mixed(&other.f); self.f = f.into_affine(); Ok(()) } fn as_any(&self) -> &dyn Any { self } } impl Acc2Proof { pub fn verify(&self, acc1: &G1Affine, acc2: &G2Affine) -> bool { let a = Curve::pairing(*acc1, *acc2); let b = Curve::pairing(self.f, G2Affine::prime_subgroup_generator()); a == b } } impl Accumulator for Acc2 { const TYPE: Type = Type::ACC2; type Proof = Acc2Proof; fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { let x = set .par_iter() .map(|(a, b)| { let s = PRI_S_POWER.apply(a); s * &Fr::from(*b) }) .reduce(Fr::zero, |a, b| a + &b); G1_POWER.apply(&x).into_affine() } fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { let mut bases: Vec<G1Affine> = Vec::with_capacity(set.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len()); (0..set.len()) 
.into_par_iter() .map(|i| get_g1s(set[i].0)) .collect_into_vec(&mut bases); (0..set.len()) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64)) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { let x = set .par_iter() .map(|(a, b)| { let s = PRI_S_POWER.apply(&(*PUB_Q - a)); s * &Fr::from(*b) }) .reduce(Fr::zero, |a, b| a + &b); G2_POWER.apply(&x).into_affine() } fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { let mut bases: Vec<G2Affine> = Vec::with_capacity(set.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len()); (0..set.len()) .into_par_iter() .map(|i| get_g2s(*PUB_Q - &set[i].0)) .collect_into_vec(&mut bases); (0..set.len()) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64)) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> { let produce_size = set1.len() * set2.len(); let mut product: Vec<(Fr, u64)> = Vec::with_capacity(produce_size); (0..produce_size) .into_par_iter() .map(|i| { let set1idx = i / set2.len(); let set2idx = i % set2.len(); let (s1, q1) = set1[set1idx]; let (s2, q2) = set2[set2idx]; (*PUB_Q + &s1 - &s2, (q1 * q2) as u64) }) .collect_into_vec(&mut product); if product.par_iter().any(|(x, _)| *x == *PUB_Q) { bail!("cannot generate proof"); } let mut bases: Vec<G1Affine> = Vec::with_capacity(produce_size); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(produce_size); (0..produce_size) .into_par_iter() .map(|i| get_g1s(product[i].0)) .collect_into_vec(&mut bases); (0..produce_size) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(product[i].1)) .collect_into_vec(&mut scalars); let f = VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine(); Ok(Acc2Proof { f }) } } 
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub enum Proof { ACC1(Box<Acc1Proof>), ACC2(Box<Acc2Proof>), } impl Digestible for G1Affine { fn to_digest(&self) -> Digest { let mut buf = Vec::<u8>::new(); self.write(&mut buf) .unwrap_or_else(|_| panic!("failed to serialize {:?}", self)); buf.to_digest() } } #[cfg(test)] mod tests { use super::*; fn init_logger() { let _ = env_logger::builder().is_test(true).try_init(); } #[test] fn test_cal_acc() { init_logger(); let set = MultiSet::from_vec(vec![1, 1, 2, 3, 4, 4, 5, 6, 6, 7, 8, 9]); assert_eq!(Acc1::cal_acc_g1(&set), Acc1::cal_acc_g1_sk(&set)); assert_eq!(Acc1::cal_acc_g2(&set), Acc1::cal_acc_g2_sk(&set)); assert_eq!(Acc2::cal_acc_g1(&set), Acc2::cal_acc_g1_sk(&set)); assert_eq!(Acc2::cal_acc_g2(&set), Acc2::cal_acc_g2_sk(&set)); } #[test] fn test_acc1_proof() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); let proof = Acc1::gen_proof(&set1, &set2).unwrap(); let acc1 = Acc1::cal_acc_g1_sk_d(&set1); let acc2 = Acc1::cal_acc_g1_sk_d(&set2); assert!(proof.verify(&acc1, &acc2)); assert!(Acc1::gen_proof(&set1, &set3).is_err()); } #[test] fn test_acc2_proof() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); let proof = Acc2::gen_proof(&set1, &set2).unwrap(); let acc1 = Acc2::cal_acc_g1_sk_d(&set1); let acc2 = Acc2::cal_acc_g2_sk_d(&set2); assert!(proof.verify(&acc1, &acc2)); assert!(Acc2::gen_proof(&set1, &set3).is_err()); } #[test] fn test_acc2_proof_sum() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![7, 8, 9])); let mut proof1 = Acc2::gen_proof(&set1, 
&set2).unwrap(); let proof2 = Acc2::gen_proof(&set1, &set3).unwrap(); proof1.combine_proof(&proof2).unwrap(); let acc1 = Acc2::cal_acc_g1_sk_d(&set1); let acc2 = Acc2::cal_acc_g2_sk_d(&set2); let acc3 = Acc2::cal_acc_g2_sk_d(&set3); let acc4 = { let mut acc = acc2.into_projective(); acc.add_assign_mixed(&acc3); acc.into_affine() }; assert!(proof1.verify(&acc1, &acc4)); } }
pub mod digest_set; pub mod serde_impl; pub mod utils; pub use ark_bls12_381::{ Bls12_381 as Curve, Fq12, Fr, G1Affine, G1Projective, G2Affine, G2Projective, }; pub type DigestSet = digest_set::DigestSet<Fr>; use crate::digest::{Digest, Digestible}; use crate::set::{MultiSet, SetElement}; use anyhow::{self, bail, ensure, Context}; use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; use ark_ff::{Field, One, PrimeField, ToBytes, Zero}; use ark_poly::{univariate::DensePolynomial, Polynomial}; use core::any::Any; use core::str::FromStr; use rayon::prelude::*; use serde::{Deserialize, Serialize}; use utils::{xgcd, FixedBaseCurvePow, FixedBaseScalarPow}; #[cfg(test)] const GS_VEC_LEN: usize = 0; #[cfg(not(test))] const GS_VEC_LEN: usize = 5000; lazy_static! { static ref PUB_Q: Fr = Fr::from_str("480721077433357505777975950918924200361380912084288598463024400624539293706").unwrap(); static ref PRI_S: Fr = Fr::from_str("259535143263514268207918833918737523409").unwrap(); static ref G1_POWER: FixedBaseCurvePow<G1Projective> = FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator()); static ref G2_POWER: FixedBaseCurvePow<G2Projective> = FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator()); static ref PRI_S_POWER: FixedBaseScalarPow<Fr> = FixedBaseScalarPow::build(&PRI_S); static ref G1_S_VEC: Vec<G1Affine> = { info!("Initialize G1_S_VEC..."); let timer = howlong::ProcessCPUTimer::new(); let mut res: Vec<G1Affine> = Vec::with_capacity(GS_VEC_LEN); (0..GS_VEC_LEN) .into_par_iter() .map(|i| get_g1s(Fr::from(i as u64))) .collect_into_vec(&mut res); info!("Done in {}.", timer.elapsed()); res }; static ref G2_S_VEC: Vec<G2Affine> = { info!("Initialize G2_S_VEC..."); let timer = howlong::ProcessCPUTimer::new(); let mut res: Vec<G2Affine> = Vec::with_capacity(GS_VEC_LEN); (0..GS_VEC_LEN) .into_par_iter() .map(|i| get_g2s(Fr::from(i as u64))) .collect_into_vec(&mut res); info!("Done in {}.", timer.elapsed()); res }; static ref 
E_G_G: Fq12 = Curve::pairing( G1Affine::prime_subgroup_generator(), G2Affine::prime_subgroup_generator() ); } fn get_g1s(coeff: Fr) -> G1Affine { let si = PRI_S_POWER.apply(&coeff); G1_POWER.apply(&si).into_affine() } fn get_g2s(coeff: Fr) -> G2Affine { let si = PRI_S_POWER.apply(&coeff); G2_POWER.apply(&si).into_affine() } #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] pub enum Type { ACC1, ACC2, } pub trait Accumulator { const TYPE: Type; type Proof; fn cal_acc_g1_sk<T: SetElement>(set: &MultiSet<T>) -> G1Affine { Self::cal_acc_g1_sk_d(&DigestSet::new(set)) } fn cal_acc_g1<T: SetElement>(set: &MultiSet<T>) -> G1Affine { Self::cal_acc_g1_d(&DigestSet::new(set)) } fn cal_acc_g2_sk<T: SetElement>(set: &MultiSet<T>) -> G2Affine { Self::cal_acc_g2_sk_d(&DigestSet::new(set)) } fn cal_acc_g2<T: SetElement>(set: &MultiSet<T>) -> G2Affine { Self::cal_acc_g2_d(&DigestSet::new(set)) } fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine; fn cal_acc_g1_d(set: &DigestSet) -> G1Affine; fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine; fn cal_acc_g2_d(set: &DigestSet) -> G2Affine; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof>; } pub trait AccumulatorProof: Eq + PartialEq { const TYPE: Type; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> where Self: core::marker::Sized; fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()>; fn as_any(&self) -> &dyn Any; } pub struct Acc1; impl Acc1 { fn poly_to_g1(poly: DensePolynomial<Fr>) -> G1Affine { let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1); for (i, coeff) in poly.coeffs.iter().enumerate() { if coeff.is_zero() { continue; } idxes.push(i); } let mut bases: Vec<G1Affine> = Vec::with_capacity(idxes.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len()); (0..idxes.len()) .into_par_iter() .map(|i| { G1_S_VEC.get(i).copied().unwrap_or_else(|| { trace!("access g1 pub key at {}", i); 
get_g1s(Fr::from(i as u64)) }) }) .collect_into_vec(&mut bases); (0..idxes.len()) .into_par_iter() .map(|i| poly.coeffs[i].into_repr()) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn poly_to_g2(poly: DensePolynomial<Fr>) -> G2Affine { let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1); for (i, coeff) in poly.coeffs.iter().enumerate() { if coeff.is_zero() { continue; } idxes.push(i); } let mut bases: Vec<G2Affine> = Vec::with_capacity(idxes.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len()); (0..idxes.len()) .into_par_iter() .map(|i| { G2_S_VEC.get(i).copied().unwrap_or_else(|| { trace!("access g2 pub key at {}", i); get_g2s(Fr::from(i as u64)) }) }) .collect_into_vec(&mut bases); (0..idxes.len()) .into_par_iter() .map(|i| poly.coeffs[i].into_repr()) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Acc1Proof { #[serde(with = "serde_impl")] f1: G2Affine, #[serde(with = "serde_impl")] f2: G2Affine, } impl AccumulatorProof for Acc1Proof { const TYPE: Type = Type::ACC1; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> { Acc1::gen_proof(set1, set2) } fn combine_proof(&mut self, _other: &Self) -> anyhow::Result<()> { bail!("invalid operation"); } fn as_any(&self) -> &dyn Any { self } } impl Acc1Proof { pub fn verify(&self, acc1: &G1Affine, acc2: &G1Aff
.map(|i| <Fr as PrimeField>::BigInt::from(product[i].1)) .collect_into_vec(&mut scalars); let f = VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine(); Ok(Acc2Proof { f }) } } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub enum Proof { ACC1(Box<Acc1Proof>), ACC2(Box<Acc2Proof>), } impl Digestible for G1Affine { fn to_digest(&self) -> Digest { let mut buf = Vec::<u8>::new(); self.write(&mut buf) .unwrap_or_else(|_| panic!("failed to serialize {:?}", self)); buf.to_digest() } } #[cfg(test)] mod tests { use super::*; fn init_logger() { let _ = env_logger::builder().is_test(true).try_init(); } #[test] fn test_cal_acc() { init_logger(); let set = MultiSet::from_vec(vec![1, 1, 2, 3, 4, 4, 5, 6, 6, 7, 8, 9]); assert_eq!(Acc1::cal_acc_g1(&set), Acc1::cal_acc_g1_sk(&set)); assert_eq!(Acc1::cal_acc_g2(&set), Acc1::cal_acc_g2_sk(&set)); assert_eq!(Acc2::cal_acc_g1(&set), Acc2::cal_acc_g1_sk(&set)); assert_eq!(Acc2::cal_acc_g2(&set), Acc2::cal_acc_g2_sk(&set)); } #[test] fn test_acc1_proof() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); let proof = Acc1::gen_proof(&set1, &set2).unwrap(); let acc1 = Acc1::cal_acc_g1_sk_d(&set1); let acc2 = Acc1::cal_acc_g1_sk_d(&set2); assert!(proof.verify(&acc1, &acc2)); assert!(Acc1::gen_proof(&set1, &set3).is_err()); } #[test] fn test_acc2_proof() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); let proof = Acc2::gen_proof(&set1, &set2).unwrap(); let acc1 = Acc2::cal_acc_g1_sk_d(&set1); let acc2 = Acc2::cal_acc_g2_sk_d(&set2); assert!(proof.verify(&acc1, &acc2)); assert!(Acc2::gen_proof(&set1, &set3).is_err()); } #[test] fn test_acc2_proof_sum() { init_logger(); let set1 = 
DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![7, 8, 9])); let mut proof1 = Acc2::gen_proof(&set1, &set2).unwrap(); let proof2 = Acc2::gen_proof(&set1, &set3).unwrap(); proof1.combine_proof(&proof2).unwrap(); let acc1 = Acc2::cal_acc_g1_sk_d(&set1); let acc2 = Acc2::cal_acc_g2_sk_d(&set2); let acc3 = Acc2::cal_acc_g2_sk_d(&set3); let acc4 = { let mut acc = acc2.into_projective(); acc.add_assign_mixed(&acc3); acc.into_affine() }; assert!(proof1.verify(&acc1, &acc4)); } }
ine) -> bool { Curve::product_of_pairings(&[ ((*acc1).into(), self.f1.into()), ((*acc2).into(), self.f2.into()), ]) == *E_G_G } } impl Accumulator for Acc1 { const TYPE: Type = Type::ACC1; type Proof = Acc1Proof; fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { let x = set .par_iter() .map(|(v, exp)| { let s = *PRI_S + v; let exp = [*exp as u64]; s.pow(&exp) }) .reduce(Fr::one, |a, b| a * &b); G1_POWER.apply(&x).into_affine() } fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { let poly = set.expand_to_poly(); Self::poly_to_g1(poly) } fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { let x = set .par_iter() .map(|(v, exp)| { let s = *PRI_S + v; let exp = [*exp as u64]; s.pow(&exp) }) .reduce(Fr::one, |a, b| a * &b); G2_POWER.apply(&x).into_affine() } fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { let poly = set.expand_to_poly(); Self::poly_to_g2(poly) } fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> { let poly1 = set1.expand_to_poly(); let poly2 = set2.expand_to_poly(); let (g, x, y) = xgcd(poly1, poly2).context("failed to compute xgcd")?; ensure!(g.degree() == 0, "cannot generate proof"); Ok(Acc1Proof { f1: Self::poly_to_g2(&x / &g), f2: Self::poly_to_g2(&y / &g), }) } } pub struct Acc2; #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Acc2Proof { #[serde(with = "serde_impl")] f: G1Affine, } impl AccumulatorProof for Acc2Proof { const TYPE: Type = Type::ACC2; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> { Acc2::gen_proof(set1, set2) } fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()> { let mut f = self.f.into_projective(); f.add_assign_mixed(&other.f); self.f = f.into_affine(); Ok(()) } fn as_any(&self) -> &dyn Any { self } } impl Acc2Proof { pub fn verify(&self, acc1: &G1Affine, acc2: &G2Affine) -> bool { let a = Curve::pairing(*acc1, *acc2); let b = Curve::pairing(self.f, G2Affine::prime_subgroup_generator()); a == b } } impl Accumulator for Acc2 { const TYPE: Type = 
Type::ACC2; type Proof = Acc2Proof; fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { let x = set .par_iter() .map(|(a, b)| { let s = PRI_S_POWER.apply(a); s * &Fr::from(*b) }) .reduce(Fr::zero, |a, b| a + &b); G1_POWER.apply(&x).into_affine() } fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { let mut bases: Vec<G1Affine> = Vec::with_capacity(set.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len()); (0..set.len()) .into_par_iter() .map(|i| get_g1s(set[i].0)) .collect_into_vec(&mut bases); (0..set.len()) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64)) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { let x = set .par_iter() .map(|(a, b)| { let s = PRI_S_POWER.apply(&(*PUB_Q - a)); s * &Fr::from(*b) }) .reduce(Fr::zero, |a, b| a + &b); G2_POWER.apply(&x).into_affine() } fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { let mut bases: Vec<G2Affine> = Vec::with_capacity(set.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len()); (0..set.len()) .into_par_iter() .map(|i| get_g2s(*PUB_Q - &set[i].0)) .collect_into_vec(&mut bases); (0..set.len()) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64)) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> { let produce_size = set1.len() * set2.len(); let mut product: Vec<(Fr, u64)> = Vec::with_capacity(produce_size); (0..produce_size) .into_par_iter() .map(|i| { let set1idx = i / set2.len(); let set2idx = i % set2.len(); let (s1, q1) = set1[set1idx]; let (s2, q2) = set2[set2idx]; (*PUB_Q + &s1 - &s2, (q1 * q2) as u64) }) .collect_into_vec(&mut product); if product.par_iter().any(|(x, _)| *x == *PUB_Q) { bail!("cannot generate proof"); } let mut bases: Vec<G1Affine> = 
Vec::with_capacity(produce_size); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(produce_size); (0..produce_size) .into_par_iter() .map(|i| get_g1s(product[i].0)) .collect_into_vec(&mut bases); (0..produce_size) .into_par_iter()
random
[ { "content": "pub trait SetElement: Digestible + Clone + Send + Sync + Eq + PartialEq + core::hash::Hash {}\n\n\n\nimpl<T> SetElement for T where\n\n T: Digestible + Clone + Send + Sync + Eq + PartialEq + core::hash::Hash\n\n{\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Default)]\n\npub struct MultiSet...
Rust
src/sink.rs
ollien/hline
f08d7277003e7428a577e357fe18cc7012391fc2
use crate::print; use crate::print::{Printer, StdoutPrinter}; use grep::searcher::{Searcher, Sink, SinkContext, SinkError, SinkMatch}; use std::fmt::Display; use std::io; use std::panic; use termion::color::{Fg, LightRed}; use thiserror::Error; const PASSTHRU_PANIC_MSG: &str = "passthru is not enabled on the given searcher"; pub(crate) struct ContextPrintingSink<P: Printer> { printer: P, } #[derive(Error, Debug)] pub enum Error { #[error("Print failure: {0}")] PrintFailed( io::Error, ), #[error("{0}")] SearchError( String, ), } impl From<print::Error> for Error { fn from(err: print::Error) -> Self { let io_err = match err { print::Error::BrokenPipe(wrapped) | print::Error::Other(wrapped) => wrapped, }; Error::PrintFailed(io_err) } } impl SinkError for Error { fn error_message<T: Display>(message: T) -> Self { Error::SearchError(message.to_string()) } } impl<P: Printer> ContextPrintingSink<P> { fn get_sink_result_for_print_result(res: print::Result) -> Result<bool, Error> { match res { Err(print::Error::Other(_)) => Err(Error::from(res.unwrap_err())), Err(print::Error::BrokenPipe(_)) => Ok(false), Ok(_) => Ok(true), } } } impl<P: Printer> ContextPrintingSink<P> { #[must_use] pub fn new(printer: P) -> Self { ContextPrintingSink { printer } } fn validate_searcher(searcher: &Searcher) { if !searcher.passthru() { panic!("{}", PASSTHRU_PANIC_MSG) } } } impl Default for ContextPrintingSink<StdoutPrinter> { fn default() -> Self { ContextPrintingSink { printer: StdoutPrinter {}, } } } impl<P: Printer> Sink for ContextPrintingSink<P> { type Error = Error; fn matched( &mut self, searcher: &Searcher, sink_match: &SinkMatch, ) -> Result<bool, Self::Error> { Self::validate_searcher(searcher); let print_res = self .printer .colored_print(Fg(LightRed), String::from_utf8_lossy(sink_match.bytes())); Self::get_sink_result_for_print_result(print_res) } fn context( &mut self, searcher: &Searcher, context: &SinkContext<'_>, ) -> Result<bool, Self::Error> { 
Self::validate_searcher(searcher); let data = String::from_utf8_lossy(context.bytes()); let print_res = self.printer.print(data); Self::get_sink_result_for_print_result(print_res) } } #[cfg(test)] mod tests { use super::*; use crate::testutil::mock_print::MockPrinter; use grep::regex::RegexMatcher; use grep::searcher::SearcherBuilder; use test_case::test_case; const SEARCH_TEXT: &str = "The quick \n\ brown fox \n\ jumped over \n\ the lazy \n\ dog."; enum RequiredSearcherSettings { Passthru, } #[test_case(&[RequiredSearcherSettings::Passthru], true; "passthru")] #[test_case(&[], false; "none")] fn test_requires_properly_configured_searcher( settings: &[RequiredSearcherSettings], valid: bool, ) { let perform_search = || { let matcher = RegexMatcher::new("fox").expect("regexp doesn't compile"); let mock_printer = MockPrinter::default(); let sink = ContextPrintingSink { printer: &mock_printer, }; let mut builder = SearcherBuilder::new(); for setting in settings { match setting { RequiredSearcherSettings::Passthru => builder.passthru(true), }; } let mut searcher = builder.build(); searcher.search_slice(matcher, SEARCH_TEXT.as_bytes(), sink) }; if valid { let search_res = perform_search(); assert!(search_res.is_ok()); } else { let search_res = panic::catch_unwind(perform_search); assert!(search_res.is_err()); match search_res.unwrap_err().downcast_ref::<String>() { Some(err) => assert_eq!(err, PASSTHRU_PANIC_MSG), None => panic!("Panicked error was not of expected type"), }; } } }
use crate::print; use crate::print::{Printer, StdoutPrinter}; use grep::searcher::{Searcher, Sink, SinkContext, SinkError, SinkMatch}; use std::fmt::Display; use std::io; use std::panic; use termion::color::{Fg, LightRed}; use thiserror::Error; const PASSTHRU_PANIC_MSG: &str = "passthru is not enabled on the given searcher"; pub(crate) struct ContextPrintingSink<P: Printer> { printer: P, } #[derive(Error, Debug)] pub enum Error { #[error("Print failure: {0}")] PrintFailed( io::Error, ), #[error("{0}")] SearchError( String, ), } impl From<print::Error> for Error { fn from(err: print::Error) -> Self { let io_err = match err { print::Error::BrokenPipe(wrapped) | print::Error::Other(wrapped) => wrapped, }; Error::PrintFailed(io_err) } } impl SinkError for Error { fn error_message<T: Display>(message: T) -> Self { Error::SearchError(message.to_string()) } } impl<P: Printer> ContextPrintingSink<P> { fn get_sink_result_for_print_result(res: print::Result) -> Result<bool, Error> { match res { Err(print::Error::Other(_)) => Err(Error::from(res.unwrap_err())), Err(print::Error::BrokenPipe(_)) => Ok(false), Ok(_) => Ok(true), } } } impl<P: Printer> ContextPrintingSink<P> { #[must_use] pub fn new(printer: P) -> Self { ContextPrintingSink { printer } } fn validate_searcher(searcher: &Searcher) { if !searcher.passthru() { panic!("{}", PASSTHRU_PANIC_MSG) } } } impl Default for ContextPrintingSink<StdoutPrinter> { fn default() -> Self { ContextPrintingSink { printer: StdoutPrinter {}, } } } impl<P: Printer> Sink for ContextPrintingSink<P> { type Error = Error; fn matched( &mut self, searcher: &Searcher, sink_match: &SinkMatch, ) -> Result<bool, Self::Error> { Self::validate_searcher(searcher); let print_res = self .printer .colored_print(Fg(LightRed), String::from_utf8_lossy(sink_match.bytes())); Self::get_sink_result_for_print_result(print_res) } fn context( &mut self, searcher: &Searcher, context: &SinkContext<'_>, ) -> Result<bool, Self::Error> { 
Self::validate_searcher(searcher); let data = String::from_utf8_lossy(context.bytes()); let print_res = self.printer.print(data); Self::get_sink_result_for_print_result(print_res) } } #[cfg(test)] mod tests { use super::*; use crate::testutil::mock_print::MockPrinter; use grep::regex::RegexMatcher; use grep::searcher::SearcherBuilder; use test_case::test_case; const SEARCH_TEXT: &str = "The quick \n\ brown fox \n\ jumped over \n\ the lazy \n\ dog."; enum RequiredSearcherSettings { Passthru, } #[test_case(&[RequiredSearcherSettings::Passthru], true; "passthru")] #[test_case(&[], false; "none")] fn test_requires_properly_configured_searcher( settings: &[RequiredSearcherSettings], valid: bool, ) { let perform_search = || { let matcher = RegexMatcher::new("fox").expect("regexp doesn't compile"); let mock_printer = MockPrinter::default(); let sink = ContextPrintingSink { printer: &mock_printer, }; let mut builder = SearcherBuilder::new(); for setting in settings { match setting { RequiredSearcherSettings::Passthru => builder.passthru(true), }; } let mut searcher = builder.build(); searcher.search_slice(matcher, SEARCH_TEXT.as_bytes(), sink) }; if valid {
}
let search_res = perform_search(); assert!(search_res.is_ok()); } else { let search_res = panic::catch_unwind(perform_search); assert!(search_res.is_err()); match search_res.unwrap_err().downcast_ref::<String>() { Some(err) => assert_eq!(err, PASSTHRU_PANIC_MSG), None => panic!("Panicked error was not of expected type"), }; } }
function_block-function_prefix_line
[ { "content": "/// `scan_pattern_to_printer` will print a `Read`'s contents to the given `Printer`, while also scanning its contents\n\n/// for a regular expression. Lines that match this pattern will be highlighted in the output.\n\n///\n\n/// Note that this pattern is not anchored at the start of the line by d...
Rust
consensus/safety-rules/src/persistent_safety_storage.rs
pepenemo/libra
86a42bc1f1113868ee8c4815f0d3a137a923ed52
use crate::{ counters, logging::{self, LogEntry, LogEvent, LogField}, }; use anyhow::Result; use consensus_types::{common::Author, safety_data::SafetyData}; use libra_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey}; use libra_global_constants::{CONSENSUS_KEY, EXECUTION_KEY, OWNER_ACCOUNT, SAFETY_DATA, WAYPOINT}; use libra_logger::prelude::*; use libra_secure_storage::{ CachedStorage, CryptoStorage, InMemoryStorage, KVStorage, Storage, Value, }; use libra_types::waypoint::Waypoint; use std::str::FromStr; pub struct PersistentSafetyStorage { internal_store: Storage, } impl PersistentSafetyStorage { pub fn in_memory( consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, ) -> Self { let storage = Storage::from(InMemoryStorage::new()); Self::initialize( storage, Author::random(), consensus_private_key, execution_private_key, Waypoint::default(), ) } pub fn initialize( mut internal_store: Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Self { Self::initialize_( &mut internal_store, author, consensus_private_key, execution_private_key, waypoint, ) .expect("Unable to initialize backend storage"); Self { internal_store } } fn initialize_( internal_store: &mut Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Result<()> { let result = internal_store.import_private_key(CONSENSUS_KEY, consensus_private_key); if let Err(libra_secure_storage::Error::KeyAlreadyExists(_)) = result { warn!("Attempted to re-initialize existing storage"); return Ok(()); } internal_store.import_private_key(EXECUTION_KEY, execution_private_key)?; internal_store.set( SAFETY_DATA, Value::SafetyData(SafetyData::new(1, 0, 0, None)), )?; internal_store.set(OWNER_ACCOUNT, Value::String(author.to_string()))?; internal_store.set(WAYPOINT, Value::String(waypoint.to_string()))?; Ok(()) } pub fn 
into_cached(self) -> PersistentSafetyStorage { if let Storage::CachedStorage(cached_storage) = self.internal_store { PersistentSafetyStorage { internal_store: Storage::CachedStorage(cached_storage), } } else { PersistentSafetyStorage { internal_store: Storage::CachedStorage(CachedStorage::new(self.internal_store)), } } } pub fn new(internal_store: Storage) -> Self { Self { internal_store } } pub fn author(&self) -> Result<Author> { let res = self.internal_store.get(OWNER_ACCOUNT)?; let res = res.value.string()?; std::str::FromStr::from_str(&res) } pub fn consensus_key_for_version( &self, version: Ed25519PublicKey, ) -> Result<Ed25519PrivateKey> { self.internal_store .export_private_key_for_version(CONSENSUS_KEY, version) .map_err(|e| e.into()) } pub fn execution_public_key(&self) -> Result<Ed25519PublicKey> { Ok(self .internal_store .get_public_key(EXECUTION_KEY) .map(|r| r.public_key)?) } pub fn safety_data(&self) -> Result<SafetyData> { Ok(self .internal_store .get(SAFETY_DATA) .and_then(|r| r.value.safety_data())?) 
} pub fn set_safety_data(&mut self, data: SafetyData) -> Result<()> { counters::set_state("epoch", data.epoch as i64); counters::set_state("last_voted_round", data.last_voted_round as i64); counters::set_state("preferred_round", data.preferred_round as i64); self.internal_store .set(SAFETY_DATA, Value::SafetyData(data))?; Ok(()) } pub fn waypoint(&self) -> Result<Waypoint> { let waypoint = self .internal_store .get(WAYPOINT) .and_then(|r| r.value.string())?; Waypoint::from_str(&waypoint) .map_err(|e| anyhow::anyhow!("Unable to parse waypoint: {}", e)) } pub fn set_waypoint(&mut self, waypoint: &Waypoint) -> Result<()> { self.internal_store .set(WAYPOINT, Value::String(waypoint.to_string()))?; send_struct_log!(logging::safety_log(LogEntry::Waypoint, LogEvent::Update) .data(LogField::Message.as_str(), waypoint)); Ok(()) } #[cfg(any(test, feature = "testing"))] pub fn internal_store(&mut self) -> &mut Storage { &mut self.internal_store } } #[cfg(test)] mod tests { use super::*; use libra_crypto::Uniform; use libra_types::validator_signer::ValidatorSigner; #[test] fn test() { let private_key = ValidatorSigner::from_int(0).private_key().clone(); let mut storage = PersistentSafetyStorage::in_memory( private_key, Ed25519PrivateKey::generate_for_testing(), ); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 1); assert_eq!(safety_data.last_voted_round, 0); assert_eq!(safety_data.preferred_round, 0); storage .set_safety_data(SafetyData::new(9, 8, 1, None)) .unwrap(); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 9); assert_eq!(safety_data.last_voted_round, 8); assert_eq!(safety_data.preferred_round, 1); } }
use crate::{ counters, logging::{self, LogEntry, LogEvent, LogField}, }; use anyhow::Result; use consensus_types::{common::Author, safety_data::SafetyData}; use libra_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey}; use libra_global_constants::{CONSENSUS_KEY, EXECUTION_KEY, OWNER_ACCOUNT, SAFETY_DATA, WAYPOINT}; use libra_logger::prelude::*; use libra_secure_storage::{ CachedStorage, CryptoStorage, InMemoryStorage, KVStorage, Storage, Value, }; use libra_types::waypoint::Waypoint; use std::str::FromStr; pub struct PersistentSafetyStorage { internal_store: Storage, } impl PersistentSafetyStorage { pub fn in_memory( consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, ) -> Self { let storage = Storage::from(InMemoryStorage::new()); Self::initialize( storage, Author::random(), consensus_private_key, execution_private_key, Waypoint::default(), ) } pub fn initialize( mut internal_store: Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Self { Self::initialize_( &mut internal_store, author, consensus_private_key, execution_private_key, waypoint, ) .expect("Unable to initialize backend storage"); Self { internal_store } } fn initialize_( internal_store: &mut Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Result<()> { let result = internal_store.import_private_key(CONSENSUS_KEY, consensus_private_key); if let Err(libra_secure_storage::Error::KeyAlreadyExists(_)) = result { warn!("Attempted to re-initialize existing storage"); return Ok(()); } internal_store.import_private_key(EXECUTION_KEY, execution_private_key)?; internal_store.set( SAFETY_DATA, Value::SafetyData(SafetyData::new(1, 0, 0, None)), )?; internal_store.set(OWNER_ACCOUNT, Value::String(author.to_string()))?; internal_store.set(WAYPOINT, Value::String(waypoint.to_string()))?; Ok(()) } pub fn 
into_cached(self) -> PersistentSafetyStorage {
} pub fn new(internal_store: Storage) -> Self { Self { internal_store } } pub fn author(&self) -> Result<Author> { let res = self.internal_store.get(OWNER_ACCOUNT)?; let res = res.value.string()?; std::str::FromStr::from_str(&res) } pub fn consensus_key_for_version( &self, version: Ed25519PublicKey, ) -> Result<Ed25519PrivateKey> { self.internal_store .export_private_key_for_version(CONSENSUS_KEY, version) .map_err(|e| e.into()) } pub fn execution_public_key(&self) -> Result<Ed25519PublicKey> { Ok(self .internal_store .get_public_key(EXECUTION_KEY) .map(|r| r.public_key)?) } pub fn safety_data(&self) -> Result<SafetyData> { Ok(self .internal_store .get(SAFETY_DATA) .and_then(|r| r.value.safety_data())?) } pub fn set_safety_data(&mut self, data: SafetyData) -> Result<()> { counters::set_state("epoch", data.epoch as i64); counters::set_state("last_voted_round", data.last_voted_round as i64); counters::set_state("preferred_round", data.preferred_round as i64); self.internal_store .set(SAFETY_DATA, Value::SafetyData(data))?; Ok(()) } pub fn waypoint(&self) -> Result<Waypoint> { let waypoint = self .internal_store .get(WAYPOINT) .and_then(|r| r.value.string())?; Waypoint::from_str(&waypoint) .map_err(|e| anyhow::anyhow!("Unable to parse waypoint: {}", e)) } pub fn set_waypoint(&mut self, waypoint: &Waypoint) -> Result<()> { self.internal_store .set(WAYPOINT, Value::String(waypoint.to_string()))?; send_struct_log!(logging::safety_log(LogEntry::Waypoint, LogEvent::Update) .data(LogField::Message.as_str(), waypoint)); Ok(()) } #[cfg(any(test, feature = "testing"))] pub fn internal_store(&mut self) -> &mut Storage { &mut self.internal_store } } #[cfg(test)] mod tests { use super::*; use libra_crypto::Uniform; use libra_types::validator_signer::ValidatorSigner; #[test] fn test() { let private_key = ValidatorSigner::from_int(0).private_key().clone(); let mut storage = PersistentSafetyStorage::in_memory( private_key, Ed25519PrivateKey::generate_for_testing(), ); let 
safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 1); assert_eq!(safety_data.last_voted_round, 0); assert_eq!(safety_data.preferred_round, 0); storage .set_safety_data(SafetyData::new(9, 8, 1, None)) .unwrap(); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 9); assert_eq!(safety_data.last_voted_round, 8); assert_eq!(safety_data.preferred_round, 1); } }
if let Storage::CachedStorage(cached_storage) = self.internal_store { PersistentSafetyStorage { internal_store: Storage::CachedStorage(cached_storage), } } else { PersistentSafetyStorage { internal_store: Storage::CachedStorage(CachedStorage::new(self.internal_store)), } }
if_condition
[ { "content": "/// Same as `to_bytes` but write directly into an `std::io::Write` object.\n\npub fn serialize_into<W, T>(write: &mut W, value: &T) -> Result<()>\n\nwhere\n\n W: std::io::Write,\n\n T: ?Sized + Serialize,\n\n{\n\n let serializer = Serializer::new(write, crate::MAX_CONTAINER_DEPTH);\n\n ...
Rust
src/pdu/hex_access/write_multi_reg.rs
hubertmis/modbus
430c6204070d7fd27fc639c08475b3b3079edeaa
use crate::Error; use crate::pdu::{Function, FunctionCode, Request as ReqT, Response as RspT, Setter}; use std::convert::TryInto; const MIN_QUANTITY: usize = 1; const MAX_QUANTITY: usize = 123; #[derive(Debug, PartialEq)] pub struct Request { address: u16, values: Vec<u16>, } impl Request { pub fn new(address: u16, values: &[u16]) -> Self { assert!(values.len() >= MIN_QUANTITY); assert!(values.len() <= MAX_QUANTITY); Request{address, values: Vec::from(values)} } pub fn get_address(&self) -> u16 { self.address } pub fn get_values(&self) -> &[u16] { &self.values } } impl Function for Request { fn encode(&self) -> Result<Vec<u8>, Error> { match self.values.len() { MIN_QUANTITY..=MAX_QUANTITY => { let mut result = Vec::new(); result.push(FunctionCode::WriteMultiReg as u8); result.append(&mut self.address.to_be_bytes().to_vec()); result.append(&mut (self.values.len() as u16).to_be_bytes().to_vec()); result.push((self.values.len() as u8) * 2); for val in &self.values { result.append(&mut val.to_be_bytes().to_vec()); } Ok(result) } _ => Err(Error::InvalidValue) } } fn decode(data: &[u8]) -> Result<Self, Error> { if data.len() < 6 { return Err(Error::InvalidDataLength); } if data[0] != FunctionCode::WriteMultiReg as u8 { return Err(Error::InvalidData); } let address = u16::from_be_bytes(data[1..=2].try_into().unwrap()); let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap()); let data_cnt = data[5]; if data_cnt as u16 != quantity * 2 { return Err(Error::InvalidDataLength); } if (quantity as usize) < MIN_QUANTITY || (quantity as usize) > MAX_QUANTITY { return Err(Error::InvalidData); } let mut values = Vec::with_capacity(quantity as usize); for i in 0..quantity { let val_idx = (6 + i * 2) as usize; values.push(u16::from_be_bytes(data[val_idx..=val_idx+1].try_into().unwrap())) } Ok(Self{address, values}) } } impl ReqT for Request { type Rsp = Response; } impl Setter for Request { fn create_expected_response(&self) -> Self::Rsp { Response::new(self.address, 
self.values.len() as u16) } } #[derive(Debug, PartialEq)] pub struct Response { address: u16, quantity: u16, } impl Response { pub fn new(address: u16, quantity: u16) -> Self { assert!(quantity as usize >= MIN_QUANTITY); assert!(quantity as usize <= MAX_QUANTITY); Self{address, quantity} } pub fn get_address(&self) -> u16 { self.address } pub fn get_quantity(&self) -> u16 { self.quantity } } impl Function for Response { fn encode(&self) -> Result<Vec<u8>, Error> { match self.quantity as usize { MIN_QUANTITY..=MAX_QUANTITY => { let mut result = Vec::new(); result.push(FunctionCode::WriteMultiReg as u8); result.append(&mut self.address.to_be_bytes().to_vec()); result.append(&mut self.quantity.to_be_bytes().to_vec()); Ok(result) } _ => Err(Error::InvalidValue) } } fn decode(data: &[u8]) -> Result<Self, Error> { if data.len() != 5 { return Err(Error::InvalidDataLength); } if data[0] != FunctionCode::WriteMultiReg as u8 { return Err(Error::InvalidData); } let address = u16::from_be_bytes(data[1..=2].try_into().unwrap()); let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap()); Ok(Self{address, quantity}) } } impl RspT for Response { fn get_exc_function_code() -> u8 { FunctionCode::ExcWriteMultiReg.try_into().unwrap() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_encode_request() { let req = Request::new(0xdead, &vec![0xfade, 0xface, 0x0000, 0x0001]); let pdu = req.encode().unwrap(); let expected_pdu = vec![0x10, 0xde, 0xad, 0x00, 0x04, 0x08, 0xfa, 0xde, 0xfa, 0xce, 0x00, 0x00, 0x00, 0x01]; assert_eq!(pdu, expected_pdu); } #[test] fn test_encode_response() { let rsp = Response::new(0xffff, 0x0072); let pdu = rsp.encode().unwrap(); let expected_pdu = vec![0x10, 0xff, 0xff, 0x00, 0x72]; assert_eq!(pdu, expected_pdu); } #[test] fn test_decode_request() { let pdu = vec![0x10, 0x00, 0x00, 0x00, 0x02, 0x04, 0x01, 0x02, 0xfe, 0xfd]; let req = Request::decode(&pdu).unwrap(); let expected_req = Request::new(0x0000, &vec![0x0102, 0xfefd]); assert_eq!(req, 
expected_req); } #[test] fn test_decode_invalid_request() { let pdu = vec![0x11, 0x01, 0x23, 0x00, 0x01, 0x02, 0x11, 0x12]; let err = Request::decode(&pdu).err().unwrap(); match err { Error::InvalidData => {} _ => panic!(format!("Expected InvalidData, but got {:?}", err)), } } #[test] fn test_decode_response() { let pdu = vec![0x10, 0x01, 0x23, 0x00, 0x65]; let rsp = Response::decode(&pdu).unwrap(); let expected_rsp = Response::new(0x0123, 0x0065); assert_eq!(rsp, expected_rsp); } }
use crate::Error; use crate::pdu::{Function, FunctionCode, Request as ReqT, Response as RspT, Setter}; use std::convert::TryInto; const MIN_QUANTITY: usize = 1; const MAX_QUANTITY: usize = 123; #[derive(Debug, PartialEq)] pub struct Request { address: u16, values: Vec<u16>, } impl Request { pub fn new(address: u16, values: &[u16]) -> Self { assert!(values.len() >= MIN_QUANTITY); assert!(values.len() <= MAX_QUANTITY); Request{address, values: Vec::from(values)} } pub fn get_address(&self) -> u16 { self.address } pub fn get_values(&self) -> &[u16] { &self.values } } impl Function for Request { fn encode(&self) -> Result<Vec<u8>, Error> { match self.values.len() { MIN_QUANTITY..=MAX_QUANTITY => { let mut result = Vec::new(); result.push(FunctionCode::WriteMultiReg as u8); result.append(&mut self.address.to_be_bytes().to_vec()); result.append(&mut (self.values.len() as u16).to_be_bytes().to_vec()); result.push((self.values.len() as u8) * 2); for val in &self.values { result.append(&mut val.to_be_bytes().to_vec()); } Ok(result) } _ => Err(Error::InvalidValue) } } fn decode(data: &[u8]) -> Result<Self, Error> { if data.len() < 6 { return Err(Error::InvalidDataLength); } if data[0] != FunctionCode::WriteMultiReg as u8 { return Err(Error::InvalidData); } let address = u16::from_be_bytes(data[1..=2].try_into().unwrap()); let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap()); let data_cnt = data[5]; if data_cnt as u16 != quantity * 2 { return Err(Error::InvalidDataLength); } if (quantity as usize) < MIN_QUANTITY || (quantity as usize) > MAX_QUANTITY { return Err(Error::InvalidData); } let mut values = Vec::with_capacity(quantity as usize); for i in 0..quantity { let val_idx = (6 + i * 2) as usize; values.push(u16::from_be_bytes(data[val_idx..=val_idx+1].try_into().unwrap())) } Ok(Self{address, values}) } } impl ReqT for Request { type Rsp = Response; } impl Setter for Request { fn create_expected_response(&self) -> Self::Rsp { Response::new(self.address, 
self.values.len() as u16) } } #[derive(Debug, PartialEq)] pub struct Response { address: u16, quantity: u16, } impl Response { pub fn new(address: u16, quantity: u16) -> Self { assert!(quantity as usize >= MIN_QUANTITY); assert!(quantity as usize <= MAX_QUANTITY); Self{address, quantity} } pub fn get_address(&self) -> u16 { self.address } pub fn get_quantity(&self) -> u16 { self.quantity } } impl Function for Response { fn encode(&self) -> Result<Vec<u8>, Error> { match self.quantity as usize { MIN_QUANTITY..=MAX_QUANTITY => { let mut result = Vec::new(); result.push(FunctionCode::WriteMultiReg as u8); result.append(&mut self.address.to_be_bytes().to_vec()); result.app
fn decode(data: &[u8]) -> Result<Self, Error> { if data.len() != 5 { return Err(Error::InvalidDataLength); } if data[0] != FunctionCode::WriteMultiReg as u8 { return Err(Error::InvalidData); } let address = u16::from_be_bytes(data[1..=2].try_into().unwrap()); let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap()); Ok(Self{address, quantity}) } } impl RspT for Response { fn get_exc_function_code() -> u8 { FunctionCode::ExcWriteMultiReg.try_into().unwrap() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_encode_request() { let req = Request::new(0xdead, &vec![0xfade, 0xface, 0x0000, 0x0001]); let pdu = req.encode().unwrap(); let expected_pdu = vec![0x10, 0xde, 0xad, 0x00, 0x04, 0x08, 0xfa, 0xde, 0xfa, 0xce, 0x00, 0x00, 0x00, 0x01]; assert_eq!(pdu, expected_pdu); } #[test] fn test_encode_response() { let rsp = Response::new(0xffff, 0x0072); let pdu = rsp.encode().unwrap(); let expected_pdu = vec![0x10, 0xff, 0xff, 0x00, 0x72]; assert_eq!(pdu, expected_pdu); } #[test] fn test_decode_request() { let pdu = vec![0x10, 0x00, 0x00, 0x00, 0x02, 0x04, 0x01, 0x02, 0xfe, 0xfd]; let req = Request::decode(&pdu).unwrap(); let expected_req = Request::new(0x0000, &vec![0x0102, 0xfefd]); assert_eq!(req, expected_req); } #[test] fn test_decode_invalid_request() { let pdu = vec![0x11, 0x01, 0x23, 0x00, 0x01, 0x02, 0x11, 0x12]; let err = Request::decode(&pdu).err().unwrap(); match err { Error::InvalidData => {} _ => panic!(format!("Expected InvalidData, but got {:?}", err)), } } #[test] fn test_decode_response() { let pdu = vec![0x10, 0x01, 0x23, 0x00, 0x65]; let rsp = Response::decode(&pdu).unwrap(); let expected_rsp = Response::new(0x0123, 0x0065); assert_eq!(rsp, expected_rsp); } }
end(&mut self.quantity.to_be_bytes().to_vec()); Ok(result) } _ => Err(Error::InvalidValue) } }
function_block-function_prefixed
[ { "content": "/// Setter is a trait for Modbus requests that expect known response.\n\npub trait Setter where Self: Request, Self::Rsp: PartialEq {\n\n fn create_expected_response(&self) -> Self::Rsp;\n\n}\n\n\n\n#[derive(Clone, Copy, FromPrimitive, IntoPrimitive, PartialEq)]\n\n#[repr(u8)]\n\npub enum Funct...
Rust
projects/rivium/src/lib.rs
chykon/rivium-mono
f7d45afbf94441a5f925ac8e3a2087f18f02e20c
/* const MEMORY_SIZE: usize = 1024 * 1024 * 512; const REGISTERS_COUNT: usize = 32 + 1; const CODE_RANGE_BEGIN: usize = 0; const CODE_RANGE_END: usize = CODE_RANGE_BEGIN + (1024 * 1024 * 128) - 1; static mut MEMORY: [u8; MEMORY_SIZE] = [0; MEMORY_SIZE]; static mut REGISTERS: [u32; REGISTERS_COUNT] = [0; REGISTERS_COUNT]; static mut ERROR_MESSAGE: &str = ""; const X0: usize = 0; const X1: usize = 1; const X2: usize = 2; const X3: usize = 3; const X4: usize = 4; const X5: usize = 5; const X6: usize = 6; const X7: usize = 7; const X8: usize = 8; const X9: usize = 9; const X10: usize = 10; const X11: usize = 11; const X12: usize = 12; const X13: usize = 13; const X14: usize = 14; const X15: usize = 15; const X16: usize = 16; const X17: usize = 17; const X18: usize = 18; const X19: usize = 19; const X20: usize = 20; const X21: usize = 21; const X22: usize = 22; const X23: usize = 23; const X24: usize = 24; const X25: usize = 25; const X26: usize = 26; const X27: usize = 27; const X28: usize = 28; const X29: usize = 29; const X30: usize = 30; const X31: usize = 31; const PC: usize = 32; const INST_20: u32 = 0b00000000000100000000000000000000; const INST_24_21: u32 = 0b00000001111000000000000000000000; const INST_30_25: u32 = 0b01111110000000000000000000000000; const INST_31: u32 = 0b10000000000000000000000000000000; const INST_7: u32 = 0b00000000000000000000000010000000; const INST_11_8: u32 = 0b00000000000000000000111100000000; const INST_19_12: u32 = 0b00000000000011111111000000000000; const INST_30_20: u32 = 0b01111111111100000000000000000000; const LOAD: u32 = 0b0000011; const LOAD_FP: u32 = 0b0000111; const CUSTOM_0: u32 = 0b0001011; const MISC_MEM: u32 = 0b0001111; const OP_IMM: u32 = 0b0010011; const AUIPC: u32 = 0b0010111; const OP_IMM_32: u32 = 0b0011011; const STORE: u32 = 0b0100011; const STORE_FP: u32 = 0b0100111; const CUSTOM_1: u32 = 0b0101011; const AMO: u32 = 0b0101111; const OP: u32 = 0b0110011; const LUI: u32 = 0b0110111; const OP_32: u32 = 0b0111011; 
const MADD: u32 = 0b1000011; const MSUB: u32 = 0b1000111; const NMSUB: u32 = 0b1001011; const NMADD: u32 = 0b1001111; const OP_FP: u32 = 0b1010011; const RESERVED_0: u32 = 0b1010111; const CUSTOM_2: u32 = 0b1011011; const BRANCH: u32 = 0b1100011; const JALR: u32 = 0b1100111; const RESERVED_1: u32 = 0b1101011; const JAL: u32 = 0b1101111; const SYSTEM: u32 = 0b1110011; const RESERVED_2: u32 = 0b1110111; const CUSTOM_3: u32 = 0b1111011; const BEQ: u32 = 0b000; const BNE: u32 = 0b001; const BLT: u32 = 0b100; const BGE: u32 = 0b101; const BLTU: u32 = 0b110; const BGEU: u32 = 0b111; const LB: u32 = 0b000; const LH: u32 = 0b001; const LW: u32 = 0b010; const LBU: u32 = 0b100; const LHU: u32 = 0b101; const SB: u32 = 0b000; const SH: u32 = 0b001; const SW: u32 = 0b010; const ADDI: u32 = 0b000; const SLTI: u32 = 0b010; const SLTIU: u32 = 0b011; const XORI: u32 = 0b100; const ORI: u32 = 0b110; const ANDI: u32 = 0b111; const SLLI: u32 = 0b0000000001; const SRLI: u32 = 0b0000000101; const SRAI: u32 = 0b0100000101; const ADD: u32 = 0b0100000000; const SUB: u32 = 0b0000000000; const SLL: u32 = 0b0000000001; const SLT: u32 = 0b0000000010; const SLTU: u32 = 0b0000000011; const XOR: u32 = 0b0000000100; const SRL: u32 = 0b0000000101; const SRA: u32 = 0b0100000101; const OR: u32 = 0b0000000110; const AND: u32 = 0b0000000111; const FENCE: u32 = 0b000; const ECALL: u32 = 0b0000000000000000000000000; const EBREAK: u32 = 0b0000000000010000000000000; */ #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_panic_hook() { std::panic::set_hook(Box::new(console_error_panic_hook::hook)) } #[wasm_bindgen::prelude::wasm_bindgen] pub fn jsonify_intermediate(string: &str) -> String { let mut map = serde_json::Map::new(); let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_array().unwrap(); for (i, element) in array.iter().enumerate() { let mut strings: Vec<String> = Vec::new(); let element_array = element.as_array().unwrap(); for elem in element_array { let elem_str 
= elem.as_str().unwrap().to_lowercase(); strings.push(elem_str); } map.insert(i.to_string(), serde_json::json!(strings)); } serde_json::to_string(&map).unwrap() } #[wasm_bindgen::prelude::wasm_bindgen] pub fn intermediate_to_text(string: &str) -> String { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_object().unwrap(); let mut output_string = String::new(); for element in array { let element_array = element.1.as_array().unwrap(); let elem = &element_array[0]; let elem_str = elem.as_str().unwrap(); if element_array.len() == 1 { output_string.push_str(elem_str); output_string.push('\n'); continue; } else if element_array.len() == 3 { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push_str(elem_str_3); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push('\n'); continue; } match elem_str { "add" | "sub" | "sll" | "slt" | "sltu" | "xor" | "srl" | "sra" | "or" | "and" => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push('x'); str_4.push_str(elem_str_4); str_4.push_str(", "); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } _ => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); 
str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push_str(elem_str_4); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } } output_string.push('\n'); } output_string } #[wasm_bindgen::prelude::wasm_bindgen] pub fn analyze(string: &str) -> bool { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let obj = data.as_object().unwrap(); let arr = obj.get("0").unwrap().as_array().unwrap(); let mut result = true; match arr[0].as_str().unwrap() { "lui" | "auipc" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b111111111111) != 0b000000000000 { result = false; } } "jal" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "jalr" | "lb" | "lh" | "lw" | "lbu" | "lhu" | "sb" | "sh" | "sw" | "addi" | "slti" | "sltiu" | "xori" | "ori" | "andi" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111111000000000000) != 0b00000000000000000000000000000000 { result = false; } } "beq" | "bne" | "blt" | "bge" | "bltu" | "bgeu" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111110000000000001) != 0b00000000000000000000000000000000 { result = false; } } "slli" | "srli" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "srai" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b01000000000000000000000000000000 { result = false; } } _ => {} } result } /* #[wasm_bindgen::prelude::wasm_bindgen] pub fn load_machine_code(byte: u8, address: usize) -> bool { 
if address > CODE_RANGE_END { false } else { unsafe { MEMORY[address] = byte } true } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_to_register(register: usize, value: u32) { if register == X0 { unsafe { REGISTERS[register] = 0 } } else { unsafe { REGISTERS[register] = value } } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn get_from_register(register: usize) -> u32 { if register == X0 { unsafe { REGISTERS[register] = 0; REGISTERS[register] } } else { unsafe { REGISTERS[register] } } } fn create_instruction(byte_1: u8, byte_2: u8, byte_3: u8, byte_4: u8) -> u32 { let mut instruction = 0; instruction |= byte_1 as u32; instruction |= (byte_2 as u32) << 8; instruction |= (byte_3 as u32) << 16; instruction |= (byte_4 as u32) << 24; instruction } fn iimm_to_value(instruction: u32) -> u32 { let val_1 = (instruction & INST_20) >> 20; let val_2 = (instruction & INST_24_21) >> 21; let val_3 = (instruction & INST_30_25) >> 25; let val_4 = (instruction & INST_31) >> 31; (val_4 << 31) | (val_3 << 5) | (val_2 << 1) | val_1 } fn value_to_iimm(value: u32) -> u32 { let imm_1 = (value & INST_20) >> 20; let imm_2 = (value & INST_24_21) >> 21; let imm_3 = (value & INST_30_25) >> 25; let imm_4 = (value & INST_31) >> 31; (imm_4 << 31) | (imm_3 << 5) | (imm_2 << 1) | imm_1 } // simm // bimm // uimm // jimm fn get_opcode(instruction: u32) -> u32 { instruction & 0b1111111 } fn get_rd(instruction: u32) -> u32 { (instruction >> 7) & 0b11111 } fn get_funct3(instruction: u32) -> u32 { (instruction >> 12) & 0b111 } fn get_rs1(instruction: u32) -> u32 { (instruction >> 15) & 0b11111 } // Disassembler fn decode_instruction(instruction: u32) -> serde_json::Value { if (instruction & 0b11) != 0b11 { return serde_json::json!("DASM:DECODE_0B11_END") } else if (instruction & 0b11100) == 0b11100 { return serde_json::json!("DASM:DECODE_0B11100_END") } let mut operation = ""; let mut operand_1 = ""; let mut operand_2 = ""; let mut operand_3 = ""; let mut opcode = get_opcode(instruction); if opcode 
== OP_IMM { let rd = get_rd(instruction); let funct3 = get_funct3(instruction); let rs1 = get_rs1(instruction); let iimm = iimm_to_value(instruction); match funct3 { ADDI => operation = "ADDI", _ => return serde_json::json!("DASM:DECODE_0B11100_END") // a }; } serde_json::json!({ operation: [ operand_1, operand_2, operand_3 ] }) } // Emulator #[wasm_bindgen::prelude::wasm_bindgen] pub fn execute_instruction() -> bool { unsafe { ERROR_MESSAGE = ""; } // Fetch let current_pc = get_from_register(PC) as usize; if (current_pc + 3) > CODE_RANGE_END { unsafe { ERROR_MESSAGE = "VM:CODE_RANGE_END"; } return false } let instruction_byte_1; let instruction_byte_2; let instruction_byte_3; let instruction_byte_4; unsafe { instruction_byte_1 = MEMORY[current_pc]; instruction_byte_2 = MEMORY[current_pc + 1]; instruction_byte_3 = MEMORY[current_pc + 2]; instruction_byte_4 = MEMORY[current_pc + 3]; } let instruction = create_instruction( instruction_byte_1, instruction_byte_2, instruction_byte_3, instruction_byte_4 ); let next_pc = (current_pc as u32) + 4; // Decode /* if (instruction & 0b11) != 0b11 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11_END"; } return false } else if (instruction & 0b11100) == 0b11100 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11100_END"; } return false } */ // move to dis ams //???? // fail -> error // execute // debug signal -> do // check pc-register increment if branch // ... set_to_register(PC, next_pc); true } */
/* const MEMORY_SIZE: usize = 1024 * 1024 * 512; const REGISTERS_COUNT: usize = 32 + 1; const CODE_RANGE_BEGIN: usize = 0; const CODE_RANGE_END: usize = CODE_RANGE_BEGIN + (1024 * 1024 * 128) - 1; static mut MEMORY: [u8; MEMORY_SIZE] = [0; MEMORY_SIZE]; static mut REGISTERS: [u32; REGISTERS_COUNT] = [0; REGISTERS_COUNT]; static mut ERROR_MESSAGE: &str = ""; const X0: usize = 0; const X1: usize = 1; const X2: usize = 2; const X3: usize = 3; const X4: usize = 4; const X5: usize = 5; const X6: usize = 6; const X7: usize = 7; const X8: usize = 8; const X9: usize = 9; const X10: usize = 10; const X11: usize = 11; const X12: usize = 12; const X13: usize = 13; const X14: usize = 14; const X15: usize = 15; const X16: usize = 16; const X17: usize = 17; const X18: usize = 18; const X19: usize = 19; const X20: usize = 20; const X21: usize = 21; const X22: usize = 22; const X23: usize = 23; const X24: usize = 24; const X25: usize = 25; const X26: usize = 26; const X27: usize = 27; const X28: usize = 28; const X29: usize = 29; const X30: usize = 30; const X31: usize = 31; const PC: usize = 32; const INST_20: u32 = 0b00000000000100000000000000000000; const INST_24_21: u32 = 0b00000001111000000000000000000000; const INST_30_25: u32 = 0b01111110000000000000000000000000; const INST_31: u32 = 0b10000000000000000000000000000000; const INST_7: u32 = 0b00000000000000000000000010000000; const INST_11_8: u32 = 0b00000000000000000000111100000000; const INST_19_12: u32 = 0b00000000000011111111000000000000; const INST_30_20: u32 = 0b01111111111100000000000000000000; const LOAD: u32 = 0b0000011; const LOAD_FP: u32 = 0b0000111; const CUSTOM_0: u32 = 0b0001011; const MISC_MEM: u32 = 0b0001111; const OP_IMM: u32 = 0b0010011; const AUIPC: u32 = 0b0010111; const OP_IMM_32: u32 = 0b0011011; const STORE: u32 = 0b0100011; const STORE_FP: u32 = 0b0100111; const CUSTOM_1: u32 = 0b0101011; const AMO: u32 = 0b0101111; const OP: u32 = 0b0110011; const LUI: u32 = 0b0110111; const OP_32: u32 = 0b0111011; 
const MADD: u32 = 0b1000011; const MSUB: u32 = 0b1000111; const NMSUB: u32 = 0b1001011; const NMADD: u32 = 0b1001111; const OP_FP: u32 = 0b1010011; const RESERVED_0: u32 = 0b1010111; const CUSTOM_2: u32 = 0b1011011; const BRANCH: u32 = 0b1100011; const JALR: u32 = 0b1100111; const RESERVED_1: u32 = 0b1101011; const JAL: u32 = 0b1101111; const SYSTEM: u32 = 0b1110011; const RESERVED_2: u32 = 0b1110111; const CUSTOM_3: u32 = 0b1111011; const BEQ: u32 = 0b000; const BNE: u32 = 0b001; const BLT: u32 = 0b100; const BGE: u32 = 0b101; const BLTU: u32 = 0b110; const BGEU: u32 = 0b111; const LB: u32 = 0b000; const LH: u32 = 0b001; const LW: u32 = 0b010; const LBU: u32 = 0b100; const LHU: u32 = 0b101; const SB: u32 = 0b000; const SH: u32 = 0b001; const SW: u32 = 0b010; const ADDI: u32 = 0b000; const SLTI: u32 = 0b010; const SLTIU: u32 = 0b011; const XORI: u32 = 0b100; const ORI: u32 = 0b110; const ANDI: u32 = 0b111; const SLLI: u32 = 0b0000000001; const SRLI: u32 = 0b0000000101; const SRAI: u32 = 0b0100000101; const ADD: u32 = 0b0100000000; const SUB: u32 = 0b0000000000; const SLL: u32 = 0b0000000001; const SLT: u32 = 0b0000000010; const SLTU: u32 = 0b0000000011; const XOR: u32 = 0b0000000100; const SRL: u32 = 0b0000000101; const SRA: u32 = 0b0100000101; const OR: u32 = 0b0000000110; const AND: u32 = 0b0000000111; const FENCE: u32 = 0b000; const ECALL: u32 = 0b0000000000000000000000000; const EBREAK: u32 = 0b0000000000010000000000000; */ #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_panic_hook() { std::panic::set_hook(Box::new(console_error_panic_hook::hook)) } #[wasm_bindgen::prelude::wasm_bindgen] pub fn jsonify_intermediate(string: &str) -> String { let mut map = serde_json::Map::new(); let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_array().unwrap(); for (i, element) in array.iter().enumerate() { let mut strings: Vec<String> = Vec::new(); let element_array = element.as_array().unwrap(); for elem in element_array { let elem_str 
= elem.as_str().unwrap().to_lowercase(); strings.push(elem_str); } map.insert(i.to_string(), serde_json::json!(strings)); } serde_json::to_string(&map).unwrap() } #[wasm_bindgen::prelude::wasm_bindgen] pub fn intermediate_to_text(string: &str) -> String { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_object().unwrap(); let mut output_string = String::new(); for element in array { let element_array = element.1.as_array().unwrap(); let elem = &element_array[0]; let elem_str = elem.as_str().unwrap(); if element_array.len() == 1 { output_string.push_str(elem_str); output_string.push('\n'); continue; } else if element_array.len() == 3 { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push_str(elem_str_3); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push('\n'); continue; } match elem_str { "add" | "sub" | "sll" | "slt" | "sltu" | "xor" | "srl" | "sra" | "or" | "and" => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str
nstruction & 0b11100) == 0b11100 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11100_END"; } return false } */ // move to dis ams //???? // fail -> error // execute // debug signal -> do // check pc-register increment if branch // ... set_to_register(PC, next_pc); true } */
_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push('x'); str_4.push_str(elem_str_4); str_4.push_str(", "); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } _ => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push_str(elem_str_4); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } } output_string.push('\n'); } output_string } #[wasm_bindgen::prelude::wasm_bindgen] pub fn analyze(string: &str) -> bool { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let obj = data.as_object().unwrap(); let arr = obj.get("0").unwrap().as_array().unwrap(); let mut result = true; match arr[0].as_str().unwrap() { "lui" | "auipc" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b111111111111) != 0b000000000000 { result = false; } } "jal" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "jalr" | "lb" | "lh" | "lw" | "lbu" | "lhu" | "sb" | "sh" | "sw" | "addi" | "slti" | "sltiu" | "xori" | "ori" | "andi" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111111000000000000) != 0b00000000000000000000000000000000 { result = false; } } "beq" | "bne" | "blt" | "bge" | "bltu" | "bgeu" => { let imm = 
arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111110000000000001) != 0b00000000000000000000000000000000 { result = false; } } "slli" | "srli" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "srai" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b01000000000000000000000000000000 { result = false; } } _ => {} } result } /* #[wasm_bindgen::prelude::wasm_bindgen] pub fn load_machine_code(byte: u8, address: usize) -> bool { if address > CODE_RANGE_END { false } else { unsafe { MEMORY[address] = byte } true } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_to_register(register: usize, value: u32) { if register == X0 { unsafe { REGISTERS[register] = 0 } } else { unsafe { REGISTERS[register] = value } } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn get_from_register(register: usize) -> u32 { if register == X0 { unsafe { REGISTERS[register] = 0; REGISTERS[register] } } else { unsafe { REGISTERS[register] } } } fn create_instruction(byte_1: u8, byte_2: u8, byte_3: u8, byte_4: u8) -> u32 { let mut instruction = 0; instruction |= byte_1 as u32; instruction |= (byte_2 as u32) << 8; instruction |= (byte_3 as u32) << 16; instruction |= (byte_4 as u32) << 24; instruction } fn iimm_to_value(instruction: u32) -> u32 { let val_1 = (instruction & INST_20) >> 20; let val_2 = (instruction & INST_24_21) >> 21; let val_3 = (instruction & INST_30_25) >> 25; let val_4 = (instruction & INST_31) >> 31; (val_4 << 31) | (val_3 << 5) | (val_2 << 1) | val_1 } fn value_to_iimm(value: u32) -> u32 { let imm_1 = (value & INST_20) >> 20; let imm_2 = (value & INST_24_21) >> 21; let imm_3 = (value & INST_30_25) >> 25; let imm_4 = (value & INST_31) >> 31; (imm_4 << 31) | (imm_3 << 5) | (imm_2 << 1) | imm_1 } // simm // bimm // uimm // jimm fn get_opcode(instruction: u32) -> u32 { 
instruction & 0b1111111 } fn get_rd(instruction: u32) -> u32 { (instruction >> 7) & 0b11111 } fn get_funct3(instruction: u32) -> u32 { (instruction >> 12) & 0b111 } fn get_rs1(instruction: u32) -> u32 { (instruction >> 15) & 0b11111 } // Disassembler fn decode_instruction(instruction: u32) -> serde_json::Value { if (instruction & 0b11) != 0b11 { return serde_json::json!("DASM:DECODE_0B11_END") } else if (instruction & 0b11100) == 0b11100 { return serde_json::json!("DASM:DECODE_0B11100_END") } let mut operation = ""; let mut operand_1 = ""; let mut operand_2 = ""; let mut operand_3 = ""; let mut opcode = get_opcode(instruction); if opcode == OP_IMM { let rd = get_rd(instruction); let funct3 = get_funct3(instruction); let rs1 = get_rs1(instruction); let iimm = iimm_to_value(instruction); match funct3 { ADDI => operation = "ADDI", _ => return serde_json::json!("DASM:DECODE_0B11100_END") // a }; } serde_json::json!({ operation: [ operand_1, operand_2, operand_3 ] }) } // Emulator #[wasm_bindgen::prelude::wasm_bindgen] pub fn execute_instruction() -> bool { unsafe { ERROR_MESSAGE = ""; } // Fetch let current_pc = get_from_register(PC) as usize; if (current_pc + 3) > CODE_RANGE_END { unsafe { ERROR_MESSAGE = "VM:CODE_RANGE_END"; } return false } let instruction_byte_1; let instruction_byte_2; let instruction_byte_3; let instruction_byte_4; unsafe { instruction_byte_1 = MEMORY[current_pc]; instruction_byte_2 = MEMORY[current_pc + 1]; instruction_byte_3 = MEMORY[current_pc + 2]; instruction_byte_4 = MEMORY[current_pc + 3]; } let instruction = create_instruction( instruction_byte_1, instruction_byte_2, instruction_byte_3, instruction_byte_4 ); let next_pc = (current_pc as u32) + 4; // Decode /* if (instruction & 0b11) != 0b11 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11_END"; } return false } else if (i
random
[ { "content": "class Memory {\n\n xlen;\n\n space; // maximum length in bytes, 2^XLEN\n\n vcount;\n\n mrcount;\n\n regres; // reserve for registers, MRCOUNT*XLEN*VCOUNT\n\n mempow;\n\n length; // actual length, (2^MEMPOW)-REGRES\n\n buffer; // memory data\n\n\n\n constructor(xlen, vcount, mrcount, mempo...
Rust
src/env/blocking.rs
quietboil/sibyl
bbb7cb28686d9d743252f1f28fb365eaaaad21a0
use super::Environment; use crate::{Session, ConnectionPool, Result, SessionPool}; impl Environment { /** Creates and begins a session for the given server. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The user ID with which to start the sessions. * `password` - The password for the corresponding `username`. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let session = oracle.connect(&dbname, &dbuser, &dbpass)?; assert!(!session.is_async()?); assert!(session.is_connected()?); assert!(session.ping().is_ok()); let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn connect(&self, dbname: &str, username: &str, password: &str) -> Result<Session> { Session::new(self, dbname, username, password) } /** Creates new session pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of sessions in the session pool. This number of sessions will be started during pool creation. After `min` sessions are started, sessions are opened only when necessary. * `inc` - The next increment for sessions to be started if the current number of sessions is less than `max`. The valid values are 0 and higher. * `max` - The maximum number of sessions that can be opened in the session pool. After this value is reached, no more sessions are opened. The valid values are 1 and higher. 
# Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; // Create a session pool where each session will connect to the database // `dbname` and authenticate itself as `dbuser` with password `dbpass`. // Pool will have no open sessions initially. It will create 1 new session // at a time, up to the maximum of 10 sessions, when they are requested // and there are no idle sessions in the pool. let pool = oracle.create_session_pool(&dbname, &dbuser, &dbpass, 0, 1, 10)?; let session = pool.get_session()?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_session_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<SessionPool> { SessionPool::new(self, dbname, username, password, min, inc, max) } /** Creates new connection pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of connections to be opened when the pool is created. After the connection pool is created, connections are opened only when necessary. Generally, this parameter should be set to the number of concurrent statements that the application is planning or expecting to run. * `inc` - incremental number of connections to be opened when all the connections are busy and a call needs a connection. This increment is used only when the total number of open connections is less than the maximum number of connections that can be opened in that pool. 
* `max` - The maximum number of connections that can be opened to the database. When the maximum number of connections are open and all the connections are busy, if a call needs a connection, it waits until it gets one. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let pool = oracle.create_connection_pool(&dbname, &dbuser, &dbpass, 1, 1, 10)?; let session = pool.get_session(&dbuser, &dbpass)?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_connection_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<ConnectionPool> { ConnectionPool::new(self, dbname, username, password, min, inc, max) } }
use super::Environment; use crate::{Session, ConnectionPool, Result, SessionPool}; impl Environment { /** Creates and begins a session for the given server. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The user ID with which to start the sessions. * `password` - The password for the corresponding `username`. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let session = oracle.connect(&dbname, &dbuser, &dbpass)?; assert!(!session.is_async()?); assert!(session.is_connected()?); assert!(session.ping().is_ok()); let stmt = session.prepare(" SELECT DISTINCT client_driver
t_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn connect(&self, dbname: &str, username: &str, password: &str) -> Result<Session> { Session::new(self, dbname, username, password) } /** Creates new session pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of sessions in the session pool. This number of sessions will be started during pool creation. After `min` sessions are started, sessions are opened only when necessary. * `inc` - The next increment for sessions to be started if the current number of sessions is less than `max`. The valid values are 0 and higher. * `max` - The maximum number of sessions that can be opened in the session pool. After this value is reached, no more sessions are opened. The valid values are 1 and higher. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; // Create a session pool where each session will connect to the database // `dbname` and authenticate itself as `dbuser` with password `dbpass`. // Pool will have no open sessions initially. It will create 1 new session // at a time, up to the maximum of 10 sessions, when they are requested // and there are no idle sessions in the pool. 
let pool = oracle.create_session_pool(&dbname, &dbuser, &dbpass, 0, 1, 10)?; let session = pool.get_session()?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_session_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<SessionPool> { SessionPool::new(self, dbname, username, password, min, inc, max) } /** Creates new connection pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of connections to be opened when the pool is created. After the connection pool is created, connections are opened only when necessary. Generally, this parameter should be set to the number of concurrent statements that the application is planning or expecting to run. * `inc` - incremental number of connections to be opened when all the connections are busy and a call needs a connection. This increment is used only when the total number of open connections is less than the maximum number of connections that can be opened in that pool. * `max` - The maximum number of connections that can be opened to the database. When the maximum number of connections are open and all the connections are busy, if a call needs a connection, it waits until it gets one. 
# Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let pool = oracle.create_connection_pool(&dbname, &dbuser, &dbpass, 1, 1, 10)?; let session = pool.get_session(&dbuser, &dbpass)?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_connection_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<ConnectionPool> { ConnectionPool::new(self, dbname, username, password, min, inc, max) } }
FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; asser
random
[ { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n use std::{env, thread, sync::Arc};\n\n use once_cell::sync::OnceCell;\n\n use sibyl::*;\n\n\n\n static ORACLE : OnceCell<Environment> = OnceCell::new();\n\n let oracle = ORACLE.get_or_try_init(|| {\n\n env()...
Rust
botan/src/utils.rs
JustPretender/botan-rs
bdf1de579913cb0b8a07024e8a4015a3719195ee
//! FFI helper utilities shared across the botan-rs wrapper: C-string
//! conversion, the resize-and-retry buffer protocol used by the botan C API,
//! and the crate's `Error` / `ErrorType` / `KeySpec` types.

use botan_sys::*;

use core::fmt;

#[cfg(feature = "no-std")]
pub(crate) use alloc::{borrow::ToOwned, string::String, string::ToString, vec::Vec};

#[cfg(feature = "no-std")]
pub(crate) use cstr_core::{CStr, CString};

#[cfg(not(feature = "no-std"))]
pub(crate) use std::ffi::{CStr, CString};

pub(crate) use core::mem;
pub(crate) use core::ptr;
pub(crate) use cty::{c_char, c_int, c_void};

/// The result type used throughout this crate.
pub type Result<T> = ::core::result::Result<T, Error>;

/// Convert a Rust string into a NUL-terminated C string.
///
/// Fails with `ErrorType::ConversionError` if `input` contains an embedded
/// NUL byte.
pub(crate) fn make_cstr(input: &str) -> Result<CString> {
    let cstr = CString::new(input).map_err(Error::conversion_error)?;
    Ok(cstr)
}

/// Invoke a botan FFI callback that writes into a caller-provided buffer.
///
/// The callback is first tried with a buffer of `initial_size` bytes. If it
/// reports `BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE`, the callback has
/// stored the required length in `out_len`; the buffer is grown to that size
/// and the call retried exactly once. Any other non-zero return code is
/// surfaced as an `Error`.
pub(crate) fn call_botan_ffi_returning_vec_u8(
    initial_size: usize,
    cb: &dyn Fn(*mut u8, *mut usize) -> c_int,
) -> Result<Vec<u8>> {
    let mut output = vec![0; initial_size];
    let mut out_len = output.len();

    let rc = cb(output.as_mut_ptr(), &mut out_len);

    if rc == 0 {
        // Success on the first try; shrink to the length actually written.
        assert!(out_len <= output.len());
        output.resize(out_len, 0);
        return Ok(output);
    } else if rc != BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE {
        return Err(Error::from_rc(rc));
    }

    // The library told us how much space it needs; grow and retry once.
    output.resize(out_len, 0);
    let rc = cb(output.as_mut_ptr(), &mut out_len);

    if rc != 0 {
        return Err(Error::from_rc(rc));
    }

    output.resize(out_len, 0);
    Ok(output)
}

/// Interpret a NUL-terminated byte slice as an owned UTF-8 `String`.
fn cstr_slice_to_str(raw_cstr: &[u8]) -> Result<String> {
    let cstr = CStr::from_bytes_with_nul(raw_cstr).map_err(Error::conversion_error)?;
    Ok(cstr.to_str().map_err(Error::conversion_error)?.to_owned())
}

/// Copy a NUL-terminated C string into an owned `String`.
///
/// # Safety
///
/// `raw_cstr` must be non-null and point to a valid NUL-terminated string
/// that remains live for the duration of the call.
// Fixed: take `*const c_char` rather than `*const i8`. `c_char` is `u8` on
// several targets (e.g. aarch64 Linux), so hard-coding `i8` breaks the build
// there and mismatches what `CStr::from_ptr` expects.
#[cfg(feature = "botan3")]
unsafe fn cstr_to_str(raw_cstr: *const c_char) -> Result<String> {
    let cstr = CStr::from_ptr(raw_cstr);
    Ok(cstr.to_str().map_err(Error::conversion_error)?.to_owned())
}

/// Invoke a botan FFI callback that writes a NUL-terminated string into a
/// caller-provided buffer, using the same resize-and-retry protocol as
/// [`call_botan_ffi_returning_vec_u8`].
pub(crate) fn call_botan_ffi_returning_string(
    initial_size: usize,
    cb: &dyn Fn(*mut u8, *mut usize) -> c_int,
) -> Result<String> {
    let v = call_botan_ffi_returning_vec_u8(initial_size, cb)?;
    cstr_slice_to_str(&v)
}

/// An error produced by the botan library or by this wrapper.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Error {
    err_type: ErrorType,
    message: Option<String>,
}

impl Error {
    /// The general category of the error.
    pub fn error_type(&self) -> ErrorType {
        self.err_type
    }

    /// An optional human-readable message associated with the error.
    pub fn error_message(&self) -> Option<&str> {
        self.message.as_deref()
    }

    /// Build an `Error` from a raw FFI return code.
    ///
    /// With the `botan3` feature, also captures the library's last exception
    /// message when one is available and non-empty.
    pub(crate) fn from_rc(rc: c_int) -> Self {
        let err_type = ErrorType::from(rc);

        #[cfg(feature = "botan3")]
        let message = {
            let cptr = unsafe { botan_sys::botan_error_last_exception_message() };
            // Fixed: guard against a null pointer before handing it to
            // `CStr::from_ptr`, which requires a non-null argument; a null
            // here would be undefined behavior.
            if cptr.is_null() {
                None
            } else {
                match unsafe { cstr_to_str(cptr) } {
                    Ok(s) if !s.is_empty() => Some(s),
                    _ => None,
                }
            }
        };

        #[cfg(not(feature = "botan3"))]
        let message = None;

        Self { err_type, message }
    }

    /// Build an `Error` of the given type carrying an explicit message.
    pub(crate) fn with_message(err_type: ErrorType, message: String) -> Self {
        Self {
            err_type,
            message: Some(message),
        }
    }

    /// Wrap a conversion failure (e.g. embedded NUL, invalid UTF-8) as a
    /// `ConversionError`, preserving the source error's description.
    #[cfg(not(feature = "no-std"))]
    pub(crate) fn conversion_error<T: std::error::Error>(e: T) -> Self {
        Self {
            err_type: ErrorType::ConversionError,
            message: Some(format!("{}", e)),
        }
    }

    /// no-std variant: only `Display` is available for the source error.
    #[cfg(feature = "no-std")]
    pub(crate) fn conversion_error<T: core::fmt::Display>(e: T) -> Self {
        Self {
            err_type: ErrorType::ConversionError,
            message: Some(format!("{}", e)),
        }
    }
}

impl core::fmt::Display for Error {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match &self.message {
            Some(m) => write!(f, "{} ({})", self.err_type, m),
            None => write!(f, "{}", self.err_type),
        }
    }
}

/// The category of an error, mirroring the `BOTAN_FFI_ERROR_*` codes plus
/// wrapper-local conditions (`ConversionError`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ErrorType {
    BadAuthCode,
    BadFlag,
    BadParameter,
    ExceptionThrown,
    InsufficientBufferSpace,
    InternalError,
    InvalidInput,
    InvalidObject,
    InvalidObjectState,
    InvalidVerifier,
    InvalidKeyLength,
    KeyNotSet,
    NotImplemented,
    NullPointer,
    OutOfMemory,
    SystemError,
    UnknownError,
    ConversionError,
    TlsError,
    HttpError,
}

impl fmt::Display for ErrorType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // NOTE(review): the wording below is kept byte-identical to the
        // previous release (including its typos), since callers or tests may
        // match on these strings.
        let msg = match self {
            Self::BadAuthCode => "A provided authentication code was incorrect",
            Self::BadFlag => "A bad flag was passed to the library",
            Self::BadParameter => "An invalid parameter was provided to the library",
            Self::ExceptionThrown => "An exception was thrown while processing this request",
            Self::InsufficientBufferSpace => {
                "There was insufficient buffer space to write the output"
            }
            Self::InternalError => "An internal error occurred (this is a bug in the library)",
            Self::InvalidInput => "Something about the input was invalid",
            Self::InvalidObject => "An invalid object was provided to the library",
            Self::InvalidObjectState => {
                "An object was invoked in a way that is invalid for its current state"
            }
            Self::InvalidVerifier => "A verifier was incorrect",
            Self::InvalidKeyLength => "An key of invalid length was provided",
            Self::KeyNotSet => "An object was invoked without the key being set",
            Self::NotImplemented => {
                "Some functionality is not implemented in the current library version"
            }
            Self::NullPointer => "A null pointer was incorrectly provided",
            Self::OutOfMemory => "Memory exhaustion",
            Self::SystemError => "An error occurred while invoking a system API",
            Self::UnknownError => "Some unknown error occurred",
            Self::ConversionError => "An error occured while converting data to C",
            Self::TlsError => "An error occurred in TLS",
            Self::HttpError => "An error occurred during an HTTP transaction",
        };
        write!(f, "{}", msg)
    }
}

#[cfg(not(feature = "no-std"))]
impl std::error::Error for Error {}

impl From<i32> for ErrorType {
    /// Map a raw `BOTAN_FFI_ERROR_*` code to its `ErrorType`; unknown codes
    /// collapse to `UnknownError`.
    fn from(err: i32) -> Self {
        match err {
            BOTAN_FFI_ERROR_BAD_FLAG => Self::BadFlag,
            BOTAN_FFI_ERROR_BAD_MAC => Self::BadAuthCode,
            BOTAN_FFI_ERROR_BAD_PARAMETER => Self::BadParameter,
            BOTAN_FFI_ERROR_EXCEPTION_THROWN => Self::ExceptionThrown,
            BOTAN_FFI_ERROR_HTTP_ERROR => Self::HttpError,
            BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE => Self::InsufficientBufferSpace,
            BOTAN_FFI_ERROR_INTERNAL_ERROR => Self::InternalError,
            BOTAN_FFI_ERROR_INVALID_INPUT => Self::InvalidInput,
            BOTAN_FFI_ERROR_INVALID_KEY_LENGTH => Self::InvalidKeyLength,
            BOTAN_FFI_ERROR_INVALID_OBJECT => Self::InvalidObject,
            BOTAN_FFI_ERROR_INVALID_OBJECT_STATE => Self::InvalidObjectState,
            BOTAN_FFI_ERROR_KEY_NOT_SET => Self::KeyNotSet,
            BOTAN_FFI_ERROR_NOT_IMPLEMENTED => Self::NotImplemented,
            BOTAN_FFI_ERROR_NULL_POINTER => Self::NullPointer,
            BOTAN_FFI_ERROR_OUT_OF_MEMORY => Self::OutOfMemory,
            BOTAN_FFI_ERROR_SYSTEM_ERROR => Self::SystemError,
            BOTAN_FFI_ERROR_TLS_ERROR => Self::TlsError,
            BOTAN_FFI_ERROR_UNKNOWN_ERROR => Self::UnknownError,
            BOTAN_FFI_INVALID_VERIFIER => Self::InvalidVerifier,
            _ => Self::UnknownError,
        }
    }
}

/// The valid key lengths for an algorithm: any length in
/// `[min_keylen, max_keylen]` that is a multiple of `mod_keylen`.
pub struct KeySpec {
    min_keylen: usize,
    max_keylen: usize,
    mod_keylen: usize,
}

impl KeySpec {
    /// Construct a key spec, rejecting inverted ranges and a zero modulus
    /// (which would make `is_valid_keylength` divide by zero).
    pub(crate) fn new(min_keylen: usize, max_keylen: usize, mod_keylen: usize) -> Result<KeySpec> {
        if min_keylen > max_keylen || mod_keylen == 0 {
            return Err(Error::with_message(
                ErrorType::ConversionError,
                "Bad key spec".to_owned(),
            ));
        }

        Ok(KeySpec {
            min_keylen,
            max_keylen,
            mod_keylen,
        })
    }

    /// Is `keylen` within range and a multiple of the required modulus?
    #[must_use]
    pub fn is_valid_keylength(&self, keylen: usize) -> bool {
        keylen >= self.min_keylen && keylen <= self.max_keylen && keylen % self.mod_keylen == 0
    }

    /// The smallest accepted key length, in bytes.
    #[must_use]
    pub fn minimum_keylength(&self) -> usize {
        self.min_keylen
    }

    /// The largest accepted key length, in bytes.
    #[must_use]
    pub fn maximum_keylength(&self) -> usize {
        self.max_keylen
    }

    /// Accepted key lengths must be a multiple of this value.
    #[must_use]
    pub fn keylength_multiple(&self) -> usize {
        self.mod_keylen
    }
}
use botan_sys::*; use core::fmt; #[cfg(feature = "no-std")] pub(crate) use alloc::{borrow::ToOwned, string::String, string::ToString, vec::Vec}; #[cfg(feature = "no-std")] pub(crate) use cstr_core::{CStr, CString}; #[cfg(not(feature = "no-std"))] pub(crate) use std::ffi::{CStr, CString}; pub(crate) use core::mem; pub(crate) use core::ptr; pub(crate) use cty::{c_char, c_int, c_void}; pub type Result<T> = ::core::result::Result<T, Error>; pub(crate) fn make_cstr(input: &str) -> Result<CString> { let cstr = CString::new(input).map_err(Error::conversion_error)?; Ok(cstr)
=> Self::NullPointer, BOTAN_FFI_ERROR_OUT_OF_MEMORY => Self::OutOfMemory, BOTAN_FFI_ERROR_SYSTEM_ERROR => Self::SystemError, BOTAN_FFI_ERROR_TLS_ERROR => Self::TlsError, BOTAN_FFI_ERROR_UNKNOWN_ERROR => Self::UnknownError, BOTAN_FFI_INVALID_VERIFIER => Self::InvalidVerifier, _ => Self::UnknownError, } } } pub struct KeySpec { min_keylen: usize, max_keylen: usize, mod_keylen: usize, } impl KeySpec { pub(crate) fn new(min_keylen: usize, max_keylen: usize, mod_keylen: usize) -> Result<KeySpec> { if min_keylen > max_keylen || mod_keylen == 0 { return Err(Error::with_message( ErrorType::ConversionError, "Bad key spec".to_owned(), )); } Ok(KeySpec { min_keylen, max_keylen, mod_keylen, }) } #[must_use] pub fn is_valid_keylength(&self, keylen: usize) -> bool { keylen >= self.min_keylen && keylen <= self.max_keylen && keylen % self.mod_keylen == 0 } #[must_use] pub fn minimum_keylength(&self) -> usize { self.min_keylen } #[must_use] pub fn maximum_keylength(&self) -> usize { self.max_keylen } #[must_use] pub fn keylength_multiple(&self) -> usize { self.mod_keylen } }
} pub(crate) fn call_botan_ffi_returning_vec_u8( initial_size: usize, cb: &dyn Fn(*mut u8, *mut usize) -> c_int, ) -> Result<Vec<u8>> { let mut output = vec![0; initial_size]; let mut out_len = output.len(); let rc = cb(output.as_mut_ptr(), &mut out_len); if rc == 0 { assert!(out_len <= output.len()); output.resize(out_len, 0); return Ok(output); } else if rc != BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE { return Err(Error::from_rc(rc)); } output.resize(out_len, 0); let rc = cb(output.as_mut_ptr(), &mut out_len); if rc != 0 { return Err(Error::from_rc(rc)); } output.resize(out_len, 0); Ok(output) } fn cstr_slice_to_str(raw_cstr: &[u8]) -> Result<String> { let cstr = CStr::from_bytes_with_nul(raw_cstr).map_err(Error::conversion_error)?; Ok(cstr.to_str().map_err(Error::conversion_error)?.to_owned()) } #[cfg(feature = "botan3")] unsafe fn cstr_to_str(raw_cstr: *const i8) -> Result<String> { let cstr = CStr::from_ptr(raw_cstr); Ok(cstr.to_str().map_err(Error::conversion_error)?.to_owned()) } pub(crate) fn call_botan_ffi_returning_string( initial_size: usize, cb: &dyn Fn(*mut u8, *mut usize) -> c_int, ) -> Result<String> { let v = call_botan_ffi_returning_vec_u8(initial_size, cb)?; cstr_slice_to_str(&v) } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Error { err_type: ErrorType, message: Option<String>, } impl Error { pub fn error_type(&self) -> ErrorType { self.err_type } pub fn error_message(&self) -> Option<&str> { self.message.as_deref() } pub(crate) fn from_rc(rc: c_int) -> Self { let err_type = ErrorType::from(rc); #[cfg(feature = "botan3")] let message = { let cptr = unsafe { botan_sys::botan_error_last_exception_message() }; match unsafe { cstr_to_str(cptr) } { Err(_) => None, Ok(s) if s.len() > 0 => Some(s), Ok(_) => None, } }; #[cfg(not(feature = "botan3"))] let message = None; Self { err_type, message } } pub(crate) fn with_message(err_type: ErrorType, message: String) -> Self { Self { err_type, message: Some(message), } } #[cfg(not(feature = "no-std"))] 
pub(crate) fn conversion_error<T: std::error::Error>(e: T) -> Self { Self { err_type: ErrorType::ConversionError, message: Some(format!("{}", e)), } } #[cfg(feature = "no-std")] pub(crate) fn conversion_error<T: core::fmt::Display>(e: T) -> Self { Self { err_type: ErrorType::ConversionError, message: Some(format!("{}", e)), } } } impl core::fmt::Display for Error { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { match &self.message { Some(m) => write!(f, "{} ({})", self.err_type, m), None => write!(f, "{}", self.err_type), } } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum ErrorType { BadAuthCode, BadFlag, BadParameter, ExceptionThrown, InsufficientBufferSpace, InternalError, InvalidInput, InvalidObject, InvalidObjectState, InvalidVerifier, InvalidKeyLength, KeyNotSet, NotImplemented, NullPointer, OutOfMemory, SystemError, UnknownError, ConversionError, TlsError, HttpError, } impl fmt::Display for ErrorType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let msg = match self { Self::BadAuthCode => "A provided authentication code was incorrect", Self::BadFlag => "A bad flag was passed to the library", Self::BadParameter => "An invalid parameter was provided to the library", Self::ExceptionThrown => "An exception was thrown while processing this request", Self::InsufficientBufferSpace => { "There was insufficient buffer space to write the output" } Self::InternalError => "An internal error occurred (this is a bug in the library)", Self::InvalidInput => "Something about the input was invalid", Self::InvalidObject => "An invalid object was provided to the library", Self::InvalidObjectState => { "An object was invoked in a way that is invalid for its current state" } Self::InvalidVerifier => "A verifier was incorrect", Self::InvalidKeyLength => "An key of invalid length was provided", Self::KeyNotSet => "An object was invoked without the key being set", Self::NotImplemented => { "Some functionality is not implemented in the current 
library version" } Self::NullPointer => "A null pointer was incorrectly provided", Self::OutOfMemory => "Memory exhaustion", Self::SystemError => "An error occurred while invoking a system API", Self::UnknownError => "Some unknown error occurred", Self::ConversionError => "An error occured while converting data to C", Self::TlsError => "An error occurred in TLS", Self::HttpError => "An error occurred during an HTTP transaction", }; write!(f, "{}", msg) } } #[cfg(not(feature = "no-std"))] impl std::error::Error for Error {} impl From<i32> for ErrorType { fn from(err: i32) -> Self { match err { BOTAN_FFI_ERROR_BAD_FLAG => Self::BadFlag, BOTAN_FFI_ERROR_BAD_MAC => Self::BadAuthCode, BOTAN_FFI_ERROR_BAD_PARAMETER => Self::BadParameter, BOTAN_FFI_ERROR_EXCEPTION_THROWN => Self::ExceptionThrown, BOTAN_FFI_ERROR_HTTP_ERROR => Self::HttpError, BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE => Self::InsufficientBufferSpace, BOTAN_FFI_ERROR_INTERNAL_ERROR => Self::InternalError, BOTAN_FFI_ERROR_INVALID_INPUT => Self::InvalidInput, BOTAN_FFI_ERROR_INVALID_KEY_LENGTH => Self::InvalidKeyLength, BOTAN_FFI_ERROR_INVALID_OBJECT => Self::InvalidObject, BOTAN_FFI_ERROR_INVALID_OBJECT_STATE => Self::InvalidObjectState, BOTAN_FFI_ERROR_KEY_NOT_SET => Self::KeyNotSet, BOTAN_FFI_ERROR_NOT_IMPLEMENTED => Self::NotImplemented, BOTAN_FFI_ERROR_NULL_POINTER
random
[ { "content": "/// Verify a bcrypt password hash\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let mut rng = botan::RandomNumberGenerator::new().unwrap();\n\n/// let bcrypt = botan::bcrypt_hash(\"password\", &mut rng, 10).unwrap();\n\n/// assert_eq!(botan::bcrypt_verify(\"not even close\", &bcrypt), Ok(false...
Rust
src/ledc/lstimer3_conf.rs
ForsakenHarmony/esp32c3-pac
7d9eb9a5b5a51077d1d1eb6c6efd186064b7149b
#[doc = "Reader of register LSTIMER3_CONF"] pub type R = crate::R<u32, super::LSTIMER3_CONF>; #[doc = "Writer for register LSTIMER3_CONF"] pub type W = crate::W<u32, super::LSTIMER3_CONF>; #[doc = "Register LSTIMER3_CONF `reset()`'s with value 0"] impl crate::ResetValue for super::LSTIMER3_CONF { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `LSTIMER3_PARA_UP`"] pub struct LSTIMER3_PARA_UP_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_PARA_UP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Reader of field `TICK_SEL_LSTIMER3`"] pub type TICK_SEL_LSTIMER3_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TICK_SEL_LSTIMER3`"] pub struct TICK_SEL_LSTIMER3_W<'a> { w: &'a mut W, } impl<'a> TICK_SEL_LSTIMER3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `LSTIMER3_RST`"] pub type LSTIMER3_RST_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LSTIMER3_RST`"] pub struct LSTIMER3_RST_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_RST_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes 
raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23); self.w } } #[doc = "Reader of field `LSTIMER3_PAUSE`"] pub type LSTIMER3_PAUSE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LSTIMER3_PAUSE`"] pub struct LSTIMER3_PAUSE_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_PAUSE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22); self.w } } #[doc = "Reader of field `CLK_DIV_LSTIMER3`"] pub type CLK_DIV_LSTIMER3_R = crate::R<u32, u32>; #[doc = "Write proxy for field `CLK_DIV_LSTIMER3`"] pub struct CLK_DIV_LSTIMER3_W<'a> { w: &'a mut W, } impl<'a> CLK_DIV_LSTIMER3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0003_ffff << 4)) | (((value as u32) & 0x0003_ffff) << 4); self.w } } #[doc = "Reader of field `LSTIMER3_DUTY_RES`"] pub type LSTIMER3_DUTY_RES_R = crate::R<u8, u8>; #[doc = "Write proxy for field `LSTIMER3_DUTY_RES`"] pub struct LSTIMER3_DUTY_RES_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_DUTY_RES_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } impl R { #[doc = "Bit 24"] #[inline(always)] pub fn tick_sel_lstimer3(&self) -> TICK_SEL_LSTIMER3_R { TICK_SEL_LSTIMER3_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 23"] #[inline(always)] pub fn lstimer3_rst(&self) -> LSTIMER3_RST_R { LSTIMER3_RST_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 22"] 
#[inline(always)] pub fn lstimer3_pause(&self) -> LSTIMER3_PAUSE_R { LSTIMER3_PAUSE_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bits 4:21"] #[inline(always)] pub fn clk_div_lstimer3(&self) -> CLK_DIV_LSTIMER3_R { CLK_DIV_LSTIMER3_R::new(((self.bits >> 4) & 0x0003_ffff) as u32) } #[doc = "Bits 0:3"] #[inline(always)] pub fn lstimer3_duty_res(&self) -> LSTIMER3_DUTY_RES_R { LSTIMER3_DUTY_RES_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bit 25"] #[inline(always)] pub fn lstimer3_para_up(&mut self) -> LSTIMER3_PARA_UP_W { LSTIMER3_PARA_UP_W { w: self } } #[doc = "Bit 24"] #[inline(always)] pub fn tick_sel_lstimer3(&mut self) -> TICK_SEL_LSTIMER3_W { TICK_SEL_LSTIMER3_W { w: self } } #[doc = "Bit 23"] #[inline(always)] pub fn lstimer3_rst(&mut self) -> LSTIMER3_RST_W { LSTIMER3_RST_W { w: self } } #[doc = "Bit 22"] #[inline(always)] pub fn lstimer3_pause(&mut self) -> LSTIMER3_PAUSE_W { LSTIMER3_PAUSE_W { w: self } } #[doc = "Bits 4:21"] #[inline(always)] pub fn clk_div_lstimer3(&mut self) -> CLK_DIV_LSTIMER3_W { CLK_DIV_LSTIMER3_W { w: self } } #[doc = "Bits 0:3"] #[inline(always)] pub fn lstimer3_duty_res(&mut self) -> LSTIMER3_DUTY_RES_W { LSTIMER3_DUTY_RES_W { w: self } } }
#[doc = "Reader of register LSTIMER3_CONF"] pub type R = crate::R<u32, super::LSTIMER3_CONF>; #[doc = "Writer for register LSTIMER3_CONF"] pub type W = crate::W<u32, super::LSTIMER3_CONF>; #[doc = "Register LSTIMER3_CONF `reset()`'s with value 0"] impl crate::ResetValue for super::LSTIMER3_CONF { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `LSTIMER3_PARA_UP`"] pub struct LSTIMER3_PARA_UP_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_PARA_UP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn
[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22); self.w } } #[doc = "Reader of field `CLK_DIV_LSTIMER3`"] pub type CLK_DIV_LSTIMER3_R = crate::R<u32, u32>; #[doc = "Write proxy for field `CLK_DIV_LSTIMER3`"] pub struct CLK_DIV_LSTIMER3_W<'a> { w: &'a mut W, } impl<'a> CLK_DIV_LSTIMER3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0003_ffff << 4)) | (((value as u32) & 0x0003_ffff) << 4); self.w } } #[doc = "Reader of field `LSTIMER3_DUTY_RES`"] pub type LSTIMER3_DUTY_RES_R = crate::R<u8, u8>; #[doc = "Write proxy for field `LSTIMER3_DUTY_RES`"] pub struct LSTIMER3_DUTY_RES_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_DUTY_RES_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } impl R { #[doc = "Bit 24"] #[inline(always)] pub fn tick_sel_lstimer3(&self) -> TICK_SEL_LSTIMER3_R { TICK_SEL_LSTIMER3_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 23"] #[inline(always)] pub fn lstimer3_rst(&self) -> LSTIMER3_RST_R { LSTIMER3_RST_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 22"] #[inline(always)] pub fn lstimer3_pause(&self) -> LSTIMER3_PAUSE_R { LSTIMER3_PAUSE_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bits 4:21"] #[inline(always)] pub fn clk_div_lstimer3(&self) -> CLK_DIV_LSTIMER3_R { CLK_DIV_LSTIMER3_R::new(((self.bits >> 4) & 0x0003_ffff) as u32) } #[doc = "Bits 0:3"] #[inline(always)] pub fn lstimer3_duty_res(&self) -> LSTIMER3_DUTY_RES_R { LSTIMER3_DUTY_RES_R::new((self.bits & 0x0f) as u8) } } impl 
W { #[doc = "Bit 25"] #[inline(always)] pub fn lstimer3_para_up(&mut self) -> LSTIMER3_PARA_UP_W { LSTIMER3_PARA_UP_W { w: self } } #[doc = "Bit 24"] #[inline(always)] pub fn tick_sel_lstimer3(&mut self) -> TICK_SEL_LSTIMER3_W { TICK_SEL_LSTIMER3_W { w: self } } #[doc = "Bit 23"] #[inline(always)] pub fn lstimer3_rst(&mut self) -> LSTIMER3_RST_W { LSTIMER3_RST_W { w: self } } #[doc = "Bit 22"] #[inline(always)] pub fn lstimer3_pause(&mut self) -> LSTIMER3_PAUSE_W { LSTIMER3_PAUSE_W { w: self } } #[doc = "Bits 4:21"] #[inline(always)] pub fn clk_div_lstimer3(&mut self) -> CLK_DIV_LSTIMER3_W { CLK_DIV_LSTIMER3_W { w: self } } #[doc = "Bits 0:3"] #[inline(always)] pub fn lstimer3_duty_res(&mut self) -> LSTIMER3_DUTY_RES_W { LSTIMER3_DUTY_RES_W { w: self } } }
bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Reader of field `TICK_SEL_LSTIMER3`"] pub type TICK_SEL_LSTIMER3_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TICK_SEL_LSTIMER3`"] pub struct TICK_SEL_LSTIMER3_W<'a> { w: &'a mut W, } impl<'a> TICK_SEL_LSTIMER3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `LSTIMER3_RST`"] pub type LSTIMER3_RST_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LSTIMER3_RST`"] pub struct LSTIMER3_RST_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_RST_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23); self.w } } #[doc = "Reader of field `LSTIMER3_PAUSE`"] pub type LSTIMER3_PAUSE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LSTIMER3_PAUSE`"] pub struct LSTIMER3_PAUSE_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_PAUSE_W<'a> { #[doc = r"Sets the field bit"] #
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Res...
Rust
src/node_state/mod.rs
frugalos/raftlog
25d663b3e8eda35224cd666e1f1ef05b44ace884
//! Raft node state machine: a `NodeState` owns the role-independent state
//! (`Common`) plus the current role (`RoleState`), and is driven as a
//! `futures 0.1` `Stream` of raft `Event`s.
use futures::{Async, Poll, Stream};
use std::time::Instant;

pub use self::common::Common;

use self::candidate::Candidate;
use self::common::HandleMessageResult;
use self::follower::Follower;
use self::leader::Leader;
use self::loader::Loader;
use crate::cluster::ClusterConfig;
use crate::message::Message;
use crate::metrics::NodeStateMetrics;
use crate::node::NodeId;
use crate::{Error, Event, Io, Result};

mod candidate;
mod common;
mod follower;
mod leader;
mod loader;

// Role handlers return `Some(next_role)` to request a role transition,
// `None` to stay in the current role.
type NextState<IO> = Option<RoleState<IO>>;

/// The full state of a single raft node.
pub struct NodeState<IO: Io> {
    // Role-independent state shared by every role handler.
    pub common: Common<IO>,
    // The currently active role (loader / follower / candidate / leader).
    pub role: RoleState<IO>,
    // When the current role was entered; used to observe transition
    // durations into `metrics` (see `handle_role_change`).
    started_at: Instant,
    pub metrics: NodeStateMetrics,
}
impl<IO: Io> NodeState<IO> {
    /// Creates a node that starts in the `Loader` role, recovering its
    /// persisted state before participating in the cluster.
    pub fn load(node_id: NodeId, config: ClusterConfig, io: IO, metrics: NodeStateMetrics) -> Self {
        let mut common = Common::new(node_id, io, config, metrics.clone());
        let role = RoleState::Loader(Loader::new(&mut common));
        let started_at = Instant::now();
        NodeState {
            common,
            role,
            started_at,
            metrics,
        }
    }
    /// `true` while the node is still in the initial `Loader` role.
    pub fn is_loading(&self) -> bool {
        self.role.is_loader()
    }
    /// Forces an election by firing the follower's timeout handler
    /// immediately. A no-op in any role other than `Follower`.
    pub fn start_election(&mut self) {
        if let RoleState::Follower(follower) = &mut self.role {
            let next = follower.handle_timeout(&mut self.common);
            // NOTE(review): `track_try_unwrap!` panics on error here rather
            // than propagating it — confirm that is the intended policy.
            let next = track_try_unwrap!(next);
            if let Some(next) = next {
                self.handle_role_change(next);
            }
        }
    }
    // Dispatches an expired timeout to the current role's handler.
    fn handle_timeout(&mut self) -> Result<Option<RoleState<IO>>> {
        match self.role {
            RoleState::Loader(ref mut t) => track!(t.handle_timeout(&mut self.common)),
            RoleState::Follower(ref mut t) => track!(t.handle_timeout(&mut self.common)),
            RoleState::Candidate(ref mut t) => track!(t.handle_timeout(&mut self.common)),
            RoleState::Leader(ref mut t) => track!(t.handle_timeout(&mut self.common)),
        }
    }
    // Routes an incoming message: `Common` gets first crack at it (it may
    // itself demand a role change); anything it leaves unhandled goes to the
    // current role's handler. Messages are dropped while still loading.
    fn handle_message(&mut self, message: Message) -> Result<Option<RoleState<IO>>> {
        if let RoleState::Loader(_) = self.role {
            return Ok(None);
        }
        match self.common.handle_message(message) {
            HandleMessageResult::Handled(next) => Ok(next),
            HandleMessageResult::Unhandled(message) => match self.role {
                // Unreachable: the Loader case returned above.
                RoleState::Loader(_) => unreachable!(),
                RoleState::Follower(ref mut t) => {
                    track!(t.handle_message(&mut self.common, message))
                }
                RoleState::Candidate(ref mut t) => {
                    track!(t.handle_message(&mut self.common, &message))
                }
                RoleState::Leader(ref mut t) => track!(t.handle_message(&mut self.common, message)),
            },
        }
    }
    // Installs `next` as the current role, recording how long the previous
    // role lasted for the specific transitions that have a metric. Staying
    // in the same role leaves `started_at` untouched; any other transition
    // restarts the clock.
    fn handle_role_change(&mut self, next: RoleState<IO>) {
        match (&self.role, &next) {
            (RoleState::Candidate(_), RoleState::Leader(_)) => {
                let elapsed =
                    prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
                self.metrics
                    .candidate_to_leader_duration_seconds
                    .observe(elapsed);
                self.started_at = Instant::now();
            }
            (RoleState::Candidate(_), RoleState::Follower(_)) => {
                let elapsed =
                    prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
                self.metrics
                    .candidate_to_follower_duration_seconds
                    .observe(elapsed);
                self.started_at = Instant::now();
            }
            (RoleState::Loader(_), RoleState::Candidate(_)) => {
                let elapsed =
                    prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
                self.metrics
                    .loader_to_candidate_duration_seconds
                    .observe(elapsed);
                self.started_at = Instant::now();
            }
            // Same-role "transitions" keep the original start time.
            (RoleState::Leader(_), RoleState::Leader(_))
            | (RoleState::Follower(_), RoleState::Follower(_))
            | (RoleState::Candidate(_), RoleState::Candidate(_))
            | (RoleState::Loader(_), RoleState::Loader(_)) => {}
            _ => self.started_at = Instant::now(),
        }
        self.role = next;
    }
}
impl<IO: Io> Stream for NodeState<IO> {
    type Item = Event;
    type Error = Error;

    // Drives the node one step: drains pending events first, then gives
    // timeouts, `Common`, the role handler, and incoming messages a chance
    // to make progress — in that order. The loop repeats while any of them
    // did something, and yields `NotReady` only when nothing can progress.
    // The event queue is checked after every sub-step so events are emitted
    // in the order they were produced.
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        let mut did_something = true;
        while did_something {
            did_something = false;

            if let Some(e) = self.common.next_event() {
                return Ok(Async::Ready(Some(e)));
            }
            if let Async::Ready(()) = track!(self.common.poll_timeout())? {
                did_something = true;
                self.metrics.poll_timeout_total.increment();
                if let Some(next) = track!(self.handle_timeout())? {
                    self.handle_role_change(next);
                }
                if let Some(e) = self.common.next_event() {
                    return Ok(Async::Ready(Some(e)));
                }
            }
            if let Some(next) = track!(self.common.run_once())? {
                did_something = true;
                self.handle_role_change(next);
            }
            if let Some(e) = self.common.next_event() {
                return Ok(Async::Ready(Some(e)));
            }
            let result = match self.role {
                RoleState::Loader(ref mut t) => track!(t.run_once(&mut self.common))?,
                RoleState::Follower(ref mut t) => track!(t.run_once(&mut self.common))?,
                RoleState::Candidate(ref mut t) => track!(t.run_once(&mut self.common))?,
                RoleState::Leader(ref mut t) => track!(t.run_once(&mut self.common))?,
            };
            if let Some(next) = result {
                did_something = true;
                self.handle_role_change(next);
            }
            if let Some(e) = self.common.next_event() {
                return Ok(Async::Ready(Some(e)));
            }
            if let Some(message) = track!(self.common.try_recv_message())? {
                did_something = true;
                if let Some(next) = track!(self.handle_message(message))? {
                    self.handle_role_change(next);
                }
                if let Some(e) = self.common.next_event() {
                    return Ok(Async::Ready(Some(e)));
                }
            }
        }
        Ok(Async::NotReady)
    }
}

/// The role a raft node is currently playing, with its role-specific state.
pub enum RoleState<IO: Io> {
    /// Recovering persisted state at startup.
    Loader(Loader<IO>),
    /// Follower (passive member).
    Follower(Follower<IO>),
    /// Candidate (running for leadership).
    Candidate(Candidate<IO>),
    /// Leader.
    Leader(Leader<IO>),
}
impl<IO: Io> RoleState<IO> {
    /// Returns `true` if this role is `Loader`.
    pub fn is_loader(&self) -> bool {
        matches!(self, RoleState::Loader(_))
    }

    /// Returns `true` if this role is `Candidate` (test-only helper).
    #[cfg(test)]
    pub fn is_candidate(&self) -> bool {
        matches!(self, RoleState::Candidate(_))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use prometrics::metrics::MetricBuilder;

    use crate::test_util::tests::TestIoBuilder;

    #[test]
    fn node_state_is_loading_works() {
        let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails");
        let io = TestIoBuilder::new().finish();
        let cluster = io.cluster.clone();
        let node = NodeState::load("test".into(), cluster, io, metrics);
        assert!(node.is_loading());
    }

    #[test]
    fn role_state_is_loader_works() {
        let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails");
        let io = TestIoBuilder::new().finish();
        let cluster = io.cluster.clone();
        let mut common = Common::new("test".into(), io, cluster, metrics);
        let state = RoleState::Loader(Loader::new(&mut common));
        assert!(state.is_loader());
        assert!(!state.is_candidate());
    }

    #[test]
    fn role_state_is_candidate_works() {
        let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails");
        let io = TestIoBuilder::new().finish();
        let cluster = io.cluster.clone();
        let mut common = Common::new("test".into(), io, cluster, metrics);
        let state = RoleState::Candidate(Candidate::new(&mut common));
        assert!(!state.is_loader());
        assert!(state.is_candidate());
    }
}
use futures::{Async, Poll, Stream}; use std::time::Instant; pub use self::common::Common; use self::candidate::Candidate; use self::common::HandleMessageResult; use self::follower::Follower; use self::leader::Leader; use self::loader::Loader; use crate::cluster::ClusterConfig; use crate::message::Message; use crate::metrics::NodeStateMetrics; use crate::node::NodeId; use crate::{Error, Event, Io, Result}; mod candidate; mod common; mod follower; mod leader; mod loader; type NextState<IO> = Option<RoleState<IO>>; pub struct NodeState<IO: Io> { pub common: Common<IO>, pub role: RoleState<IO>, started_at: Instant, pub metrics: NodeStateMetrics, } impl<IO: Io> NodeState<IO> { pub fn load(node_id: NodeId, config: ClusterConfig, io: IO, metrics: NodeStateMetrics) -> Self { let mut common = Common::new(node_id, io, config, metrics.clone()); let role = RoleState::Loader(Loader::new(&mut common)); let started_at = Instant::now(); NodeState { common, role, started_at, metrics, } } pub fn is_loading(&self) -> bool { self.role.is_loader() } pub fn start_election(&mut self) { if let RoleState::Follower(follower) = &mut self.role { let next = follower.handle_timeout(&mut self.common); let next = track_try_unwrap!(next); if let Some(next) = next { self.handle_role_change(next); } } } fn handle_timeout(&mut self) -> Result<Option<RoleState<IO>>> { match self.role { RoleState::Loader(ref mut t) => track!(t.handle_timeout(&mut self.common)), RoleState::Follower(ref mut t) => track!(t.handle_timeout(&mut self.common)), RoleState::Candidate(ref mut t) => track!(t.handle_timeout(&mut self.common)), RoleState::Leader(ref mut t) => track!(t.handle_timeout(&mut self.common)), } } fn handle_message(&mut self, message: Message) -> Result<Option<RoleState<IO>>> { if let RoleState::Loader(_) = self.role { return Ok(None); } match self.common.handle_message(message) { HandleMessageResult::Handled(next) => Ok(next), HandleMessageResult::Unhandled(message) => match self.role { 
RoleState::Loader(_) => unreachable!(), RoleState::Follower(ref mut t) => { track!(t.handle_message(&mut self.common, message)) } RoleState::Candidate(ref mut t) => { track!(t.handle_message(&mut self.common, &message)) } RoleState::Leader(ref mut t) => track!(t.handle_message(&mut self.common, message)), }, } } fn handle_role_change(&mut self, next: RoleState<IO>) { match (&self.role, &next) { (RoleState::Candidate(_), RoleState::Leader(_)) => { let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed()); self.metrics .candidate_to_leader_duration_seconds .observe(elapsed); self.started_at = Instant::now(); } (RoleState::Candidate(_), RoleState::Follower(_)) => { let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed()); self.metrics .candidate_to_follower_duration_seconds .observe(elapsed); self.started_at = Instant::now(); } (RoleState::Loader(_), RoleState::Candidate(_)) => { let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed()); self.metrics .loader_to_candidate_duration_seconds .observe(elapsed); self.started_at = Instant::now(); } (RoleState::Leader(_), RoleState::Leader(_)) | (RoleState::Follower(_), RoleState::Follower(_)) | (RoleState::Candidate(_), RoleState::Candidate(_)) | (RoleState::Loader(_), RoleState::Loader(_)) => {} _ => self.started_at = Instant::now(), } self.role = next; } } impl<IO: Io> Stream for NodeState<IO> { type Item = Event; type Error = Error; fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { let mut did_something = true; while did_something { did_something = false; if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } if let Async::Ready(()) = track!(self.common.poll_timeout())? { did_something = true; self.metrics.poll_timeout_total.increment(); if let Some(next) = track!(self.handle_timeout())? 
{ self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } } if let Some(next) = track!(self.common.run_once())? { did_something = true; self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } let result = match self.role { RoleState::Loader(ref mut t) => track!(t.run_once(&mut self.common))?, RoleState::Follower(ref mut t) => track!(t.run_once(&mut self.common))?, RoleState::Candidate(ref mut t) => track!(t.run_once(&mut self.common))?, RoleState::Leader(ref mut t) => track!(t.run_once(&mut self.common))?, }; if let Some(next) = result { did_something = true; self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } if l
Leader(Leader<IO>), } impl<IO: Io> RoleState<IO> { pub fn is_loader(&self) -> bool { matches!(self, RoleState::Loader(_)) } #[cfg(test)] pub fn is_candidate(&self) -> bool { matches!(self, RoleState::Candidate(_)) } } #[cfg(test)] mod tests { use super::*; use prometrics::metrics::MetricBuilder; use crate::test_util::tests::TestIoBuilder; #[test] fn node_state_is_loading_works() { let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails"); let io = TestIoBuilder::new().finish(); let cluster = io.cluster.clone(); let node = NodeState::load("test".into(), cluster, io, metrics); assert!(node.is_loading()); } #[test] fn role_state_is_loader_works() { let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails"); let io = TestIoBuilder::new().finish(); let cluster = io.cluster.clone(); let mut common = Common::new("test".into(), io, cluster, metrics); let state = RoleState::Loader(Loader::new(&mut common)); assert!(state.is_loader()); assert!(!state.is_candidate()); } #[test] fn role_state_is_candidate_works() { let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails"); let io = TestIoBuilder::new().finish(); let cluster = io.cluster.clone(); let mut common = Common::new("test".into(), io, cluster, metrics); let state = RoleState::Candidate(Candidate::new(&mut common)); assert!(!state.is_loader()); assert!(state.is_candidate()); } }
et Some(message) = track!(self.common.try_recv_message())? { did_something = true; if let Some(next) = track!(self.handle_message(message))? { self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } } } Ok(Async::NotReady) } } pub enum RoleState<IO: Io> { Loader(Loader<IO>), Follower(Follower<IO>), Candidate(Candidate<IO>),
random
[ { "content": "fn make_role_change_histogram(builder: &mut HistogramBuilder) -> Result<Histogram> {\n\n builder\n\n .bucket(0.001)\n\n .bucket(0.005)\n\n .bucket(0.01)\n\n .bucket(0.05)\n\n .bucket(0.1)\n\n .bucket(0.2)\n\n .bucket(0.4)\n\n .bucket(0.6)\...
Rust
src/algebra/geometry.rs
hsnavarro/retrogame-rust
bc62342d95001b70a1d11c822f2722cfc0b73fec
use crate::algebra::{Vec2f, cross_product, distance, dot_product}; use crate::shapes; pub fn is_point_inside_rect(rect: &shapes::Rect, point: Vec2f) -> bool { let rect_points = rect.get_points_clockwise(); for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); if cross_product(rect_points[j] - rect_points[i], point - rect_points[i]) < 0.0 { return false; } } true } pub fn closest_to_point_in_rect_border(rect: &shapes::Rect, point: Vec2f) -> Vec2f { let mut min_distance = f64::MAX; let mut closest_point = Vec2f::new(); let rect_points = rect.get_points_clockwise(); let mut update_closest_point = |rect_point: Vec2f| { let distance = distance(rect_point, point); if distance < min_distance { min_distance = distance; closest_point = rect_point; } }; for rect_point in rect_points.iter() { update_closest_point(*rect_point); } for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); let rect_side = rect_points[j] - rect_points[i]; let projected_vector = (point - rect_points[i]).projection(rect_side); if projected_vector.magnitude() > rect_side.magnitude() || dot_product(projected_vector, rect_side) < 0.0 { continue; } let projected_point = rect_points[i] + projected_vector; update_closest_point(projected_point); } closest_point } #[cfg(test)] mod tests { use super::*; use crate::algebra::Vec2f; use crate::shapes::Rect; use sdl2::pixels::Color; mod is_point_inside_rect_tests { use super::*; #[test] fn point_outside_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; assert!(is_point_inside_rect(&rect, point) == false); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 20.0 }; assert!(is_point_inside_rect(&rect, point) == true); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 2.0, y: 15.0 }; 
assert!(is_point_inside_rect(&rect, point) == true); } } mod closest_to_point_in_rect_border_tests { use super::*; #[test] fn closest_is_corner() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; let ans = Vec2f { x: 22.0, y: 20.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn closest_is_side() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 6.0, y: 4.0 }; let ans = Vec2f { x: 6.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 13.0 }; let ans = Vec2f { x: 10.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 12.0, y: 10.0 }; let ans = Vec2f { x: 12.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } } }
use crate::algebra::{Vec2f, cross_product, distance, dot_product}; use crate::shapes; pub fn is_point_inside_rect(rect: &shapes::Rect, point: Vec2f) -> bool { let rect_points = rect.get_points_clockwise(); for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); if cross_product(rect_points[j] - rect_points[i], point - rect_points[i]) < 0.0 { return false; } } true } pub fn closest_to_point_in_rect_border(rect: &shapes::Rect, point: Vec2f) -> Vec2f { let mut min_distance = f64::MAX; let mut closest_point = Vec2f::new(); let rect_points = rect.get_points_clockwise(); let mut update_closest_point = |rect_point: Vec2f| { let distance = distance(rect_point, point); if distance < min_distance { min_distance = distance; closest_point = rect_point; } }; for rect_point in rect_points.iter() { update_closest_point(*rect_point); } for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); let rect_side = rect_points[j] - rect_points[i]; let projected_vector = (point - rect_points[i]).projection(rect_side); if projected_vector.magnitude() > rect_side.magnitude() || dot_product(projected_vector, rect_side) < 0.0 { continue; } let projected_point = rect_points[i] + projected_vector; update_closest_point(projected_point); } closest_point } #[cfg(test)] mod tests { use super::*; use crate::algebra::Vec2f; use crate::shapes::Rect; use sdl2::pixels::Color; mod is_point_inside_rect_tests { use super::*; #[test] fn point_outs
t), ans); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 12.0, y: 10.0 }; let ans = Vec2f { x: 12.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } } }
ide_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; assert!(is_point_inside_rect(&rect, point) == false); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 20.0 }; assert!(is_point_inside_rect(&rect, point) == true); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 2.0, y: 15.0 }; assert!(is_point_inside_rect(&rect, point) == true); } } mod closest_to_point_in_rect_border_tests { use super::*; #[test] fn closest_is_corner() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; let ans = Vec2f { x: 22.0, y: 20.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn closest_is_side() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 6.0, y: 4.0 }; let ans = Vec2f { x: 6.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 13.0 }; let ans = Vec2f { x: 10.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, poin
random
[ { "content": "pub fn distance(x: Vec2f, y: Vec2f) -> f64 {\n\n (x - y).square_magnitude()\n\n}\n\n\n", "file_path": "src/algebra/vec2f.rs", "rank": 1, "score": 116311.21337077796 }, { "content": "pub fn dot_product(lhs: Vec2f, rhs: Vec2f) -> f64 {\n\n lhs.x * rhs.x + lhs.y * rhs.y\n\n}...
Rust
src/system2/memory.rs
huhlig/vcpu16-rs
4fda8e36fbaae7dddb79f5cb96646205d36ef677
use std::char; use std::fmt; use std::io::{Read, Write}; use std::mem; use std::slice; use super::Word; use super::SystemError; #[derive(Clone)] pub struct Memory { buffer: [Word; 65536], } impl Memory { pub fn new() -> Memory { Memory { buffer: [0; 65536], } } pub fn load(&mut self, reader: &mut Read) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_size, ); reader.read_exact(memory_slice).unwrap(); } } pub fn save(&mut self, writer: &mut Write) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_size, ); writer.write(memory_slice).unwrap(); } } pub fn clear(&mut self) { self.buffer = [0; 65536]; } pub fn write(&mut self, address: Word, buffer: &[Word]) -> Result<(), SystemError> { if address as usize + buffer.len() > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + buffer.len(); Ok(self.buffer[start..end].copy_from_slice(buffer)) } pub fn read(&mut self, address: Word, length: Word) -> Result<&[Word], SystemError> { if address as usize + length as usize > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + length as usize; Ok(&self.buffer[start..end as usize]) } pub fn set(&mut self, address: Word, value: Word) { self.buffer[address as usize] = value } pub fn get(&self, address: Word) -> Word { self.buffer[address as usize] } } impl fmt::Display for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { 
write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } impl fmt::Debug for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } #[cfg(test)] mod tests { use super::Word; use super::Memory; use rand::{Rng, SeedableRng, XorShiftRng}; use std::io::Cursor; #[test] pub fn test_load_save() { let mut mem = Memory::new(); let mut input: [u8; 131072] = [0; 131072]; let mut output: [u8; 131072] = [0; 131072]; XorShiftRng::from_seed([1; 4]).fill_bytes(&mut input[..]); mem.load(&mut Cursor::new(&mut input[..])); mem.save(&mut Cursor::new(&mut output[..])); assert_eq!(&input[..], &output[..]); } #[test] pub fn test_write_clear_read() { let mut mem = Memory::new(); let read_address: Word = 0x0100; let write_address: Word = 0x0104; let write_buffer: [Word; 8] = [1; 8]; let empty_buffer: [Word; 16] = [0; 16]; let dirty_buffer: [Word; 16] = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]; assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); mem.write(write_address, &write_buffer).unwrap(); assert_eq!(&dirty_buffer[..], mem.read(read_address, 16).unwrap()); mem.clear(); assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); } #[test] pub fn test_set_get() { let mut mem = Memory::new(); let address: u16 = 0xFFFF; let oldvalue: u16 = 0x0000; let newvalue: u16 = 0x2222; assert_eq!(oldvalue, mem.get(address)); mem.set(address, newvalue); assert_eq!(newvalue, mem.get(address)); } #[test] pub fn test_display() 
{ let mut mem = Memory::new(); for addr in 0..65536u32 { let addr = addr as u16; mem.set(addr, addr); } println!("{}", mem); } }
use std::char; use std::fmt; use std::io::{Read, Write}; use std::mem; use std::slice; use super::Word; use super::SystemError; #[derive(Clone)] pub struct Memory { buffer: [Word; 65536], } impl Memory { pub fn new() -> Memory { Memory { buffer: [0; 65536], } } pub fn load(&mut self, reader: &mut Read) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_size, ); reader.read_exact(memory_slice).unwrap(); } } pub fn save(&mut self, writer: &mut Write) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_
assert_eq!(&dirty_buffer[..], mem.read(read_address, 16).unwrap()); mem.clear(); assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); } #[test] pub fn test_set_get() { let mut mem = Memory::new(); let address: u16 = 0xFFFF; let oldvalue: u16 = 0x0000; let newvalue: u16 = 0x2222; assert_eq!(oldvalue, mem.get(address)); mem.set(address, newvalue); assert_eq!(newvalue, mem.get(address)); } #[test] pub fn test_display() { let mut mem = Memory::new(); for addr in 0..65536u32 { let addr = addr as u16; mem.set(addr, addr); } println!("{}", mem); } }
size, ); writer.write(memory_slice).unwrap(); } } pub fn clear(&mut self) { self.buffer = [0; 65536]; } pub fn write(&mut self, address: Word, buffer: &[Word]) -> Result<(), SystemError> { if address as usize + buffer.len() > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + buffer.len(); Ok(self.buffer[start..end].copy_from_slice(buffer)) } pub fn read(&mut self, address: Word, length: Word) -> Result<&[Word], SystemError> { if address as usize + length as usize > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + length as usize; Ok(&self.buffer[start..end as usize]) } pub fn set(&mut self, address: Word, value: Word) { self.buffer[address as usize] = value } pub fn get(&self, address: Word) -> Word { self.buffer[address as usize] } } impl fmt::Display for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } impl fmt::Debug for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } #[cfg(test)] mod 
tests { use super::Word; use super::Memory; use rand::{Rng, SeedableRng, XorShiftRng}; use std::io::Cursor; #[test] pub fn test_load_save() { let mut mem = Memory::new(); let mut input: [u8; 131072] = [0; 131072]; let mut output: [u8; 131072] = [0; 131072]; XorShiftRng::from_seed([1; 4]).fill_bytes(&mut input[..]); mem.load(&mut Cursor::new(&mut input[..])); mem.save(&mut Cursor::new(&mut output[..])); assert_eq!(&input[..], &output[..]); } #[test] pub fn test_write_clear_read() { let mut mem = Memory::new(); let read_address: Word = 0x0100; let write_address: Word = 0x0104; let write_buffer: [Word; 8] = [1; 8]; let empty_buffer: [Word; 16] = [0; 16]; let dirty_buffer: [Word; 16] = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]; assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); mem.write(write_address, &write_buffer).unwrap();
random
[ { "content": "/// Memory Array\n\nstruct Memory {\n\n /// Memory Buffer\n\n buffer: [u16; 65536],\n\n}\n\n\n", "file_path": "src/system/cpu.rs", "rank": 0, "score": 69478.01755072441 }, { "content": "/// Internal Clock\n\nstruct Clock {\n\n /// Is CPU Halted\n\n halted: bool,\n\n...
Rust
program/programs/main-program-final/src/lib.rs
SOLBROS/SOLHUNT
d36a2779b88d500af285fd3ff3bfdb9b31d16ac8
use anchor_lang::prelude::*; use anchor_spl::token::{self, SetAuthority, TokenAccount, Transfer}; use spl_token::instruction::AuthorityType; #[program] pub mod main_program_final { use super::*; pub fn initialize(ctx: Context<Initialize>) -> ProgramResult { let my_account = &mut ctx.accounts.my_account; my_account.data = 0; Ok(()) } pub fn initialize_admin_account( ctx: Context<InitializeAdminAccount>, total_amount: u64 ) -> ProgramResult { ctx.accounts.escrow_account.admin_key = *ctx.accounts.admin.key; ctx.accounts .escrow_account .admin_deposit_token_account = *ctx .accounts .admin_deposit_token_account .to_account_info() .key; ctx.accounts.escrow_account.total_amount = total_amount; let (pda, _bump_seed) = Pubkey::find_program_address(&[b"dungeon"], ctx.program_id); token::set_authority(ctx.accounts.into(), AuthorityType::AccountOwner, Some(pda))?; Ok(()) } pub fn update(ctx: Context<Update>, data: u64) -> ProgramResult { let my_account = &mut ctx.accounts.my_account; let amount = 20; if my_account.data & (1 << data) == 0 { msg!("Collecting token"); my_account.data = my_account.data | 1 << data; let (_pda, bump_seed) = Pubkey::find_program_address(&[b"dungeon"], ctx.program_id); let seeds = &[&b"dungeon"[..], &[bump_seed]]; token::transfer( ctx.accounts .into_transfer_to_game_user_context() .with_signer(&[&seeds[..]]), amount, )?; ctx.accounts.escrow_account.total_amount = ctx.accounts.escrow_account.total_amount - amount; } else { msg!("Token already collected"); } Ok(()) } } #[derive(Accounts)] #[instruction(total_amount: u64)] pub struct InitializeAdminAccount<'info> { #[account(signer)] pub admin: AccountInfo<'info>, #[account( mut, constraint = admin_deposit_token_account.amount >= total_amount )] pub admin_deposit_token_account: CpiAccount<'info, TokenAccount>, #[account(init)] pub escrow_account: ProgramAccount<'info, EscrowAccount>, pub token_program: AccountInfo<'info>, } #[account] pub struct EscrowAccount { pub admin_key: Pubkey, pub 
admin_deposit_token_account: Pubkey, pub total_amount: u64, } impl<'info> From<&mut InitializeAdminAccount<'info>> for CpiContext<'_, '_, '_, 'info, SetAuthority<'info>> { fn from(accounts: &mut InitializeAdminAccount<'info>) -> Self { let cpi_accounts = SetAuthority { account_or_mint: accounts .admin_deposit_token_account .to_account_info() .clone(), current_authority: accounts.admin.clone(), }; let cpi_program = accounts.token_program.clone(); CpiContext::new(cpi_program, cpi_accounts) } } impl<'info> Update<'info> { fn into_transfer_to_game_user_context(&self) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> { let cpi_accounts = Transfer { from: self.pda_deposit_token_account.to_account_info().clone(), to: self.game_user_receive_token_account.to_account_info().clone(), authority: self.pda_account.clone(), }; CpiContext::new(self.token_program.clone(), cpi_accounts) } } #[derive(Accounts)] pub struct Initialize<'info> { #[account(init)] pub my_account: ProgramAccount<'info, MyAccount>, } #[account] pub struct MyAccount { pub data: u64, } #[derive(Accounts)] pub struct Update<'info> { #[account(mut)] pub my_account: ProgramAccount<'info, MyAccount>, #[account(signer)] pub game_user: AccountInfo<'info>, #[account(mut)] pub game_user_receive_token_account: CpiAccount<'info, TokenAccount>, #[account(mut)] pub admin_main_account: AccountInfo<'info>, #[account(mut)] pub pda_deposit_token_account: CpiAccount<'info, TokenAccount>, #[account( mut, constraint = escrow_account.admin_deposit_token_account == *pda_deposit_token_account.to_account_info().key, constraint = escrow_account.admin_key == *admin_main_account.key )] pub escrow_account: ProgramAccount<'info, EscrowAccount>, pub pda_account: AccountInfo<'info>, pub token_program: AccountInfo<'info>, }
use anchor_lang::prelude::*; use anchor_spl::token::{self, SetAuthority, TokenAccount, Transfer}; use spl_token::instruction::AuthorityType; #[program] pub mod main_program_final { use super::*; pub fn initialize(ctx: Context<Initialize>) -> ProgramResult { let my_account = &mut ctx.accounts.my_account; my_account.data = 0; Ok(()) } pub fn initialize_admin_account( ctx: Context<InitializeAdminAccount>, total_amount: u64 ) -> ProgramResult { ctx.accounts.escrow_account.admin_key = *ctx.accounts.admin.key; ctx.accounts .escrow_account .admin_deposit_token_account = *ctx .accounts .admin_deposit_token_account .to_account_info() .key; ctx.accounts.escrow_account.total_amount = total_amount; let (pda, _bump_seed) = Pubkey::find_program_add
enAccount>, #[account(mut)] pub admin_main_account: AccountInfo<'info>, #[account(mut)] pub pda_deposit_token_account: CpiAccount<'info, TokenAccount>, #[account( mut, constraint = escrow_account.admin_deposit_token_account == *pda_deposit_token_account.to_account_info().key, constraint = escrow_account.admin_key == *admin_main_account.key )] pub escrow_account: ProgramAccount<'info, EscrowAccount>, pub pda_account: AccountInfo<'info>, pub token_program: AccountInfo<'info>, }
ress(&[b"dungeon"], ctx.program_id); token::set_authority(ctx.accounts.into(), AuthorityType::AccountOwner, Some(pda))?; Ok(()) } pub fn update(ctx: Context<Update>, data: u64) -> ProgramResult { let my_account = &mut ctx.accounts.my_account; let amount = 20; if my_account.data & (1 << data) == 0 { msg!("Collecting token"); my_account.data = my_account.data | 1 << data; let (_pda, bump_seed) = Pubkey::find_program_address(&[b"dungeon"], ctx.program_id); let seeds = &[&b"dungeon"[..], &[bump_seed]]; token::transfer( ctx.accounts .into_transfer_to_game_user_context() .with_signer(&[&seeds[..]]), amount, )?; ctx.accounts.escrow_account.total_amount = ctx.accounts.escrow_account.total_amount - amount; } else { msg!("Token already collected"); } Ok(()) } } #[derive(Accounts)] #[instruction(total_amount: u64)] pub struct InitializeAdminAccount<'info> { #[account(signer)] pub admin: AccountInfo<'info>, #[account( mut, constraint = admin_deposit_token_account.amount >= total_amount )] pub admin_deposit_token_account: CpiAccount<'info, TokenAccount>, #[account(init)] pub escrow_account: ProgramAccount<'info, EscrowAccount>, pub token_program: AccountInfo<'info>, } #[account] pub struct EscrowAccount { pub admin_key: Pubkey, pub admin_deposit_token_account: Pubkey, pub total_amount: u64, } impl<'info> From<&mut InitializeAdminAccount<'info>> for CpiContext<'_, '_, '_, 'info, SetAuthority<'info>> { fn from(accounts: &mut InitializeAdminAccount<'info>) -> Self { let cpi_accounts = SetAuthority { account_or_mint: accounts .admin_deposit_token_account .to_account_info() .clone(), current_authority: accounts.admin.clone(), }; let cpi_program = accounts.token_program.clone(); CpiContext::new(cpi_program, cpi_accounts) } } impl<'info> Update<'info> { fn into_transfer_to_game_user_context(&self) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> { let cpi_accounts = Transfer { from: self.pda_deposit_token_account.to_account_info().clone(), to: 
self.game_user_receive_token_account.to_account_info().clone(), authority: self.pda_account.clone(), }; CpiContext::new(self.token_program.clone(), cpi_accounts) } } #[derive(Accounts)] pub struct Initialize<'info> { #[account(init)] pub my_account: ProgramAccount<'info, MyAccount>, } #[account] pub struct MyAccount { pub data: u64, } #[derive(Accounts)] pub struct Update<'info> { #[account(mut)] pub my_account: ProgramAccount<'info, MyAccount>, #[account(signer)] pub game_user: AccountInfo<'info>, #[account(mut)] pub game_user_receive_token_account: CpiAccount<'info, Tok
random
[]
Rust
src/config.rs
cmsd2/codelauf
69a590b4f0bb86ea4d50b9e229baddc8cb8d4c6a
use std::env; use clap::{App, SubCommand, ArgMatches}; use toml::{Table, Parser}; use std::io::{Read,Result,Error,ErrorKind}; use std::fs::File; use super::result::*; #[derive(Debug,Clone)] pub struct Config { pub data_dir: String, pub zookeeper: Option<String>, pub elasticsearch: Option<String>, pub index_config: IndexConfig, pub sync_config: SyncConfig, pub repo_location: Option<RepoLocation>, } impl Config { pub fn new() -> Config { Config { data_dir: ".".to_string(), zookeeper: None, elasticsearch: None, index_config: IndexConfig::new(), sync_config: SyncConfig::new(), repo_location: None, } } pub fn new_from_table(table: &Table) -> Config { let mut cfg = Self::new(); cfg.data_dir = table .get("data_dir") .map(|m| m.as_str().unwrap().to_string()) .unwrap_or(cfg.data_dir); cfg.zookeeper = table .get("zookeeper") .map(|m| m.as_str().unwrap().to_string()); cfg.elasticsearch = table .get("elasticsearch") .map(|m| m.as_str().unwrap().to_string()); cfg.index_config = table .get("index") .map(|m| IndexConfig::new_from_table(m.as_table().unwrap()) ) .unwrap_or(cfg.index_config); cfg.sync_config = table .get("sync") .map(|m| SyncConfig::new_from_table(m.as_table().unwrap()) ) .unwrap_or(cfg.sync_config); cfg } } #[derive(Debug,Clone)] pub struct IndexConfig; impl IndexConfig { pub fn new() -> IndexConfig { IndexConfig } pub fn new_from_table(_table: &Table) -> IndexConfig { let cfg = Self::new(); cfg } } #[derive(Debug,Clone)] pub struct SyncConfig; impl SyncConfig { pub fn new() -> SyncConfig { SyncConfig } pub fn new_from_table(_table: &Table) -> SyncConfig { let cfg = Self::new(); cfg } } #[derive(Debug,Clone)] pub struct RepoLocation { pub remote: Option<String>, pub branches: Vec<String>, pub dir: Option<String>, } impl RepoLocation { pub fn new() -> RepoLocation { RepoLocation { remote: None, branches: vec![], dir: None, } } pub fn get_remote<'a>(&'a self) -> RepoResult<&'a str> { self.remote.as_ref().map(|s| s as &str).ok_or(RepoError::NoRemote) } pub fn 
new_from_args<'a,'b>(args: &ArgMatches<'a,'b>) -> Option<RepoLocation> { if args.is_present("REMOTE") || args.is_present("REPO_DIR") { let mut repo_loc = RepoLocation::new(); repo_loc.remote = get_config_str(args, "REMOTE") .or(repo_loc.remote); match args.values_of("BRANCH") { Some(branches) => { for branch in branches { repo_loc.branches.push(branch.to_string()); } }, None => { repo_loc.branches.push("master".to_string()); } } repo_loc.dir = get_config_str(args, "REPO_DIR") .or(repo_loc.dir); Some(repo_loc) } else { None } } } pub fn parse_args<'a,'b>() -> ArgMatches<'a,'b> { App::new("codelauf") .version("1.0") .author("Chris Dawes <cmsd2@cantab.net>") .about("Codelauf indexes git repositories for search") .args_from_usage( "-c --config=[CONFIG] 'Sets a custom config file' -z --zookeeper=[ZOOKEEPER] 'Zookeeper host:port[/dir] (env var ZOOKEEPER)' -e --elasticsearch=[ELASTICSEARCH] 'Elasticsearch host:port (env var ELASTICSEARCH)' -d --data-dir=[DATA_DIR] 'Data directory'") .subcommand(SubCommand::with_name("init") .about("creates the local database and exits") .args_from_usage("") ) .subcommand(SubCommand::with_name("index") .about("indexes a single repository and exits") .args_from_usage( "-r --remote=[REMOTE] 'Repository remote url (required if not already cloned)' -b --branch=[BRANCH] 'Branch (default master)' -R --repo-dir=[REPO_DIR] 'Repo dir to use for repo (clones if it does not exist)'") ) .subcommand(SubCommand::with_name("fetch") .about("clones or fetches a repository and exits") .args_from_usage( "-r --remote=[REMOTE] 'Repository remote url (required if not already cloned)' -b --branch=[BRANCH] 'Branch (default master)' -R --repo-dir=[REPO_DIR] 'Repo dir to use for repo (clones if it does not exist)'") ) .subcommand(SubCommand::with_name("sync") .about("starts the worker process to mirror and index repos") .args_from_usage("") ) .get_matches() } pub fn parse_config(path: &str) -> Result<Config> { let mut f = try!(File::open(path)); let mut s = 
String::new(); try!(f.read_to_string(&mut s)); let mut p = Parser::new(&s); p.parse().map(|m| Config::new_from_table(&m)).ok_or(Error::new(ErrorKind::Other, "config parsing error")) } pub fn read_config(config: Option<String>) -> Result<Config> { match config { Some(path) => parse_config(&path), None => Ok(Config::new()) } } pub fn get_env(name: &str) -> Option<String> { match env::var(name) { Ok(val) => Some(val), Err(e) => { info!("not using environment variable {}: {:?}", name, e); None } } } pub fn apply_config<'a,'b>(cfg: Config, args: &ArgMatches<'a,'b>) -> Config { let mut cfg = cfg; cfg.zookeeper = get_config_str_env(args, "ZOOKEEPER", "ZOOKEEPER") .or(cfg.zookeeper); cfg.elasticsearch = get_config_str_env(args, "ELASTICSEARCH", "ELASTICSEARCH") .or(cfg.elasticsearch); cfg.data_dir = get_config_str(args, "DATA_DIR") .unwrap_or(cfg.data_dir); match args.subcommand() { ("index", Some(indexargs)) => { cfg.repo_location = RepoLocation::new_from_args(&indexargs); }, ("fetch", Some(fetchargs)) => { cfg.repo_location = RepoLocation::new_from_args(&fetchargs); }, ("sync", Some(_syncargs)) => { }, _ => {} } cfg } pub fn get_config_str<'a,'b>(args: &ArgMatches<'a,'b>, key: &str) -> Option<String> { args.value_of(key) .map(|s| s.to_string()) } pub fn get_config_str_env<'a,'b>(args: &ArgMatches<'a,'b>, key: &str, env_key: &str) -> Option<String> { args.value_of(key) .map(|s| s.to_string()) .or(get_env(env_key)) } pub fn get_config<'a,'b>(args: &ArgMatches<'a,'b>) -> Result<Config> { let maybe_config = read_config(get_config_str(args, "CONFIG")); maybe_config.map_err(|err| { error!("error reading config file: {:?}", err); err }).map(|cfg| { apply_config(cfg, args) }) }
use std::env; use clap::{App, SubCommand, ArgMatches}; use toml::{Table, Parser}; use std::io::{Read,Result,Error,ErrorKind}; use std::fs::File; use super::result::*; #[derive(Debug,Clone)] pub struct Config { pub data_dir: String, pub zookeeper: Option<String>, pub elasticsearch: Option<String>, pub index_config: IndexConfig, pub sync_config: SyncConfig, pub repo_location: Option<RepoLocation>, } impl Config { pub fn new() -> Config { Config { data_dir: ".".to_string(), zookeeper: None, elasticsearch: None, index_config: IndexConfig::new(), sync_config: SyncConfig::new(), repo_location: None, } } pub fn new_from_table(table: &Table) -> Config { let mut cfg = Self::new(); cfg.data_dir = table .get("data_dir") .map(|m| m.as_str().unwrap().to_string()) .unwrap_or(cfg.data_dir); cfg.zookeeper = table .get("zookeeper") .map(|m| m.as_str().unwrap().to_string()); cfg.elasticsearch = table .get("elasticsearch") .map(|m| m.as_str().unwrap().to_string()); cfg.index_config = table .get("index") .map(|m| IndexConfig::new_from_table(m.as_table().unwrap()) ) .unwrap_or(cfg.index_config); cfg.sync_config = table .get("sync") .map(|m| SyncConfig::new_from_table(m.as_table().unwrap()) ) .unwrap_or(cfg.sync_config); cfg } } #[derive(Debug,Clone)] pub struct IndexConfig; impl IndexConfig { pub fn new() -> IndexConfig { IndexConfig } pub fn new_from_table(_table: &Table) -> IndexConfig { let cfg = Self::new(); cfg } } #[derive(Debug,Clone)] pub struct SyncConfig; impl SyncConfig { pub fn new() -> SyncConfig { SyncConfig } pub fn new_from_table(_table: &Table) -> SyncConfig { let cfg = Self::new(); cfg } } #[derive(Debug,Clone)] pub struct RepoLocation { pub remote: Option<String>, pub branches: Vec<String>, pub dir: Option<String>, } impl RepoLocation { pub fn new() -> RepoLocation { RepoLocation { remote: None, branches: vec![], dir: None, } } pub fn get_remote<'a>(&'a self) -> RepoResult<&'a str> { self.remote.as_ref().map(|s| s as &str).ok_or(RepoError::NoRemote) } pub fn 
new_from_args<'a,'b>(args: &ArgMatches<'a,'b>) -> Option<RepoLocation> { if args.is_present("REMOTE") || args.is_present("REPO_DIR") { let mut repo_loc = RepoLocation::new(); repo_loc.remote = get_config_str(args, "REMOTE") .or(repo_loc.remote); match args.values_of("BRANCH") { Some(branches) => { for branch in branches { repo_loc.branches.push(branch.to_string()); } }, None => { repo_loc.branches.push("master".to_string()); } } repo_loc.dir = get_config_str(args, "REPO_DIR") .or(repo_loc.dir); Some(repo_loc) } else { None } } } pub fn parse_args<'a,'b>() -> ArgMatches<'a,'b> { App::new("codelauf") .version("1.0") .author("Chris Dawes <cmsd2@cantab.net>") .about("Codelauf indexes git repositories for search") .args_from_usage( "-c --config=[CONFIG] 'Sets a custom config file' -z --zookeeper=[ZOOKEEPER] 'Zookeeper host:port[/dir] (env var ZOOKEEPER)' -e --elasticsearch=[ELASTICSEARCH] 'Elasticsearch host:port (env var ELASTICSEARCH)' -d --data-dir=[DATA_DIR] 'Data directory'") .subcommand(SubCommand::with_name("init") .about("creates the local database and exits") .args_from_usage("") ) .subcommand(SubCommand::with_name("index") .about("indexes a single repository and exits") .args_from_usage( "-r --remote=[REMOTE] 'Repository remote url (required if not already cloned)' -b --branch=[BRANCH] 'Branch (default master)' -R --repo-dir=[REPO_DIR] 'Repo dir to use for repo (clones if it does not exist)'") ) .subcommand(SubCommand::with_name("fetch") .about("clones or fetches a repository and exits") .args_from_usage( "-r --remote=[REMOTE] 'Repository remote url (required if not already cloned)' -b --branch=[BRANCH] 'Branch (default master)' -R --repo-dir=[REPO_DIR] 'Repo dir to use for repo (clones if it does not exist)'") ) .subcommand(SubCommand::with_name("sync") .about("starts the worker process to mirror and index repos") .args_from_usage("") ) .get_matches() } pub fn parse_config(path: &str) -> Result<Config> { let mut f = try!(File::open(path)); let mut s = 
String::new(); try!(f.read_to_string(&mut s)); let mut p = Parser::new(&s); p.parse().map(|m| Config::new_from_table(&m)).ok_or(Error::new(ErrorKind::Other, "config parsing error")) } pub fn read_config(config: Option<String>) -> Result<Config> { match config { Some(path) => parse_config(&path), None => Ok(Config::new()) } } pub fn get_env(name: &str) -> Option<String> {
} pub fn apply_config<'a,'b>(cfg: Config, args: &ArgMatches<'a,'b>) -> Config { let mut cfg = cfg; cfg.zookeeper = get_config_str_env(args, "ZOOKEEPER", "ZOOKEEPER") .or(cfg.zookeeper); cfg.elasticsearch = get_config_str_env(args, "ELASTICSEARCH", "ELASTICSEARCH") .or(cfg.elasticsearch); cfg.data_dir = get_config_str(args, "DATA_DIR") .unwrap_or(cfg.data_dir); match args.subcommand() { ("index", Some(indexargs)) => { cfg.repo_location = RepoLocation::new_from_args(&indexargs); }, ("fetch", Some(fetchargs)) => { cfg.repo_location = RepoLocation::new_from_args(&fetchargs); }, ("sync", Some(_syncargs)) => { }, _ => {} } cfg } pub fn get_config_str<'a,'b>(args: &ArgMatches<'a,'b>, key: &str) -> Option<String> { args.value_of(key) .map(|s| s.to_string()) } pub fn get_config_str_env<'a,'b>(args: &ArgMatches<'a,'b>, key: &str, env_key: &str) -> Option<String> { args.value_of(key) .map(|s| s.to_string()) .or(get_env(env_key)) } pub fn get_config<'a,'b>(args: &ArgMatches<'a,'b>) -> Result<Config> { let maybe_config = read_config(get_config_str(args, "CONFIG")); maybe_config.map_err(|err| { error!("error reading config file: {:?}", err); err }).map(|cfg| { apply_config(cfg, args) }) }
match env::var(name) { Ok(val) => Some(val), Err(e) => { info!("not using environment variable {}: {:?}", name, e); None } }
if_condition
[ { "content": "/// open db\n\n/// calc repo dir location\n\n/// create basic db entry if it doesn't exist\n\n/// clone project if it isn't already\n\n/// otherwise:\n\n/// check remote url matches\n\n/// fetch branch\n\n/// checkout branch\n\n/// update db as we go\n\npub fn fetch_repo(config: &Config) -> ...
Rust
src/lib.rs
mihail-milev/pam_blox
05086179dca40a5c46343dab814906c0fdc0cda4
#![allow(non_camel_case_types)] include!("pam_appl.rs"); use std::ffi::{CString, CStr}; use std::os::raw::{c_int, c_char}; use std::ptr; use std::process::Command; use regex::Regex; use std::fs; use std::os::linux::fs::MetadataExt; use std::fs::File; use std::io::{BufRead, BufReader}; #[no_mangle] pub extern fn pam_sm_authenticate(pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { let mut username : *const c_char = ptr::null(); let prompt = match CString::new("Username:") { Ok(s) => s, Err(e) => { eprintln!("Unable to convert prompt to C-type string: {}", e); return PAM_AUTH_ERR; }, }; let get_user_result = unsafe { pam_get_user(pamh, &mut username, prompt.as_ptr()) }; if get_user_result != (PAM_SUCCESS as i32) || username == ptr::null() { return PAM_AUTH_ERR; } let username_cstr = unsafe { CStr::from_ptr(username) }; let username_str = match username_cstr.to_str() { Ok(s) => s, Err(e) => { eprintln!("Unable to convert username to Rust-type string: {}", e); return PAM_AUTH_ERR; }, }; let bdaddr = match get_device_id_from_users_file(username_str, "/etc/blox_users.conf") { Some(a) => a, None => { return PAM_AUTH_ERR; }, }; println!("Authenticating {:?} using device ID \"{}\"", username_str, bdaddr); let check_result = read_bluetooth_signal_strength_and_decide(&bdaddr, -5); if check_result { return PAM_SUCCESS; } return PAM_AUTH_ERR; } #[no_mangle] pub extern fn pam_sm_setcred(_pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { return PAM_SUCCESS; } fn get_device_id_from_users_file(username: &str, filename: &str) -> Option<String> { let meta = match fs::metadata(filename) { Ok(m) => m, Err(e) => { eprintln!("Unable to fetch information for file {}: {}", filename, e); return None; }, }; if meta.st_uid() != 0 || meta.st_gid() != 0 { eprintln!("The file {} is not owned by root:root", filename); return None; } if (meta.st_mode() & 3967) != 256 { eprintln!("The file {} must be readable only 
by root", filename); return None; } let uname_re = match Regex::new("^\"?([a-zA-Z0-9].*?)\"?$") { Ok(r) => r, Err(e) => { eprintln!("Unable to compile username regular expression: {}", e); return None; }, }; let mat = match uname_re.captures(username) { Some(m) => m, None => { eprintln!("Invalid username format supplied: {}", username); return None; }, }; let uname = match mat.get(1) { Some(u) => u.as_str(), None => { eprintln!("Empty username supplied: {}", username); return None; }, }; let f = match File::open(filename) { Ok(f) => f, Err(e) => { eprintln!("Unable to open file {}: {}", filename, e); return None; }, }; let reader = BufReader::new(f); let bdre = match Regex::new(r"^(?:[0-9A-F]{2})(?::[0-9A-F]{2}){5}$") { Ok(r) => r, Err(e) => { eprintln!("Unable to create BT address regular expression: {}", e); return None; }, }; let mut user_found = false; for line in reader.lines() { let ln_text = match line { Ok(l) => l, Err(_e) => continue, }; let items : Vec<&str> = ln_text.split('\t').collect(); if items.len() < 2 { continue; } if items[0] == uname { user_found = true; if !bdre.is_match(items[1]) { eprintln!("User {} found, but the supplied BT address ({}) is not valid, skipping ...", items[0], items[1]); continue; } return Some(String::from(items[1])); } } if !user_found { eprintln!("User {} not found in {}", uname, filename); } return None; } fn read_bluetooth_signal_strength_and_decide(bdaddr: &str, threshold: i32) -> bool { let cmd = format!("hcitool rssi {}", bdaddr); let error_text = format!("Unable to read RSSI value for {}", bdaddr); let rssi_output = match Command::new("sh").arg("-c").arg(cmd).output() { Ok(o) => o.stdout, Err(e) => { eprintln!("{}: {}", &error_text, e); return false; }, }; let rssi_output_str = match std::str::from_utf8(&rssi_output) { Ok(s) => s, Err(e) => { eprintln!("Unable to convert output command to UTF-8: {}", e); return false; }, }; let re = match Regex::new(r"RSSI return value: (-?\d+)") { Ok(r) => r, Err(e) => { 
eprintln!("Unable to create regular expression for parsing command output: {}", e); return false; }, }; let mut val_found = false; for mat in re.captures_iter(rssi_output_str) { let val = match mat.get(1) { Some(v) => v, None => continue, }; let val_i32 = match val.as_str().parse::<i32>() { Ok(v) => v, Err(_e) => continue, }; if val_i32 >= threshold { println!("Success: signal strength {} is above or equal to threshold {}", val_i32, threshold); return true; } val_found = true; } if val_found { println!("Device not close enough!"); } else { println!("Device not connected!"); } return false; } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
#![allow(non_camel_case_types)] include!("pam_appl.rs"); use std::ffi::{CString, CStr}; use std::os::raw::{c_int, c_char}; use std::ptr; use std::process::Command; use regex::Regex; use std::fs; use std::os::linux::fs::MetadataExt; use std::fs::File; use std::io::{BufRead, BufReader}; #[no_mangle] pub extern fn pam_sm_authenticate(pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { let mut username : *const c_char = ptr::null(); let prompt = match CString::new("Username:") { Ok(s) => s, Err(e) => { eprintln!("Unable to convert prompt to C-type string: {}", e); return PAM_AUTH_ERR; }, }; let get_user_result = unsafe { pam_get_user(pamh, &mut username, prompt.as_ptr()) }; if get_user_result != (PAM_SUCCESS as i32) || username == ptr::null() { return PAM_AUTH_ERR; } let username_cstr = unsafe { CStr::from_ptr(username) }; let username_str = match username_cstr.to_str() { Ok(s) => s, Err(e) => { eprintln!("Unable to convert username to Rust-type string: {}", e); return PAM_AUTH_ERR; }, }; let bdaddr = match get_device_id_from_users_file(username_str, "/etc/blox_users.conf") { Some(a) => a, None => { return PAM_AUTH_ERR; }, }; println!("Authenticating {:?} using device ID \"{}\"", username_str, bdaddr); let check_result = read_bluetooth_signal_strength_and_decide(&bdaddr, -5); if check_result { return PAM_SUCCESS; } return PAM_AUTH_ERR; } #[no_mangle] pub extern fn pam_sm_setcred(_pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { return PAM_SUCCESS; }
fn read_bluetooth_signal_strength_and_decide(bdaddr: &str, threshold: i32) -> bool { let cmd = format!("hcitool rssi {}", bdaddr); let error_text = format!("Unable to read RSSI value for {}", bdaddr); let rssi_output = match Command::new("sh").arg("-c").arg(cmd).output() { Ok(o) => o.stdout, Err(e) => { eprintln!("{}: {}", &error_text, e); return false; }, }; let rssi_output_str = match std::str::from_utf8(&rssi_output) { Ok(s) => s, Err(e) => { eprintln!("Unable to convert output command to UTF-8: {}", e); return false; }, }; let re = match Regex::new(r"RSSI return value: (-?\d+)") { Ok(r) => r, Err(e) => { eprintln!("Unable to create regular expression for parsing command output: {}", e); return false; }, }; let mut val_found = false; for mat in re.captures_iter(rssi_output_str) { let val = match mat.get(1) { Some(v) => v, None => continue, }; let val_i32 = match val.as_str().parse::<i32>() { Ok(v) => v, Err(_e) => continue, }; if val_i32 >= threshold { println!("Success: signal strength {} is above or equal to threshold {}", val_i32, threshold); return true; } val_found = true; } if val_found { println!("Device not close enough!"); } else { println!("Device not connected!"); } return false; } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
fn get_device_id_from_users_file(username: &str, filename: &str) -> Option<String> { let meta = match fs::metadata(filename) { Ok(m) => m, Err(e) => { eprintln!("Unable to fetch information for file {}: {}", filename, e); return None; }, }; if meta.st_uid() != 0 || meta.st_gid() != 0 { eprintln!("The file {} is not owned by root:root", filename); return None; } if (meta.st_mode() & 3967) != 256 { eprintln!("The file {} must be readable only by root", filename); return None; } let uname_re = match Regex::new("^\"?([a-zA-Z0-9].*?)\"?$") { Ok(r) => r, Err(e) => { eprintln!("Unable to compile username regular expression: {}", e); return None; }, }; let mat = match uname_re.captures(username) { Some(m) => m, None => { eprintln!("Invalid username format supplied: {}", username); return None; }, }; let uname = match mat.get(1) { Some(u) => u.as_str(), None => { eprintln!("Empty username supplied: {}", username); return None; }, }; let f = match File::open(filename) { Ok(f) => f, Err(e) => { eprintln!("Unable to open file {}: {}", filename, e); return None; }, }; let reader = BufReader::new(f); let bdre = match Regex::new(r"^(?:[0-9A-F]{2})(?::[0-9A-F]{2}){5}$") { Ok(r) => r, Err(e) => { eprintln!("Unable to create BT address regular expression: {}", e); return None; }, }; let mut user_found = false; for line in reader.lines() { let ln_text = match line { Ok(l) => l, Err(_e) => continue, }; let items : Vec<&str> = ln_text.split('\t').collect(); if items.len() < 2 { continue; } if items[0] == uname { user_found = true; if !bdre.is_match(items[1]) { eprintln!("User {} found, but the supplied BT address ({}) is not valid, skipping ...", items[0], items[1]); continue; } return Some(String::from(items[1])); } } if !user_found { eprintln!("User {} not found in {}", uname, filename); } return None; }
function_block-full_function
[ { "content": "#[test]\n\nfn bindgen_test_layout_pam_response() {\n\n assert_eq!(\n\n ::std::mem::size_of::<pam_response>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(pam_response))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<pam_response>(),\n\n 8usize,...
Rust
driver/src/command.rs
rise-lang/2021-CGO-artifact
f6c2c4e916810f734bb1983f93103269e620c931
use std::io::prelude::*; use std::process; use crate::*; pub fn setup(env: &Env) { if env.benchmark { let r = &env.target.remote; if !remote_output(&"mkdir", &vec!["-p", r.dir.to_str().unwrap()], &[], &r.dst, Path::new(".")) .expect("could not create remote directory").status.success() { panic!("could not create remote directory"); } } } pub type HostCommand = std::process::Command; pub struct TargetCommand { program: String, args: Vec<String>, env: Vec<(String, String)> } pub struct UploadCommand<'a> { host_path: &'a Path, remote_path: Option<&'a str>, } pub fn host_run<S: AsRef<ffi::OsStr>>(program: S) -> HostCommand { process::Command::new(program) } pub fn target_run<S: AsRef<str>>(program: S) -> TargetCommand { TargetCommand { program: program.as_ref().to_owned(), args: Vec::new(), env: Vec::new() } } pub fn upload_file<'a>(path: &'a Path) -> UploadCommand<'a> { UploadCommand { host_path: path, remote_path: None } } pub fn upload_file_to<'a>(host: &'a Path, remote: &'a str) -> UploadCommand<'a> { UploadCommand { host_path: host, remote_path: Some(remote) } } pub trait CommandExt { fn prompt(&self, env: &Env) -> ColoredString; fn output(&mut self, env: &Env) -> io::Result<process::Output>; #[must_use] fn log<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); println!("{}", prompt); self.log_no_prompt(w, env) } #[must_use] fn log_no_println<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); self.log_no_prompt(w, env) } #[must_use] fn log_no_prompt<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { match self.output(env) { Ok(output) => { if !output.status.success() { let s = format!("{} ({})", "failure".red(), output.status); println!("{}", s); writeln!(w, "{}", s).unwrap(); } let out = String::from_utf8_lossy(&output.stdout).into(); let err = String::from_utf8_lossy(&output.stderr); write!(w, 
"{}", out).unwrap(); if !err.is_empty() { write!(w, "!: {}", err).unwrap(); } if output.status.success() { Some(out) } else { None } } Err(error) => { let s = format!("{}: {}", "could not run command".red(), error); println!("{}", s); writeln!(w, "{}", s).unwrap(); None } } } } impl CommandExt for HostCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("h> {:?}", self).blue() } fn output(&mut self, _: &Env) -> io::Result<process::Output> { self.output() } } impl CommandExt for TargetCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("t>{:?} {:?}{:?}", FlatDbg(&self.env), self.program, FlatDbg(&self.args)).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; remote_output(&self.program, &self.args, &self.env, &r.dst, &r.dir) } } struct FlatDbg<I: IntoIterator + Clone>(I); impl<I: IntoIterator<Item = E> + Clone, E: fmt::Debug> fmt::Debug for FlatDbg<I> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for e in self.0.clone().into_iter() { write!(f, " {:?}", e)?; } Ok(()) } } impl<'a> CommandExt for UploadCommand<'a> { fn prompt(&self, env: &Env) -> ColoredString { let r = &env.target.remote; let remote_path = &match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }; format!("u> {:?} --> {:?}", self.host_path, remote_path).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; let remote_path = &match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }; assert!(remote_output(&"mkdir", &vec!["-p", remote_path.parent().unwrap().to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not create upload directory").status.success()); assert!(remote_output(&"rm", &vec!["-rf", remote_path.to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not clear upload directory").status.success()); let mut cmd = process::Command::new("scp"); cmd.arg("-r") .args(&["-o", 
"ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(self.host_path) .arg(format!("scp://{}/{:?}", r.dst, remote_path)); cmd.output() } } fn remote_output<S: AsRef<str>>(program: &S, args: &[S], env: &[(S, S)], dst: &str, dir: &Path) -> io::Result<process::Output> { let mut r = process::Command::new("ssh"); r.args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(format!("ssh://{}", dst)); for (k, v) in env { r.arg("export").arg(format!("{}=\"{}\";", k.as_ref(), v.as_ref())); } let r = r.arg("cd").arg(dir).arg(";") .arg(program.as_ref()) .args(args.iter().map(|a| format!("\"{}\"", a.as_ref()))); r.output() } impl TargetCommand { pub fn arg<S: AsRef<str>>(&mut self, s: S) -> &mut TargetCommand { self.args.push(s.as_ref().to_owned()); self } pub fn args<I, S>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = S>, S: AsRef<str> { self.args.extend(i.into_iter().map(|a| a.as_ref().to_owned())); self } pub fn env<K, V>(&mut self, k: K, v: V) -> &mut TargetCommand where K: AsRef<str> , V: AsRef<str> { self.env.push((k.as_ref().to_owned(), v.as_ref().to_owned())); self } pub fn envs<I, K, V>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = (K, V)>, K: AsRef<str>, V: AsRef<str> { self.env.extend(i.into_iter().map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))); self } }
use std::io::prelude::*; use std::process; use crate::*; pub fn setup(env: &Env) { if env.benchmark { let r = &env.target.remote; if !remote_output(&"mkdir", &vec!["-p", r.dir.to_str().unwrap()], &[], &r.dst, Path::new(".")) .expect("could not create remote directory").status.success() { panic!("could not create remote directory"); } } } pub type HostCommand = std::process::Command; pub struct TargetCommand { program: String, args: Vec<String>, env: Vec<(String, String)> } pub struct UploadCommand<'a> { host_path: &'a Path, remote_path: Option<&'a str>, } pub fn host_run<S: AsRef<ffi::OsStr>>(program: S) -> HostCommand { process::Command::new(program) } pub fn target_run<S: AsRef<str>>(program: S) -> TargetCommand { TargetCommand { program: program.as_ref().to_owned(), args: Vec::new(), env: Vec::new() } } pub fn upload_file<'a>(path: &'a Path) -> UploadCommand<'a> { UploadCommand { host_path: path, remote_path: None } } pub fn upload_file_to<'a>(host: &'a Path, remote: &'a str) -> UploadCommand<'a> { UploadCommand { host_path: host, remote_path: Some(remote) } } pub trait CommandExt { fn prompt(&self, env: &Env) -> ColoredString; fn output(&mut self, env: &Env) -> io::Result<process::Output>; #[must_use] fn log<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); println!("{}", prompt); self.log_no_prompt(w, env) } #[must_use] fn log_no_println<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); self.log_no_prompt(w, env) } #[must_use] fn log_no_prompt<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { match self.output(env) { Ok(output) => { if !output.status.success() { let s = format!("{} ({})", "failure".red(), output.status); println!("{}", s); writeln!(w, "{}", s).unwrap(); } let out = String::from_utf8_lossy(&output.stdout).into(); let err = String::from_utf8_lossy(&output.stderr); write!(w, 
"{}", out).unwrap(); if !err.is_empty() { write!(w, "!: {}", err).unwrap(); } if output.status.success() { Some(out) } else { None } } Err(error) => { let s = format!("{}: {}", "could not run command".red(), error); println!("{}", s); writeln!(w, "{}", s).unwrap(); None } } } } impl CommandExt for HostCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("h> {:?}", self).blue() } fn output(&mut self, _: &Env) -> io::Result<process::Output> { self.output() } } impl CommandExt for TargetCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("t>{:?} {:?}{:?}", FlatDbg(&self.env), self.program, FlatDbg(&self.args)).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; remote_output(&self.program, &self.args, &self.env, &r.dst, &r.dir) } } struct FlatDbg<I: IntoIterator + Clone>(I); impl<I: IntoIterator<Item = E> + Clone, E: fmt::Debug> fmt::Debug for FlatDbg<I> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for e in self.0.clone().into_iter() { write!(f, " {:?}", e)?; } Ok(()) } } impl<'a> CommandExt for UploadCommand<'a> { fn prompt(&self, env: &Env) -> ColoredString { let r = &env.target.remote; let remote_path = &
; format!("u> {:?} --> {:?}", self.host_path, remote_path).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; let remote_path = &match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }; assert!(remote_output(&"mkdir", &vec!["-p", remote_path.parent().unwrap().to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not create upload directory").status.success()); assert!(remote_output(&"rm", &vec!["-rf", remote_path.to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not clear upload directory").status.success()); let mut cmd = process::Command::new("scp"); cmd.arg("-r") .args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(self.host_path) .arg(format!("scp://{}/{:?}", r.dst, remote_path)); cmd.output() } } fn remote_output<S: AsRef<str>>(program: &S, args: &[S], env: &[(S, S)], dst: &str, dir: &Path) -> io::Result<process::Output> { let mut r = process::Command::new("ssh"); r.args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(format!("ssh://{}", dst)); for (k, v) in env { r.arg("export").arg(format!("{}=\"{}\";", k.as_ref(), v.as_ref())); } let r = r.arg("cd").arg(dir).arg(";") .arg(program.as_ref()) .args(args.iter().map(|a| format!("\"{}\"", a.as_ref()))); r.output() } impl TargetCommand { pub fn arg<S: AsRef<str>>(&mut self, s: S) -> &mut TargetCommand { self.args.push(s.as_ref().to_owned()); self } pub fn args<I, S>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = S>, S: AsRef<str> { self.args.extend(i.into_iter().map(|a| a.as_ref().to_owned())); self } pub fn env<K, V>(&mut self, k: K, v: V) -> &mut TargetCommand where K: AsRef<str> , V: AsRef<str> { self.env.push((k.as_ref().to_owned(), v.as_ref().to_owned())); self } pub fn envs<I, K, V>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = (K, V)>, K: AsRef<str>, V: AsRef<str> { self.env.extend(i.into_iter().map(|(k, v)| 
(k.as_ref().to_owned(), v.as_ref().to_owned()))); self } }
match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }
if_condition
[ { "content": "pub fn setup<F>(use_env: F) where F: FnOnce(&Env) {\n\n println!(\"{}\", \"-- setting environment up\".yellow());\n\n\n\n let opt = Opt::from_args();\n\n\n\n let target = &Target::load(&opt.target);\n\n let target_name = opt.target.file_stem().unwrap();\n\n let target_name_str = tar...
Rust
mm0-rs/src/parser/ast.rs
mattsse/mm0
247fd3e2ac65eec7d5317285bb2fee639b17a63f
use std::sync::Arc; use std::fmt::{self, Display}; use num::BigUint; use crate::lined_string::LinedString; use crate::util::{Span, ArcString}; use crate::elab::lisp::print::{EnvDisplay, FormatEnv}; use super::ParseError; bitflags! { pub struct Modifiers: u8 { const PURE = 1; const STRICT = 2; const PROVABLE = 4; const FREE = 8; const PUB = 16; const ABSTRACT = 32; const LOCAL = 64; } } impl Modifiers { pub const NONE: Modifiers = Self::empty(); pub fn sort_data() -> Modifiers { Modifiers::PURE | Modifiers::STRICT | Modifiers::PROVABLE | Modifiers::FREE } pub fn allowed_visibility(self, k: DeclKind) -> bool { match k { DeclKind::Term => self.is_empty(), DeclKind::Axiom => self.is_empty(), DeclKind::Def => self == Modifiers::ABSTRACT || self == Modifiers::LOCAL || self.is_empty(), DeclKind::Thm => self == Modifiers::PUB || self.is_empty(), } } pub fn from_name(s: &str) -> Option<Modifiers> { match s { "pure" => Some(Modifiers::PURE), "strict" => Some(Modifiers::STRICT), "provable" => Some(Modifiers::PROVABLE), "free" => Some(Modifiers::FREE), "pub" => Some(Modifiers::PUB), "abstract" => Some(Modifiers::ABSTRACT), "local" => Some(Modifiers::LOCAL), _ => None } } } impl Display for Modifiers { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.contains(Modifiers::PURE) {write!(f, "pure ")?} if self.contains(Modifiers::STRICT) {write!(f, "strict ")?} if self.contains(Modifiers::PROVABLE) {write!(f, "provable ")?} if self.contains(Modifiers::FREE) {write!(f, "free ")?} if self.contains(Modifiers::PUB) {write!(f, "pub ")?} if self.contains(Modifiers::ABSTRACT) {write!(f, "abstract ")?} if self.contains(Modifiers::LOCAL) {write!(f, "local ")?} Ok(()) } } #[derive(Clone)] pub enum Delimiter { Both(Box<[u8]>), LeftRight(Box<[u8]>, Box<[u8]>), } #[derive(Copy, Clone, Debug)] pub struct Formula(pub Span); impl Formula { pub fn inner(&self) -> Span { (self.0.start + 1 .. 
self.0.end - 1).into() } } #[derive(Clone)] pub struct Const { pub fmla: Formula, pub trim: Span } #[derive(Clone, Copy, PartialEq, Eq)] pub enum DeclKind { Term, Axiom, Thm, Def } #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum LocalKind { Bound, Reg, Dummy, Anon } impl LocalKind { pub fn is_bound(self) -> bool { match self { LocalKind::Bound | LocalKind::Dummy => true, LocalKind::Reg | LocalKind::Anon => false, } } } #[derive(Clone, Debug)] pub struct DepType { pub sort: Span, pub deps: Vec<Span>, } impl DepType { pub fn span(&self) -> Span { (self.sort.start..self.deps.last().unwrap_or(&self.sort).end).into() } } #[derive(Clone, Debug)] pub enum Type { DepType(DepType), Formula(Formula) } impl Type { pub fn span(&self) -> Span { match self { Type::DepType(d) => d.span(), Type::Formula(f) => f.0 } } } #[derive(Clone, Debug)] pub struct Binder { pub span: Span, pub local: Option<Span>, pub kind: LocalKind, pub ty: Option<Type>, } #[derive(Clone, Debug)] pub struct SExpr { pub span: Span, pub k: SExprKind, } #[derive(Copy, Clone, Debug)] pub enum Atom { Ident, Quote, Unquote, Nfx } #[derive(Clone, Debug)] pub enum SExprKind { Atom(Atom), List(Vec<SExpr>), DottedList(Vec<SExpr>, Box<SExpr>), Number(BigUint), String(ArcString), Bool(bool), Formula(Formula), } pub fn curly_transform<T>(es: &mut Vec<T>, no_dot: bool, eq: impl Fn(&T, &T) -> bool, nfx: impl FnOnce() -> T) { let n = es.len(); if n > 2 { let valid_curly = no_dot && n % 2 != 0 && { let e = &es[1]; (3..n).step_by(2).all(|i| eq(&es[i], e)) }; if valid_curly { es.swap(0, 1); let mut from = 4; let mut to = 3; while from < n { es.swap(from, to); to += 1; from += 2; } es.truncate(to); } else { es.insert(0, nfx()); } } } impl SExpr { pub fn atom(span: impl Into<Span>, a: Atom) -> SExpr { SExpr {span: span.into(), k: SExprKind::Atom(a)} } pub fn list(span: impl Into<Span>, es: Vec<SExpr>) -> SExpr { SExpr {span: span.into(), k: SExprKind::List(es)} } pub fn dotted_list(span: impl Into<Span>, mut es: 
Vec<SExpr>, dot: Option<SExpr>) -> SExpr { match dot { None => SExpr {span: span.into(), k: SExprKind::List(es)}, Some(e) => match e.k { SExprKind::DottedList(es2, e2) => { es.extend(es2); SExpr {span: span.into(), k: SExprKind::DottedList(es, e2)} } SExprKind::List(es2) => { es.extend(es2); SExpr::list(span, es) } _ => SExpr {span: span.into(), k: SExprKind::DottedList(es, Box::new(e))} } } } pub fn curly_list(span: Span, curly: bool, mut es: Vec<SExpr>, dot: Option<SExpr>, eq: impl Fn(&SExpr, &SExpr) -> bool) -> SExpr { if curly { curly_transform(&mut es, dot.is_none(), eq, || SExpr::atom(span.start..span.start+1, Atom::Nfx)) } Self::dotted_list(span, es, dot) } } impl EnvDisplay for SExpr { fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { match &self.k { &SExprKind::Atom(a) => fe.source.span_atom(self.span, a).fmt(f), SExprKind::List(es) => { let mut it = es.iter(); match it.next() { None => "()".fmt(f), Some(e) => { write!(f, "({}", fe.to(e))?; for e in it {write!(f, " {}", fe.to(e))?} ")".fmt(f) } } } SExprKind::DottedList(es, r) => { "(".fmt(f)?; for e in es {write!(f, "{} ", fe.to(e))?} write!(f, ". 
{})", fe.to(r)) } SExprKind::Number(n) => n.fmt(f), SExprKind::String(s) => write!(f, "{:?}", s), SExprKind::Bool(true) => "#t".fmt(f), SExprKind::Bool(false) => "#f".fmt(f), SExprKind::Formula(s) => fe.source[s.0].fmt(f), } } } #[derive(Clone)] pub struct Decl { pub mods: Modifiers, pub k: DeclKind, pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub val: Option<SExpr>, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum Prec { Prec(u32), Max } impl fmt::Display for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Prec::Prec(p) => p.fmt(f), &Prec::Max => "max".fmt(f) } } } impl fmt::Debug for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } #[derive(Clone)] pub enum SimpleNotaKind { Prefix, Infix {right: bool} } #[derive(Clone)] pub struct SimpleNota { pub k: SimpleNotaKind, pub id: Span, pub c: Const, pub prec: Prec, } #[derive(Clone)] pub enum Literal { Const(Const, Prec), Var(Span), } #[derive(Clone)] pub struct GenNota { pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub lits: Vec<Literal>, pub prec: Option<(Prec, bool)> } #[derive(Clone)] pub enum StmtKind { Sort(Span, Modifiers), Decl(Decl), Delimiter(Delimiter), SimpleNota(SimpleNota), Coercion { id: Span, from: Span, to: Span }, Notation(GenNota), Inout { out: bool, k: Span, hs: Vec<SExpr> }, Annot(SExpr, Box<Stmt>), Do(Vec<SExpr>), Import(Span, String), } #[derive(Clone)] pub struct Stmt { pub span: Span, pub k: StmtKind, } pub struct AST { pub source: Arc<LinedString>, pub imports: Vec<(Span, String)>, pub stmts: Vec<Stmt>, pub errors: Vec<ParseError>, } impl LinedString { pub fn span_atom(&self, sp: Span, a: Atom) -> &str { match a { Atom::Ident => &self[sp], Atom::Quote => "quote", Atom::Unquote => "unquote", Atom::Nfx => ":nfx", } } } impl AST { pub fn span(&self, s: Span) -> &str { &self.source[s] } pub fn span_atom(&self, sp: Span, a: Atom) -> &str { self.source.span_atom(sp, a) } pub fn 
last_checkpoint(&self, pos: usize) -> (usize, usize) { match self.stmts.binary_search_by_key(&pos, |stmt| stmt.span.end) { Ok(i) => (i+1, pos), Err(0) => (0, 0), Err(i) => (i, self.stmts[i-1].span.end) } } }
use std::sync::Arc; use std::fmt::{self, Display}; use num::BigUint; use crate::lined_string::LinedString; use crate::util::{Span, ArcString}; use crate::elab::lisp::print::{EnvDisplay, FormatEnv}; use super::ParseError; bitflags! { pub struct Modifiers: u8 { const PURE = 1; const STRICT = 2; const PROVABLE = 4; const FREE = 8; const PUB = 16; const ABSTRACT = 32; const LOCAL = 64; } } impl Modifiers { pub const NONE: Modifiers = Self::empty(); pub fn sort_data() -> Modifiers { Modifiers::PURE | Modifiers::STRICT | Modifiers::PROVABLE | Modifiers::FREE } pub fn allowed_visibility(self, k: DeclKind) -> bool { match k { DeclKind::Term => self.is_empty(), DeclKind::Axiom => self.is_empty(), DeclKind::Def => self == Modifiers::ABSTRACT || self == Modifiers::LOCAL || self.is_empty(), DeclKind::Thm => self == Modifiers::PUB || self.is_empty(), } } pub fn from_name(s: &str) -> Option<Modifiers> { match s { "pure" => Some(Modifiers::PURE), "strict" => Some(Modifiers::STRICT), "provable" => Some(Modifiers::PROVABLE), "free" => Some(Modifiers::FREE), "pub" => Some(Modifiers::PUB), "abstract" => Some(Modifiers::ABSTRACT), "local" => Some(Modifiers::LOCAL), _ => None } } } impl Display for Modifiers { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.contains(Modifiers::PURE) {write!(f, "pure ")?} if self.contains(Modifiers::STRICT) {write!(f, "strict ")?} if self.contains(Modifiers::PROVABLE) {write!(f, "provable ")?} if self.contains(Modifiers::FREE) {write!(f, "free ")?} if self.contains(Modifiers::PUB) {write!(f, "pub ")?} if self.contains(Modifiers::ABSTRACT) {write!(f, "abstract ")?} if self.contains(Modifiers::LOCAL) {write!(f, "local ")?} Ok(()) } } #[derive(Clone)] pub enum Delimiter { Both(Box<[u8]>), LeftRight(Box<[u8]>, Box<[u8]>), } #[derive(Copy, Clone, Debug)] pub struct Formula(pub Span); impl Formula { pub fn inner(&self) -> Span { (self.0.start + 1 .. 
self.0.end - 1).into() } } #[derive(Clone)] pub struct Const { pub fmla: Formula, pub trim: Span } #[derive(Clone, Copy, PartialEq, Eq)] pub enum DeclKind { Term, Axiom, Thm, Def } #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum LocalKind { Bound, Reg, Dummy, Anon } impl LocalKind {
} #[derive(Clone, Debug)] pub struct DepType { pub sort: Span, pub deps: Vec<Span>, } impl DepType { pub fn span(&self) -> Span { (self.sort.start..self.deps.last().unwrap_or(&self.sort).end).into() } } #[derive(Clone, Debug)] pub enum Type { DepType(DepType), Formula(Formula) } impl Type { pub fn span(&self) -> Span { match self { Type::DepType(d) => d.span(), Type::Formula(f) => f.0 } } } #[derive(Clone, Debug)] pub struct Binder { pub span: Span, pub local: Option<Span>, pub kind: LocalKind, pub ty: Option<Type>, } #[derive(Clone, Debug)] pub struct SExpr { pub span: Span, pub k: SExprKind, } #[derive(Copy, Clone, Debug)] pub enum Atom { Ident, Quote, Unquote, Nfx } #[derive(Clone, Debug)] pub enum SExprKind { Atom(Atom), List(Vec<SExpr>), DottedList(Vec<SExpr>, Box<SExpr>), Number(BigUint), String(ArcString), Bool(bool), Formula(Formula), } pub fn curly_transform<T>(es: &mut Vec<T>, no_dot: bool, eq: impl Fn(&T, &T) -> bool, nfx: impl FnOnce() -> T) { let n = es.len(); if n > 2 { let valid_curly = no_dot && n % 2 != 0 && { let e = &es[1]; (3..n).step_by(2).all(|i| eq(&es[i], e)) }; if valid_curly { es.swap(0, 1); let mut from = 4; let mut to = 3; while from < n { es.swap(from, to); to += 1; from += 2; } es.truncate(to); } else { es.insert(0, nfx()); } } } impl SExpr { pub fn atom(span: impl Into<Span>, a: Atom) -> SExpr { SExpr {span: span.into(), k: SExprKind::Atom(a)} } pub fn list(span: impl Into<Span>, es: Vec<SExpr>) -> SExpr { SExpr {span: span.into(), k: SExprKind::List(es)} } pub fn dotted_list(span: impl Into<Span>, mut es: Vec<SExpr>, dot: Option<SExpr>) -> SExpr { match dot { None => SExpr {span: span.into(), k: SExprKind::List(es)}, Some(e) => match e.k { SExprKind::DottedList(es2, e2) => { es.extend(es2); SExpr {span: span.into(), k: SExprKind::DottedList(es, e2)} } SExprKind::List(es2) => { es.extend(es2); SExpr::list(span, es) } _ => SExpr {span: span.into(), k: SExprKind::DottedList(es, Box::new(e))} } } } pub fn curly_list(span: Span, curly: 
bool, mut es: Vec<SExpr>, dot: Option<SExpr>, eq: impl Fn(&SExpr, &SExpr) -> bool) -> SExpr { if curly { curly_transform(&mut es, dot.is_none(), eq, || SExpr::atom(span.start..span.start+1, Atom::Nfx)) } Self::dotted_list(span, es, dot) } } impl EnvDisplay for SExpr { fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { match &self.k { &SExprKind::Atom(a) => fe.source.span_atom(self.span, a).fmt(f), SExprKind::List(es) => { let mut it = es.iter(); match it.next() { None => "()".fmt(f), Some(e) => { write!(f, "({}", fe.to(e))?; for e in it {write!(f, " {}", fe.to(e))?} ")".fmt(f) } } } SExprKind::DottedList(es, r) => { "(".fmt(f)?; for e in es {write!(f, "{} ", fe.to(e))?} write!(f, ". {})", fe.to(r)) } SExprKind::Number(n) => n.fmt(f), SExprKind::String(s) => write!(f, "{:?}", s), SExprKind::Bool(true) => "#t".fmt(f), SExprKind::Bool(false) => "#f".fmt(f), SExprKind::Formula(s) => fe.source[s.0].fmt(f), } } } #[derive(Clone)] pub struct Decl { pub mods: Modifiers, pub k: DeclKind, pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub val: Option<SExpr>, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum Prec { Prec(u32), Max } impl fmt::Display for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Prec::Prec(p) => p.fmt(f), &Prec::Max => "max".fmt(f) } } } impl fmt::Debug for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } #[derive(Clone)] pub enum SimpleNotaKind { Prefix, Infix {right: bool} } #[derive(Clone)] pub struct SimpleNota { pub k: SimpleNotaKind, pub id: Span, pub c: Const, pub prec: Prec, } #[derive(Clone)] pub enum Literal { Const(Const, Prec), Var(Span), } #[derive(Clone)] pub struct GenNota { pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub lits: Vec<Literal>, pub prec: Option<(Prec, bool)> } #[derive(Clone)] pub enum StmtKind { Sort(Span, Modifiers), Decl(Decl), Delimiter(Delimiter), SimpleNota(SimpleNota), Coercion { id: Span, 
from: Span, to: Span }, Notation(GenNota), Inout { out: bool, k: Span, hs: Vec<SExpr> }, Annot(SExpr, Box<Stmt>), Do(Vec<SExpr>), Import(Span, String), } #[derive(Clone)] pub struct Stmt { pub span: Span, pub k: StmtKind, } pub struct AST { pub source: Arc<LinedString>, pub imports: Vec<(Span, String)>, pub stmts: Vec<Stmt>, pub errors: Vec<ParseError>, } impl LinedString { pub fn span_atom(&self, sp: Span, a: Atom) -> &str { match a { Atom::Ident => &self[sp], Atom::Quote => "quote", Atom::Unquote => "unquote", Atom::Nfx => ":nfx", } } } impl AST { pub fn span(&self, s: Span) -> &str { &self.source[s] } pub fn span_atom(&self, sp: Span, a: Atom) -> &str { self.source.span_atom(sp, a) } pub fn last_checkpoint(&self, pos: usize) -> (usize, usize) { match self.stmts.binary_search_by_key(&pos, |stmt| stmt.span.end) { Ok(i) => (i+1, pos), Err(0) => (0, 0), Err(i) => (i, self.stmts[i-1].span.end) } } }
pub fn is_bound(self) -> bool { match self { LocalKind::Bound | LocalKind::Dummy => true, LocalKind::Reg | LocalKind::Anon => false, } }
function_block-full_function
[ { "content": "pub fn whitespace(c: u8) -> bool { c == b' ' || c == b'\\n' }\n\n\n\nimpl<'a> Parser<'a> {\n\n pub fn cur(&self) -> u8 { self.source[self.idx] }\n\n pub fn cur_opt(&self) -> Option<u8> { self.source.get(self.idx).cloned() }\n\n\n\n pub fn err(&self, msg: BoxError) -> ParseError {\n\n ParseEr...
Rust
farms/farm-client/tests/vault_actions/mod.rs
biw/solana-program-library
5611ad8bd595d9e3666f8b115cd28f8116038645
use { crate::{utils, utils::Swap}, solana_farm_client::client::FarmClient, solana_sdk::{commitment_config::CommitmentConfig, signature::Keypair, signer::Signer}, std::{thread, time}, }; const MAX_SOL_BALANCE_TO_USE: f64 = 0.1; const INITIAL_CRANK_DELAY: u64 = 400; const CRANK_INTERVAL: u64 = 100; pub fn do_swap(client: &FarmClient, keypair: &Keypair, swap: &Swap) { let amount = if swap.amount == 0.0 { utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else if swap.amount < 0.0 { -1.0 * swap.amount * utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else { swap.amount }; if amount < 0.0001 { return; } println!( ">> Swap {} {} to {}", amount, swap.from_token, swap.to_token ); println!( " Done: {}", client .swap( keypair, swap.protocol, swap.from_token, swap.to_token, amount, 0.0, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), swap.from_token, swap.to_token, "After swap", ); } pub fn do_add_liquidity( client: &FarmClient, keypair: &Keypair, vault_name: &str, max_token_a_ui_amount: f64, max_token_b_ui_amount: f64, ) -> f64 { println!( ">> Add liquidity to {}: {}, {}", vault_name, max_token_a_ui_amount, max_token_b_ui_amount ); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_balance = utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name); println!( " Done: {}", client .add_liquidity_vault( keypair, vault_name, max_token_a_ui_amount, max_token_b_ui_amount, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After add liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name) - vt_balance } pub fn do_crank(client: &FarmClient, keypair: &Keypair, vault_name: &str, step: u64) { println!(">> 
Crank {} with step {}", vault_name, step); let initial_info = client.get_vault_info(vault_name).unwrap(); println!( " Done: {}", client.crank_vault(keypair, vault_name, step).unwrap() ); let after_crank_info = client.get_vault_info(vault_name).unwrap(); println!( " Rewards received: {}, {}", after_crank_info.tokens_a_rewards - initial_info.tokens_a_rewards, after_crank_info.tokens_b_rewards - initial_info.tokens_b_rewards ); let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn do_remove_liquidity(client: &FarmClient, keypair: &Keypair, vault_name: &str, amount: f64) { println!(">> Remove liquidity from {}: {}", vault_name, amount); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); println!( " Done: {}", client .remove_liquidity_vault(keypair, vault_name, amount) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After remove liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn cleanup(client: &FarmClient, keypair: &Keypair, vault_name: &str, cleanup_swaps: Vec<Swap>) { println!("\n>>> Clean-up {}...", vault_name); let wallet = keypair.pubkey(); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_token_balance = utils::get_token_or_native_balance(client, &wallet, &vt_token_name); if vt_token_balance > 0.0 { do_remove_liquidity(client, keypair, vault_name, vt_token_balance); } for swap in cleanup_swaps { do_swap(client, keypair, &swap); } if token_a_str != "SOL" { let token_a_balance = utils::get_token_or_native_balance(client, &wallet, &token_a_str); if token_a_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_a_str.as_str(), to_token: "SOL", amount: token_a_balance, }, ); } } if token_b_str != "SOL" { let token_b_balance = 
utils::get_token_or_native_balance(client, &wallet, &token_b_str); if token_b_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_b_str.as_str(), to_token: "SOL", amount: token_b_balance, }, ); } } let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn run_test(vault_name: &str, swaps: Vec<Swap>, cleanup_swaps: Vec<Swap>) { let (endpoint, keypair) = utils::get_endpoint_and_keypair(); let client = FarmClient::new_with_commitment(&endpoint, CommitmentConfig::confirmed()); let wallet = keypair.pubkey(); cleanup(&client, &keypair, vault_name, cleanup_swaps.clone()); println!("\n>>> Testing {}...", vault_name); let (token_a_str, token_b_str, _) = client.get_vault_token_names(vault_name).unwrap(); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Initial"); let _ = utils::get_vault_stake_balance(&client, vault_name); for swap in swaps { do_swap(&client, &keypair, &swap); } let token_a_balance = if token_a_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_a_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_a_str) }; let token_b_balance = if token_b_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_b_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_b_str) }; let mut vt_received; if vault_name.starts_with("SBR.") { if token_a_str == "USDC" { assert!(token_a_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, token_a_balance * 2.0 / 3.0, 0.0, ); } else { assert!(token_b_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, 0.0, token_b_balance * 2.0 / 3.0, ); } } else { assert!(token_a_balance > 0.0 && token_b_balance > 0.0); vt_received = do_add_liquidity(&client, &keypair, vault_name, token_a_balance / 3.0, 0.0); assert!(vt_received > 0.0); vt_received += do_add_liquidity(&client, 
&keypair, vault_name, 0.0, token_b_balance / 3.0); } println!("Waiting {} secs for rewards...", INITIAL_CRANK_DELAY); thread::sleep(time::Duration::from_secs(INITIAL_CRANK_DELAY)); do_crank(&client, &keypair, vault_name, 1); let cranks = if vault_name.starts_with("SBR.") { 6 } else { 4 }; for step in 2..cranks { println!("Waiting {} secs before next crank...", CRANK_INTERVAL); thread::sleep(time::Duration::from_secs(CRANK_INTERVAL)); do_crank(&client, &keypair, vault_name, step); } do_remove_liquidity(&client, &keypair, vault_name, vt_received / 2.0); do_remove_liquidity(&client, &keypair, vault_name, 0.0); cleanup(&client, &keypair, vault_name, cleanup_swaps); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Final"); let _ = utils::get_vault_stake_balance(&client, vault_name); }
use { crate::{utils, utils::Swap}, solana_farm_client::client::FarmClient, solana_sdk::{commitment_config::CommitmentConfig, signature::Keypair, signer::Signer}, std::{thread, time}, }; const MAX_SOL_BALANCE_TO_USE: f64 = 0.1; const INITIAL_CRANK_DELAY: u64 = 400; const CRANK_INTERVAL: u64 = 100; pub fn do_swap(client: &FarmClient, keypair: &Keypair, swap: &Swap) { let amount = if swap.amount == 0.0 { utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else if swap.amount < 0.0 { -1.0 * swap.amount * utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else { swap.amount }; if amount < 0.0001 { return; } println!( ">> Swap {} {} to {}", amount, swap.from_token, swap.to_token ); println!( " Done: {}", client .swap( keypair, swap.protocol, swap.from_token, swap.to_token, amount, 0.0, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), swap.from_token, swap.to_token, "After swap", ); } pub fn do_add_liquidity( client: &FarmClient, keypair: &Keypair, vault_name: &str, max_token_a_ui_amount: f64, max_token_b_ui_amount: f64, ) -> f64 { println!( ">> Add liquidity to {}: {}, {}", vault_name, max_token_a_ui_amount, max_token_b_ui_amount ); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_balance = utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name); println!( " Done: {}", client .add_liquidity_vault( keypair, vault_name, max_token_a_ui_amount, max_token_b_ui_amount, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After add liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name) - vt_balance } pub fn do_crank(client: &FarmClient, keypair: &Keypair, vault_name: &str, step: u64) { println!(">> 
Crank {} with step {}", vault_name, step); let initial_info = client.get_vault_info(vault_name).unwrap(); println!( " Done: {}", client.crank_vault(keypair, vault_name, step).unwrap() ); let after_crank_info = client.get_vault_info(vault_name).unwrap(); println!( " Rewards received: {}, {}", after_crank_info.tokens_a_rewards - initial_info.tokens_a_rewar
pub fn do_remove_liquidity(client: &FarmClient, keypair: &Keypair, vault_name: &str, amount: f64) { println!(">> Remove liquidity from {}: {}", vault_name, amount); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); println!( " Done: {}", client .remove_liquidity_vault(keypair, vault_name, amount) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After remove liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn cleanup(client: &FarmClient, keypair: &Keypair, vault_name: &str, cleanup_swaps: Vec<Swap>) { println!("\n>>> Clean-up {}...", vault_name); let wallet = keypair.pubkey(); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_token_balance = utils::get_token_or_native_balance(client, &wallet, &vt_token_name); if vt_token_balance > 0.0 { do_remove_liquidity(client, keypair, vault_name, vt_token_balance); } for swap in cleanup_swaps { do_swap(client, keypair, &swap); } if token_a_str != "SOL" { let token_a_balance = utils::get_token_or_native_balance(client, &wallet, &token_a_str); if token_a_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_a_str.as_str(), to_token: "SOL", amount: token_a_balance, }, ); } } if token_b_str != "SOL" { let token_b_balance = utils::get_token_or_native_balance(client, &wallet, &token_b_str); if token_b_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_b_str.as_str(), to_token: "SOL", amount: token_b_balance, }, ); } } let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn run_test(vault_name: &str, swaps: Vec<Swap>, cleanup_swaps: Vec<Swap>) { let (endpoint, keypair) = utils::get_endpoint_and_keypair(); let client = FarmClient::new_with_commitment(&endpoint, CommitmentConfig::confirmed()); let 
wallet = keypair.pubkey(); cleanup(&client, &keypair, vault_name, cleanup_swaps.clone()); println!("\n>>> Testing {}...", vault_name); let (token_a_str, token_b_str, _) = client.get_vault_token_names(vault_name).unwrap(); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Initial"); let _ = utils::get_vault_stake_balance(&client, vault_name); for swap in swaps { do_swap(&client, &keypair, &swap); } let token_a_balance = if token_a_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_a_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_a_str) }; let token_b_balance = if token_b_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_b_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_b_str) }; let mut vt_received; if vault_name.starts_with("SBR.") { if token_a_str == "USDC" { assert!(token_a_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, token_a_balance * 2.0 / 3.0, 0.0, ); } else { assert!(token_b_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, 0.0, token_b_balance * 2.0 / 3.0, ); } } else { assert!(token_a_balance > 0.0 && token_b_balance > 0.0); vt_received = do_add_liquidity(&client, &keypair, vault_name, token_a_balance / 3.0, 0.0); assert!(vt_received > 0.0); vt_received += do_add_liquidity(&client, &keypair, vault_name, 0.0, token_b_balance / 3.0); } println!("Waiting {} secs for rewards...", INITIAL_CRANK_DELAY); thread::sleep(time::Duration::from_secs(INITIAL_CRANK_DELAY)); do_crank(&client, &keypair, vault_name, 1); let cranks = if vault_name.starts_with("SBR.") { 6 } else { 4 }; for step in 2..cranks { println!("Waiting {} secs before next crank...", CRANK_INTERVAL); thread::sleep(time::Duration::from_secs(CRANK_INTERVAL)); do_crank(&client, &keypair, vault_name, step); } do_remove_liquidity(&client, &keypair, 
vault_name, vt_received / 2.0); do_remove_liquidity(&client, &keypair, vault_name, 0.0); cleanup(&client, &keypair, vault_name, cleanup_swaps); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Final"); let _ = utils::get_vault_stake_balance(&client, vault_name); }
ds, after_crank_info.tokens_b_rewards - initial_info.tokens_b_rewards ); let _ = utils::get_vault_stake_balance(client, vault_name); }
function_block-function_prefixed
[ { "content": "pub fn do_remove_liquidity(client: &FarmClient, keypair: &Keypair, pool_name: &str, amount: f64) {\n\n println!(\">> Remove liquidity from {}: {}\", pool_name, amount);\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_pool_token_names(pool_name).unwrap();\n\n println!(\n\n ...
Rust
.cargo-task/generate-drivers/src/main.rs
flott-motion/stepper
88ff5a41251078943d33d4c3495d9b9103cbe8eb
use std::{ env, error::Error, fs::{create_dir_all, remove_dir_all, File}, io::prelude::*, path::PathBuf, }; use cargo_task_util::ct_info; use serde_derive::Serialize; use serde_json::Value; use tinytemplate::{format_unescaped, TinyTemplate}; mod config; use config::{load_cargo_toml, load_drivers_toml, Driver}; fn main() -> Result<(), Box<dyn Error>> { let root = env::current_dir()?; let drivers = root.join("drivers"); let templates = root.join("templates").join("driver"); let mut tt = TinyTemplate::new(); tt.set_default_formatter(&format_unescaped); tt.add_formatter("upper", format_upper); let cargo_toml = load_template(&templates.join("Cargo.toml.tmpl"))?; tt.add_template("cargo_toml", cargo_toml.as_str())?; let lib_rs = load_template(&templates.join("src").join("lib.rs.tmpl"))?; tt.add_template("lib_rs", lib_rs.as_str())?; let readme_md = load_template(&templates.join("README.md.tmpl"))?; tt.add_template("readme_md", readme_md.as_str())?; let manifest = load_cargo_toml(&root)?; let version = manifest.package.version; let authors = manifest.package.authors; let config = load_drivers_toml(&root)?; for driver in config.drivers { ct_info!("generating '{}' driver...", driver.name); let ctx = &Context::new(driver, &version, &authors); let driver_path = drivers.join(&ctx.name); if driver_path.exists() { remove_dir_all(&driver_path)?; } create_dir_all(&driver_path.join("src"))?; let cargo_toml_output = tt.render("cargo_toml", ctx)?; let lib_rs_output = tt.render("lib_rs", ctx)?; let readme_md_output = tt.render("readme_md", ctx)?; File::create(&driver_path.join("Cargo.toml"))? .write_all(cargo_toml_output.as_ref())?; File::create(&driver_path.join("src").join("lib.rs"))? .write_all(lib_rs_output.as_ref())?; File::create(&driver_path.join("README.md"))? 
.write_all(readme_md_output.as_ref())?; } Ok(()) } #[derive(Serialize)] struct Context { pub name: String, pub version: String, pub authors: Vec<String>, pub product_url: String, pub pololu_url: String, } impl Context { pub fn new( driver: Driver, version: &String, authors: &Vec<String>, ) -> Self { Self { name: driver.name, version: version.to_owned(), authors: authors.to_owned(), product_url: driver.product_url, pololu_url: driver.pololu_url, } } } fn format_upper( value: &Value, output: &mut String, ) -> Result<(), tinytemplate::error::Error> { let mut s = String::new(); format_unescaped(value, &mut s)?; output.push_str(&s.to_uppercase()); Ok(()) } fn load_template(path: &PathBuf) -> std::io::Result<String> { let mut contents = String::new(); File::open(path)?.read_to_string(&mut contents)?; Ok(contents) }
use std::{ env, error::Error, fs::{create_dir_all, remove_dir_all, File}, io::prelude::*, path::PathBuf, }; use cargo_task_util::ct_info; use serde_derive::Serialize; use serde_json::Value; use tinytemplate::{format_unescaped, TinyTemplate}; mod config; use config::{load_cargo_toml, load_drivers_toml, Driver}; fn main() -> Result<(), Box<dyn Error>> { let root = env::current_dir()?; let drivers = root.join("drivers"); let templates = root.join("templates").join("driver"); let mut tt = TinyTemplate::new(); tt.set_default_formatter(&format_unescaped); tt.add_formatter("upper", format_upper); let cargo_toml = load_template(&templates.join("Cargo.toml.tmpl"))?; tt.add_template("cargo_toml", cargo_toml.as_str())?; let lib_rs = load_template(&templates.join("src").join("lib.rs.tmpl"))?; tt.add_template("lib_rs", lib_rs.as_str())?; let readme_md = load_template(&templates.join("README.md.tmpl"))?; tt.add_template("readme_md", readme_md.as_str())?; let manifest = load_cargo_toml(&root)?; let version = manifest.package.version; let authors = manifest.package.authors; let config = load_drivers_toml(&root)?; for driver in config.drivers { ct_info!("generating '{}' driver...", driver.name); let ctx = &Context::new(driver, &version, &authors); let driver_path = drivers.join(&ctx.name); if driver_path.exists() { remove_dir_all(&driver_path)?; } create_dir_all(&driver_path.join("src"))?; let cargo_toml_output = tt.render("cargo_toml", ctx)?; let lib_rs_output = tt.render("lib_rs", ctx)?; let readme_md_output = tt.render("readme_md", ctx)?; File::create(&driver_path.join("Cargo.toml"))? .write_all(cargo_toml_output.as_ref())?; File::create(&driver_path.join("src").join("lib.rs"))? .write_all(lib_rs_output.as_ref())?; File::create(&driver_path.join("README.md"))? 
.write_all(readme_md_output.as_ref())?; } Ok(()) } #[derive(Serialize)] struct Context { pub name: String, pub version: String, pub authors: Vec<String>, pub product_url: String, pub pololu_url: String, } impl Context { pub fn new( driver: Driver, version: &String, authors: &Vec<String>, ) -> Self { Self { name: driver.name, version: version.to_owned(), authors: authors.to_owned(), product_url: driver.product_url, pololu_url: driver.pololu_url, } } } fn format_upper( value: &Value,
fn load_template(path: &PathBuf) -> std::io::Result<String> { let mut contents = String::new(); File::open(path)?.read_to_string(&mut contents)?; Ok(contents) }
output: &mut String, ) -> Result<(), tinytemplate::error::Error> { let mut s = String::new(); format_unescaped(value, &mut s)?; output.push_str(&s.to_uppercase()); Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn update<Driver, Timer, Profile, Convert>(\n\n mut state: State<Driver, Timer, Profile>,\n\n new_motion: &mut Option<Direction>,\n\n profile: &mut Profile,\n\n current_step: &mut i32,\n\n current_direction: &mut Direction,\n\n convert: &Convert,\n\n) -> (\n\n Result<\n\n ...
Rust
turbo-txpool/src/error.rs
quilt/turbo
d4efba95f82669d3976120295c50090b31c08428
pub(crate) mod decode_error { use snafu::{ResultExt, Snafu}; use ethereum_interfaces::txpool::ImportResult; #[derive(Debug, Snafu)] #[non_exhaustive] #[snafu(visibility = "pub(crate)")] pub enum DecodeError { RlpDecode { source: Box<dyn std::error::Error + Send + Sync>, field: Option<&'static str>, }, IntegerOverflow, } pub(crate) trait RlpResultExt<T> { fn context_field(self, field: &'static str) -> Result<T, DecodeError>; } impl<T> RlpResultExt<T> for Result<T, rlp::DecoderError> { fn context_field(self, field: &'static str) -> Result<T, DecodeError> { self.map_err(|e| Box::new(e).into()) .context(RlpDecode { field }) } } impl From<DecodeError> for ImportResult { fn from(e: DecodeError) -> ImportResult { match e { DecodeError::RlpDecode { .. } => ImportResult::Invalid, DecodeError::IntegerOverflow => ImportResult::InternalError, } } } } pub(crate) mod import_error { use ethereum_types::{Address, H256, U256}; use snafu::Snafu; use ethereum_interfaces::txpool::ImportResult; #[derive(Debug, Snafu)] #[non_exhaustive] #[snafu(visibility = "pub(crate)")] pub enum ImportError { NonceUsed { tx_hash: H256, }, #[snafu(display( "nonce is too far in the future tx_nonce={} from={} tx={}", from, tx_nonce, tx_hash, ))] NonceGap { from: Address, tx_nonce: u64, tx_hash: H256, }, InsufficientBalance { tx_hash: H256, }, NotReady, FeeTooLow { minimum: U256, }, Ecdsa { source: Box<dyn std::error::Error + Send + Sync>, }, #[snafu(context(false))] Decode { source: super::DecodeError, }, AlreadyExists { tx_hash: H256, }, RequestFailed { source: tonic::Status, }, IncompleteMessage, } impl From<ImportError> for ImportResult { fn from(e: ImportError) -> ImportResult { match e { ImportError::NonceGap { .. } => ImportResult::Invalid, ImportError::NonceUsed { .. } => ImportResult::Invalid, ImportError::FeeTooLow { .. } => ImportResult::FeeTooLow, ImportError::NotReady => ImportResult::InternalError, ImportError::Ecdsa { .. } => ImportResult::Invalid, ImportError::Decode { .. 
} => ImportResult::Invalid, ImportError::IncompleteMessage => ImportResult::Invalid, ImportError::AlreadyExists { .. } => { ImportResult::AlreadyExists } ImportError::RequestFailed { .. } => { ImportResult::InternalError } ImportError::InsufficientBalance { .. } => { ImportResult::Invalid } } } } } pub use self::decode_error::DecodeError; pub use self::import_error::ImportError;
pub(crate) mod decode_error { use snafu::{ResultExt, Snafu}; use ethereum_interfaces::txpool::ImportResult; #[derive(Debug, Snafu)] #[non_exhaustive] #[snafu(visibility = "pub(crate)")] pub enum DecodeError { RlpDecode { source: Box<dyn std::error::Error + Send + Sync>, field: Option<&'static str>, }, IntegerOverflow, } pub(crate) trait RlpResultExt<T> { fn context_field(self, field: &'static str) -> Result<T, DecodeError>; } impl<T> RlpResultExt<T> for Result<T, rlp::DecoderError> { fn context_field(self, field: &'static str) -> Result<T, DecodeError> { self.map_err(|e| Box::new(e).into()) .context(RlpDecode { field }) } } impl From<DecodeError> for ImportResult {
} } pub(crate) mod import_error { use ethereum_types::{Address, H256, U256}; use snafu::Snafu; use ethereum_interfaces::txpool::ImportResult; #[derive(Debug, Snafu)] #[non_exhaustive] #[snafu(visibility = "pub(crate)")] pub enum ImportError { NonceUsed { tx_hash: H256, }, #[snafu(display( "nonce is too far in the future tx_nonce={} from={} tx={}", from, tx_nonce, tx_hash, ))] NonceGap { from: Address, tx_nonce: u64, tx_hash: H256, }, InsufficientBalance { tx_hash: H256, }, NotReady, FeeTooLow { minimum: U256, }, Ecdsa { source: Box<dyn std::error::Error + Send + Sync>, }, #[snafu(context(false))] Decode { source: super::DecodeError, }, AlreadyExists { tx_hash: H256, }, RequestFailed { source: tonic::Status, }, IncompleteMessage, } impl From<ImportError> for ImportResult { fn from(e: ImportError) -> ImportResult { match e { ImportError::NonceGap { .. } => ImportResult::Invalid, ImportError::NonceUsed { .. } => ImportResult::Invalid, ImportError::FeeTooLow { .. } => ImportResult::FeeTooLow, ImportError::NotReady => ImportResult::InternalError, ImportError::Ecdsa { .. } => ImportResult::Invalid, ImportError::Decode { .. } => ImportResult::Invalid, ImportError::IncompleteMessage => ImportResult::Invalid, ImportError::AlreadyExists { .. } => { ImportResult::AlreadyExists } ImportError::RequestFailed { .. } => { ImportResult::InternalError } ImportError::InsufficientBalance { .. } => { ImportResult::Invalid } } } } } pub use self::decode_error::DecodeError; pub use self::import_error::ImportError;
fn from(e: DecodeError) -> ImportResult { match e { DecodeError::RlpDecode { .. } => ImportResult::Invalid, DecodeError::IntegerOverflow => ImportResult::InternalError, } }
function_block-full_function
[ { "content": "#[async_trait]\n\npub trait Control {\n\n type BlockStream: futures_core::stream::Stream<\n\n Item = Result<BlockDiff, Status>,\n\n >;\n\n\n\n async fn block_stream(\n\n &mut self,\n\n request: BlockStreamRequest,\n\n ) -> Result<Self::BlockStream, Status>;\n\n\n\n...
Rust
src/world/chunk/chunkmanager.rs
Ducolnd/ludwig-world-3d
3eb034d5c14cc95a29be7dab5d564c47c8423daf
use std::collections::HashMap; use std::time::Instant; use crate::world::{ chunk::{chunk::Chunk, pos::*}, constants::*, block::blocks::BlockID, world::World, }; use crate::render::{ low::{ renderer::Renderer, context::Context, }, meshing::chunkmeshing::ChunkMesh, drawables::chunk::ChunkDrawable, }; pub struct ChunkManager { loaded_chunks: HashMap<ChunkPos, Chunk>, chunks_meshes: HashMap<ChunkPos, ChunkMesh>, pub chunk_buffers: HashMap<ChunkPos, ChunkDrawable>, load_queue: Vec<ChunkPos>, render_distance: u32, center_chunk: ChunkPos, updated: bool, chunk_meshing_time: u128, chunk_loading_time: u128, } impl ChunkManager { pub fn new(render_distance: u32) -> Self { let loaded_chunks = HashMap::new(); let chunks_meshes = HashMap::new(); let chunk_buffers = HashMap::new(); Self { loaded_chunks, chunks_meshes, chunk_buffers, load_queue: vec![], render_distance, center_chunk: ChunkPos::new(0, 0, 0), updated: false, chunk_meshing_time: 1, chunk_loading_time: 1, } } pub fn set_camera_location(&mut self, coord: WorldCoord, renderer: &mut Renderer) { let chunkpos = coord.to_chunk_coord(); if self.center_chunk != chunkpos { self.center_around(chunkpos, renderer); self.updated = false; } } pub fn center_around(&mut self, pos: ChunkPos, renderer: &mut Renderer) { let mut targets = vec![]; self.center_chunk = pos; for x in -1 * (self.render_distance as i32)..self.render_distance as i32 { for z in -1 * (self.render_distance as i32)..self.render_distance as i32 { targets.push(ChunkPos::new(pos.x + x, 0, pos.z + z)); } } for pos in self.loaded_chunks.keys().cloned().collect::<Vec<_>>() { if !targets.contains(&pos) { self.unload_chunk(&pos, renderer); } } for pos in targets { if !self.loaded_chunks.contains_key(&pos) { self.queue_chunk_load(pos) } } } pub fn load_chunk(&mut self, pos: ChunkPos, height: [u32; CHUNKSIZE * CHUNKSIZE], renderer: &mut Renderer) { let mut chunk = Chunk::new(pos); renderer.chunkpos_uniform.add(&renderer.queue, pos, pos.to_raw()); let now = Instant::now(); 
chunk.generate(height); let lapsed = now.elapsed(); self.loaded_chunks.insert( pos, chunk, ); self.chunk_loading_time += lapsed.as_micros(); self.mesh_neighbors(pos); } pub fn mesh_neighbors(&mut self, pos: ChunkPos) { self.mesh_chunk(pos); self.mesh_chunk(ChunkPos {x: pos.x + 1, ..pos}); self.mesh_chunk(ChunkPos {x: pos.x - 1, ..pos}); self.mesh_chunk(ChunkPos {z: pos.z + 1, ..pos}); self.mesh_chunk(ChunkPos {z: pos.z - 1, ..pos}); } pub fn mesh_chunk(&mut self, pos: ChunkPos) { let c = &self.loaded_chunks.get(&pos); if !c.is_none() { let mut mesh = ChunkMesh::new(); let now = Instant::now(); mesh.create_simple_mesh(c.unwrap(), &self); let elapsed = now.elapsed(); self.chunks_meshes.insert( pos, mesh ); self.chunk_meshing_time += elapsed.as_micros(); } } pub fn get_neighbors(&self, center: ChunkPos) -> [Option<&Chunk>; 4]{ [ self.get_chunk_option(center + ChunkPos::new(0, 0, 1)), self.get_chunk_option(center + ChunkPos::new(1, 0, 0)), self.get_chunk_option(center + ChunkPos::new(0, 0, -1)), self.get_chunk_option(center + ChunkPos::new(-1, 0, 0)), ] } pub fn queue_chunk_load(&mut self, pos: ChunkPos) { self.load_queue.push(pos); } pub fn load_queue(&mut self, world: &World, renderer: &mut Renderer) { if self.load_queue.len() > 0 { for pos in self.load_queue.clone() { self.load_chunk(pos.clone(), world.map.create_heightmap(&pos), renderer); } self.load_queue.clear(); } } pub fn unload_chunk(&mut self, pos: &ChunkPos, renderer: &mut Renderer) { self.chunks_meshes.remove(pos); self.loaded_chunks.remove(pos); self.chunk_buffers.remove(pos); renderer.chunkpos_uniform.remove(pos); } pub fn update(&mut self, context: &mut Context, encoder: &mut wgpu::CommandEncoder) { if !self.updated { self.updated = true; for (pos, chunk) in &self.chunks_meshes { let mut c = ChunkDrawable::new(&context.renderer.device, *pos); c.from_chunk_mesh(&chunk, &context.renderer.device, encoder); self.chunk_buffers.insert(*pos, c); } } } pub fn get_chunk(&self, pos: ChunkPos) -> &Chunk { 
self.loaded_chunks.get(&pos).unwrap() } pub fn get_chunk_option(&self, pos: ChunkPos) -> Option<&Chunk> { self.loaded_chunks.get(&pos) } pub fn get_chunk_mut_option(&mut self, pos: ChunkPos) -> Option<&mut Chunk> { self.loaded_chunks.get_mut(&pos) } pub fn get_mesh(&self, pos: ChunkPos) -> &ChunkMesh { self.chunks_meshes.get(&pos).unwrap() } pub fn get_block_at_coord(&self, coord: WorldCoord) -> Option<BlockID> { if let Some(chunk) = self.loaded_chunks.get(&coord.to_chunk_coord()) { return Some(chunk.at_coord(coord.to_chunk_local())) } else { return None } } pub fn meshing_time(&self) -> u128 { self.chunk_meshing_time / self.chunks_meshes.len() as u128 } pub fn loading_time(&self) -> u128 { self.chunk_loading_time / self.chunks_meshes.len() as u128 } }
use std::collections::HashMap; use std::time::Instant; use crate::world::{ chunk::{chunk::Chunk, pos::*}, constants::*, block::blocks::BlockID, world::World, }; use crate::render::{ low::{ renderer::Renderer, context::Context, }, meshing::chunkmeshing::ChunkMesh, drawables::chunk::ChunkDrawable, }; pub struct ChunkManager { loaded_chunks: HashMap<ChunkPos, Chunk>, chunks_meshes: HashMap<ChunkPos, ChunkMesh>, pub chunk_buffers: HashMap<ChunkPos, ChunkDrawable>, load_queue: Vec<ChunkPos>, render_distance: u32, center_chunk: ChunkPos, updated: bool, chunk_meshing_time: u128, chunk_loading_time: u128, } impl ChunkManager { pub fn new(render_distance: u32) -> Self { let loaded_chunks = HashMap::new(); let chunks_meshes = HashMap::new(); let chunk_buffers = HashMap::new(); Self { loaded_chunks, chunks_meshes, chunk_buffers, load_queue: vec![], render_distance, center_chunk: ChunkPos::new(0, 0, 0), updated: false, chunk_meshing_time: 1, chunk_loading_time: 1, } } pub fn set_camera_location(&mut self, coord: WorldCoord, renderer: &mut Renderer) { let chunkpos = coord.to_chunk_coord(); if self.center_chunk != chunkpos { self.center_around(chunkpos, renderer); self.updated = false; } } pub fn center_around(&mut self, pos: ChunkPos, renderer: &mut Renderer) { let mut targets = vec![]; self.center_chunk = pos; for x in -1 * (self.render_distance as i32)..self.render_distance as i32 { for z in -1 * (self.render_distance as i32)..self.render_distance as i32 { targets.push(ChunkPos::new(pos.x + x, 0, pos.z + z)); } } for pos in self.loaded_chunks.keys().cloned().collect::<Vec<_>>() { if !targets.contains(&pos) { self.unload_chunk(&pos, renderer); } } for pos in targets { if !self.loaded_chunks.contains_key(&pos) { self.queue_chunk_load(pos) } } } pub fn load_chunk(&mut self, pos: ChunkPos, height: [u32; CHUNKSIZE * CHUNKSIZE], renderer: &mut Renderer) { let mut chunk = Chunk::new(pos); renderer.chunkpos_uniform.add(&renderer.queue, pos, pos.to_raw()); let now = Instant::now(); 
chunk.generate(height); let lapsed = now.elapsed(); self.loaded_chunks.insert( pos, chunk, ); self.chunk_loading_time += lapsed.as_micros(); self.mesh_neighbors(pos); } pub fn mesh_neighbors(&mut self, pos: ChunkPos) { self.mesh_chunk(pos); self.mesh_chunk(ChunkPos {x: pos.x + 1, ..pos}); self.mesh_chunk(ChunkPos {x: pos.x - 1, ..pos}); self.mesh_chunk(ChunkPos {z: pos.z + 1, ..pos}); self.mesh_chunk(ChunkPos {z: pos.z - 1, ..pos}); } pub fn mesh_chunk(&mut self, pos: ChunkPos) { let c = &self.loaded_chunks.get(&pos); if !c.is_none() { let mut mesh = ChunkMesh::new(); let now = Instant::now(); mesh.create_simple_mesh(c.unwrap(), &self); let elapsed = now.elapsed(); self.chunks_meshes.insert( pos, mesh ); self.chunk_meshing_time += elapsed.as_micros(); } } pub fn get_neighbors(&self, center: ChunkPos) -> [Option<&Chunk>;
pub fn queue_chunk_load(&mut self, pos: ChunkPos) { self.load_queue.push(pos); } pub fn load_queue(&mut self, world: &World, renderer: &mut Renderer) { if self.load_queue.len() > 0 { for pos in self.load_queue.clone() { self.load_chunk(pos.clone(), world.map.create_heightmap(&pos), renderer); } self.load_queue.clear(); } } pub fn unload_chunk(&mut self, pos: &ChunkPos, renderer: &mut Renderer) { self.chunks_meshes.remove(pos); self.loaded_chunks.remove(pos); self.chunk_buffers.remove(pos); renderer.chunkpos_uniform.remove(pos); } pub fn update(&mut self, context: &mut Context, encoder: &mut wgpu::CommandEncoder) { if !self.updated { self.updated = true; for (pos, chunk) in &self.chunks_meshes { let mut c = ChunkDrawable::new(&context.renderer.device, *pos); c.from_chunk_mesh(&chunk, &context.renderer.device, encoder); self.chunk_buffers.insert(*pos, c); } } } pub fn get_chunk(&self, pos: ChunkPos) -> &Chunk { self.loaded_chunks.get(&pos).unwrap() } pub fn get_chunk_option(&self, pos: ChunkPos) -> Option<&Chunk> { self.loaded_chunks.get(&pos) } pub fn get_chunk_mut_option(&mut self, pos: ChunkPos) -> Option<&mut Chunk> { self.loaded_chunks.get_mut(&pos) } pub fn get_mesh(&self, pos: ChunkPos) -> &ChunkMesh { self.chunks_meshes.get(&pos).unwrap() } pub fn get_block_at_coord(&self, coord: WorldCoord) -> Option<BlockID> { if let Some(chunk) = self.loaded_chunks.get(&coord.to_chunk_coord()) { return Some(chunk.at_coord(coord.to_chunk_local())) } else { return None } } pub fn meshing_time(&self) -> u128 { self.chunk_meshing_time / self.chunks_meshes.len() as u128 } pub fn loading_time(&self) -> u128 { self.chunk_loading_time / self.chunks_meshes.len() as u128 } }
4]{ [ self.get_chunk_option(center + ChunkPos::new(0, 0, 1)), self.get_chunk_option(center + ChunkPos::new(1, 0, 0)), self.get_chunk_option(center + ChunkPos::new(0, 0, -1)), self.get_chunk_option(center + ChunkPos::new(-1, 0, 0)), ] }
function_block-function_prefixed
[ { "content": "// Helper function\n\npub fn coords_to_float(coords: [u32; 2]) -> [f32; 2] {\n\n [\n\n coords[0] as f32 / TEXTURE_IMAGE_WIDTH as f32,\n\n coords[1] as f32 / TEXTURE_IMAGE_HEIGHT as f32,\n\n ]\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct TextureTile {\n\n pub coord...
Rust
src/engine/obj.rs
PistonDevelopers/dyon_asteroids
51f944cac511e26b4d772c57e1ea245ffab280a1
use wavefront_obj::mtl::MtlSet; use wavefront_obj::obj::{self, ObjSet}; use std::sync::Arc; use current::Current; use dyon::*; use dyon::embed::{PushVariable, PopVariable}; pub type Materials = Vec<(Arc<String>, MtlSet)>; pub type ObjSets = Vec<(Arc<String>, ObjSet)>; pub fn register_obj(module: &mut Module) { module.add(Arc::new("load__material".into()), load__material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("material".into()), material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("materials".into()), materials, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("load__obj".into()), load__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("obj".into()), obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("objs".into()), objs, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("material_library__obj".into()), material_library__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Option(Box::new(Type::Text)) }); module.add(Arc::new("object_count__obj".into()), object_count__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::F64 }); module.add(Arc::new("objects__obj".into()), objects__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("vertex_count__obj_object".into()), vertex_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("tex_vertex_count__obj_object".into()), tex_vertex_count__obj_object, PreludeFunction { 
lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("normal_count__obj_object".into()), normal_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("geometry_count__obj_object".into()), geometry_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("vertex__obj_object_vertex".into()), vertex__obj_object_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("tex_vertex__obj_object_tex_vertex".into()), tex_vertex__obj_object_tex_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("normal__obj_object_normal".into()), normal__obj_object_normal, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("vertices__obj_object".into()), vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("tex_vertices__obj_object".into()), tex_vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("normals__obj_object".into()), normals__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("geometry__obj_object".into()), geometry__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::object())) }); } dyon_fn!{fn load__material(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::mtl::parse; use std::fs::File; use std::io::Read; use std::error::Error; let materials = unsafe { &mut *Current::<Materials>::new() }; let mut f = try!(File::open(&**file).map_err(|err| 
String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let mtlset = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); materials.push((file.clone(), mtlset)); Ok(file) }} dyon_fn!{fn load__obj(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::obj::parse; use std::fs::File; use std::io::Read; use std::error::Error; let obj_sets = unsafe { &mut *Current::<ObjSets>::new() }; let mut f = try!(File::open(&**file).map_err(|err| String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let obj_set = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); obj_sets.push((file.clone(), obj_set)); Ok(file) }} dyon_fn!{fn material(file: Arc<String>) -> Option<usize> { let materials = unsafe { &*Current::<Materials>::new() }; for (i, mat) in materials.iter().enumerate() { if &mat.0 == &file { return Some(i); } } None }} dyon_fn!{fn obj(file: Arc<String>) -> Option<usize> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; for (i, obj_set) in obj_sets.iter().enumerate() { if &obj_set.0 == &file { return Some(i); } } None }} dyon_fn!{fn materials() -> Vec<Arc<String>> { let materials = unsafe { &*Current::<Materials>::new() }; materials.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn objs() -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn material_library__obj(ind: usize) -> Option<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.material_library.as_ref().map(|n| Arc::new(n.clone())) }} dyon_fn!{fn object_count__obj(ind: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.len() }} dyon_fn!{fn 
objects__obj(ind: usize) -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.iter().map(|n| Arc::new(n.name.clone())).collect() }} dyon_fn!{fn vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.len() }} dyon_fn!{fn tex_vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.len() }} dyon_fn!{fn normal_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.len() }} dyon_fn!{fn geometry_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.len() }} dyon_fn!{fn vertex__obj_object_vertex (obj: usize, object: usize, vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let vertex = obj_sets[obj].1.objects[object].vertices[vertex]; [vertex.x, vertex.y, vertex.z].into() }} dyon_fn!{fn tex_vertex__obj_object_tex_vertex (obj: usize, object: usize, tex_vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let tex_vertex = obj_sets[obj].1.objects[object].tex_vertices[tex_vertex]; [tex_vertex.x, tex_vertex.y].into() }} dyon_fn!{fn normal__obj_object_normal (obj: usize, object: usize, normal: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let normal = obj_sets[obj].1.objects[object].normals[normal]; [normal.x, normal.y, normal.z].into() }} dyon_fn!{fn vertices__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.iter() .map(|vertex| [vertex.x, vertex.y, vertex.z].into()).collect() }} dyon_fn!{fn tex_vertices__obj_object(obj: usize, object: usize) -> 
Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.iter() .map(|tex_vertex| [tex_vertex.x, tex_vertex.y].into()).collect() }} dyon_fn!{fn normals__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.iter() .map(|normal| [normal.x, normal.y, normal.z].into()).collect() }} pub struct Geometry { pub material_name: Option<Arc<String>>, pub smooth_shading_group: usize, pub shapes: Vec<Shape>, } impl<'a> From<&'a obj::Geometry> for Geometry { fn from(val: &'a obj::Geometry) -> Geometry { Geometry { material_name: val.material_name.as_ref().map(|n| Arc::new(n.clone())), smooth_shading_group: val.smooth_shading_group, shapes: val.shapes.iter().map(|n| Shape(n.clone())).collect() } } } dyon_obj!{Geometry { material_name, smooth_shading_group, shapes }} dyon_fn!{fn geometry__obj_object(obj: usize, object: usize) -> Vec<Geometry> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.iter() .map(|geometry| geometry.into()).collect() }} pub struct Shape(pub obj::Shape); impl PopVariable for Shape { fn pop_var(rt: &Runtime, var: &Variable) -> Result<Self, String> { if let &Variable::Array(ref arr) = var { Ok(match arr.len() { 1 => { Shape(obj::Shape::Point(try!(rt.var(&arr[0])))) } 2 => { Shape(obj::Shape::Line(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])))) } 3 => { Shape(obj::Shape::Triangle(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])), try!(rt.var(&arr[2])))) } _ => return Err(rt.expected(var, "array of length 1, 2, 3")) }) } else { Err(rt.expected(var, "array")) } } } impl PushVariable for Shape { fn push_var(&self) -> Variable { match self.0 { obj::Shape::Point(ref p) => Variable::Array(Arc::new(vec![p.push_var()])), obj::Shape::Line(ref a, ref b) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var() ])), obj::Shape::Triangle(ref a, ref b, ref c) => 
Variable::Array(Arc::new(vec![ a.push_var(), b.push_var(), c.push_var() ])) } } }
use wavefront_obj::mtl::MtlSet; use wavefront_obj::obj::{self, ObjSet}; use std::sync::Arc; use current::Current; use dyon::*; use dyon::embed::{PushVariable, PopVariable}; pub type Materials = Vec<(Arc<String>, MtlSet)>; pub type ObjSets = Vec<(Arc<String>, ObjSet)>; pub fn register_obj(module: &mut Module) { module.add(Arc::new("load__material".into()), load__material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("material".into()), material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("materials".into()), materials, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("load__obj".into()), load__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("obj".into()), obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("objs".into()), objs, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("material_library__obj".into()), material_library__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Option(Box::new(Type::Text)) }); module.add(Arc::new("object_count__obj".into()), object_count__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::F64 }); module.add(Arc::new("objects__obj".into()), objects__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("vertex_count__obj_object".into()), vertex_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("tex_vertex_count__obj_object".into()), tex_vertex_count__obj_object, PreludeFunction { 
lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("normal_count__obj_object".into()), normal_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("geometry_count__obj_object".into()), geometry_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("vertex__obj_object_vertex".into()), vertex__obj_object_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("tex_vertex__obj_object_tex_vertex".into()), tex_vertex__obj_object_tex_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("normal__obj_object_normal".into()), normal__obj_object_normal, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("vertices__obj_object".into()), vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("tex_vertices__obj_object".into()), tex_vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("normals__obj_object".into()), normals__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("geometry__obj_object".into()), geometry__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::object())) }); } dyon_fn!{fn load__material(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::mtl::parse; use std::fs::File; use std::io::Read; use std::error::Error; let materials = unsafe { &mut *Current::<Materials>::new() }; let mut f = try!(File::open(&**file).map_err(|err| 
String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let mtlset = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); materials.push((file.clone(), mtlset)); Ok(file) }} dyon_fn!{fn load__obj(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::obj::parse; use std::fs::File; use std::io::Read; use std::error::Error; let obj_sets = unsafe { &mut *Current::<ObjSets>::new() }; let mut f = try!(File::open(&**file).map_err(|err| String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let obj_set = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); obj_sets.push((file.clone(), obj_set)); Ok(file) }} dyon_fn!{fn material(file: Arc<String>) -> Option<usize> { let materials = unsafe { &*Current::<Materials>::new() }; for (i, mat) in materials.iter().enumerate() { if &mat.0 == &file { return Some(i); } } None }} dyon_fn!{fn obj(file: Arc<String>) -> Option<usize> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; for (i, obj_set) in obj_sets.iter().enumerate() { if &obj_set.0 == &file { return Some(i); } } None }} dyon_fn!{fn materials() -> Vec<Arc<String>> { let materials = unsafe { &*Current::<Materials>::new() }; materials.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn objs() -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn material_library__obj(ind: usize) -> Option<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.material_library.as_ref().map(|n| Arc::new(n.clone())) }} dyon_fn!{fn object_count__obj(ind: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.len() }} dyon_fn!{fn 
objects__obj(ind: usize) -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.iter().map(|n| Arc::new(n.name.clone())).collect() }} dyon_fn!{fn vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.len() }} dyon_fn!{fn tex_vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.len() }} dyon_fn!{fn normal_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.len() }} dyon_fn!{fn geometry_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.len() }} dyon_fn!{fn vertex__obj_object_vertex (obj: usize, object: usize, vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let vertex = obj_sets[obj].1.objects[object].vertices[vertex]; [vertex.x, vertex.y, vertex.z].into() }} dyon_fn!{fn tex_vertex__obj_object_tex_vertex (obj: usize, object: usize, tex_vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let tex_vertex = obj_sets[obj].1.objects[object].tex_vertices[tex_vertex]; [tex_vertex.x, tex_vertex.y].into() }} dyon_fn!{fn normal__obj_object_normal (obj: usize, object: usize, normal: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let normal = obj_sets[obj].1.objects[object].normals[normal]; [normal.x, normal.y, normal.z].into() }} dyon_fn!{fn vertices__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.iter() .map(|vertex| [vertex.x, vertex.y, vertex.z].into()).collect() }} dyon_fn!{fn tex_vertices__obj_object(obj: usize, object: usize) -> 
Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.iter() .map(|tex_vertex| [tex_vertex.x, tex_vertex.y].into()).collect() }} dyon_fn!{fn normals__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.iter() .map(|normal| [normal.x, normal.y, normal.z].into()).collect() }} pub struct Geometry { pub material_name: Option<Arc<String>>, pub smooth_shading_group: usize, pub shapes: Vec<Shape>, } impl<'a> From<&'a obj::Geometry> for Geometry { fn from(val: &'a obj::Geometry) -> Geometry { Geometry { material_name: val.material_name.as_ref().map(|n| Arc::new(n.clone())), smooth_shading_group: val.smooth_shading_group, shapes: val.shapes.iter().map(|n| Shape(n.clone())).collect() } } } dyon_obj!{Geometry { material_name, smooth_shading_group, shapes }} dyon_fn!{fn geometry__obj_object(obj: usize, object: usize) -> Vec<Geometry> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.iter() .map(|geometry| geometry.into()).collect() }} pub struct Shape(pub obj::Shape); impl PopVariable for Shape { fn pop_var(rt: &Runtime, var: &Variable) -> Result<Self, String> { if let &Variable::Array(ref arr) = var {
try!(rt.var(&arr[2])))) } _ => return Err(rt.expected(var, "array of length 1, 2, 3")) }) } else { Err(rt.expected(var, "array")) } } } impl PushVariable for Shape { fn push_var(&self) -> Variable { match self.0 { obj::Shape::Point(ref p) => Variable::Array(Arc::new(vec![p.push_var()])), obj::Shape::Line(ref a, ref b) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var() ])), obj::Shape::Triangle(ref a, ref b, ref c) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var(), c.push_var() ])) } } }
Ok(match arr.len() { 1 => { Shape(obj::Shape::Point(try!(rt.var(&arr[0])))) } 2 => { Shape(obj::Shape::Line(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])))) } 3 => { Shape(obj::Shape::Triangle(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])),
function_block-random_span
[ { "content": "pub fn register_shader(module: &mut Module) {\n\n module.add(Arc::new(\"load_program__name_vshader_fshader\".into()),\n\n load_program__name_vshader_fshader, PreludeFunction {\n\n lts: vec![Lt::Default; 3],\n\n tys: vec![Type::Text; 3],\n\n ret: Type::Res...
Rust
src/text.rs
zyxw59/conlang_fmt
2c74dd805de38fff5b28c644517c3347ba93d3a5
use std::io::{Result as IoResult, Write}; use crate::blocks::{BlockCommon, BlockType, Parameter, UpdateParam}; use crate::document::Document; use crate::errors::Result as EResult; use crate::html; type OResult<T> = EResult<Option<T>>; pub trait Referenceable { fn reference_text(&self) -> Text; } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Text(pub Vec<Inline>); pub const EMPTY_TEXT: &'static Text = &Text(Vec::new()); impl Text { pub fn new() -> Text { Default::default() } pub fn push(&mut self, element: impl Into<Inline>) { self.0.push(element.into()); } pub fn extend(&mut self, other: &Text) { self.0.extend_from_slice(&other.0) } pub fn with_class(self, class: impl Into<String>) -> Text { Text(vec![Inline { kind: InlineType::Span(self), common: InlineCommon { class: class.into(), }, }]) } pub fn write_inline(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write(w, &t.common, document)?; } Ok(()) } pub fn write_inline_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write_plain(w, document)?; } Ok(()) } pub fn starts_with(&self, c: char) -> bool { match self.0.first() { Some(inline) => inline.kind.starts_with(c), None => false, } } pub fn ends_with(&self, c: char) -> bool { match self.0.last() { Some(inline) => inline.kind.ends_with(c), None => false, } } } impl BlockType for Text { fn write(&self, w: &mut dyn Write, _common: &BlockCommon, document: &Document) -> IoResult<()> { write!(w, "<p>")?; self.write_inline(w, document)?; writeln!(w, "</p>\n")?; Ok(()) } } impl<T> From<T> for Text where T: Into<String>, { fn from(s: T) -> Text { let mut t = Text::new(); t.push(s.into()); t } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Inline { pub kind: InlineType, pub common: InlineCommon, } impl Inline {} impl<T> From<(InlineType, T)> for Inline where T: Into<InlineCommon>, { fn from((kind, common): (InlineType, T)) -> Inline { Inline { kind, common: 
common.into(), } } } impl From<String> for Inline { fn from(s: String) -> Inline { Inline::from((InlineType::Text(s), String::new())) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct InlineCommon { pub class: String, } impl InlineCommon { pub fn new() -> InlineCommon { Default::default() } } impl UpdateParam for InlineCommon { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match param.0.as_ref().map(|n| n.as_ref()) { Some("class") | None => { self.class = param.1; None } _ => Some(param), }) } } impl<T> From<T> for InlineCommon where T: Into<String>, { fn from(class: T) -> InlineCommon { InlineCommon { class: class.into(), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub enum InlineType { Emphasis(Text), Strong(Text), Italics(Text), Bold(Text), SmallCaps(Text), Span(Text), Replace(String), Reference(String), Link(Link), Text(String), } impl InlineType { pub fn link() -> InlineType { InlineType::Link(Default::default()) } pub fn reference() -> InlineType { InlineType::Reference(Default::default()) } fn write(&self, w: &mut dyn Write, common: &InlineCommon, document: &Document) -> IoResult<()> { if let Some(tag) = self.tag() { write!(w, "<{} ", tag)?; write!( w, "class=\"{} {}\"", html::Encoder(self.class()), html::Encoder(&common.class) )?; if let InlineType::Link(link) = self { write!(w, " href=\"{}\"", html::Encoder(&link.url))?; } else if let InlineType::Reference(id) = self { write!(w, " href=\"#{}\"", html::Encoder(id))?; } write!(w, ">")?; } match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. 
}) => t.write_inline(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable.reference_text().write_inline(w, document)?; } else { write!( w, "<span class=\"unreferenceable-block\">#{}</span>", html::Encoder(id) )?; } } else { write!( w, "<span class=\"undefined-reference\">#{}</span>", html::Encoder(id) )?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline(w, &document)?, None => { write!( w, "<span class=\"undefined-replace\">:{}:</span>", html::Encoder(key) )?; } }, } if let Some(tag) = self.tag() { write!(w, "</{}>", tag)?; } Ok(()) } fn write_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. 
}) => t.write_inline_plain(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable .reference_text() .write_inline_plain(w, document)?; } else { write!(w, "#{}", html::Encoder(id))?; } } else { write!(w, "#{}", html::Encoder(id))?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline_plain(w, &document)?, None => write!(w, ":{}:", html::Encoder(key))?, }, } Ok(()) } fn tag(&self) -> Option<&'static str> { use self::InlineType::*; match self { Emphasis(_) => Some("em"), Strong(_) => Some("strong"), Italics(_) => Some("i"), Bold(_) => Some("b"), Link(_) | Reference(_) => Some("a"), Text(_) => None, _ => Some("span"), } } fn class(&self) -> &'static str { use self::InlineType::*; match self { SmallCaps(_) => "small-caps", Reference(_) => "reference", _ => "", } } fn starts_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.starts_with(c), InlineType::Text(s) => s.starts_with(c), _ => false, } } fn ends_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. 
}) => t.ends_with(c), InlineType::Text(s) => s.ends_with(c), _ => false, } } } impl UpdateParam for InlineType { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match *self { InlineType::Reference(ref mut s) => match param.0.as_ref().map(|p| p.as_ref()) { Some("ref") | None => { *s = param.1; None } _ => Some(param), }, InlineType::Link(ref mut link) => match param.0.as_ref().map(|p| p.as_ref()) { Some("link") | None => { link.url = param.1; None } Some("title") => { link.title = param.1.into(); None } _ => Some(param), }, _ => Some(param), }) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Link { pub url: String, pub title: Text, }
use std::io::{Result as IoResult, Write}; use crate::blocks::{BlockCommon, BlockType, Parameter, UpdateParam}; use crate::document::Document; use crate::errors::Result as EResult; use crate::html; type OResult<T> = EResult<Option<T>>; pub trait Referenceable { fn reference_text(&self) -> Text; } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Text(pub Vec<Inline>); pub const EMPTY_TEXT: &'static Text = &Text(Vec::new()); impl Text { pub fn new() -> Text { Default::default() } pub fn push(&mut self, element: impl Into<Inline>) { self.0.push(element.into()); } pub fn extend(&mut self, other: &Text) { self.0.extend_from_slice(&other.0) } pub fn with_class(self, class: impl Into<String>) -> Text { Text(vec![Inline { kind: InlineType::Span(self), common: InlineCommon { class: class.into(), }, }]) } pub fn write_inline(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write(w, &t.common, document)?; } Ok(()) } pub fn w
} pub fn starts_with(&self, c: char) -> bool { match self.0.first() { Some(inline) => inline.kind.starts_with(c), None => false, } } pub fn ends_with(&self, c: char) -> bool { match self.0.last() { Some(inline) => inline.kind.ends_with(c), None => false, } } } impl BlockType for Text { fn write(&self, w: &mut dyn Write, _common: &BlockCommon, document: &Document) -> IoResult<()> { write!(w, "<p>")?; self.write_inline(w, document)?; writeln!(w, "</p>\n")?; Ok(()) } } impl<T> From<T> for Text where T: Into<String>, { fn from(s: T) -> Text { let mut t = Text::new(); t.push(s.into()); t } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Inline { pub kind: InlineType, pub common: InlineCommon, } impl Inline {} impl<T> From<(InlineType, T)> for Inline where T: Into<InlineCommon>, { fn from((kind, common): (InlineType, T)) -> Inline { Inline { kind, common: common.into(), } } } impl From<String> for Inline { fn from(s: String) -> Inline { Inline::from((InlineType::Text(s), String::new())) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct InlineCommon { pub class: String, } impl InlineCommon { pub fn new() -> InlineCommon { Default::default() } } impl UpdateParam for InlineCommon { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match param.0.as_ref().map(|n| n.as_ref()) { Some("class") | None => { self.class = param.1; None } _ => Some(param), }) } } impl<T> From<T> for InlineCommon where T: Into<String>, { fn from(class: T) -> InlineCommon { InlineCommon { class: class.into(), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub enum InlineType { Emphasis(Text), Strong(Text), Italics(Text), Bold(Text), SmallCaps(Text), Span(Text), Replace(String), Reference(String), Link(Link), Text(String), } impl InlineType { pub fn link() -> InlineType { InlineType::Link(Default::default()) } pub fn reference() -> InlineType { InlineType::Reference(Default::default()) } fn write(&self, w: &mut dyn Write, common: &InlineCommon, document: &Document) 
-> IoResult<()> { if let Some(tag) = self.tag() { write!(w, "<{} ", tag)?; write!( w, "class=\"{} {}\"", html::Encoder(self.class()), html::Encoder(&common.class) )?; if let InlineType::Link(link) = self { write!(w, " href=\"{}\"", html::Encoder(&link.url))?; } else if let InlineType::Reference(id) = self { write!(w, " href=\"#{}\"", html::Encoder(id))?; } write!(w, ">")?; } match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.write_inline(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable.reference_text().write_inline(w, document)?; } else { write!( w, "<span class=\"unreferenceable-block\">#{}</span>", html::Encoder(id) )?; } } else { write!( w, "<span class=\"undefined-reference\">#{}</span>", html::Encoder(id) )?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline(w, &document)?, None => { write!( w, "<span class=\"undefined-replace\">:{}:</span>", html::Encoder(key) )?; } }, } if let Some(tag) = self.tag() { write!(w, "</{}>", tag)?; } Ok(()) } fn write_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. 
}) => t.write_inline_plain(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable .reference_text() .write_inline_plain(w, document)?; } else { write!(w, "#{}", html::Encoder(id))?; } } else { write!(w, "#{}", html::Encoder(id))?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline_plain(w, &document)?, None => write!(w, ":{}:", html::Encoder(key))?, }, } Ok(()) } fn tag(&self) -> Option<&'static str> { use self::InlineType::*; match self { Emphasis(_) => Some("em"), Strong(_) => Some("strong"), Italics(_) => Some("i"), Bold(_) => Some("b"), Link(_) | Reference(_) => Some("a"), Text(_) => None, _ => Some("span"), } } fn class(&self) -> &'static str { use self::InlineType::*; match self { SmallCaps(_) => "small-caps", Reference(_) => "reference", _ => "", } } fn starts_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.starts_with(c), InlineType::Text(s) => s.starts_with(c), _ => false, } } fn ends_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. 
}) => t.ends_with(c), InlineType::Text(s) => s.ends_with(c), _ => false, } } } impl UpdateParam for InlineType { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match *self { InlineType::Reference(ref mut s) => match param.0.as_ref().map(|p| p.as_ref()) { Some("ref") | None => { *s = param.1; None } _ => Some(param), }, InlineType::Link(ref mut link) => match param.0.as_ref().map(|p| p.as_ref()) { Some("link") | None => { link.url = param.1; None } Some("title") => { link.title = param.1.into(); None } _ => Some(param), }, _ => Some(param), }) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Link { pub url: String, pub title: Text, }
rite_inline_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write_plain(w, document)?; } Ok(())
function_block-random_span
[ { "content": "pub trait BlockType: Debug {\n\n /// Outputs the block.\n\n fn write(&self, w: &mut dyn Write, common: &BlockCommon, document: &Document) -> IoResult<()>;\n\n\n\n /// Updates with the given parameter. If the parameter was not updated, returns the parameter.\n\n fn update_param(&mut sel...
Rust
rustracts/src/contracts/option.rs
JOE1994/rustracts
c15541d7968aea40d06dadd5e2c5cb57b4d6d341
use std::sync::Mutex; use std::time::Duration; use crate::context::{ContextError, ContextErrorKind, ContractContext}; use crate::park::{WaitMessage, WaitThread}; use crate::time::Timer; use crate::{Contract, ContractExt, Status}; use futures::{ future::{FusedFuture, Future}, task::{Context, Poll}, }; use parc::{LockWeak, ParentArc}; #[must_use = "contracts do nothing unless polled or awaited"] pub struct OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { runner: WaitThread, timer: Timer, void_context: Option<ParentArc<Mutex<VC>>>, prod_context: Option<ParentArc<Mutex<PC>>>, on_exe: Option<F>, } impl<F, VC, PC, R> OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { pub fn new(expire: Duration, void_c: VC, prod_c: PC, on_exe: F) -> Self { Self { runner: WaitThread::new(), timer: Timer::new(expire), void_context: Some(ParentArc::new(Mutex::new(void_c))), prod_context: Some(ParentArc::new(Mutex::new(prod_c))), on_exe: Some(on_exe), } } fn poll_prod(&self) -> bool { match &self.prod_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } pin_utils::unsafe_pinned!(timer: Timer); pin_utils::unsafe_unpinned!(void_context: Option<ParentArc<Mutex<VC>>>); pin_utils::unsafe_unpinned!(prod_context: Option<ParentArc<Mutex<PC>>>); pin_utils::unsafe_unpinned!(on_exe: Option<F>); } impl<F, VC, PC, R> Contract for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { fn poll_valid(&self) -> bool { match &self.void_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } fn execute(mut self: std::pin::Pin<&mut Self>) -> Self::Output { let vlockarc = self .as_mut() .void_context() .take() .expect("Cannot poll after expiration"); let plockarc = self .as_mut() .prod_context() .take() .expect("Cannot poll after expiration"); let vcontext = 
vlockarc.block_into_inner().into_inner().unwrap(); let pcontext = plockarc.block_into_inner().into_inner().unwrap(); let f = self .as_mut() .on_exe() .take() .expect("Cannot run a contract after expiration"); Status::Completed(f((vcontext, pcontext))) } fn void(self: std::pin::Pin<&mut Self>) -> Self::Output { Status::Terminated } } impl<F, VC, PC, R> ContractExt for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Context = (LockWeak<Mutex<VC>>, LockWeak<Mutex<PC>>); fn get_context(&self) -> Result<Self::Context, ContextError> { match (&self.void_context, &self.prod_context) { (Some(ref vc), Some(ref pc)) => { Ok((ParentArc::downgrade(vc), ParentArc::downgrade(pc))) } _ => Err(ContextError::from(ContextErrorKind::ExpiredContext)), } } } impl<F, VC, PC, R> Future for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Output = Status<R>; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { self.runner .sender() .send(WaitMessage::WakeIn { waker: cx.waker().clone(), duration: Duration::new(0, 100), }) .unwrap(); let mv = ( self.as_mut().timer().poll(cx), self.poll_valid(), self.poll_prod(), ); match mv { (Poll::Ready(_), true, true) => Poll::Ready(self.execute()), (Poll::Ready(_), true, false) => Poll::Ready(self.void()), (Poll::Pending, true, _) => Poll::Pending, (_, false, _) => Poll::Ready(self.void()), } } } impl<F, VC, PC, R> FusedFuture for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { fn is_terminated(&self) -> bool { self.void_context.is_none() || self.prod_context.is_none() || self.on_exe.is_none() } } #[cfg(test)] mod tests { use super::OptionContract; use crate::context::cmp::EqContext; use crate::{ContractExt, Status}; use std::time::Duration; #[test] fn prod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c 
= OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); if let Status::Completed(val) = futures::executor::block_on(c) { assert_eq!(val, 5); } else { assert!(false); } } #[test] fn void_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let handle = std::thread::spawn({ let (vcontext, _) = c.get_context().unwrap(); move || match vcontext.upgrade() { Some(vc) => vc.lock().unwrap().0 += 1, None => {} } }); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } handle.join().unwrap(); } #[test] fn noprod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let _ = std::thread::spawn({ let (_, pcontext) = c.get_context().unwrap(); move || match pcontext.upgrade() { Some(pc) => pc.lock().unwrap().0 += 1, None => {} } }) .join(); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } } }
use std::sync::Mutex; use std::time::Duration; use crate::context::{ContextError, ContextErrorKind, ContractContext}; use crate::park::{WaitMessage, WaitThread}; use crate::time::Timer; use crate::{Contract, ContractExt, Status}; use futures::{ future::{FusedFuture, Future}, task::{Context, Poll}, }; use parc::{LockWeak, ParentArc}; #[must_use = "contracts do nothing unless polled or awaited"] pub struct OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { runner: WaitThread, timer: Timer, void_context: Option<ParentArc<Mutex<VC>>>, prod_context: Option<ParentArc<Mutex<PC>>>, on_exe: Option<F>, } impl<F, VC, PC, R> OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { pub fn new(expire: Duration, void_c: VC, prod_c: PC, on_exe: F) -> Self { Self { runner: WaitThread::new(), timer: Timer::new(expire), void_context: Some(ParentArc::new(Mutex::new(void_c))), prod_context: Some(ParentArc::new(Mutex::new(prod_c))), on_exe: Some(on_exe), } } fn poll_prod(&self) -> bool { match &self.prod_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } pin_utils::unsafe_pinned!(timer: Timer); pin_utils::unsafe_unpinned!(void_context: Option<ParentArc<Mutex<VC>>>); pin_utils::unsafe_unpinned!(prod_context: Option<ParentArc<Mutex<PC>>>); pin_utils::unsafe_unpinned!(on_exe: Option<F>); } impl<F, VC, PC, R> Contract for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { fn poll_valid(&self) -> bool { match &self.void_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } fn execute(mut self: std::pin::Pin<&mut Self>) -> Self::Output { let vlockarc = self .as_mut() .void_context() .take() .expect("Cannot poll after expiration"); let plockarc = self .as_mut() .prod_context() .take() .expect("Cannot poll after expiration"); let vcontext = 
vlockarc.block_into_inner().into_inner().unwrap(); let pcontext = plockarc.block_into_inner().into_inner().unwrap(); let f = self .as_mut() .on_exe() .take() .expect("Cannot run a contract after expiration"); Status::Completed(f((vcontext, pcontext))) } fn void(self: std::pin::Pin<&mut Self>) -> Self::Output { Status::Terminated } } impl<F, VC, PC, R> ContractExt for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Context = (LockWeak<Mutex<VC>>, LockWeak<Mutex<PC>>); fn get_context(&self) -> Result<Self::Context, ContextError> { match (&self.void_context, &self.prod_context) { (Some(ref vc), Some(ref pc)) => { Ok((ParentArc::downgrade(vc), ParentArc::downgrade(pc))) } _ => Err(ContextError::from(ContextErrorKind::ExpiredContext)), } } } impl<F, VC, PC, R> Future for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Output = Status<R>; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { self.runner .sender() .send(WaitMessage::WakeIn { waker: cx.waker().clone(), duration: Duration::new(0, 100), }) .unwrap(); let mv = ( self.as_mut().timer().poll(cx), self.poll_valid(), self.poll_prod(), ); match mv { (Poll::Ready(_), true, true) => Poll::Ready(self.execute()), (Poll::Ready(_), true, false) => Poll::Ready(self.void()), (Poll::Pending, true, _) => Poll::Pending, (_, false, _) => Poll::Ready(self.void()), } } } impl<F, VC, PC, R> FusedFuture for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { fn is_terminated(&self) -> bool { self.void_context.is_none() || self.prod_context.is_none() || self.on_exe.is_none() } } #[cfg(test)] mod tests { use super::OptionContract; use crate::context::cmp::EqContext; use crate::{ContractExt, Status}; use std::time::Duration; #[test] fn prod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c 
= OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); if let Status::Completed(val) = futures::executor::block_on(c) { assert_eq!(val, 5); } else { assert!(false); } } #[test]
#[test] fn noprod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let _ = std::thread::spawn({ let (_, pcontext) = c.get_context().unwrap(); move || match pcontext.upgrade() { Some(pc) => pc.lock().unwrap().0 += 1, None => {} } }) .join(); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } } }
fn void_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let handle = std::thread::spawn({ let (vcontext, _) = c.get_context().unwrap(); move || match vcontext.upgrade() { Some(vc) => vc.lock().unwrap().0 += 1, None => {} } }); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } handle.join().unwrap(); }
function_block-full_function
[]
Rust
src/protocol/flv.rs
nintha/river
56a63312dd7fef48f73dbd3985022604c739ba40
use byteorder::{BigEndian, ByteOrder}; use crate::protocol::rtmp::{ChunkMessageType, RtmpMessage}; use crate::util::spawn_and_log_error; use smol::channel::Receiver; use std::convert::TryFrom; use crate::rtmp_server::eventbus_map; use chrono::Local; use smol::io::AsyncWriteExt; use std::time::{Duration, Instant}; pub const FLV_HEADER_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x05, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub const FLV_HEADER_ONLY_VIDEO_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x01, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub struct FlvTag { raw_data: Vec<u8>, } #[allow(unused)] impl FlvTag { pub fn tag_type(&self) -> u8 { self.raw_data[0] } pub fn data_size(&self) -> u32 { BigEndian::read_u24(&self.raw_data[1..4]) } pub fn timestamp(&self) -> u32 { let timestamp_u24 = BigEndian::read_u24(&self.raw_data[4..7]); timestamp_u24 | (self.raw_data[7] as u32) << 24 } pub fn body(&self) -> &[u8] { &self.raw_data[11..] } } impl TryFrom<RtmpMessage> for FlvTag { type Error = anyhow::Error; fn try_from(mut msg: RtmpMessage) -> Result<Self, Self::Error> { let mut raw_data = vec![]; match msg.header.message_type { ChunkMessageType::AudioMessage => raw_data.push(0x08), ChunkMessageType::VideoMessage => raw_data.push(0x09), _ => Err(anyhow::anyhow!( "[FlvTag] invalid message type, {:?}", msg.header.message_type ))?, } raw_data.extend_from_slice(&(msg.body.len() as u32).to_be_bytes()[1..4]); raw_data.extend_from_slice(&(msg.header.timestamp & 0xFFFFFF).to_be_bytes()[1..4]); raw_data.push((msg.header.timestamp >> 24) as u8); raw_data.extend_from_slice(&0u32.to_be_bytes()[1..4]); raw_data.append(&mut msg.body); Ok(FlvTag { raw_data }) } } impl AsRef<[u8]> for FlvTag { fn as_ref(&self) -> &[u8] { self.raw_data.as_ref() } } #[allow(unused)] pub fn save_flv_background(stream_name: &str, peer_addr: String) { if let Some(eventbus) = eventbus_map().get(stream_name) { let flv_rx = eventbus.register_receiver(); 
spawn_and_log_error(handle_flv_rx(flv_rx, stream_name.to_owned(), peer_addr)); } } async fn handle_flv_rx( flv_rx: Receiver<RtmpMessage>, stream_name: String, peer_addr: String, ) -> anyhow::Result<()> { let tmp_dir = "tmp"; if smol::fs::read_dir(tmp_dir).await.is_err() { smol::fs::create_dir_all(tmp_dir).await?; } let mut file = smol::fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open("tmp/output.flv") .await?; file.write_all(&FLV_HEADER_WITH_TAG0).await?; let ctx_begin_timestamp = Local::now().timestamp_millis(); let mut last_flush_time = Instant::now(); let min_flush_duration = Duration::from_secs(2); while let Ok(mut msg) = flv_rx.recv().await { msg.header.timestamp = (Local::now().timestamp_millis() - ctx_begin_timestamp) as u32; let flv_tag = FlvTag::try_from(msg)?; file.write_all(flv_tag.as_ref()).await?; file.write_all(&(flv_tag.as_ref().len() as u32).to_be_bytes()) .await?; if last_flush_time.elapsed() > min_flush_duration { last_flush_time = Instant::now(); file.flush().await? } } log::warn!("[peer={}][handle_flv_rx] closed, stream_name={}", peer_addr, stream_name); Ok(()) }
use byteorder::{BigEndian, ByteOrder}; use crate::protocol::rtmp::{ChunkMessageType, RtmpMessage}; use crate::util::spawn_and_log_error; use smol::channel::Receiver; use std::convert::TryFrom; use crate::rtmp_server::eventbus_map; use chrono::Local; use smol::io::AsyncWriteExt; use std::time::{Duration, Instant}; pub const FLV_HEADER_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x05, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub const FLV_HEADER_ONLY_VIDEO_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x01, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub struct FlvTag { raw_data: Vec<u8>, } #[allow(unused)] impl FlvTag { pub fn tag_type(&self) -> u8 { self.raw_data[0] }
file.write_all(&FLV_HEADER_WITH_TAG0).await?; let ctx_begin_timestamp = Local::now().timestamp_millis(); let mut last_flush_time = Instant::now(); let min_flush_duration = Duration::from_secs(2); while let Ok(mut msg) = flv_rx.recv().await { msg.header.timestamp = (Local::now().timestamp_millis() - ctx_begin_timestamp) as u32; let flv_tag = FlvTag::try_from(msg)?; file.write_all(flv_tag.as_ref()).await?; file.write_all(&(flv_tag.as_ref().len() as u32).to_be_bytes()) .await?; if last_flush_time.elapsed() > min_flush_duration { last_flush_time = Instant::now(); file.flush().await? } } log::warn!("[peer={}][handle_flv_rx] closed, stream_name={}", peer_addr, stream_name); Ok(()) }
pub fn data_size(&self) -> u32 { BigEndian::read_u24(&self.raw_data[1..4]) } pub fn timestamp(&self) -> u32 { let timestamp_u24 = BigEndian::read_u24(&self.raw_data[4..7]); timestamp_u24 | (self.raw_data[7] as u32) << 24 } pub fn body(&self) -> &[u8] { &self.raw_data[11..] } } impl TryFrom<RtmpMessage> for FlvTag { type Error = anyhow::Error; fn try_from(mut msg: RtmpMessage) -> Result<Self, Self::Error> { let mut raw_data = vec![]; match msg.header.message_type { ChunkMessageType::AudioMessage => raw_data.push(0x08), ChunkMessageType::VideoMessage => raw_data.push(0x09), _ => Err(anyhow::anyhow!( "[FlvTag] invalid message type, {:?}", msg.header.message_type ))?, } raw_data.extend_from_slice(&(msg.body.len() as u32).to_be_bytes()[1..4]); raw_data.extend_from_slice(&(msg.header.timestamp & 0xFFFFFF).to_be_bytes()[1..4]); raw_data.push((msg.header.timestamp >> 24) as u8); raw_data.extend_from_slice(&0u32.to_be_bytes()[1..4]); raw_data.append(&mut msg.body); Ok(FlvTag { raw_data }) } } impl AsRef<[u8]> for FlvTag { fn as_ref(&self) -> &[u8] { self.raw_data.as_ref() } } #[allow(unused)] pub fn save_flv_background(stream_name: &str, peer_addr: String) { if let Some(eventbus) = eventbus_map().get(stream_name) { let flv_rx = eventbus.register_receiver(); spawn_and_log_error(handle_flv_rx(flv_rx, stream_name.to_owned(), peer_addr)); } } async fn handle_flv_rx( flv_rx: Receiver<RtmpMessage>, stream_name: String, peer_addr: String, ) -> anyhow::Result<()> { let tmp_dir = "tmp"; if smol::fs::read_dir(tmp_dir).await.is_err() { smol::fs::create_dir_all(tmp_dir).await?; } let mut file = smol::fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open("tmp/output.flv") .await?;
random
[ { "content": "pub fn print_hex(bytes: &[u8]) {\n\n println!(\"{}\", bytes_hex_format(bytes));\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 0, "score": 112056.65758520142 }, { "content": "pub fn bytes_hex_format(bytes: &[u8]) -> String {\n\n const COLUMN: usize = 16;\n\n const CO...
Rust
src/third_party/alga/alga_transform.rs
dfarnham/nalgebra
79ef862fe9bd5f6d97a864c36274e0eb69468025
use alga::general::{ AbstractGroup, AbstractLoop, AbstractMagma, AbstractMonoid, AbstractQuasigroup, AbstractSemigroup, Identity, Multiplicative, RealField, TwoSidedInverse, }; use alga::linear::{ProjectiveTransformation, Transformation}; use crate::base::allocator::Allocator; use crate::base::dimension::{DimNameAdd, DimNameSum, U1}; use crate::base::{Const, DefaultAllocator, SVector}; use crate::geometry::{Point, SubTCategoryOf, TCategory, TProjective, Transform}; /* * * Algebraic structures. * */ impl<T: RealField + simba::scalar::RealField, C, const D: usize> Identity<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn identity() -> Self { Self::identity() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> TwoSidedInverse<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] #[must_use = "Did you mean to use two_sided_inverse_mut()?"] fn two_sided_inverse(&self) -> Self { self.clone().inverse() } #[inline] fn two_sided_inverse_mut(&mut self) { self.inverse_mut() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> AbstractMagma<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn operate(&self, rhs: &Self) -> Self { self * rhs } } macro_rules! impl_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); macro_rules! 
impl_inversible_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); impl_multiplicative_structures!( AbstractSemigroup<Multiplicative>, AbstractMonoid<Multiplicative>, ); impl_inversible_multiplicative_structures!( AbstractQuasigroup<Multiplicative>, AbstractLoop<Multiplicative>, AbstractGroup<Multiplicative> ); /* * * Transformation groups. * */ impl<T, C, const D: usize> Transformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.transform_point(pt) } #[inline] fn transform_vector(&self, v: &SVector<T, D>) -> SVector<T, D> { self.transform_vector(v) } } impl<T, C, const D: usize> ProjectiveTransformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn inverse_transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.inverse_transform_point(pt) } #[inline] fn inverse_transform_vector(&self, v: &SVector<T, D>) -> SVector<T, D> { self.inverse_transform_vector(v) } }
use alga::general::{ AbstractGroup, AbstractLoop, AbstractMagma, AbstractMonoid, AbstractQuasigroup, AbstractSemigroup, Identity, Multiplicative, RealField, TwoSidedInverse, }; use alga::linear::{ProjectiveTransformation, Transformation}; use crate::base::allocator::Allocator; use crate::base::dimension::{DimNameAdd, DimNameSum, U1}; use crate::base::{Const, DefaultAllocator, SVector}; use crate::geometry::{Point, SubTCategoryOf, TCategory, TProjective, Transform}; /* * * Algebraic structures. * */ impl<T: RealField + simba::scalar::RealField, C, const D: usize> Identity<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn identity() -> Self { Self::identity() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> TwoSidedInverse<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] #[must_use = "Did you mean to use two_sided_inverse_mut()?"] fn two_sided_inverse(&self) -> Self { self.clone().inverse() } #[inline] fn two_sided_inverse_mut(&mut self) { self.inverse_mut() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> AbstractMagma<Multiplicative> for Transfo
SVector<T, D> { self.transform_vector(v) } } impl<T, C, const D: usize> ProjectiveTransformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn inverse_transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.inverse_transform_point(pt) } #[inline] fn inverse_transform_vector(&self, v: &SVector<T, D>) -> SVector<T, D> { self.inverse_transform_vector(v) } }
rm<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn operate(&self, rhs: &Self) -> Self { self * rhs } } macro_rules! impl_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); macro_rules! impl_inversible_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); impl_multiplicative_structures!( AbstractSemigroup<Multiplicative>, AbstractMonoid<Multiplicative>, ); impl_inversible_multiplicative_structures!( AbstractQuasigroup<Multiplicative>, AbstractLoop<Multiplicative>, AbstractGroup<Multiplicative> ); /* * * Transformation groups. * */ impl<T, C, const D: usize> Transformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.transform_point(pt) } #[inline] fn transform_vector(&self, v: &SVector<T, D>) ->
random
[]
Rust
src/ais/vdm_t14.rs
johann2/nmea-parser
0dd55af67546526399bbc026067e51da8ea3fdc1
/* Copyright 2020 Timo Saarinen Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use super::*; #[derive(Default, Clone, Debug, PartialEq)] pub struct SafetyRelatedBroadcastMessage { pub own_vessel: bool, pub station: Station, pub mmsi: u32, pub text: String, } pub(crate) fn handle( bv: &BitVec, station: Station, own_vessel: bool, ) -> Result<ParsedMessage, ParseError> { Ok(ParsedMessage::SafetyRelatedBroadcastMessage( SafetyRelatedBroadcastMessage { own_vessel: { own_vessel }, station: { station }, mmsi: { pick_u64(&bv, 8, 30) as u32 }, text: { pick_string(&bv, 40, 161) }, }, )) } #[cfg(test)] mod test { use super::*; #[test] fn test_parse_vdm_type14() { let mut p = NmeaParser::new(); match p.parse_sentence("!AIVDM,1,1,,A,>5?Per18=HB1U:1@E=B0m<L,2*51") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 351809000); assert_eq!(srbm.text, "RCVD YR TEST MSG"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>3R1p10E3;;R0USCR0HO>0@gN10kGJp,2*7F") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 237008900); assert_eq!(srbm.text, "EP228 IX48 FG3 DK7 PL56."); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>4aDT81@E=@,2*2E") { Ok(ps) => { match ps { 
ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 311764000); assert_eq!(srbm.text, "TEST"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } } }
/* Copyright 2020 Timo Saarinen Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use super::*; #[derive(Default, Clone, Debug, PartialEq)] pub struct SafetyRelatedBroadcastMessage { pub own_vessel: bool, pub station: Station, pub mmsi: u32, pub text: String, }
#[cfg(test)] mod test { use super::*; #[test] fn test_parse_vdm_type14() { let mut p = NmeaParser::new(); match p.parse_sentence("!AIVDM,1,1,,A,>5?Per18=HB1U:1@E=B0m<L,2*51") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 351809000); assert_eq!(srbm.text, "RCVD YR TEST MSG"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>3R1p10E3;;R0USCR0HO>0@gN10kGJp,2*7F") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 237008900); assert_eq!(srbm.text, "EP228 IX48 FG3 DK7 PL56."); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>4aDT81@E=@,2*2E") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 311764000); assert_eq!(srbm.text, "TEST"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } } }
pub(crate) fn handle( bv: &BitVec, station: Station, own_vessel: bool, ) -> Result<ParsedMessage, ParseError> { Ok(ParsedMessage::SafetyRelatedBroadcastMessage( SafetyRelatedBroadcastMessage { own_vessel: { own_vessel }, station: { station }, mmsi: { pick_u64(&bv, 8, 30) as u32 }, text: { pick_string(&bv, 40, 161) }, }, )) }
function_block-full_function
[ { "content": "/// Make key for store\n\nfn make_gsv_key(sentence_type: &str, msg_count: u32, msg_num: u32) -> String {\n\n format!(\"{},{},{}\", sentence_type, msg_count, msg_num)\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n#[cfg(tes...
Rust
sourcemap/src/types.rs
jaspervandenberg/symbolic
c420a47d67a4b17cbdc48de5b5900e8e2e7fa62f
use std::mem; use std::borrow::Cow; use sourcemap; use symbolic_common::Result; pub struct SourceView<'a> { sv: sourcemap::SourceView<'a>, } pub struct SourceMapView { sm: sourcemap::SourceMap, } pub struct TokenMatch<'a> { pub src_line: u32, pub src_col: u32, pub dst_line: u32, pub dst_col: u32, pub src_id: u32, pub name: Option<&'a str>, pub src: Option<&'a str>, pub function_name: Option<String>, } impl<'a> SourceView<'a> { pub fn new(source: &'a str) -> SourceView<'a> { SourceView { sv: sourcemap::SourceView::new(source), } } pub fn from_string(source: String) -> SourceView<'static> { SourceView { sv: sourcemap::SourceView::from_string(source), } } pub fn from_bytes(source: &'a [u8]) -> SourceView<'a> { match String::from_utf8_lossy(source) { Cow::Owned(s) => SourceView::from_string(s), Cow::Borrowed(s) => SourceView::new(s), } } pub fn as_str(&self) -> &str { self.sv.source() } pub fn get_line(&self, idx: u32) -> Option<&str> { self.sv.get_line(idx) } pub fn line_count(&self) -> usize { self.sv.line_count() } } impl SourceMapView { pub fn from_json_slice(buffer: &[u8]) -> Result<SourceMapView> { Ok(SourceMapView { sm: match sourcemap::decode_slice(buffer)? 
{ sourcemap::DecodedMap::Regular(sm) => sm, sourcemap::DecodedMap::Index(smi) => smi.flatten()?, }, }) } pub fn lookup_token<'a>(&'a self, line: u32, col: u32) -> Option<TokenMatch<'a>> { self.sm .lookup_token(line, col) .map(|tok| self.make_token_match(tok)) } pub fn get_token<'a>(&'a self, idx: u32) -> Option<TokenMatch<'a>> { self.sm.get_token(idx).map(|tok| self.make_token_match(tok)) } pub fn get_token_count(&self) -> u32 { self.sm.get_token_count() } pub fn get_source_view<'a>(&'a self, idx: u32) -> Option<&'a SourceView<'a>> { self.sm .get_source_view(idx) .map(|x| unsafe { mem::transmute(x) }) } pub fn get_source_name(&self, idx: u32) -> Option<&str> { self.sm.get_source(idx) } pub fn get_source_count(&self) -> u32 { self.sm.get_source_count() } pub fn lookup_token_with_function_name<'a, 'b>( &'a self, line: u32, col: u32, minified_name: &str, source: &SourceView<'b>, ) -> Option<TokenMatch<'a>> { self.sm.lookup_token(line, col).map(|token| { let mut rv = self.make_token_match(token); rv.function_name = source .sv .get_original_function_name(token, minified_name) .map(|x| x.to_string()); rv }) } fn make_token_match<'a>(&'a self, tok: sourcemap::Token<'a>) -> TokenMatch<'a> { TokenMatch { src_line: tok.get_src_line(), src_col: tok.get_src_col(), dst_line: tok.get_dst_line(), dst_col: tok.get_dst_col(), src_id: tok.get_src_id(), name: tok.get_name(), src: tok.get_source(), function_name: None, } } }
use std::mem; use std::borrow::Cow; use sourcemap; use symbolic_common::Result; pub struct SourceView<'a> { sv: sourcemap::SourceView<'a>, } pub struct SourceMapView { sm: sourcemap::SourceMap, } pub struct TokenMatch<'a> { pub src_line: u32, pub src_col: u32, pub dst_line: u32, pub dst_col: u32, pub src_id: u32, pub name: Option<&'a str>, pub src: Option<&'a str>,
self.sm.get_source_count() } pub fn lookup_token_with_function_name<'a, 'b>( &'a self, line: u32, col: u32, minified_name: &str, source: &SourceView<'b>, ) -> Option<TokenMatch<'a>> { self.sm.lookup_token(line, col).map(|token| { let mut rv = self.make_token_match(token); rv.function_name = source .sv .get_original_function_name(token, minified_name) .map(|x| x.to_string()); rv }) } fn make_token_match<'a>(&'a self, tok: sourcemap::Token<'a>) -> TokenMatch<'a> { TokenMatch { src_line: tok.get_src_line(), src_col: tok.get_src_col(), dst_line: tok.get_dst_line(), dst_col: tok.get_dst_col(), src_id: tok.get_src_id(), name: tok.get_name(), src: tok.get_source(), function_name: None, } } }
pub function_name: Option<String>, } impl<'a> SourceView<'a> { pub fn new(source: &'a str) -> SourceView<'a> { SourceView { sv: sourcemap::SourceView::new(source), } } pub fn from_string(source: String) -> SourceView<'static> { SourceView { sv: sourcemap::SourceView::from_string(source), } } pub fn from_bytes(source: &'a [u8]) -> SourceView<'a> { match String::from_utf8_lossy(source) { Cow::Owned(s) => SourceView::from_string(s), Cow::Borrowed(s) => SourceView::new(s), } } pub fn as_str(&self) -> &str { self.sv.source() } pub fn get_line(&self, idx: u32) -> Option<&str> { self.sv.get_line(idx) } pub fn line_count(&self) -> usize { self.sv.line_count() } } impl SourceMapView { pub fn from_json_slice(buffer: &[u8]) -> Result<SourceMapView> { Ok(SourceMapView { sm: match sourcemap::decode_slice(buffer)? { sourcemap::DecodedMap::Regular(sm) => sm, sourcemap::DecodedMap::Index(smi) => smi.flatten()?, }, }) } pub fn lookup_token<'a>(&'a self, line: u32, col: u32) -> Option<TokenMatch<'a>> { self.sm .lookup_token(line, col) .map(|tok| self.make_token_match(tok)) } pub fn get_token<'a>(&'a self, idx: u32) -> Option<TokenMatch<'a>> { self.sm.get_token(idx).map(|tok| self.make_token_match(tok)) } pub fn get_token_count(&self) -> u32 { self.sm.get_token_count() } pub fn get_source_view<'a>(&'a self, idx: u32) -> Option<&'a SourceView<'a>> { self.sm .get_source_view(idx) .map(|x| unsafe { mem::transmute(x) }) } pub fn get_source_name(&self, idx: u32) -> Option<&str> { self.sm.get_source(idx) } pub fn get_source_count(&self) -> u32 {
random
[ { "content": "/// Checks whether an ELF binary contains a section.\n\n///\n\n/// This is useful to determine whether the binary contains certain information\n\n/// without loading its section data.\n\npub fn has_elf_section(elf: &elf::Elf, sh_type: u32, name: &str) -> bool {\n\n for header in &elf.section_he...
Rust
arch/x86/cpu/idt.rs
mvdnes/element76
87e0206b022bda177dd1ca8fb6e2989429c2c8bf
/* * This file contains code for the Interrupt Descriptor Table * * See: http://www.jamesmolloy.co.uk/tutorial_html/4.-The%20GDT%20and%20IDT.html */ use core::marker::Copy; use core::clone::Clone; const IDT_COUNT: usize = 256; static mut IDT_ENTRIES: [IDTEntry; IDT_COUNT] = [IDTEntry { base_low: 0, selector: 0, zero: 0, flags: 0, base_high: 0 }; IDT_COUNT]; static mut IDT_PTR: IDTPointer = IDTPointer { limit: 0, base: 0 }; #[repr(packed)] struct IDTEntry { base_low: u16, selector: u16, zero: u8, flags: u8, base_high: u16 } impl Copy for IDTEntry {} impl Clone for IDTEntry { fn clone(&self) -> Self { *self } } #[repr(packed)] struct IDTPointer { limit: u16, base: usize } pub fn init_idt() { unsafe { IDT_PTR.limit = (::core::mem::size_of::<IDTEntry>() * IDT_COUNT - 1) as u16; IDT_PTR.base = &IDT_ENTRIES as *const [IDTEntry; IDT_COUNT] as usize; idt_set_gate( 0, isr0 as usize, 0x08, 0x8E); idt_set_gate( 1, isr1 as usize, 0x08, 0x8E); idt_set_gate( 2, isr2 as usize, 0x08, 0x8E); idt_set_gate( 3, isr3 as usize, 0x08, 0x8E); idt_set_gate( 4, isr4 as usize, 0x08, 0x8E); idt_set_gate( 5, isr5 as usize, 0x08, 0x8E); idt_set_gate( 6, isr6 as usize, 0x08, 0x8E); idt_set_gate( 7, isr7 as usize, 0x08, 0x8E); idt_set_gate( 8, isr8 as usize, 0x08, 0x8E); idt_set_gate( 9, isr9 as usize, 0x08, 0x8E); idt_set_gate(10, isr10 as usize, 0x08, 0x8E); idt_set_gate(11, isr11 as usize, 0x08, 0x8E); idt_set_gate(12, isr12 as usize, 0x08, 0x8E); idt_set_gate(13, isr13 as usize, 0x08, 0x8E); idt_set_gate(14, isr14 as usize, 0x08, 0x8E); idt_set_gate(15, isr15 as usize, 0x08, 0x8E); idt_set_gate(16, isr16 as usize, 0x08, 0x8E); idt_set_gate(17, isr17 as usize, 0x08, 0x8E); idt_set_gate(18, isr18 as usize, 0x08, 0x8E); idt_set_gate(19, isr19 as usize, 0x08, 0x8E); idt_set_gate(20, isr20 as usize, 0x08, 0x8E); idt_set_gate(21, isr21 as usize, 0x08, 0x8E); idt_set_gate(22, isr22 as usize, 0x08, 0x8E); idt_set_gate(23, isr23 as usize, 0x08, 0x8E); idt_set_gate(24, isr24 as usize, 0x08, 0x8E); 
idt_set_gate(25, isr25 as usize, 0x08, 0x8E); idt_set_gate(26, isr26 as usize, 0x08, 0x8E); idt_set_gate(27, isr27 as usize, 0x08, 0x8E); idt_set_gate(28, isr28 as usize, 0x08, 0x8E); idt_set_gate(29, isr29 as usize, 0x08, 0x8E); idt_set_gate(30, isr30 as usize, 0x08, 0x8E); idt_set_gate(31, isr31 as usize, 0x08, 0x8E); idt_set_gate(32, irq0 as usize, 0x08, 0x8E); idt_set_gate(33, irq1 as usize, 0x08, 0x8E); idt_set_gate(34, irq2 as usize, 0x08, 0x8E); idt_set_gate(35, irq3 as usize, 0x08, 0x8E); idt_set_gate(36, irq4 as usize, 0x08, 0x8E); idt_set_gate(37, irq5 as usize, 0x08, 0x8E); idt_set_gate(38, irq6 as usize, 0x08, 0x8E); idt_set_gate(39, irq7 as usize, 0x08, 0x8E); idt_set_gate(40, irq8 as usize, 0x08, 0x8E); idt_set_gate(41, irq9 as usize, 0x08, 0x8E); idt_set_gate(42, irq10 as usize, 0x08, 0x8E); idt_set_gate(43, irq11 as usize, 0x08, 0x8E); idt_set_gate(44, irq12 as usize, 0x08, 0x8E); idt_set_gate(45, irq13 as usize, 0x08, 0x8E); idt_set_gate(46, irq14 as usize, 0x08, 0x8E); idt_set_gate(47, irq15 as usize, 0x08, 0x8E); idt_flush(&IDT_PTR as *const IDTPointer as u32); } } unsafe fn idt_set_gate(n: usize, base: usize, sel: u16, flags: u8) { IDT_ENTRIES[n].base_low = (base & 0xFFFF) as u16; IDT_ENTRIES[n].base_high = ((base >> 16) & 0xFFFF) as u16; IDT_ENTRIES[n].selector = sel; IDT_ENTRIES[n].zero = 0; IDT_ENTRIES[n].flags = (flags & 0b11100000) | 0b01110; } extern { fn idt_flush(pointer: u32); fn isr0 (); fn isr1 (); fn isr2 (); fn isr3 (); fn isr4 (); fn isr5 (); fn isr6 (); fn isr7 (); fn isr8 (); fn isr9 (); fn isr10(); fn isr11(); fn isr12(); fn isr13(); fn isr14(); fn isr15(); fn isr16(); fn isr17(); fn isr18(); fn isr19(); fn isr20(); fn isr21(); fn isr22(); fn isr23(); fn isr24(); fn isr25(); fn isr26(); fn isr27(); fn isr28(); fn isr29(); fn isr30(); fn isr31(); fn irq0 (); fn irq1 (); fn irq2 (); fn irq3 (); fn irq4 (); fn irq5 (); fn irq6 (); fn irq7 (); fn irq8 (); fn irq9 (); fn irq10(); fn irq11(); fn irq12(); fn irq13(); fn irq14(); fn 
irq15(); }
/* * This file contains code for the Interrupt Descriptor Table * * See: http://www.jamesmolloy.co.uk/tutorial_html/4.-The%20GDT%20and%20IDT.html */ use core::marker::Copy; use core::clone::Clone; const IDT_COUNT: usize = 256; static mut IDT_ENTRIES: [IDTEntry; IDT_COUNT] = [IDTEntry { base_low: 0, selector: 0, zero: 0, flags: 0, base_high: 0 }; IDT_COUNT]; static mut IDT_PTR: IDTPointer = IDTPointer { limit: 0, base: 0 }; #[repr(packed)] struct IDTEntry { base_low: u16, selector: u16, zero: u8, flags: u8, base_high: u16 } impl Copy for IDTEntry {} impl Clone for IDTEntry { fn clone(&self) -> Self { *self } } #[repr(packed)] struct IDTPointer { limit: u16, base: usize } pub fn init_idt() { unsafe { IDT_PTR.limit = (::core::mem::size_of::<IDTEntry>() * IDT_COUNT - 1) as u16; IDT_PTR.base = &IDT_ENTRIES as *const [IDTEntry; IDT_COUNT] as usize; idt_set_gate( 0, isr0 as usize, 0x08, 0x8E); idt_set_gate( 1, isr1 as usize, 0x08, 0x8E); idt_set_gate( 2, isr2 as usize, 0x08, 0x8E); idt_set_gate( 3, isr3 as usize, 0x08, 0x8E); idt_set_gate( 4, isr4 as usize, 0x08, 0x8E); idt_set_gate( 5, isr5 as usize, 0x08, 0x8E); idt_set_gate( 6, isr6 as usize, 0x08, 0x8E); idt_set_gate( 7, isr7 as usize, 0x08, 0x8E); idt_set_gate( 8, isr8 as usize, 0x08, 0x8E); idt_set_gate( 9, isr9 as usize, 0x08, 0x8E); idt_set_gate(10, isr10 as usize, 0x08, 0x8E); idt_set_gate(11, isr11 as usize, 0x08, 0x8E); idt_set_gate(12, isr12 as usize, 0x08, 0x8E); idt_set_gate(13, isr13 as usize, 0x08, 0x8E); idt_set_gate(14, isr14 as usize, 0x08, 0x8E); idt_set_gate(15, isr15 as usize, 0x08, 0x8E); idt_set_gate(16, isr16 as usize, 0x08, 0x8E); idt_set_gate(17, isr17 as usize, 0x08, 0x8E); idt_set_gate(18, isr18 as usize, 0x08, 0x8E); idt_set_gate(19, isr19 as usize, 0x0
, 0x08, 0x8E); idt_set_gate(44, irq12 as usize, 0x08, 0x8E); idt_set_gate(45, irq13 as usize, 0x08, 0x8E); idt_set_gate(46, irq14 as usize, 0x08, 0x8E); idt_set_gate(47, irq15 as usize, 0x08, 0x8E); idt_flush(&IDT_PTR as *const IDTPointer as u32); } } unsafe fn idt_set_gate(n: usize, base: usize, sel: u16, flags: u8) { IDT_ENTRIES[n].base_low = (base & 0xFFFF) as u16; IDT_ENTRIES[n].base_high = ((base >> 16) & 0xFFFF) as u16; IDT_ENTRIES[n].selector = sel; IDT_ENTRIES[n].zero = 0; IDT_ENTRIES[n].flags = (flags & 0b11100000) | 0b01110; } extern { fn idt_flush(pointer: u32); fn isr0 (); fn isr1 (); fn isr2 (); fn isr3 (); fn isr4 (); fn isr5 (); fn isr6 (); fn isr7 (); fn isr8 (); fn isr9 (); fn isr10(); fn isr11(); fn isr12(); fn isr13(); fn isr14(); fn isr15(); fn isr16(); fn isr17(); fn isr18(); fn isr19(); fn isr20(); fn isr21(); fn isr22(); fn isr23(); fn isr24(); fn isr25(); fn isr26(); fn isr27(); fn isr28(); fn isr29(); fn isr30(); fn isr31(); fn irq0 (); fn irq1 (); fn irq2 (); fn irq3 (); fn irq4 (); fn irq5 (); fn irq6 (); fn irq7 (); fn irq8 (); fn irq9 (); fn irq10(); fn irq11(); fn irq12(); fn irq13(); fn irq14(); fn irq15(); }
8, 0x8E); idt_set_gate(20, isr20 as usize, 0x08, 0x8E); idt_set_gate(21, isr21 as usize, 0x08, 0x8E); idt_set_gate(22, isr22 as usize, 0x08, 0x8E); idt_set_gate(23, isr23 as usize, 0x08, 0x8E); idt_set_gate(24, isr24 as usize, 0x08, 0x8E); idt_set_gate(25, isr25 as usize, 0x08, 0x8E); idt_set_gate(26, isr26 as usize, 0x08, 0x8E); idt_set_gate(27, isr27 as usize, 0x08, 0x8E); idt_set_gate(28, isr28 as usize, 0x08, 0x8E); idt_set_gate(29, isr29 as usize, 0x08, 0x8E); idt_set_gate(30, isr30 as usize, 0x08, 0x8E); idt_set_gate(31, isr31 as usize, 0x08, 0x8E); idt_set_gate(32, irq0 as usize, 0x08, 0x8E); idt_set_gate(33, irq1 as usize, 0x08, 0x8E); idt_set_gate(34, irq2 as usize, 0x08, 0x8E); idt_set_gate(35, irq3 as usize, 0x08, 0x8E); idt_set_gate(36, irq4 as usize, 0x08, 0x8E); idt_set_gate(37, irq5 as usize, 0x08, 0x8E); idt_set_gate(38, irq6 as usize, 0x08, 0x8E); idt_set_gate(39, irq7 as usize, 0x08, 0x8E); idt_set_gate(40, irq8 as usize, 0x08, 0x8E); idt_set_gate(41, irq9 as usize, 0x08, 0x8E); idt_set_gate(42, irq10 as usize, 0x08, 0x8E); idt_set_gate(43, irq11 as usize
random
[ { "content": "fn parse_keycode(code: u8) -> KeyboardKey\n\n{\n\n\tmatch code\n\n\t{\n\n\t\t1 => KeyboardKey::Escape,\n\n\t\t2 => KeyboardKey::Printable('1', '!'),\n\n\t\t3 => KeyboardKey::Printable('2', '@'),\n\n\t\t4 => KeyboardKey::Printable('3', '#'),\n\n\t\t5 => KeyboardKey::Printable('4', '$'),\n\n\t\t6 =>...
Rust
src/bdd.rs
tangentstorm/bex
c2dea80e284de3fd577fda892577edc829c07bf7
use std::collections::HashMap; use std::collections::HashSet; use std::cell::RefCell; extern crate num_cpus; use bincode; use base::{Base}; use io; use reg::Reg; use {vhl, vhl::{HiLo, Walkable}}; use nid::{NID,O,I}; use vid::{VID,VidOrdering,topmost_of3}; mod bdd_sols; mod bdd_swarm; use self::bdd_swarm::*; pub type BDDHashMap<K,V> = vhl::VHLHashMap<K,V>; #[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone, Copy)] pub struct ITE {i:NID, t:NID, e:NID} impl ITE { pub fn new (i:NID, t:NID, e:NID)-> ITE { ITE { i, t, e } } pub fn top_vid(&self)->VID { let (i,t,e) = (self.i.vid(), self.t.vid(), self.e.vid()); topmost_of3(i,t,e) }} #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Norm { Nid(NID), Ite(ITE), Not(ITE)} impl ITE { pub fn norm(f0:NID, g0:NID, h0:NID)->Norm { let mut f = f0; let mut g = g0; let mut h = h0; loop { if f.is_const() { return Norm::Nid(if f==I { g } else { h }) } if g==h { return Norm::Nid(g) } if g==f { if h.is_const() { return Norm::Nid(if h==I { I } else { f }) } else { g=I }} else if g.is_const() && h.is_const() { return if g==I { Norm::Nid(f) } else { Norm::Nid(!f) }} else { let nf = !f; if g==nf { g=O } else if h==nf { h=I } else if h==f { h=O } else { let (fv, fi) = (f.vid(), f.idx()); macro_rules! 
cmp { ($x0:expr,$x1:expr) => { { let x0=$x0; ((x0.is_above(&fv)) || ((x0==fv) && ($x1<fi))) }}} if g.is_const() && cmp!(h.vid(),h.idx()) { if g==I { g = f; f = h; h = g; g = I; } else { f = !h; g = O; h = nf; }} else if h.is_const() && cmp!(g.vid(),g.idx()) { if h==I { f = !g; g = nf; h = I; } else { h = f; f = g; g = h; h = O; }} else { let ng = !g; if (h==ng) && cmp!(g.vid(), g.idx()) { h=f; f=g; g=h; h=nf; } else if f.is_inv() { f=g; g=h; h=f; f=nf; } else if g.is_inv() { return match ITE::norm(f,ng,!h) { Norm::Nid(nid) => Norm::Nid(!nid), Norm::Not(ite) => Norm::Ite(ite), Norm::Ite(ite) => Norm::Not(ite)}} else { return Norm::Ite(ITE::new(f,g,h)) }}}}}} } #[derive(Debug, Serialize, Deserialize, Clone)] pub struct BddState { hilos: vhl::HiLoCache, xmemo: BDDHashMap<ITE, NID> } thread_local!{ pub static COUNT_XMEMO_TEST: RefCell<u64> = RefCell::new(0); pub static COUNT_XMEMO_FAIL: RefCell<u64> = RefCell::new(0); } impl BddState { fn new()->BddState { BddState { hilos: vhl::HiLoCache::new(), xmemo: BDDHashMap::default() }} #[inline] fn tup(&self, n:NID)-> (NID, NID) { if n.is_const() { if n==I { (I, O) } else { (O, I) } } else if n.is_vid() { if n.is_inv() { (O, I) } else { (I, O) }} else { let hilo = self.hilos.get_hilo(n); (hilo.hi, hilo.lo) }} #[inline] fn simple_node(&mut self, v:VID, hilo:HiLo)->NID { match self.get_simple_node(v, hilo) { Some(n) => n, None => { self.hilos.insert(v, hilo) }}} #[inline] fn get_memo(&self, ite:&ITE) -> Option<NID> { if ite.i.is_vid() { debug_assert!(!ite.i.is_inv()); let hilo = if ite.i.is_inv() { HiLo::new(ite.e,ite.t) } else { HiLo::new(ite.t,ite.e) }; self.get_simple_node(ite.i.vid(), hilo) } else { COUNT_XMEMO_TEST.with(|c| *c.borrow_mut() += 1 ); let test = self.xmemo.get(&ite).copied(); if test == None { COUNT_XMEMO_FAIL.with(|c| *c.borrow_mut() += 1 ); } test }} #[inline] fn get_simple_node(&self, v:VID, hl:HiLo)-> Option<NID> { self.hilos.get_node(v, hl) }} #[derive(Debug, Serialize, Deserialize)] pub struct BDDBase { 
pub tags: HashMap<String, NID>, swarm: BddSwarm} impl BDDBase { #[inline] fn tup(&self, n:NID)->(NID,NID) { self.swarm.tup(n) } pub fn load(path:&str)->::std::io::Result<BDDBase> { let s = io::get(path)?; Ok(bincode::deserialize(&s).unwrap()) } pub fn gt(&mut self, x:NID, y:NID)->NID { self.ite(x, !y, O) } pub fn lt(&mut self, x:NID, y:NID)->NID { self.ite(x, O, y) } #[inline] pub fn ite(&mut self, f:NID, g:NID, h:NID)->NID { self.swarm.ite(f,g,h) } pub fn swap(&mut self, n:NID, x:VID, y:VID)-> NID { if x.is_below(&y) { return self.swap(n,y,x) } /* x ____ x'____ : \ : \ y __ y __ => y'__ y'__ : \ : \ : \ : \ ll lh hl hh ll hl lh hh */ let (xlo, xhi) = (self.when_lo(x,n), self.when_hi(x,n)); let (xlo_ylo, xlo_yhi) = (self.when_lo(y,xlo), self.when_hi(y,xlo)); let (xhi_ylo, xhi_yhi) = (self.when_lo(y,xhi), self.when_hi(y,xhi)); let lo = self.ite(NID::from_vid(x), xlo_ylo, xhi_ylo); let hi = self.ite(NID::from_vid(y), xlo_yhi, xhi_yhi); self.ite(NID::from_vid(x), lo, hi) } pub fn node_count(&self, n:NID)->usize { let mut c = 0; self.walk(n, &mut |_,_,_,_| c+=1); c } fn tt_aux(&mut self, res:&mut Vec<u8>, n:NID, i:usize, level:u32) { if level == 0 { match n { O => {} I => { res[i] = 1; } x => panic!("expected a leaf nid, got {}", x) }} else { let v = VID::var(level-1); let lo = self.when_lo(v,n); self.tt_aux(res, lo, i*2, level-1); let hi = self.when_hi(v,n); self.tt_aux(res, hi, i*2+1, level-1); }} pub fn tt(&mut self, n0:NID, num_vars:u32)->Vec<u8> { if !n0.vid().is_var() { todo!("tt only works for actual variables. 
got {:?}", n0); } if num_vars > 16 { panic!("refusing to generate a truth table of 2^{} bytes", num_vars) } if num_vars == 0 { panic!("num_vars should be > 0")} let mut res = vec![0;(1 << num_vars) as usize]; self.tt_aux(&mut res, n0, 0, num_vars); res } } impl Base for BDDBase { fn new()->BDDBase { BDDBase{swarm: BddSwarm::new(), tags:HashMap::new()}} fn when_hi(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).0, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_hi(x,yt), self.when_hi(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn when_lo(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).1, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_lo(x,yt), self.when_lo(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn def(&mut self, _s:String, _i:VID)->NID { todo!("BDDBase::def()") } fn tag(&mut self, n:NID, s:String)->NID { self.tags.insert(s, n); n } fn get(&self, s:&str)->Option<NID> { Some(*self.tags.get(s)?) } fn and(&mut self, x:NID, y:NID)->NID { self.ite(x, y, O) } fn xor(&mut self, x:NID, y:NID)->NID { self.ite(x, !y, y) } fn or(&mut self, x:NID, y:NID)->NID { self.ite(x, I, y) } fn sub(&mut self, v:VID, n:NID, ctx:NID)->NID { if ctx.might_depend_on(v) { let (zt,ze) = self.tup(ctx); let zv = ctx.vid(); if v==zv { self.ite(n, zt, ze) } else { let th = self.sub(v, n, zt); let el = self.sub(v, n, ze); self.ite(NID::from_vid(zv), th, el) }} else { ctx }} fn save(&self, path:&str)->::std::io::Result<()> { let s = bincode::serialize(&self).unwrap(); io::put(path, &s) } fn dot(&self, n:NID, wr: &mut dyn std::fmt::Write) { macro_rules! 
w { ($x:expr $(,$xs:expr)*) => { writeln!(wr, $x $(,$xs)*).unwrap(); }} w!("digraph bdd {{"); w!("subgraph head {{ h1[shape=plaintext; label=\"BDD\"] }}"); w!(" I[label=⊤; shape=square];"); w!(" O[label=⊥; shape=square];"); w!("node[shape=circle];"); self.walk(n, &mut |n,_,_,_| w!(" \"{}\"[label=\"{}\"];", n, n.vid())); w!("edge[style=solid];"); self.walk(n, &mut |n,_,t,_| w!(" \"{}\"->\"{}\";", n, t)); w!("edge[style=dashed];"); self.walk(n, &mut |n,_,_,e| w!(" \"{}\"->\"{}\";", n, e)); w!("}}"); } fn solution_set(&self, n: NID, nvars: usize)->hashbrown::HashSet<Reg> { self.solutions_pad(n, nvars).collect() }} include!("test-bdd.rs");
use std::collections::HashMap; use std::collections::HashSet; use std::cell::RefCell; extern crate num_cpus; use bincode; use base::{Base}; use io; use reg::Reg; use {vhl, vhl::{HiLo, Walkable}}; use nid::{NID,O,I}; use vid::{VID,VidOrdering,topmost_of3}; mod bdd_sols; mod bdd_swarm; use self::bdd_swarm::*; pub type BDDHashMap<K,V> = vhl::VHLHashMap<K,V>; #[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone, Copy)] pub struct ITE {i:NID, t:NID, e:NID} impl ITE { pub fn new (i:NID, t:NID, e:NID)-> ITE { ITE { i, t, e } } pub fn top_vid(&self)->VID { let (i,t,e) = (self.i.vid(), self.t.vid(), self.e.vid()); topmost_of3(i,t,e) }} #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Norm { Nid(NID), Ite(ITE), Not(ITE)} impl ITE { pub fn norm(f0:NID, g0:NID, h0:NID)->Norm { let mut f = f0; let mut g = g0; let mut h = h0; loop { if f.is_const() { return Norm::Nid(if f==I { g } else { h }) } if g==h { return Norm::Nid(g) } if g==f { if h.is_const() { return Norm::Nid(if h==I { I } else { f }) } else { g=I }} else if g.is_const() && h.is_const() { return if g==I { Norm::Nid(f) } else { Norm::Nid(!f) }} else { let nf = !f; if g==nf { g=O } else if h==nf { h=I } else if h==f { h=O } else { let (fv, fi) = (f.vid(), f.idx()); macro_rules! 
cmp { ($x0:expr,$x1:expr) => { { let x0=$x0; ((x0.is_above(&fv)) || ((x0==fv) && ($x1<fi))) }}} if g.is_const() && cmp!(h.vid(),h.idx()) { if g==I { g = f; f = h; h = g; g = I; } else { f = !h; g = O; h = nf; }} else if h.is_const() && cmp!(g.vid(),g.idx()) { if h==I { f = !g; g = nf; h = I; } else { h = f; f = g; g = h; h = O; }} else { let ng = !g; if (h==ng) && cmp!(g.vid(), g.idx()) { h=f; f=g; g=h; h=nf; } else if f.is_inv() { f=g; g=h; h=f; f=nf; } else if g.is_inv() { return match ITE::norm(f,ng,!h) { Norm::Nid(nid) => Norm::Nid(!nid), Norm::Not(ite) => Norm::Ite(ite), Norm::Ite(ite) => Norm::Not(ite)}} else { return Norm::Ite(ITE::new(f,g,h)) }}}}}} } #[derive(Debug, Serialize, Deserialize, Clone)] pub struct BddState { hilos: vhl::HiLoCache, xmemo: BDDHashMap<ITE, NID> } thread_local!{ pub static COUNT_XMEMO_TEST: RefCell<u64> = RefCell::new(0); pub static COUNT_XMEMO_FAIL: RefCell<u64> = RefCell::new(0); } impl BddState { fn new()->BddState { BddState { hilos: vhl::HiLoCache::new(), xmemo: BDDHashMap::default() }} #[inline] fn tup(&self, n:NID)-> (NID, NID) { if n.is_const() { if n==I { (I, O) } else { (O, I) } } else if n.is_vid() { if n.is_inv() { (O, I) } else { (I, O) }} else { let hilo = self.hilos.get_hilo(n); (hilo.hi, hilo.lo) }} #[inline] fn simple_node(&mut self, v:VID, hilo:HiLo)->NID { match self.get_simple_node(v, hilo) { Some(n) => n, None => { self.hilos.insert(v, hilo) }}} #[inline] fn get_memo(&self, ite:&ITE) -> Option<NID> { if ite.i.is_vid() { debug_assert!(!ite.i.is_inv()); let hilo = if ite.i.is_inv() { HiLo::new(ite.e,ite.t) } else { HiLo::new(ite.t,ite.e) }; self.get_simple_node(ite.i.vid(), hilo) } else { COUNT_XMEMO_TEST.with(|c| *c.borrow_mut() += 1 ); let test = self.xmemo.get(&ite).copied(); if test == None { COUNT_XMEMO_FAIL.with(|c| *c.borrow_mut() += 1 ); } test }} #[inline] fn get_simple_node(&self, v:VID, hl:HiLo)-> Option<NID> { self.hilos.get_node(v, hl) }} #[derive(Debug, Serialize, Deserialize)] pub struct BDDBase { 
pub tags: HashMap<String, NID>, swarm: BddSwarm} impl BDDBase { #[inline] fn tup(&self, n:NID)->(NID,NID) { self.swarm.tup(n) } pub fn load(path:&str)->::std::io::Result<BDDBase> { let s = io::get(path)?; Ok(bincode::deserialize(&s).unwrap()) } pub fn gt(&mut self, x:NID, y:NID)->NID { self.ite(x, !y, O) } pub fn lt(&mut self, x:NID, y:NID)->NID { self.ite(x, O, y) } #[inline] pub fn ite(&mut self, f:NID, g:NID, h:NID)->NID { self.swarm.ite(f,g,h) } pub fn swap(&mut self, n:NID, x:VID, y:VID)-> NID { if x.is_below(&y) { return self.swap(n,y,x) } /* x ____ x'____ : \ : \ y __ y __ => y'__ y'__ : \ : \ : \ : \ ll lh hl hh ll hl lh hh */ let (xlo, xhi) = (self.when_lo(x,n), self.when_hi(x,n)); let (xlo_ylo, xlo_yhi) = (self.when_lo(y,xlo), self.when_hi(y,xlo)); let (xhi_ylo, xhi_yhi) = (self.when_lo(y,xhi), self.when_hi(y,xhi)); let lo = self.ite(NID::from_vid(x), xlo_ylo, xhi_ylo); let hi = self.ite(NID::from_vid(y), xlo_yhi, xhi_yhi); self.ite(NID::from_vid(x), lo, hi) } pub fn node_count(&self, n:NID)->usize { let mut c = 0; self.walk(n, &mut |_,_,_,_| c+=1); c } fn tt_aux(&mut self, res:&mut Vec<u8>, n:NID, i:usize, level:u32) { if level == 0 { match n { O => {} I => { res[i] = 1; } x => panic!("expected a leaf nid, got {}", x) }} else { let v = VID::var(level-1); let lo = self.when_lo(v,n); self.tt_aux(res, lo, i*2, level-1); let hi = self.when_hi(v,n); self.tt_aux(res, hi, i*2+1, level-1); }}
} impl Base for BDDBase { fn new()->BDDBase { BDDBase{swarm: BddSwarm::new(), tags:HashMap::new()}} fn when_hi(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).0, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_hi(x,yt), self.when_hi(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn when_lo(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).1, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_lo(x,yt), self.when_lo(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn def(&mut self, _s:String, _i:VID)->NID { todo!("BDDBase::def()") } fn tag(&mut self, n:NID, s:String)->NID { self.tags.insert(s, n); n } fn get(&self, s:&str)->Option<NID> { Some(*self.tags.get(s)?) } fn and(&mut self, x:NID, y:NID)->NID { self.ite(x, y, O) } fn xor(&mut self, x:NID, y:NID)->NID { self.ite(x, !y, y) } fn or(&mut self, x:NID, y:NID)->NID { self.ite(x, I, y) } fn sub(&mut self, v:VID, n:NID, ctx:NID)->NID { if ctx.might_depend_on(v) { let (zt,ze) = self.tup(ctx); let zv = ctx.vid(); if v==zv { self.ite(n, zt, ze) } else { let th = self.sub(v, n, zt); let el = self.sub(v, n, ze); self.ite(NID::from_vid(zv), th, el) }} else { ctx }} fn save(&self, path:&str)->::std::io::Result<()> { let s = bincode::serialize(&self).unwrap(); io::put(path, &s) } fn dot(&self, n:NID, wr: &mut dyn std::fmt::Write) { macro_rules! 
w { ($x:expr $(,$xs:expr)*) => { writeln!(wr, $x $(,$xs)*).unwrap(); }} w!("digraph bdd {{"); w!("subgraph head {{ h1[shape=plaintext; label=\"BDD\"] }}"); w!(" I[label=⊤; shape=square];"); w!(" O[label=⊥; shape=square];"); w!("node[shape=circle];"); self.walk(n, &mut |n,_,_,_| w!(" \"{}\"[label=\"{}\"];", n, n.vid())); w!("edge[style=solid];"); self.walk(n, &mut |n,_,t,_| w!(" \"{}\"->\"{}\";", n, t)); w!("edge[style=dashed];"); self.walk(n, &mut |n,_,_,e| w!(" \"{}\"->\"{}\";", n, e)); w!("}}"); } fn solution_set(&self, n: NID, nvars: usize)->hashbrown::HashSet<Reg> { self.solutions_pad(n, nvars).collect() }} include!("test-bdd.rs");
pub fn tt(&mut self, n0:NID, num_vars:u32)->Vec<u8> { if !n0.vid().is_var() { todo!("tt only works for actual variables. got {:?}", n0); } if num_vars > 16 { panic!("refusing to generate a truth table of 2^{} bytes", num_vars) } if num_vars == 0 { panic!("num_vars should be > 0")} let mut res = vec![0;(1 << num_vars) as usize]; self.tt_aux(&mut res, n0, 0, num_vars); res }
function_block-full_function
[ { "content": "#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\nstruct XHiLo { pub hi: XID, pub lo: XID }\n\nimpl std::ops::Not for XHiLo { type Output = XHiLo; fn not(self)->XHiLo { XHiLo { hi:!self.hi, lo:!self.lo }}}\n\nimpl XHiLo { fn as_tup(&self)->(XID,XID) { (self.hi, self.lo) }}\n\n\n\n#[derive(Debu...
Rust
crates/tools/src/setup.rs
RetricSu/godwoken
88df30ffad824b3b3b2980d67ece79ca31ab0b47
use crate::deploy_genesis::deploy_genesis; use crate::deploy_scripts::deploy_scripts; use crate::generate_config::generate_config; use crate::prepare_scripts::{self, prepare_scripts, ScriptsBuildMode}; use crate::utils; use ckb_types::{ core::ScriptHashType, packed as ckb_packed, prelude::Builder as CKBBuilder, prelude::Pack as CKBPack, prelude::Unpack as CKBUnpack, }; use gw_types::prelude::Entity as GwEntity; use rand::Rng; use serde::Serialize; use serde_json::json; use std::fs; use std::{ collections::HashMap, path::{Path, PathBuf}, thread, time, }; pub const TRANSFER_CAPACITY: &str = "200000"; const MIN_WALLET_CAPACITY: f64 = 100000.0f64; #[derive(Debug)] pub struct NodeWalletInfo { pub testnet_address: String, pub lock_hash: String, pub lock_arg: String, pub block_assembler_code_hash: String, } #[allow(clippy::too_many_arguments)] pub fn setup( ckb_rpc_url: &str, indexer_url: &str, mode: ScriptsBuildMode, scripts_path: &Path, privkey_path: &Path, nodes_count: u8, server_url: &str, output_dir: &Path, ) { let prepare_scripts_result = utils::make_path(output_dir, vec!["scripts-deploy.json"]); prepare_scripts( mode, scripts_path, Path::new(prepare_scripts::REPOS_DIR_PATH), Path::new(prepare_scripts::SCRIPTS_DIR_PATH), &prepare_scripts_result, ) .expect("prepare scripts"); let scripts_deployment_result = utils::make_path(output_dir, vec!["scripts-deploy-result.json"]); deploy_scripts( privkey_path, ckb_rpc_url, &prepare_scripts_result, &scripts_deployment_result, ) .expect("deploy scripts"); let poa_config_path = utils::make_path(output_dir, vec!["poa-config.json"]); let rollup_config_path = utils::make_path(output_dir, vec!["rollup-config.json"]); let capacity = TRANSFER_CAPACITY.parse().expect("get capacity"); prepare_nodes_configs( privkey_path, capacity, nodes_count, output_dir, &poa_config_path, &rollup_config_path, ); let genesis_deploy_result = utils::make_path(output_dir, vec!["genesis-deploy-result.json"]); deploy_genesis( privkey_path, ckb_rpc_url, 
&scripts_deployment_result, &rollup_config_path, &poa_config_path, None, &genesis_deploy_result, false, ) .expect("deploy genesis"); (0..nodes_count).for_each(|index| { let node_name = format!("node{}", index + 1); let privkey_path = utils::make_path(output_dir, vec![&node_name, &"pk".to_owned()]); let output_file_path = utils::make_path(output_dir, vec![node_name, "config.toml".to_owned()]); generate_config( &genesis_deploy_result, &scripts_deployment_result, privkey_path.as_ref(), ckb_rpc_url.to_owned(), indexer_url.to_owned(), output_file_path.as_ref(), None, &prepare_scripts_result, server_url.to_string(), ) .expect("generate_config"); }); log::info!("Finish"); } fn prepare_nodes_configs( payer_privkey: &Path, capacity: u32, nodes_count: u8, output_dir: &Path, poa_config_path: &Path, rollup_config_path: &Path, ) { let nodes_privkeys = prepare_privkeys(output_dir, nodes_count); let nodes_info = check_wallets_info(nodes_privkeys, capacity, payer_privkey); generate_poa_config(&nodes_info, poa_config_path); generate_rollup_config(rollup_config_path); } fn prepare_privkeys(output_dir: &Path, nodes_count: u8) -> HashMap<String, PathBuf> { (0..nodes_count) .map(|index| { let node_name = format!("node{}", (index + 1).to_string()); let node_dir = utils::make_path(output_dir, vec![&node_name]); fs::create_dir_all(&node_dir).expect("create node dir"); let privkey_file = utils::make_path(&node_dir, vec!["pk"]); let privkey = fs::read_to_string(&privkey_file) .map(|s| s.trim().into()) .unwrap_or_else(|_| Vec::new()); if !privkey.starts_with(b"0x") || privkey.len() != 66 || hex::decode(&privkey[2..]).is_err() { log::info!("Generate privkey file..."); generate_privkey_file(&privkey_file); } (node_name, privkey_file) }) .collect() } fn check_wallets_info( nodes_privkeys: HashMap<String, PathBuf>, capacity: u32, payer_privkey_path: &Path, ) -> HashMap<String, NodeWalletInfo> { nodes_privkeys .into_iter() .map(|(node, privkey)| { let wallet_info = get_wallet_info(&privkey); let 
mut current_capacity = query_wallet_capacity(&wallet_info.testnet_address); log::info!("{}'s wallet capacity: {}", node, current_capacity); if current_capacity < MIN_WALLET_CAPACITY { log::info!("Start to transfer ckb, and it will take 30 seconds..."); transfer_ckb(&wallet_info, payer_privkey_path, capacity); thread::sleep(time::Duration::from_secs(30)); current_capacity = query_wallet_capacity(&wallet_info.testnet_address); assert!( current_capacity >= MIN_WALLET_CAPACITY, "wallet haven't received ckb, please try again" ); log::info!("{}'s wallet capacity: {}", node, current_capacity); } (node, wallet_info) }) .collect() } fn generate_poa_config(nodes_info: &HashMap<String, NodeWalletInfo>, poa_config_path: &Path) { let identities: Vec<&str> = nodes_info .iter() .map(|(_, node)| node.lock_hash.as_str()) .collect(); let poa_config = json!({ "poa_setup" : { "identity_size": 32, "round_interval_uses_seconds": true, "identities": identities, "aggregator_change_threshold": identities.len(), "round_intervals": 24, "subblocks_per_round": 1 } }); generate_json_file(&poa_config, poa_config_path); } fn generate_rollup_config(rollup_config_path: &Path) { let burn_lock_script = ckb_packed::Script::new_builder() .code_hash(CKBPack::pack(&[0u8; 32])) .hash_type(ScriptHashType::Data.into()) .build(); let burn_lock_script_hash: [u8; 32] = burn_lock_script.calc_script_hash().unpack(); let rollup_config = json!({ "l1_sudt_script_type_hash": "0x0000000000000000000000000000000000000000000000000000000000000000", "burn_lock_hash": format!("0x{}", hex::encode(burn_lock_script_hash)), "required_staking_capacity": 10000000000u64, "challenge_maturity_blocks": 5, "finality_blocks": 20, "reward_burn_rate": 50, "compatible_chain_id": 0, "allowed_eoa_type_hashes": [] }); generate_json_file(&rollup_config, rollup_config_path); log::info!("Finish"); } fn generate_privkey_file(privkey_file_path: &Path) { let key = rand::thread_rng().gen::<[u8; 32]>(); let privkey = format!("0x{}", 
hex::encode(key)); fs::write(&privkey_file_path, &privkey).expect("create pk file"); } pub fn get_wallet_info(privkey_path: &Path) -> NodeWalletInfo { let (stdout, stderr) = utils::run_in_output_mode( "ckb-cli", vec![ "util", "key-info", "--privkey-path", &privkey_path.display().to_string(), ], ) .expect("get key info"); NodeWalletInfo { testnet_address: look_after_in_line(&stdout, "testnet:"), lock_hash: look_after_in_line(&stdout, "lock_hash:"), lock_arg: look_after_in_line(&stdout, "lock_arg:"), block_assembler_code_hash: look_after_in_line(&stderr, "code_hash ="), } } fn query_wallet_capacity(address: &str) -> f64 { let (stdout, _) = utils::run_in_output_mode( "ckb-cli", vec!["wallet", "get-capacity", "--address", address], ) .expect("query wallet capacity"); look_after_in_line(&stdout, "total:") .split(' ') .collect::<Vec<&str>>()[0] .parse::<f64>() .expect("parse capacity") } fn transfer_ckb(node_wallet: &NodeWalletInfo, payer_privkey_path: &Path, capacity: u32) { utils::run( "ckb-cli", vec![ "wallet", "transfer", "--to-address", &node_wallet.testnet_address, "--capacity", &capacity.to_string(), "--tx-fee", "1", "--privkey-path", &payer_privkey_path.display().to_string(), ], ) .expect("transfer ckb"); } fn look_after_in_line(text: &str, key: &str) -> String { text.split(key).collect::<Vec<&str>>()[1] .split('\n') .collect::<Vec<&str>>()[0] .trim_matches(&['"', ' '][..]) .to_owned() } fn generate_json_file<T>(value: &T, json_file_path: &Path) where T: Serialize, { let output_content = serde_json::to_string_pretty(value).expect("serde json to string pretty"); let output_dir = json_file_path.parent().expect("get output dir"); fs::create_dir_all(&output_dir).expect("create output dir"); fs::write(json_file_path, output_content.as_bytes()).expect("generate json file"); }
use crate::deploy_genesis::deploy_genesis; use crate::deploy_scripts::deploy_scripts; use crate::generate_config::generate_config; use crate::prepare_scripts::{self, prepare_scripts, ScriptsBuildMode}; use crate::utils; use ckb_types::{ core::ScriptHashType, packed as ckb_packed, prelude::Builder as CKBBuilder, prelude::Pack as CKBPack, prelude::Unpack as CKBUnpack, }; use gw_types::prelude::Entity as GwEntity; use rand::Rng; use serde::Serialize; use serde_json::json; use std::fs; use std::{ collections::HashMap, path::{Path, PathBuf}, thread, time, }; pub const TRANSFER_CAPACITY: &str = "200000"; const MIN_WALLET_CAPACITY: f64 = 100000.0f64; #[derive(Debug)] pub struct NodeWalletInfo { pub testnet_address: String, pub lock_hash: String, pub lock_arg: String, pub block_assembler_code_hash: String, } #[allow(clippy::too_many_arguments)] pub fn setup( ckb_rpc_url: &str, indexer_url: &str, mode: ScriptsBuildMode, scripts_path: &Path, privkey_path: &Path, nodes_count: u8, server_url: &str, output_dir: &Path, ) { let prepare_scripts_result = utils::make_path(output_dir, vec!["scripts-deploy.json"]); prepare_scripts( mode, scripts_path, Path::new(prepare_scripts::REPOS_DIR_PATH), Path::new(prepare_scripts::SCRIPTS_DIR_PATH), &prepare_scripts_result, ) .expect("prepare scripts"); let scripts_deployment_result = utils::make_path(output_dir, vec!["scripts-deploy-result.json"]); deploy_scripts( privkey_path, ckb_rpc_url, &prepare_scripts_result, &scripts_deployment_result, ) .expect("deploy scripts"); let poa_config_path = utils::make_path(output_dir, vec!["poa-config.json"]); let rollup_config_path = utils::make_path(output_dir, vec!["rollup-config.json"]); let capacity = TRANSFER_CAPACITY.parse().expect("get capacity"); prepare_nodes_configs( privkey_path, capacity, nodes_count, output_dir, &poa_config_path, &rollup_config_path, ); let genesis_deploy_result = utils::make_path(output_dir, vec!["genesis-deploy-result.json"]); deploy_genesis( privkey_path, ckb_rpc_url, 
&scripts_deployment_result, &rollup_config_path, &poa_config_path, None, &genesis_deploy_result, false, ) .expect("deploy genesis"); (0..nodes_count).for_each(|index| { let node_name = format!("node{}", index + 1); let privkey_path = utils::make_path(output_dir, vec![&node_name, &"pk".to_owned()]); let output_file_path = utils::make_path(output_dir, vec![node_name, "config.toml".to_owned()]); generate_config( &genesis_deploy_result, &scripts_deployment_result, privkey_path.as_ref(), ckb_rpc_url.to_owned(), indexer_url.to_owned(), output_file_path.as_ref(), None, &prepare_scripts_result, server_url.to_string(), ) .expect("generate_config"); }); log::info!("Finish"); } fn prepare_nodes_configs( payer_privkey: &Path, capacity: u32, nodes_count: u8, output_dir: &Path, poa_config_path: &Path, rollup_config_path: &Path, ) { let nodes_privkeys = prepare_privkeys(output_dir, nodes_count); let nodes_info = check_wallets_info(nodes_privkeys, capacity, payer_privkey); generate_poa_config(&nodes_info, poa_config_path); generate_rollup_config(rollup_config_path); } fn prepare_privkeys(output_dir: &Path, nodes_count: u8) -> HashMap<String, PathBuf> { (0..nodes_count) .map(|index| { let node_name = format!("node{}", (index + 1).to_string()); let node_dir = utils::make_path(output_dir, vec![&node_name]); fs::create_dir_all(&node_dir).expect("create node dir"); let privkey_file = u
) } fn transfer_ckb(node_wallet: &NodeWalletInfo, payer_privkey_path: &Path, capacity: u32) { utils::run( "ckb-cli", vec![ "wallet", "transfer", "--to-address", &node_wallet.testnet_address, "--capacity", &capacity.to_string(), "--tx-fee", "1", "--privkey-path", &payer_privkey_path.display().to_string(), ], ) .expect("transfer ckb"); } fn look_after_in_line(text: &str, key: &str) -> String { text.split(key).collect::<Vec<&str>>()[1] .split('\n') .collect::<Vec<&str>>()[0] .trim_matches(&['"', ' '][..]) .to_owned() } fn generate_json_file<T>(value: &T, json_file_path: &Path) where T: Serialize, { let output_content = serde_json::to_string_pretty(value).expect("serde json to string pretty"); let output_dir = json_file_path.parent().expect("get output dir"); fs::create_dir_all(&output_dir).expect("create output dir"); fs::write(json_file_path, output_content.as_bytes()).expect("generate json file"); }
tils::make_path(&node_dir, vec!["pk"]); let privkey = fs::read_to_string(&privkey_file) .map(|s| s.trim().into()) .unwrap_or_else(|_| Vec::new()); if !privkey.starts_with(b"0x") || privkey.len() != 66 || hex::decode(&privkey[2..]).is_err() { log::info!("Generate privkey file..."); generate_privkey_file(&privkey_file); } (node_name, privkey_file) }) .collect() } fn check_wallets_info( nodes_privkeys: HashMap<String, PathBuf>, capacity: u32, payer_privkey_path: &Path, ) -> HashMap<String, NodeWalletInfo> { nodes_privkeys .into_iter() .map(|(node, privkey)| { let wallet_info = get_wallet_info(&privkey); let mut current_capacity = query_wallet_capacity(&wallet_info.testnet_address); log::info!("{}'s wallet capacity: {}", node, current_capacity); if current_capacity < MIN_WALLET_CAPACITY { log::info!("Start to transfer ckb, and it will take 30 seconds..."); transfer_ckb(&wallet_info, payer_privkey_path, capacity); thread::sleep(time::Duration::from_secs(30)); current_capacity = query_wallet_capacity(&wallet_info.testnet_address); assert!( current_capacity >= MIN_WALLET_CAPACITY, "wallet haven't received ckb, please try again" ); log::info!("{}'s wallet capacity: {}", node, current_capacity); } (node, wallet_info) }) .collect() } fn generate_poa_config(nodes_info: &HashMap<String, NodeWalletInfo>, poa_config_path: &Path) { let identities: Vec<&str> = nodes_info .iter() .map(|(_, node)| node.lock_hash.as_str()) .collect(); let poa_config = json!({ "poa_setup" : { "identity_size": 32, "round_interval_uses_seconds": true, "identities": identities, "aggregator_change_threshold": identities.len(), "round_intervals": 24, "subblocks_per_round": 1 } }); generate_json_file(&poa_config, poa_config_path); } fn generate_rollup_config(rollup_config_path: &Path) { let burn_lock_script = ckb_packed::Script::new_builder() .code_hash(CKBPack::pack(&[0u8; 32])) .hash_type(ScriptHashType::Data.into()) .build(); let burn_lock_script_hash: [u8; 32] = 
burn_lock_script.calc_script_hash().unpack(); let rollup_config = json!({ "l1_sudt_script_type_hash": "0x0000000000000000000000000000000000000000000000000000000000000000", "burn_lock_hash": format!("0x{}", hex::encode(burn_lock_script_hash)), "required_staking_capacity": 10000000000u64, "challenge_maturity_blocks": 5, "finality_blocks": 20, "reward_burn_rate": 50, "compatible_chain_id": 0, "allowed_eoa_type_hashes": [] }); generate_json_file(&rollup_config, rollup_config_path); log::info!("Finish"); } fn generate_privkey_file(privkey_file_path: &Path) { let key = rand::thread_rng().gen::<[u8; 32]>(); let privkey = format!("0x{}", hex::encode(key)); fs::write(&privkey_file_path, &privkey).expect("create pk file"); } pub fn get_wallet_info(privkey_path: &Path) -> NodeWalletInfo { let (stdout, stderr) = utils::run_in_output_mode( "ckb-cli", vec![ "util", "key-info", "--privkey-path", &privkey_path.display().to_string(), ], ) .expect("get key info"); NodeWalletInfo { testnet_address: look_after_in_line(&stdout, "testnet:"), lock_hash: look_after_in_line(&stdout, "lock_hash:"), lock_arg: look_after_in_line(&stdout, "lock_arg:"), block_assembler_code_hash: look_after_in_line(&stderr, "code_hash ="), } } fn query_wallet_capacity(address: &str) -> f64 { let (stdout, _) = utils::run_in_output_mode( "ckb-cli", vec!["wallet", "get-capacity", "--address", address], ) .expect("query wallet capacity"); look_after_in_line(&stdout, "total:") .split(' ') .collect::<Vec<&str>>()[0] .parse::<f64>() .expect("parse capacity"
random
[ { "content": "pub fn hex(raw: &[u8]) -> Result<String> {\n\n Ok(format!(\"0x{}\", faster_hex::hex_string(raw)?))\n\n}\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 4, "score": 274876.3472011481 }, { "content": "fn prepare_scripts_in_copy_mode(prebuild_image: &PathBuf, scri...
Rust
src/rule_finder.rs
ciphergoth/rerast
2abd359242dee27ab09919259154274cca612bb5
use std::marker; use syntax::ast::NodeId; use syntax::symbol::Symbol; use syntax::ext::quote::rt::Span; use std::vec::Vec; use rustc::hir::{self, intravisit}; use rustc::ty::{self, TyCtxt}; use definitions::RerastDefinitions; use rule_matcher::{Matchable, OperatorPrecedence}; use rules::{Rule, Rules}; use errors::ErrorWithSpan; use super::DeclaredNamesFinder; pub(crate) struct RuleFinder<'a, 'gcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, rules_mod_symbol: Symbol, rules: Rules<'gcx>, body_id: Option<hir::BodyId>, in_rules_module: bool, errors: Vec<ErrorWithSpan>, } impl<'a, 'gcx> RuleFinder<'a, 'gcx> { pub(crate) fn find_rules( tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, krate: &'gcx hir::Crate, ) -> Result<Rules<'gcx>, Vec<ErrorWithSpan>> { let mut rule_finder = RuleFinder { tcx, rerast_definitions, rules_mod_symbol: Symbol::intern(super::RULES_MOD_NAME), rules: Rules::new(), body_id: None, in_rules_module: false, errors: Vec::new(), }; intravisit::walk_crate(&mut rule_finder, krate); if rule_finder.errors.is_empty() { Ok(rule_finder.rules) } else { Err(rule_finder.errors) } } fn maybe_add_rule( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, arg_ty_span: Span, ) -> Result<(), Vec<ErrorWithSpan>> { if self.maybe_add_typed_rule::<hir::Expr>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Pat>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::TraitRef>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Ty>(arg_ty, arms, body_id)? 
{ Ok(()) } else { Err(vec![ ErrorWithSpan::new("Unexpected code found in rule function", arg_ty_span), ]) } } fn maybe_add_typed_rule<T: 'gcx + StartMatch>( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, ) -> Result<bool, Vec<ErrorWithSpan>> { fn get_arm(arms: &[hir::Arm], arm_name: Symbol) -> Option<&hir::Block> { for arm in arms { if let hir::PatKind::Path(hir::QPath::Resolved(None, ref path)) = arm.pats[0].node { if let Some(segment) = path.segments.last() { if segment.name == arm_name { if let hir::Expr_::ExprBlock(ref block) = arm.body.node { return Some(block); } } } } } None } if arg_ty != T::replace_marker_type(&self.rerast_definitions) { return Ok(false); } if let (Some(search_block), Some(replace_block)) = ( get_arm(arms, self.rerast_definitions.search_symbol), get_arm(arms, self.rerast_definitions.replace_symbol), ) { let search = T::extract_root(search_block)?; let replace = T::extract_root(replace_block)?; let placeholder_ids = self.tcx .hir .body(body_id) .arguments .iter() .map(|arg| arg.pat.id) .collect(); let rule = Rule { search, replace, body_id, declared_name_node_ids: DeclaredNamesFinder::find(self.tcx, search), placeholder_ids, }; rule.validate(self.tcx)?; T::add_rule(rule, &mut self.rules); } else { panic!("Missing search/replace pattern"); } Ok(true) } } impl<'a, 'gcx, 'tcx> intravisit::Visitor<'gcx> for RuleFinder<'a, 'gcx> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir) } fn visit_item(&mut self, item: &'gcx hir::Item) { use hir::Item_::*; if let ItemMod(_) = item.node { if item.name == self.rules_mod_symbol { self.in_rules_module = true; intravisit::walk_item(self, item); self.in_rules_module = false; return; } else if !self.in_rules_module { return; } } intravisit::walk_item(self, item); } fn visit_expr(&mut self, expr: &'gcx hir::Expr) { if !self.in_rules_module { return; } use hir::Expr_::*; if let ExprMatch(ref 
match_expr, ref arms, _) = expr.node { if let ExprMethodCall(ref _name, ref _tys, ref args) = match_expr.node { if let Some(body_id) = self.body_id { let type_tables = self.tcx .typeck_tables_of(self.tcx.hir.body_owner_def_id(body_id)); let arg0 = &args[0]; let arg_ty = type_tables.node_id_to_type(self.tcx.hir.node_to_hir_id(arg0.id)); if let Err(errors) = self.maybe_add_rule(arg_ty, arms, body_id, arg0.span) { self.errors.extend(errors); } return; } } } intravisit::walk_expr(self, expr) } fn visit_body(&mut self, body: &'gcx hir::Body) { if !self.in_rules_module { return; } let old_body_id = self.body_id; self.body_id = Some(body.id()); intravisit::walk_body(self, body); self.body_id = old_body_id; } } pub(crate) trait StartMatch: Matchable { fn span(&self) -> Span; fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self); fn needs_parenthesis(_parent: Option<&Self>, _child: &Self) -> bool { false } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan>; fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) where Self: marker::Sized; fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx>; fn bindings_can_match_patterns() -> bool { false } fn node_id(&self) -> NodeId; } impl StartMatch for hir::Expr { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_expr(visitor, node); } fn needs_parenthesis(parent: Option<&Self>, child: &Self) -> bool { OperatorPrecedence::needs_parenthesis(parent, child) } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref addr_expr, _) = block.stmts[0].node { if let hir::Expr_::ExprAddrOf(_, ref expr) = addr_expr.node { return Ok(&**expr); } } } Err(ErrorWithSpan::new( "replace! 
macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.expr_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.expr_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::Ty { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_ty(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtDecl(ref decl, _) = block.stmts[0].node { if let hir::Decl_::DeclLocal(ref local) = decl.node { if let Some(ref ref_ty) = local.ty { if let hir::Ty_::TyRptr(_, ref mut_ty) = ref_ty.node { return Ok(&*mut_ty.ty); } } } } } Err(ErrorWithSpan::new( "replace_type! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.type_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.type_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::TraitRef { fn span(&self) -> Span { self.path.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_trait_ref(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { let ty = <hir::Ty as StartMatch>::extract_root(block)?; if let hir::Ty_::TyTraitObject(ref bounds, _) = ty.node { if bounds.len() == 1 { return Ok(&bounds[0].trait_ref); } else { return Err(ErrorWithSpan::new( "replace_trait_ref! requires exactly one trait", ty.span, )); } } else { return Err(ErrorWithSpan::new( "replace_trait_ref! 
requires a trait", ty.span, )); } } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.trait_ref_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.trait_ref_rule_marker } fn node_id(&self) -> NodeId { self.ref_id } } impl StartMatch for hir::Pat { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_pat(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref expr, _) = block.stmts[0].node { if let hir::Expr_::ExprMatch(_, ref arms, _) = expr.node { if let hir::PatKind::TupleStruct(_, ref patterns, _) = arms[0].pats[0].node { return Ok(&patterns[0]); } } } } Err(ErrorWithSpan::new( "replace_pattern! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.pattern_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.pattern_rule_marker } fn bindings_can_match_patterns() -> bool { true } fn node_id(&self) -> NodeId { self.id } }
use std::marker; use syntax::ast::NodeId; use syntax::symbol::Symbol; use syntax::ext::quote::rt::Span; use std::vec::Vec; use rustc::hir::{self, intravisit}; use rustc::ty::{self, TyCtxt}; use definitions::RerastDefinitions; use rule_matcher::{Matchable, OperatorPrecedence}; use rules::{Rule, Rules}; use errors::ErrorWithSpan; use super::DeclaredNamesFinder; pub(crate) struct RuleFinder<'a, 'gcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, rules_mod_symbol: Symbol, rules: Rules<'gcx>, body_id: Option<hir::BodyId>, in_rules_module: bool, errors: Vec<ErrorWithSpan>, } impl<'a, 'gcx> RuleFinder<'a, 'gcx> { pub(crate) fn find_rules( tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, krate: &'gcx hir::Crate, ) -> Result<Rules<'gcx>, Vec<ErrorWithSpan>> {
intravisit::walk_crate(&mut rule_finder, krate); if rule_finder.errors.is_empty() { Ok(rule_finder.rules) } else { Err(rule_finder.errors) } } fn maybe_add_rule( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, arg_ty_span: Span, ) -> Result<(), Vec<ErrorWithSpan>> { if self.maybe_add_typed_rule::<hir::Expr>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Pat>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::TraitRef>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Ty>(arg_ty, arms, body_id)? { Ok(()) } else { Err(vec![ ErrorWithSpan::new("Unexpected code found in rule function", arg_ty_span), ]) } } fn maybe_add_typed_rule<T: 'gcx + StartMatch>( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, ) -> Result<bool, Vec<ErrorWithSpan>> { fn get_arm(arms: &[hir::Arm], arm_name: Symbol) -> Option<&hir::Block> { for arm in arms { if let hir::PatKind::Path(hir::QPath::Resolved(None, ref path)) = arm.pats[0].node { if let Some(segment) = path.segments.last() { if segment.name == arm_name { if let hir::Expr_::ExprBlock(ref block) = arm.body.node { return Some(block); } } } } } None } if arg_ty != T::replace_marker_type(&self.rerast_definitions) { return Ok(false); } if let (Some(search_block), Some(replace_block)) = ( get_arm(arms, self.rerast_definitions.search_symbol), get_arm(arms, self.rerast_definitions.replace_symbol), ) { let search = T::extract_root(search_block)?; let replace = T::extract_root(replace_block)?; let placeholder_ids = self.tcx .hir .body(body_id) .arguments .iter() .map(|arg| arg.pat.id) .collect(); let rule = Rule { search, replace, body_id, declared_name_node_ids: DeclaredNamesFinder::find(self.tcx, search), placeholder_ids, }; rule.validate(self.tcx)?; T::add_rule(rule, &mut self.rules); } else { panic!("Missing search/replace pattern"); } Ok(true) } } impl<'a, 'gcx, 'tcx> intravisit::Visitor<'gcx> for RuleFinder<'a, 'gcx> { fn 
nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir) } fn visit_item(&mut self, item: &'gcx hir::Item) { use hir::Item_::*; if let ItemMod(_) = item.node { if item.name == self.rules_mod_symbol { self.in_rules_module = true; intravisit::walk_item(self, item); self.in_rules_module = false; return; } else if !self.in_rules_module { return; } } intravisit::walk_item(self, item); } fn visit_expr(&mut self, expr: &'gcx hir::Expr) { if !self.in_rules_module { return; } use hir::Expr_::*; if let ExprMatch(ref match_expr, ref arms, _) = expr.node { if let ExprMethodCall(ref _name, ref _tys, ref args) = match_expr.node { if let Some(body_id) = self.body_id { let type_tables = self.tcx .typeck_tables_of(self.tcx.hir.body_owner_def_id(body_id)); let arg0 = &args[0]; let arg_ty = type_tables.node_id_to_type(self.tcx.hir.node_to_hir_id(arg0.id)); if let Err(errors) = self.maybe_add_rule(arg_ty, arms, body_id, arg0.span) { self.errors.extend(errors); } return; } } } intravisit::walk_expr(self, expr) } fn visit_body(&mut self, body: &'gcx hir::Body) { if !self.in_rules_module { return; } let old_body_id = self.body_id; self.body_id = Some(body.id()); intravisit::walk_body(self, body); self.body_id = old_body_id; } } pub(crate) trait StartMatch: Matchable { fn span(&self) -> Span; fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self); fn needs_parenthesis(_parent: Option<&Self>, _child: &Self) -> bool { false } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan>; fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) where Self: marker::Sized; fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx>; fn bindings_can_match_patterns() -> bool { false } fn node_id(&self) -> NodeId; } impl StartMatch for hir::Expr { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, 
node: &'gcx Self) { intravisit::walk_expr(visitor, node); } fn needs_parenthesis(parent: Option<&Self>, child: &Self) -> bool { OperatorPrecedence::needs_parenthesis(parent, child) } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref addr_expr, _) = block.stmts[0].node { if let hir::Expr_::ExprAddrOf(_, ref expr) = addr_expr.node { return Ok(&**expr); } } } Err(ErrorWithSpan::new( "replace! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.expr_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.expr_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::Ty { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_ty(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtDecl(ref decl, _) = block.stmts[0].node { if let hir::Decl_::DeclLocal(ref local) = decl.node { if let Some(ref ref_ty) = local.ty { if let hir::Ty_::TyRptr(_, ref mut_ty) = ref_ty.node { return Ok(&*mut_ty.ty); } } } } } Err(ErrorWithSpan::new( "replace_type! 
macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.type_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.type_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::TraitRef { fn span(&self) -> Span { self.path.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_trait_ref(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { let ty = <hir::Ty as StartMatch>::extract_root(block)?; if let hir::Ty_::TyTraitObject(ref bounds, _) = ty.node { if bounds.len() == 1 { return Ok(&bounds[0].trait_ref); } else { return Err(ErrorWithSpan::new( "replace_trait_ref! requires exactly one trait", ty.span, )); } } else { return Err(ErrorWithSpan::new( "replace_trait_ref! requires a trait", ty.span, )); } } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.trait_ref_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.trait_ref_rule_marker } fn node_id(&self) -> NodeId { self.ref_id } } impl StartMatch for hir::Pat { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_pat(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref expr, _) = block.stmts[0].node { if let hir::Expr_::ExprMatch(_, ref arms, _) = expr.node { if let hir::PatKind::TupleStruct(_, ref patterns, _) = arms[0].pats[0].node { return Ok(&patterns[0]); } } } } Err(ErrorWithSpan::new( "replace_pattern! 
macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.pattern_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.pattern_rule_marker } fn bindings_can_match_patterns() -> bool { true } fn node_id(&self) -> NodeId { self.id } }
let mut rule_finder = RuleFinder { tcx, rerast_definitions, rules_mod_symbol: Symbol::intern(super::RULES_MOD_NAME), rules: Rules::new(), body_id: None, in_rules_module: false, errors: Vec::new(), };
assignment_statement
[ { "content": "struct Placeholder<'gcx> {\n\n expr: &'gcx hir::Expr,\n\n uses: Vec<Span>,\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 0, "score": 126178.62619493643 }, { "content": "struct RuleFinder<'a, 'gcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n changed_span:...
Rust
kernel/env/mod.rs
pwoolcoc/redox
87f5ea23d26502494439cbf2a094bb5102f989b8
use alloc::boxed::Box; use collections::string::{String, ToString}; use collections::vec::Vec; use core::cell::UnsafeCell; use arch::context::ContextManager; use common::event::Event; use common::time::Duration; use disk::Disk; use network::Nic; use fs::{KScheme, Resource, Scheme, VecResource, Url}; use sync::WaitQueue; use system::error::{Error, Result, ENOENT, EEXIST}; use system::syscall::{O_CREAT, Stat}; use self::console::Console; use self::log::Log; pub mod console; pub mod log; pub struct Environment { pub contexts: UnsafeCell<ContextManager>, pub clock_realtime: UnsafeCell<Duration>, pub clock_monotonic: UnsafeCell<Duration>, pub console: UnsafeCell<Console>, pub disks: UnsafeCell<Vec<Box<Disk>>>, pub nics: UnsafeCell<Vec<Box<Nic>>>, pub events: WaitQueue<Event>, pub log: UnsafeCell<Log>, pub schemes: UnsafeCell<Vec<Box<KScheme>>>, pub interrupts: UnsafeCell<[u64; 256]>, } impl Environment { pub fn new() -> Box<Environment> { box Environment { contexts: UnsafeCell::new(ContextManager::new()), clock_realtime: UnsafeCell::new(Duration::new(0, 0)), clock_monotonic: UnsafeCell::new(Duration::new(0, 0)), console: UnsafeCell::new(Console::new()), disks: UnsafeCell::new(Vec::new()), nics: UnsafeCell::new(Vec::new()), events: WaitQueue::new(), log: UnsafeCell::new(Log::new()), schemes: UnsafeCell::new(Vec::new()), interrupts: UnsafeCell::new([0; 256]), } } pub fn on_irq(&self, irq: u8) { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { scheme.on_irq(irq); } } pub fn open(&self, url: Url, flags: usize) -> Result<Box<Resource>> { let url_scheme = url.scheme(); if url_scheme.is_empty() { let url_path = url.reference(); if url_path.trim_matches('/').is_empty() { let mut list = String::new(); for scheme in unsafe { &mut *self.schemes.get() }.iter() { let scheme_str = scheme.scheme(); if !scheme_str.is_empty() { if !list.is_empty() { list = list + "\n" + scheme_str; } else { list = scheme_str.to_string(); } } } Ok(box VecResource::new(":".to_string(), 
list.into_bytes())) } else if flags & O_CREAT == O_CREAT { for scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_path { return Err(Error::new(EEXIST)); } } match Scheme::new(url_path) { Ok((scheme, server)) => { unsafe { &mut *self.schemes.get() }.push(scheme); Ok(server) }, Err(err) => Err(err) } } else { Err(Error::new(ENOENT)) } } else { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.open(url, flags); } } Err(Error::new(ENOENT)) } } pub fn mkdir(&self, url: Url, flags: usize) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.mkdir(url, flags); } } } Err(Error::new(ENOENT)) } pub fn rmdir(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.rmdir(url); } } } Err(Error::new(ENOENT)) } pub fn stat(&self, url: Url, stat: &mut Stat) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.stat(url, stat); } } } Err(Error::new(ENOENT)) } pub fn unlink(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.unlink(url); } } } Err(Error::new(ENOENT)) } }
use alloc::boxed::Box; use collections::string::{String, ToString}; use collections::vec::Vec; use core::cell::UnsafeCell; use arch::context::ContextManager; use common::event::Event; use common::time::Duration; use disk::Disk; use network::Nic; use fs::{KScheme, Resource, Scheme, VecResource, Url}; use sync::WaitQueue; use system::error::{Error, Result, ENOENT, EEXIST}; use system::syscall::{O_CREAT, Stat}; use self::console::Console; use self::log::Log; pub mod console; pub mod log; pub struct Environment { pub contexts: UnsafeCell<ContextManager>, pub clock_realtime: UnsafeCell<Duration>, pub clock_monotonic: UnsafeCell<Duration>, pub console: UnsafeCell<Console>, pub disks: UnsafeCell<Vec<Box<Disk>>>, pub nics: UnsafeCell<Vec<Box<Nic>>>, pub events: WaitQueue<Event>, pub log: UnsafeCell<Log>, pub schemes: UnsafeCell<Vec<Box<KScheme>>>,
} } else { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.open(url, flags); } } Err(Error::new(ENOENT)) } } pub fn mkdir(&self, url: Url, flags: usize) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.mkdir(url, flags); } } } Err(Error::new(ENOENT)) } pub fn rmdir(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.rmdir(url); } } } Err(Error::new(ENOENT)) } pub fn stat(&self, url: Url, stat: &mut Stat) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.stat(url, stat); } } } Err(Error::new(ENOENT)) } pub fn unlink(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.unlink(url); } } } Err(Error::new(ENOENT)) } }
pub interrupts: UnsafeCell<[u64; 256]>, } impl Environment { pub fn new() -> Box<Environment> { box Environment { contexts: UnsafeCell::new(ContextManager::new()), clock_realtime: UnsafeCell::new(Duration::new(0, 0)), clock_monotonic: UnsafeCell::new(Duration::new(0, 0)), console: UnsafeCell::new(Console::new()), disks: UnsafeCell::new(Vec::new()), nics: UnsafeCell::new(Vec::new()), events: WaitQueue::new(), log: UnsafeCell::new(Log::new()), schemes: UnsafeCell::new(Vec::new()), interrupts: UnsafeCell::new([0; 256]), } } pub fn on_irq(&self, irq: u8) { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { scheme.on_irq(irq); } } pub fn open(&self, url: Url, flags: usize) -> Result<Box<Resource>> { let url_scheme = url.scheme(); if url_scheme.is_empty() { let url_path = url.reference(); if url_path.trim_matches('/').is_empty() { let mut list = String::new(); for scheme in unsafe { &mut *self.schemes.get() }.iter() { let scheme_str = scheme.scheme(); if !scheme_str.is_empty() { if !list.is_empty() { list = list + "\n" + scheme_str; } else { list = scheme_str.to_string(); } } } Ok(box VecResource::new(":".to_string(), list.into_bytes())) } else if flags & O_CREAT == O_CREAT { for scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_path { return Err(Error::new(EEXIST)); } } match Scheme::new(url_path) { Ok((scheme, server)) => { unsafe { &mut *self.schemes.get() }.push(scheme); Ok(server) }, Err(err) => Err(err) } } else { Err(Error::new(ENOENT))
random
[ { "content": "pub trait Disk {\n\n fn name(&self) -> String;\n\n fn on_irq(&mut self, irq: u8);\n\n fn size(&self) -> u64;\n\n fn read(&mut self, block: u64, buffer: &mut [u8]) -> Result<usize>;\n\n fn write(&mut self, block: u64, buffer: &[u8]) -> Result<usize>;\n\n}\n", "file_path": "kernel...
Rust
src/lib.rs
antoyo/password-store-rs
b2615c12fec5d8957798fa8a745bdec1f1274719
/* * Copyright (c) 2016-2020 Boucher, Antoni <bouanto@zoho.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #[macro_use] extern crate json; mod chomp; use std::error; use std::ffi::OsStr; use std::fmt::{self, Display, Formatter}; use std::io::{self, Write}; use std::process::{Command, Stdio}; use std::str::{self, Utf8Error}; use std::string; use json::JsonValue; use Error::*; use chomp::Chomp; macro_rules! 
validate_path { ($path:expr) => { if $path.trim().is_empty() { return Err(InvalidInput); } }; } const MSG_SIZE: usize = 4; #[derive(Debug)] pub enum Error { FromUtf8(string::FromUtf8Error), Json(json::Error), Io(io::Error), InvalidInput, InvalidOutput, Pass(String), Utf8(Utf8Error), } impl From<json::Error> for Error { fn from(error: json::Error) -> Self { Json(error) } } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Io(error) } } impl From<Utf8Error> for Error { fn from(error: Utf8Error) -> Self { Utf8(error) } } impl From<string::FromUtf8Error> for Error { fn from(error: string::FromUtf8Error) -> Self { FromUtf8(error) } } impl Display for Error { fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { let string = match *self { FromUtf8(ref error) => error.to_string(), Json(ref error) => error.to_string(), Io(ref error) => error.to_string(), InvalidInput => "invalid input".to_string(), InvalidOutput => "invalid output".to_string(), Pass(ref error) => error.clone(), Utf8(ref error) => error.to_string(), }; write!(formatter, "{}", string) } } impl error::Error for Error { fn description(&self) -> &str { match *self { FromUtf8(ref error) => error.description(), Json(ref error) => error.description(), Io(ref error) => error.description(), InvalidInput => "invalid input", InvalidOutput => "invalid output", Pass(ref error) => error, Utf8(ref error) => error.description(), } } } pub type Result<T> = std::result::Result<T, Error>; pub struct PasswordStore; impl PasswordStore { pub fn get(path: &str) -> Result<(String, String)> { validate_path!(path); let mut response = gopass_ipc(object! 
{ "type" => "getLogin", "entry" => path })?; if let (Some(mut username), Some(password)) = (response["username"].take_string(), response["password"].take_string()) { if username.is_empty() { username = path.to_string(); } Ok((username, password)) } else { Err(InvalidOutput) } } pub fn get_usernames(path: &str) -> Result<Vec<String>> { validate_path!(path); let response = gopass_ipc(object! { "type" => "query", "query" => path })?; let mut result = vec![]; match response { JsonValue::Array(usernames) => { for username in usernames { let username = match username.as_str() { Some(username) => username, None => return Err(InvalidOutput), }; let index = username.rfind('/').map(|index| index + 1).unwrap_or(0); result.push(username[index..].to_string()); } }, _ => return Err(InvalidOutput), } Ok(result) } pub fn generate(path: &str, use_symbols: bool, length: i32) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! { "type" => "create", "entry_name" => path, "password" => "", "generate" => true, "length" => length, "use_symbols" => use_symbols })?; if response["username"].as_str().is_none() { return Err(InvalidOutput); } Ok(()) } pub fn insert(path: &str, password: &str) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! 
{ "type" => "create", "entry_name" => path, "password" => password })?; if let Some(inserted_password) = response["password"].as_str() { if password != inserted_password { return Err(InvalidOutput); } } Ok(()) } pub fn remove(path: &str) -> Result<()> { validate_path!(path); exec_pass("rm", &["-f", path])?; Ok(()) } } fn exec_pass<S: AsRef<OsStr>>(command: &str, args: &[S]) -> Result<String> { let mut process = Command::new("gopass"); if !command.trim().is_empty() { process.arg(command); } let child = process.args(args) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { Ok(String::from_utf8(output.stdout)?) } } fn gopass_ipc(json_query: JsonValue) -> Result<JsonValue> { let mut process = Command::new("gopass-jsonapi"); let mut child = process.args(&["listen"]) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; if let Some(stdin) = child.stdin.as_mut() { let json_string = json_query.dump(); stdin.write_all(&i32_to_bytes(json_string.len() as i32))?; write!(stdin, "{}", json_string)?; } let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { json::parse(str::from_utf8(&output.stdout[MSG_SIZE..])?) .map_err(Into::into) } } fn i32_to_bytes(num: i32) -> Vec<u8> { vec![ (num & 0xFF) as u8, ((num >> 8) & 0xFF) as u8, ((num >> 16) & 0xFF) as u8, ((num >> 24) & 0xFF) as u8, ] }
/* * Copyright (c) 2016-2020 Boucher, Antoni <bouanto@zoho.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #[macro_use] extern crate json; mod chomp; use std::error; use std::ffi::OsStr; use std::fmt::{self, Display, Formatter}; use std::io::{self, Write}; use std::process::{Command, Stdio}; use std::str::{self, Utf8Error}; use std::string; use json::JsonValue; use Error::*; use chomp::Chomp; macro_rules! 
validate_path { ($path:expr) => { if $path.trim().is_empty() { return Err(InvalidInput); } }; } const MSG_SIZE: usize = 4; #[derive(Debug)] pub enum Error { FromUtf8(string::FromUtf8Error), Json(json::Error), Io(io::Error), InvalidInput, InvalidOutput, Pass(String), Utf8(Utf8Error), } impl From<json::Error> for Error { fn from(error: json::Error) -> Self { Json(error) } } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Io(error) } } impl From<Utf8Error> for Error { fn from(error: Utf8Error) -> Self { Utf8(error) } } impl From<string::FromUtf8Error> for Error { fn from(error: string::FromUtf8Error) -> Self { FromUtf8(error) } } impl Display for Error { fn fmt(&self, fo
=> "invalid output".to_string(), Pass(ref error) => error.clone(), Utf8(ref error) => error.to_string(), }; write!(formatter, "{}", string) } } impl error::Error for Error { fn description(&self) -> &str { match *self { FromUtf8(ref error) => error.description(), Json(ref error) => error.description(), Io(ref error) => error.description(), InvalidInput => "invalid input", InvalidOutput => "invalid output", Pass(ref error) => error, Utf8(ref error) => error.description(), } } } pub type Result<T> = std::result::Result<T, Error>; pub struct PasswordStore; impl PasswordStore { pub fn get(path: &str) -> Result<(String, String)> { validate_path!(path); let mut response = gopass_ipc(object! { "type" => "getLogin", "entry" => path })?; if let (Some(mut username), Some(password)) = (response["username"].take_string(), response["password"].take_string()) { if username.is_empty() { username = path.to_string(); } Ok((username, password)) } else { Err(InvalidOutput) } } pub fn get_usernames(path: &str) -> Result<Vec<String>> { validate_path!(path); let response = gopass_ipc(object! { "type" => "query", "query" => path })?; let mut result = vec![]; match response { JsonValue::Array(usernames) => { for username in usernames { let username = match username.as_str() { Some(username) => username, None => return Err(InvalidOutput), }; let index = username.rfind('/').map(|index| index + 1).unwrap_or(0); result.push(username[index..].to_string()); } }, _ => return Err(InvalidOutput), } Ok(result) } pub fn generate(path: &str, use_symbols: bool, length: i32) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! { "type" => "create", "entry_name" => path, "password" => "", "generate" => true, "length" => length, "use_symbols" => use_symbols })?; if response["username"].as_str().is_none() { return Err(InvalidOutput); } Ok(()) } pub fn insert(path: &str, password: &str) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! 
{ "type" => "create", "entry_name" => path, "password" => password })?; if let Some(inserted_password) = response["password"].as_str() { if password != inserted_password { return Err(InvalidOutput); } } Ok(()) } pub fn remove(path: &str) -> Result<()> { validate_path!(path); exec_pass("rm", &["-f", path])?; Ok(()) } } fn exec_pass<S: AsRef<OsStr>>(command: &str, args: &[S]) -> Result<String> { let mut process = Command::new("gopass"); if !command.trim().is_empty() { process.arg(command); } let child = process.args(args) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { Ok(String::from_utf8(output.stdout)?) } } fn gopass_ipc(json_query: JsonValue) -> Result<JsonValue> { let mut process = Command::new("gopass-jsonapi"); let mut child = process.args(&["listen"]) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; if let Some(stdin) = child.stdin.as_mut() { let json_string = json_query.dump(); stdin.write_all(&i32_to_bytes(json_string.len() as i32))?; write!(stdin, "{}", json_string)?; } let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { json::parse(str::from_utf8(&output.stdout[MSG_SIZE..])?) .map_err(Into::into) } } fn i32_to_bytes(num: i32) -> Vec<u8> { vec![ (num & 0xFF) as u8, ((num >> 8) & 0xFF) as u8, ((num >> 16) & 0xFF) as u8, ((num >> 24) & 0xFF) as u8, ] }
rmatter: &mut Formatter) -> fmt::Result { let string = match *self { FromUtf8(ref error) => error.to_string(), Json(ref error) => error.to_string(), Io(ref error) => error.to_string(), InvalidInput => "invalid input".to_string(), InvalidOutput
function_block-random_span
[ { "content": "pub trait Chomp {\n\n fn chomp(&mut self);\n\n}\n\n\n\nimpl Chomp for String {\n\n fn chomp(&mut self) {\n\n if self.chars().last() == Some('\\n') {\n\n self.pop();\n\n }\n\n }\n\n}\n", "file_path": "src/chomp.rs", "rank": 0, "score": 50258.44084005050...
Rust
gstreamer-app/src/app_src.rs
kad3nce/gstreamer-rs
fcc361f920c9c4d0926cc90de997a0293a21b0b1
use glib::translate::*; use glib_sys::{gboolean, gpointer}; use gst; use gst_app_sys; use std::cell::RefCell; use std::mem; use std::ptr; use AppSrc; #[allow(clippy::type_complexity)] pub struct AppSrcCallbacks { need_data: Option<RefCell<Box<dyn FnMut(&AppSrc, u32) + Send + 'static>>>, enough_data: Option<Box<dyn Fn(&AppSrc) + Send + Sync + 'static>>, seek_data: Option<Box<dyn Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>>, callbacks: gst_app_sys::GstAppSrcCallbacks, } unsafe impl Send for AppSrcCallbacks {} unsafe impl Sync for AppSrcCallbacks {} impl AppSrcCallbacks { #[allow(clippy::new_ret_no_self)] pub fn new() -> AppSrcCallbacksBuilder { skip_assert_initialized!(); AppSrcCallbacksBuilder { need_data: None, enough_data: None, seek_data: None, } } } #[allow(clippy::type_complexity)] pub struct AppSrcCallbacksBuilder { need_data: Option<RefCell<Box<dyn FnMut(&AppSrc, u32) + Send + 'static>>>, enough_data: Option<Box<dyn Fn(&AppSrc) + Send + Sync + 'static>>, seek_data: Option<Box<dyn Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>>, } impl AppSrcCallbacksBuilder { pub fn need_data<F: FnMut(&AppSrc, u32) + Send + 'static>(self, need_data: F) -> Self { Self { need_data: Some(RefCell::new(Box::new(need_data))), ..self } } pub fn enough_data<F: Fn(&AppSrc) + Send + Sync + 'static>(self, enough_data: F) -> Self { Self { enough_data: Some(Box::new(enough_data)), ..self } } pub fn seek_data<F: Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>( self, seek_data: F, ) -> Self { Self { seek_data: Some(Box::new(seek_data)), ..self } } pub fn build(self) -> AppSrcCallbacks { let have_need_data = self.need_data.is_some(); let have_enough_data = self.enough_data.is_some(); let have_seek_data = self.seek_data.is_some(); AppSrcCallbacks { need_data: self.need_data, enough_data: self.enough_data, seek_data: self.seek_data, callbacks: gst_app_sys::GstAppSrcCallbacks { need_data: if have_need_data { Some(trampoline_need_data) } else { None }, enough_data: if have_enough_data 
{ Some(trampoline_enough_data) } else { None }, seek_data: if have_seek_data { Some(trampoline_seek_data) } else { None }, _gst_reserved: [ ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ], }, } } } unsafe extern "C" fn trampoline_need_data( appsrc: *mut gst_app_sys::GstAppSrc, length: u32, callbacks: gpointer, ) { let callbacks = &*(callbacks as *const AppSrcCallbacks); if let Some(ref need_data) = callbacks.need_data { (&mut *need_data.borrow_mut())(&from_glib_borrow(appsrc), length); } } unsafe extern "C" fn trampoline_enough_data( appsrc: *mut gst_app_sys::GstAppSrc, callbacks: gpointer, ) { let callbacks = &*(callbacks as *const AppSrcCallbacks); if let Some(ref enough_data) = callbacks.enough_data { (*enough_data)(&from_glib_borrow(appsrc)); } } unsafe extern "C" fn trampoline_seek_data( appsrc: *mut gst_app_sys::GstAppSrc, offset: u64, callbacks: gpointer, ) -> gboolean { let callbacks = &*(callbacks as *const AppSrcCallbacks); let ret = if let Some(ref seek_data) = callbacks.seek_data { (*seek_data)(&from_glib_borrow(appsrc), offset) } else { false }; ret.to_glib() } unsafe extern "C" fn destroy_callbacks(ptr: gpointer) { Box::<AppSrcCallbacks>::from_raw(ptr as *mut _); } impl AppSrc { pub fn end_of_stream(&self) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_end_of_stream( self.to_glib_none().0, )) }; ret.into_result() } pub fn push_buffer(&self, buffer: gst::Buffer) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_push_buffer( self.to_glib_none().0, buffer.into_ptr(), )) }; ret.into_result() } #[cfg(any(feature = "v1_14", feature = "dox"))] pub fn push_buffer_list( &self, list: gst::BufferList, ) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_push_buffer_list( self.to_glib_none().0, list.into_ptr(), )) }; ret.into_result() } pub 
fn push_sample(&self, sample: &gst::Sample) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_push_sample( self.to_glib_none().0, sample.to_glib_none().0, )) }; ret.into_result() } pub fn set_callbacks(&self, callbacks: AppSrcCallbacks) { unsafe { gst_app_sys::gst_app_src_set_callbacks( self.to_glib_none().0, mut_override(&callbacks.callbacks), Box::into_raw(Box::new(callbacks)) as *mut _, Some(destroy_callbacks), ); } } pub fn set_latency(&self, min: gst::ClockTime, max: gst::ClockTime) { unsafe { gst_app_sys::gst_app_src_set_latency( self.to_glib_none().0, min.to_glib(), max.to_glib(), ); } } pub fn get_latency(&self) -> (gst::ClockTime, gst::ClockTime) { unsafe { let mut min = mem::MaybeUninit::uninit(); let mut max = mem::MaybeUninit::uninit(); gst_app_sys::gst_app_src_get_latency( self.to_glib_none().0, min.as_mut_ptr(), max.as_mut_ptr(), ); (from_glib(min.assume_init()), from_glib(max.assume_init())) } } }
use glib::translate::*; use glib_sys::{gboolean, gpointer}; use gst; use gst_app_sys; use std::cell::RefCell; use std::mem; use std::ptr; use AppSrc; #[allow(clippy::type_complexity)] pub struct AppSrcCallbacks { need_data: Option<RefCell<Box<dyn FnMut(&AppSrc, u32) + Send + 'static>>>, enough_data: Option<Box<dyn Fn(&AppSrc) + Send + Sync + 'static>>, seek_data: Option<Box<dyn Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>>, callbacks: gst_app_sys::GstAppSrcCallbacks, } unsafe impl Send for AppSrcCallbacks {} unsafe impl Sync for AppSrcCallbacks {} impl AppSrcCallbacks { #[allow(clippy::new_ret_no_self)] pub fn new() -> AppSrcCallbacksBuilder { skip_assert_initialized!(); AppSrcCallbacksBuilder { need_data: None, enough_data: None, seek_data: None, } } } #[allow(clippy::type_complexity)] pub struct AppSrcCallbacksBuilder { need_data: Option<RefCell<Box<dyn FnMut(&AppSrc, u32) + Send + 'static>>>, enough_data: Option<Box<dyn Fn(&AppSrc) + Send + Sync + 'static>>, seek_data: Option<Box<dyn Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>>, } impl AppSrcCallbacksBuilder { pub fn need_data<F: FnMut(&AppSrc, u32) + Send + 'static>(self, need_data: F) -> Self { Self { need_data: Some(RefCell::new(Box::new(need_data))), ..self } }
pub fn seek_data<F: Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>( self, seek_data: F, ) -> Self { Self { seek_data: Some(Box::new(seek_data)), ..self } } pub fn build(self) -> AppSrcCallbacks { let have_need_data = self.need_data.is_some(); let have_enough_data = self.enough_data.is_some(); let have_seek_data = self.seek_data.is_some(); AppSrcCallbacks { need_data: self.need_data, enough_data: self.enough_data, seek_data: self.seek_data, callbacks: gst_app_sys::GstAppSrcCallbacks { need_data: if have_need_data { Some(trampoline_need_data) } else { None }, enough_data: if have_enough_data { Some(trampoline_enough_data) } else { None }, seek_data: if have_seek_data { Some(trampoline_seek_data) } else { None }, _gst_reserved: [ ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ], }, } } } unsafe extern "C" fn trampoline_need_data( appsrc: *mut gst_app_sys::GstAppSrc, length: u32, callbacks: gpointer, ) { let callbacks = &*(callbacks as *const AppSrcCallbacks); if let Some(ref need_data) = callbacks.need_data { (&mut *need_data.borrow_mut())(&from_glib_borrow(appsrc), length); } } unsafe extern "C" fn trampoline_enough_data( appsrc: *mut gst_app_sys::GstAppSrc, callbacks: gpointer, ) { let callbacks = &*(callbacks as *const AppSrcCallbacks); if let Some(ref enough_data) = callbacks.enough_data { (*enough_data)(&from_glib_borrow(appsrc)); } } unsafe extern "C" fn trampoline_seek_data( appsrc: *mut gst_app_sys::GstAppSrc, offset: u64, callbacks: gpointer, ) -> gboolean { let callbacks = &*(callbacks as *const AppSrcCallbacks); let ret = if let Some(ref seek_data) = callbacks.seek_data { (*seek_data)(&from_glib_borrow(appsrc), offset) } else { false }; ret.to_glib() } unsafe extern "C" fn destroy_callbacks(ptr: gpointer) { Box::<AppSrcCallbacks>::from_raw(ptr as *mut _); } impl AppSrc { pub fn end_of_stream(&self) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_end_of_stream( 
self.to_glib_none().0, )) }; ret.into_result() } pub fn push_buffer(&self, buffer: gst::Buffer) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_push_buffer( self.to_glib_none().0, buffer.into_ptr(), )) }; ret.into_result() } #[cfg(any(feature = "v1_14", feature = "dox"))] pub fn push_buffer_list( &self, list: gst::BufferList, ) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_push_buffer_list( self.to_glib_none().0, list.into_ptr(), )) }; ret.into_result() } pub fn push_sample(&self, sample: &gst::Sample) -> Result<gst::FlowSuccess, gst::FlowError> { let ret: gst::FlowReturn = unsafe { from_glib(gst_app_sys::gst_app_src_push_sample( self.to_glib_none().0, sample.to_glib_none().0, )) }; ret.into_result() } pub fn set_callbacks(&self, callbacks: AppSrcCallbacks) { unsafe { gst_app_sys::gst_app_src_set_callbacks( self.to_glib_none().0, mut_override(&callbacks.callbacks), Box::into_raw(Box::new(callbacks)) as *mut _, Some(destroy_callbacks), ); } } pub fn set_latency(&self, min: gst::ClockTime, max: gst::ClockTime) { unsafe { gst_app_sys::gst_app_src_set_latency( self.to_glib_none().0, min.to_glib(), max.to_glib(), ); } } pub fn get_latency(&self) -> (gst::ClockTime, gst::ClockTime) { unsafe { let mut min = mem::MaybeUninit::uninit(); let mut max = mem::MaybeUninit::uninit(); gst_app_sys::gst_app_src_get_latency( self.to_glib_none().0, min.as_mut_ptr(), max.as_mut_ptr(), ); (from_glib(min.assume_init()), from_glib(max.assume_init())) } } }
pub fn enough_data<F: Fn(&AppSrc) + Send + Sync + 'static>(self, enough_data: F) -> Self { Self { enough_data: Some(Box::new(enough_data)), ..self } }
function_block-full_function
[ { "content": "fn into_raw_pad_task<F: FnMut() + Send + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nunsafe extern \"C\" fn destroy_closure_pad_task<F>(ptr: gpointer) {\...
Rust
tests/sweep.rs
hellow554/flo_curves
8807b446271f0d2212d3f27d86741beeab5abebb
use flo_curves::geo::{ sweep_against, sweep_self, BoundingBox, Bounds, Coord2, Coordinate, Coordinate2D, Coordinate3D, }; use rand::prelude::*; use std::cmp::Ordering; #[test] fn sweep_self_single_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_self_double_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_self_triple_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 3); } #[test] fn sweep_self_quad_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 6); } #[test] fn sweep_against_single_overlap() { let mut 
bounds1 = vec![Bounds::from_min_max( Coord2(100.0, 200.0), Coord2(200.0, 300.0), )]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_against_double_overlap_1() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_against_double_overlap_2() { let mut bounds1 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_against_quad_overlap() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), 
Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 4); } #[test] fn sweep_self_1000_random() { let mut rng = StdRng::from_seed([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, ]); let mut bounds = (0..1000) .into_iter() .map(|_| { let x = rng.gen::<f64>() * 900.0; let y = rng.gen::<f64>() * 900.0; let w = rng.gen::<f64>() * 400.0; let h = rng.gen::<f64>() * 400.0; Bounds::from_min_max(Coord2(x, y), Coord2(x + w, y + h)) }) .collect::<Vec<_>>(); bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()).collect::<Vec<_>>(); let mut slow_collisions = vec![]; for i1 in 0..bounds.len() { for i2 in 0..i1 { if i1 == i2 { continue; } if bounds[i1].overlaps(&bounds[i2]) { slow_collisions.push((&bounds[i1], &bounds[i2])); } } } assert!(collisions.len() == slow_collisions.len()); }
use flo_curves::geo::{ sweep_against, sweep_self, BoundingBox, Bounds, Coord2, Coordinate, Coordinate2D, Coordinate3D, }; use rand::prelude::*; use std::cmp::Ordering; #[test] fn sweep_self_single_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_self_double_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_self_triple_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 3); } #[test] fn sweep_self_quad_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 6); } #[test] fn sweep_against_single_overlap() { let mut 
bounds1 = vec![Bounds::from_min_max( Coord2(100.0, 200.0), Coord2(200.0, 300.0), )]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_against_double_overlap_1() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_against_double_overlap_2() { let mut bounds1 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test]
#[test] fn sweep_self_1000_random() { let mut rng = StdRng::from_seed([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, ]); let mut bounds = (0..1000) .into_iter() .map(|_| { let x = rng.gen::<f64>() * 900.0; let y = rng.gen::<f64>() * 900.0; let w = rng.gen::<f64>() * 400.0; let h = rng.gen::<f64>() * 400.0; Bounds::from_min_max(Coord2(x, y), Coord2(x + w, y + h)) }) .collect::<Vec<_>>(); bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()).collect::<Vec<_>>(); let mut slow_collisions = vec![]; for i1 in 0..bounds.len() { for i2 in 0..i1 { if i1 == i2 { continue; } if bounds[i1].overlaps(&bounds[i2]) { slow_collisions.push((&bounds[i1], &bounds[i2])); } } } assert!(collisions.len() == slow_collisions.len()); }
fn sweep_against_quad_overlap() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 4); }
function_block-full_function
[ { "content": "fn detect_collisions(mut graph_path: GraphPath<Coord2, ()>) {\n\n graph_path.self_collide(0.1);\n\n}\n\n\n", "file_path": "benches/sweep.rs", "rank": 0, "score": 169984.23598104744 }, { "content": "///\n\n/// Finds the upper and lower points in a cubic curve's bounding box\n...
Rust
crates/examples/src/readobj/mod.rs
sunfishcode/object
aaf312e51fc6e4511e19a32c05d4b2ddf248b5b6
use std::io::Write; use std::{fmt, str}; use object::read::archive::ArchiveFile; use object::read::macho::{FatArch, FatHeader}; use object::Endianness; pub fn print(w: &'_ mut dyn Write, e: &'_ mut dyn Write, file: &[u8]) { let mut printer = Printer::new(w, e); print_object(&mut printer, &*file); } struct Printer<'a> { w: &'a mut dyn Write, e: &'a mut dyn Write, indent: usize, } impl<'a> Printer<'a> { fn new(w: &'a mut dyn Write, e: &'a mut dyn Write) -> Self { Self { w, e, indent: 0 } } fn w(&mut self) -> &mut dyn Write { self.w } fn blank(&mut self) { writeln!(self.w).unwrap(); } fn print_indent(&mut self) { if self.indent != 0 { write!(self.w, "{:-1$}", " ", self.indent * 4).unwrap(); } } fn print_string(&mut self, s: &[u8]) { if let Ok(s) = str::from_utf8(s) { write!(self.w, "\"{}\"", s).unwrap(); } else { write!(self.w, "{:X?}", s).unwrap(); } } fn indent<F: FnOnce(&mut Self)>(&mut self, f: F) { self.indent += 1; f(self); self.indent -= 1; } fn group<F: FnOnce(&mut Self)>(&mut self, name: &str, f: F) { self.print_indent(); writeln!(self.w, "{} {{", name).unwrap(); self.indent(f); self.print_indent(); writeln!(self.w, "}}").unwrap(); } fn field_name(&mut self, name: &str) { self.print_indent(); if !name.is_empty() { write!(self.w, "{}: ", name).unwrap(); } } fn field<T: fmt::Display>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "{}", value).unwrap(); } fn field_hex<T: fmt::UpperHex>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "0x{:X}", value).unwrap(); } fn field_bytes(&mut self, name: &str, value: &[u8]) { self.field_name(name); writeln!(self.w, "{:X?}", value).unwrap(); } fn field_string_option<T: fmt::UpperHex>(&mut self, name: &str, value: T, s: Option<&[u8]>) { if let Some(s) = s { self.field_name(name); self.print_string(s); writeln!(self.w, " (0x{:X})", value).unwrap(); } else { self.field_hex(name, value); } } fn field_string<T: fmt::UpperHex, E: fmt::Display>( &mut self, name: &str, value: T, 
s: Result<&[u8], E>, ) { let s = s.print_err(self); self.field_string_option(name, value, s); } fn field_inline_string(&mut self, name: &str, s: &[u8]) { self.field_name(name); self.print_string(s); writeln!(self.w).unwrap(); } fn field_enum<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, flags: &[Flag<T>]) { for flag in flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } self.field_hex(name, value); } fn field_enums<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, enums: &[&[Flag<T>]]) { for flags in enums { for flag in *flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } } self.field_hex(name, value); } fn flags<T: Into<u64>, U: Copy + Into<u64>>(&mut self, value: T, mask: U, flags: &[Flag<U>]) { let value = value.into(); let mask = mask.into(); self.indent(|p| { if mask != 0 { for flag in flags { if value & mask == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); return; } } p.print_indent(); writeln!(p.w, "<unknown> (0x{:X})", value & mask).unwrap(); } else { for flag in flags { if value & flag.value.into() == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); } } } }); } } struct Flag<T> { value: T, name: &'static str, } macro_rules! flags { ($($name:ident),+ $(,)?) 
=> ( [ $(Flag { value: $name, name: stringify!($name), }),+ ] ) } fn print_object(p: &mut Printer<'_>, data: &[u8]) { let kind = match object::FileKind::parse(data) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::Archive => print_archive(p, data), object::FileKind::Coff => pe::print_coff(p, data), object::FileKind::DyldCache => macho::print_dyld_cache(p, data), object::FileKind::Elf32 => elf::print_elf32(p, data), object::FileKind::Elf64 => elf::print_elf64(p, data), object::FileKind::MachO32 => macho::print_macho32(p, data, 0), object::FileKind::MachO64 => macho::print_macho64(p, data, 0), object::FileKind::MachOFat32 => macho::print_macho_fat32(p, data), object::FileKind::MachOFat64 => macho::print_macho_fat64(p, data), object::FileKind::Pe32 => pe::print_pe32(p, data), object::FileKind::Pe64 => pe::print_pe64(p, data), _ => {} } } fn print_object_at(p: &mut Printer<'_>, data: &[u8], offset: u64) { let kind = match object::FileKind::parse_at(data, offset) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::MachO32 => macho::print_macho32(p, data, offset), object::FileKind::MachO64 => macho::print_macho64(p, data, offset), _ => {} } } fn print_archive(p: &mut Printer<'_>, data: &[u8]) { if let Some(archive) = ArchiveFile::parse(data).print_err(p) { p.field("Format", format!("Archive ({:?})", archive.kind())); for member in archive.members() { if let Some(member) = member.print_err(p) { p.blank(); p.field("Member", String::from_utf8_lossy(member.name())); if let Some(data) = member.data(data).print_err(p) { print_object(p, data); } } } } } trait PrintErr<T> { fn print_err(self, p: &mut Printer<'_>) -> Option<T>; } impl<T, E: fmt::Display> PrintErr<T> for Result<T, E> { fn print_err(self, p: &mut Printer<'_>) -> Option<T> { match self { Ok(val) => Some(val), Err(err) => { writeln!(p.e, "Error: {}", err).unwrap(); None } } } } 
mod elf; mod macho; mod pe;
use std::io::Write; use std::{fmt, str}; use object::read::archive::ArchiveFile; use object::read::macho::{FatArch, FatHeader}; use object::Endianness; pub fn print(w: &'_ mut dyn Write, e: &'_ mut dyn Write, file: &[u8]) { let mut printer = Printer::new(w, e); print_object(&mut printer, &*file); } struct Printer<'a> { w: &'a mut dyn Write, e: &'a mut dyn Write, indent: usize, } impl<'a> Printer<'a> { fn new(w: &'a mut dyn Write, e: &'a mut dyn Write) -> Self { Self { w, e, indent: 0 } } fn w(&mut self) -> &mut dyn Write { self.w } fn blank(&mut self) { writeln!(self.w).unwrap(); } fn print_indent(&mut self) { if self.indent != 0 { write!(self.w, "{:-1$}", " ", self.indent * 4).unwrap(); } } fn print_string(&mut self, s: &[u8]) { if let Ok(s) = str::from_utf8(s) { write!(self.w, "\"{}\"", s).unwrap(); } else { write!(self.w, "{:X?}", s).unwrap(); } } fn indent<F: FnOnce(&mut Self)>(&mut self, f: F) { self.indent += 1; f(self); self.indent -= 1; } fn group<F: FnOnce(&mut Self)>(&mut self, name: &str, f: F) { self.print_indent(); writeln!(self.w, "{} {{", name).unwrap(); self.indent(f); self.print_indent(); writeln!(self.w, "}}").unwrap(); }
fn field<T: fmt::Display>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "{}", value).unwrap(); } fn field_hex<T: fmt::UpperHex>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "0x{:X}", value).unwrap(); } fn field_bytes(&mut self, name: &str, value: &[u8]) { self.field_name(name); writeln!(self.w, "{:X?}", value).unwrap(); } fn field_string_option<T: fmt::UpperHex>(&mut self, name: &str, value: T, s: Option<&[u8]>) { if let Some(s) = s { self.field_name(name); self.print_string(s); writeln!(self.w, " (0x{:X})", value).unwrap(); } else { self.field_hex(name, value); } } fn field_string<T: fmt::UpperHex, E: fmt::Display>( &mut self, name: &str, value: T, s: Result<&[u8], E>, ) { let s = s.print_err(self); self.field_string_option(name, value, s); } fn field_inline_string(&mut self, name: &str, s: &[u8]) { self.field_name(name); self.print_string(s); writeln!(self.w).unwrap(); } fn field_enum<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, flags: &[Flag<T>]) { for flag in flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } self.field_hex(name, value); } fn field_enums<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, enums: &[&[Flag<T>]]) { for flags in enums { for flag in *flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } } self.field_hex(name, value); } fn flags<T: Into<u64>, U: Copy + Into<u64>>(&mut self, value: T, mask: U, flags: &[Flag<U>]) { let value = value.into(); let mask = mask.into(); self.indent(|p| { if mask != 0 { for flag in flags { if value & mask == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); return; } } p.print_indent(); writeln!(p.w, "<unknown> (0x{:X})", value & mask).unwrap(); } else { for flag in flags { if value & flag.value.into() == flag.value.into() { 
p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); } } } }); } } struct Flag<T> { value: T, name: &'static str, } macro_rules! flags { ($($name:ident),+ $(,)?) => ( [ $(Flag { value: $name, name: stringify!($name), }),+ ] ) } fn print_object(p: &mut Printer<'_>, data: &[u8]) { let kind = match object::FileKind::parse(data) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::Archive => print_archive(p, data), object::FileKind::Coff => pe::print_coff(p, data), object::FileKind::DyldCache => macho::print_dyld_cache(p, data), object::FileKind::Elf32 => elf::print_elf32(p, data), object::FileKind::Elf64 => elf::print_elf64(p, data), object::FileKind::MachO32 => macho::print_macho32(p, data, 0), object::FileKind::MachO64 => macho::print_macho64(p, data, 0), object::FileKind::MachOFat32 => macho::print_macho_fat32(p, data), object::FileKind::MachOFat64 => macho::print_macho_fat64(p, data), object::FileKind::Pe32 => pe::print_pe32(p, data), object::FileKind::Pe64 => pe::print_pe64(p, data), _ => {} } } fn print_object_at(p: &mut Printer<'_>, data: &[u8], offset: u64) { let kind = match object::FileKind::parse_at(data, offset) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::MachO32 => macho::print_macho32(p, data, offset), object::FileKind::MachO64 => macho::print_macho64(p, data, offset), _ => {} } } fn print_archive(p: &mut Printer<'_>, data: &[u8]) { if let Some(archive) = ArchiveFile::parse(data).print_err(p) { p.field("Format", format!("Archive ({:?})", archive.kind())); for member in archive.members() { if let Some(member) = member.print_err(p) { p.blank(); p.field("Member", String::from_utf8_lossy(member.name())); if let Some(data) = member.data(data).print_err(p) { print_object(p, data); } } } } } trait PrintErr<T> { fn print_err(self, p: &mut Printer<'_>) -> Option<T>; } impl<T, E: 
fmt::Display> PrintErr<T> for Result<T, E> { fn print_err(self, p: &mut Printer<'_>) -> Option<T> { match self { Ok(val) => Some(val), Err(err) => { writeln!(p.e, "Error: {}", err).unwrap(); None } } } } mod elf; mod macho; mod pe;
fn field_name(&mut self, name: &str) { self.print_indent(); if !name.is_empty() { write!(self.w, "{}: ", name).unwrap(); } }
function_block-full_function
[ { "content": "fn dump_parsed_object<W: Write, E: Write>(w: &mut W, e: &mut E, file: &object::File) -> Result<()> {\n\n writeln!(\n\n w,\n\n \"Format: {:?} {:?}-endian {}-bit\",\n\n file.format(),\n\n file.endianness(),\n\n if file.is_64() { \"64\" } else { \"32\" }\n\n )...
Rust
src/move_table.rs
lePerdu/twisted
6f3330fbb594beb9f06d8bfeb307cb60ca8035b9
use std::marker::PhantomData; use crate::coord::{CompositeCoord, Coord}; use crate::puzzle::{PuzzleMove, PuzzlePerm}; use crate::symmetry::{SymCoord, Symmetry}; use crate::util::{EnumCount, IntoEnumIterator}; pub trait MoveTable { type Puzzle: PuzzlePerm; type Coord: Coord<Self::Puzzle>; type Move: PuzzleMove<Puzzle = Self::Puzzle>; fn get_move(&self, coord: Self::Coord, mov: Self::Move) -> Self::Coord; } pub struct BasicMoveTable<C, M> { table: Box<[C]>, _moves: PhantomData<M>, } impl<C, M> BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { let perm = coord.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); table.push(C::from_perm(&new_perm)); } } Self { table: table.into_boxed_slice(), _moves: PhantomData::default(), } } } impl<C, M> MoveTable for BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { self.table[M::COUNT * coord.index() + mov.index()] } } pub struct CompositeMoveTable<'a, C, AT, BT> { table_a: &'a AT, table_b: &'a BT, _coord: PhantomData<C>, } impl<'a, C, AT, BT> CompositeMoveTable<'a, C, AT, BT> { pub fn new(table_a: &'a AT, table_b: &'a BT) -> Self { CompositeMoveTable { table_a, table_b, _coord: PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { pub fn to_basic(&self) -> BasicMoveTable<C, M> { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { for mov in M::iter() { table.push(self.get_move(coord, mov)); } } BasicMoveTable { table: table.into_boxed_slice(), _moves: 
PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> MoveTable for CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { type Puzzle = P; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { let (a, b) = coord.into_coords(); C::from_coords(self.table_a.get_move(a, mov), self.table_b.get_move(b, mov)) } } pub struct SymMoveTable<C, M> { coord_table: Box<[C]>, move_table: Box<[M]>, } impl<C, M> SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let representants = C::representants(); let mut coord_table = Vec::with_capacity(representants.len() * M::COUNT); for representant in representants.iter() { let perm = representant.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); coord_table.push(C::from_perm(&new_perm)); } } let mut move_table = Vec::with_capacity(M::COUNT * C::Symmetry::COUNT); for mov in M::iter() { let perm = mov.permutation(); for sym in C::Symmetry::iter() { let transformed = perm.sequence(sym.permutation()); if let Some(transformed_move) = M::iter().find(|m| *m.permutation() == transformed) { move_table.push(transformed_move); } else { panic!("Transformed move coult not be found."); } } } Self { coord_table: coord_table.into_boxed_slice(), move_table: move_table.into_boxed_slice(), } } } impl<C, M> MoveTable for SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { } } /* TODO Move into cube-specific mod #[cfg(test)] pub(crate) mod test { use super::*; use std::fmt::Debug; use crate::coord::{CornerOrient7Coord, CornerPos7Coord}; use crate::cube::moves::UrfTurn; fn 
coordinates_correct_after_move<C: Coord + Debug, M: PuzzleMove>( table: &impl MoveTable<C, M>, ) { let mut perm = PuzzlePerm::default(); // Run through a series of moves and make sure the coordinates match up for turn in M::iter() { let orig_coord = C::from(&perm); perm += turn.permutation(); let table_coord = table.get_move(orig_coord, turn); let perm_coord = C::from(&perm); assert_eq!(table_coord, perm_coord); } } // Do the tests for each coordinate macro_rules! make_tests { ($name:ident, $coord:ty) => { pub mod $name { use super::*; lazy_static! { pub static ref TABLE: BasicMoveTable<$coord, UrfTurn> = BasicMoveTable::create(); } #[test] fn coordinates_correct_after_move() { super::coordinates_correct_after_move(&*TABLE); } } }; } make_tests!(corner_orient, CornerOrient7Coord); make_tests!(corner_pos, CornerPos7Coord); lazy_static! { pub static ref CORNER_MOVE_TABLE: CompositeMoveTable< 'static, CornerOrient7Coord, CornerPos7Coord, UrfTurn, BasicMoveTable<CornerOrient7Coord, UrfTurn>, BasicMoveTable<CornerPos7Coord, UrfTurn>, > = CompositeMoveTable::new(&*corner_orient::TABLE, &*corner_pos::TABLE); } } */
use std::marker::PhantomData; use crate::coord::{CompositeCoord, Coord}; use crate::puzzle::{PuzzleMove, PuzzlePerm}; use crate::symmetry::{SymCoord, Symmetry}; use crate::util::{EnumCount, IntoEnumIterator}; pub trait MoveTable { type Puzzle: PuzzlePerm; type Coord: Coord<Self::Puzzle>; type Move: PuzzleMove<Puzzle = Self::Puzzle>; fn get_move(&self, coord: Self::Coord, mov: Self::Move) -> Self::Coord; } pub struct BasicMoveTable<C, M> { table: Box<[C]>, _moves: PhantomData<M>, } impl<C, M> BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { let perm = coord.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); table.push(C::from_perm(&new_perm)); } } Self { table: table.into_boxed_slice(), _moves: PhantomData::default(), } } } impl<C, M> MoveTable for BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { self.table[M::COUNT * coord.index() + mov.index()] } } pub struct CompositeMoveTable<'a, C, AT, BT> { table_a: &'a AT, table_b: &'a BT, _coord: PhantomData<C>, } impl<'a, C, AT, BT> CompositeMoveTable<'a, C, AT, BT> { pub fn new(table_a: &'a AT, table_b: &'a BT) -> Self { CompositeMoveTable { table_a, table_b, _coord: PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { pub fn to_basic(&self) -> BasicMoveTable<C, M> { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { for mov in M::iter() { table.push(self.get_move(coord, mov)); } } BasicMoveTable { table: table.into_boxed_slice(), _moves: 
PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> MoveTable for CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { type Puzzle = P; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { let (a, b) = coord.into_coords(); C::from_coords(self.table_a.get_move(a, mov), self.table_b.get_move(b, mov)) } } pub struct SymMoveTable<C, M> { coord_table: Box<[C]>, move_table: Box<[M]>, } impl<C, M> SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let representants = C::representants(); let mut coord_table = Vec::with_capacity(representants.len() * M::COUNT); for representant in representants.iter() { let perm = representant.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); coord_table.push(C::from_perm(&new_perm)); } } let mut move_table = Vec::with_capacity(M::COUNT * C::Symmetry::COUNT); for mov in M::iter() { let perm = mov.permutation(); for sym in C::Symmetry::iter() {
ble_coord = table.get_move(orig_coord, turn); let perm_coord = C::from(&perm); assert_eq!(table_coord, perm_coord); } } // Do the tests for each coordinate macro_rules! make_tests { ($name:ident, $coord:ty) => { pub mod $name { use super::*; lazy_static! { pub static ref TABLE: BasicMoveTable<$coord, UrfTurn> = BasicMoveTable::create(); } #[test] fn coordinates_correct_after_move() { super::coordinates_correct_after_move(&*TABLE); } } }; } make_tests!(corner_orient, CornerOrient7Coord); make_tests!(corner_pos, CornerPos7Coord); lazy_static! { pub static ref CORNER_MOVE_TABLE: CompositeMoveTable< 'static, CornerOrient7Coord, CornerPos7Coord, UrfTurn, BasicMoveTable<CornerOrient7Coord, UrfTurn>, BasicMoveTable<CornerPos7Coord, UrfTurn>, > = CompositeMoveTable::new(&*corner_orient::TABLE, &*corner_pos::TABLE); } } */
let transformed = perm.sequence(sym.permutation()); if let Some(transformed_move) = M::iter().find(|m| *m.permutation() == transformed) { move_table.push(transformed_move); } else { panic!("Transformed move coult not be found."); } } } Self { coord_table: coord_table.into_boxed_slice(), move_table: move_table.into_boxed_slice(), } } } impl<C, M> MoveTable for SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { } } /* TODO Move into cube-specific mod #[cfg(test)] pub(crate) mod test { use super::*; use std::fmt::Debug; use crate::coord::{CornerOrient7Coord, CornerPos7Coord}; use crate::cube::moves::UrfTurn; fn coordinates_correct_after_move<C: Coord + Debug, M: PuzzleMove>( table: &impl MoveTable<C, M>, ) { let mut perm = PuzzlePerm::default(); // Run through a series of moves and make sure the coordinates match up for turn in M::iter() { let orig_coord = C::from(&perm); perm += turn.permutation(); let ta
random
[ { "content": "/// Symmetry-reduced coordinate, constructed from a regular coordinate and a symmetry.\n\npub trait SymCoord<P: PuzzlePerm>: EnumIndex {\n\n type BaseCoord: Coord<P>;\n\n type Symmetry: Symmetry<Puzzle = P>;\n\n type EquivClass: EnumIndex;\n\n\n\n fn from_sym_and_class(sym: Self::Symme...
Rust
compiler/src/llvm.rs
kowaalczyk/instant
95ae0bcb58ec829828e68bc8b09016514c6e08f5
use instant_parser::ast; use crate::common::CompilationError; use std::collections::HashSet; use instant_parser::ast::Stmt; pub trait FormatLLVM { fn format_llvm(&self) -> String; } enum CompilationResult { Register { id: u32 }, Constant { val: i32 }, None, } impl FormatLLVM for CompilationResult { fn format_llvm(&self) -> String { match self { CompilationResult::Constant { val } => val.to_string(), CompilationResult::Register { id } => format!("%r{}", id), CompilationResult::None => String::from(""), } } } pub trait CompileLLVM { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String>, ) -> Result<CompiledCode, CompilationError>; } pub struct CompiledCode { instructions: Vec<String>, result: CompilationResult, } impl CompileLLVM for ast::Prog { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { let mut instructions: Vec<String> = vec![]; for stmt in self.stmts.iter() { let mut compiled_stmt = stmt.compile_llvm( available_reg, variables, )?; instructions.append(&mut compiled_stmt.instructions); } let compiled_program = CompiledCode { instructions, result: CompilationResult::None }; Ok(compiled_program) } } impl CompileLLVM for ast::Stmt { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { Stmt::Expr { expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; let print_instr = format!( "call void @printInt(i32 {})", compiled_expr.result.format_llvm(), ); compiled_expr.instructions.push(print_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, Stmt::Decl { var, expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; if !variables.contains(var) { let alloc_instr = format!( "%{}ptr = alloca i32", var ); compiled_expr.instructions.push(alloc_instr); variables.insert(var.clone()); } let store_instr = format!( 
"store i32 {}, i32* %{}ptr", compiled_expr.result.format_llvm(), var ); compiled_expr.instructions.push(store_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, } } } impl CompileLLVM for ast::Expr { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { ast::Expr::Binary { left, op, right } => { let mut compiled_instructions: Vec<String> = vec![]; let mut lhs = left.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut lhs.instructions); let mut rhs = right.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut rhs.instructions); let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = {} {}, {}", current_reg.format_llvm(), op.format_llvm(), lhs.result.format_llvm(), rhs.result.format_llvm(), ); compiled_instructions.push(current_instr); *available_reg += 1; let compiled_code = CompiledCode { instructions: compiled_instructions, result: current_reg }; Ok(compiled_code) }, ast::Expr::Number { val } => { let compiled_code = CompiledCode { instructions: vec![], result: CompilationResult::Constant { val: val.clone() } }; Ok(compiled_code) }, ast::Expr::Variable { var } => { if variables.contains(var) { let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = load i32, i32* %{}ptr", current_reg.format_llvm(), var ); *available_reg += 1; let compiled_code = CompiledCode { instructions: vec![current_instr], result: current_reg, }; Ok(compiled_code) } else { Err(CompilationError::UnidentifiedVariable { identifier: var.clone() }) } }, } } } impl FormatLLVM for ast::Opcode { fn format_llvm(&self) -> String { let op_str = match self { ast::Opcode::Add => {"add i32"}, ast::Opcode::Sub => {"sub i32"}, ast::Opcode::Mul => {"mul i32"}, ast::Opcode::Div => {"sdiv i32"}, }; String::from(op_str) } } pub fn 
compile_llvm(program: &ast::Prog) -> Result<Vec<String>, CompilationError> { let mut instructions = vec![ String::from("declare void @printInt(i32)"), String::from("define i32 @main() {"), ]; let mut available_reg = 0 as u32; let mut used_variables: HashSet<String> = HashSet::new(); let mut compilation_result = program.compile_llvm( &mut available_reg, &mut used_variables )?; instructions.append(&mut compilation_result.instructions); instructions.append(&mut vec![ String::from("ret i32 0"), String::from("}"), ]); Ok(instructions) }
use instant_parser::ast; use crate::common::CompilationError; use std::collections::HashSet; use instant_parser::ast::Stmt; pub trait FormatLLVM { fn format_llvm(&self) -> String; } enum CompilationResult { Register { id: u32 }, Constant { val: i32 }, None, } impl FormatLLVM for CompilationResult { fn format_llvm(&self) -> String { match self { CompilationResult::Constant { val } => val.to_string(), CompilationResult::Register { id } => format!("%r{}", id), CompilationResult::None => String::from(""), } } } pub trait CompileLLVM { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String>, ) -> Result<CompiledCode, CompilationError>; } pub struct CompiledCode { instructions: Vec<String>, result: CompilationResult, } impl CompileLLVM for ast::Prog {
} impl CompileLLVM for ast::Stmt { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { Stmt::Expr { expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; let print_instr = format!( "call void @printInt(i32 {})", compiled_expr.result.format_llvm(), ); compiled_expr.instructions.push(print_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, Stmt::Decl { var, expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; if !variables.contains(var) { let alloc_instr = format!( "%{}ptr = alloca i32", var ); compiled_expr.instructions.push(alloc_instr); variables.insert(var.clone()); } let store_instr = format!( "store i32 {}, i32* %{}ptr", compiled_expr.result.format_llvm(), var ); compiled_expr.instructions.push(store_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, } } } impl CompileLLVM for ast::Expr { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { ast::Expr::Binary { left, op, right } => { let mut compiled_instructions: Vec<String> = vec![]; let mut lhs = left.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut lhs.instructions); let mut rhs = right.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut rhs.instructions); let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = {} {}, {}", current_reg.format_llvm(), op.format_llvm(), lhs.result.format_llvm(), rhs.result.format_llvm(), ); compiled_instructions.push(current_instr); *available_reg += 1; let compiled_code = CompiledCode { instructions: compiled_instructions, result: current_reg }; Ok(compiled_code) }, ast::Expr::Number { val } => { let compiled_code = CompiledCode { instructions: vec![], result: CompilationResult::Constant { val: 
val.clone() } }; Ok(compiled_code) }, ast::Expr::Variable { var } => { if variables.contains(var) { let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = load i32, i32* %{}ptr", current_reg.format_llvm(), var ); *available_reg += 1; let compiled_code = CompiledCode { instructions: vec![current_instr], result: current_reg, }; Ok(compiled_code) } else { Err(CompilationError::UnidentifiedVariable { identifier: var.clone() }) } }, } } } impl FormatLLVM for ast::Opcode { fn format_llvm(&self) -> String { let op_str = match self { ast::Opcode::Add => {"add i32"}, ast::Opcode::Sub => {"sub i32"}, ast::Opcode::Mul => {"mul i32"}, ast::Opcode::Div => {"sdiv i32"}, }; String::from(op_str) } } pub fn compile_llvm(program: &ast::Prog) -> Result<Vec<String>, CompilationError> { let mut instructions = vec![ String::from("declare void @printInt(i32)"), String::from("define i32 @main() {"), ]; let mut available_reg = 0 as u32; let mut used_variables: HashSet<String> = HashSet::new(); let mut compilation_result = program.compile_llvm( &mut available_reg, &mut used_variables )?; instructions.append(&mut compilation_result.instructions); instructions.append(&mut vec![ String::from("ret i32 0"), String::from("}"), ]); Ok(instructions) }
fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { let mut instructions: Vec<String> = vec![]; for stmt in self.stmts.iter() { let mut compiled_stmt = stmt.compile_llvm( available_reg, variables, )?; instructions.append(&mut compiled_stmt.instructions); } let compiled_program = CompiledCode { instructions, result: CompilationResult::None }; Ok(compiled_program) }
function_block-full_function
[ { "content": "pub fn parse_arg() -> String {\n\n let args: Vec<String> = env::args().collect();\n\n match args.get(1) {\n\n Some(input_filename) => {\n\n String::from(input_filename)\n\n },\n\n None => {\n\n println!(\"Usage: {} {}\", &args[0], \"[input_filename]...
Rust
testsuite/cluster-test/src/experiments/client_compatibility_test.rs
chouette254/libra
1eaefa60d29e1df72ba6c4f9cf1867964821b586
#![forbid(unsafe_code)] use crate::{ cluster::Cluster, cluster_swarm::cluster_swarm_kube::CFG_SEED, experiments::{Context, Experiment, ExperimentParam}, instance::Instance, }; use async_trait::async_trait; use libra_logger::prelude::*; use libra_types::chain_id::ChainId; use std::{collections::HashSet, fmt, time::Duration}; use structopt::StructOpt; use tokio::time; #[derive(StructOpt, Debug)] pub struct ClientCompatiblityTestParams { #[structopt(long, help = "Image tag of old client to test")] pub old_image_tag: String, } pub struct ClientCompatibilityTest { old_image_tag: String, faucet_node: Instance, cli_node: Instance, } impl ExperimentParam for ClientCompatiblityTestParams { type E = ClientCompatibilityTest; fn build(self, cluster: &Cluster) -> Self::E { let (test_nodes, _) = cluster.split_n_fullnodes_random(2); let mut test_nodes = test_nodes.into_fullnode_instances(); let faucet_node = test_nodes.pop().expect("Requires at least one faucet node"); let cli_node = test_nodes.pop().expect("Requires at least one test node"); Self::E { old_image_tag: self.old_image_tag, faucet_node, cli_node, } } } #[async_trait] impl Experiment for ClientCompatibilityTest { fn affected_validators(&self) -> HashSet<String> { HashSet::new() } async fn run(&mut self, context: &mut Context<'_>) -> anyhow::Result<()> { context.report.report_text(format!( "Client compatibility test results for {} ==> {} (PR)", self.old_image_tag, context.current_tag )); let test_image = format!( "853397791086.dkr.ecr.us-west-2.amazonaws.com/libra_faucet:{}", self.old_image_tag ); let faucet_port: &str = "9999"; let num_validators = context.cluster.validator_instances().len(); let config_cmd = format!( "/opt/libra/bin/config-builder faucet -o /opt/libra/etc --chain-id {chain_id} -s {seed} -n {num_validators}; echo $?; cat /opt/libra/etc/waypoint.txt", chain_id=ChainId::test(), seed=CFG_SEED, num_validators=num_validators ); let env_cmd = format!( "CFG_CHAIN_ID={chain_id} AC_HOST={ac_host} 
AC_PORT={ac_port}", chain_id = ChainId::test(), ac_host = self.faucet_node.ip(), ac_port = self.faucet_node.ac_port() ); let run_cmd = format!("gunicorn --bind 0.0.0.0:{faucet_port} --access-logfile - --error-logfile - --log-level debug --pythonpath /opt/libra/bin server", faucet_port=faucet_port); let full_faucet_cmd = format!( "{config_cmd}; {env_cmd} {run_cmd}", config_cmd = config_cmd, env_cmd = env_cmd, run_cmd = run_cmd ); let msg = format!("1. Starting faucet on node {}", self.faucet_node); info!("{}", msg); context.report.report_text(msg); let faucet_job_name = self .faucet_node .spawn_job(&test_image, &full_faucet_cmd, "run-faucet") .await .map_err(|err| anyhow::format_err!("Failed to spawn faucet job: {}", err))?; info!( "Job {} started for node {}:{} faucet command: {}", faucet_job_name, self.faucet_node, self.faucet_node.peer_name(), full_faucet_cmd ); info!("Waiting for faucet job to spin up completely"); time::delay_for(Duration::from_secs(20)).await; let run_cli_cmd = format!( "/opt/libra/bin/cli --url {fn_url} --chain-id {chain_id} -f http://{faucet_host}:{faucet_port} --waypoint $(cat /opt/libra/etc/waypoint.txt)", fn_url = self.cli_node.json_rpc_url(), chain_id = ChainId::test(), faucet_host = self.faucet_node.ip(), faucet_port = faucet_port ); let mut build_cli_cmd = String::new(); let cli_cmd_file = "/opt/libra/etc/cmds.txt"; let cmds = include_str!("client_compatibility_cmds.txt"); for cmd in cmds.split('\n') { build_cli_cmd.push_str(&format!( "echo {cmd} >> {cmd_file};", cmd = cmd, cmd_file = cli_cmd_file )); } let full_cli_cmd = format!( "{config_cmd}; {build_cli_cmd} {run_cli_cmd} < {cli_cmd_file} && echo SUCCESS", config_cmd = config_cmd, build_cli_cmd = build_cli_cmd, run_cli_cmd = run_cli_cmd, cli_cmd_file = cli_cmd_file ); let msg = format!("2. 
Running CLI mint from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); info!( "Job starting for node {}:{} CLI command: {}", self.cli_node, self.cli_node.peer_name(), full_cli_cmd ); self.cli_node .cmd(&test_image, &full_cli_cmd, "run-cli-commands") .await .map_err(|err| anyhow::format_err!("Failed to run CLI: {}", err))?; let msg = format!("3. CLI success from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); context .cluster_builder .cluster_swarm .kill_job(&faucet_job_name) .await .map_err(|err| anyhow::format_err!("Failed to kill faucet: {}", err))?; Ok(()) } fn deadline(&self) -> Duration { Duration::from_secs(5 * 60) } } impl fmt::Display for ClientCompatibilityTest { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Client compatibility test {}, faucet {}, CLI on {}", self.old_image_tag, self.faucet_node, self.cli_node ) } }
#![forbid(unsafe_code)] use crate::{ cluster::Cluster, cluster_swarm::cluster_swarm_kube::CFG_SEED, experiments::{Context, Experiment, ExperimentParam}, instance::Instance, }; use async_trait::async_trait; use libra_logger::prelude::*; use libra_types::chain_id::ChainId; use std::{collections::HashSet, fmt, time::Duration}; use structopt::StructOpt; use tokio::time; #[derive(StructOpt, Debug)] pub struct ClientCompatiblityTestParams { #[structopt(long, help = "Image tag of old client to test")] pub old_image_tag: String, } pub struct ClientCompatibilityTest { old_image_tag: String, faucet_node: Instance, cli_node: Instance, } impl ExperimentParam for ClientCompatiblityTestParams { type E = ClientCompatibilityTest; fn build(self, cluster: &Cluster) -> Self::E { let (test_nodes, _) = cluster.split_n_fullnodes_random(2); let mut test_nodes = test_nodes.into_fullnode_instances(); let faucet_node = test_nodes.pop().expect("Requires at least one faucet node"); let cli_node = test_nodes.pop().expect("Requires at least one test node"); Self::E { old_image_tag: self.old_image_tag, faucet_node, cli_node, } } } #[async_trait] impl Experiment for ClientCompatibilityTest { fn affected_validators(&self) -> HashSet<String> { HashSet::new() } async fn run(&mut self, context: &mut Context<'_>) -> anyhow::Result<()> { context.report.report_text(format!( "Client compatibility test results for {} ==> {} (PR)", self.old_image_tag, context.current_tag )); let test_image = format!( "853397791086.dkr.ecr.us-west-2.amazonaws.com/libra_faucet:{}", self.old_image_tag ); let faucet_port: &str = "9999"; let num_validators = context.cluster.validator_instances().len(); let config_cmd = format!( "/opt/libra/bin/config-builder faucet -o /opt/libra/etc --chain-id {chain_id} -s {seed} -n {num_validators}; echo $?; cat /opt/libra/etc/waypoint.txt", chain_id=ChainId::test(), seed=CFG_SEED, num_validators=num_validators ); let env_cmd = format!( "CFG_CHAIN_ID={chain_id} AC_HOST={ac_host} 
AC_PORT={ac_port}", chain_id = ChainId::test(), ac_host = self.faucet_node.ip(), ac_port = self.faucet_node.ac_port() ); let run_cmd = format!("gunicorn --bind 0.0.0.0:{faucet_port} --access-logfile - --error-logfile - --log-level debug --pythonpath /opt/libra/bin server", faucet_port=faucet_port); let full_faucet_cmd = format!( "{config_cmd}; {env_cmd} {run_cmd}", config_cmd = config_cmd, env_cmd = env_cmd, run_cmd = run_cmd ); let msg = format!("1. Starting faucet on node {}", self.faucet_node); info!("{}", msg); context.report.report_text(msg);
self.faucet_node.peer_name(), full_faucet_cmd ); info!("Waiting for faucet job to spin up completely"); time::delay_for(Duration::from_secs(20)).await; let run_cli_cmd = format!( "/opt/libra/bin/cli --url {fn_url} --chain-id {chain_id} -f http://{faucet_host}:{faucet_port} --waypoint $(cat /opt/libra/etc/waypoint.txt)", fn_url = self.cli_node.json_rpc_url(), chain_id = ChainId::test(), faucet_host = self.faucet_node.ip(), faucet_port = faucet_port ); let mut build_cli_cmd = String::new(); let cli_cmd_file = "/opt/libra/etc/cmds.txt"; let cmds = include_str!("client_compatibility_cmds.txt"); for cmd in cmds.split('\n') { build_cli_cmd.push_str(&format!( "echo {cmd} >> {cmd_file};", cmd = cmd, cmd_file = cli_cmd_file )); } let full_cli_cmd = format!( "{config_cmd}; {build_cli_cmd} {run_cli_cmd} < {cli_cmd_file} && echo SUCCESS", config_cmd = config_cmd, build_cli_cmd = build_cli_cmd, run_cli_cmd = run_cli_cmd, cli_cmd_file = cli_cmd_file ); let msg = format!("2. Running CLI mint from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); info!( "Job starting for node {}:{} CLI command: {}", self.cli_node, self.cli_node.peer_name(), full_cli_cmd ); self.cli_node .cmd(&test_image, &full_cli_cmd, "run-cli-commands") .await .map_err(|err| anyhow::format_err!("Failed to run CLI: {}", err))?; let msg = format!("3. CLI success from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); context .cluster_builder .cluster_swarm .kill_job(&faucet_job_name) .await .map_err(|err| anyhow::format_err!("Failed to kill faucet: {}", err))?; Ok(()) } fn deadline(&self) -> Duration { Duration::from_secs(5 * 60) } } impl fmt::Display for ClientCompatibilityTest { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Client compatibility test {}, faucet {}, CLI on {}", self.old_image_tag, self.faucet_node, self.cli_node ) } }
let faucet_job_name = self .faucet_node .spawn_job(&test_image, &full_faucet_cmd, "run-faucet") .await .map_err(|err| anyhow::format_err!("Failed to spawn faucet job: {}", err))?; info!( "Job {} started for node {}:{} faucet command: {}", faucet_job_name, self.faucet_node,
random
[ { "content": "pub fn get_instance_list_str(batch: &[Instance]) -> String {\n\n let mut nodes_list = String::from(\"\");\n\n for instance in batch.iter() {\n\n nodes_list.push_str(&instance.to_string());\n\n nodes_list.push_str(\", \")\n\n }\n\n nodes_list\n\n}\n\n\n\n#[derive(StructOpt...
Rust
src/unwrap.rs
jkfritcher/aes-keywrap-rs
76abb49644769a8a48228227ff3ebd56336f6ddf
use crate::{ types::{Aes128Ecb, Aes192Ecb, Aes256Ecb, AES_BLOCK_LEN, BLOCK_LEN}, }; use block_modes::BlockMode; use thiserror::Error; #[derive(Error, Debug)] pub enum UnwrapKeyError { #[error("Key length must be 16, 24 or 32 octets")] KeyLengthInvalid, #[error("Ciphertext length must be a multiple of {0} octets")] CipherTextInvalidLength(usize), #[error("Ciphertext length can not be longer than {0} octets")] CipherTextLengthTooLong(u32), #[error("Ciphertext length must be atleast {0} octet(s)")] CipherTextLengthTooShort(usize), #[error("Failed to successfully unwrap key")] CipherTextValidationFailure, } pub fn aes_unwrap_with_nopadding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 | 1 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(24)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; BLOCK_LEN] = [0xa6; BLOCK_LEN]; if !constant_time_eq(&pt[0..BLOCK_LEN], &A[0..BLOCK_LEN]) { return Err(UnwrapKeyError::CipherTextValidationFailure); } Ok(pt[BLOCK_LEN..].to_vec()) } pub fn aes_unwrap_with_padding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(16)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match 
key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; 4] = [0xa6, 0x59, 0x59, 0xa6]; if !constant_time_eq(&pt[0..4], &A) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let mli = { let mut mli_bytes: [u8; 4] = Default::default(); mli_bytes[..].copy_from_slice(&pt[4..8]); u32::from_be_bytes(mli_bytes) as usize }; if !(mli > (8 * (n - 1)) && mli <= (8 * n)) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let pad_len = ct_len - mli - BLOCK_LEN; let padding = &pt[(ct_len - pad_len)..]; for pad_byte in padding { if *pad_byte != 0 { return Err(UnwrapKeyError::CipherTextValidationFailure); } } Ok(pt[BLOCK_LEN..(BLOCK_LEN + mli)].to_vec()) } fn constant_time_eq(a: &[u8], b: &[u8]) -> bool { if a.len() != b.len() { return false; } let c = a.iter().zip(b.iter()).fold(0, |acc, (a, b)| acc | (a ^ b)); c == 0 } fn aes128_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes128Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes192_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes192Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes256_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes256Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn unwrap_core<AesEcb>(key: &[u8], n: usize, pt: &mut [u8], aes_ecb_decrypt: AesEcb) where AesEcb: Fn(&[u8], &mut [u8]), { if pt.len() > AES_BLOCK_LEN { let mut tmp: Vec<u8> = vec![0u8; AES_BLOCK_LEN]; tmp[0..BLOCK_LEN].copy_from_slice(&pt[0..BLOCK_LEN]); for j in (0..6).rev() { for i in (1..=n).rev() { let idx = i * 
BLOCK_LEN; tmp[BLOCK_LEN..].copy_from_slice(&pt[idx..idx + BLOCK_LEN]); let t = ((n * j) + i) as u64; tmp[0..BLOCK_LEN] .iter_mut() .zip(t.to_be_bytes().iter()) .for_each(|(x1, x2)| *x1 ^= *x2); aes_ecb_decrypt(key, &mut tmp); pt[idx..idx + BLOCK_LEN].copy_from_slice(&tmp[BLOCK_LEN..]); } } pt[0..BLOCK_LEN].copy_from_slice(&tmp[0..BLOCK_LEN]); } else { aes_ecb_decrypt(key, pt); } } #[cfg(test)] mod tests { use super::{aes_unwrap_with_nopadding, aes_unwrap_with_padding}; #[test] fn test_unwrap_nopad_invalid_key_length() { let ct = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_wrap_pad_invalid_key_length() { let pt = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let ct = aes_unwrap_with_padding(&pt, &key); assert!(ct.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_unwrap_nopad_16_byte_key_16_byte_data() { let ct = hex!("1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_16_byte_data() { let ct = hex!("96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_32_byte_key_16_byte_data() { let ct = hex!("64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7").to_vec(); let key = 
hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_24_byte_data() { let ct = hex!("031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); } #[test] fn test_unwrap_nopad_32_byte_key_24_byte_data() { let ct = hex!("A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); } #[test] fn test_unwrap_nopad_32_byte_key_32_byte_data() { let ct = hex!("28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_20_byte_data() { let ct = hex!("138bdeaa9b8fa7fc61f97742e72248ee5ae6ae5360d1ae6a5f54f373fa543b6a").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), 
hex!("c37b7e6492584340bed12207808941155068f738").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_7_byte_data() { let ct = hex!("afbeb0f07dfbf5419200f2ccb50bb24f").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("466f7250617369").to_vec()); } }
use crate::{ types::{Aes128Ecb, Aes192Ecb, Aes256Ecb, AES_BLOCK_LEN, BLOCK_LEN}, }; use block_modes::BlockMode; use thiserror::Error; #[derive(Error, Debug)] pub enum UnwrapKeyError { #[error("Key length must be 16, 24 or 32 octets")] KeyLengthInvalid, #[error("Ciphertext length must be a multiple of {0} octets")] CipherTextInvalidLength(usize), #[error("Ciphertext length can not be longer than {0} octets")] CipherTextLengthTooLong(u32), #[error("Ciphertext length must be atleast {0} octet(s)")] CipherTextLengthTooShort(usize), #[error("Failed to successfully unwrap key")] CipherTextValidationFailure, } pub fn aes_unwrap_with_nopadding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 | 1 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(24)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; BLOCK_LEN] = [0xa6; BLOCK_LEN]; if !constant_time_eq(&pt[0..BLOCK_LEN], &A[0..BLOCK_LEN]) { return Err(UnwrapKeyError::CipherTextValidationFailure); } Ok(pt[BLOCK_LEN..].to_vec()) } pub fn aes_unwrap_with_padding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(16)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match 
key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; 4] = [0xa6, 0x59, 0x59, 0xa6]; if !constant_time_eq(&pt[0..4], &A) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let mli = { let mut mli_bytes: [u8; 4] = Default::default(); mli_bytes[..].copy_from_slice(&pt[4..8]); u32::from_be_bytes(mli_bytes) as usize }; if !(mli > (8 * (n - 1)) && mli <= (8 * n)) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let pad_len = ct_len - mli - BLOCK_LEN; let padding = &pt[(ct_len - pad_len)..]; for pad_byte in padding { if *pad_byte != 0 { return Err(UnwrapKeyError::CipherTextValidationFailure); } } Ok(pt[BLOCK_LEN..(BLOCK_LEN + mli)].to_vec()) } fn constant_time_eq(a: &[u8], b: &[u8]) -> bool { if a.len() != b.len() { return false; } let c = a.iter().zip(b.iter()).fold(0, |acc, (a, b)| acc | (a ^ b)); c == 0 } fn aes128_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes128Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes192_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes192Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes256_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes256Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn unwrap_core<AesEcb>(key: &[u8], n: usize, pt: &mut [u8], aes_ecb_decrypt: AesEcb) where AesEcb: Fn(&[u8], &mut [u8]), { if pt.len() > AES_BLOCK_LEN { let mut tmp: Vec<u8> = vec![0u8; AES_BLOCK_LEN]; tmp[0..BLOCK_LEN].copy_from_slice(&pt[0..BLOCK_LEN]); for j in (0..6).rev() { for i in (1..=n).rev() { let idx = i * 
BLOCK_LEN; tmp[BLOCK_LEN..].copy_from_slice(&pt[idx..idx + BLOCK_LEN]); let t = ((n * j) + i) as u64; tmp[0..BLOCK_LEN] .iter_mut() .zip(t.to_be_bytes().iter()) .for_each(|(x1, x2)| *x1 ^= *x2); aes_ecb_decrypt(key, &mut tmp); pt[idx..idx + BLOCK_LEN].copy_from_slice(&tmp[BLOCK_LEN..]); } } pt[0..BLOCK_LEN].copy_from_slice(&tmp[0..BLOCK_LEN]); } else { aes_ecb_decrypt(key, pt); } } #[cfg(test)] mod tests { use super::{aes_unwrap_with_nopadding, aes_unwrap_with_padding}; #[test] fn test_unwrap_nopad_invalid_key_length() { let ct = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_wrap_pad_invalid_key_length() { let pt = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let ct = aes_unwrap_with_padding(&pt, &key); assert!(ct.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_unwrap_nopad_16_byte_key_16_byte_data() { let ct = hex!("1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_16_byte_data() { let ct = hex!("96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_32_byte_key_16_byte_data() { let ct = hex!("64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7").to_vec(); let key = 
hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_24_byte_data() { let ct = hex!("031D33264E15D33268F24EC260743
#[test] fn test_unwrap_nopad_32_byte_key_24_byte_data() { let ct = hex!("A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); } #[test] fn test_unwrap_nopad_32_byte_key_32_byte_data() { let ct = hex!("28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_20_byte_data() { let ct = hex!("138bdeaa9b8fa7fc61f97742e72248ee5ae6ae5360d1ae6a5f54f373fa543b6a").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("c37b7e6492584340bed12207808941155068f738").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_7_byte_data() { let ct = hex!("afbeb0f07dfbf5419200f2ccb50bb24f").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("466f7250617369").to_vec()); } }
EDCE1C6C7DDEE725A936BA814915C6762D2").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); }
function_block-function_prefixed
[ { "content": "fn wrap_core<AesEcb>(key: &[u8], n: usize, ct: &mut [u8], aes_ecb_encrypt: AesEcb)\n\nwhere\n\n AesEcb: Fn(&[u8], &mut [u8]),\n\n{\n\n if ct.len() > AES_BLOCK_LEN {\n\n // Allocate buffer for operations in loop\n\n // tmp = A | R[i]\n\n let mut tmp: Vec<u8> = vec![0u8; A...
Rust
lang/src/compiler/path.rs
leops/hatchet
4f788737551b3cf74c06c8fae24b5b20be420ef4
use std::collections::VecDeque; use std::borrow::Borrow; use hct::ast::Path; use super::builder::*; use super::function::*; use super::scope::Scope; use super::types::*; use atom::*; pub fn resolve_path<'a, P: Borrow<Path>>(path: P, scope: &Scope<'a>, builder: &mut Builder) -> ValueRef { let path = path.borrow(); match *path { Path::Deref(ref obj, ref prop) => { let res = resolve_path(obj.borrow(), scope, builder); if res.ty == TypeId::Entity { res } else if let TypeId::Object { ref items } = res.ty { let idx = { items.binary_search_by(|&(ref item, _)| item.cmp(prop)) .expect(&format!("key \"{}\" not found in object", prop)) }; let (_, ref ty) = items[idx]; let zero = builder.build_const_i32(0i32); let idx = builder.build_const_i32(idx as i32); let gep = builder.build_in_bounds_gep( &res, &[ zero, idx, ], ); let res = builder.build_load(&gep); ValueRef { ty: ty.clone(), ptr: res.ptr } } else { panic!("trying to deref a non-map value {}", *path) } }, Path::Instance(ref obj) => resolve_path(obj.borrow(), scope, builder), Path::Binding(ref name) => { match scope.binding(builder, name) { Some(val) => val, None => if name.starts_with('@') { builder.build_const_entity(name) } else { panic!("entity {} not found", *path) } } }, } } type Trigger<'a> = (Option<ValueRef>, Option<ValueRef>, Option<Atom>); fn break_trigger<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> Trigger<'a> { match path { Path::Deref(obj, prop) => match break_trigger(*obj, scope, builder) { (a, Some(b), None) => (a, Some(b), Some(prop)), (a, None, None) => (a, Some(builder.build_const_entity(prop)), None), path => panic!("invalid path {:?}", path), }, Path::Instance(pat) => (Some(resolve_path(pat, scope, builder)), None, None), Path::Binding(_) => (None, Some(resolve_path(&path, scope, builder)), None), } } pub fn event<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> (ValueRef, ValueRef) { match break_trigger(path, scope, builder) { (Some(inst), Some(ent), Some(method)) => { 
let method = builder.build_const_atom(method); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), Some(method)) => ( ent, builder.build_const_atom(method), ), (Some(inst), Some(ent), None) => { let method = builder.build_const_atom(hct_atom!("Trigger")); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), None) => ( ent, builder.build_const_atom(hct_atom!("Trigger")), ), path => panic!("invalid path {:?}", path), } } pub fn unwind_path(path: Path) -> VecDeque<Atom> { match path { Path::Deref(obj, prop) => { let mut res = unwind_path(*obj); res.push_back(prop); res }, Path::Binding(name) => { let mut res = VecDeque::new(); res.push_back(name); res }, Path::Instance(_) => unimplemented!(), } }
use std::collections::VecDeque; use std::borrow::Borrow; use hct::ast::Path; use super::builder::*; use super::function::*; use super::scope::Scope; use super::types::*; use atom::*; pub fn resolve_path<'a, P: Borrow<Path>>(path: P, scope: &Scope<'a>, builder: &mut Builder) -> ValueRef { let path = path.borrow(); match *path { Path::Deref(ref obj, ref prop) => { let res = resolve_path(obj.borrow(), scope, builder); if res.ty == TypeId::Entity { res } else if let TypeId::Object { ref items } = res.ty { let idx = { items.binary_search_by(|&(ref item, _)| item.cmp(prop)) .expect(&format!("key \"{}\" not found in object", prop)) }; let (_, ref ty) = items[idx]; let zero = builder.build_const_i32(0i32); let idx = builder.build_const_i32(idx as i32); let gep = builder.build_in_bounds_gep( &res, &[ zero, idx, ], ); let res = builder.build_load(&gep); ValueRef { ty: ty.clone(), ptr: res.ptr } } else { panic!("trying to deref a non-map value {}", *path) } }, Path::Instance(ref obj) => resolve_path(obj.borrow(), scope, builder), Path::Binding(ref name) => { match scope.binding(builder, name) { Some(val) => val, None => if name.starts_with('@') { builder.build_const_entity(name) } else { panic!("entity {} not found", *path) } } }, } } type Trigger<'a> = (Option<ValueRef>, Option<ValueRef>, Option<Atom>); fn break_trigger<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> Trigger<'a> { match path { Path::Deref(obj, prop) =>
, Path::Instance(pat) => (Some(resolve_path(pat, scope, builder)), None, None), Path::Binding(_) => (None, Some(resolve_path(&path, scope, builder)), None), } } pub fn event<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> (ValueRef, ValueRef) { match break_trigger(path, scope, builder) { (Some(inst), Some(ent), Some(method)) => { let method = builder.build_const_atom(method); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), Some(method)) => ( ent, builder.build_const_atom(method), ), (Some(inst), Some(ent), None) => { let method = builder.build_const_atom(hct_atom!("Trigger")); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), None) => ( ent, builder.build_const_atom(hct_atom!("Trigger")), ), path => panic!("invalid path {:?}", path), } } pub fn unwind_path(path: Path) -> VecDeque<Atom> { match path { Path::Deref(obj, prop) => { let mut res = unwind_path(*obj); res.push_back(prop); res }, Path::Binding(name) => { let mut res = VecDeque::new(); res.push_back(name); res }, Path::Instance(_) => unimplemented!(), } }
match break_trigger(*obj, scope, builder) { (a, Some(b), None) => (a, Some(b), Some(prop)), (a, None, None) => (a, Some(builder.build_const_entity(prop)), None), path => panic!("invalid path {:?}", path), }
if_condition
[ { "content": "/// Create a call to an STL function\n\npub fn call_stl<'a, A>(builder: &mut Builder, name: Atom, args: A) -> ValueRef\n\n where A: IntoIterator<Item=&'a ValueRef>, Type: 'a, Value: 'a {\n\n let mut args = args.into_iter().peekable();\n\n\n\n match name {\n\n hct_atom!(\"length\") ...
Rust
src/day21.rs
codedstructure/aoc2021
27e151e4c8cbcda78b29fe734df6733818783461
use std::collections::HashMap; #[derive(Default, Debug, Clone)] struct DetDie { state: i32, roll_count: i32, } impl Iterator for DetDie { type Item = i32; fn next(&mut self) -> Option<i32> { self.roll_count += 1; let value = self.state + 1; self.state = (self.state + 1) % 100; Some(value) } } struct Player { position: i32, score: i32, } impl Player { fn new(start: i32) -> Self { Self { position: (start - 1) % 10, score: 0, } } fn advance(&mut self, amount: i32) { self.position = (self.position + amount) % 10; self.score += self.position + 1; } } pub fn step1() { let mut dd: DetDie = Default::default(); let dd = dd.by_ref(); let mut p1 = Player::new(8); let mut p2 = Player::new(6); let losing_score = loop { p1.advance(dd.take(3).sum()); if p1.score >= 1000 { break p2.score; } p2.advance(dd.take(3).sum()); if p2.score >= 1000 { break p1.score; } }; println!("Final result: {}", dd.roll_count * losing_score); } fn run_game( remain: i32, pos: i32, ways: i128, throw_count: i32, throw_way_map: &mut HashMap<i32, i128>, ) -> i128 { let roll_dist: HashMap<i32, i128> = HashMap::from_iter([(3, 1), (4, 3), (5, 6), (6, 7), (7, 6), (8, 3), (9, 1)]); if remain > 0 { let mut new_ways = 0; for roll_sum in 3..=9 { let possibilities = roll_dist.get(&roll_sum).unwrap(); let new_pos = ((pos - 1) + roll_sum) % 10 + 1; new_ways += run_game( remain - new_pos, new_pos, ways * possibilities, throw_count + 1, throw_way_map, ); } return new_ways; } *throw_way_map.entry(throw_count).or_insert(0) += ways; ways } pub fn step2() { let mut p1_throw_ways = HashMap::new(); let p1_complete = run_game(21, 8, 1, 0, &mut p1_throw_ways); println!("p1: {}", p1_complete); println!("{:?}", p1_throw_ways); let mut p2_throw_ways = HashMap::new(); let p2_complete = run_game(21, 6, 1, 0, &mut p2_throw_ways); println!("p2: {}", p2_complete); println!("{:?}", p2_throw_ways); let mut p1_win_count = 0; let mut p2_win_count = 0; let mut total_universes_p1 = 1; let mut total_universes_p2 = 1; for round in 1..=10 { 
total_universes_p1 *= 27; if let Some(p1_wins_this_throw) = p1_throw_ways.get(&round) { total_universes_p1 -= p1_wins_this_throw; p1_win_count += p1_wins_this_throw * total_universes_p2; } total_universes_p2 *= 27; if let Some(p2_wins_this_throw) = p2_throw_ways.get(&round) { total_universes_p2 -= p2_wins_this_throw; p2_win_count += p2_wins_this_throw * total_universes_p1; } println!( "round {:2}: universes p1: {:8}, p2: {:8}", round, total_universes_p1, total_universes_p2 ); } println!("p1 win universes: {}", p1_win_count); println!("p2 win universes: {}", p2_win_count); }
use std::collections::HashMap; #[derive(Default, Debug, Clone)] struct DetDie { state: i32, roll_count: i32, } impl Iterator for DetDie { type Item = i32; fn next(&mut self) -> Option<i32> { self.roll_count += 1; let value = self.state + 1; self.state = (self.state + 1) % 100; Some(value) } } struct Player { position: i32, score: i32, } impl Player { fn new(start: i32) -> Self { Self { position: (start - 1) % 10, score: 0, } } fn advance(&mut self, amount: i32) { self.position = (self.position + amount) % 10; self.score += self.position + 1; } } pub fn step1() { let mut dd: DetDie = Default::default(); let dd = dd.by_ref(); let mut p1 = Player::new(8); let mut p2 = Player::new(6); let losing_score = loop { p1.advance(dd.take(3).sum()); if p1.score >= 1000 { break p2.score; } p2.advance(dd.take(3).sum()); if p2.score >= 1000 { break p1.score; } }; println!("Final result: {}", dd.roll_count * losing_score); } fn run_game( remain: i32, pos: i32, ways: i128, throw_count: i32, throw_way_map: &mut HashMap<i32
throw_count + 1, throw_way_map, ); } return new_ways; } *throw_way_map.entry(throw_count).or_insert(0) += ways; ways } pub fn step2() { let mut p1_throw_ways = HashMap::new(); let p1_complete = run_game(21, 8, 1, 0, &mut p1_throw_ways); println!("p1: {}", p1_complete); println!("{:?}", p1_throw_ways); let mut p2_throw_ways = HashMap::new(); let p2_complete = run_game(21, 6, 1, 0, &mut p2_throw_ways); println!("p2: {}", p2_complete); println!("{:?}", p2_throw_ways); let mut p1_win_count = 0; let mut p2_win_count = 0; let mut total_universes_p1 = 1; let mut total_universes_p2 = 1; for round in 1..=10 { total_universes_p1 *= 27; if let Some(p1_wins_this_throw) = p1_throw_ways.get(&round) { total_universes_p1 -= p1_wins_this_throw; p1_win_count += p1_wins_this_throw * total_universes_p2; } total_universes_p2 *= 27; if let Some(p2_wins_this_throw) = p2_throw_ways.get(&round) { total_universes_p2 -= p2_wins_this_throw; p2_win_count += p2_wins_this_throw * total_universes_p1; } println!( "round {:2}: universes p1: {:8}, p2: {:8}", round, total_universes_p1, total_universes_p2 ); } println!("p1 win universes: {}", p1_win_count); println!("p2 win universes: {}", p2_win_count); }
, i128>, ) -> i128 { let roll_dist: HashMap<i32, i128> = HashMap::from_iter([(3, 1), (4, 3), (5, 6), (6, 7), (7, 6), (8, 3), (9, 1)]); if remain > 0 { let mut new_ways = 0; for roll_sum in 3..=9 { let possibilities = roll_dist.get(&roll_sum).unwrap(); let new_pos = ((pos - 1) + roll_sum) % 10 + 1; new_ways += run_game( remain - new_pos, new_pos, ways * possibilities,
function_block-random_span
[ { "content": "pub fn step1() {\n\n let hm = HeightMap::new(\"inputs/day09.txt\");\n\n\n\n println!(\"{}\", hm.risk_level());\n\n}\n\n\n", "file_path": "src/day09.rs", "rank": 0, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut game = Game::new(\"inputs/day04...
Rust
src/types.rs
pitkley/i3nator
c79dc059d7f174ac0ef4823769149dec537ed063
use crate::{configfiles::ConfigFile, layouts::Layout as ManagedLayout, shlex}; use serde::{ de::{self, Deserializer}, Deserialize, }; #[cfg(unix)] use std::os::unix::ffi::OsStrExt; use std::{ borrow::Cow, ffi::{OsStr, OsString}, fmt, marker::PhantomData, path::{Path, PathBuf}, time::Duration, }; #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Config { pub general: General, pub applications: Vec<Application>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct General { #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, pub workspace: Option<String>, #[serde(deserialize_with = "deserialize_layout")] pub layout: Layout, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum Layout { Contents(String), Managed(String), Path(PathBuf), } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Application { #[serde(deserialize_with = "deserialize_application_command")] pub command: ApplicationCommand, #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, #[serde(default, deserialize_with = "deserialize_opt_exec")] pub exec: Option<Exec>, } #[derive(Deserialize, Debug, Default, Clone, PartialEq, Eq)] pub struct ApplicationCommand { pub program: String, #[serde(default)] pub args: Vec<String>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] pub struct Exec { pub commands: Vec<String>, #[serde(default = "default_exec_type")] pub exec_type: ExecType, #[serde(default = "default_timeout", deserialize_with = "deserialize_duration")] pub timeout: Duration, } fn default_exec_type() -> ExecType { ExecType::Text } fn default_timeout() -> Duration { Duration::from_secs(5) } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "snake_case")] pub enum ExecType { Text, 
TextNoReturn, Keys, } struct Phantom<T>(PhantomData<T>); fn deserialize_application_command<'de, D>(deserializer: D) -> Result<ApplicationCommand, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<ApplicationCommand> { type Value = ApplicationCommand; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { match shlex::split(value) { Some(mut v) => { if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0).to_owned(), args: v.into_iter().map(str::to_owned).collect::<Vec<_>>(), }) } } None => Err(de::Error::custom("command can not be empty")), } } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let mut v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0), args: v, }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<ApplicationCommand>(PhantomData)) } fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Duration> { type Value = Duration; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("integer or map") } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: de::Error, { Ok(Duration::from_secs(value as u64)) } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } 
deserializer.deserialize_any(Phantom::<Duration>(PhantomData)) } fn deserialize_exec<'de, D>(deserializer: D) -> Result<Exec, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Exec> { type Value = Exec; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { Ok(Exec { commands: vec![value.to_owned()], exec_type: default_exec_type(), timeout: default_timeout(), }) } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("commands can not be empty")) } else { Ok(Exec { commands: v, exec_type: default_exec_type(), timeout: default_timeout(), }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Exec>(PhantomData)) } fn deserialize_opt_exec<'de, D>(deserializer: D) -> Result<Option<Exec>, D::Error> where D: Deserializer<'de>, { deserialize_exec(deserializer).map(Some) } fn deserialize_layout<'de, D>(deserializer: D) -> Result<Layout, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Layout> { type Value = Layout; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { if value.find('{').is_some() { Ok(Layout::Contents(value.into())) } else if ManagedLayout::open(value).is_ok() { Ok(Layout::Managed(value.to_owned())) } else { Ok(Layout::Path(tilde(value).into_owned())) } } } deserializer.deserialize_any(Phantom::<Layout>(PhantomData)) } fn deserialize_pathbuf_with_tilde<'de, 
D>(deserializer: D) -> Result<PathBuf, D::Error> where D: Deserializer<'de>, { let pathbuf: PathBuf = de::Deserialize::deserialize(deserializer)?; Ok(tilde(&pathbuf).into_owned()) } fn deserialize_opt_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error> where D: Deserializer<'de>, { deserialize_pathbuf_with_tilde(deserializer).map(Some) } #[doc(hidden)] fn tilde_with_context<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> Cow<Path> where SI: AsRef<Path>, P: AsRef<Path>, HD: FnOnce() -> Option<P>, { let input_str = input.as_ref(); let bytes = input_str.as_os_str().as_bytes(); if bytes[0] == b'~' { let input_after_tilde = &bytes[1..]; if input_after_tilde.is_empty() || input_after_tilde[0] == b'/' { if let Some(hd) = home_dir() { let mut s = OsString::new(); s.push(hd.as_ref().to_path_buf()); s.push(OsStr::from_bytes(input_after_tilde)); PathBuf::from(s).into() } else { input_str.into() } } else { input_str.into() } } else { input_str.into() } } fn tilde<SI: ?Sized>(input: &SI) -> Cow<Path> where SI: AsRef<Path>, { tilde_with_context(input, dirs_next::home_dir) }
use crate::{configfiles::ConfigFile, layouts::Layout as ManagedLayout, shlex}; use serde::{ de::{self, Deserializer}, Deserialize, }; #[cfg(unix)] use std::os::unix::ffi::OsStrExt; use std::{ borrow::Cow, ffi::{OsStr, OsString}, fmt, marker::PhantomData, path::{Path, PathBuf}, time::Duration, }; #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Config { pub general: General, pub applications: Vec<Application>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct General { #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, pub workspace: Option<String>, #[serde(deserialize_with = "deserialize_layout")] pub layout: Layout, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum Layout { Contents(String), Managed(String), Path(PathBuf), } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Application { #[serde(deserialize_with = "deserialize_application_command")] pub command: ApplicationCommand, #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, #[serde(default, deserialize_with = "deserialize_opt_exec")] pub exec: Option<Exec>, } #[derive(Deserialize, Debug, Default, Clone, PartialEq, Eq)] pub struct ApplicationCommand { pub program: String, #[serde(default)] pub args: Vec<String>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] pub struct Exec { pub commands: Vec<String>, #[serde(default = "default_exec_type")] pub exec_type: ExecType, #[serde(default = "default_timeout", deserialize_with = "deserialize_duration")] pub timeout: Duration, } fn default_exec_type() -> ExecType { ExecType::Text } fn default_timeout() -> Duration { Duration::from_secs(5) } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "snake_case")] pub enum ExecType { Text, 
TextNoReturn, Keys, } struct Phantom<T>(PhantomData<T>); fn deserialize_application_command<'de, D>(deserializer: D) -> Result<ApplicationCommand, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<ApplicationCommand> { type Value = ApplicationCommand; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { match shlex::split(value) { Some(mut v) => { if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0).to_owned(), args: v.into_iter().map(str::to_owned).collect::<Vec<_>>(), }) } } None => Err(de::Error::custom("command can not be empty")), } } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let mut v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0), args: v, }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<ApplicationCommand>(PhantomData)) }
fn deserialize_exec<'de, D>(deserializer: D) -> Result<Exec, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Exec> { type Value = Exec; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { Ok(Exec { commands: vec![value.to_owned()], exec_type: default_exec_type(), timeout: default_timeout(), }) } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("commands can not be empty")) } else { Ok(Exec { commands: v, exec_type: default_exec_type(), timeout: default_timeout(), }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Exec>(PhantomData)) } fn deserialize_opt_exec<'de, D>(deserializer: D) -> Result<Option<Exec>, D::Error> where D: Deserializer<'de>, { deserialize_exec(deserializer).map(Some) } fn deserialize_layout<'de, D>(deserializer: D) -> Result<Layout, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Layout> { type Value = Layout; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { if value.find('{').is_some() { Ok(Layout::Contents(value.into())) } else if ManagedLayout::open(value).is_ok() { Ok(Layout::Managed(value.to_owned())) } else { Ok(Layout::Path(tilde(value).into_owned())) } } } deserializer.deserialize_any(Phantom::<Layout>(PhantomData)) } fn deserialize_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<PathBuf, D::Error> where D: Deserializer<'de>, 
{ let pathbuf: PathBuf = de::Deserialize::deserialize(deserializer)?; Ok(tilde(&pathbuf).into_owned()) } fn deserialize_opt_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error> where D: Deserializer<'de>, { deserialize_pathbuf_with_tilde(deserializer).map(Some) } #[doc(hidden)] fn tilde_with_context<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> Cow<Path> where SI: AsRef<Path>, P: AsRef<Path>, HD: FnOnce() -> Option<P>, { let input_str = input.as_ref(); let bytes = input_str.as_os_str().as_bytes(); if bytes[0] == b'~' { let input_after_tilde = &bytes[1..]; if input_after_tilde.is_empty() || input_after_tilde[0] == b'/' { if let Some(hd) = home_dir() { let mut s = OsString::new(); s.push(hd.as_ref().to_path_buf()); s.push(OsStr::from_bytes(input_after_tilde)); PathBuf::from(s).into() } else { input_str.into() } } else { input_str.into() } } else { input_str.into() } } fn tilde<SI: ?Sized>(input: &SI) -> Cow<Path> where SI: AsRef<Path>, { tilde_with_context(input, dirs_next::home_dir) }
fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Duration> { type Value = Duration; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("integer or map") } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: de::Error, { Ok(Duration::from_secs(value as u64)) } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Duration>(PhantomData)) }
function_block-full_function
[ { "content": "fn exec_text(base_parameters: &[&str], text: &str, timeout: Duration) -> Result<()> {\n\n let args = &[base_parameters, &[\"type\", \"--window\", \"%1\", text]].concat();\n\n let mut child = Command::new(\"xdotool\")\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .stdout(...
Rust
snapshot/src/logic.rs
AllSafeCybercurity/RClient
88aa5fe784621041b05038ae62139398a34b74bc
use sodiumoxide::crypto::{ hash, pwhash, secretstream::{self, Header, Key, Pull, Push, Stream, Tag}, }; use std::{ fs::File, io::{Read, Write}, }; const CHUNK_SIZE: usize = 4096; const SIGN: [u8; 5] = [0x50, 0x41, 0x52, 0x54, 0x49]; const VERSION: [u8; 2] = [0x1, 0x0]; fn generate_salt() -> crate::Result<pwhash::Salt> { let salt = pwhash::gen_salt(); let hash = hash::sha256::hash(&salt.0); let salt = pwhash::Salt::from_slice(hash.as_ref()).expect("Unable to rewrap salt"); Ok(salt) } fn derive_key_from_password(password: &[u8], salt: &pwhash::Salt) -> crate::Result<Key> { let mut key = [0; secretstream::KEYBYTES]; match pwhash::derive_key( &mut key, password, &salt, pwhash::OPSLIMIT_INTERACTIVE, pwhash::MEMLIMIT_INTERACTIVE, ) { Ok(_) => Ok(Key(key)), Err(_) => Err(crate::Error::SnapshotError("Could not derive key from password".into())), } } fn create_stream(&Key(ref key): &Key) -> crate::Result<(Stream<Push>, Header)> { let stream_key = secretstream::Key(key.to_owned()); Stream::init_push(&stream_key).map_err(|_| crate::Error::SnapshotError("Unable to create stream".into())) } fn pull_stream(header: &[u8], &Key(ref key): &Key) -> crate::Result<Stream<Pull>> { let stream_key = secretstream::Key(key.to_owned()); let header = Header::from_slice(header).expect("Invalid Header size"); Stream::init_pull(&header, &stream_key).map_err(|_| crate::Error::SnapshotError("Unable to open stream".into())) } pub fn encrypt_snapshot(input: Vec<u8>, out: &mut File, password: &[u8]) -> crate::Result<()> { let mut slice = input.as_slice(); let mut buf = [0; CHUNK_SIZE]; let mut input_len = slice.len(); out.write_all(&SIGN)?; out.write_all(&VERSION)?; let salt = generate_salt()?; out.write_all(&salt.0)?; let key = derive_key_from_password(password, &salt)?; let (mut stream, header) = create_stream(&key)?; out.write_all(&header.0)?; loop { match slice.read(&mut buf) { Ok(amount_read) if amount_read > 0 => { input_len -= amount_read as usize; let tag = match input_len { 0 => Tag::Final, 
_ => Tag::Message, }; out.write_all( &stream .push(&buf[..amount_read], None, tag) .map_err(|_| crate::Error::SnapshotError("Failed to encrypt".into()))?, )? } Err(e) => return Err(crate::Error::from(e)), _ => break, } } Ok(()) } pub fn decrypt_snapshot(input: &mut File, output: &mut Vec<u8>, password: &[u8]) -> crate::Result<()> { check_file_len(input)?; let salt = get_salt(input, true)?; decrypt_file(input, output, password, salt)?; Ok(()) } pub fn update_snapshot(input: &mut File, output: &mut File, password: &[u8]) -> crate::Result<()> { let mut buffer: Vec<u8> = Vec::new(); check_file_len(input)?; let salt = get_salt(input, false)?; decrypt_file(input, &mut buffer, password, salt)?; encrypt_snapshot(buffer, output, password)?; Ok(()) } fn decrypt_file(input: &mut File, output: &mut Vec<u8>, password: &[u8], salt: pwhash::Salt) -> crate::Result<()> { let mut header = [0u8; secretstream::HEADERBYTES]; input.read_exact(&mut header)?; let key = derive_key_from_password(&password, &salt)?; let mut buf = [0u8; CHUNK_SIZE + secretstream::ABYTES]; let mut stream = pull_stream(&header, &key)?; while stream.is_not_finalized() { match input.read(&mut buf) { Ok(bytes_read) if bytes_read > 0 => { let (decrypt, _tag) = stream.pull(&buf[..bytes_read], None).map_err(|_| { crate::Error::SnapshotError("Stream pull failed, could not decrypt snapshot".into()) })?; output.extend(&decrypt); } Err(_) => return Err(crate::Error::SnapshotError("Incorrect Password".into())), _ => return Err(crate::Error::SnapshotError("Decryption failed... 
".into())), } } Ok(()) } fn check_file_len(input: &mut File) -> crate::Result<()> { if input.metadata()?.len() <= (pwhash::SALTBYTES + secretstream::HEADERBYTES + SIGN.len()) as u64 { return Err(crate::Error::SnapshotError("Snapshot is not valid or encrypted".into())); } Ok(()) } fn get_salt(input: &mut File, chk_version: bool) -> crate::Result<pwhash::Salt> { let mut sign = [0u8; 5]; let mut version = [0u8; 2]; let mut salt = [0u8; pwhash::SALTBYTES]; input.read_exact(&mut sign)?; input.read_exact(&mut version)?; if chk_version { check_version(&version)?; } if sign == SIGN { input.read_exact(&mut salt)?; } else { salt[..7].copy_from_slice(&sign); input.read_exact(&mut salt[7..])?; } let salt = pwhash::Salt(salt); Ok(salt) } fn check_version(version: &[u8]) -> crate::Result<()> { if version != VERSION { Err(crate::Error::SnapshotError("Snapshot version is incorrect".into())) } else { Ok(()) } } #[cfg(test)] mod test { use super::*; use sodiumoxide::crypto::secretstream::Tag; use std::fs::OpenOptions; #[test] fn test_key_derivation() { let salt = generate_salt().unwrap(); let key_one = derive_key_from_password(b"some long password", &salt).unwrap(); let key_two = derive_key_from_password(b"some long password", &salt).unwrap(); assert_eq!(key_one, key_two); } #[test] fn test_stream() { let salt = generate_salt().unwrap(); let key = derive_key_from_password(b"a password", &salt).unwrap(); let data = b"data"; let (mut push_stream, header) = create_stream(&key).unwrap(); let mut pull_stream = pull_stream(&header.0, &key).unwrap(); let cipher = push_stream.push(data, None, Tag::Final).unwrap(); let (plain, _) = pull_stream.pull(&cipher, None).unwrap(); assert_eq!(data, &plain.as_slice()); } #[test] fn test_snapshot() { let password = b"some_password"; let data = vec![ 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 
142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, ]; let expected = data.clone(); let mut encrypt = OpenOptions::new() .write(true) .create(true) .open("test/snapshot.snapshot") .unwrap(); let mut decrypt = OpenOptions::new().read(true).open("test/snapshot.snapshot").unwrap(); let mut output: Vec<u8> = Vec::new(); encrypt_snapshot(data, &mut encrypt, password).unwrap(); decrypt_snapshot(&mut decrypt, &mut output, password).unwrap(); assert_eq!(expected, output); } }
use sodiumoxide::crypto::{ hash, pwhash, secretstream::{self, Header, Key, Pull, Push, Stream, Tag}, }; use std::{ fs::File, io::{Read, Write}, }; const CHUNK_SIZE: usize = 4096; const SIGN: [u8; 5] = [0x50, 0x41, 0x52, 0x54, 0x49]; const VERSION: [u8; 2] = [0x1, 0x0]; fn generate_salt() -> crate::Result<pwhash::Salt> { let salt = pwhash::gen_salt(); let hash = hash::sha256::hash(&salt.0); let salt = pwhash::Salt::from_slice(hash.as_ref()).expect("Unable to rewrap salt"); Ok(salt) } fn derive_key_from_password(password: &[u8], salt: &pwhash::Salt) -> crate::Result<Key> { let mut key = [0; secretstream::KEYBYTES]; match pwhash::derive_key( &mut key, password, &salt, pwhash::OPSLIMIT_INTERACTIVE, pwhash::MEMLIMIT_INTERACTIVE, ) { Ok(_) => Ok(Key(key)), Err(_) => Err(crate::Error::SnapshotError("Could not derive key from password".into())), } } fn create_stream(&Key(ref key): &Key) -> crate::Result<(Stream<Push>, Header)> { let stream_key = secretstream::Key(key.to_owned()); Stream::init_push(&stream_key).map_err(|_| crate::Error::SnapshotError("Unable to create stream".into())) } fn pull_stream(header: &[u8], &Key(ref key): &Key) -> crate::Result<Stream<Pull>> { let stream_key = secretstream::Key(key.to_owned()); let header = Header::from_slice(header).expect("Invalid Header size"); Stream::init_pull(&header, &stream_key).map_err(|_| crate::Error::SnapshotError("Unable to open stream".into())) } pub fn encrypt_snapshot(input: Vec<u8>, out: &mut File, password: &[u8]) -> crate::Result<()> { let mut slice = input.as_slice(); let mut buf = [0; CHUNK_SIZE]; let mut input_len = slice.len(); out.write_all(&SIGN)?; out.write_all(&VERSION)?; let salt = generate_salt()?; out.write_all(&salt.0)?; let key = derive_key_from_password(password, &salt)?; let (mut stream, header) = create_stream(&key)?; out.write_all(&header.0)?; loop { match slice.read(&mut buf) { Ok(amount_read) if amount_read > 0 => { input_len -= amount_read as usize; let tag = match input_len { 0 => Tag::Final, 
_ => Tag::Message, }; out.write_all( &stream .push(&buf[..amount_read], None, tag) .map_err(|_| crate::Error::SnapshotError("Failed to encrypt".into()))?, )? } Err(e) => return Err(crate::Error::from(e)), _ => break, } } Ok(()) } pub fn decrypt_snapshot(input: &mut File, output: &mut Vec<u8>, password: &[u8]) -> crate::Result<()> { check_file_len(input)?; let salt = get_salt(input, true)?; decrypt_file(input, output, password, salt)?; Ok(()) } pub fn update_snapshot(input: &mut File, output: &mut File, password: &[u8]) -> crate::Result<()> { let mut buffer: Vec<u8> = Vec::new(); check_file_len(input)?; let salt = get_salt(input, false)?; decrypt_file(input, &mut buffer, password, salt)?; encrypt_snapshot(buffer, output, password)?; Ok(()) } fn decrypt_file(input: &mut File, output: &mut Vec<u8>, password: &[u8], salt: pwhash::Salt) -> crate::Result<()> { let mut header = [0u8; secretstream::HEADERBYTES]; input.read_exact(&mut header)?; let key = derive_key_from_password(&p
=> return Err(crate::Error::SnapshotError("Incorrect Password".into())), _ => return Err(crate::Error::SnapshotError("Decryption failed... ".into())), } } Ok(()) } fn check_file_len(input: &mut File) -> crate::Result<()> { if input.metadata()?.len() <= (pwhash::SALTBYTES + secretstream::HEADERBYTES + SIGN.len()) as u64 { return Err(crate::Error::SnapshotError("Snapshot is not valid or encrypted".into())); } Ok(()) } fn get_salt(input: &mut File, chk_version: bool) -> crate::Result<pwhash::Salt> { let mut sign = [0u8; 5]; let mut version = [0u8; 2]; let mut salt = [0u8; pwhash::SALTBYTES]; input.read_exact(&mut sign)?; input.read_exact(&mut version)?; if chk_version { check_version(&version)?; } if sign == SIGN { input.read_exact(&mut salt)?; } else { salt[..7].copy_from_slice(&sign); input.read_exact(&mut salt[7..])?; } let salt = pwhash::Salt(salt); Ok(salt) } fn check_version(version: &[u8]) -> crate::Result<()> { if version != VERSION { Err(crate::Error::SnapshotError("Snapshot version is incorrect".into())) } else { Ok(()) } } #[cfg(test)] mod test { use super::*; use sodiumoxide::crypto::secretstream::Tag; use std::fs::OpenOptions; #[test] fn test_key_derivation() { let salt = generate_salt().unwrap(); let key_one = derive_key_from_password(b"some long password", &salt).unwrap(); let key_two = derive_key_from_password(b"some long password", &salt).unwrap(); assert_eq!(key_one, key_two); } #[test] fn test_stream() { let salt = generate_salt().unwrap(); let key = derive_key_from_password(b"a password", &salt).unwrap(); let data = b"data"; let (mut push_stream, header) = create_stream(&key).unwrap(); let mut pull_stream = pull_stream(&header.0, &key).unwrap(); let cipher = push_stream.push(data, None, Tag::Final).unwrap(); let (plain, _) = pull_stream.pull(&cipher, None).unwrap(); assert_eq!(data, &plain.as_slice()); } #[test] fn test_snapshot() { let password = b"some_password"; let data = vec![ 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 
205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, ]; let expected = data.clone(); let mut encrypt = OpenOptions::new() .write(true) .create(true) .open("test/snapshot.snapshot") .unwrap(); let mut decrypt = OpenOptions::new().read(true).open("test/snapshot.snapshot").unwrap(); let mut output: Vec<u8> = Vec::new(); encrypt_snapshot(data, &mut encrypt, password).unwrap(); decrypt_snapshot(&mut decrypt, &mut output, password).unwrap(); assert_eq!(expected, output); } }
assword, &salt)?; let mut buf = [0u8; CHUNK_SIZE + secretstream::ABYTES]; let mut stream = pull_stream(&header, &key)?; while stream.is_not_finalized() { match input.read(&mut buf) { Ok(bytes_read) if bytes_read > 0 => { let (decrypt, _tag) = stream.pull(&buf[..bytes_read], None).map_err(|_| { crate::Error::SnapshotError("Stream pull failed, could not decrypt snapshot".into()) })?; output.extend(&decrypt); } Err(_)
function_block-random_span
[ { "content": "/// HChaCha20 implementation\n\npub fn h_chacha20_hash(key: &[u8], nonce: &[u8], buf: &mut [u8]) {\n\n // initialize state\n\n let mut state = vec![0u32; 16];\n\n (0..4).for_each(|i| state[i] = BASIS[i]);\n\n (4..12).for_each(|i| state[i] = read32_little_endian!(&key[(i - 4) * 4..]));\...
Rust
aoc2020/src/main.rs
kylewillmon/advent-of-code-rs
f6d37627eabe4b39f87329159b11aedf59362b73
use std::io::{self, Read}; use std::fs; use clap::{App, Arg}; use aoclib::{self, AOC, Day}; pub(crate) mod parse; pub(crate) mod error; mod day1; mod day2; mod day3; mod day4; mod day6; mod day7; mod day8; mod day9; mod day10; mod day11; mod day12; mod day13; mod day14; mod day15; mod day16; mod day17; mod day18; mod day19; mod day20; mod day21; mod day22; mod day23; mod day24; mod day25; fn main() { let m = App::new("Advent of Code 2020 solvers") .author("Kyle Willmon <kylewillmon@gmail.com>") .arg(Arg::from_usage("<INPUT> 'Sets the input file to use'")) .arg(Arg::with_name("day") .short("d") .long("day") .takes_value(true) .help("day to solve")) .get_matches(); let day = match m.value_of("day") { None => None, Some(val) => match val.parse::<u8>() { Ok(val) => Some(val), Err(err) => { println!("Invalid day {:?}: {}", val, err); return; } } }; let aoc = AOC::new() .day(Day::new(1) .part(1, day1::part1) .part(2, day1::part2)) .day(Day::new(2) .part(1, day2::part1) .part(2, day2::part2)) .day(Day::new(3) .part(1, day3::part1) .part(2, day3::part2)) .day(Day::new(4) .part(1, day4::part1) .part(2, day4::part2)) .day(Day::new(6) .part(1, day6::part1) .part(2, day6::part2)) .day(Day::new(7) .part(1, day7::part1) .part(2, day7::part2)) .day(Day::new(8) .part(1, day8::part1) .part(2, day8::part2)) .day(Day::new(9) .part(1, day9::part1) .part(2, day9::part2)) .day(Day::new(10) .part(1, day10::part1) .part(2, day10::part2)) .day(Day::new(11) .part(1, day11::part1) .part(2, day11::part2)) .day(Day::new(12) .part(1, day12::part1) .part(2, day12::part2)) .day(Day::new(13) .part(1, day13::part1) .part(2, day13::part2)) .day(Day::new(14) .part(1, day14::part1) .part(2, day14::part2)) .day(Day::new(15) .part(1, day15::part1) .part(2, day15::part2)) .day(Day::new(16) .part(1, day16::part1) .part(2, day16::part2)) .day(Day::new(17) .part(1, day17::part1) .part(2, day17::part2)) .day(Day::new(18) .part(1, day18::part1) .part(2, day18::part2)) .day(Day::new(19) .part(1, day19::part1) 
.part(2, day19::part2)) .day(Day::new(20) .part(1, day20::part1) .part(2, day20::part2)) .day(Day::new(21) .part(1, day21::part1) .part(2, day21::part2)) .day(Day::new(22) .part(1, day22::part1) .part(2, day22::part2)) .day(Day::new(23) .part(1, day23::part1) .part(2, day23::part2)) .day(Day::new(24) .part(1, day24::part1) .part(2, day24::part2)) .day(Day::new(25) .part(1, day25::part1) .part(2, day25::part2)); match get_input(m.value_of("INPUT").unwrap()) { Ok(input) => print!("{}", aoc.run(day, input)), Err(err) => println!("Error: {}", err), }; } fn get_input<P: AsRef<str>>(filename: P) -> io::Result<String> { if filename.as_ref() == "-" { let mut data = String::new(); return io::stdin().read_to_string(&mut data).map(move |_| data); } fs::read_to_string(filename.as_ref()) }
use std::io::{self, Read}; use std::fs; use clap::{App, Arg}; use aoclib::{self, AOC, Day}; pub(crate) mod parse; pub(crate) mod error; mod day1; mod day2; mod day3; mod day4; mod day6; mod day7; mod day8; mod day9; mod day10; mod day11; mod day12; mod day13; mod day14; mod day15; mod day16; mod day17; mod day18; mod day19; mod day20; mod day21; mod day22; mod day23; mod day24; mod day25; fn main() { let m = App::new("Advent of Code 2020 solvers") .author("Kyle Willmon <kylewillmon@gmail.com>") .arg(Arg::from_usage("<INPUT> 'Sets the input file to use'")) .arg(Arg::with_name("day") .short("d") .long("day") .takes_value(true) .help("day to solve")) .get_matches(); let day = match m.value_of("day") { None => None, Some(val) => match val.parse::<u8>() { Ok(val) => Some(val), Err(err) => { println!("Invalid day {:?}: {}", val, err); return; } } }; let aoc = AOC::new() .day(Day::new(1) .part(1, day1::part1) .part(2, day1::part2)) .day(Day::new(2) .part(1, day2::part1) .part(2, day2::part2)) .day(Day::new(3) .part(1, day3::part1) .part(2, day3::part2)) .day(Day::new(4) .part(1, day4::part1) .part(2, day4::part2)) .day(Day::new(6) .part(1, day6::part1) .part(2, day6::part2)) .day(Day::new(7) .part(1, day7::part1) .part(2, day7::part2)) .day(Day::n
et_input<P: AsRef<str>>(filename: P) -> io::Result<String> { if filename.as_ref() == "-" { let mut data = String::new(); return io::stdin().read_to_string(&mut data).map(move |_| data); } fs::read_to_string(filename.as_ref()) }
ew(8) .part(1, day8::part1) .part(2, day8::part2)) .day(Day::new(9) .part(1, day9::part1) .part(2, day9::part2)) .day(Day::new(10) .part(1, day10::part1) .part(2, day10::part2)) .day(Day::new(11) .part(1, day11::part1) .part(2, day11::part2)) .day(Day::new(12) .part(1, day12::part1) .part(2, day12::part2)) .day(Day::new(13) .part(1, day13::part1) .part(2, day13::part2)) .day(Day::new(14) .part(1, day14::part1) .part(2, day14::part2)) .day(Day::new(15) .part(1, day15::part1) .part(2, day15::part2)) .day(Day::new(16) .part(1, day16::part1) .part(2, day16::part2)) .day(Day::new(17) .part(1, day17::part1) .part(2, day17::part2)) .day(Day::new(18) .part(1, day18::part1) .part(2, day18::part2)) .day(Day::new(19) .part(1, day19::part1) .part(2, day19::part2)) .day(Day::new(20) .part(1, day20::part1) .part(2, day20::part2)) .day(Day::new(21) .part(1, day21::part1) .part(2, day21::part2)) .day(Day::new(22) .part(1, day22::part1) .part(2, day22::part2)) .day(Day::new(23) .part(1, day23::part1) .part(2, day23::part2)) .day(Day::new(24) .part(1, day24::part1) .part(2, day24::part2)) .day(Day::new(25) .part(1, day25::part1) .part(2, day25::part2)); match get_input(m.value_of("INPUT").unwrap()) { Ok(input) => print!("{}", aoc.run(day, input)), Err(err) => println!("Error: {}", err), }; } fn g
random
[ { "content": "pub fn part2(input: String) -> Result<usize, AocError> {\n\n let mut total = 0;\n\n for entry in input.split(\"\\n\\n\") {\n\n let cf = entry.parse::<CustomsForm>()?;\n\n total += cf.everyone_yes_count();\n\n }\n\n Ok(total)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n u...
Rust
verifier/src/verify_merkle.rs
patrickbiel01/Cairo_Verifier
c174b5d5bc906cb64c832534ffac74268bd9b308
use num256::uint256::Uint256 as Uint256; use crate::uint256_ops; pub fn get_hash_mask() -> Uint256 { return uint256_ops::get_uint256("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000000"); } /* Verifies a Merkle tree decommitment for n leaves in a Merkle tree with N leaves. The inputs data sits in the queue at queuePtr. Each slot in the queue contains a 32 bytes leaf index and a 32 byte leaf value. The indices need to be in the range [N..2*N-1] and strictly incrementing. Decommitments are read from the channel in the ctx. The input data is destroyed during verification. Queue Structure: 0 1 [Index, Hash/Value] [Index, Hash/Value] */ pub fn verify_merkle( channel_idx: usize, ctx: &mut Vec<Uint256>, queue_idx: usize, root: Uint256, unique_queries: usize ) -> Uint256 { let l_hash_mask = get_hash_mask(); let max_merkle_verifier_queries: usize = 128; assert!(unique_queries <= max_merkle_verifier_queries); let hashes_index: usize = queue_idx + 1; let slot_size: usize = 2; let queue_size: usize = slot_size * unique_queries; let mut rd_idx: usize = 0; let mut wr_idx: usize = 0; let mut index: Uint256 = ctx[queue_idx + rd_idx].clone(); let mut proof_idx = uint256_ops::to_usize( &ctx[channel_idx] ); let mut sibling_data: Vec<[u8; 32]> = vec![ [0; 32], [0; 32] ]; while index > uint256_ops::get_uint256("1") { let sibling_index = uint256_ops::to_usize(&index) ^ 1; let sibling_offset = sibling_index % 2; sibling_data[1 ^ sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[rd_idx + hashes_index] ); rd_idx = ( rd_idx + slot_size ) % queue_size; let mut new_hash_index = proof_idx; proof_idx += 1; ctx[queue_idx + wr_idx] = index / uint256_ops::get_uint256("2"); index = ctx[queue_idx + rd_idx].clone(); if index == Uint256::from_bytes_le( &sibling_index.to_le_bytes() ) { new_hash_index = hashes_index + rd_idx; proof_idx -= 1; rd_idx = (rd_idx + slot_size) % queue_size; index = ctx[queue_idx + rd_idx].clone(); } sibling_data[sibling_offset] = uint256_ops::to_fixed_bytes( 
&ctx[new_hash_index] ); let mut combined_data: [u8; 64] = [0; 64]; for i in 0..31 { combined_data[i] = sibling_data[0][i]; combined_data[i + 32] = sibling_data[1][i]; } let sibling_hash = uint256_ops::keccak_256(&combined_data); ctx[hashes_index + wr_idx] = uint256_ops::bitwise_and( &l_hash_mask, &sibling_hash ); wr_idx = (wr_idx + slot_size) % queue_size; } let hash = ctx[rd_idx + hashes_index].clone(); ctx[channel_idx] = Uint256::from_bytes_le( &proof_idx.to_le_bytes() ); assert!(hash == root); return root; }
use num256::uint256::Uint256 as Uint256; use crate::uint256_ops; pub fn get_hash_mask() -> Uint256 { return uint256_ops::get_uint256("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000000"); } /* Verifies a Merkle tree decommitment for n leaves in a Merkle tree with N leaves. The inputs data sits in the queue at queuePtr. Each slot in the queue contains a 32 bytes leaf index and a 32 byte leaf value. The indices need to be in the range [N..2*N-1] and strictly incrementing. Decommitments are read from the channel in the ctx. The input data is destroyed during verification. Queue Structure: 0 1 [Index, Hash/Value] [Index, Hash/Value] */ pub fn verify_merkle( channel_idx: usize, ctx: &mut Vec<Uint256>, queue_idx: usize, root: Uint256, unique_queries: usize ) -> Uint256 { let l_hash_mask = get_hash_mask(); let max_merkle_verifier_queries: usize = 128; assert!(unique_queries <= max_merkle_verifier_queries); let hashes_index: usize = queue_idx + 1; let slot_size: usize = 2; let queue_size: usize = slot_size * unique_queries; let mut rd_idx: usize = 0; let mut wr_idx: usize = 0; let mut index: Uint256 = ctx[queue_idx + rd_idx].clone(); let mut proof_idx = uint256_ops::to_usize( &ctx[channel_idx] ); let mut sibling_data: Vec<[u8; 32]> = vec![ [0; 32], [0; 32] ]; while index > uint256_ops::get_uint256("1") { let sibling_index = uint256_ops::to_usize(&index) ^ 1; let sibling_offset = sibling_inde
x % 2; sibling_data[1 ^ sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[rd_idx + hashes_index] ); rd_idx = ( rd_idx + slot_size ) % queue_size; let mut new_hash_index = proof_idx; proof_idx += 1; ctx[queue_idx + wr_idx] = index / uint256_ops::get_uint256("2"); index = ctx[queue_idx + rd_idx].clone(); if index == Uint256::from_bytes_le( &sibling_index.to_le_bytes() ) { new_hash_index = hashes_index + rd_idx; proof_idx -= 1; rd_idx = (rd_idx + slot_size) % queue_size; index = ctx[queue_idx + rd_idx].clone(); } sibling_data[sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[new_hash_index] ); let mut combined_data: [u8; 64] = [0; 64]; for i in 0..31 { combined_data[i] = sibling_data[0][i]; combined_data[i + 32] = sibling_data[1][i]; } let sibling_hash = uint256_ops::keccak_256(&combined_data); ctx[hashes_index + wr_idx] = uint256_ops::bitwise_and( &l_hash_mask, &sibling_hash ); wr_idx = (wr_idx + slot_size) % queue_size; } let hash = ctx[rd_idx + hashes_index].clone(); ctx[channel_idx] = Uint256::from_bytes_le( &proof_idx.to_le_bytes() ); assert!(hash == root); return root; }
function_block-function_prefixed
[ { "content": "pub fn read_hash(channel_idx: usize, mix: bool, ctx: &mut Vec<Uint256>) -> Uint256 {\n\n\tlet val = read_bytes(channel_idx, mix, ctx);\n\n\treturn val;\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 0, "score": 340848.0792643196 }, { "content"...
Rust
src/watcher.rs
rakaly/desktop
4d16ef506704109c4300410a2f6fc12d2fef51e7
use anyhow::{anyhow, Context}; use flate2::bufread::GzEncoder; use flate2::Compression; use log::{debug, info, warn}; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use std::fs::File; use std::io::{BufReader, Read}; use std::path::Path; use std::sync::mpsc::channel; use std::time::Duration; pub struct Client { pub username: String, pub api_key: String, pub api_url: String, } impl Client { fn upload_zip(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let size = file.metadata().map(|m| m.len()).unwrap_or(0); let reader = BufReader::new(file); let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Length", &size.to_string()) .set("Content-Type", "application/zip") .send(reader); if resp.ok() { Ok(()) } else { let err = resp .into_string() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } } fn upload_txt(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let reader = BufReader::new(file); let mut buffer = Vec::new(); let mut gz = GzEncoder::new(reader, Compression::new(4)); gz.read_to_end(&mut buffer).context("unable to compress")?; let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Encoding", "gzip") .send_bytes(&buffer); if resp.ok() { Ok(()) } else { let err = resp .into_string() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } } } pub fn core_loop(watch_dir: &Path, client: &Client) -> anyhow::Result<()> { let (tx, rx) = channel(); let mut watcher = watcher(tx, Duration::from_secs(5)) .with_context(|| "unable to create file watcher".to_string())?; watcher .watch(watch_dir, RecursiveMode::Recursive) .with_context(|| format!("unable to watch: {}", watch_dir.display()))?; info!("watching directory for save files: {}", watch_dir.display()); 
log::logger().flush(); loop { match rx.recv() { Ok(DebouncedEvent::Error(e, path)) => { if let Some(path) = path { warn!("watch error on {}: {:?}", path.as_path().display(), e); } else { warn!("watch error: {:?}", e); } } Ok(DebouncedEvent::Write(path)) | Ok(DebouncedEvent::Create(path)) => { if !path.as_path().extension().map_or(false, |x| x == "eu4") { continue; } let path_display = path.as_path().display(); info!("detected write: {}", path_display); match process_file(client, &path) { Ok(_) => info!("successfully uploaded {}", path_display), Err(e) => warn!("{:?}", e), } } Ok(event) => { debug!("{:?}", event); continue; } Err(e) => warn!("watch error: {:?}", e), } log::logger().flush(); } } fn process_file(client: &Client, path: &Path) -> anyhow::Result<()> { let path_display = path.display(); let magic = { let mut buffer = [0; 4]; let mut file = File::open(path).with_context(|| format!("unable to open: {}", path_display))?; file.read_exact(&mut buffer) .with_context(|| format!("unable to read: {}", path_display))?; buffer }; match magic { [0x50, 0x4b, 0x03, 0x04] => client .upload_zip(&path) .with_context(|| format!("unable to upload zip: {}", path_display)), [b'E', b'U', b'4', b't'] => client .upload_txt(&path) .with_context(|| format!("unable to upload txt: {}", path_display)), x => Err(anyhow!( "unexpected file signature: {:?} - {}", x, path_display )), } }
use anyhow::{anyhow, Context}; use flate2::bufread::GzEncoder; use flate2::Compression; use log::{debug, info, warn}; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use std::fs::File; use std::io::{BufReader, Read}; use std::path::Path; use std::sync::mpsc::channel; use std::time::Duration; pub struct Client { pub username: String, pub api_key: String, pub api_url: String, } impl Client { fn upload_zip(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let size = file.metadata().map(|m| m.len()).unwrap_or(0); let reader = BufReader::new(file); let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Length", &size.to_string()) .set("Content-Type", "application/zip") .send(reader); if resp.ok() { Ok(()) } else { let err = resp .into_strin
fn upload_txt(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let reader = BufReader::new(file); let mut buffer = Vec::new(); let mut gz = GzEncoder::new(reader, Compression::new(4)); gz.read_to_end(&mut buffer).context("unable to compress")?; let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Encoding", "gzip") .send_bytes(&buffer); if resp.ok() { Ok(()) } else { let err = resp .into_string() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } } } pub fn core_loop(watch_dir: &Path, client: &Client) -> anyhow::Result<()> { let (tx, rx) = channel(); let mut watcher = watcher(tx, Duration::from_secs(5)) .with_context(|| "unable to create file watcher".to_string())?; watcher .watch(watch_dir, RecursiveMode::Recursive) .with_context(|| format!("unable to watch: {}", watch_dir.display()))?; info!("watching directory for save files: {}", watch_dir.display()); log::logger().flush(); loop { match rx.recv() { Ok(DebouncedEvent::Error(e, path)) => { if let Some(path) = path { warn!("watch error on {}: {:?}", path.as_path().display(), e); } else { warn!("watch error: {:?}", e); } } Ok(DebouncedEvent::Write(path)) | Ok(DebouncedEvent::Create(path)) => { if !path.as_path().extension().map_or(false, |x| x == "eu4") { continue; } let path_display = path.as_path().display(); info!("detected write: {}", path_display); match process_file(client, &path) { Ok(_) => info!("successfully uploaded {}", path_display), Err(e) => warn!("{:?}", e), } } Ok(event) => { debug!("{:?}", event); continue; } Err(e) => warn!("watch error: {:?}", e), } log::logger().flush(); } } fn process_file(client: &Client, path: &Path) -> anyhow::Result<()> { let path_display = path.display(); let magic = { let mut buffer = [0; 4]; let mut file = File::open(path).with_context(|| format!("unable to open: {}", path_display))?; file.read_exact(&mut buffer) 
.with_context(|| format!("unable to read: {}", path_display))?; buffer }; match magic { [0x50, 0x4b, 0x03, 0x04] => client .upload_zip(&path) .with_context(|| format!("unable to upload zip: {}", path_display)), [b'E', b'U', b'4', b't'] => client .upload_txt(&path) .with_context(|| format!("unable to upload txt: {}", path_display)), x => Err(anyhow!( "unexpected file signature: {:?} - {}", x, path_display )), } }
g() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } }
function_block-function_prefixed
[ { "content": "pub fn write_minimal_config<P: AsRef<Path>>(\n\n input: &UserInputConfig,\n\n destination: P,\n\n) -> anyhow::Result<UploaderConfig> {\n\n let path = destination.as_ref();\n\n let config_data =\n\n toml::ser::to_vec(&input).context(\"unable to serialize user input to a config\")...
Rust
src/compile.rs
shino16/cargo-auto-bundle
1cbd9fe1a3a8ea7f6ca499372a6d9eb7ca0edd8a
use super::ModPath; use anyhow::Result; use itertools::Itertools; use proc_macro2::{Ident, Span}; use quote::ToTokens; use std::{ collections::BTreeMap, path::{Path, PathBuf}, }; pub fn compile( crate_name: &str, paths: &[ModPath], file_paths: &[PathBuf], mod_visibility: BTreeMap<ModPath, String>, macros: &[String], ) -> Result<String> { let mut res = String::new(); let mut location = ModPath::new(); for (path, file_path) in paths.into_iter().zip(file_paths) { let base = location .iter() .zip(path.iter()) .take_while(|(a, b)| a == b) .count(); while location.len() > base { let p = location.pop().unwrap(); res += &format!("\n}} // mod {}\n", p); } while location.len() < path.len() { let name = &path[location.len()]; location.push(name.clone()); if mod_visibility .get(&location) .filter(|s| s.is_empty()) .is_some() { res += &format!("\nmod {} {{\n", name); } else { let vis = mod_visibility .get(&location) .cloned() .unwrap_or("pub".to_owned()); res += &format!("\n{} mod {} {{\n", vis, name); } } res += "\n"; res += &read_process(&file_path, crate_name, false, macros)?; } while let Some(p) = location.pop() { res += &format!("\n}} // mod {}\n", p); } Ok(reduce_newline(res)) } fn reduce_newline(mut s: String) -> String { let bytes = unsafe { s.as_bytes_mut() }; let mut j = 0; let mut newline_cnt = 0; for i in 0..bytes.len() { if bytes[i] == b'\n' { newline_cnt += 1; } else { newline_cnt = 0; } if newline_cnt <= 2 { bytes[j] = bytes[i]; j += 1; } } s.truncate(j); s } pub fn compile_entry(path: &Path, crate_name: &str, macros: &[String]) -> Result<String> { Ok(read_process(path, crate_name, true, macros)?) 
} fn read_process<'a>( file_path: &Path, crate_name: &'a str, external: bool, macros: &[String], ) -> Result<String> { use syn::visit::Visit; struct Visitor<'ast, 'a, 'b> { use_spans: Vec<(&'ast Ident, Span)>, remove_spans: Vec<(Span, Span)>, crate_name: &'a str, macros: &'b [String], } impl<'ast, 'a, 'b> Visit<'ast> for Visitor<'ast, 'a, 'b> { fn visit_item_use(&mut self, item: &'ast syn::ItemUse) { if let syn::UseTree::Path(ref path) = item.tree { if let syn::UseTree::Name(ref name) = *path.tree { if path.ident == self.crate_name && self.macros.contains(&name.ident.to_string()) { if path.ident != "crate" { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } return; } } self.use_spans.push((&path.ident, path.ident.span())); }; } fn visit_item_mod(&mut self, item: &'ast syn::ItemMod) { if item.semi.is_some() { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } syn::visit::visit_item_mod(self, item); } } let content = std::fs::read_to_string(file_path)?; let file = syn::parse_file(&content)?; let mut visitor = Visitor { use_spans: Vec::new(), remove_spans: Vec::new(), crate_name: if external { crate_name } else { "crate" }, macros, }; visitor.visit_file(&file); let mut targets = Vec::new(); for (ident, span) in visitor.use_spans { if !external && ident.to_string() == "crate" { targets.push((span.end(), span.end(), format!("::{}", crate_name))); } if external && ident.to_string() == crate_name { targets.push((span.start(), span.start(), "crate::".to_owned())); } } for (start, end) in visitor.remove_spans { targets.push((start.start(), end.end(), "".to_owned())); } targets.sort_unstable(); let lines = content.lines().collect_vec(); if lines.is_empty() { return Ok("".to_owned()); } let (mut line_pos, mut col_pos) = (0, 0); let mut res 
= String::new(); for (start, end, pat) in targets { while line_pos < start.line - 1 { res += &lines[line_pos][col_pos..]; res += "\n"; line_pos += 1; col_pos = 0; } if pat.is_empty() && lines[start.line - 1][..start.column] .chars() .all(|c| c.is_ascii_whitespace()) && lines[end.line - 1][end.column..] .chars() .all(|c| c.is_ascii_whitespace()) { line_pos = end.line; col_pos = 0; } else { res += &lines[line_pos][..start.column]; res += &pat; line_pos = end.line - 1; col_pos = end.column; } } if line_pos < lines.len() { res += &lines[line_pos][col_pos..]; res += "\n"; lines[line_pos + 1..].into_iter().for_each(|line| { res += line; res += "\n"; }); } Ok(res) }
use super::ModPath; use anyhow::Result; use itertools::Itertools; use proc_macro2::{Ident, Span}; use quote::ToTokens; use std::{ collections::BTreeMap, path::{Path, PathBuf}, }; pub fn compile( crate_name: &str, paths: &[ModPath], file_paths: &[PathBuf], mod_visibility: BTreeMap<ModPath, String>, macros: &[String], ) -> Result<String> { let mut res = String::new(); let mut location = ModPath::new(); for (path, file_path) in paths.into_iter().zip(file_paths) { let base = location .iter() .zip(path.iter()) .take_while(|(a, b)| a == b) .count(); while location.len() > base { let p = location.pop().unwrap(); res += &format!("\n}} // mod {}\n", p); } while location.len() < path.len() { let name = &path[location.len()]; location.push(name.clone()); if mod_visibility .get(&location) .filter(|s| s.is_empty()) .is_some() { res += &format!("\nmod {} {{\n", name); } else { let vis = mod_visibility .get(&location) .cloned() .unwrap_or("pub".to_owned()); res += &format!("\n{} mod {} {{\n", vis, name); } } res += "\n"; res += &read_process(&file_path, crate_name, false, macros)?; } while let Some(p) = location.pop() { res += &format!("\n}} // mod {}\n", p); } Ok(reduce_newline(res)) } fn reduce_newline(mut s: String) -> String { let bytes = unsafe { s.as_bytes_mut() }; let mut j = 0; let mut newline_cnt = 0; for i in 0..bytes.len() { if bytes[i] == b'\n' { newline_cnt += 1; } else { newline_cnt = 0; } if newline_cnt <= 2 { bytes[j] = bytes[i]; j += 1; } } s.truncate(j); s } pub fn compile_entry(path: &Path, crate_name: &str, macros: &[String]) -> Result<String> { Ok(read_process(path, crate_name, true, macros)?) 
} fn read_process<'a>( file_path: &Path, crate_name: &'a str, external: bool, macros: &[String], ) -> Result<String> { use syn::visit::Visit; struct Visitor<'ast, 'a, 'b> { use_spans: Vec<(&'ast Ident, Span)>, remove_spans: Vec<(Span, Span)>, crate_name: &'a str, macros: &'b [String], } impl<'ast, 'a, 'b> Visit<'ast> for Visitor<'ast, 'a, 'b> { fn visit_item_use(&mut self, item: &'ast syn::ItemUse) { if let syn::UseTree::Path(ref path) = item.tree { if let syn::UseTree::Name(ref name) = *path.tree { if path.ident == self.crate_name && self.macros.contains(&name.ident.to_string()) { if path.ident != "crate" { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } return; } } self.use_spans.push((&path.ident, path.ident.span())); }; }
fn visit_item_mod(&mut self, item: &'ast syn::ItemMod) { if item.semi.is_some() { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } syn::visit::visit_item_mod(self, item); } } let content = std::fs::read_to_string(file_path)?; let file = syn::parse_file(&content)?; let mut visitor = Visitor { use_spans: Vec::new(), remove_spans: Vec::new(), crate_name: if external { crate_name } else { "crate" }, macros, }; visitor.visit_file(&file); let mut targets = Vec::new(); for (ident, span) in visitor.use_spans { if !external && ident.to_string() == "crate" { targets.push((span.end(), span.end(), format!("::{}", crate_name))); } if external && ident.to_string() == crate_name { targets.push((span.start(), span.start(), "crate::".to_owned())); } } for (start, end) in visitor.remove_spans { targets.push((start.start(), end.end(), "".to_owned())); } targets.sort_unstable(); let lines = content.lines().collect_vec(); if lines.is_empty() { return Ok("".to_owned()); } let (mut line_pos, mut col_pos) = (0, 0); let mut res = String::new(); for (start, end, pat) in targets { while line_pos < start.line - 1 { res += &lines[line_pos][col_pos..]; res += "\n"; line_pos += 1; col_pos = 0; } if pat.is_empty() && lines[start.line - 1][..start.column] .chars() .all(|c| c.is_ascii_whitespace()) && lines[end.line - 1][end.column..] .chars() .all(|c| c.is_ascii_whitespace()) { line_pos = end.line; col_pos = 0; } else { res += &lines[line_pos][..start.column]; res += &pat; line_pos = end.line - 1; col_pos = end.column; } } if line_pos < lines.len() { res += &lines[line_pos][col_pos..]; res += "\n"; lines[line_pos + 1..].into_iter().for_each(|line| { res += line; res += "\n"; }); } Ok(res) }
function_block-function_prefix_line
[ { "content": "fn visit_use_file(path: &Path) -> Result<Vec<ModPath>> {\n\n use syn::UseTree::{self, *};\n\n fn dfs(tree: &UseTree, prefix: &mut ModPath, buf: &mut Vec<ModPath>) {\n\n match tree {\n\n Path(path) => {\n\n prefix.push(path.ident.to_string());\n\n ...
Rust
fabric_contract/src/dataapi/wirebuffer.rs
wtllc/fabric-contract-api-rust
6cf261d7795f1e26169934757422bf13772c9589
/* * SPDX-License-Identifier: Apache-2.0 */ use super::TypeSchema; use std::fmt::Debug; pub struct WireBuffer { pub buffer: Option<Vec<u8>>, pub schema: TypeSchema, } impl WireBuffer { pub fn new( buffer: Vec<u8>, schema: TypeSchema, /*, converter: Box<dyn Converter>*/ ) -> Self { Self { buffer: Some(buffer), schema, } } pub fn new_unfilled(schema: TypeSchema /*, converter: Box<dyn Converter>*/) -> Self { Self { buffer: Option::None, schema, } } } impl Debug for WireBuffer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.buffer { Some(b) =>{ write!(f, "WireBuffer: {:?}", b.as_slice()) }, None => { write!(f, "WireBuffer: <emptry>") } } } } impl From<&WireBuffer> for String { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => std::str::from_utf8(&buffer).unwrap().to_string(), None => "".to_string(), } } } impl From<&WireBuffer> for i32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => i32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } impl From<&WireBuffer> for u32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => u32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } pub trait WireBufferFromReturnType<T> { fn from_rt(self: &mut Self, _: T); } impl WireBufferFromReturnType<String> for WireBuffer { fn from_rt(self: &mut Self, s: String) { self.buffer = Some(s.into_bytes()); } } impl WireBufferFromReturnType<()> for WireBuffer { fn from_rt(self: &mut Self, _: ()) { self.buffer = None; } } impl WireBufferFromReturnType<bool> for WireBuffer { fn from_rt(self: &mut Self, b: bool) { self.buffer = match b { true => Some(b"true".to_vec()), false => Some(b"false".to_vec()), }; } } impl WireBufferFromReturnType<i8> for WireBuffer { fn from_rt(self: &mut Self, s: i8) { self.buffer = Some(s.to_string().into_bytes()); } } impl 
WireBufferFromReturnType<i16> for WireBuffer { fn from_rt(self: &mut Self, s: i16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i32> for WireBuffer { fn from_rt(self: &mut Self, s: i32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i64> for WireBuffer { fn from_rt(self: &mut Self, s: i64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<isize> for WireBuffer { fn from_rt(self: &mut Self, s: isize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u8> for WireBuffer { fn from_rt(self: &mut Self, s: u8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u16> for WireBuffer { fn from_rt(self: &mut Self, s: u16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u32> for WireBuffer { fn from_rt(self: &mut Self, s: u32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u64> for WireBuffer { fn from_rt(self: &mut Self, s: u64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<usize> for WireBuffer { fn from_rt(self: &mut Self, s: usize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f32> for WireBuffer { fn from_rt(self: &mut Self, s: f32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f64> for WireBuffer { fn from_rt(self: &mut Self, s: f64) { self.buffer = Some(s.to_string().into_bytes()); } }
/* * SPDX-License-Identifier: Apache-2.0 */ use super::TypeSchema; use std::fmt::Debug; pub struct WireBuffer { pub buffer: Option<Vec<u8>>, pub schema: TypeSchema, } impl WireBuffer { pub fn new( buffer: Vec<u8>, schema: TypeSchema, /*, converter: Box<dyn Converter>*/ ) -> Self { Self { buffer: Some(buffer), schema, } } pub fn new_unfilled(schema: TypeSchema /*, converter: Box<dyn Converter>*/) -> Self { Self { buffer: Option::None, schema, } } } impl Debug for WireBuffer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.buffer { Some(b) =>{ write!(f, "WireBuffer: {:?}", b.as_slice()) }, None => { write!(f, "WireBuffer: <emptry>") } } } } impl From<&WireBuffer> for String { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => std::str::from_utf8(&buffer).unwrap().to_string(), None => "".to_string(), } } } impl From<&WireBuffer> for i32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => i32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } impl From<&WireBuffer> for u32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => u32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } pub trait WireBufferFromReturnType<T> { fn from_rt(self: &mut Self, _: T); } impl WireBufferFromReturnType<String> for WireBuffer { fn from_rt(self: &mut Self, s: String) { self.buffer = Some(s.into_bytes()); } } impl WireBufferFromReturnType<()> for WireBuffer { fn from_rt(self: &mut Self, _: ()) { self.buffer = None; } } impl WireBufferFromReturnType<bool> for WireBuffer {
} impl WireBufferFromReturnType<i8> for WireBuffer { fn from_rt(self: &mut Self, s: i8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i16> for WireBuffer { fn from_rt(self: &mut Self, s: i16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i32> for WireBuffer { fn from_rt(self: &mut Self, s: i32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i64> for WireBuffer { fn from_rt(self: &mut Self, s: i64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<isize> for WireBuffer { fn from_rt(self: &mut Self, s: isize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u8> for WireBuffer { fn from_rt(self: &mut Self, s: u8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u16> for WireBuffer { fn from_rt(self: &mut Self, s: u16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u32> for WireBuffer { fn from_rt(self: &mut Self, s: u32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u64> for WireBuffer { fn from_rt(self: &mut Self, s: u64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<usize> for WireBuffer { fn from_rt(self: &mut Self, s: usize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f32> for WireBuffer { fn from_rt(self: &mut Self, s: f32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f64> for WireBuffer { fn from_rt(self: &mut Self, s: f64) { self.buffer = Some(s.to_string().into_bytes()); } }
fn from_rt(self: &mut Self, b: bool) { self.buffer = match b { true => Some(b"true".to_vec()), false => Some(b"false".to_vec()), }; }
function_block-full_function
[ { "content": "pub trait Converter {\n\n fn into_string(&self, buffer: &[u8], ts: &TypeSchema) -> String;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct JSONConverter {}\n\n\n\nimpl Converter for JSONConverter {\n\n // straight conversion\n\n fn into_string(&self, buffer: &[u8], ts: &TypeSchema...
Rust
src/imapw.rs
mordak/runt
03ac329c54d61a05d2390f28c963f2a7d8c9a590
use crate::config::Account; use imap::extensions::idle; use imap::types::{Fetch, Flag, Mailbox, Name, Uid, UnsolicitedResponse, ZeroCopy}; use imap::Session; use imap::{Client, ClientBuilder}; use rustls_connector::TlsStream as RustlsStream; use std::convert::From; use std::net::TcpStream; use std::ops::Deref; use std::time::Duration; use std::vec::Vec; pub enum FetchResult<'a> { Uid(UidResult<'a>), Other(&'a Fetch), } #[derive(Debug)] pub struct UidResult<'a> { fetch: &'a Fetch, } impl<'a> UidResult<'a> { pub fn uid(&self) -> Uid { self.fetch.uid.unwrap() } pub fn size(&self) -> u32 { self.fetch.size.unwrap() } pub fn internal_date_millis(&self) -> i64 { self.fetch.internal_date().unwrap().timestamp_millis() } pub fn flags(&self) -> &[Flag] { self.fetch.flags() } } impl<'a> From<&'a Fetch> for FetchResult<'a> { fn from(fetch: &'a Fetch) -> FetchResult<'a> { if fetch.uid.is_some() && fetch.size.is_some() && fetch.internal_date().is_some() { FetchResult::Uid(UidResult { fetch }) } else { FetchResult::Other(fetch) } } } pub struct Imap { session: Session<RustlsStream<TcpStream>>, mailbox: Option<String>, qresync: bool, } impl Imap { pub fn new(config: &Account) -> Result<Imap, String> { let client = Imap::connect(config)?; let mut session = client .login(config.username.as_str(), config.password.as_ref().unwrap()) .map_err(|e| format!("Login failed: {:?}", e.0))?; let capabilities = session .capabilities() .map_err(|e| format!("CAPABILITIES Error: {}", e))?; let mut missing = Vec::new(); if !capabilities.deref().has_str("ENABLE") { missing.push("ENABLE"); } if !capabilities.deref().has_str("UIDPLUS") { missing.push("UIDPLUS"); } if !capabilities.deref().has_str("IDLE") { missing.push("IDLE"); } if !missing.is_empty() { return Err(format!("Missing capability: {}", missing.join(" "))); } Ok(Imap { session, mailbox: None, qresync: capabilities.deref().has_str("QRESYNC"), }) } #[allow(dead_code)] pub fn debug(&mut self, enable: bool) { self.session.debug = enable; } fn 
connect(config: &Account) -> Result<Client<RustlsStream<TcpStream>>, String> { ClientBuilder::new(&config.server, config.port.unwrap()) .rustls() .map_err(|e| format!("Connection to {:?} failed: {}", &config.server, e)) } pub fn list( &mut self, reference_name: Option<&str>, mailbox_pattern: Option<&str>, ) -> Result<ZeroCopy<Vec<Name>>, String> { self.session .list(reference_name, mailbox_pattern) .map_err(|e| format!("LIST failed: {}", e)) } pub fn idle(&mut self) -> Result<(), String> { /* IDLE Builder - not released yet self.session .idle() .timeout(Duration::from_secs(10 * 60)) .wait_while(idle::stop_on_any) .map_err(|e| format!("{}", e)) .map(|_| ()) */ self.session .idle() .map_err(|e| format!("{}", e)) .and_then(|mut i| { i.set_keepalive(Duration::from_secs(10 * 60)); i.wait_keepalive_while(idle::stop_on_any) .map_err(|e| format!("{}", e)) }) .map(|_| ()) } pub fn fetch_uid(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch( format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS BODY.PEEK[])", ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uid_meta(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch(format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS)") .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uids( &mut self, first: u32, last: Option<u32>, changedsince: Option<u64>, ) -> Result<ZeroCopy<Vec<Fetch>>, String> { let range = match last { None => format!("{}:*", first), Some(n) if n > first => format!("{}:{}", first, n), _ => return Err(format!("Invalid range {}:{}", first, last.unwrap())), }; let qresync = match changedsince { None => "".to_string(), Some(n) => format!(" (CHANGEDSINCE {} VANISHED)", n), }; self.session .uid_fetch( range, format!("(UID RFC822.SIZE INTERNALDATE FLAGS){}", qresync), ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn enable_qresync(&mut self) -> Result<(), String> { self.session 
.run_command_and_check_ok("ENABLE QRESYNC") .map_err(|e| format!("ENABLE QRESYNC Error: {}", e)) } pub fn can_qresync(&self) -> bool { self.qresync } pub fn select_mailbox(&mut self, mailbox: &str) -> Result<Mailbox, String> { self.session .select(mailbox) .map_err(|e| format!("SELECT {} failed: {}", mailbox, e)) .map(|mbox| { self.mailbox = Some(mailbox.to_string()); mbox }) } pub fn logout(&mut self) -> Result<(), String> { self.session .logout() .map_err(|e| format!("LOGOUT failed: {}", e)) } pub fn delete_uid(&mut self, uid: u32) -> Result<(), String> { self.session .uid_store(format!("{}", uid), "+FLAGS (\\Deleted)") .map_err(|e| format!("STORE UID {} +Deleted failed: {}", uid, e))?; self.session .uid_expunge(format!("{}", uid)) .map_err(|e| format!("EXPUNGE UID {} failed: {}", uid, e))?; Ok(()) } pub fn append(&mut self, body: &[u8], flags: &[Flag]) -> Result<(), String> { if self.mailbox.is_none() { return Err("No mailbox selected".to_string()); } let r = self .session .append(self.mailbox.as_ref().unwrap(), body) .flags(flags.iter().cloned()) .finish() .map_err(|e| e.to_string()); r } /* pub fn replace_uid(&mut self, uid: u32, body: &[u8]) -> Result<(), String> { // Fetch the current flags so we can copy them to the new message. 
let zc_vec_fetch = self.fetch_uid_meta(uid)?; let mut uidres: Option<UidResult> = None; for fetch in zc_vec_fetch.deref() { if let FetchResult::Uid(res) = FetchResult::from(fetch) { if res.uid() == uid { uidres.replace(res); break; } } } if uidres.is_none() { return Err(format!("UID {} not found on server", uid)); } // Append first so if it fails we don't delete the original self.append(body, uidres.unwrap().flags())?; self.delete_uid(uid) } */ pub fn add_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("+FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} +FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn remove_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("-FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} -FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn for_each_unsolicited_response<F>(&mut self, mut f: F) where F: FnMut(UnsolicitedResponse), { while let Ok(u) = self.session.unsolicited_responses.try_recv() { f(u) } } }
use crate::config::Account; use imap::extensions::idle; use imap::types::{Fetch, Flag, Mailbox, Name, Uid, UnsolicitedResponse, ZeroCopy}; use imap::Session; use imap::{Client, ClientBuilder}; use rustls_connector::TlsStream as RustlsStream; use std::convert::From; use std::net::TcpStream; use std::ops::Deref; use std::time::Duration; use std::vec::Vec; pub enum FetchResult<'a> { Uid(UidResult<'a>), Other(&'a Fetch), } #[derive(Debug)] pub struct UidResult<'a> { fetch: &'a Fetch, } impl<'a> UidResult<'a> { pub fn uid(&self) -> Uid { self.fetch.uid.unwrap() } pub fn size(&self) -> u32 { self.fetch.size.unwrap() } pub fn internal_date_millis(&self) -> i64 { self.fetch.internal_date().unwrap().timestamp_millis() } pub fn flags(&self) -> &[Flag] { self.fetch.flags() } } impl<'a> From<&'a Fetch> for FetchResult<'a> { fn from(fetch: &'a Fetch) -> FetchResult<'a> { if fetch.uid.is_some() && fetch.size.is_some() && fetch.internal_date().is_some() { FetchResult::Uid(UidResult { fetch }) } else { FetchResult::Other(fetch) } } } pub struct Imap { session: Session<RustlsStream<TcpStream>>, mailbox: Option<String>, qresync: bool, } impl Imap { pub fn new(config: &Account) -> Result<Imap, String> { let client = Imap::connect(config)?; let mut session = client .login(config.username.as_str(), config.password.as_ref().unwrap()) .map_err(|e| format!("Login failed: {:?}", e.0))?; let capabilities = session .capabilities() .map_err(|e| format!("CAPABILITIES Error: {}", e))?; let mut missing = Vec::new(); if !capabilities.deref().has_str("ENABLE") { missing.push("ENABLE"); } if !capabilities.deref().has_str("UIDPLUS") { missing.push("UIDPLUS"); } if !capabilities.deref().has_str("IDLE") { missing.push("IDLE"); } if !missing.is_empty() { return Err(format!("Missing capability: {}", missing.join(" "))); } Ok(Imap { session, mailbox: None, qresync: capabilities.deref().has_str("QRESYNC"), }) } #[allow(dead_code)] pub fn debug(&mut self, enable: bool) { self.session.debug = enable; } fn 
connect(config: &Account) -> Result<Client<RustlsStream<TcpStream>>, String> { ClientBuilder::new(&config.server, config.port.unwrap()) .rustls() .map_err(|e| format!("Connection to {:?} failed: {}", &config.server, e)) } pub fn list( &mut self, reference_name: Option<&str>, mailbox_pattern: Option<&str>, ) -> Result<ZeroCopy<Vec<Name>>, String> { self.session .list(reference_name, mailbox_pattern) .map_err(|e| format!("LIST failed: {}", e)) } pub fn idle(&mut self) -> Result<(), String> { /* IDLE Builder - not released yet self.session .idle() .timeout(Duration::from_secs(10 * 60)) .wait_whi
last.unwrap())), }; let qresync = match changedsince { None => "".to_string(), Some(n) => format!(" (CHANGEDSINCE {} VANISHED)", n), }; self.session .uid_fetch( range, format!("(UID RFC822.SIZE INTERNALDATE FLAGS){}", qresync), ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn enable_qresync(&mut self) -> Result<(), String> { self.session .run_command_and_check_ok("ENABLE QRESYNC") .map_err(|e| format!("ENABLE QRESYNC Error: {}", e)) } pub fn can_qresync(&self) -> bool { self.qresync } pub fn select_mailbox(&mut self, mailbox: &str) -> Result<Mailbox, String> { self.session .select(mailbox) .map_err(|e| format!("SELECT {} failed: {}", mailbox, e)) .map(|mbox| { self.mailbox = Some(mailbox.to_string()); mbox }) } pub fn logout(&mut self) -> Result<(), String> { self.session .logout() .map_err(|e| format!("LOGOUT failed: {}", e)) } pub fn delete_uid(&mut self, uid: u32) -> Result<(), String> { self.session .uid_store(format!("{}", uid), "+FLAGS (\\Deleted)") .map_err(|e| format!("STORE UID {} +Deleted failed: {}", uid, e))?; self.session .uid_expunge(format!("{}", uid)) .map_err(|e| format!("EXPUNGE UID {} failed: {}", uid, e))?; Ok(()) } pub fn append(&mut self, body: &[u8], flags: &[Flag]) -> Result<(), String> { if self.mailbox.is_none() { return Err("No mailbox selected".to_string()); } let r = self .session .append(self.mailbox.as_ref().unwrap(), body) .flags(flags.iter().cloned()) .finish() .map_err(|e| e.to_string()); r } /* pub fn replace_uid(&mut self, uid: u32, body: &[u8]) -> Result<(), String> { // Fetch the current flags so we can copy them to the new message. 
let zc_vec_fetch = self.fetch_uid_meta(uid)?; let mut uidres: Option<UidResult> = None; for fetch in zc_vec_fetch.deref() { if let FetchResult::Uid(res) = FetchResult::from(fetch) { if res.uid() == uid { uidres.replace(res); break; } } } if uidres.is_none() { return Err(format!("UID {} not found on server", uid)); } // Append first so if it fails we don't delete the original self.append(body, uidres.unwrap().flags())?; self.delete_uid(uid) } */ pub fn add_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("+FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} +FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn remove_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("-FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} -FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn for_each_unsolicited_response<F>(&mut self, mut f: F) where F: FnMut(UnsolicitedResponse), { while let Ok(u) = self.session.unsolicited_responses.try_recv() { f(u) } } }
le(idle::stop_on_any) .map_err(|e| format!("{}", e)) .map(|_| ()) */ self.session .idle() .map_err(|e| format!("{}", e)) .and_then(|mut i| { i.set_keepalive(Duration::from_secs(10 * 60)); i.wait_keepalive_while(idle::stop_on_any) .map_err(|e| format!("{}", e)) }) .map(|_| ()) } pub fn fetch_uid(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch( format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS BODY.PEEK[])", ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uid_meta(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch(format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS)") .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uids( &mut self, first: u32, last: Option<u32>, changedsince: Option<u64>, ) -> Result<ZeroCopy<Vec<Fetch>>, String> { let range = match last { None => format!("{}:*", first), Some(n) if n > first => format!("{}:{}", first, n), _ => return Err(format!("Invalid range {}:{}", first,
random
[ { "content": "// FIXME: Move this to imapw?\n\n/// Convert imap flags to maildir flags\n\npub fn maildir_flags_from_imap(inflags: &[Flag]) -> String {\n\n let syncflags = SyncFlags::from(inflags);\n\n syncflags.to_string()\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 0, "score": 12506...
Rust
src/drivers/keyboard.rs
arbel03/os
ba061f795cc6e492dd752344e43d4d8e4896d5f3
use drivers::utils::inb; #[derive(Copy, Clone, PartialEq, Eq)] pub enum ScanCodeType { Digit(u8), Character(char), Shift, Backspace, Enter, Space, Quote, } pub struct ScanCode { pub released: bool, pub scan_code_type: ScanCodeType, } #[derive(PartialEq)] pub enum ScanCodeError { BackspaceScancode, InvalidScancode, } impl ScanCode { pub fn new(scan_code_type: ScanCodeType) -> Self { ScanCode { released: false, scan_code_type: scan_code_type, } } pub fn released(&self) -> Self { ScanCode { released: true, scan_code_type: self.scan_code_type.clone(), } } pub fn get_char(&self) -> Result<char, ScanCodeError> { let c = match self.scan_code_type { ScanCodeType::Digit(digit) => ('0' as u8 + digit) as char, ScanCodeType::Character(character) => { let character = character.to_string(); let character = if unsafe { IS_UPPERCASE } { character.to_uppercase() } else { character }; character.as_bytes()[0] as char }, ScanCodeType::Enter => '\n', ScanCodeType::Quote => if unsafe { IS_UPPERCASE } { '\"' } else { '\'' }, ScanCodeType::Space => ' ', ScanCodeType::Backspace => return Err(ScanCodeError::BackspaceScancode), _ => return Err(ScanCodeError::InvalidScancode), }; Ok(c) } } use core::fmt; use core::fmt::Write; use alloc::string::ToString; impl fmt::Display for ScanCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Ok(ch) = self.get_char() { f.write_char(ch as char) } else { Ok(()) } } } pub struct RawScanCode(u8); impl RawScanCode { pub fn get_scancode(&self) -> Option<ScanCode> { let scancode = match self.0 { 0x02 ... 
0x0A => ScanCode::new(ScanCodeType::Digit(self.0 - 0x01)), 0x0B => ScanCode::new(ScanCodeType::Digit(0)), 0x0E => ScanCode::new(ScanCodeType::Backspace), 0x10 => ScanCode::new(ScanCodeType::Character('q')), 0x11 => ScanCode::new(ScanCodeType::Character('w')), 0x12 => ScanCode::new(ScanCodeType::Character('e')), 0x13 => ScanCode::new(ScanCodeType::Character('r')), 0x14 => ScanCode::new(ScanCodeType::Character('t')), 0x15 => ScanCode::new(ScanCodeType::Character('y')), 0x16 => ScanCode::new(ScanCodeType::Character('u')), 0x17 => ScanCode::new(ScanCodeType::Character('i')), 0x18 => ScanCode::new(ScanCodeType::Character('o')), 0x19 => ScanCode::new(ScanCodeType::Character('p')), 0x1E => ScanCode::new(ScanCodeType::Character('a')), 0x1F => ScanCode::new(ScanCodeType::Character('s')), 0x20 => ScanCode::new(ScanCodeType::Character('d')), 0x21 => ScanCode::new(ScanCodeType::Character('f')), 0x22 => ScanCode::new(ScanCodeType::Character('g')), 0x23 => ScanCode::new(ScanCodeType::Character('h')), 0x24 => ScanCode::new(ScanCodeType::Character('j')), 0x25 => ScanCode::new(ScanCodeType::Character('k')), 0x26 => ScanCode::new(ScanCodeType::Character('l')), 0x28 => ScanCode::new(ScanCodeType::Quote), 0x2A => ScanCode::new(ScanCodeType::Shift), 0x2B => ScanCode::new(ScanCodeType::Character('\\')), 0x2C => ScanCode::new(ScanCodeType::Character('z')), 0x2D => ScanCode::new(ScanCodeType::Character('x')), 0x2E => ScanCode::new(ScanCodeType::Character('c')), 0x2F => ScanCode::new(ScanCodeType::Character('v')), 0x30 => ScanCode::new(ScanCodeType::Character('b')), 0x31 => ScanCode::new(ScanCodeType::Character('n')), 0x32 => ScanCode::new(ScanCodeType::Character('m')), 0x33 => ScanCode::new(ScanCodeType::Character(',')), 0x34 => ScanCode::new(ScanCodeType::Character('.')), 0x35 => ScanCode::new(ScanCodeType::Character('/')), 0x0C => ScanCode::new(ScanCodeType::Character('-')), 0x36 => ScanCode::new(ScanCodeType::Shift), 0xAA => ScanCode::new(ScanCodeType::Shift).released(), 0xB6 => 
ScanCode::new(ScanCodeType::Shift).released(), 0x1C => ScanCode::new(ScanCodeType::Enter), 0x39 => ScanCode::new(ScanCodeType::Space), _ => return None, }; Some(scancode) } } static mut IS_UPPERCASE: bool = false; pub fn set_uppercased(is_uppercased: bool) { unsafe { IS_UPPERCASE = is_uppercased; } } pub fn get_scancode() -> Option<ScanCode> { let scancode_value = read_scancode_value(); let raw_scancode = RawScanCode(scancode_value); raw_scancode.get_scancode() } pub fn read_scancode_value() -> u8 { unsafe { while inb(0x64) & 1 != 1 {} inb(0x60) } } pub fn getc() -> usize { loop { if let Some(c) = get_scancode() { use drivers::keyboard::ScanCodeType; if ScanCodeType::Shift == c.scan_code_type { set_uppercased(!c.released); } else { match c.get_char() { Ok(character) => return character as usize, Err(scan_code_error) => { if scan_code_error == ScanCodeError::BackspaceScancode { return 0xffffffff; } } } } } } }
use drivers::utils::inb; #[derive(Copy, Clone, PartialEq, Eq)] pub enum ScanCodeType { Digit(u8), Character(char), Shift, Backspace, Enter, Space, Quote, } pub struct ScanCode { pub released: bool, pub scan_code_type: ScanCodeType, } #[derive(PartialEq)] pub enum ScanCodeError { BackspaceScancode, InvalidScancode, } impl ScanCode { pub fn new(scan_code_type: ScanCodeType) -> Self { ScanCode { released: false, scan_code_type: scan_code_type, } } pub fn released(&self) -> Self { ScanCode { released: true, scan_code_type: self.scan_code_type.clone(), } } pub fn get_char(&self) -> Result<char, ScanCodeError> { let c = match self.scan_code_type { ScanCodeType::Digit(digit) => ('0' as u8 + digit) as char, ScanCodeType::Character(character) => { let character = character.to_string(); let character = if unsafe { IS_UPPERCASE } { character.to_uppercase() } else { character }; character.as_bytes()[0] as char }, ScanCodeType::Enter => '\n', ScanCodeType::Quote => if unsafe { IS_UPPERCASE } { '\"' } else { '\'' }, ScanCodeType::Space => ' ', ScanCodeType::Backspace => return Err(ScanCodeError::BackspaceScancode), _ => return Err(ScanCodeError::InvalidScancode), }; Ok(c) } } use core::fmt; use core::fmt::Write; use alloc::string::ToString; impl fmt::Display for ScanCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Ok(ch) = self.get_char() { f.write_char(ch as char) } else { Ok(()) } } } pub struct RawScanCode(u8); impl RawScanCode { pub fn get_scancode(&self) -> Option<ScanCode> { let scancode = match self.0 { 0x02 ... 
0x0A => ScanCode::new(ScanCodeType::Digit(self.0 - 0x01)), 0x0B => ScanCode::new(ScanCodeType::Digit(0)), 0x0E => ScanCode::new(ScanCodeType::Backspace), 0x10 => ScanCode::new(ScanCodeType::Character('q')), 0x11 => ScanCode::new(ScanCodeType::Character('w')), 0x12 => ScanCode::new(ScanCodeType::Character('e')), 0x13 => ScanCode::new(ScanCodeType::Character('r')), 0x14 => ScanCode::new(ScanCodeType::Character('t')), 0x15 => ScanCode::new(ScanCodeType::Character('y')), 0x16 => ScanCode::new(ScanCodeType::Character('u')), 0x17 => ScanCode::new(ScanCodeType::Character('i')), 0x18 => ScanCode::new(ScanCodeType::Character('o')), 0x19 => ScanCode::new(ScanCodeType::Character('p')), 0x1E => ScanCode::new(ScanCodeType::Character('a')), 0x1F => ScanCode::new(ScanCodeType::Character('s')), 0x20 => ScanCode::new(ScanCodeType::Character('d')), 0x21 => ScanCode::new(ScanCodeType::Character('f')), 0x22 => ScanCode::new(ScanCodeType::Character('g')), 0x23 => ScanCode::new(ScanCodeType::Character('h')), 0x24 => ScanCode::new(ScanCodeType::Character('j')), 0x25 => ScanCode::new(ScanCodeType::Character('k')), 0x26 => ScanCode::new(ScanCodeType::Character('l')), 0x28 => ScanCode::new(ScanCodeType::Quote), 0x2A => ScanCode::new(ScanCodeType::Shift), 0x2B => ScanCode::new(ScanCodeType::Character('\\')), 0x2C => ScanCode::new(ScanCodeType::Character('z')), 0x2D => ScanCode::new(ScanCodeType::Character('x')), 0x2E => ScanCode::new(ScanCodeType::Character('c')), 0x2F => ScanCode::new(ScanCodeType::Character('v')), 0x30 => ScanCode::new(ScanCodeType::Character('b')), 0x31 => ScanCode::new(ScanCodeType::Character('n')), 0x32 => ScanCode::new(ScanCodeType::Character('m')), 0x33 => ScanCode::new(ScanCodeType::Character(',')), 0x34 => ScanCode::new(ScanCodeType::Character('.')), 0x35 => ScanCode::new(ScanCodeType::Character('/')), 0x0C => ScanCode::new(ScanCodeType::Character('-')), 0x36 => ScanCode::new(ScanCodeType::Shift), 0xAA => ScanCode::new(ScanCodeType::Shift).released(), 0xB6 => 
ScanCode::new(ScanCodeType::Shift).released(), 0x1C => ScanCode::new(ScanCodeType::Enter), 0x39 => ScanCode::new(ScanCodeType::Space), _ => return None, }; Some(scancode) } } static mut IS_UPPERCASE: bool = false; pub fn set_uppercased(is_uppercased: bool) { unsafe { IS_UPPERCASE = is_uppercased; } } pub fn get_scancode() -> Option<ScanCode> { let scancode_value = read_scancode_value(); let raw_scancode = RawScanCode(scancode_value); raw_scancode.get_scancode() } pub fn read_scancode_value() -> u8 { unsafe { while inb(0x64) & 1 != 1 {} inb(0x60) } } pub fn getc() -> usize { loop { if let Some(c) = get_scancode() { use drivers::keyboard::ScanCodeType; if ScanCodeType::Shift == c.scan_code_type { set_uppercased(!c.released); } else {
} } } }
match c.get_char() { Ok(character) => return character as usize, Err(scan_code_error) => { if scan_code_error == ScanCodeError::BackspaceScancode { return 0xffffffff; } } }
if_condition
[ { "content": "pub fn proc_info(info_struct_ptr: *mut u8, proc_number: usize) -> usize {\n\n #[repr(packed)]\n\n #[derive(Debug)]\n\n pub struct ProcInfo {\n\n pub process_index: u32,\n\n pub process_name_length: u32,\n\n pub process_base: u32,\n\n pub process_total_size: u32...
Rust
benches/hash.rs
flier/rust-t1ha
163460ad90b424e75a5480cfe3c94edafc4883ef
#![allow(deprecated)] #[macro_use] extern crate lazy_static; #[macro_use] extern crate criterion; use std::collections::hash_map::DefaultHasher; use std::hash::{Hasher, BuildHasher}; use std::hash::SipHasher; use std::io::BufReader; use std::mem; use std::slice; use criterion::{black_box, Criterion, ParameterizedBenchmark, Throughput}; use ahash::ABuildHasher; use farmhash::{hash32_with_seed as farmhash32, hash64_with_seed as farmhash64}; use fnv::FnvHasher; use fxhash::{hash32 as fxhash32, hash64 as fxhash64}; use meowhash::MeowHasher; use metrohash::{MetroHash128, MetroHash64}; use murmur3::{murmur3_32, murmur3_x64_128, murmur3_x86_128}; use rustc_hash::FxHasher; use seahash::hash_seeded as seahash64; use t1ha::{t1ha0_32, t1ha1, t1ha2_atonce, t1ha2_atonce128}; use twox_hash::{XxHash as XxHash64, XxHash32}; use xxhash2::{hash32 as xxhash32, hash64 as xxhash64}; #[cfg(target_feature = "aes")] use t1ha::t1ha0_ia32aes_noavx; #[cfg(not(target_feature = "aes"))] fn t1ha0_ia32aes_noavx(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx2")] use t1ha::t1ha0_ia32aes_avx2; #[cfg(not(target_feature = "avx2"))] fn t1ha0_ia32aes_avx2(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx")] use t1ha::t1ha0_ia32aes_avx; #[cfg(not(target_feature = "avx"))] fn t1ha0_ia32aes_avx(_data: &[u8], _seed: u64) -> u64 { 0 } const KB: usize = 1024; const SEED: u64 = 0x0123456789ABCDEF; const PARAMS: [usize; 7] = [7, 8, 32, 256, KB, 4 * KB, 16 * KB]; lazy_static! 
{ static ref DATA: Vec<u8> = (0..16 * KB).map(|b| b as u8).collect::<Vec<_>>(); } fn bench_memory(c: &mut Criterion) { c.bench( "memory", ParameterizedBenchmark::new( "sum", move |b, &&size| { let s = unsafe { slice::from_raw_parts(DATA.as_ptr() as *mut u32, size / mem::size_of::<u32>()) }; b.iter(|| { black_box(s.iter().fold(0u64, |acc, &x| acc + x as u64)); }) }, &PARAMS, ) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash32(c: &mut Criterion) { c.bench( "hash32", ParameterizedBenchmark::new( "t1ha0_32", move |b, &&size| { b.iter(|| t1ha0_32(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("murmur3_32", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); murmur3_32(&mut r, SEED as u32) }); }) .with_function("farmhash32", move |b, &&size| { b.iter(|| farmhash32(&DATA[..size], SEED as u32)); }) .with_function("xxhash32", move |b, &&size| { b.iter(|| xxhash32(&DATA[..size], SEED as u32)); }) .with_function("twox_hash::XxHash32", move |b, &&size| { b.iter(|| { let mut h = XxHash32::with_seed(SEED as u32); h.write(&DATA[..size]); h.finish() }); }) .with_function("fxhash32", move |b, &&size| { b.iter(|| fxhash32(&DATA[..size])); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash64(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha1", move |b, &&size| { b.iter(|| t1ha1(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("t1ha2_atonce", move |b, &&size| { b.iter(|| t1ha2_atonce(&DATA[..size], SEED)); }); if cfg!(target_feature = "aes") { bench = bench.with_function("t1ha0_ia32aes_noavx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_noavx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx") { bench = bench.with_function("t1ha0_ia32aes_avx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx2") { bench = bench.with_function("t1ha0_ia32aes_avx2", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx2(&DATA[..size], SEED)); }); } 
c.bench( "hash64", bench .with_function("hash_map::DefaultHasher", move |b, &&size| { b.iter(|| { let mut h = DefaultHasher::new(); h.write(&DATA[..size]); h.finish() }); }) .with_function("siphash", move |b, &&size| { b.iter(|| { let mut h = SipHasher::new_with_keys(SEED, SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("metrohash64", move |b, &&size| { b.iter(|| { let mut h = MetroHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("farmhash64", move |b, &&size| { b.iter(|| farmhash64(&DATA[..size], SEED)); }) .with_function("fnv64", move |b, &&size| { b.iter(|| { let mut h = FnvHasher::with_key(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("xxhash64", move |b, &&size| { b.iter(|| xxhash64(&DATA[..size], SEED)); }) .with_function("twox_hash::XxHash", move |b, &&size| { b.iter(|| { let mut h = XxHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("seahash", move |b, &&size| { b.iter(|| seahash64(&DATA[..size], SEED, SEED, SEED, SEED)); }) .with_function("fxhash64", move |b, &&size| { b.iter(|| fxhash64(&DATA[..size])); }) .with_function("ahash", move |b, &&size| { let builder = ABuildHasher::new(); b.iter(|| { let mut h = builder.build_hasher(); h.write(&DATA[..size]); h.finish() }); }) .with_function("rustc_hash::FxHasher", move |b, &&size| { b.iter(|| { let mut h = FxHasher::default(); h.write(&DATA[..size]); h.finish() }); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash128(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha2_atonce128", move |b, &&size| { b.iter(|| t1ha2_atonce128(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("metrohash128", move |b, &&size| { b.iter(|| { let mut h = MetroHash128::with_seed(SEED); h.write(&DATA[..size]); h.finish128() }); }); if cfg!(target_arch = "x86_64") { bench = bench.with_function("murmur3_x64_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let 
mut out = [0; 16]; murmur3_x64_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_arch = "x86") { bench = bench.with_function("murmur3_x86_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x86_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_feature = "aes") { bench = bench.with_function("meowhash128", move |b, &&size| { b.iter(|| MeowHasher::digest_with_seed(SEED as u128, &DATA[..size])); }); } c.bench( "hash128", bench.throughput(|&&size| Throughput::Bytes(size as u32)), ); } criterion_group!( benches, bench_memory, bench_hash32, bench_hash64, bench_hash128 ); criterion_main!(benches);
#![allow(deprecated)] #[macro_use] extern crate lazy_static; #[macro_use] extern crate criterion; use std::collections::hash_map::DefaultHasher; use std::hash::{Hasher, BuildHasher}; use std::hash::SipHasher; use std::io::BufReader; use std::mem; use std::slice; use criterion::{black_box, Criterion, ParameterizedBenchmark, Throughput}; use ahash::ABuildHasher; use farmhash::{hash32_with_seed as farmhash32, hash64_with_seed as farmhash64}; use fnv::FnvHasher; use fxhash::{hash32 as fxhash32, hash64 as fxhash64}; use meowhash::MeowHasher; use metrohash::{MetroHash128, MetroHash64}; use murmur3::{murmur3_32, murmur3_x64_128, murmur3_x86_128}; use rustc_hash::FxHasher; use seahash::hash_seeded as seahash64; use t1ha::{t1ha0_32, t1ha1, t1ha2_atonce, t1ha2_atonce128}; use twox_hash::{XxHash as XxHash64, XxHash32}; use xxhash2::{hash32 as xxhash32, hash64 as xxhash64}; #[cfg(target_feature = "aes")] use t1ha::t1ha0_ia32aes_noavx; #[cfg(not(target_feature = "aes"))] fn t1ha0_ia32aes_noavx(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx2")] use t1ha::t1ha0_ia32aes_avx2; #[cfg(not(target_feature = "avx2"))] fn t1ha0_ia32aes_avx2(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx")] use t1ha::t1ha0_ia32aes_avx; #[cfg(not(target_feature = "avx"))] fn t1ha0_ia32aes_avx(_data: &[u8], _seed: u64) -> u64 { 0 } const KB: usize = 1024; const SEED: u64 = 0x0123456789ABCDEF; const PARAMS: [usize; 7] = [7, 8, 32, 256, KB, 4 * KB, 16 * KB]; lazy_static! { static ref DATA: Vec<u8> = (0..16 * KB).map(|b| b as u8).collect::<Vec<_>>(); }
fn bench_hash32(c: &mut Criterion) { c.bench( "hash32", ParameterizedBenchmark::new( "t1ha0_32", move |b, &&size| { b.iter(|| t1ha0_32(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("murmur3_32", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); murmur3_32(&mut r, SEED as u32) }); }) .with_function("farmhash32", move |b, &&size| { b.iter(|| farmhash32(&DATA[..size], SEED as u32)); }) .with_function("xxhash32", move |b, &&size| { b.iter(|| xxhash32(&DATA[..size], SEED as u32)); }) .with_function("twox_hash::XxHash32", move |b, &&size| { b.iter(|| { let mut h = XxHash32::with_seed(SEED as u32); h.write(&DATA[..size]); h.finish() }); }) .with_function("fxhash32", move |b, &&size| { b.iter(|| fxhash32(&DATA[..size])); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash64(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha1", move |b, &&size| { b.iter(|| t1ha1(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("t1ha2_atonce", move |b, &&size| { b.iter(|| t1ha2_atonce(&DATA[..size], SEED)); }); if cfg!(target_feature = "aes") { bench = bench.with_function("t1ha0_ia32aes_noavx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_noavx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx") { bench = bench.with_function("t1ha0_ia32aes_avx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx2") { bench = bench.with_function("t1ha0_ia32aes_avx2", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx2(&DATA[..size], SEED)); }); } c.bench( "hash64", bench .with_function("hash_map::DefaultHasher", move |b, &&size| { b.iter(|| { let mut h = DefaultHasher::new(); h.write(&DATA[..size]); h.finish() }); }) .with_function("siphash", move |b, &&size| { b.iter(|| { let mut h = SipHasher::new_with_keys(SEED, SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("metrohash64", move |b, &&size| { b.iter(|| { let mut h = MetroHash64::with_seed(SEED); 
h.write(&DATA[..size]); h.finish() }); }) .with_function("farmhash64", move |b, &&size| { b.iter(|| farmhash64(&DATA[..size], SEED)); }) .with_function("fnv64", move |b, &&size| { b.iter(|| { let mut h = FnvHasher::with_key(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("xxhash64", move |b, &&size| { b.iter(|| xxhash64(&DATA[..size], SEED)); }) .with_function("twox_hash::XxHash", move |b, &&size| { b.iter(|| { let mut h = XxHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("seahash", move |b, &&size| { b.iter(|| seahash64(&DATA[..size], SEED, SEED, SEED, SEED)); }) .with_function("fxhash64", move |b, &&size| { b.iter(|| fxhash64(&DATA[..size])); }) .with_function("ahash", move |b, &&size| { let builder = ABuildHasher::new(); b.iter(|| { let mut h = builder.build_hasher(); h.write(&DATA[..size]); h.finish() }); }) .with_function("rustc_hash::FxHasher", move |b, &&size| { b.iter(|| { let mut h = FxHasher::default(); h.write(&DATA[..size]); h.finish() }); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash128(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha2_atonce128", move |b, &&size| { b.iter(|| t1ha2_atonce128(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("metrohash128", move |b, &&size| { b.iter(|| { let mut h = MetroHash128::with_seed(SEED); h.write(&DATA[..size]); h.finish128() }); }); if cfg!(target_arch = "x86_64") { bench = bench.with_function("murmur3_x64_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x64_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_arch = "x86") { bench = bench.with_function("murmur3_x86_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x86_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_feature = "aes") { bench = bench.with_function("meowhash128", move |b, &&size| { b.iter(|| 
MeowHasher::digest_with_seed(SEED as u128, &DATA[..size])); }); } c.bench( "hash128", bench.throughput(|&&size| Throughput::Bytes(size as u32)), ); } criterion_group!( benches, bench_memory, bench_hash32, bench_hash64, bench_hash128 ); criterion_main!(benches);
fn bench_memory(c: &mut Criterion) { c.bench( "memory", ParameterizedBenchmark::new( "sum", move |b, &&size| { let s = unsafe { slice::from_raw_parts(DATA.as_ptr() as *mut u32, size / mem::size_of::<u32>()) }; b.iter(|| { black_box(s.iter().fold(0u64, |acc, &x| acc + x as u64)); }) }, &PARAMS, ) .throughput(|&&size| Throughput::Bytes(size as u32)), ); }
function_block-full_function
[ { "content": "#[cfg(not(feature = \"unaligned_access\"))]\n\npub fn t1ha1_be(data: &[u8], seed: u64) -> u64 {\n\n if !aligned_to::<u64, _>(data.as_ptr()) {\n\n unsafe { t1h1_body::<BigEndianUnaligned<u64>>(data, seed) }\n\n } else {\n\n unsafe { t1h1_body::<BigEndianAligned<u64>>(data, seed)...
Rust
src/style/builder.rs
vinaychandra/embedded-text
71c5e8abbb940deff1fcbab0c06c2c2fced5de10
use crate::{ alignment::{HorizontalTextAlignment, LeftAligned, TopAligned, VerticalTextAlignment}, style::{ height_mode::{Exact, HeightMode}, vertical_overdraw::FullRowsOnly, TabSize, TextBoxStyle, }, }; use embedded_graphics::{ prelude::*, style::{TextStyle, TextStyleBuilder}, }; pub struct TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { text_style_builder: TextStyleBuilder<C, F>, alignment: A, vertical_alignment: V, height_mode: H, line_spacing: i32, tab_size: TabSize<F>, underlined: bool, strikethrough: bool, } impl<C, F> TextBoxStyleBuilder<C, F, LeftAligned, TopAligned, Exact<FullRowsOnly>> where C: PixelColor, F: Font + Copy, { #[inline] #[must_use] pub fn new(font: F) -> Self { Self { text_style_builder: TextStyleBuilder::new(font), alignment: LeftAligned, vertical_alignment: TopAligned, height_mode: Exact(FullRowsOnly), line_spacing: 0, tab_size: TabSize::default(), underlined: false, strikethrough: false, } } #[inline] #[must_use] pub fn from_text_style(text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = TextStyleBuilder::new(text_style.font); if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); } if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); } Self { text_style_builder, ..Self::new(text_style.font) } } } impl<C, F, A, V, H> TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] pub fn text_color(self, text_color: C) -> Self { Self { text_style_builder: self.text_style_builder.text_color(text_color), ..self } } #[inline] #[must_use] pub fn line_spacing(self, line_spacing: i32) -> Self { Self { line_spacing, ..self } } #[inline] #[must_use] pub fn background_color(self, background_color: C) -> Self { Self { 
text_style_builder: self.text_style_builder.background_color(background_color), ..self } } #[inline] #[must_use] #[deprecated] pub fn text_style(self, text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = self.text_style_builder; if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); } if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); } Self { text_style_builder, ..self } } #[inline] #[must_use] pub fn alignment<TA: HorizontalTextAlignment>( self, alignment: TA, ) -> TextBoxStyleBuilder<C, F, TA, V, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn vertical_alignment<VA: VerticalTextAlignment>( self, vertical_alignment: VA, ) -> TextBoxStyleBuilder<C, F, A, VA, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn height_mode<HM: HeightMode>( self, height_mode: HM, ) -> TextBoxStyleBuilder<C, F, A, V, HM> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn tab_size(self, tab_size: TabSize<F>) -> Self { Self { tab_size, ..self } } #[inline] #[must_use] pub fn underlined(self, underlined: bool) -> Self { Self { underlined, ..self } } #[inline] #[must_use] pub fn strikethrough(self, 
strikethrough: bool) -> Self { Self { strikethrough, ..self } } #[inline] #[must_use] pub fn build(self) -> TextBoxStyle<C, F, A, V, H> { TextBoxStyle { text_style: self.text_style_builder.build(), alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } } #[cfg(test)] mod test { use super::TextBoxStyleBuilder; use embedded_graphics::{ fonts::Font6x8, pixelcolor::BinaryColor, style::{TextStyle, TextStyleBuilder}, }; #[test] #[allow(deprecated)] fn test_text_style_copy() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::new(Font6x8) .text_style(text_style) .build(); assert_eq!(style.text_style, text_style); } } #[test] fn test_text_style_copy_ctr() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::from_text_style(text_style).build(); assert_eq!(style.text_style, text_style); } } }
use crate::{ alignment::{HorizontalTextAlignment, LeftAligned, TopAligned, VerticalTextAlignment}, style::{ height_mode::{Exact, HeightMode}, vertical_overdraw::FullRowsOnly, TabSize, TextBoxStyle, }, }; use embedded_graphics::{ prelude::*, style::{TextStyle, TextStyleBuilder}, }; pub struct TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { text_style_builder: TextStyleBuilder<C, F>, alignment: A, vertical_alignment: V, height_mode: H, line_spacing: i32, tab_size: TabSize<F>, underlined: bool, strikethrough: bool, } impl<C, F> TextBoxStyleBuilder<C, F, LeftAligned, TopAligned, Exact<FullRowsOnly>> where C: PixelColor, F: Font + Copy, { #[inline] #[must_use] pub fn new(font: F) -> Self { Self { text_style_builder: TextStyleBuilder::new(font), alignment: LeftAligned, vertical_alignment: TopAligned, height_mode: Exact(FullRowsOnly), line_spacing: 0, tab_size: TabSize::default(), underlined: false, strikethrough: false, } } #[inline] #[must_use] pub fn from_text_style(text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = TextStyleBuilder::new(text_style.font); if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); } if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); } Self { text_style_builder, ..Self::new(text_style.font) } } } impl<C, F, A, V, H> TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] pub fn text_color(self, text_color: C) -> Self { Self { text_style_builder: self.text_style_builder.text_color(text_color), ..self } } #[inline] #[must_use] pub fn line_spacing(self, line_spacing: i32) -> Self { Self { line_spacing, ..self } } #[inline] #[must_use] pub fn background_color(self, background_color: C) -> Self { Self { 
text_style_builder: self.text_style_builder.background_color(background_color), ..self } } #[inline] #[must_use] #[deprecated] pub fn text_style(self, text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = self.text_style_builder; if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); }
Self { text_style_builder, ..self } } #[inline] #[must_use] pub fn alignment<TA: HorizontalTextAlignment>( self, alignment: TA, ) -> TextBoxStyleBuilder<C, F, TA, V, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn vertical_alignment<VA: VerticalTextAlignment>( self, vertical_alignment: VA, ) -> TextBoxStyleBuilder<C, F, A, VA, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn height_mode<HM: HeightMode>( self, height_mode: HM, ) -> TextBoxStyleBuilder<C, F, A, V, HM> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn tab_size(self, tab_size: TabSize<F>) -> Self { Self { tab_size, ..self } } #[inline] #[must_use] pub fn underlined(self, underlined: bool) -> Self { Self { underlined, ..self } } #[inline] #[must_use] pub fn strikethrough(self, strikethrough: bool) -> Self { Self { strikethrough, ..self } } #[inline] #[must_use] pub fn build(self) -> TextBoxStyle<C, F, A, V, H> { TextBoxStyle { text_style: self.text_style_builder.build(), alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } } #[cfg(test)] mod test { use 
super::TextBoxStyleBuilder; use embedded_graphics::{ fonts::Font6x8, pixelcolor::BinaryColor, style::{TextStyle, TextStyleBuilder}, }; #[test] #[allow(deprecated)] fn test_text_style_copy() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::new(Font6x8) .text_style(text_style) .build(); assert_eq!(style.text_style, text_style); } } #[test] fn test_text_style_copy_ctr() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::from_text_style(text_style).build(); assert_eq!(style.text_style, text_style); } } }
if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); }
if_condition
[ { "content": "fn demo_loop<V>(window: &mut Window, bounds: &mut Rectangle, alignment: V) -> bool\n\nwhere\n\n V: VerticalTextAlignment + std::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, LeftAligned, TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let...
Rust
crates/apps/plugin-host/plugin-host-lib/src/commands/options/mod.rs
yamadapc/augmented-audio
2f662cd8aa1a0ba46445f8f41c8483ae2dc552d3
use clap::{App, ArgMatches}; #[derive(Clone)] pub struct RunOptions { plugin_path: String, input_audio: Option<String>, output_audio: Option<String>, open_editor: bool, watch: bool, audio_host_id: Option<String>, output_device_id: Option<String>, buffer_size: Option<usize>, sample_rate: Option<usize>, input_device_id: Option<String>, use_default_input_device: bool, use_mono_input: Option<usize>, } impl RunOptions { pub fn plugin_path(&self) -> &str { &self.plugin_path } pub fn input_audio(&self) -> &Option<String> { &self.input_audio } pub fn output_audio(&self) -> &Option<String> { &self.output_audio } pub fn open_editor(&self) -> bool { self.open_editor } pub fn watch(&self) -> bool { self.watch } pub fn audio_host_id(&self) -> &Option<String> { &self.audio_host_id } pub fn output_device_id(&self) -> &Option<String> { &self.output_device_id } pub fn buffer_size(&self) -> Option<usize> { self.buffer_size } pub fn sample_rate(&self) -> Option<usize> { self.sample_rate } pub fn input_device_id(&self) -> &Option<String> { &self.input_device_id } pub fn use_default_input_device(&self) -> bool { self.use_default_input_device } pub fn use_mono_input(&self) -> Option<usize> { self.use_mono_input } } pub fn build_run_command<'a, 'b>() -> App<'a, 'b> { clap::App::new("run") .about("Process audio") .arg(clap::Arg::from_usage( "-p, --plugin=<PLUGIN_PATH> 'An audio-plugin to load'", )) .arg(clap::Arg::from_usage( "-i, --input=[INPUT_PATH] 'An audio file to process'", )) .arg(clap::Arg::from_usage( "-o, --output=[OUTPUT_PATH] 'If specified, will render offline into file'", )) .arg(clap::Arg::from_usage( "-e, --editor 'Open the editor window'", )) .arg(clap::Arg::from_usage( "-w, --watch 'Watch and reload the VST when it changes'", )) .arg(clap::Arg::from_usage( "--host-id=[HOST_ID] 'Audio host name'", )) .arg(clap::Arg::from_usage( "--output-device-id=[OUTPUT_DEVICE_ID] 'Output device id'", )) .arg(clap::Arg::from_usage( "--buffer-size=[BUFFER_SIZE] 'Buffer size'", )) 
.arg(clap::Arg::from_usage( "--sample-rate=[SAMPLE_RATE] 'Sample rate'", )) .arg(clap::Arg::from_usage( "--input-device-id=[INPUT_DEVICE_ID] 'Open audio input with Input device id'", )) .arg(clap::Arg::from_usage( "--use-default-input-device 'Open audio input with the default device'", )) .arg(clap::Arg::from_usage( "--use-mono-input=[CHANNEL_NUMBER] 'If specified, the input stream will be mono-ed selecting the desired channel'", )) } pub fn parse_run_options(matches: ArgMatches) -> Option<RunOptions> { let matches = matches.subcommand_matches("run")?; let plugin_path = matches.value_of("plugin")?.to_string(); let input_audio = matches.value_of("input").map(|i| i.to_string()); let output_audio = matches.value_of("output").map(|value| value.to_string()); let open_editor = matches.is_present("editor"); let watch = matches.is_present("watch"); let audio_host_id = matches.value_of("host-id").map(|value| value.to_string()); let output_device_id = matches .value_of("output-device-id") .map(|value| value.to_string()); let buffer_size = matches .value_of("buffer-size") .map(|value| value.parse().expect("Invalid buffer size")); let sample_rate = matches .value_of("sample-rate") .map(|value| value.parse().expect("Invalid sample rate")); let input_device_id = matches .value_of("input-device-id") .map(|value| value.to_string()); let use_default_input_device = matches.is_present("use-default-input-device"); let use_mono_input = matches .value_of("use-mono-input") .map(|s| s.parse().expect("Invalid channel number")); Some(RunOptions { plugin_path, input_audio, output_audio, open_editor, watch, audio_host_id, output_device_id, buffer_size, sample_rate, input_device_id, use_default_input_device, use_mono_input, }) }
use clap::{App, ArgMatches}; #[derive(Clone)] pub struct RunOptions { plugin_path: String, input_audio: Option<String>, output_audio: Option<String>, open_editor: bool, watch: bool, audio_host_id: Option<String>, output_device_id: Option<String>, buffer_size: Option<usize>, sample_rate: Option<usize>, input_device_id: Option<String>, use_default_input_device: bool, use_mono_input: Option<usize>, } impl RunOptions { pub fn plugin_path(&self) -> &str { &self.plugin_path } pub fn input_audio(&self) -> &Option<String> { &self.input_audio } pub fn output_audio(&self) -> &Option<String> { &self.output_audio } pub fn open_editor(&self) -> bool { self.open_editor } pub fn watch(&self) -> bool { self.watch } pub fn audio_host_id(&self) -> &Option<String> { &self.audio_host_id } pub fn output_device_id(&self) -> &Option<String> { &self.output_device_id } pub fn buffer_size(&self) -> Option<usize> { self.buffer_size } pub fn sample_rate(&self) -> Option<usi
") .map(|value| value.parse().expect("Invalid buffer size")); let sample_rate = matches .value_of("sample-rate") .map(|value| value.parse().expect("Invalid sample rate")); let input_device_id = matches .value_of("input-device-id") .map(|value| value.to_string()); let use_default_input_device = matches.is_present("use-default-input-device"); let use_mono_input = matches .value_of("use-mono-input") .map(|s| s.parse().expect("Invalid channel number")); Some(RunOptions { plugin_path, input_audio, output_audio, open_editor, watch, audio_host_id, output_device_id, buffer_size, sample_rate, input_device_id, use_default_input_device, use_mono_input, }) }
ze> { self.sample_rate } pub fn input_device_id(&self) -> &Option<String> { &self.input_device_id } pub fn use_default_input_device(&self) -> bool { self.use_default_input_device } pub fn use_mono_input(&self) -> Option<usize> { self.use_mono_input } } pub fn build_run_command<'a, 'b>() -> App<'a, 'b> { clap::App::new("run") .about("Process audio") .arg(clap::Arg::from_usage( "-p, --plugin=<PLUGIN_PATH> 'An audio-plugin to load'", )) .arg(clap::Arg::from_usage( "-i, --input=[INPUT_PATH] 'An audio file to process'", )) .arg(clap::Arg::from_usage( "-o, --output=[OUTPUT_PATH] 'If specified, will render offline into file'", )) .arg(clap::Arg::from_usage( "-e, --editor 'Open the editor window'", )) .arg(clap::Arg::from_usage( "-w, --watch 'Watch and reload the VST when it changes'", )) .arg(clap::Arg::from_usage( "--host-id=[HOST_ID] 'Audio host name'", )) .arg(clap::Arg::from_usage( "--output-device-id=[OUTPUT_DEVICE_ID] 'Output device id'", )) .arg(clap::Arg::from_usage( "--buffer-size=[BUFFER_SIZE] 'Buffer size'", )) .arg(clap::Arg::from_usage( "--sample-rate=[SAMPLE_RATE] 'Sample rate'", )) .arg(clap::Arg::from_usage( "--input-device-id=[INPUT_DEVICE_ID] 'Open audio input with Input device id'", )) .arg(clap::Arg::from_usage( "--use-default-input-device 'Open audio input with the default device'", )) .arg(clap::Arg::from_usage( "--use-mono-input=[CHANNEL_NUMBER] 'If specified, the input stream will be mono-ed selecting the desired channel'", )) } pub fn parse_run_options(matches: ArgMatches) -> Option<RunOptions> { let matches = matches.subcommand_matches("run")?; let plugin_path = matches.value_of("plugin")?.to_string(); let input_audio = matches.value_of("input").map(|i| i.to_string()); let output_audio = matches.value_of("output").map(|value| value.to_string()); let open_editor = matches.is_present("editor"); let watch = matches.is_present("watch"); let audio_host_id = matches.value_of("host-id").map(|value| value.to_string()); let output_device_id = matches 
.value_of("output-device-id") .map(|value| value.to_string()); let buffer_size = matches .value_of("buffer-size
random
[ { "content": "/// Check if there's a non-null CFBundle with this identifier.\n\npub fn has_bundle(bundle_identifier: &str) -> bool {\n\n unsafe {\n\n let bundle_identifier = make_cfstring(bundle_identifier);\n\n if let Some(bundle_identifier) = bundle_identifier {\n\n let bundle = CF...
Rust
kaylee/src/instructions/math.rs
electricjones/kaylee
6cdc7e67ae8a3d9a989d8d18def496c9ceecab40
use std::fmt::Error; use kaylee_derive::Instruction; use crate::instructions; use crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues}; use crate::vm::{ExecutionResult, Kaylee, RegisterValue}; #[derive(Instruction)] #[opcode = 70] #[signature = "ADD $D $L $R"] pub struct Add { operand_values: OperandValues, } impl Executable for Add { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left + right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 71] #[signature = "SUB $D $L $R"] pub struct Subtract { operand_values: OperandValues, } impl Executable for Subtract { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left - right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 72] #[signature = "MUL $D $L $R"] pub struct Multiply { operand_values: OperandValues, } impl Executable for Multiply { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left * right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 73] #[signature = "DIV $D $L $R"] pub struct Divide { operand_values: OperandValues, } impl Executable for Divide { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let destination = self.operand_values[0].as_register_id(); let left = self.get_register_value_for_operand(1, vm).unwrap(); let right = self.get_register_value_for_operand(2, vm).unwrap(); let 
value = left / right; let remainder = (left % right) as u32; vm.set_register(destination, value).unwrap(); vm.set_remainder(remainder); Ok(ExecutionResult::Value(value)) } } #[cfg(test)] mod tests { use crate::instructions::math::{Add, Divide, Multiply, Subtract}; use crate::program::Program; use crate::vm::Kaylee; #[test] fn test_add() { let program = Program::from(vec![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 1, 3, Add::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 12).unwrap(); vm.set_register(1, 10).unwrap(); vm.set_register(2, 500).unwrap(); vm.set_register(3, 7).unwrap(); vm.run(program); assert_eq!(512, vm.register(29).unwrap()); assert_eq!(17, vm.register(30).unwrap()); assert_eq!(529, vm.register(31).unwrap()); } #[test] fn test_subtract() { let program = Program::from(vec![ Subtract::OPCODE, 29, 0, 2, Subtract::OPCODE, 30, 1, 3, Subtract::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 222).unwrap(); vm.set_register(1, 14).unwrap(); vm.set_register(2, 22).unwrap(); vm.set_register(3, 3).unwrap(); vm.run(program); assert_eq!(200, vm.register(29).unwrap()); assert_eq!(11, vm.register(30).unwrap()); assert_eq!(189, vm.register(31).unwrap()); } #[test] fn test_multiply() { let program = Program::from(vec![ Multiply::OPCODE, 29, 0, 2, Multiply::OPCODE, 30, 1, 3, Multiply::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.run(program); assert_eq!(12, vm.register(29).unwrap()); assert_eq!(32, vm.register(30).unwrap()); assert_eq!(384, vm.register(31).unwrap()); } #[test] fn test_divide_no_remainder() { let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 16).unwrap(); vm.set_register(1, 2).unwrap(); vm.run(program); assert_eq!(8, vm.register(31).unwrap()); assert_eq!(0, vm.remainder()); } #[test] fn test_divide_with_remainder() 
{ let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 13).unwrap(); vm.set_register(1, 5).unwrap(); vm.run(program); assert_eq!(2, vm.register(31).unwrap()); assert_eq!(3, vm.remainder()); } #[test] fn test_math() { let program = Program::from(vec![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 29, 2, Subtract::OPCODE, 30, 29, 1, Add::OPCODE, 28, 3, 4, Multiply::OPCODE, 31, 3, 2, Divide::OPCODE, 3, 29, 30, Subtract::OPCODE, 4, 2, 30, Add::OPCODE, 0, 3, 28, Multiply::OPCODE, 1, 3, 4, Divide::OPCODE, 31, 28, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.set_register(4, 9).unwrap(); vm.run(program); assert_eq!(19, vm.register(0).unwrap()); assert_eq!(4, vm.register(1).unwrap()); assert_eq!(6, vm.register(2).unwrap()); assert_eq!(2, vm.register(3).unwrap()); assert_eq!(2, vm.register(4).unwrap()); assert_eq!(17, vm.register(28).unwrap()); assert_eq!(8, vm.register(29).unwrap()); assert_eq!(4, vm.register(30).unwrap()); assert_eq!(4, vm.register(31).unwrap()); assert_eq!(1, vm.remainder()); } }
use std::fmt::Error; use kaylee_derive::Instruction; use crate::instructions; use crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues}; use crate::vm::{ExecutionResult, Kaylee, RegisterValue}; #[derive(Instruction)] #[opcode = 70] #[signature = "ADD $D $L $R"] pub struct Add { operand_values: OperandValues, } impl Executable for Add { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left + right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 71] #[signature = "SUB $D $L $R"] pub struct Subtract { operand_values: OperandValues, } impl Executable for Subtract { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left - right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 72] #[signature = "MUL $D $L $R"] pub struct Multiply { operand_values: OperandValues, } impl Executable for Multiply { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left * right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 73] #[signature = "DIV $D $L $R"] pub struct Divide { operand_values: OperandValues, } impl Executable for Divide { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let destination = self.operand_values[0].as_register_id(); let left = self.get_register_value_for_operand(1, vm).unwrap(); let right = self.get_register_value_for_operand(2, vm).unwrap(); let 
value = left / right; let remainder = (left % right) as u32; vm.set_register(destination, value).unwrap(); vm.set_remainder(remainder); Ok(ExecutionResult::Value(value)) } } #[cfg(test)] mod tests { use crate::instructions::math::{Add, Divide, Multiply, Subtract}; use crate::program::Program; use crate::vm::Kaylee; #[test] fn test_add() { let program = Program::from(vec![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 1, 3, Add::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 12).unwrap(); vm.set_register(1, 10).unwrap(); vm.set_register(2, 500).unwrap(); vm.set_register(3, 7).unwrap(); vm.run(program); assert_eq!(512, vm.register(29).unwrap()); assert_eq!(17, vm.register(30).unwrap()); assert_eq!(529, vm.register(31).unwrap()); } #[test] fn test_subtract() { let program = Program::from(vec![ Subtract::OPCODE, 29, 0, 2, Subtract::OPCODE, 30, 1, 3, Subtract::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 222).unwrap(); vm.set_register(1, 14).unwrap(); vm.set_register(2, 22).unwrap(); vm.set_register(3, 3).unwrap(); vm.run(program); assert_eq!(200, vm.register(29).unwrap()); assert_eq!(11, vm.register(30).unwrap()); assert_eq!(189, vm.register(31).unwrap()); } #[test] fn test_multiply() { let program = Program::from(vec![ Multiply::OPCODE, 29, 0, 2, Multiply::OPCODE, 30, 1, 3, Multiply::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.run(program); assert_eq!(12, vm.register(29).unwrap()); assert_eq!(32, vm.register(30).unwrap()); assert_eq!(384, vm.register(31).unwrap()); } #[test] fn test_divide_no_remainder() { let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 16).unwrap(); vm.set_register(1, 2).unwrap(); vm.run(program); assert_eq!(8, vm.register(31).unwrap()); assert_eq!(0, vm.remainder()); } #[test] fn test_divide_with_remainder() 
{ let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 13).unwrap(); vm.set_register(1, 5).unwrap(); vm.run(program); assert_eq!(2, vm.register(31).unwrap()); assert_eq!(3, vm.remainder()); } #[test] fn test_math() { let program = Program::from(ve
}
c![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 29, 2, Subtract::OPCODE, 30, 29, 1, Add::OPCODE, 28, 3, 4, Multiply::OPCODE, 31, 3, 2, Divide::OPCODE, 3, 29, 30, Subtract::OPCODE, 4, 2, 30, Add::OPCODE, 0, 3, 28, Multiply::OPCODE, 1, 3, 4, Divide::OPCODE, 31, 28, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.set_register(4, 9).unwrap(); vm.run(program); assert_eq!(19, vm.register(0).unwrap()); assert_eq!(4, vm.register(1).unwrap()); assert_eq!(6, vm.register(2).unwrap()); assert_eq!(2, vm.register(3).unwrap()); assert_eq!(2, vm.register(4).unwrap()); assert_eq!(17, vm.register(28).unwrap()); assert_eq!(8, vm.register(29).unwrap()); assert_eq!(4, vm.register(30).unwrap()); assert_eq!(4, vm.register(31).unwrap()); assert_eq!(1, vm.remainder()); }
function_block-function_prefixed
[ { "content": "/// Decodes the operand values from the Instruction Stream\n\npub fn consume_and_parse_values(signature: InstructionSignature, instructions: &Program, program_counter: &mut usize) -> Result<OperandValues, InstructionDecodeError> {\n\n let mut operand_values: OperandValues = [OperandValue::None,...
Rust
mgl_resource_derive/src/lib.rs
fcard/MGL
9d41d30ce58451b80aa6e0d255b0c398c679b86b
#![feature(box_patterns)] extern crate proc_macro; use proc_macro2::{TokenStream, Span}; use syn::*; use quote::*; macro_rules! ident { ($name: ident) => { Ident::new(stringify!($name), Span::call_site()) } } #[proc_macro_derive(Resource, attributes(array_field, sub_resource, ignore_field))] pub fn derive_resource(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let fields = named_fields(&input.data); let parse = impl_parse_key_values(&fields); proc_macro::TokenStream::from(quote! { impl<T: ResourceAst> Resource<T> for #name { #parse } }) } fn named_fields(data: &Data) -> FieldsNamed { match data { &Data::Struct(ref data) => { match data.fields { Fields::Named(ref fields) => { fields.clone() } _ => panic!("Only structs with named fields are allowed!") } } _ => panic!("Only structs are allowed!") } } fn impl_parse_key_values(fields: &FieldsNamed) -> TokenStream { let mut matches = Vec::new(); let source_ast = quote! { source_ast }; let key = quote! { key }; let value = quote! { value }; let array_index = quote! { array_index }; let sub_field_key = quote! { sub_field_key }; let module = quote! { crate::resources::resource_trait }; for field in &fields.named { let field_name = &field.ident.clone(); let field_str = field_str(&field); let field_attrs = field_attributes(&field); let field_sub = field_attrs.sub; let field_array = field_attrs.array; let field_set; let array_pre_code; let field_indexing; let no_field_assert; if field_array { array_pre_code = quote! { let #array_index = #module::KeyInspector::get_array_index(#field_str, #key)?; if #array_index >= self.#field_name.len() { self.#field_name.resize_with(#array_index + 1, Default::default); } }; field_indexing = quote! { [#array_index] }; } else { array_pre_code = quote! {}; field_indexing = quote! {}; } let full_field = quote! { #field_name#field_indexing }; if field_sub { field_set = quote! 
{ let #sub_field_key = #module::KeyInspector::get_sub_field_key(#field_str, #key)?; self.#full_field.parse_key_value(#source_ast, &#sub_field_key, #value)?; } } else { field_set = quote! { self.#full_field = #module::parse_field_default(#value)?; } } if field_array || field_sub { no_field_assert = quote! {}; } else { no_field_assert = quote! { #module::KeyInspector::assert_field_has_no_index(#field_str, #key)?; }; } if !field_attrs.ignore { matches.push(quote! { #field_str => { #array_pre_code #no_field_assert #field_set }}); } } quote! { fn parse_key_value(&mut self, #source_ast: &T, #key: &Key, #value: &IExpr) -> #module::Result<()> { match #key.name_of().as_ref() { #(#matches),*, field => { return #module::MglError::invalid_field(field, #module::InvalidFieldKind::NotFound) } } Ok(()) } } } fn field_str(field: &Field) -> String { field.ident.as_ref().map(Ident::to_string).unwrap_or(String::new()) } struct FieldAttributes { sub: bool, array: bool, ignore: bool, } fn field_attributes(field: &Field) -> FieldAttributes { let mut attributes = FieldAttributes { sub: false, array: false, ignore: false }; for attr in field.attrs.clone() { if attr.path.is_ident(&ident!(sub_resource)) { attributes.sub = true; } else if attr.path.is_ident(&ident!(array_field)) { attributes.array = true; } else if attr.path.is_ident(&ident!(ignore_field)) { attributes.ignore = true; } } attributes }
#![feature(box_patterns)] extern crate proc_macro; use proc_macro2::{TokenStream, Span}; use syn::*; use quote::*; macro_rules! ident { ($name: ident) => { Ident::new(stringify!($name), Span::call_site()) } } #[proc_macro_derive(Resource, attributes(array_field, sub_resource, ignore_field))] pub fn derive_resource(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let fields = named_fields(&input.data); let parse = impl_parse_key_values(&fields); proc_macro::TokenStream::from(quote! { impl<T: ResourceAst> Resource<T> for #name { #parse } }) } fn named_fields(data: &Data) -> FieldsNamed { match data { &Data::Struct(ref data) => { match data.fields { Fields::Named(ref fields) => { fields.clone() } _ => panic!("Only structs with named fields are allowed!") } } _ => panic!("Only structs are allowed!") } } fn impl_parse_key_values(fields: &FieldsNamed) -> TokenStream { let mut matches = Vec::new(); let source_ast = quote! { source_ast }; let key = quote! { key }; let value = quote! { value }; let array_index = quote! { array_index }; let sub_field_key = quote! { sub_field_key }; let module = quote! { crate::resources::resource_trait }; for field in &fields.named { let field_name = &field.ident.clone(); let field_str = field_str(&field); let field_attrs = field_attributes(&field); let field_sub = field_attrs.sub; let field_array = field_attrs.array; let field_set; let array_pre_code; let field_indexing; let no_field_assert; if field_array { array_pre_code = quote! { let #array_index = #module::KeyInspector::get_array_index(#field_str, #key)?; if #array_index >= self.#field_name.len() { self.#field_name.resize_with(#array_index + 1, Default::default); } }; field_indexing = quote! { [#array_index] }; } else { array_pre_code = quote! {}; field_indexing = quote! {}; } let full_field = quote! { #field_name#field_indexing }; if field_sub { field_set = quote! 
{ let #sub_field_key = #module::KeyInspector::get_sub_field_key(#field_str, #key)?; self.#full_field.parse_key_value(#source_ast, &#sub_field_key, #value)?; } } else { field_set = quote! { self.#full_field = #module::parse_field_default(#value)?; } } if field_array || field_sub { no_field_assert = quote! {}; } else { no_field_assert = quote! { #module::KeyInspector::assert_field_has_no_index(#field_str, #key)?; }; } if !field_attrs.ignore { matches.push(quote! { #field_str => { #array_pre_code #no_field_assert #field_set }}); } } quote! { fn parse_key_value(&mut self, #source_ast: &T, #key: &Key, #value: &IExpr) -> #module::Result<()> { match #key.name_of().as_ref() { #(#matches),*, field => { return #module::MglError::invalid_field(field, #module::InvalidFieldKind::NotFound) } } Ok(()) } } } fn field_str(field: &Field) -> String { field.ident.as_ref().map(Ident::to_string).unwrap_or(String::new()) } struct FieldAttributes { sub: bool, array: bool, ignore: bool, } fn field_attributes(field: &Field) -> FieldAttributes { let mut attributes = FieldAttributes { sub: false, array: false, ignore: false }; for attr in field.attrs.clone() { if attr.path.is_ident(&ident!(sub_resource)) { attributes.sub = true; }
else if attr.path.is_ident(&ident!(array_field)) { attributes.array = true; } else if attr.path.is_ident(&ident!(ignore_field)) { attributes.ignore = true; } } attributes }
function_block-function_prefix_line
[ { "content": "pub fn parse_key(mut tks: InnerTokens) -> Key {\n\n let name = tks.next().unwrap().as_str();\n\n let mut key = Key::name(name);\n\n\n\n while let Some(rule) = tks.peek().map(|p| p.as_rule()) {\n\n match rule {\n\n Rule::name => {\n\n let right = parse_key(tks);\n\n key = K...
Rust
sdks/rust/src/sdk.rs
xxtanisxx/agones
3f00aa67518a8f3e43b02415c228808f1511d3cb
use std::{env, time::Duration}; use tonic::transport::Channel; mod api { tonic::include_proto!("agones.dev.sdk"); } use api::sdk_client::SdkClient; pub use api::GameServer; pub type WatchStream = tonic::Streaming<GameServer>; use crate::{alpha::Alpha, errors::Result}; #[inline] fn empty() -> api::Empty { api::Empty {} } #[derive(Clone)] pub struct Sdk { client: SdkClient<Channel>, alpha: Alpha, } impl Sdk { pub async fn new(port: Option<u16>, keep_alive: Option<Duration>) -> Result<Self> { let addr: http::Uri = format!( "http://localhost:{}", port.unwrap_or_else(|| { env::var("AGONES_SDK_GRPC_PORT") .ok() .and_then(|s| s.parse().ok()) .unwrap_or(9357) }) ) .parse()?; let builder = tonic::transport::channel::Channel::builder(addr) .connect_timeout(Duration::from_secs(30)) .keep_alive_timeout(keep_alive.unwrap_or_else(|| Duration::from_secs(30))); let channel = builder.connect_lazy()?; let mut client = SdkClient::new(channel.clone()); let alpha = Alpha::new(channel); tokio::time::timeout(Duration::from_secs(30), async { let mut connect_interval = tokio::time::interval(Duration::from_millis(100)); loop { connect_interval.tick().await; if client.get_game_server(empty()).await.is_ok() { break; } } }) .await?; Ok(Self { client, alpha }) } #[inline] pub fn alpha(&self) -> &Alpha { &self.alpha } pub async fn ready(&mut self) -> Result<()> { Ok(self.client.ready(empty()).await.map(|_| ())?) } pub async fn allocate(&mut self) -> Result<()> { Ok(self.client.allocate(empty()).await.map(|_| ())?) } pub async fn shutdown(&mut self) -> Result<()> { Ok(self.client.shutdown(empty()).await.map(|_| ())?) } pub fn health_check(&self) -> tokio::sync::mpsc::Sender<()> { let mut health_client = self.clone(); let (tx, mut rx) = tokio::sync::mpsc::channel(10); tokio::task::spawn(async move { let health_stream = async_stream::stream! 
{ while rx.recv().await.is_some() { yield empty(); } }; let _ = health_client.client.health(health_stream).await; }); tx } pub async fn set_label( &mut self, key: impl Into<String>, value: impl Into<String>, ) -> Result<()> { Ok(self .client .set_label(api::KeyValue { key: key.into(), value: value.into(), }) .await .map(|_| ())?) } pub async fn set_annotation( &mut self, key: impl Into<String>, value: impl Into<String>, ) -> Result<()> { Ok(self .client .set_annotation(api::KeyValue { key: key.into(), value: value.into(), }) .await .map(|_| ())?) } pub async fn get_gameserver(&mut self) -> Result<GameServer> { Ok(self .client .get_game_server(empty()) .await .map(|res| res.into_inner())?) } pub async fn reserve(&mut self, duration: Duration) -> Result<()> { Ok(self .client .reserve(api::Duration { seconds: std::cmp::max(duration.as_secs() as i64, 1), }) .await .map(|_| ())?) } pub async fn watch_gameserver(&mut self) -> Result<WatchStream> { Ok(self .client .watch_game_server(empty()) .await .map(|stream| stream.into_inner())?) } }
use std::{env, time::Duration}; use tonic::transport::Channel; mod api { tonic::include_proto!("agones.dev.sdk"); } use api::sdk_client::SdkClient; pub use api::GameServer; pub type WatchStream = tonic::Streaming<GameServer>; use crate::{alpha::Alpha, errors::Result}; #[inline] fn empty() -> api::Empty { api::Empty {} } #[derive(Clone)] pub struct Sdk { client: SdkClient<Channel>, alpha: Alpha, } impl Sdk {
#[inline] pub fn alpha(&self) -> &Alpha { &self.alpha } pub async fn ready(&mut self) -> Result<()> { Ok(self.client.ready(empty()).await.map(|_| ())?) } pub async fn allocate(&mut self) -> Result<()> { Ok(self.client.allocate(empty()).await.map(|_| ())?) } pub async fn shutdown(&mut self) -> Result<()> { Ok(self.client.shutdown(empty()).await.map(|_| ())?) } pub fn health_check(&self) -> tokio::sync::mpsc::Sender<()> { let mut health_client = self.clone(); let (tx, mut rx) = tokio::sync::mpsc::channel(10); tokio::task::spawn(async move { let health_stream = async_stream::stream! { while rx.recv().await.is_some() { yield empty(); } }; let _ = health_client.client.health(health_stream).await; }); tx } pub async fn set_label( &mut self, key: impl Into<String>, value: impl Into<String>, ) -> Result<()> { Ok(self .client .set_label(api::KeyValue { key: key.into(), value: value.into(), }) .await .map(|_| ())?) } pub async fn set_annotation( &mut self, key: impl Into<String>, value: impl Into<String>, ) -> Result<()> { Ok(self .client .set_annotation(api::KeyValue { key: key.into(), value: value.into(), }) .await .map(|_| ())?) } pub async fn get_gameserver(&mut self) -> Result<GameServer> { Ok(self .client .get_game_server(empty()) .await .map(|res| res.into_inner())?) } pub async fn reserve(&mut self, duration: Duration) -> Result<()> { Ok(self .client .reserve(api::Duration { seconds: std::cmp::max(duration.as_secs() as i64, 1), }) .await .map(|_| ())?) } pub async fn watch_gameserver(&mut self) -> Result<WatchStream> { Ok(self .client .watch_game_server(empty()) .await .map(|stream| stream.into_inner())?) } }
pub async fn new(port: Option<u16>, keep_alive: Option<Duration>) -> Result<Self> { let addr: http::Uri = format!( "http://localhost:{}", port.unwrap_or_else(|| { env::var("AGONES_SDK_GRPC_PORT") .ok() .and_then(|s| s.parse().ok()) .unwrap_or(9357) }) ) .parse()?; let builder = tonic::transport::channel::Channel::builder(addr) .connect_timeout(Duration::from_secs(30)) .keep_alive_timeout(keep_alive.unwrap_or_else(|| Duration::from_secs(30))); let channel = builder.connect_lazy()?; let mut client = SdkClient::new(channel.clone()); let alpha = Alpha::new(channel); tokio::time::timeout(Duration::from_secs(30), async { let mut connect_interval = tokio::time::interval(Duration::from_millis(100)); loop { connect_interval.tick().await; if client.get_game_server(empty()).await.is_ok() { break; } } }) .await?; Ok(Self { client, alpha }) }
function_block-full_function
[ { "content": "struct SDK::SDKImpl {\n\n std::string host_;\n\n std::shared_ptr<grpc::Channel> channel_;\n\n std::unique_ptr<agones::dev::sdk::SDK::Stub> stub_;\n\n std::unique_ptr<grpc::ClientWriter<agones::dev::sdk::Empty>> health_;\n\n std::unique_ptr<grpc::ClientContext> health_context_;\n\n};\n\n\n\nSD...
Rust
athena-coreserver/src/services/unit_service.rs
athena-intelli/athena-rs
7546137e14c3248fe20c2a68626beffc0c9ab7d3
use tokio_stream::wrappers::ReceiverStream; use tonic::{Request, Response, Status}; use athena_api::pb::structures::{ChangePriorityRequest, Unit}; use athena_api::pb::unit_service::*; use athena_api::pb::unit_service::unit_service_server::UnitService; #[derive(Default)] pub struct UnitServiceImpl; #[tonic::async_trait] impl UnitService for UnitServiceImpl { async fn add_to_queue(&self, request: Request<AddUnitToQueueRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn save_unit(&self, request: Request<Unit>) -> Result<Response<Unit>, Status> { todo!() } async fn cancel(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn close(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn hold(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn pause(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn quarantine(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn release(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_close(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, 
Status> { todo!() } async fn change_bom(&self, request: Request<UnitChangeBomRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_bom_from_part(&self, request: Request<UnitChangeBomFromPartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_lot(&self, request: Request<ChangeLotRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_part(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_part_only(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_priority(&self, request: Request<ChangePriorityRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_production_line(&self, request: Request<ChangeProductionLineRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_route(&self, request: Request<UnitChangeRouteRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_serial_number(&self, request: Request<ChangeSerialNumberRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn complete_at_route_step(&self, request: Request<UnitCompleteAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn start_at_route_step(&self, request: Request<UnitStartAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn restart(&self, request: Request<RestartUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn create_stand_alone_unit(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } type CreateStandAloneUnitsStream = ReceiverStream<Result<Unit, Status>>; async fn create_stand_alone_units(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Self::CreateStandAloneUnitsStream>, Status> { todo!() } }
use tokio_stream::wrappers::ReceiverStream; use tonic::{Request, Response, Status}; use athena_api::pb::structures::{ChangePriorityRequest, Unit}; use athena_api::pb::unit_service::*; use athena_api::pb::unit_service::unit_service_server::UnitService; #[derive(Default)] pub struct UnitServiceImpl; #[tonic::async_trait] impl UnitService for UnitServiceImpl { async fn add_to_queue(&self, request: Request<AddUnitToQueueRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn save_unit(&self, request: Request<Unit>) -> Result<Response<Unit>, Status> { todo!() } async fn cancel(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn close(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn hold(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn pause(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn quarantine(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn release(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_close(&self, request: Request<TransitionUnitRequest>) -> Result<Respo
, Status> { todo!() } async fn complete_at_route_step(&self, request: Request<UnitCompleteAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn start_at_route_step(&self, request: Request<UnitStartAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn restart(&self, request: Request<RestartUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn create_stand_alone_unit(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } type CreateStandAloneUnitsStream = ReceiverStream<Result<Unit, Status>>; async fn create_stand_alone_units(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Self::CreateStandAloneUnitsStream>, Status> { todo!() } }
nse<Unit>, Status> { todo!() } async fn undo_finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_bom(&self, request: Request<UnitChangeBomRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_bom_from_part(&self, request: Request<UnitChangeBomFromPartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_lot(&self, request: Request<ChangeLotRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_part(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_part_only(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_priority(&self, request: Request<ChangePriorityRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_production_line(&self, request: Request<ChangeProductionLineRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_route(&self, request: Request<UnitChangeRouteRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_serial_number(&self, request: Request<ChangeSerialNumberRequest>) -> Result<Response<Unit>
random
[ { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with UnitServiceServer.\"]\n\n #[async_trait]\n\n pub trait UnitService: Send + Sync + 'static {\n\n async fn add_to_queue(\n\n &self,\n\n request: tonic::Request<super::AddU...
Rust
layer_core/src/entry.rs
Sorenon/sorenon_openxr_layer
f32597269ac70ac63b7eb7ec9c6300cd8e6eca64
use std::ops::Deref; use std::sync::atomic::AtomicBool; use std::{ffi::CStr, sync::Arc}; use crate::loader_interfaces::*; use crate::wrappers::instance::{InnerInstance, InstanceWrapper, Runtime}; use crate::wrappers::XrHandle; use crate::ToResult; use log::{debug, error, info}; use openxr::sys as xr; use openxr::{ExtensionSet, InstanceExtensions, Result}; pub(crate) unsafe extern "system" fn create_api_layer_instance( instance_info: *const xr::InstanceCreateInfo, layer_info: *const ApiLayerCreateInfo, instance: *mut xr::Instance, ) -> xr::Result { std::panic::catch_unwind(|| create_instance(&*instance_info, &*layer_info, &mut *instance)) .map_or(xr::Result::ERROR_RUNTIME_FAILURE, |res| match res { Ok(res) => res, Err(res) => res, }) } fn create_instance( instance_info: &xr::InstanceCreateInfo, layer_info: &ApiLayerCreateInfo, instance: &mut xr::Instance, ) -> Result<xr::Result> { let next_info = &unsafe { *layer_info.next_info }; if unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) } .to_string_lossy() != crate::LAYER_NAME { error!( "Crate instance failed: Incorrect layer_name `{}`", unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) } .to_string_lossy() ); return Err(xr::Result::ERROR_VALIDATION_FAILURE); } debug!("Initializing OpenXR Entry"); let entry = unsafe { openxr::Entry::from_proc_addr(next_info.next_get_instance_proc_addr)? 
}; let available_extensions = entry.enumerate_extensions()?; let disable_opengl = true; let result = unsafe { let mut needs_opengl_replacement = false; let mut extensions = std::slice::from_raw_parts( instance_info.enabled_extension_names, instance_info.enabled_extension_count as usize, ) .iter() .filter_map(|ext| { let ext_name = CStr::from_ptr(*ext).to_str().unwrap(); if ext_name == "XR_KHR_opengl_enable" { if disable_opengl { needs_opengl_replacement = true; } else if !available_extensions.khr_opengl_enable { needs_opengl_replacement = true; return None; } } Some(*ext) }) .collect::<Vec<_>>(); if needs_opengl_replacement { extensions.push("XR_KHR_vulkan_enable2\0".as_ptr() as *const i8); } let mut instance_info2 = *instance_info; instance_info2.enabled_extension_names = extensions.as_ptr(); instance_info2.enabled_extension_count = extensions.len() as u32; let mut layer_info2 = *layer_info; layer_info2.next_info = (*layer_info2.next_info).next; (next_info.next_create_api_layer_instance)(&instance_info2, &layer_info2, instance).result() }?; let mut supported_extensions = ExtensionSet::default(); supported_extensions.khr_vulkan_enable2 = true; let inner = unsafe { InnerInstance { poison: AtomicBool::new(false), core: openxr::raw::Instance::load(&entry, *instance)?, exts: InstanceExtensions::load(&entry, *instance, &supported_extensions)?, } }; let runtime_name = unsafe { let mut instance_properties = xr::InstanceProperties::out(std::ptr::null_mut()); (inner.core.get_instance_properties)(*instance, instance_properties.as_mut_ptr()) .result()?; let instance_properties = instance_properties.assume_init(); CStr::from_ptr(std::mem::transmute( instance_properties.runtime_name.as_ptr(), )) .to_string_lossy() }; let runtime = match runtime_name.deref() { "SteamVR/OpenXR" => Runtime::SteamVR, "Oculus" => Runtime::Oculus, "Windows Mixed Reality Runtime" => Runtime::WMR, "Monado(XRT) by Collabora et al" => Runtime::Monado, _ => Runtime::Other(runtime_name.to_string()), }; let 
wrapper = InstanceWrapper { handle: *instance, inner: Arc::new(inner), systems: Default::default(), sessions: Default::default(), runtime, }; xr::Instance::all_wrappers().insert(*instance, Arc::new(wrapper)); info!("Instance created with name `{}`", unsafe { CStr::from_ptr(&instance_info.application_info.application_name as _).to_string_lossy() }); Ok(result) }
use std::ops::Deref; use std::sync::atomic::AtomicBool; use std::{ffi::CStr, sync::Arc}; use crate::loader_interfaces::*; use crate::wrappers::instance::{InnerInstance, InstanceWrapper, Runtime}; use crate::wrappers::XrHandle; use crate::ToResult; use log::{debug, error, info}; use openxr::sys as xr; use openxr::{ExtensionSet, InstanceExtensions, Result}; pub(crate) unsafe extern "system" fn create_api_layer_instance( instance_info: *const xr::InstanceCreateInfo, layer_info: *const ApiLayerCreateInfo, instance: *mut xr::Instance, ) -> xr::Result { std::panic::catch_unwind(|| create_instance(&*instance_info, &*layer_info, &mut *instance)) .map_or(xr::Result::ERROR_RUNTIME_FAILURE, |res| match res { Ok(res) => res, Err(res) => res, }) } fn create_instance( instance_info: &xr::InstanceCreateInfo, layer_info: &ApiLayerCreateInfo, instance: &mut xr::Instance, ) -> Result<xr::Res
dows Mixed Reality Runtime" => Runtime::WMR, "Monado(XRT) by Collabora et al" => Runtime::Monado, _ => Runtime::Other(runtime_name.to_string()), }; let wrapper = InstanceWrapper { handle: *instance, inner: Arc::new(inner), systems: Default::default(), sessions: Default::default(), runtime, }; xr::Instance::all_wrappers().insert(*instance, Arc::new(wrapper)); info!("Instance created with name `{}`", unsafe { CStr::from_ptr(&instance_info.application_info.application_name as _).to_string_lossy() }); Ok(result) }
ult> { let next_info = &unsafe { *layer_info.next_info }; if unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) } .to_string_lossy() != crate::LAYER_NAME { error!( "Crate instance failed: Incorrect layer_name `{}`", unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) } .to_string_lossy() ); return Err(xr::Result::ERROR_VALIDATION_FAILURE); } debug!("Initializing OpenXR Entry"); let entry = unsafe { openxr::Entry::from_proc_addr(next_info.next_get_instance_proc_addr)? }; let available_extensions = entry.enumerate_extensions()?; let disable_opengl = true; let result = unsafe { let mut needs_opengl_replacement = false; let mut extensions = std::slice::from_raw_parts( instance_info.enabled_extension_names, instance_info.enabled_extension_count as usize, ) .iter() .filter_map(|ext| { let ext_name = CStr::from_ptr(*ext).to_str().unwrap(); if ext_name == "XR_KHR_opengl_enable" { if disable_opengl { needs_opengl_replacement = true; } else if !available_extensions.khr_opengl_enable { needs_opengl_replacement = true; return None; } } Some(*ext) }) .collect::<Vec<_>>(); if needs_opengl_replacement { extensions.push("XR_KHR_vulkan_enable2\0".as_ptr() as *const i8); } let mut instance_info2 = *instance_info; instance_info2.enabled_extension_names = extensions.as_ptr(); instance_info2.enabled_extension_count = extensions.len() as u32; let mut layer_info2 = *layer_info; layer_info2.next_info = (*layer_info2.next_info).next; (next_info.next_create_api_layer_instance)(&instance_info2, &layer_info2, instance).result() }?; let mut supported_extensions = ExtensionSet::default(); supported_extensions.khr_vulkan_enable2 = true; let inner = unsafe { InnerInstance { poison: AtomicBool::new(false), core: openxr::raw::Instance::load(&entry, *instance)?, exts: InstanceExtensions::load(&entry, *instance, &supported_extensions)?, } }; let runtime_name = unsafe { let mut instance_properties = xr::InstanceProperties::out(std::ptr::null_mut()); 
(inner.core.get_instance_properties)(*instance, instance_properties.as_mut_ptr()) .result()?; let instance_properties = instance_properties.assume_init(); CStr::from_ptr(std::mem::transmute( instance_properties.runtime_name.as_ptr(), )) .to_string_lossy() }; let runtime = match runtime_name.deref() { "SteamVR/OpenXR" => Runtime::SteamVR, "Oculus" => Runtime::Oculus, "Win
random
[ { "content": "type Func<H, T> = unsafe extern \"system\" fn(\n\n handle: H,\n\n format_capacity_input: u32,\n\n format_count_output: *mut u32,\n\n out: *mut T,\n\n) -> xr::Result;\n\n\n\npub unsafe fn call_enumerate<H: Copy, T: Copy>(\n\n handle: H,\n\n f: Func<H, T>,\n\n default: T,\n\n) -...
Rust
src/handlers/common/global.rs
ALinuxPerson/try-drop
1b2ac32ba746747f3af51278a689d19a94261691
pub(crate) mod imports {} use crate::handlers::common::Handler; use crate::handlers::UninitializedError; use parking_lot::{ MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, }; use std::marker::PhantomData; pub trait GlobalDefinition: Handler { const UNINITIALIZED_ERROR: &'static str; type Global: 'static; fn global() -> &'static RwLock<Option<Self::Global>>; } pub trait DefaultGlobalDefinition: GlobalDefinition { fn default() -> Self::Global; } pub struct Global<T: GlobalDefinition>(PhantomData<T>); impl<T: GlobalDefinition> Global<T> { pub fn install_dyn(strategy: T::Global) { T::global().write().replace(strategy); } pub fn install(strategy: impl Into<T::Global>) { Self::install_dyn(strategy.into()) } pub fn try_read() -> Result<MappedRwLockReadGuard<'static, T::Global>, UninitializedError> { let global = T::global().read(); if global.is_some() { Ok(RwLockReadGuard::map(global, |global| { global.as_ref().unwrap() })) } else { Err(UninitializedError(())) } } pub fn read() -> MappedRwLockReadGuard<'static, T::Global> { Self::try_read().expect(T::UNINITIALIZED_ERROR) } pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, T::Global>, UninitializedError> { let global = T::global().write(); if global.is_some() { Ok(RwLockWriteGuard::map(global, |global| { global.as_mut().unwrap() })) } else { Err(UninitializedError(())) } } pub fn write() -> MappedRwLockWriteGuard<'static, T::Global> { Self::try_write().expect(T::UNINITIALIZED_ERROR) } pub fn uninstall() { *T::global().write() = None } } impl<T: DefaultGlobalDefinition> Global<T> { pub fn read_or_default() -> MappedRwLockReadGuard<'static, T::Global> { drop(Self::write_or_default()); Self::read() } pub fn write_or_default() -> MappedRwLockWriteGuard<'static, T::Global> { RwLockWriteGuard::map(T::global().write(), |drop_strategy| { drop_strategy.get_or_insert_with(T::default) }) } } macro_rules! 
global_methods { ( Global = $global:ident; GenericStrategy = $generic_strategy:ident; DynStrategy = $dyn_strategy:ident; feature = $feature:literal; $(#[$($install_dyn_tt:tt)*])* install_dyn; $(#[$($install_tt:tt)*])* install; $(#[$($try_read_tt:tt)*])* try_read; $(#[$($read_tt:tt)*])* read; $(#[$($try_write_tt:tt)*])* try_write; $(#[$($write_tt:tt)*])* write; $(#[$($uninstall_tt:tt)*])* uninstall; $(#[$($read_or_default_tt:tt)*])* read_or_default; $(#[$($write_or_default_tt:tt)*])* write_or_default; ) => { #[allow(unused_imports)] use $crate::handlers::common::global::imports::*; $(#[$($install_dyn_tt)*])* pub fn install_dyn(strategy: $dyn_strategy) { $global::install_dyn(strategy) } $(#[$($install_tt)*])* pub fn install(strategy: impl $generic_strategy) { $global::install(strategy) } $(#[$($try_read_tt)*])* pub fn try_read() -> Result<MappedRwLockReadGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_read() } $(#[$($read_tt)*])* pub fn read() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read() } $(#[$($try_write_tt)*])* pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_write() } $(#[$($write_tt)*])* pub fn write() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write() } $(#[$($uninstall_tt)*])* pub fn uninstall() { $global::uninstall() } $(#[$($read_or_default_tt)*])* #[cfg(feature = $feature)] pub fn read_or_default() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read_or_default() } $(#[$($write_or_default_tt)*])* #[cfg(feature = $feature)] pub fn write_or_default() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write_or_default() } }; }
pub(crate) mod imports {} use crate::handlers::common::Handler; use crate::handlers::UninitializedError; use parking_lot::{ MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, }; use std::marker::PhantomData; pub trait GlobalDefinition: Handler { const UNINITIALIZED_ERROR: &'static str; type Global: 'static; fn global() -> &'static RwLock<Option<Self::Global>>; } pub trait DefaultGlobalDefinition: GlobalDefinition { fn default() -> Self::Global; } pub struct Global<T: GlobalDefinition>(PhantomData<T>); impl<T: GlobalDefinition> Global<T> { pub fn install_dyn(strategy: T::Global) { T::global().write().replace(strategy); } pub fn install(strategy: impl Into<T::Global>) { Self::install_dyn(strategy.into()) } pub fn try_read() -> Result<MappedRwLockReadGuard<'static, T::Global>, UninitializedError> { let global = T::global().read(); if global.is_some() { Ok(RwLockReadGuard::map(global, |global| { global.as_ref().unwrap() })) } else { Err(UninitializedError(())) } } pub fn read() -> MappedRwLockReadGuard<'static, T::Global> { Self::try_read().expect(T::UNINITIALIZED_ERROR) } pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, T::Global>, UninitializedError> { let global = T::global().write(); if glo
pub fn write() -> MappedRwLockWriteGuard<'static, T::Global> { Self::try_write().expect(T::UNINITIALIZED_ERROR) } pub fn uninstall() { *T::global().write() = None } } impl<T: DefaultGlobalDefinition> Global<T> { pub fn read_or_default() -> MappedRwLockReadGuard<'static, T::Global> { drop(Self::write_or_default()); Self::read() } pub fn write_or_default() -> MappedRwLockWriteGuard<'static, T::Global> { RwLockWriteGuard::map(T::global().write(), |drop_strategy| { drop_strategy.get_or_insert_with(T::default) }) } } macro_rules! global_methods { ( Global = $global:ident; GenericStrategy = $generic_strategy:ident; DynStrategy = $dyn_strategy:ident; feature = $feature:literal; $(#[$($install_dyn_tt:tt)*])* install_dyn; $(#[$($install_tt:tt)*])* install; $(#[$($try_read_tt:tt)*])* try_read; $(#[$($read_tt:tt)*])* read; $(#[$($try_write_tt:tt)*])* try_write; $(#[$($write_tt:tt)*])* write; $(#[$($uninstall_tt:tt)*])* uninstall; $(#[$($read_or_default_tt:tt)*])* read_or_default; $(#[$($write_or_default_tt:tt)*])* write_or_default; ) => { #[allow(unused_imports)] use $crate::handlers::common::global::imports::*; $(#[$($install_dyn_tt)*])* pub fn install_dyn(strategy: $dyn_strategy) { $global::install_dyn(strategy) } $(#[$($install_tt)*])* pub fn install(strategy: impl $generic_strategy) { $global::install(strategy) } $(#[$($try_read_tt)*])* pub fn try_read() -> Result<MappedRwLockReadGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_read() } $(#[$($read_tt)*])* pub fn read() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read() } $(#[$($try_write_tt)*])* pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_write() } $(#[$($write_tt)*])* pub fn write() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write() } $(#[$($uninstall_tt)*])* pub fn uninstall() { $global::uninstall() } $(#[$($read_or_default_tt)*])* #[cfg(feature = $feature)] pub fn read_or_default() -> 
MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read_or_default() } $(#[$($write_or_default_tt)*])* #[cfg(feature = $feature)] pub fn write_or_default() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write_or_default() } }; }
bal.is_some() { Ok(RwLockWriteGuard::map(global, |global| { global.as_mut().unwrap() })) } else { Err(UninitializedError(())) } }
function_block-function_prefixed
[ { "content": "pub trait Handler: private::Sealed {}\n\n\n\npub enum Primary {}\n\nimpl private::Sealed for Primary {}\n\nimpl Handler for Primary {}\n\n\n\npub enum Fallback {}\n\nimpl private::Sealed for Fallback {}\n\nimpl Handler for Fallback {}\n\n\n", "file_path": "src/handlers/common/mod.rs", "ran...
Rust
crates/taplo-cli/src/commands/toml_test.rs
koalp/taplo
03044c5d4fd2ab64656d371ad2043ccd0146325b
use crate::Taplo; use anyhow::anyhow; use serde::{ ser::{SerializeMap, SerializeSeq}, Serialize, }; use taplo::dom::{ node::{DateTimeValue, DomNode}, Node, }; use taplo_common::environment::Environment; use tokio::io::AsyncReadExt; impl<E: Environment> Taplo<E> { pub async fn execute_toml_test(&self) -> Result<(), anyhow::Error> { let mut buf = String::new(); self.env.stdin().read_to_string(&mut buf).await?; let parse = taplo::parser::parse(&buf); if !parse.errors.is_empty() { for err in parse.errors { eprintln!("{err}"); } return Err(anyhow!("invalid toml")); } let dom = parse.into_dom(); if let Err(err) = dom.validate() { for err in err { eprintln!("{err}"); } return Err(anyhow!("invalid toml")); } serde_json::to_writer(std::io::stdout(), &TomlTestValue::new(&dom))?; Ok(()) } } #[derive(Clone, Copy, Serialize)] #[serde(rename_all = "lowercase")] pub enum TomlTestType { String, Integer, Float, Bool, DateTime, #[serde(rename = "datetime-local")] DateTimeLocal, #[serde(rename = "date-local")] DateLocal, #[serde(rename = "time-local")] TimeLocal, } impl TomlTestType { fn of(node: &Node) -> Option<Self> { match node { Node::Bool(_) => Some(TomlTestType::Bool), Node::Integer(_) => Some(TomlTestType::Integer), Node::Float(_) => Some(TomlTestType::Float), Node::Str(_) => Some(TomlTestType::String), Node::Date(d) => match d.value() { DateTimeValue::OffsetDateTime(_) => Some(TomlTestType::DateTime), DateTimeValue::LocalDateTime(_) => Some(TomlTestType::DateTimeLocal), DateTimeValue::Date(_) => Some(TomlTestType::DateLocal), DateTimeValue::Time(_) => Some(TomlTestType::TimeLocal), }, Node::Array(_) => None, Node::Table(_) => None, Node::Invalid(_) => unreachable!(), } } } pub struct TomlTestValue<'a> { r#type: Option<TomlTestType>, node: &'a Node, } impl<'a> TomlTestValue<'a> { pub fn new(node: &'a Node) -> Self { Self { r#type: TomlTestType::of(node), node, } } } impl<'a> Serialize for TomlTestValue<'a> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 
where S: serde::Serializer, { if let Some(ty) = self.r#type { let mut map = serializer.serialize_map(Some(2))?; map.serialize_entry("type", &ty)?; map.serialize_entry( "value", &match self.node { Node::Str(d) => d.value().to_string(), Node::Float(f) if f.value().is_nan() => String::from("nan"), Node::Float(f) if f.value().is_infinite() => f.syntax().unwrap().to_string(), _ => serde_json::to_string(&self.node).map_err(serde::ser::Error::custom)?, }, )?; map.end() } else { match &self.node { Node::Array(array) => { let items = array.items().read(); let mut seq = serializer.serialize_seq(Some(items.len()))?; for value in &**items { seq.serialize_element(&TomlTestValue::new(value))?; } seq.end() } Node::Table(table) => { let entries = table.entries().read(); let mut map = serializer.serialize_map(Some(entries.len()))?; for (key, value) in entries.iter() { map.serialize_entry(key.value(), &TomlTestValue::new(value))?; } map.end() } _ => unreachable!(), } } } }
use crate::Taplo; use anyhow::anyhow; use serde::{ ser::{SerializeMap, SerializeSeq}, Serialize, }; use taplo::dom::{ node::{DateTimeValue, DomNode}, Node, }; use taplo_common::environment::Environment; use tokio::io::AsyncReadExt; impl<E: Environment> Taplo<E> { pub async fn execute_toml_test(&self) -> Result<(), anyhow::Error> { let mut buf = String::new(); self.env.stdin().read_to_string(&mut buf).await?; let parse = taplo::parser::parse(&buf); if !parse.errors.is_empty() { for err in parse.errors { eprintln!("{err}"); } return Err(anyhow!("invalid toml")); } let dom = parse.into_dom(); if let Err(err) = dom.validate() { for err in err { eprintln!("{err}"); } return Err(anyhow!("invalid toml")); } serde_json::to_writer(std::io::stdout(), &TomlTestValue::new(&dom))?; Ok(()) } } #[derive(Clone, Copy, Serialize)] #[serde(rename_all = "lowercase")] pub enum TomlTestType { String, Integer, Float, Bool, DateTime, #[serde(rename = "datetime-local")] DateTimeLocal, #[serde(rename = "date-local")] DateLocal, #[serde(rename = "time-local")] TimeLocal, } impl TomlTestType { fn of(node: &Node) -> Option<Self> { match node { Node::Bool(_) => Some(TomlTestType::Bool), Node::Integer(_) => Some(TomlTestType::Integer), Node::Float(_) => Some(TomlTestType::Float), Node::Str(_) => Some(TomlTestType::String), Node::Date(d) => match d.value() { DateTimeValue::OffsetDateTime(_) => Some(TomlTestType::DateTime), DateTimeValue::LocalDateTime(_) => Some(TomlTestType::DateTimeLocal), DateTimeValue::Date(_) => Some(TomlTestType::DateLocal), DateTimeValue::Time(_) => Some(TomlTestType::TimeLocal), }, Node::Array(_) => None, Node::Table(_) => None, Node::Invalid(_) => unreachable!(), } } } pub struct TomlTestValue<'a> { r#type: Option<TomlTestType>, node: &'a Node, } impl<'a> TomlTestValue<'a> { pub fn new(node: &'a Node) -> Self { Self { r#type: TomlTestType::of(node), node, } } } impl<'a> Serialize for TomlTestValue<'a> { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Erro
Node::Float(f) if f.value().is_nan() => String::from("nan"), Node::Float(f) if f.value().is_infinite() => f.syntax().unwrap().to_string(), _ => serde_json::to_string(&self.node).map_err(serde::ser::Error::custom)?, }, )?; map.end() } else { match &self.node { Node::Array(array) => { let items = array.items().read(); let mut seq = serializer.serialize_seq(Some(items.len()))?; for value in &**items { seq.serialize_element(&TomlTestValue::new(value))?; } seq.end() } Node::Table(table) => { let entries = table.entries().read(); let mut map = serializer.serialize_map(Some(entries.len()))?; for (key, value) in entries.iter() { map.serialize_entry(key.value(), &TomlTestValue::new(value))?; } map.end() } _ => unreachable!(), } } } }
r> where S: serde::Serializer, { if let Some(ty) = self.r#type { let mut map = serializer.serialize_map(Some(2))?; map.serialize_entry("type", &ty)?; map.serialize_entry( "value", &match self.node { Node::Str(d) => d.value().to_string(),
random
[ { "content": "fn extract_value(node: &Node) -> Result<String, anyhow::Error> {\n\n Ok(match node {\n\n Node::Table(_) => {\n\n return Err(anyhow!(\n\n r#\"cannot print tables with the given output format, specify a different output format (e.g. with `-o json`) \"#\n\n ...
Rust
midi/src/message/system_common.rs
alisomay/koto_midi
7cf4579bb8d3c6c8bd4f29d7b101b4d2e8501a08
use crate::impl_midi_message; use crate::Category; use crate::MidiMessage; #[derive(Debug)] pub struct SystemExclusive { bytes: Vec<u8>, pub manufacturer_id: Vec<u8>, pub category: Category, } impl SystemExclusive { pub fn new(manufacturer_id: &[u8], message_content: &[u8]) -> Self { let mut message = message_content.to_vec(); message.insert(0, 0xF0); if manufacturer_id.len() == 1 { message.insert(1, manufacturer_id[0]); } else { message.insert(1, manufacturer_id[0]); message.insert(2, manufacturer_id[1]); message.insert(3, manufacturer_id[2]); } message.push(0xF7); Self { bytes: message, manufacturer_id: manufacturer_id.to_vec(), category: Category::SystemCommon, } } } impl From<&[u8]> for SystemExclusive { fn from(raw_bytes: &[u8]) -> Self { let mut manufacturer_id: Vec<u8> = vec![]; if raw_bytes[1] != 0 { manufacturer_id.push(raw_bytes[1]); } else { manufacturer_id.push(raw_bytes[1]); manufacturer_id.push(raw_bytes[2]); manufacturer_id.push(raw_bytes[3]); } SystemExclusive { bytes: raw_bytes.to_vec(), manufacturer_id, category: Category::SystemCommon, } } } impl Default for SystemExclusive { fn default() -> Self { Self { bytes: vec![0xF0, 0x01, 0x0, 0x0, 0xF7], manufacturer_id: vec![0x01], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TimeCodeQuarterFrame { bytes: [u8; 2], message_type: u8, values: u8, pub category: Category, } impl TimeCodeQuarterFrame { pub fn new(message_type: u64, values: u64) -> Self { Self { bytes: [ 0xF1, (message_type.min(7) << 4) as u8 | values.min(15) as u8, ], message_type: message_type.min(7) as u8, values: values.min(15) as u8, category: Category::SystemCommon, } } pub fn message_type(&self) -> u8 { self.message_type } pub fn values(&self) -> u8 { self.values } pub fn change_message_type(&mut self, message_type: u8) { self.message_type = message_type; self.bytes[1] = (self.message_type.min(7) << 4) | self.values.min(15); } pub fn change_values(&mut self, values: u8) { self.values = values; self.bytes[1] = 
(self.message_type.min(7) << 4) | self.values.min(15); } } impl From<&[u8]> for TimeCodeQuarterFrame { fn from(raw_bytes: &[u8]) -> Self { TimeCodeQuarterFrame { bytes: [raw_bytes[0], raw_bytes[1]], message_type: (raw_bytes[1] & 0b0111_0000) >> 4, values: raw_bytes[1] & 0b0000_1111, category: Category::SystemCommon, } } } impl Default for TimeCodeQuarterFrame { fn default() -> Self { todo!() } } #[derive(Debug)] pub struct SongPosition { bytes: [u8; 3], midi_beats_elapsed: u16, pub category: Category, } impl SongPosition { pub fn new(midi_beats_elapsed: u64) -> Self { let midi_beats_elapsed = midi_beats_elapsed.min(16383) as u16; let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; Self { bytes: [0xF2, lsb.min(127) as u8, msb.min(127) as u8], midi_beats_elapsed, category: Category::SystemCommon, } } pub fn midi_beats_elapsed(&self) -> u16 { self.midi_beats_elapsed } pub fn change_midi_beats_elapsed(&mut self, midi_beats_elapsed: u16) { self.midi_beats_elapsed = midi_beats_elapsed.min(16383); let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; self.bytes[1] = lsb; self.bytes[2] = msb; } } impl From<&[u8]> for SongPosition { fn from(raw_bytes: &[u8]) -> Self { let midi_beats_elapsed = ((raw_bytes[2] as u16) << 7) | raw_bytes[1] as u16; SongPosition { bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]], midi_beats_elapsed, category: Category::SystemCommon, } } } impl Default for SongPosition { fn default() -> Self { Self { bytes: [0xF2, 0, 0], midi_beats_elapsed: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct SongSelect { bytes: [u8; 2], number: u8, pub category: Category, } impl SongSelect { pub fn new(number: u64) -> Self { Self { bytes: [0xF3, number.min(127) as u8], number: number.min(127) as u8, category: Category::SystemCommon, } } pub fn number(&self) -> u8 { self.number } } impl From<&[u8]> for SongSelect { fn 
from(raw_bytes: &[u8]) -> Self { SongSelect { bytes: [raw_bytes[0], raw_bytes[1]], number: raw_bytes[1], category: Category::SystemCommon, } } } impl Default for SongSelect { fn default() -> Self { Self { bytes: [0xF3, 0], number: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TuneRequest { bytes: [u8; 1], pub category: Category, } impl Default for TuneRequest { fn default() -> Self { TuneRequest { bytes: [0xF6], category: Category::SystemCommon, } } } impl TuneRequest { pub fn new() -> Self { TuneRequest::default() } } impl From<&[u8]> for TuneRequest { fn from(raw_bytes: &[u8]) -> Self { TuneRequest { bytes: [raw_bytes[0]], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct EndOfExclusive { bytes: [u8; 1], pub category: Category, } impl Default for EndOfExclusive { fn default() -> Self { EndOfExclusive { bytes: [0xF7], category: Category::SystemCommon, } } } impl EndOfExclusive { pub fn new() -> Self { EndOfExclusive::default() } } impl From<&[u8]> for EndOfExclusive { fn from(raw_bytes: &[u8]) -> Self { EndOfExclusive { bytes: [raw_bytes[0]], category: Category::SystemCommon, } } } impl_midi_message!(SystemExclusive); impl_midi_message!(TimeCodeQuarterFrame); impl_midi_message!(SongPosition); impl_midi_message!(SongSelect); impl_midi_message!(TuneRequest); impl_midi_message!(EndOfExclusive);
use crate::impl_midi_message; use crate::Category; use crate::MidiMessage; #[derive(Debug)] pub struct SystemExclusive { bytes: Vec<u8>, pub manufacturer_id: Vec<u8>, pub category: Category, } impl SystemExclusive { pub fn new(manufacturer_id: &[u8], message_content: &[u8]) -> Self { let mut message = message_content.to_vec(); message.insert(0, 0xF0); if manufacturer_id.len() == 1 { message.insert(1, manufacturer_id[0]); } else { message.insert(1, manufacturer_id[0]); message.insert(2, manufacturer_id[1]); message.insert(3, manufacturer_id[2]); } message.push(0xF7); Self { bytes: message, manufacturer_id: manufacturer_id.to_vec(), category: Category::SystemCommon, } } } impl From<&[u8]> for SystemExclusive { fn from(raw_bytes: &[u8]) -> Self { let mut manufacturer_id: Vec<u8> = vec![]; if raw_bytes[1] != 0 { manufacturer_id.push(raw_bytes[1]); } else { manufacturer_id.push(raw_bytes[1]); manufacturer_id.push(raw_bytes[2]); manufacturer_id.push(raw_bytes[3]); } SystemExclusive { bytes: raw_bytes.to_vec(), manufacturer_id, category: Category::SystemCommon, } } } impl Default for SystemExclusive { fn default() -> Self { Self { bytes: vec![0xF0, 0x01, 0x0, 0x0, 0xF7], manufacturer_id: vec![0x01], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TimeCodeQuarterFrame { bytes: [u8; 2], message_type: u8, values: u8, pub category: Category, } impl TimeCodeQuarterFrame { pub fn new(message_type: u64, values: u64) -> Self { Self { bytes: [ 0xF1, (message_type.min(7) << 4) as u8 | values.min(15) as u8, ], message_type: message_type.min(7) as u8, values: values.min(15) as u8, category: Category::SystemCommon, } } pub fn message_type(&self) -> u8 { self.message_type } pub fn values(&self) -> u8 { self.values } pub fn change_message_type(&mut self, message_type: u8) { self.message_type = message_type; self.bytes[1] = (self.message_type.min(7) << 4) | self.values.min(15); } pub fn change_values(&mut self, values: u8) { self.values = values; self.bytes[1] = 
(self.message_type.min(7) << 4) | self.values.min(15); } } impl From<&[u8]> for TimeCodeQuarterFrame { fn from(raw_bytes: &[u8]) -> Self { TimeCodeQuarterFrame { bytes: [raw_bytes[0], raw_bytes[1]], message_type: (raw_bytes[1] & 0b0111_0000) >> 4, values: raw_bytes[1] & 0b0000_1111, category: Category::SystemCommon, } } } impl Default for TimeCodeQuarterFrame { fn default() -> Self { todo!() } } #[derive(Debug)] pub struct SongPosition { bytes: [u8; 3], midi_beats_elapsed: u16, pub category: Category, } impl SongPosition { pub fn new(midi_beats_elapsed: u64) -> Self { let midi_beats_elapsed = midi_beats_elapsed.min(16383) as u16; let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; Self { bytes: [0xF2, lsb.min(127) as u8, msb.min(127) as u8], midi_beats_elapsed, category: Category::SystemCommon, } } pub fn midi_beats_elapsed(&self) -> u16 { self.midi_beats_elapsed } pub fn change_midi_beats_elapsed(&mut self, midi_beats_elapsed: u16) { self.midi_beats_elapsed = midi_beats_elapsed.min(16383); let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; self.bytes[1] = lsb; self.bytes[2] = msb; } } impl From<&[u8]> for SongPosition { fn from(raw_bytes: &[u8]) -> Self { let midi_beats_elapsed = ((raw_bytes[2] as u16) << 7) | raw_bytes[1] as u16; SongPosition { bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]], midi_beats_elapsed, category: Category::SystemCommon, } } } impl Default for SongPosition { fn default() -> Self { Self { bytes: [0xF2, 0, 0], midi_beats_elapsed: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct SongSelect { bytes: [u8; 2], number: u8, pub category: Category, } impl SongSelect { pub fn new(number: u64) -> Self { Self { bytes: [0xF3, number.min(127) as u8], number: number.min(127) as u8, category: Category::SystemCommon, } } pub fn number(&self) -> u8 { self.number } } impl From<&[u8]> for SongSelect { fn 
from(raw_bytes: &[u8]) -> Self { SongSelect { bytes: [raw_bytes[0], raw_bytes[1]], number: raw_bytes[1], category: Category::SystemCommon, } } } impl Default for SongSelect { fn default() -> Self { Self { bytes: [0xF3, 0], number: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TuneRequest { bytes: [u8; 1], pub category: Category, } impl Default for TuneRequest { fn default() -> Self { TuneRequest { bytes: [0xF6], category: Category::SystemCommon, } } } impl TuneRequest { pub fn new() -> Self { TuneRequest::default() } } impl From<&[u8]> for TuneRequest { fn from(raw_bytes: &[u8]) -> Self { TuneRequest { bytes: [raw_bytes[0]], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct EndOfExclusive { bytes: [u8; 1], pub category: Category, } impl Default for EndOfExclusive { fn default() -> Self { EndOfExclusive { bytes: [0xF7], category: Category::SystemCommon, } } } impl EndOfExclusive { pub fn new() -> Self { EndOfExclusive::default() } } impl From<&[u8]> for EndOfExclusive {
} impl_midi_message!(SystemExclusive); impl_midi_message!(TimeCodeQuarterFrame); impl_midi_message!(SongPosition); impl_midi_message!(SongSelect); impl_midi_message!(TuneRequest); impl_midi_message!(EndOfExclusive);
fn from(raw_bytes: &[u8]) -> Self { EndOfExclusive { bytes: [raw_bytes[0]], category: Category::SystemCommon, } }
function_block-full_function
[ { "content": "// TODO: Solve unnecessary repetition of list collectors for different types ot cases if there is.\n\npub fn collect_list_of_midi_bytes_as_u8(\n\n message: &ValueList,\n\n error: &str,\n\n) -> std::result::Result<Vec<u8>, RuntimeError> {\n\n let arguments = message\n\n .data()\n\n ...
Rust
avr-hal-generic/src/wdt.rs
IamTheCarl/avr-hal
10d311ba74a07289d0ad3afa05f4b22800a2ec7a
use core::marker::PhantomData; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum Timeout { Ms16, Ms32, Ms64, Ms125, Ms250, Ms500, Ms1000, Ms2000, Ms4000, Ms8000, } pub trait WdtOps<H> { type MCUSR; fn raw_init(&mut self, m: &Self::MCUSR); fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()>; fn raw_feed(&mut self); fn raw_stop(&mut self); } pub struct Wdt<H, WDT> { p: WDT, _h: PhantomData<H>, } impl<H, WDT: WdtOps<H>> Wdt<H, WDT> { pub fn new(mut p: WDT, m: &WDT::MCUSR) -> Self { p.raw_init(m); Self { p, _h: PhantomData } } pub fn start(&mut self, timeout: Timeout) -> Result<(), ()> { self.p.raw_start(timeout) } pub fn feed(&mut self) { self.p.raw_feed() } pub fn stop(&mut self) { self.p.raw_stop() } } #[macro_export] macro_rules! impl_wdt { ( hal: $HAL:ty, peripheral: $WDT:ty, mcusr: $MCUSR:ty, timeout: |$to:ident, $w:ident| $to_match:expr, ) => { impl $crate::wdt::WdtOps<$HAL> for $WDT { type MCUSR = $MCUSR; #[inline] fn raw_init(&mut self, m: &Self::MCUSR) { m.modify(|_, w| w.wdrf().clear_bit()); } #[inline] fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()> { $crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.write(|w| { let $to = timeout; let $w = w; ($to_match).wde().set_bit().wdce().clear_bit() }); Ok(()) }) } #[inline] fn raw_feed(&mut self) { avr_device::asm::wdr(); } #[inline] fn raw_stop(&mut self) { $crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.reset(); }) } } }; }
use core::marker::PhantomData; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum Timeout { Ms16, Ms32, Ms64, Ms125, Ms250, Ms500, Ms1000, Ms2000, Ms4000, Ms8000, } pub trait WdtOps<H> { type MCUSR; fn raw_init(&mut self, m: &Self::MCUSR); fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()>; fn raw_feed(&mut self); fn raw_stop(&mut self); } pub struct Wdt<H, WDT> { p: WDT, _h: PhantomData<H>, } impl<H, WDT: WdtOps<H>> Wdt<H, WDT> { pub fn new(mut p: WDT, m: &WDT::MCUSR) -> Self { p.raw_init(m); Self { p, _h: PhantomData } } pub fn start(&mut self, timeout: Timeout) -> Result<(), ()> { self.p.raw_start(timeout) } pub fn feed(&mut self) { self.p.raw_feed() } pub fn stop(&mut self) { self.p.raw_stop() } } #[macro_export] macro_rules! impl_wdt { ( hal: $HAL:ty, peripheral: $WDT:ty, mcusr: $MCUSR:ty,
$crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.reset(); }) } } }; }
timeout: |$to:ident, $w:ident| $to_match:expr, ) => { impl $crate::wdt::WdtOps<$HAL> for $WDT { type MCUSR = $MCUSR; #[inline] fn raw_init(&mut self, m: &Self::MCUSR) { m.modify(|_, w| w.wdrf().clear_bit()); } #[inline] fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()> { $crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.write(|w| { let $to = timeout; let $w = w; ($to_match).wde().set_bit().wdce().clear_bit() }); Ok(()) }) } #[inline] fn raw_feed(&mut self) { avr_device::asm::wdr(); } #[inline] fn raw_stop(&mut self) {
random
[ { "content": " pub trait Sealed {}\n\n}\n\npub(crate) use sealed::Sealed;\n", "file_path": "avr-hal-generic/src/lib.rs", "rank": 1, "score": 114647.06503214789 }, { "content": "/// A clock speed\n\npub trait Clock {\n\n /// Frequency of this clock in Hz\n\n const FREQ: u32;\n\n}\n\n...
Rust
src/main.rs
hardcorebadger/rustchain
e03bbf952726bf4e424f6be0331d8d9a8b16cef2
#[macro_use] extern crate lazy_static; #[macro_use] extern crate mime; #[macro_use] extern crate serde_derive; extern crate iron; extern crate router; extern crate serde; extern crate serde_json; extern crate uuid; extern crate bodyparser; use iron::prelude::*; use iron::status; use router::Router; use std::sync::{Arc, Mutex, RwLock}; use uuid::Uuid; mod blockchain; mod block; mod transaction; use blockchain::Blockchain; use transaction::Transaction; fn main() { lazy_static! { static ref CHAIN: Arc<RwLock<Blockchain>> = { let mut chain: Arc<RwLock<Blockchain>> = Arc::new(RwLock::new(Blockchain::new())); chain }; } println!("Serving on http://localhost:3000..."); let mut router = Router::new(); let node_id: Uuid = Uuid::new_v4(); router.get("/", get_hello, "root"); router.get("/mine", move |r: &mut Request| { get_mine(r, Arc::clone(&CHAIN), node_id.clone()) }, "mine"); router.get("/chain", move |r: &mut Request| { get_chain(r, Arc::clone(&CHAIN)) }, "chain"); router.post("/transactions/new", move |r: &mut Request| { post_transaction(r, Arc::clone(&CHAIN)) }, "transaction"); Iron::new(router).http("localhost:3000").unwrap(); } fn get_hello(_request: &mut Request) -> IronResult<Response> { let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Hey! I'm Rustchain, welcome to the future.\n"); Ok(response) } fn get_mine(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>, _node_id: Uuid) -> IronResult<Response> { let mut response = Response::new(); let node_id_str = _node_id.simple().to_string(); let mut ch = _chain.write().unwrap(); let last_proof = ch.last_proof(); let new_proof = ch.proof_of_work(last_proof); let index = ch.new_transaction("0", node_id_str.as_str(), 1); let new_block = ch.new_block(new_proof); assert_eq!(index, new_block.index); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Click. 
** That's the sound of me mining a new block **\n"); Ok(response) } fn get_chain(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let mut response = Response::new(); let ch = _chain.read().unwrap(); let full_chain = ch.get_chain(); let json = serde_json::to_string(&full_chain); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(json.unwrap()); Ok(response) } fn post_transaction(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let body = _request.get::<bodyparser::Raw>().unwrap().unwrap(); let transaction: Transaction = serde_json::from_str(&body).unwrap(); print!("before tx lock"); let mut ch = _chain.write().unwrap(); print!("after tx lock"); let index = ch.new_transaction(transaction.sender.as_str(), transaction.recipient.as_str(), transaction.amount); let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(format!("transaction complete: block index: {}\n", index)); Ok(response) }
#[macro_use] extern crate lazy_static; #[macro_use] extern crate mime; #[macro_use] extern crate serde_derive; extern crate iron; extern crate router; extern crate serde; extern crate serde_json; extern crate uuid; extern crate bodyparser; use iron::prelude::*; use iron::status; use router::Router; use std::sync::{Arc, Mutex, RwLock}; use uuid::Uuid; mod blockchain; mod block; mod transaction; use blockchain::Blockchain; use transaction::Transaction;
fn get_hello(_request: &mut Request) -> IronResult<Response> { let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Hey! I'm Rustchain, welcome to the future.\n"); Ok(response) } fn get_mine(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>, _node_id: Uuid) -> IronResult<Response> { let mut response = Response::new(); let node_id_str = _node_id.simple().to_string(); let mut ch = _chain.write().unwrap(); let last_proof = ch.last_proof(); let new_proof = ch.proof_of_work(last_proof); let index = ch.new_transaction("0", node_id_str.as_str(), 1); let new_block = ch.new_block(new_proof); assert_eq!(index, new_block.index); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Click. ** That's the sound of me mining a new block **\n"); Ok(response) } fn get_chain(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let mut response = Response::new(); let ch = _chain.read().unwrap(); let full_chain = ch.get_chain(); let json = serde_json::to_string(&full_chain); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(json.unwrap()); Ok(response) } fn post_transaction(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let body = _request.get::<bodyparser::Raw>().unwrap().unwrap(); let transaction: Transaction = serde_json::from_str(&body).unwrap(); print!("before tx lock"); let mut ch = _chain.write().unwrap(); print!("after tx lock"); let index = ch.new_transaction(transaction.sender.as_str(), transaction.recipient.as_str(), transaction.amount); let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(format!("transaction complete: block index: {}\n", index)); Ok(response) }
fn main() { lazy_static! { static ref CHAIN: Arc<RwLock<Blockchain>> = { let mut chain: Arc<RwLock<Blockchain>> = Arc::new(RwLock::new(Blockchain::new())); chain }; } println!("Serving on http://localhost:3000..."); let mut router = Router::new(); let node_id: Uuid = Uuid::new_v4(); router.get("/", get_hello, "root"); router.get("/mine", move |r: &mut Request| { get_mine(r, Arc::clone(&CHAIN), node_id.clone()) }, "mine"); router.get("/chain", move |r: &mut Request| { get_chain(r, Arc::clone(&CHAIN)) }, "chain"); router.post("/transactions/new", move |r: &mut Request| { post_transaction(r, Arc::clone(&CHAIN)) }, "transaction"); Iron::new(router).http("localhost:3000").unwrap(); }
function_block-full_function
[ { "content": "use std::fmt;\n\nuse transaction::Transaction;\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct Block {\n\n\tpub index: i64,\n\n\tpub timestamp: i64,\n\n\tpub transactions: Vec<Transaction>,\n\n\tpub proof: i64,\n\n\tpub previous_hash: String\n\n\t//pub previous_hash: GenericArray<u8,...
Rust
src/lib.rs
mov-rax/smart_buffer
1ab3f45f1c2021d0cd5e87f25852a24dc569e40c
#![no_std] #![feature(min_const_generics)] #[macro_use] extern crate alloc; #[doc(hidden)] pub extern crate core as __core; use alloc::alloc::{alloc, dealloc, Layout}; use alloc::vec::Vec; use core::mem::size_of; use crate::iter::SmartBufferIterRef; use alloc::boxed::Box; use crate::__core::fmt::{Debug, Formatter}; pub mod iter; mod index; pub mod into; #[macro_use] #[cfg(test)] mod tests { use crate::SmartBuffer; use alloc::string::String; use crate::buf; use alloc::vec::Vec; #[test] fn it_works() { let mut buf = buf!(0u32, 5, 10); buf.insert_arr(&[4,9,3,2,1,9,3,2,10,19]); let mut buf_clone = buf.clone(); let test = SmartBuffer::from_arr([1u8,4,5,6,7], 5, true); } } #[feature(min_const_generics)] pub struct SmartBuffer<T, const N:usize> where T: Clone { s_buf: [T; N], d_buf: Option<*mut T>, layout: Option<Layout>, size: usize, capacity: usize, default: T, cursor: usize, } impl<T, const N:usize> SmartBuffer<T,N> where T: Clone { pub fn clear(&mut self){ let default = self.default.clone(); for elem in self{ *elem = default.clone(); } } pub fn push(&mut self, other: T){ if self.size < N{ self.s_buf[self.size] = other; self.size += 1; } else if self.size < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (self.size - N) * size_of::<T>()) as *mut T) = other}; self.size += 1; } } pub fn set_size(&mut self, size:usize){ if self.size < self.capacity{ self.size = size; } } pub fn insert_slice(&mut self, slice: &[T]){ for elem in slice{ self.push(elem.clone()); } } pub fn insert_slice_at(&mut self, slice: &[T], mut index:usize){ for elem in slice{ self.insert(elem.clone(), index); index += 1; } } pub fn insert_arr<const M: usize>(&mut self, arr: &[T; M]){ for elem in arr{ self.push(elem.clone()); } } pub fn insert(&mut self, other: T, index: usize){ if index < N{ self.s_buf[index] = other; if index > self.size{ self.size = index; } } else if index < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) = other}; if index > 
self.size{ self.size = index; } } } pub fn get(&self, index:usize) -> Option<&T> { if index < N { return Some(&(self.s_buf[index])) } else if index < self.capacity { return unsafe { Some(&*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T)) } } None } pub unsafe fn get_unchecked(&self, index:usize) -> &T{ if index < N{ return &self.s_buf[index]; } &*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T) } pub unsafe fn get_mut_unchecked(&mut self, index:usize) -> &mut T{ if index < N{ return &mut self.s_buf[index] } &mut *((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) } pub fn as_mut_ptr(mut self) -> *mut Self{ &mut self as *mut Self } pub(crate) fn allocate(&mut self, elements:usize){ let layout = Layout::from_size_align(elements*size_of::<T>(), 1); if let Ok(layout) = layout{ let ptr = unsafe {alloc(layout) as *mut T}; self.capacity += layout.size()/size_of::<T>(); self.layout = Some(layout); self.d_buf = Some(ptr); } } pub fn new(value: T, len:usize) -> Self where T: Copy + Clone { let mut buf = Self{ s_buf: [value; N], d_buf: None, layout: None, size: 0, capacity: N, default: value, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn from_arr(buf:[T; N], len:usize, set_size:bool) -> Self where T: Clone { let def = buf[0].clone(); let mut buf = Self{ s_buf: buf, d_buf: None, layout: None, size: if set_size { N } else { 0 }, capacity: N, default: def, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn get_size(&self) -> usize{ self.size } pub fn map<F>(&mut self, mut f: F) where T: Clone + Copy, F: FnMut(T) -> T { for i in 0..self.size{ self[i] = f(self[i]) } } pub fn shl(&mut self, count:usize){ self.s_buf.rotate_left(count); if let Some(ptr) = self.d_buf{ for i in 0..count{ self.s_buf[N - count + i] = unsafe {(*ptr.offset(i as isize)).clone()}; } for i in 0..(self.capacity - N){ if i + count < self.capacity - N{ unsafe { *ptr.offset(i as isize) = (*ptr.offset((i + 
count) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset((self.capacity - N - count + i) as isize)) = self.default.clone()}; } } else { for i in 0..count{ self.s_buf[N-count+i] = self.default.clone(); } } } pub fn shr(&mut self, count:usize){ if let Some(ptr) = self.d_buf{ for i in 0..(self.capacity - N){ if (self.capacity as i32 - N as i32 - i as i32- count as i32 - 1) >= 0{ unsafe{ *ptr.offset((self.capacity - N - i - 1) as isize) = (*ptr.offset((self.capacity - N - i - count - 1) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset(i as isize)) = self.s_buf[(N as i32 - count as i32 + i as i32) as usize].clone()}; } } self.s_buf.rotate_right(count); for i in 0..count{ self.s_buf[i] = self.default.clone(); } } } impl<T, const N:usize> SmartBuffer<T,N> where T:Clone + PartialEq { pub fn calc_size(&mut self){ let default = self.default.clone(); let mut size = 0; for elem in &*self{ if *elem == default{ break; } size += 1; } self.set_size(size + 1); } } impl<T, const N:usize> Drop for SmartBuffer<T,N> where T: Clone { fn drop(&mut self) { if let Some(ptr) = self.d_buf{ unsafe {dealloc(ptr as *mut u8, self.layout.unwrap())}; } } } impl<T, const N:usize> Debug for SmartBuffer<T,N> where T: Clone + Debug { fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { f.debug_list() .entries(self.into_iter()) .finish() } } impl<T, const N:usize> Clone for SmartBuffer<T,N> where T: Clone { fn clone(&self) -> Self { let mut temp_buf = Self::from_arr(self.s_buf.clone(), self.capacity, true); if let Some(ptr) = self.d_buf{ temp_buf.allocate(self.capacity - N); unsafe { core::intrinsics::copy(ptr, temp_buf.d_buf.unwrap(), self.capacity - N); } } temp_buf.default = self.default.clone(); temp_buf } } #[doc(hidden)] #[non_exhaustive] pub struct Token; impl Token { #[doc(hidden)] #[inline] pub const unsafe fn new() -> Self { Token } } #[macro_export] macro_rules! 
buf { ($data:expr, $s_len:expr, $t_len:expr) => { $crate::SmartBuffer::<_,$s_len>::from_arr($crate::array!(_ => $data; $s_len), $t_len, false) } } #[macro_export] macro_rules! array { [$expr:expr; $count:expr] => {{ let value = $expr; $crate::array![_ => $crate::__core::clone::Clone::clone(&value); $count] }}; [$i:pat => $e:expr; $count:expr] => {{ const __COUNT: $crate::__core::primitive::usize = $count; #[repr(transparent)] struct __ArrayVec<T>(__ArrayVecInner<T>); impl<T> $crate::__core::ops::Drop for __ArrayVec<T> { fn drop(&mut self) { for val in &mut self.0.arr[..self.0.len] { unsafe { val.as_mut_ptr().drop_in_place() } } } } struct __ArrayVecInner<T> { arr: [$crate::__core::mem::MaybeUninit<T>; __COUNT], len: $crate::__core::primitive::usize, token: $crate::Token, } #[repr(C)] union __Transmuter<T> { init_uninit_array: $crate::__core::mem::ManuallyDrop<$crate::__core::mem::MaybeUninit<[T; __COUNT]>>, uninit_array: $crate::__core::mem::ManuallyDrop<[$crate::__core::mem::MaybeUninit<T>; __COUNT]>, out: $crate::__core::mem::ManuallyDrop<[T; __COUNT]>, } #[repr(C)] union __ArrayVecTransmuter<T> { vec: $crate::__core::mem::ManuallyDrop<__ArrayVec<T>>, inner: $crate::__core::mem::ManuallyDrop<__ArrayVecInner<T>>, } let mut vec = __ArrayVec(__ArrayVecInner { arr: $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { init_uninit_array: $crate::__core::mem::ManuallyDrop::new($crate::__core::mem::MaybeUninit::uninit()), } .uninit_array }), len: 0, token: unsafe { $crate::Token::new() }, }); while vec.0.len < __COUNT { let $i = vec.0.len; let _please_do_not_use_continue_without_label; let value; struct __PleaseDoNotUseBreakWithoutLabel; loop { _please_do_not_use_continue_without_label = (); value = $e; break __PleaseDoNotUseBreakWithoutLabel; }; vec.0.arr[vec.0.len] = $crate::__core::mem::MaybeUninit::new(value); vec.0.len += 1; } let inner = $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __ArrayVecTransmuter { vec: 
$crate::__core::mem::ManuallyDrop::new(vec), } .inner }); $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { uninit_array: $crate::__core::mem::ManuallyDrop::new(inner.arr), } .out }) }}; }
#![no_std] #![feature(min_const_generics)] #[macro_use] extern crate alloc; #[doc(hidden)] pub extern crate core as __core; use alloc::alloc::{alloc, dealloc, Layout}; use alloc::vec::Vec; use core::mem::size_of; use crate::iter::SmartBufferIterRef; use alloc::boxed::Box; use crate::__core::fmt::{Debug, Formatter}; pub mod iter; mod index; pub mod into; #[macro_use] #[cfg(test)] mod tests { use crate::SmartBuffer; use alloc::string::String; use crate::buf; use alloc::vec::Vec; #[test] fn it_works() { let mut buf = buf!(0u32, 5, 10); buf.insert_arr(&[4,9,3,2,1,9,3,2,10,19]); let mut buf_clone = buf.clone(); let test = SmartBuffer::from_arr([1u8,4,5,6,7], 5, true); } } #[feature(min_const_generics)] pub struct SmartBuffer<T, const N:usize> where T: Clone { s_buf: [T; N], d_buf: Option<*mut T>, layout: Option<Layout>, size: usize, capacity: usize, default: T, cursor: usize, } impl<T, const N:usize> SmartBuffer<T,N> where T: Clone { pub fn clear(&mut self){ let default = self.default.clone(); for elem in self{ *elem = default.clone(); } } pub fn push(&mut self, other: T){ if self.size < N{ self.s_buf[self.size] = other; self.size += 1; } else if self.size < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (self.size - N) * size_of::<T>()) as *mut T) = other}; self.size += 1; } } pub fn set_size(&mut self, size:usize){ if self.size < self.capacity{ self.size = size; } } pub fn insert_slice(&mut self, slice: &[T]){ for elem in slice{ self.push(elem.clone()); } } pub fn insert_slice_at(&mut self, slice: &[T], mut index:usize){ for elem in slice{ self.insert(elem.clone(), index); index += 1; } } pub fn insert_arr<const M: usize>(&mut self, arr: &[T; M]){ for elem in arr{ self.push(elem.clone()); } } pub fn insert(&mut self, other: T, index: usize){ if index < N{ self.s_buf[index] = other; if index > self.size{ self.size = index; } } else if index < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) = other}; if index > 
self.size{ self.size = index; } } } pub fn get(&self, index:usize) -> Option<&T> { if index < N { return Some(&(self.s_buf[index])) } else if index < self.capacity { return unsafe { Some(&*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T)) } } None } pub unsafe fn get_unchecked(&self, index:usize) -> &T{ if index < N{ return &self.s_buf[index]; } &*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T) } pub unsafe fn get_mut_unchecked(&mut self, index:usize) -> &mut T{ if index < N{ return &mut self.s_buf[index] } &mut *((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) } pub fn as_mut_ptr(mut self) -> *mut Self{ &mut self as *mut Self } pub(crate) fn allocate(&mut self, elements:usize){ let layout = Layout::from_size_align(elements*size_of::<T>(), 1);
} pub fn new(value: T, len:usize) -> Self where T: Copy + Clone { let mut buf = Self{ s_buf: [value; N], d_buf: None, layout: None, size: 0, capacity: N, default: value, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn from_arr(buf:[T; N], len:usize, set_size:bool) -> Self where T: Clone { let def = buf[0].clone(); let mut buf = Self{ s_buf: buf, d_buf: None, layout: None, size: if set_size { N } else { 0 }, capacity: N, default: def, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn get_size(&self) -> usize{ self.size } pub fn map<F>(&mut self, mut f: F) where T: Clone + Copy, F: FnMut(T) -> T { for i in 0..self.size{ self[i] = f(self[i]) } } pub fn shl(&mut self, count:usize){ self.s_buf.rotate_left(count); if let Some(ptr) = self.d_buf{ for i in 0..count{ self.s_buf[N - count + i] = unsafe {(*ptr.offset(i as isize)).clone()}; } for i in 0..(self.capacity - N){ if i + count < self.capacity - N{ unsafe { *ptr.offset(i as isize) = (*ptr.offset((i + count) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset((self.capacity - N - count + i) as isize)) = self.default.clone()}; } } else { for i in 0..count{ self.s_buf[N-count+i] = self.default.clone(); } } } pub fn shr(&mut self, count:usize){ if let Some(ptr) = self.d_buf{ for i in 0..(self.capacity - N){ if (self.capacity as i32 - N as i32 - i as i32- count as i32 - 1) >= 0{ unsafe{ *ptr.offset((self.capacity - N - i - 1) as isize) = (*ptr.offset((self.capacity - N - i - count - 1) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset(i as isize)) = self.s_buf[(N as i32 - count as i32 + i as i32) as usize].clone()}; } } self.s_buf.rotate_right(count); for i in 0..count{ self.s_buf[i] = self.default.clone(); } } } impl<T, const N:usize> SmartBuffer<T,N> where T:Clone + PartialEq { pub fn calc_size(&mut self){ let default = self.default.clone(); let mut size = 0; for elem in &*self{ if *elem == default{ break; } size += 1; } self.set_size(size + 1); } } impl<T, 
const N:usize> Drop for SmartBuffer<T,N> where T: Clone { fn drop(&mut self) { if let Some(ptr) = self.d_buf{ unsafe {dealloc(ptr as *mut u8, self.layout.unwrap())}; } } } impl<T, const N:usize> Debug for SmartBuffer<T,N> where T: Clone + Debug { fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { f.debug_list() .entries(self.into_iter()) .finish() } } impl<T, const N:usize> Clone for SmartBuffer<T,N> where T: Clone { fn clone(&self) -> Self { let mut temp_buf = Self::from_arr(self.s_buf.clone(), self.capacity, true); if let Some(ptr) = self.d_buf{ temp_buf.allocate(self.capacity - N); unsafe { core::intrinsics::copy(ptr, temp_buf.d_buf.unwrap(), self.capacity - N); } } temp_buf.default = self.default.clone(); temp_buf } } #[doc(hidden)] #[non_exhaustive] pub struct Token; impl Token { #[doc(hidden)] #[inline] pub const unsafe fn new() -> Self { Token } } #[macro_export] macro_rules! buf { ($data:expr, $s_len:expr, $t_len:expr) => { $crate::SmartBuffer::<_,$s_len>::from_arr($crate::array!(_ => $data; $s_len), $t_len, false) } } #[macro_export] macro_rules! 
array { [$expr:expr; $count:expr] => {{ let value = $expr; $crate::array![_ => $crate::__core::clone::Clone::clone(&value); $count] }}; [$i:pat => $e:expr; $count:expr] => {{ const __COUNT: $crate::__core::primitive::usize = $count; #[repr(transparent)] struct __ArrayVec<T>(__ArrayVecInner<T>); impl<T> $crate::__core::ops::Drop for __ArrayVec<T> { fn drop(&mut self) { for val in &mut self.0.arr[..self.0.len] { unsafe { val.as_mut_ptr().drop_in_place() } } } } struct __ArrayVecInner<T> { arr: [$crate::__core::mem::MaybeUninit<T>; __COUNT], len: $crate::__core::primitive::usize, token: $crate::Token, } #[repr(C)] union __Transmuter<T> { init_uninit_array: $crate::__core::mem::ManuallyDrop<$crate::__core::mem::MaybeUninit<[T; __COUNT]>>, uninit_array: $crate::__core::mem::ManuallyDrop<[$crate::__core::mem::MaybeUninit<T>; __COUNT]>, out: $crate::__core::mem::ManuallyDrop<[T; __COUNT]>, } #[repr(C)] union __ArrayVecTransmuter<T> { vec: $crate::__core::mem::ManuallyDrop<__ArrayVec<T>>, inner: $crate::__core::mem::ManuallyDrop<__ArrayVecInner<T>>, } let mut vec = __ArrayVec(__ArrayVecInner { arr: $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { init_uninit_array: $crate::__core::mem::ManuallyDrop::new($crate::__core::mem::MaybeUninit::uninit()), } .uninit_array }), len: 0, token: unsafe { $crate::Token::new() }, }); while vec.0.len < __COUNT { let $i = vec.0.len; let _please_do_not_use_continue_without_label; let value; struct __PleaseDoNotUseBreakWithoutLabel; loop { _please_do_not_use_continue_without_label = (); value = $e; break __PleaseDoNotUseBreakWithoutLabel; }; vec.0.arr[vec.0.len] = $crate::__core::mem::MaybeUninit::new(value); vec.0.len += 1; } let inner = $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __ArrayVecTransmuter { vec: $crate::__core::mem::ManuallyDrop::new(vec), } .inner }); $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { uninit_array: $crate::__core::mem::ManuallyDrop::new(inner.arr), } .out }) 
}}; }
if let Ok(layout) = layout{ let ptr = unsafe {alloc(layout) as *mut T}; self.capacity += layout.size()/size_of::<T>(); self.layout = Some(layout); self.d_buf = Some(ptr); }
if_condition
[ { "content": "use crate::SmartBuffer;\n\nuse core::ops::Index;\n\nuse core::ops::IndexMut;\n\n\n\n\n\nimpl<T, const N:usize> Index<usize> for &SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n type Output = T;\n\n\n\n fn index(&self, index: usize) -> &Self::Output {\n\n unsafe {self.get_unchecked(ind...
Rust
src/sys/component_manager/src/model/events/source.rs
casey/fuchsia
2b965e9a1e8f2ea346db540f3611a5be16bb4d6b
use { crate::{ capability::{CapabilityProvider, CapabilitySource, FrameworkCapability}, model::{ error::ModelError, events::{ event::SyncMode, registry::{EventRegistry, RoutedEvent}, serve::serve_event_source_sync, stream::EventStream, }, hooks::EventType, model::Model, moniker::AbsoluteMoniker, realm::Realm, routing, }, }, async_trait::async_trait, cm_rust::{CapabilityName, UseDecl, UseEventDecl}, fidl::endpoints::ServerEnd, fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx, futures::lock::Mutex, maplit::hashset, std::{ collections::{HashMap, HashSet}, path::PathBuf, sync::{Arc, Weak}, }, thiserror::Error, }; #[derive(Clone)] pub struct EventSource { model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: Weak<EventRegistry>, resolve_instance_event_stream: Arc<Mutex<Option<EventStream>>>, debug: bool, sync_mode: SyncMode, } #[derive(Debug, Error)] pub enum EventsError { #[error("Registry not found")] RegistryNotFound, #[error("Events not allowed for subscription {:?}", names)] NotAvailable { names: Vec<CapabilityName> }, #[error("Routing failed")] RoutingFailed(#[source] ModelError), } struct RouteEventsResult { mapping: HashMap<CapabilityName, HashSet<AbsoluteMoniker>>, } impl RouteEventsResult { fn new() -> Self { Self { mapping: HashMap::new() } } fn insert(&mut self, source_name: CapabilityName, scope_moniker: AbsoluteMoniker) { self.mapping.entry(source_name).or_insert(HashSet::new()).insert(scope_moniker); } fn len(&self) -> usize { self.mapping.len() } fn contains_event(&self, event_name: &CapabilityName) -> bool { self.mapping.contains_key(event_name) } fn to_vec(self) -> Vec<RoutedEvent> { self.mapping .into_iter() .map(|(source_name, scope_monikers)| RoutedEvent { source_name, scope_monikers }) .collect() } } impl EventSource { pub async fn new( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, sync_mode: SyncMode, ) -> Result<Self, ModelError> { let resolve_instance_event_stream = 
Arc::new(Mutex::new(if sync_mode == SyncMode::Async { None } else { Some( registry .subscribe( &sync_mode, vec![RoutedEvent { source_name: EventType::Resolved.into(), scope_monikers: hashset!(target_moniker.clone()), }], ) .await, ) })); Ok(Self { registry: Arc::downgrade(&registry), model, target_moniker, resolve_instance_event_stream, debug: false, sync_mode, }) } pub async fn new_for_debug( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, ) -> Result<Self, ModelError> { let mut event_source = Self::new(model, target_moniker, registry, SyncMode::Sync).await?; event_source.debug = true; Ok(event_source) } pub async fn start_component_tree(&mut self) { let mut resolve_instance_event_stream = self.resolve_instance_event_stream.lock().await; *resolve_instance_event_stream = None; } pub async fn subscribe( &mut self, events: Vec<CapabilityName>, ) -> Result<EventStream, EventsError> { let events = if self.debug { events .into_iter() .map(|event| RoutedEvent { source_name: event.clone(), scope_monikers: hashset!(AbsoluteMoniker::root()), }) .collect() } else { let route_result = self.route_events(&events).await.map_err(|e| EventsError::RoutingFailed(e))?; if route_result.len() != events.len() { let names = events .into_iter() .filter(|event| !route_result.contains_event(&event)) .collect(); return Err(EventsError::NotAvailable { names }); } route_result.to_vec() }; if let Some(registry) = self.registry.upgrade() { return Ok(registry.subscribe(&self.sync_mode, events).await); } Err(EventsError::RegistryNotFound) } pub fn serve(self, stream: fsys::BlockingEventSourceRequestStream) { fasync::spawn(async move { serve_event_source_sync(self, stream).await; }); } async fn route_events( &self, events: &Vec<CapabilityName>, ) -> Result<RouteEventsResult, ModelError> { let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?; let realm = model.look_up_realm(&self.target_moniker).await?; let decl = { let state = 
realm.lock_state().await; state.as_ref().expect("route_events: not registered").decl().clone() }; let mut result = RouteEventsResult::new(); for use_decl in decl.uses { match &use_decl { UseDecl::Event(event_decl) => { if !events.contains(&event_decl.target_name) { continue; } let (source_name, scope_moniker) = self.route_event(event_decl, &realm).await?; result.insert(source_name, scope_moniker); } _ => {} } } Ok(result) } async fn route_event( &self, event_decl: &UseEventDecl, realm: &Arc<Realm>, ) -> Result<(CapabilityName, AbsoluteMoniker), ModelError> { routing::route_use_event_capability(&UseDecl::Event(event_decl.clone()), &realm).await.map( |source| match source { CapabilitySource::Framework { capability: FrameworkCapability::Event(source_name), scope_moniker: Some(scope_moniker), } => (source_name, scope_moniker), _ => unreachable!(), }, ) } } #[async_trait] impl CapabilityProvider for EventSource { async fn open( self: Box<Self>, _flags: u32, _open_mode: u32, _relative_path: PathBuf, server_end: zx::Channel, ) -> Result<(), ModelError> { let stream = ServerEnd::<fsys::BlockingEventSourceMarker>::new(server_end) .into_stream() .expect("could not convert channel into stream"); self.serve(stream); Ok(()) } }
use { crate::{ capability::{CapabilityProvider, CapabilitySource, FrameworkCapability}, model::{ error::ModelError, events::{ event::SyncMode, registry::{EventRegistry, RoutedEvent}, serve::serve_event_source_sync, stream::EventStream, }, hooks::EventType, model::Model, moniker::AbsoluteMoniker, realm::Realm, routing, }, }, async_trait::async_trait, cm_rust::{CapabilityName, UseDecl, UseEventDecl}, fidl::endpoints::ServerEnd, fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx, futures::lock::Mutex, maplit::hashset, std::{ collections::{HashMap, HashSet}, path::PathBuf, sync::{Arc, Weak}, }, thiserror::Error, }; #[derive(Clone)] pub struct EventSource { model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: Weak<EventRegistry>, resolve_instance_event_stream: Arc<Mutex<Option<EventStream>>>, debug: bool, sync_mode: SyncMode, } #[derive(Debug, Error)] pub enum EventsError { #[error("Registry not found")] RegistryNotFound, #[error("Events not allowed for subscri
.into_iter() .filter(|event| !route_result.contains_event(&event)) .collect(); return Err(EventsError::NotAvailable { names }); } route_result.to_vec() }; if let Some(registry) = self.registry.upgrade() { return Ok(registry.subscribe(&self.sync_mode, events).await); } Err(EventsError::RegistryNotFound) } pub fn serve(self, stream: fsys::BlockingEventSourceRequestStream) { fasync::spawn(async move { serve_event_source_sync(self, stream).await; }); } async fn route_events( &self, events: &Vec<CapabilityName>, ) -> Result<RouteEventsResult, ModelError> { let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?; let realm = model.look_up_realm(&self.target_moniker).await?; let decl = { let state = realm.lock_state().await; state.as_ref().expect("route_events: not registered").decl().clone() }; let mut result = RouteEventsResult::new(); for use_decl in decl.uses { match &use_decl { UseDecl::Event(event_decl) => { if !events.contains(&event_decl.target_name) { continue; } let (source_name, scope_moniker) = self.route_event(event_decl, &realm).await?; result.insert(source_name, scope_moniker); } _ => {} } } Ok(result) } async fn route_event( &self, event_decl: &UseEventDecl, realm: &Arc<Realm>, ) -> Result<(CapabilityName, AbsoluteMoniker), ModelError> { routing::route_use_event_capability(&UseDecl::Event(event_decl.clone()), &realm).await.map( |source| match source { CapabilitySource::Framework { capability: FrameworkCapability::Event(source_name), scope_moniker: Some(scope_moniker), } => (source_name, scope_moniker), _ => unreachable!(), }, ) } } #[async_trait] impl CapabilityProvider for EventSource { async fn open( self: Box<Self>, _flags: u32, _open_mode: u32, _relative_path: PathBuf, server_end: zx::Channel, ) -> Result<(), ModelError> { let stream = ServerEnd::<fsys::BlockingEventSourceMarker>::new(server_end) .into_stream() .expect("could not convert channel into stream"); self.serve(stream); Ok(()) } }
ption {:?}", names)] NotAvailable { names: Vec<CapabilityName> }, #[error("Routing failed")] RoutingFailed(#[source] ModelError), } struct RouteEventsResult { mapping: HashMap<CapabilityName, HashSet<AbsoluteMoniker>>, } impl RouteEventsResult { fn new() -> Self { Self { mapping: HashMap::new() } } fn insert(&mut self, source_name: CapabilityName, scope_moniker: AbsoluteMoniker) { self.mapping.entry(source_name).or_insert(HashSet::new()).insert(scope_moniker); } fn len(&self) -> usize { self.mapping.len() } fn contains_event(&self, event_name: &CapabilityName) -> bool { self.mapping.contains_key(event_name) } fn to_vec(self) -> Vec<RoutedEvent> { self.mapping .into_iter() .map(|(source_name, scope_monikers)| RoutedEvent { source_name, scope_monikers }) .collect() } } impl EventSource { pub async fn new( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, sync_mode: SyncMode, ) -> Result<Self, ModelError> { let resolve_instance_event_stream = Arc::new(Mutex::new(if sync_mode == SyncMode::Async { None } else { Some( registry .subscribe( &sync_mode, vec![RoutedEvent { source_name: EventType::Resolved.into(), scope_monikers: hashset!(target_moniker.clone()), }], ) .await, ) })); Ok(Self { registry: Arc::downgrade(&registry), model, target_moniker, resolve_instance_event_stream, debug: false, sync_mode, }) } pub async fn new_for_debug( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, ) -> Result<Self, ModelError> { let mut event_source = Self::new(model, target_moniker, registry, SyncMode::Sync).await?; event_source.debug = true; Ok(event_source) } pub async fn start_component_tree(&mut self) { let mut resolve_instance_event_stream = self.resolve_instance_event_stream.lock().await; *resolve_instance_event_stream = None; } pub async fn subscribe( &mut self, events: Vec<CapabilityName>, ) -> Result<EventStream, EventsError> { let events = if self.debug { events .into_iter() .map(|event| RoutedEvent { 
source_name: event.clone(), scope_monikers: hashset!(AbsoluteMoniker::root()), }) .collect() } else { let route_result = self.route_events(&events).await.map_err(|e| EventsError::RoutingFailed(e))?; if route_result.len() != events.len() { let names = events
random
[]
Rust
src/message.rs
media-cloud-ai/rs_http_worker
0848d1dec046ea13be1b3e75e90be73d698cd855
use amqp_worker::*; use amqp_worker::job::*; use reqwest; use reqwest::StatusCode; use std::fs::File; use std::io::prelude::*; pub fn process(message: &str) -> Result<JobResult, MessageError> { let job = Job::new(message)?; debug!("reveived message: {:?}", job); match job.check_requirements() { Ok(_) => {} Err(message) => { return Err(message); } } let source_path = job.get_string_parameter("source_path"); let destination_path = job.get_string_parameter("destination_path"); if source_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing source path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } if destination_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing destination path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } let url = source_path.unwrap(); let filename = destination_path.unwrap(); let client = reqwest::Client::builder() .build() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut response = client .get(url.as_str()) .send() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let status = response.status(); if status != StatusCode::OK { println!("ERROR {:?}", response); let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("bad response status".to_string()); return Err(MessageError::ProcessingError(result)); } let mut body: Vec<u8> = vec![]; response .copy_to(&mut body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut file = File::create(filename.as_str()) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) 
.with_message(e.to_string()); MessageError::ProcessingError(result) })?; file.write_all(&body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; Ok(JobResult::new(job.job_id, JobStatus::Completed, vec![])) } #[test] fn ack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": []} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert!(result.is_ok()); } #[test] fn nack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": [ "/tmp/FiLe_ThAt_$h0uld_N0t_3xist$" ]} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert_eq!( result, Err(MessageError::RequirementsError( "Warning: Required file does not exists: \"/tmp/FiLe_ThAt_$h0uld_N0t_3xist$\"" .to_string() )) ); }
use amqp_worker::*; use amqp_worker::job::*; use reqwest; use reqwest::StatusCode; use std::fs::File; use std::io::prelude::*; pub fn process(message: &str) -> Result<JobResult, MessageError> { let job = Job::new(message)?; debug!("reveived message: {:?}", job);
let source_path = job.get_string_parameter("source_path"); let destination_path = job.get_string_parameter("destination_path"); if source_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing source path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } if destination_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing destination path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } let url = source_path.unwrap(); let filename = destination_path.unwrap(); let client = reqwest::Client::builder() .build() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut response = client .get(url.as_str()) .send() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let status = response.status(); if status != StatusCode::OK { println!("ERROR {:?}", response); let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("bad response status".to_string()); return Err(MessageError::ProcessingError(result)); } let mut body: Vec<u8> = vec![]; response .copy_to(&mut body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut file = File::create(filename.as_str()) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; file.write_all(&body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; Ok(JobResult::new(job.job_id, JobStatus::Completed, vec![])) } #[test] fn ack_message_test() { let msg = 
r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": []} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert!(result.is_ok()); } #[test] fn nack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": [ "/tmp/FiLe_ThAt_$h0uld_N0t_3xist$" ]} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert_eq!( result, Err(MessageError::RequirementsError( "Warning: Required file does not exists: \"/tmp/FiLe_ThAt_$h0uld_N0t_3xist$\"" .to_string() )) ); }
match job.check_requirements() { Ok(_) => {} Err(message) => { return Err(message); } }
if_condition
[ { "content": "fn main() {\n\n println!(\"HTTP Worker (version {}) started \", env::var(\"VERSION\").expect(\"missing softwareversion\"));\n\n if env::var(\"VERBOSE\").is_ok() {\n\n simple_logger::init_with_level(Level::Debug).unwrap();\n\n } else {\n\n simple_logger::init_with_level(Level::Warn).unwrap...
Rust
code/src/example_catalog.rs
cloudfuse-io/buzz-rust
7199662d40de5da138b53a3c8e62aeaf69545848
use std::sync::Arc; use crate::datasource::{CatalogFile, CatalogTable, StaticCatalogTable}; use arrow::datatypes::{DataType, Field, Schema, TimeUnit}; pub fn nyc_taxi_cloudfuse_sample() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![CatalogFile::new( "raw_small/2009/01/data.parquet", 27301328, vec!["2009/01".to_owned()], )], ))) } pub fn nyc_taxi_cloudfuse() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "raw_5M/2009/01/data.parquet", 388070114, vec!["2009/01".to_owned()], ), CatalogFile::new( "raw_5M/2009/02/data.parquet", 368127982, vec!["2009/02".to_owned()], ), CatalogFile::new( "raw_5M/2009/03/data.parquet", 398600815, vec!["2009/03".to_owned()], ), CatalogFile::new( "raw_5M/2009/04/data.parquet", 396353841, vec!["2009/04".to_owned()], ), CatalogFile::new( "raw_5M/2009/05/data.parquet", 410283205, vec!["2009/05".to_owned()], ), ], ))) } pub fn nyc_taxi_ursa() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Nanosecond), "us-east-2".to_owned(), "ursa-labs-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "2009/01/data.parquet", 461966527, vec!["2009/01".to_owned()], ), CatalogFile::new( "2009/02/data.parquet", 436405669, vec!["2009/02".to_owned()], ), CatalogFile::new( "2009/03/data.parquet", 474795751, vec!["2009/03".to_owned()], ), CatalogFile::new( "2009/04/data.parquet", 470914229, vec!["2009/04".to_owned()], ), CatalogFile::new( "2009/05/data.parquet", 489248585, vec!["2009/05".to_owned()], ), CatalogFile::new( "2009/06/data.parquet", 465578495, vec!["2009/06".to_owned()], ), CatalogFile::new( "2009/07/data.parquet", 448227037, vec!["2009/07".to_owned()], ), 
CatalogFile::new( "2009/08/data.parquet", 450774566, vec!["2009/08".to_owned()], ), CatalogFile::new( "2009/09/data.parquet", 460835784, vec!["2009/09".to_owned()], ), CatalogFile::new( "2009/10/data.parquet", 517609313, vec!["2009/10".to_owned()], ), CatalogFile::new( "2009/11/data.parquet", 471148697, vec!["2009/11".to_owned()], ), CatalogFile::new( "2009/12/data.parquet", 479899902, vec!["2009/12".to_owned()], ), ], ))) } fn nyc_taxi_v1_schema(time_unit: TimeUnit) -> Arc<Schema> { Arc::new(Schema::new(vec![ Field::new("vendor_id", DataType::Utf8, true), Field::new( "pickup_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new( "dropoff_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new("passenger_count", DataType::Int8, true), Field::new("trip_distance", DataType::Float32, true), Field::new("pickup_longitude", DataType::Float32, true), Field::new("pickup_latitude", DataType::Float32, true), Field::new("rate_code_id", DataType::Null, true), Field::new("store_and_fwd_flag", DataType::Utf8, true), Field::new("dropoff_longitude", DataType::Float32, true), Field::new("dropoff_latitude", DataType::Float32, true), Field::new("payment_type", DataType::Utf8, true), Field::new("fare_amount", DataType::Float32, true), Field::new("extra", DataType::Float32, true), Field::new("mta_tax", DataType::Float32, true), Field::new("tip_amount", DataType::Float32, true), Field::new("tolls_amount", DataType::Float32, true), Field::new("total_amount", DataType::Float32, true), ])) }
use std::sync::Arc; use crate::datasource::{CatalogFile, CatalogTable, StaticCatalogTable}; use arrow::datatypes::{DataType, Field, Schema, TimeUnit}; pub fn nyc_taxi_cloudfuse_sample() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![CatalogFile::new( "raw_small/2009/01/data.parquet", 27301328, vec!["2009/01".to_owned()], )], ))) } pub fn nyc_taxi_cloudfuse() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "raw_5M/2009/01/data.parquet",
474795751, vec!["2009/03".to_owned()], ), CatalogFile::new( "2009/04/data.parquet", 470914229, vec!["2009/04".to_owned()], ), CatalogFile::new( "2009/05/data.parquet", 489248585, vec!["2009/05".to_owned()], ), CatalogFile::new( "2009/06/data.parquet", 465578495, vec!["2009/06".to_owned()], ), CatalogFile::new( "2009/07/data.parquet", 448227037, vec!["2009/07".to_owned()], ), CatalogFile::new( "2009/08/data.parquet", 450774566, vec!["2009/08".to_owned()], ), CatalogFile::new( "2009/09/data.parquet", 460835784, vec!["2009/09".to_owned()], ), CatalogFile::new( "2009/10/data.parquet", 517609313, vec!["2009/10".to_owned()], ), CatalogFile::new( "2009/11/data.parquet", 471148697, vec!["2009/11".to_owned()], ), CatalogFile::new( "2009/12/data.parquet", 479899902, vec!["2009/12".to_owned()], ), ], ))) } fn nyc_taxi_v1_schema(time_unit: TimeUnit) -> Arc<Schema> { Arc::new(Schema::new(vec![ Field::new("vendor_id", DataType::Utf8, true), Field::new( "pickup_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new( "dropoff_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new("passenger_count", DataType::Int8, true), Field::new("trip_distance", DataType::Float32, true), Field::new("pickup_longitude", DataType::Float32, true), Field::new("pickup_latitude", DataType::Float32, true), Field::new("rate_code_id", DataType::Null, true), Field::new("store_and_fwd_flag", DataType::Utf8, true), Field::new("dropoff_longitude", DataType::Float32, true), Field::new("dropoff_latitude", DataType::Float32, true), Field::new("payment_type", DataType::Utf8, true), Field::new("fare_amount", DataType::Float32, true), Field::new("extra", DataType::Float32, true), Field::new("mta_tax", DataType::Float32, true), Field::new("tip_amount", DataType::Float32, true), Field::new("tolls_amount", DataType::Float32, true), Field::new("total_amount", DataType::Float32, true), ])) }
388070114, vec!["2009/01".to_owned()], ), CatalogFile::new( "raw_5M/2009/02/data.parquet", 368127982, vec!["2009/02".to_owned()], ), CatalogFile::new( "raw_5M/2009/03/data.parquet", 398600815, vec!["2009/03".to_owned()], ), CatalogFile::new( "raw_5M/2009/04/data.parquet", 396353841, vec!["2009/04".to_owned()], ), CatalogFile::new( "raw_5M/2009/05/data.parquet", 410283205, vec!["2009/05".to_owned()], ), ], ))) } pub fn nyc_taxi_ursa() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Nanosecond), "us-east-2".to_owned(), "ursa-labs-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "2009/01/data.parquet", 461966527, vec!["2009/01".to_owned()], ), CatalogFile::new( "2009/02/data.parquet", 436405669, vec!["2009/02".to_owned()], ), CatalogFile::new( "2009/03/data.parquet",
random
[ { "content": "pub fn catalog_schema(partition_names: &[String]) -> Arc<Schema> {\n\n let mut fields = vec![\n\n Field::new(\"key\", DataType::Utf8, false),\n\n Field::new(\"length\", DataType::UInt64, false),\n\n ];\n\n for col in partition_names {\n\n fields.push(Field::new(col, D...
Rust
src/main.rs
jonathanmorley/rpg-cli
93a816e875287a884b5045c430dffbb4e2a4ce27
use game::Game; mod character; mod game; mod item; mod location; mod log; mod randomizer; use crate::location::Location; use clap::Clap; #[derive(Clap)] struct Opts { destination: Option<String>, #[clap(long)] pwd: bool, #[clap(long)] reset: bool, #[clap(long)] run: bool, #[clap(long)] bribe: bool, #[clap(short, long)] shop: bool, #[clap(short, long)] inventory: bool, } fn main() { let opts: Opts = Opts::parse(); let mut game = Game::load().unwrap_or_else(|_| Game::new()); if opts.pwd { println!("{}", game.location.path_string()); } else if opts.reset { game.reset() } else if opts.shop { shop(&mut game, &opts.destination); } else if opts.inventory { inventory(&mut game, &opts.destination); } else if let Some(dest) = opts.destination { go_to(&mut game, &dest, opts.run, opts.bribe); } else { log::status(&game); } game.save().unwrap() } fn go_to(game: &mut Game, dest: &str, run: bool, bribe: bool) { if let Ok(dest) = Location::from(&dest) { if let Err(game::Error::GameOver) = game.go_to(&dest, run, bribe) { game.reset(); } } else { println!("No such file or directory"); std::process::exit(1); } } fn shop(game: &mut Game, item_name: &Option<String>) { if game.location.is_home() { if let Some(item_name) = item_name { let item_name = sanitize(item_name); match item::shop::buy(game, &item_name) { Err(item::shop::Error::NotEnoughGold) => { println!("Not enough gold.") } Err(item::shop::Error::ItemNotAvailable) => { println!("Item not available.") } Ok(()) => {} } } else { item::shop::list(game); } } else { println!("Shop is only allowed at home.") } } fn inventory(game: &mut Game, item_name: &Option<String>) { if let Some(item_name) = item_name { let item_name = sanitize(item_name); if let Err(game::Error::ItemNotFound) = game.use_item(&item_name) { println!("Item not found."); } } else { println!("{}", log::format_inventory(&game)); } } fn sanitize(name: &str) -> String { let name = name.to_lowercase(); let name = match name.as_str() { "p" | "potion" => "potion", "e" | 
"escape" => "escape", "sw" | "sword" => "sword", "sh" | "shield" => "shield", n => n, }; name.to_string() }
use game::Game; mod character; mod game; mod item; mod location; mod log; mod randomizer; use crate::location::Location; use clap::Clap; #[derive(Clap)] struct Opts { destination: Option<String>, #[clap(long)] pwd: bool, #[clap(long)] reset: bool, #[clap(long)] run: bool, #[clap(long)] bribe: bool, #[clap(short, long)] shop: bool, #[clap(short, long)] inventory: bool, } fn main() { let opts: Opts = Opts::parse(); let mut game = Game::load().unwrap_or_else(|_| Game::new()); if opts.pwd { println!("{}", game.location.path_string()); } else if opts.reset { game.reset() } else if opts.shop { shop(&mut game, &opts.destination); } else if opts.inventory { inventory(&mut game, &opts.destination); } else if let Some(dest) = opts.destination { go_to(&mut game, &dest, opts.run, opts.bribe); } else { log::status(&game); } game.save().unwrap() } fn go_to(game: &mut Game, dest: &str, run: bool, bribe: bool) { if let Ok(dest) = Location::from(&dest) { if let Err(game::Error::GameOve
n => n, }; name.to_string() }
r) = game.go_to(&dest, run, bribe) { game.reset(); } } else { println!("No such file or directory"); std::process::exit(1); } } fn shop(game: &mut Game, item_name: &Option<String>) { if game.location.is_home() { if let Some(item_name) = item_name { let item_name = sanitize(item_name); match item::shop::buy(game, &item_name) { Err(item::shop::Error::NotEnoughGold) => { println!("Not enough gold.") } Err(item::shop::Error::ItemNotAvailable) => { println!("Item not available.") } Ok(()) => {} } } else { item::shop::list(game); } } else { println!("Shop is only allowed at home.") } } fn inventory(game: &mut Game, item_name: &Option<String>) { if let Some(item_name) = item_name { let item_name = sanitize(item_name); if let Err(game::Error::ItemNotFound) = game.use_item(&item_name) { println!("Item not found."); } } else { println!("{}", log::format_inventory(&game)); } } fn sanitize(name: &str) -> String { let name = name.to_lowercase(); let name = match name.as_str() { "p" | "potion" => "potion", "e" | "escape" => "escape", "sw" | "sword" => "sword", "sh" | "shield" => "shield",
random
[]
Rust
crates/apollo-smith/src/input_value.rs
isabella232/apollo-rs
963b3552deaf7ba3a7eb5a698a90f20d0fc08242
use crate::{description::Description, directive::Directive, name::Name, ty::Ty, DocumentBuilder}; use arbitrary::Result; #[derive(Debug, Clone, PartialEq)] pub enum InputValue { Variable(Name), Int(i64), Float(f64), String(String), Boolean(bool), Null, Enum(Name), List(Vec<InputValue>), Object(Vec<(Name, InputValue)>), } impl From<InputValue> for apollo_encoder::Value { fn from(input_value: InputValue) -> Self { match input_value { InputValue::Variable(v) => Self::Variable(v.into()), InputValue::Int(i) => Self::Int(i), InputValue::Float(f) => Self::Float(f), InputValue::String(s) => Self::String(s), InputValue::Boolean(b) => Self::Boolean(b), InputValue::Null => Self::Null, InputValue::Enum(enm) => Self::Enum(enm.into()), InputValue::List(l) => Self::List(l.into_iter().map(Into::into).collect()), InputValue::Object(o) => { Self::Object(o.into_iter().map(|(n, i)| (n.into(), i.into())).collect()) } } } } impl From<InputValue> for String { fn from(input_val: InputValue) -> Self { match input_val { InputValue::Variable(v) => format!("${}", String::from(v)), InputValue::Int(i) => format!("{i}"), InputValue::Float(f) => format!("{f}"), InputValue::String(s) => s, InputValue::Boolean(b) => format!("{b}"), InputValue::Null => String::from("null"), InputValue::Enum(val) => val.into(), InputValue::List(list) => format!( "[{}]", list.into_iter() .map(String::from) .collect::<Vec<String>>() .join(", ") ), InputValue::Object(obj) => format!( "{{ {} }}", obj.into_iter() .map(|(k, v)| format!("{}: {}", String::from(k), String::from(v))) .collect::<Vec<String>>() .join(", ") ), } } } #[derive(Debug, Clone, PartialEq)] pub struct InputValueDef { pub(crate) description: Option<Description>, pub(crate) name: Name, pub(crate) ty: Ty, pub(crate) default_value: Option<InputValue>, pub(crate) directives: Vec<Directive>, } impl From<InputValueDef> for apollo_encoder::InputValueDefinition { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), 
input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl From<InputValueDef> for apollo_encoder::InputField { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl<'a> DocumentBuilder<'a> { pub fn input_value(&mut self) -> Result<InputValue> { let val = match self.u.int_in_range(0..=8usize)? { 0 => InputValue::Int(self.u.arbitrary()?), 1 => InputValue::Float(self.u.arbitrary()?), 2 => InputValue::String(self.limited_string(40)?), 3 => InputValue::Boolean(self.u.arbitrary()?), 4 => InputValue::Null, 5 => { if !self.enum_type_defs.is_empty() { let enum_choosed = self.choose_enum()?.clone(); InputValue::Enum(self.arbitrary_variant(&enum_choosed)?.clone()) } else { self.input_value()? } } 6 => { InputValue::List( (0..self.u.int_in_range(2..=4usize)?) .map(|_| self.input_value()) .collect::<Result<Vec<_>>>()?, ) } 7 => InputValue::Object( (0..self.u.int_in_range(2..=4usize)?) 
.map(|_| Ok((self.name()?, self.input_value()?))) .collect::<Result<Vec<_>>>()?, ), 8 => InputValue::Variable(self.name()?), _ => unreachable!(), }; Ok(val) } pub fn input_values_def(&mut self) -> Result<Vec<InputValueDef>> { let arbitrary_iv_num = self.u.int_in_range(2..=5usize)?; let mut input_values = Vec::with_capacity(arbitrary_iv_num - 1); for i in 0..arbitrary_iv_num { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name_with_index(i)?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; input_values.push(InputValueDef { description, name, ty, default_value, directives, }); } Ok(input_values) } pub fn input_value_def(&mut self) -> Result<InputValueDef> { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name()?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; Ok(InputValueDef { description, name, ty, default_value, directives, }) } }
use crate::{description::Description, directive::Directive, name::Name, ty::Ty, DocumentBuilder}; use arbitrary::Result; #[derive(Debug, Clone, PartialEq)] pub enum InputValue { Variable(Name), Int(i64), Float(f64), String(String), Boolean(bool), Null, Enum(Name), List(Vec<InputValue>), Object(Vec<(Name, InputValue)>), } impl From<InputValue> for apollo_encoder::Value {
} impl From<InputValue> for String { fn from(input_val: InputValue) -> Self { match input_val { InputValue::Variable(v) => format!("${}", String::from(v)), InputValue::Int(i) => format!("{i}"), InputValue::Float(f) => format!("{f}"), InputValue::String(s) => s, InputValue::Boolean(b) => format!("{b}"), InputValue::Null => String::from("null"), InputValue::Enum(val) => val.into(), InputValue::List(list) => format!( "[{}]", list.into_iter() .map(String::from) .collect::<Vec<String>>() .join(", ") ), InputValue::Object(obj) => format!( "{{ {} }}", obj.into_iter() .map(|(k, v)| format!("{}: {}", String::from(k), String::from(v))) .collect::<Vec<String>>() .join(", ") ), } } } #[derive(Debug, Clone, PartialEq)] pub struct InputValueDef { pub(crate) description: Option<Description>, pub(crate) name: Name, pub(crate) ty: Ty, pub(crate) default_value: Option<InputValue>, pub(crate) directives: Vec<Directive>, } impl From<InputValueDef> for apollo_encoder::InputValueDefinition { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl From<InputValueDef> for apollo_encoder::InputField { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl<'a> DocumentBuilder<'a> { pub fn input_value(&mut self) -> Result<InputValue> { let val = match self.u.int_in_range(0..=8usize)? 
{ 0 => InputValue::Int(self.u.arbitrary()?), 1 => InputValue::Float(self.u.arbitrary()?), 2 => InputValue::String(self.limited_string(40)?), 3 => InputValue::Boolean(self.u.arbitrary()?), 4 => InputValue::Null, 5 => { if !self.enum_type_defs.is_empty() { let enum_choosed = self.choose_enum()?.clone(); InputValue::Enum(self.arbitrary_variant(&enum_choosed)?.clone()) } else { self.input_value()? } } 6 => { InputValue::List( (0..self.u.int_in_range(2..=4usize)?) .map(|_| self.input_value()) .collect::<Result<Vec<_>>>()?, ) } 7 => InputValue::Object( (0..self.u.int_in_range(2..=4usize)?) .map(|_| Ok((self.name()?, self.input_value()?))) .collect::<Result<Vec<_>>>()?, ), 8 => InputValue::Variable(self.name()?), _ => unreachable!(), }; Ok(val) } pub fn input_values_def(&mut self) -> Result<Vec<InputValueDef>> { let arbitrary_iv_num = self.u.int_in_range(2..=5usize)?; let mut input_values = Vec::with_capacity(arbitrary_iv_num - 1); for i in 0..arbitrary_iv_num { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name_with_index(i)?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; input_values.push(InputValueDef { description, name, ty, default_value, directives, }); } Ok(input_values) } pub fn input_value_def(&mut self) -> Result<InputValueDef> { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name()?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; Ok(InputValueDef { description, name, ty, default_value, directives, }) } }
fn from(input_value: InputValue) -> Self { match input_value { InputValue::Variable(v) => Self::Variable(v.into()), InputValue::Int(i) => Self::Int(i), InputValue::Float(f) => Self::Float(f), InputValue::String(s) => Self::String(s), InputValue::Boolean(b) => Self::Boolean(b), InputValue::Null => Self::Null, InputValue::Enum(enm) => Self::Enum(enm.into()), InputValue::List(l) => Self::List(l.into_iter().map(Into::into).collect()), InputValue::Object(o) => { Self::Object(o.into_iter().map(|(n, i)| (n.into(), i.into())).collect()) } } }
function_block-full_function
[ { "content": "enum NapSpots @testDirective(first: \"one\") {\n\n \"Top bunk of a cat tree.\"\n\n CAT_TREE\n\n BED\n\n CARDBOARD_BOX @deprecated(reason: \"Box was recycled.\")\n\n}\n\n\"#\n\n );\n\n }\n\n\n\n #[test]\n\n fn it_encodes_enum_extension() {\n\n let mut enum_ty_1 = EnumValu...
Rust
crates/store/src/state/mem_pool_state_db.rs
driftluo/godwoken
667a4bc435a9894b131cd804daf6403fd5cd4026
use crate::mem_pool_store::{ Value, MEM_POOL_COL_DATA, MEM_POOL_COL_SCRIPT, MEM_POOL_COL_SCRIPT_PREFIX, }; use crate::smt::mem_pool_smt_store::MemPoolSMTStore; use crate::{traits::KVStore, transaction::StoreTransaction}; use anyhow::Result; use gw_common::{error::Error as StateError, smt::SMT, state::State, H256}; use gw_db::error::Error; use gw_db::schema::{COLUMN_DATA, COLUMN_SCRIPT, COLUMN_SCRIPT_PREFIX}; use gw_traits::CodeStore; use gw_types::{ bytes::Bytes, packed::{self, AccountMerkleState}, prelude::*, }; use super::state_tracker::StateTracker; pub struct MemPoolStateTree<'a> { tree: SMT<MemPoolSMTStore<'a>>, account_count: u32, tracker: StateTracker, } impl<'a> MemPoolStateTree<'a> { pub fn new(tree: SMT<MemPoolSMTStore<'a>>, account_count: u32) -> Self { MemPoolStateTree { tree, account_count, tracker: Default::default(), } } pub fn tracker_mut(&mut self) -> &mut StateTracker { &mut self.tracker } pub fn get_merkle_state(&self) -> AccountMerkleState { AccountMerkleState::new_builder() .merkle_root(self.tree.root().pack()) .count(self.account_count.pack()) .build() } pub fn submit_tree_to_mem_block(&self) -> Result<(), Error> { self.db() .set_mem_block_account_smt_root(*self.tree.root()) .expect("set smt root"); self.db() .set_mem_block_account_count(self.account_count) .expect("set smt root"); Ok(()) } fn db(&self) -> &StoreTransaction { self.tree.store().inner_store() } } impl<'a> State for MemPoolStateTree<'a> { fn get_raw(&self, key: &H256) -> Result<H256, StateError> { self.tracker.touch_key(key); let v = self.tree.get(key)?; Ok(v) } fn update_raw(&mut self, key: H256, value: H256) -> Result<(), StateError> { self.tracker.touch_key(&key); self.tree.update(key, value)?; Ok(()) } fn get_account_count(&self) -> Result<u32, StateError> { Ok(self.account_count) } fn set_account_count(&mut self, count: u32) -> Result<(), StateError> { self.account_count = count; Ok(()) } fn calculate_root(&self) -> Result<H256, StateError> { let root = self.tree.root(); 
Ok(*root) } } impl<'a> CodeStore for MemPoolStateTree<'a> { fn insert_script(&mut self, script_hash: H256, script: packed::Script) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_SCRIPT, script_hash.as_slice().to_vec().into(), Value::Exist(script.as_slice().to_vec().into()), ); mem_pool_store.insert( MEM_POOL_COL_SCRIPT_PREFIX, script_hash.as_slice()[..20].to_vec().into(), Value::Exist(script_hash.as_slice().to_vec().into()), ); } fn get_script(&self, script_hash: &H256) -> Option<packed::Script> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_SCRIPT, script_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT, script_hash.as_slice()) .map(Into::into) }) .map(|slice| packed::ScriptReader::from_slice_should_be_ok(slice.as_ref()).to_entity()) } fn get_script_hash_by_short_address(&self, script_hash_prefix: &[u8]) -> Option<H256> { let mem_pool_store = self.db().mem_pool.load(); match mem_pool_store .get(MEM_POOL_COL_SCRIPT_PREFIX, script_hash_prefix) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT_PREFIX, script_hash_prefix) .map(Into::into) }) { Some(slice) => { let mut hash = [0u8; 32]; hash.copy_from_slice(slice.as_ref()); Some(hash.into()) } None => None, } } fn insert_data(&mut self, data_hash: H256, code: Bytes) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_DATA, data_hash.as_slice().to_vec().into(), Value::Exist(code), ); } fn get_data(&self, data_hash: &H256) -> Option<Bytes> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_DATA, data_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_DATA, data_hash.as_slice()) .map(Into::into) }) .map(|slice| Bytes::from(slice.to_vec())) } }
use crate::mem_pool_store::{ Value, MEM_POOL_COL_DATA, MEM_POOL_COL_SCRIPT, MEM_POOL_COL_SCRIPT_PREFIX, }; use crate::smt::mem_pool_smt_store::MemPoolSMTStore; use crate::{traits::KVStore, transaction::StoreTransaction}; use anyhow::Result; use gw_common::{error::Error as StateError, smt::SMT, state::State, H256}; use gw_db::error::Error; use gw_db::schema::{COLUMN_DATA, COLUMN_SCRIPT, COLUMN_SCRIPT_PREFIX}; use gw_traits::CodeStore; use gw_types::{ bytes::Bytes, packed::{self, AccountMerkleState}, prelude::*, }; use super::state_tracker::StateTracker; pub struct MemPoolStateTree<'a> { tree: SMT<MemPoolSMTStore<'a>>, account_count: u32, tracker: StateTracker, } impl<'a> MemPoolStateTree<'a> { pub fn new(tree: SMT<MemPoolSMTStore<'a>>, account_count: u32) -> Self { MemPoolStateTree { tree, account_count, tracker: Default::default(), } } pub fn tracker_mut(&mut self) -> &mut StateTracker { &mut self.tracker } pub fn get_merkle_state(&self) -> AccountMerkleState { AccountMerkleState::new_builder() .merkle_root(self.tree.root().pack()) .count(self.account_count.pack()) .build() } pub fn submit_tree_to_mem_block(&self) -> Result<(), Error> { self.db() .set_mem_block_account_smt_root(*self.tree.root()) .expect("set smt root"); self.db() .set_mem_block_account_count(self.account_count) .expect("set smt root"); Ok(()) } fn db(&self) -> &StoreTransaction { self.tree.store().inner_store() } } impl<'a> State for MemPoolStateTree<'a> { fn get_raw(&self, key: &H256) -> Result<H256, StateError> { self.tracker.touch_key(key); let v = self.tree.get(key)?; Ok(v) } fn update_raw(&mut self, key: H256, value: H256) -> Result<(), StateError> { self.tracker.touch_key(&key); self.tree.update(key, value)?; Ok(()) } fn get_account_count(&self) -> Result<u32, StateError> { Ok(self.account_count) } fn set_account_count(&mut self, count: u32) -> Result<(), StateError> { self.account_count = count; Ok(()) } fn calculate_root(&self) -> Result<H256, StateError> { let root = self.tree.root(); 
Ok(*root) } } impl<'a> CodeStore for MemPoolStateTree<'a> { fn insert_script(&mut self, script_hash: H256, script: packed::Script) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_SCRIPT, script_hash.as_slice().to_vec().into(), Value::Exist(script.as_slice().to_vec().into()), ); mem_pool_store.insert( MEM_POOL_COL_SCRIPT_PREFIX, script_hash.as_slice()[..20].to_vec().into(), Value::Exist(script_hash.as_slice().to_vec().into()), ); }
fn get_script_hash_by_short_address(&self, script_hash_prefix: &[u8]) -> Option<H256> { let mem_pool_store = self.db().mem_pool.load(); match mem_pool_store .get(MEM_POOL_COL_SCRIPT_PREFIX, script_hash_prefix) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT_PREFIX, script_hash_prefix) .map(Into::into) }) { Some(slice) => { let mut hash = [0u8; 32]; hash.copy_from_slice(slice.as_ref()); Some(hash.into()) } None => None, } } fn insert_data(&mut self, data_hash: H256, code: Bytes) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_DATA, data_hash.as_slice().to_vec().into(), Value::Exist(code), ); } fn get_data(&self, data_hash: &H256) -> Option<Bytes> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_DATA, data_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_DATA, data_hash.as_slice()) .map(Into::into) }) .map(|slice| Bytes::from(slice.to_vec())) } }
fn get_script(&self, script_hash: &H256) -> Option<packed::Script> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_SCRIPT, script_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT, script_hash.as_slice()) .map(Into::into) }) .map(|slice| packed::ScriptReader::from_slice_should_be_ok(slice.as_ref()).to_entity()) }
function_block-full_function
[ { "content": "// Calculate compacted account root\n\npub fn calculate_state_checkpoint(root: &H256, count: u32) -> H256 {\n\n let mut hash = [0u8; 32];\n\n let mut hasher = new_blake2b();\n\n hasher.update(root.as_slice());\n\n hasher.update(&count.to_le_bytes());\n\n hasher.finalize(&mut hash);\...
Rust
src/lib.rs
SilverBzH/wasm-sorter
303cb7c16e764025d5b3d1eedeef219365892830
mod utils; use wasm_bindgen::prelude::*; #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum SortType { Bubble, BubbleOptimizied, QuickSort, } #[wasm_bindgen] pub struct Sorter { data: Vec<u32>, swapped_index: Vec<u32>, } #[wasm_bindgen] impl Sorter { pub fn new(data: Vec<u32>) -> Sorter { let swapped_index = Vec::new(); Sorter { data, swapped_index, } } pub fn run(&mut self, sort_type: SortType) { self.swapped_index.clear(); match sort_type { SortType::Bubble => { println!("BUBBLE SORT"); Sorter::bubble_sort(self); }, SortType::BubbleOptimizied => { println!("BUBBLE SORT OPTIMIZED"); Sorter::bubble_sort_optimized(self); }, SortType::QuickSort => { println!("QUICK SORT"); let first_index = 0; let last_index = self.data.len()-1; Sorter::quick_sort(self, first_index, last_index); } } } pub fn get_data(&mut self) -> Vec<u32> { let data = self.data.clone(); data } pub fn get_swapped_indexes(&mut self) -> Vec<u32> { let indexes = self.swapped_index.clone(); self.swapped_index.clear(); indexes } pub fn update_data(&mut self, data: Vec<u32>) { self.data = data; } } impl Sorter { fn bubble_sort(&mut self) { let length = self.data.len(); for _ in 0..length { for j in 0..length-1 { if self.data[j+1] < self.data[j] { self.data.swap(j, j+1); self.swapped_index.push(j as u32); self.swapped_index.push((j+1) as u32); } } } } fn bubble_sort_optimized(&mut self) { let mut is_sorted; let length = self.data.len(); for _ in 0..length { is_sorted = true; for i in 0..length-1 { if self.data[i+1] < self.data[i] { self.data.swap(i, i+1); self.swapped_index.push(i as u32); self.swapped_index.push((i+1) as u32); is_sorted = false; } } if is_sorted { return; } } } fn partition(&mut self, first_index: usize, last_index: usize) -> usize { let pivot = self.data[last_index]; let mut i = first_index; for j in first_index..last_index { if self.data[j] < 
pivot { self.data.swap(i, j); self.swapped_index.push(i as u32); self.swapped_index.push(j as u32); i += 1; } } self.data.swap(i, last_index); self.swapped_index.push(i as u32); self.swapped_index.push(last_index as u32); i } fn quick_sort(&mut self, first_index: usize, last_index: usize) { if first_index < last_index { let pivot = Sorter::partition(self, first_index.clone(), last_index.clone()); let pivot_low = if pivot == 0 { 0 } else { pivot-1 }; let pivot_high = if pivot >= last_index { last_index } else { pivot+1 }; Sorter::quick_sort(self, first_index, pivot_low); Sorter::quick_sort(self, pivot_high, last_index); } } } #[cfg(test)] mod test { use super::*; #[test] fn bubble() { let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6]; let mut sorter = Sorter::new(data.clone()); sorter.run(SortType::Bubble); data.sort(); let sucess = if data == sorter.get_data() {true} else {false}; assert_eq!(sucess, true); } #[test] fn bubble_optimized() { let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6]; let mut sorter = Sorter::new(data.clone()); sorter.run(SortType::BubbleOptimizied); data.sort(); let sucess = if data == sorter.get_data() {true} else {false}; assert_eq!(sucess, true); } #[test] fn quick_sort() { let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6]; let mut sorter = Sorter::new(data.clone()); sorter.run(SortType::QuickSort); println!("data: {:?}", data); data.sort(); let sucess = if data == sorter.get_data() {true} else {false}; assert_eq!(sucess, true); } }
mod utils; use wasm_bindgen::prelude::*; #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum SortType { Bubble, BubbleOptimizied, QuickSort, } #[wasm_bindgen] pub struct Sorter { data: Vec<u32>, swapped_index: Vec<u32>, } #[wasm_bindgen] impl Sorter { pub fn new(data: Vec<u32>) -> Sorter { let swapped_index = Vec::new(); Sorter { data, swapped_index, } } pub fn run(&mut self, sort_type: SortType) { self.swapped_index.clear(); match sort_type { SortType::Bubble => { println!("BUBBLE SORT"); Sorter::bubble_sort(self); }, SortType::BubbleOptimizied => { println!("BUBBLE SORT OPTIMIZED"); Sorter::bubble_sort_optimized(self); }, SortType::QuickSort => { println!("QUICK SORT"); let first_index = 0; let last_index = self.data.len()-1; Sorter::quick_sort(self, first_index, last_index); } } } pub fn get_data(&mut self) -> Vec<u32> { let data = self.data.clone(); data } pub fn get_swapped_indexes(&mut self) -> Vec<u32> { let indexes = self.swapped_index.clone(); self.swapped_index.clear(); indexes } pub fn update_data(&mut self, data: Vec<u32>) { self.data = data; } } impl Sorter { fn bubble_sort(&mut self) { let length = self.data.len(); for _ in 0..length { for j in 0..length-1 { if self.data[j+1] < self.data[j] { self.data.swap(j, j+1); self.swapped_index.push(j as u32); self.swapped_index.push((j+1) as u32); } } } } fn bubble_sort_optimized(&mut self) { let mut is_sorted; let length = self.data.len(); for _ in 0..length { is_sorted = true; for i in 0..length-1 { if self.data[i+1] < self.data[i] { self.data.swap(i, i+1); self.swapped_index.push(i as u32); self.swapped_index.push((i+1) as u32); is_sorted = false; } } if is_sorted { return; } } } fn partition(&mut self, first_index: usize, last_index: usize) -> usize { let pivot = self.data[last_index]; let mut i = first_index; for j in first_index..last_index { if self.data[j] < 
pivot { self.data.swap(i, j); self.swapped_index.push(i as u32); self.swapped_index.push(j as u32); i += 1; } } self.data.swap(i, last_index); self.swapped_index.push(i as u32); self.swapped_index.push(last_index as u32); i } fn quick_sort(&mut self, first_index: usize, last_index: usize) { if first_index < last_index { let pivot = Sorter::partition(self, first_index.clone(), last_index.clone()); let pivot_low = if pivot == 0 { 0 } else { pivot-1 }; let pivot_high = if pivot >= last_index { last_index } else { pivot+1 }; Sorter::quick_sort(self, first_index, pivot_low); Sorter::quick_sort(self, pivot_high, last_index); } } } #[cfg(test)] mod test { use super::*; #[test] fn bubble() { let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
#[test] fn bubble_optimized() { let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6]; let mut sorter = Sorter::new(data.clone()); sorter.run(SortType::BubbleOptimizied); data.sort(); let sucess = if data == sorter.get_data() {true} else {false}; assert_eq!(sucess, true); } #[test] fn quick_sort() { let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6]; let mut sorter = Sorter::new(data.clone()); sorter.run(SortType::QuickSort); println!("data: {:?}", data); data.sort(); let sucess = if data == sorter.get_data() {true} else {false}; assert_eq!(sucess, true); } }
let mut sorter = Sorter::new(data.clone()); sorter.run(SortType::Bubble); data.sort(); let sucess = if data == sorter.get_data() {true} else {false}; assert_eq!(sucess, true); }
function_block-function_prefix_line
[ { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details se...
Rust
tests/test_casbin.rs
smrpn/actix-casbin
c4e91e518e3414ae9c3542f47ba04c358e38e1b1
use actix_casbin::{CasbinActor, CasbinCmd, CasbinResult}; use casbin::prelude::*; #[actix_rt::test] async fn test_enforcer() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::Enforce(test_enforce) = addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } } #[actix_rt::test] async fn test_enforcer_threads() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); for _ in 0..8 { let clone_addr = addr.clone(); tokio::spawn(async move { if let CasbinResult::Enforce(test_enforce) = clone_addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } }); } } #[actix_rt::test] async fn test_policy_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::RemovePolicy(remove_policy) = addr .send(CasbinCmd::RemovePolicy( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_policy); } if let CasbinResult::RemoveFilteredNamedPolicy(remove_filtered_policy) = addr .send(CasbinCmd::RemoveFilteredNamedPolicy( "p".to_string(), 1, vec!["data2"].iter().map(|s| s.to_string()).collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_filtered_policy); } if let CasbinResult::AddPolicy(add_policy) = addr .send(CasbinCmd::AddPolicy( vec!["eve", "data3", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await 
.unwrap() .unwrap() { assert_eq!(true, add_policy); } if let CasbinResult::AddPolicy(add_policies) = addr .send(CasbinCmd::AddPolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, add_policies); } if let CasbinResult::RemovePolicies(remove_policies) = addr .send(CasbinCmd::RemovePolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, remove_policies); } } #[actix_rt::test] async fn test_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::AddRoleForUser(add_role_for_user) = addr .send(CasbinCmd::AddRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_role_for_user); } if let CasbinResult::AddRolesForUser(add_roles_for_user) = addr .send(CasbinCmd::AddRolesForUser( "bob".to_string(), vec!["data1_admin", "data2_admin"] .iter() .map(|s| s.to_string()) .collect(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_roles_for_user); } if let CasbinResult::DeleteRoleForUser(delete_role_for_user) = addr .send(CasbinCmd::DeleteRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, delete_role_for_user); } if let CasbinResult::DeleteRolesForUser(delete_roles_for_user) = addr .send(CasbinCmd::DeleteRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(true, delete_roles_for_user); } } #[actix_rt::test] async fn test_implicit_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = 
FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitRolesForUser(implicit_roles_alice) = addr .send(CasbinCmd::GetImplicitRolesForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec!["admin", "data1_admin", "data2_admin"], sort_unstable(implicit_roles_alice) ); } if let CasbinResult::GetImplicitRolesForUser(implicit_roles_bob) = addr .send(CasbinCmd::GetImplicitRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(vec![String::new(); 0], implicit_roles_bob); } } #[actix_rt::test] async fn test_implicit_permissions_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_alice) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![ vec!["alice", "data1", "read"], vec!["data1_admin", "data1", "read"], vec!["data1_admin", "data1", "write"], vec!["data2_admin", "data2", "read"], vec!["data2_admin", "data2", "write"], ], sort_unstable(implicit_permissions_alice) ); } if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_bob) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "bob".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![vec!["bob", "data2", "write"]], implicit_permissions_bob ); } } fn sort_unstable<T: Ord>(mut v: Vec<T>) -> Vec<T> { v.sort_unstable(); v }
use actix_casbin::{CasbinActor, CasbinCmd, CasbinResult}; use casbin::prelude::*; #[actix_rt::test] async fn test_enforcer() { let m = DefaultModel::from_file("
, vec!["data1_admin", "data2_admin"] .iter() .map(|s| s.to_string()) .collect(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_roles_for_user); } if let CasbinResult::DeleteRoleForUser(delete_role_for_user) = addr .send(CasbinCmd::DeleteRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, delete_role_for_user); } if let CasbinResult::DeleteRolesForUser(delete_roles_for_user) = addr .send(CasbinCmd::DeleteRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(true, delete_roles_for_user); } } #[actix_rt::test] async fn test_implicit_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitRolesForUser(implicit_roles_alice) = addr .send(CasbinCmd::GetImplicitRolesForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec!["admin", "data1_admin", "data2_admin"], sort_unstable(implicit_roles_alice) ); } if let CasbinResult::GetImplicitRolesForUser(implicit_roles_bob) = addr .send(CasbinCmd::GetImplicitRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(vec![String::new(); 0], implicit_roles_bob); } } #[actix_rt::test] async fn test_implicit_permissions_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_alice) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![ vec!["alice", "data1", "read"], vec!["data1_admin", "data1", "read"], vec!["data1_admin", "data1", "write"], vec!["data2_admin", "data2", "read"], 
vec!["data2_admin", "data2", "write"], ], sort_unstable(implicit_permissions_alice) ); } if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_bob) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "bob".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![vec!["bob", "data2", "write"]], implicit_permissions_bob ); } } fn sort_unstable<T: Ord>(mut v: Vec<T>) -> Vec<T> { v.sort_unstable(); v }
examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::Enforce(test_enforce) = addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } } #[actix_rt::test] async fn test_enforcer_threads() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); for _ in 0..8 { let clone_addr = addr.clone(); tokio::spawn(async move { if let CasbinResult::Enforce(test_enforce) = clone_addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } }); } } #[actix_rt::test] async fn test_policy_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::RemovePolicy(remove_policy) = addr .send(CasbinCmd::RemovePolicy( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_policy); } if let CasbinResult::RemoveFilteredNamedPolicy(remove_filtered_policy) = addr .send(CasbinCmd::RemoveFilteredNamedPolicy( "p".to_string(), 1, vec!["data2"].iter().map(|s| s.to_string()).collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_filtered_policy); } if let CasbinResult::AddPolicy(add_policy) = addr .send(CasbinCmd::AddPolicy( vec!["eve", "data3", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, add_policy); } if let CasbinResult::AddPolicy(add_policies) = addr .send(CasbinCmd::AddPolicies(vec![ vec!["lucy", 
"data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, add_policies); } if let CasbinResult::RemovePolicies(remove_policies) = addr .send(CasbinCmd::RemovePolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, remove_policies); } } #[actix_rt::test] async fn test_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::AddRoleForUser(add_role_for_user) = addr .send(CasbinCmd::AddRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_role_for_user); } if let CasbinResult::AddRolesForUser(add_roles_for_user) = addr .send(CasbinCmd::AddRolesForUser( "bob".to_string()
random
[ { "content": "use actix_casbin_auth::CasbinService;\n\n\n\n#[actix_rt::main]\n\nasync fn main() -> Result<()> {\n\n let m = DefaultModel::from_file(\"examples/rbac_model.conf\")\n\n .await?;\n\n let a = FileAdapter::new(\"examples/rbac_policy.csv\");\n\n\n\n let mut casbin_middleware = CasbinSer...
Rust
examples/iam/src/bin/iam-getting-started.rs
eduardomourar/aws-sdk-rust
58569c863afbe7bc442da8254df6c3970111de38
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. Purpose Shows how to use the AWS SDK for PHP (v3) to get started using AWS Identity and Access Management (IAM). Create an IAM user, assume a role, and perform AWS actions. 1. Create a user that has no permissions. 2. Create a role and policy that grant s3:ListAllMyBuckets permission. 3. Grant the user permission to assume the role. 4. Create an S3 client object as the user and try to list buckets (this should fail). 5. Get temporary credentials by assuming the role. 6. Create an S3 client object with the temporary credentials and list the buckets (this should succeed). 7. Delete all the resources. To run the bin file directly, use the following command: cargo --bin iam-getting-started To run the service class tests run: cargo test */ use aws_config::meta::region::RegionProviderChain; use aws_sdk_iam::Error as iamError; use aws_sdk_iam::{Client as iamClient, Credentials as iamCredentials}; use aws_sdk_s3::Client as s3Client; use aws_sdk_sts::Client as stsClient; use aws_types::region::Region; use std::borrow::Borrow; use tokio::time::{sleep, Duration}; use uuid::Uuid; #[tokio::main] async fn main() -> Result<(), iamError> { let (client, uuid, list_all_buckets_policy_document, inline_policy_document) = initialize_variables().await; if let Err(e) = run_iam_operations( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) .await { println!("{:?}", e); }; Ok(()) } async fn initialize_variables() -> (iamClient, String, String, String) { let region_provider = RegionProviderChain::first_try(Region::new("us-west-2")); let shared_config = aws_config::from_env().region(region_provider).load().await; let client = iamClient::new(&shared_config); let uuid = Uuid::new_v4().to_string(); let list_all_buckets_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"s3:ListAllMyBuckets\", \"Resource\": 
\"arn:aws:s3:::*\"}] }" .to_string(); let inline_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"sts:AssumeRole\", \"Resource\": \"{}\"}] }" .to_string(); ( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) } async fn run_iam_operations( client: iamClient, uuid: String, list_all_buckets_policy_document: String, inline_policy_document: String, ) -> Result<(), iamError> { let user = iam_service::create_user(&client, &format!("{}{}", "iam_demo_user_", uuid)).await?; println!( "Created the user with the name: {}", user.user_name.as_ref().unwrap() ); let key = iam_service::create_access_key(&client, user.user_name.as_ref().unwrap()).await?; let assume_role_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Principal\": {\"AWS\": \"{}\"}, \"Action\": \"sts:AssumeRole\" }] }" .to_string() .replace("{}", user.arn.as_ref().unwrap()); let assume_role_role = iam_service::create_role( &client, &format!("{}{}", "iam_demo_role_", uuid), &assume_role_policy_document, ) .await?; println!( "Created the role with the ARN: {}", assume_role_role.arn.as_ref().unwrap() ); let list_all_buckets_policy = iam_service::create_policy( &client, &format!("{}{}", "iam_demo_policy_", uuid), &list_all_buckets_policy_document, ) .await?; println!( "Created policy: {}", list_all_buckets_policy.policy_name.as_ref().unwrap() ); let attach_role_policy_result = iam_service::attach_role_policy(&client, &assume_role_role, &list_all_buckets_policy) .await?; println!( "Attached the policy to the role: {:?}", attach_role_policy_result ); let inline_policy_name = &format!("{}{}", "iam_demo_inline_policy_", uuid); let inline_policy_document = inline_policy_document.replace("{}", assume_role_role.arn.as_ref().unwrap()); iam_service::create_user_policy(&client, &user, &inline_policy_name, &inline_policy_document) .await?; println!("Created inline policy."); let creds = 
iamCredentials::from_keys( key.access_key_id.as_ref().unwrap(), key.secret_access_key.as_ref().unwrap(), None, ); let fail_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; println!("Fail config: {:?}", fail_config); let fail_client: s3Client = s3Client::new(&fail_config); match fail_client.list_buckets().send().await { Ok(e) => { println!("This should not run. {:?}", e); } Err(e) => { println!("Successfully failed with error: {:?}", e) } } let sts_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; let sts_client: stsClient = stsClient::new(&sts_config); sleep(Duration::from_secs(10)).await; let assumed_role = sts_client .assume_role() .role_arn(assume_role_role.arn.as_ref().unwrap()) .role_session_name(&format!("{}{}", "iam_demo_assumerole_session_", uuid)) .send() .await; println!("Assumed role: {:?}", assumed_role); sleep(Duration::from_secs(10)).await; let assumed_credentials = iamCredentials::from_keys( assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .access_key_id .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .secret_access_key .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .session_token .borrow() .clone(), ); let succeed_config = aws_config::from_env() .credentials_provider(assumed_credentials) .load() .await; println!("succeed config: {:?}", succeed_config); let succeed_client: s3Client = s3Client::new(&succeed_config); sleep(Duration::from_secs(10)).await; match succeed_client.list_buckets().send().await { Ok(_) => { println!("This should now run successfully.") } Err(e) => { println!("This should not run. 
{:?}", e); panic!() } } iam_service::detach_role_policy( &client, assume_role_role.role_name.as_ref().unwrap(), list_all_buckets_policy.arn.as_ref().unwrap(), ) .await?; iam_service::delete_policy(&client, list_all_buckets_policy).await?; iam_service::delete_role(&client, &assume_role_role).await?; println!( "Deleted role {}", assume_role_role.role_name.as_ref().unwrap() ); iam_service::delete_access_key(&client, &user, &key).await?; println!("Deleted key for {}", key.user_name.as_ref().unwrap()); iam_service::delete_user_policy(&client, &user, &inline_policy_name).await?; println!("Deleted inline user policy: {}", inline_policy_name); iam_service::delete_user(&client, &user).await?; println!("Deleted user {}", user.user_name.as_ref().unwrap()); Ok(()) }
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. Purpose Shows how to use the AWS SDK for PHP (v3) to get started using AWS Identity and Access Management (IAM). Create an IAM user, assume a role, and perform AWS actions. 1. Create a user that has no permissions. 2. Create a role and policy that grant s3:ListAllMyBuckets permission. 3. Grant the user permission to assume the role. 4. Create an S3 client object as the user and try to list buckets (this should fail). 5. Get temporary credentials by assuming the role. 6. Create an S3 client object with the temporary credentials and list the buckets (this should succeed). 7. Delete all the resources. To run the bin file directly, use the following command: cargo --bin iam-getting-started To run the service class tests run: cargo test */ use aws_config::meta::region::RegionProviderChain; use aws_sdk_iam::Error as iamError; use aws_sdk_iam::{Client as iamClient, Credentials as iamCredentials}; use aws_sdk_s3::Client as s3Client; use aws_sdk_sts::Client as stsClient; use aws_types::region::Region; use std::borrow::Borrow; use tokio::time::{sleep, Duration}; use uuid::Uuid; #[tokio::main] async fn main() -> Result<(), iamError> { let (client, uuid, list_all_buckets_policy_document, inline_policy_document) = initialize_variables().await; if let Err(e) = run_iam_operations( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) .await { println!("{:?}", e); }; Ok(()) } async fn initialize_variables() -> (iamClient, String, String, String) { let region_provider = RegionProviderChain::first_try(Region::new("us-west-2")); let shared_config = aws_config::from_env().region(region_provider).load().await; let client = iamClient::new(&shared_config); let uuid = Uuid::new_v4().to_string(); let list_all_buckets_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"s3:ListAllMyBuckets\", \"Resource\": 
\"arn:aws:s3:::*\"}] }" .to_string(); let inline_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"sts:AssumeRole\", \"Resource\": \"{}\"}] }" .to_string(); ( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) } async fn run_iam_operations( client: iamClient, uuid: String, list_all_buckets_policy_document: String, inline_policy_document: String, ) -> Result<(), iamError> { let user = iam_service::create_user(&client, &format!("{}{}", "iam_demo_user_", uuid)).await?; println!( "Created the user with the name: {}", user.user_name.as_ref().unwrap() ); let key = iam_service::create_access_key(&client, user.user_name.as_ref().unwrap()).await?; let assume_role_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Principal\": {\"AWS\": \"{}\"}, \"Action\": \"sts:AssumeRole\" }] }" .to_string() .replace("{}", user.arn.as_ref().unwrap()); let assume_role_role = iam_service::create_role( &client, &format!("{}{}", "iam_demo_role_", uuid), &assume_role_policy_document, ) .await?; println!( "Created the role with the ARN: {}", assume_role_role.arn.as_ref().unwrap() ); let list_all_buckets_policy = iam_service::create_policy( &client, &format!("{}{}", "iam_demo_policy_", uuid), &list_all_buckets_policy_document, ) .await?; println!( "Created p
y).await?; iam_service::delete_role(&client, &assume_role_role).await?; println!( "Deleted role {}", assume_role_role.role_name.as_ref().unwrap() ); iam_service::delete_access_key(&client, &user, &key).await?; println!("Deleted key for {}", key.user_name.as_ref().unwrap()); iam_service::delete_user_policy(&client, &user, &inline_policy_name).await?; println!("Deleted inline user policy: {}", inline_policy_name); iam_service::delete_user(&client, &user).await?; println!("Deleted user {}", user.user_name.as_ref().unwrap()); Ok(()) }
olicy: {}", list_all_buckets_policy.policy_name.as_ref().unwrap() ); let attach_role_policy_result = iam_service::attach_role_policy(&client, &assume_role_role, &list_all_buckets_policy) .await?; println!( "Attached the policy to the role: {:?}", attach_role_policy_result ); let inline_policy_name = &format!("{}{}", "iam_demo_inline_policy_", uuid); let inline_policy_document = inline_policy_document.replace("{}", assume_role_role.arn.as_ref().unwrap()); iam_service::create_user_policy(&client, &user, &inline_policy_name, &inline_policy_document) .await?; println!("Created inline policy."); let creds = iamCredentials::from_keys( key.access_key_id.as_ref().unwrap(), key.secret_access_key.as_ref().unwrap(), None, ); let fail_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; println!("Fail config: {:?}", fail_config); let fail_client: s3Client = s3Client::new(&fail_config); match fail_client.list_buckets().send().await { Ok(e) => { println!("This should not run. 
{:?}", e); } Err(e) => { println!("Successfully failed with error: {:?}", e) } } let sts_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; let sts_client: stsClient = stsClient::new(&sts_config); sleep(Duration::from_secs(10)).await; let assumed_role = sts_client .assume_role() .role_arn(assume_role_role.arn.as_ref().unwrap()) .role_session_name(&format!("{}{}", "iam_demo_assumerole_session_", uuid)) .send() .await; println!("Assumed role: {:?}", assumed_role); sleep(Duration::from_secs(10)).await; let assumed_credentials = iamCredentials::from_keys( assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .access_key_id .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .secret_access_key .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .session_token .borrow() .clone(), ); let succeed_config = aws_config::from_env() .credentials_provider(assumed_credentials) .load() .await; println!("succeed config: {:?}", succeed_config); let succeed_client: s3Client = s3Client::new(&succeed_config); sleep(Duration::from_secs(10)).await; match succeed_client.list_buckets().send().await { Ok(_) => { println!("This should now run successfully.") } Err(e) => { println!("This should not run. {:?}", e); panic!() } } iam_service::detach_role_policy( &client, assume_role_role.role_name.as_ref().unwrap(), list_all_buckets_policy.arn.as_ref().unwrap(), ) .await?; iam_service::delete_policy(&client, list_all_buckets_polic
random
[]
Rust
src/mdbook/fenced_blocks.rs
FifthTry/ft-cli
155c0765bc2e74d5d6dada52b95f8c4422d0e0a3
pub(crate) fn fenced_to_code(content: &str, img_src: &std::path::Path) -> String { #[derive(PartialEq)] enum ParsingState { WaitingForBackTick, WaitingForEndBackTick, } struct State { state: ParsingState, sections: Vec<String>, } let mut state = State { state: ParsingState::WaitingForBackTick, sections: vec![], }; fn parse_lang(line: &str) -> String { let line = line.replace("```", ""); let line = line.trim().split(',').collect::<Vec<_>>(); (match line.get(0) { Some(&"rust") => "rs", Some(&"console") => "sh", Some(&"cmd") => "sh", Some(&"toml") => "toml", Some(&"java") => "java", Some(&"python") => "py", _ => "txt", }) .to_string() } fn finalize(state: State) -> String { state.sections.join("\n") } let mut buffer: String = "".to_string(); let mut is_markdown = false; let mut filename = Option::<String>::None; for line in content.split('\n') { if line.trim().starts_with("<span class=\"filename\"") && line.trim().ends_with("</span>") { let dom = html_parser::Dom::parse(line.trim()).unwrap(); if let Some(html_parser::Node::Element(e)) = dom.children.get(0) { if let Some(html_parser::Node::Text(text)) = e.children.get(0) { let text = if text.contains(':') { match text.split(':').collect::<Vec<_>>().last() { Some(s) => s.to_string(), None => text.to_string(), } } else { text.to_string() }; filename = Some(text); } } } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForBackTick { let lang = parse_lang(line); if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown { ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } state.state = ParsingState::WaitingForEndBackTick; buffer = format!( "-- code:\nlang: {}{}\n\n", lang, filename .take() .map(|x| format!("\nfilename: {}", x)) .unwrap_or_else(|| "".to_string()) ); is_markdown = false; } else if line.trim().starts_with("```") && state.state == 
ParsingState::WaitingForEndBackTick { state.sections.push(buffer.drain(..).collect()); state.state = ParsingState::WaitingForBackTick; is_markdown = true; } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown { ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } let content = finalize(state); img_to_code(content.as_str(), img_src) } pub(crate) fn img_to_code(content: &str, img_src: &std::path::Path) -> String { let mut sections = vec![]; let mut is_markdown = false; let mut buffer: String = "".to_string(); for line in content.lines() { if line.starts_with("<img") && line.ends_with("/>") { if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } is_markdown = true; let dom = html_parser::Dom::parse(line) .unwrap_or_else(|_| panic!("unable to parse: {}", line)); if let Some(html_parser::Node::Element(element)) = dom.children.get(0) { if let Some(Some(src)) = element.attributes.get("src") { let cap = if let Some(Some(alt)) = element.attributes.get("alt") { alt.as_str() } else { "" }; let src = img_src.join(src); let sec = ftd::Image::default() .with_src(&src.to_string_lossy()) .with_caption(cap) .with_width(500) .with_alt(cap) .to_p1() .to_string(); sections.push(sec); } } } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } sections.join("\n\n") }
pub(crate) fn fenced_to_code(content: &str, img_src: &std::path::Path) -> String { #[derive(PartialEq)] enum ParsingState { WaitingForBackTick, WaitingForEndBackTick, } struct State { state: ParsingState, sections: Vec<String>, } let mut state = State { state: ParsingState::WaitingForBackTick, sections: vec![], }; fn parse_lang(line: &str) -> String { let line = line.replace("```", ""); let line = line.trim().split(',').collect::<Vec<_>>(); (match line.get(0) { Some(&"rust") => "rs", Some(&"console") => "sh", Some(&"cmd") => "sh", Some(&"toml") => "toml", Some(&"java") => "java", Some(&"python") => "py", _ => "txt", }) .to_string() } fn finalize(state: State) -> String { state.sections.join("\n") } let mut buffer: String = "".to_string(); let mut is_markdown = false; let mut filename = Option::<String>::None; for line in content.split('\n') { if line.trim().starts_with("<span class=\"filename\"") && line.trim().ends_with("</span>") { let dom = html_parser::Dom::parse(line.trim()).unwrap();
ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } state.state = ParsingState::WaitingForEndBackTick; buffer = format!( "-- code:\nlang: {}{}\n\n", lang, filename .take() .map(|x| format!("\nfilename: {}", x)) .unwrap_or_else(|| "".to_string()) ); is_markdown = false; } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForEndBackTick { state.sections.push(buffer.drain(..).collect()); state.state = ParsingState::WaitingForBackTick; is_markdown = true; } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown { ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } let content = finalize(state); img_to_code(content.as_str(), img_src) } pub(crate) fn img_to_code(content: &str, img_src: &std::path::Path) -> String { let mut sections = vec![]; let mut is_markdown = false; let mut buffer: String = "".to_string(); for line in content.lines() { if line.starts_with("<img") && line.ends_with("/>") { if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } is_markdown = true; let dom = html_parser::Dom::parse(line) .unwrap_or_else(|_| panic!("unable to parse: {}", line)); if let Some(html_parser::Node::Element(element)) = dom.children.get(0) { if let Some(Some(src)) = element.attributes.get("src") { let cap = if let Some(Some(alt)) = element.attributes.get("alt") { alt.as_str() } else { "" }; let src = img_src.join(src); let sec = ftd::Image::default() .with_src(&src.to_string_lossy()) .with_caption(cap) .with_width(500) .with_alt(cap) .to_p1() .to_string(); sections.push(sec); } } } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { 
let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } sections.join("\n\n") }
if let Some(html_parser::Node::Element(e)) = dom.children.get(0) { if let Some(html_parser::Node::Text(text)) = e.children.get(0) { let text = if text.contains(':') { match text.split(':').collect::<Vec<_>>().last() { Some(s) => s.to_string(), None => text.to_string(), } } else { text.to_string() }; filename = Some(text); } } } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForBackTick { let lang = parse_lang(line); if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown {
random
[ { "content": "fn content_with_extract_title(content: &str) -> (String, Option<String>) {\n\n let lines = content.lines().into_iter().collect::<Vec<_>>();\n\n let mut title_line = None;\n\n for line in lines.iter() {\n\n if line.trim().starts_with('#') {\n\n title_line = Some(line.to_s...
Rust
path_tracing/src/utilities.rs
sansumbrella/path-tracer-rs
9de8797bfa957b82393d1757b84c24357ddc0f02
use super::vector::*; use rand::distributions::{Distribution, UnitSphereSurface}; use rand::prelude::*; use std::borrow::Borrow; use std::ops::{Add, Mul, Sub}; pub fn mix<T, U>(a: T, b: T, t: U) -> T where T: Copy + Add<T, Output = T> + Sub<T, Output = T> + Mul<U, Output = T>, { a + (b - a) * t } pub fn random_in_unit_sphere() -> Vec3 { let mut rng = rand::thread_rng(); let sphere = UnitSphereSurface::new(); Vec3(sphere.sample(&mut rng)) } pub fn random_in_unit_disk() -> [f64; 2] { let mut rng = rand::thread_rng(); loop { let p = Vec3::new( 2.0 * rng.gen::<f64>() - 1.0, 2.0 * rng.gen::<f64>() - 1.0, 0.0, ); if p.length_squared() < 1.0 { return [p.0[0], p.0[1]]; } } } pub fn rand() -> f64 { let mut rng = rand::thread_rng(); rng.gen::<f64>() } pub fn normalize<T>(vector: T) -> Vec3 where T: Borrow<Vec3>, { let vector = vector.borrow(); let &[x, y, z] = &vector.0; Vec3::new(x, y, z) / vector.length() } pub fn dot<T, U>(a: T, b: U) -> f64 where T: Borrow<Vec3>, U: Borrow<Vec3>, { let &[ax, ay, az] = &a.borrow().0; let &[bx, by, bz] = &b.borrow().0; ax * bx + ay * by + az * bz } pub fn cross<T, U>(a: T, b: U) -> Vec3 where T: Borrow<Vec3>, U: Borrow<Vec3>, { let &[ax, ay, az] = &a.borrow().0; let &[bx, by, bz] = &b.borrow().0; Vec3::new(ay * bz - az * by, -(ax * bz - az * bx), ax * by - ay * bx) } pub fn reflect<T, U>(vector: T, normal: U) -> Vec3 where T: Borrow<Vec3>, U: Borrow<Vec3>, { let vector = vector.borrow(); let normal = normal.borrow(); vector - &(normal * 2.0 * dot(vector, normal)) } pub fn refract<T, U>(vector: T, normal: U, ni_over_nt: f64) -> Option<Vec3> where T: Borrow<Vec3>, U: Borrow<Vec3>, { let vector = vector.borrow(); let normal = normal.borrow(); let vector = normalize(vector); let dt = dot(&vector, normal); let discriminant = 1.0 - ni_over_nt * ni_over_nt * (1.0 - dt * dt); if discriminant > 0.0 { let refracted = (vector - normal * dt) * ni_over_nt - normal * f64::sqrt(discriminant); return Some(refracted); } None } pub fn schlick(cosine: f64, 
refractive_index: f64) -> f64 { let r0 = (1.0 - refractive_index) / (1.0 + refractive_index); let r0 = r0 * r0; r0 + (1.0 - r0) * f64::powi(1.0 - cosine, 5) } #[cfg(test)] mod tests { use super::*; use approx::assert_relative_eq; #[test] fn mixing_built_in_types() { assert_eq!(mix(1.0, 2.0, 0.5), 1.5); assert_eq!(mix(1.0, -1.0, 0.5), 0.0); assert_eq!(mix(10 as f32, 20 as f32, 0.5) as i32, 15); } #[test] fn mixing_vector_types() { assert_eq!( mix(Vec3::new(0.0, 1.0, 2.0), Vec3::new(2.0, 1.0, 0.0), 0.5), Vec3::new(1.0, 1.0, 1.0) ); } #[test] fn sphere_random() { assert_relative_eq!(random_in_unit_sphere().length_squared(), 1.0); } #[test] fn trigonometry_functions() { assert_relative_eq!(f64::sin(std::f64::consts::PI), 0.0); } #[test] fn dot_product() { let a = Vec3::new(1.0, 0.0, 0.0); let b = Vec3::new(0.5, 0.5, 0.5); assert_eq!( dot(&a, &b), 0.5, "Dot product returns a scalar measuring similarity of two vectors" ); assert_eq!( dot(&Vec3::new(0.0, -1.0, 0.0), &Vec3::new(0.0, 1.0, 0.0)), -1.0, "Dot product returns a scalar measuring similarity of two vectors" ); } #[test] fn cross_product() { let x = Vec3::new(1.0, 0.0, 0.0); let y = Vec3::new(0.0, 1.0, 0.0); let z = Vec3::new(0.0, 0.0, 1.0); assert_eq!( cross(&x, &y), z, "Cross product returns a vector orthogonal to both inputs" ); assert_eq!( cross(&y, &x), Vec3::new(0.0, 0.0, -1.0), "Cross product is not commutative" ); assert_eq!( cross(&y, &z), x, "Cross product returns a vector orthogonal to both inputs" ); assert_eq!( cross(&z, &x), y, "Cross product returns a vector orthogonal to both inputs" ); } #[test] fn normalize_vectors() { let b = Vec3::new(1.0, 1.0, 0.0); normalize(b); normalize(&b); } }
use super::vector::*; use rand::distributions::{Distribution, UnitSphereSurface}; use rand::prelude::*; use std::borrow::Borrow; use std::ops::{Add, Mul, Sub}; pub fn mix<T, U>(a: T, b: T, t: U) -> T where T: Copy + Add<T, Output = T> + Sub<T, Output = T> + Mul<U, Output = T>, { a + (b - a) * t } pub fn random_in_unit_sphere() -> Vec3 { let mut rng = rand::thread_rng(); let sphere = UnitSphereSurface::new(); Vec3(sphere.sample(&mut rng)) } pub fn random_in_unit_disk() -> [f64; 2] { let mut rng = rand::thread_rng(); loop { let p = Vec3::new( 2.0 * rng.gen::<f64>() - 1.0, 2.0 * rng.gen::<f64>() - 1.0, 0.0, ); if p.length_squared() < 1.0 { return [p.0[0], p.0[1]]; } } } pub fn rand() -> f64 { let mut rng = rand::thread_rng(); rng.gen::<f64>() } pub fn normalize<T>(vector: T) -> Vec3 where T: Borrow<Vec3>, { let vector = vector.borrow(); let &[x, y, z] = &vector.0; Vec3::new(x, y, z) / vector.length() }
pub fn cross<T, U>(a: T, b: U) -> Vec3 where T: Borrow<Vec3>, U: Borrow<Vec3>, { let &[ax, ay, az] = &a.borrow().0; let &[bx, by, bz] = &b.borrow().0; Vec3::new(ay * bz - az * by, -(ax * bz - az * bx), ax * by - ay * bx) } pub fn reflect<T, U>(vector: T, normal: U) -> Vec3 where T: Borrow<Vec3>, U: Borrow<Vec3>, { let vector = vector.borrow(); let normal = normal.borrow(); vector - &(normal * 2.0 * dot(vector, normal)) } pub fn refract<T, U>(vector: T, normal: U, ni_over_nt: f64) -> Option<Vec3> where T: Borrow<Vec3>, U: Borrow<Vec3>, { let vector = vector.borrow(); let normal = normal.borrow(); let vector = normalize(vector); let dt = dot(&vector, normal); let discriminant = 1.0 - ni_over_nt * ni_over_nt * (1.0 - dt * dt); if discriminant > 0.0 { let refracted = (vector - normal * dt) * ni_over_nt - normal * f64::sqrt(discriminant); return Some(refracted); } None } pub fn schlick(cosine: f64, refractive_index: f64) -> f64 { let r0 = (1.0 - refractive_index) / (1.0 + refractive_index); let r0 = r0 * r0; r0 + (1.0 - r0) * f64::powi(1.0 - cosine, 5) } #[cfg(test)] mod tests { use super::*; use approx::assert_relative_eq; #[test] fn mixing_built_in_types() { assert_eq!(mix(1.0, 2.0, 0.5), 1.5); assert_eq!(mix(1.0, -1.0, 0.5), 0.0); assert_eq!(mix(10 as f32, 20 as f32, 0.5) as i32, 15); } #[test] fn mixing_vector_types() { assert_eq!( mix(Vec3::new(0.0, 1.0, 2.0), Vec3::new(2.0, 1.0, 0.0), 0.5), Vec3::new(1.0, 1.0, 1.0) ); } #[test] fn sphere_random() { assert_relative_eq!(random_in_unit_sphere().length_squared(), 1.0); } #[test] fn trigonometry_functions() { assert_relative_eq!(f64::sin(std::f64::consts::PI), 0.0); } #[test] fn dot_product() { let a = Vec3::new(1.0, 0.0, 0.0); let b = Vec3::new(0.5, 0.5, 0.5); assert_eq!( dot(&a, &b), 0.5, "Dot product returns a scalar measuring similarity of two vectors" ); assert_eq!( dot(&Vec3::new(0.0, -1.0, 0.0), &Vec3::new(0.0, 1.0, 0.0)), -1.0, "Dot product returns a scalar measuring similarity of two vectors" ); } #[test] fn 
cross_product() { let x = Vec3::new(1.0, 0.0, 0.0); let y = Vec3::new(0.0, 1.0, 0.0); let z = Vec3::new(0.0, 0.0, 1.0); assert_eq!( cross(&x, &y), z, "Cross product returns a vector orthogonal to both inputs" ); assert_eq!( cross(&y, &x), Vec3::new(0.0, 0.0, -1.0), "Cross product is not commutative" ); assert_eq!( cross(&y, &z), x, "Cross product returns a vector orthogonal to both inputs" ); assert_eq!( cross(&z, &x), y, "Cross product returns a vector orthogonal to both inputs" ); } #[test] fn normalize_vectors() { let b = Vec3::new(1.0, 1.0, 0.0); normalize(b); normalize(&b); } }
pub fn dot<T, U>(a: T, b: U) -> f64 where T: Borrow<Vec3>, U: Borrow<Vec3>, { let &[ax, ay, az] = &a.borrow().0; let &[bx, by, bz] = &b.borrow().0; ax * bx + ay * by + az * bz }
function_block-full_function
[ { "content": "pub fn trace_scene(\n\n world: &World,\n\n camera: &Camera,\n\n rows: u32,\n\n columns: u32,\n\n num_samples: u32,\n\n) -> Vec<Vec3> {\n\n let colors: Vec<Vec3> = (0..(rows * columns))\n\n .into_par_iter()\n\n .map(|index| {\n\n let x = index % columns;\n...
Rust
rust/xaynet-server/src/state_machine/phases/sum2.rs
little-dude/xain-fl
9c421c03bf1b98c7717593c0856fe856b6f338f7
use xaynet_core::{ mask::{Aggregation, MaskObject}, SumDict, SumParticipantPublicKey, }; use crate::state_machine::{ coordinator::MaskDict, phases::{Handler, Phase, PhaseName, PhaseState, Shared, StateError, Unmask}, requests::{StateMachineRequest, Sum2Request}, StateMachine, StateMachineError, }; #[cfg(feature = "metrics")] use crate::metrics; use tokio::time::{timeout, Duration}; #[derive(Debug)] pub struct Sum2 { sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, model_mask_dict: MaskDict, scalar_mask_dict: MaskDict, } #[cfg(test)] impl Sum2 { pub fn sum_dict(&self) -> &SumDict { &self.sum_dict } pub fn aggregation(&self) -> &Aggregation { &self.model_agg } pub fn mask_dict(&self) -> &MaskDict { &self.model_mask_dict } pub fn scalar_agg(&self) -> &Aggregation { &self.scalar_agg } pub fn scalar_mask_dict(&self) -> &MaskDict { &self.scalar_mask_dict } } #[async_trait] impl Phase for PhaseState<Sum2> where Self: Handler, { const NAME: PhaseName = PhaseName::Sum2; async fn run(&mut self) -> Result<(), StateError> { let min_time = self.shared.state.min_sum_time; debug!("in sum2 phase for a minimum of {} seconds", min_time); self.process_during(Duration::from_secs(min_time)).await?; let time_left = self.shared.state.max_sum_time - min_time; timeout(Duration::from_secs(time_left), self.process_until_enough()).await??; info!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); Ok(()) } fn next(self) -> Option<StateMachine> { Some( PhaseState::<Unmask>::new( self.shared, self.inner.model_agg, self.inner.scalar_agg, self.inner.model_mask_dict, self.inner.scalar_mask_dict, ) .into(), ) } } impl PhaseState<Sum2> where Self: Handler + Phase, { async fn process_until_enough(&mut self) -> Result<(), StateError> { while !self.has_enough_sum2s() { debug!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); self.process_single().await?; } Ok(()) } } impl Handler for 
PhaseState<Sum2> { fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), StateMachineError> { match req { StateMachineRequest::Sum2(sum2_req) => { metrics!( self.shared.io.metrics_tx, metrics::message::sum2::increment(self.shared.state.round_id, Self::NAME) ); self.handle_sum2(sum2_req) } _ => Err(StateMachineError::MessageRejected), } } } impl PhaseState<Sum2> { pub fn new( shared: Shared, sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, ) -> Self { info!("state transition"); Self { inner: Sum2 { sum_dict, model_agg, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }, shared, } } fn handle_sum2(&mut self, req: Sum2Request) -> Result<(), StateMachineError> { let Sum2Request { participant_pk, model_mask, scalar_mask, } = req; self.add_mask(&participant_pk, model_mask, scalar_mask) } fn add_mask( &mut self, pk: &SumParticipantPublicKey, model_mask: MaskObject, scalar_mask: MaskObject, ) -> Result<(), StateMachineError> { if self.inner.sum_dict.remove(pk).is_none() { return Err(StateMachineError::MessageRejected); } if let Some(count) = self.inner.model_mask_dict.get_mut(&model_mask) { *count += 1; } else { self.inner.model_mask_dict.insert(model_mask, 1); } if let Some(count) = self.inner.scalar_mask_dict.get_mut(&scalar_mask) { *count += 1; } else { self.inner.scalar_mask_dict.insert(scalar_mask, 1); } Ok(()) } fn mask_count(&self) -> usize { let sum1 = self.inner.model_mask_dict.values().sum(); let sum2: usize = self.inner.scalar_mask_dict.values().sum(); if sum1 != sum2 { warn!( "unexpected difference in mask sum count: {} vs {}", sum1, sum2 ); } sum1 } fn has_enough_sum2s(&self) -> bool { self.mask_count() >= self.shared.state.min_sum_count } } #[cfg(test)] mod test { use super::*; use crate::state_machine::{ events::Event, tests::{builder::StateMachineBuilder, utils}, }; use xaynet_core::{ common::RoundSeed, crypto::{ByteObject, EncryptKeyPair}, mask::{FromPrimitives, Model}, SumDict, }; 
#[tokio::test] pub async fn sum2_to_unmask() { let n_updaters = 1; let n_summers = 1; let seed = RoundSeed::generate(); let sum_ratio = 0.5; let update_ratio = 1.0; let coord_keys = EncryptKeyPair::generate(); let model_size = 4; let mut summer = utils::generate_summer(&seed, sum_ratio, update_ratio); let ephm_pk = utils::ephm_pk(&summer.compose_sum_message(coord_keys.public)); let mut sum_dict = SumDict::new(); sum_dict.insert(summer.pk, ephm_pk); let updater = utils::generate_updater(&seed, sum_ratio, update_ratio); let scalar = 1.0 / (n_updaters as f64 * update_ratio); let model = Model::from_primitives(vec![0; model_size].into_iter()).unwrap(); let msg = updater.compose_update_message(coord_keys.public, &sum_dict, scalar, model.clone()); let masked_model = utils::masked_model(&msg); let masked_scalar = utils::masked_scalar(&msg); let local_seed_dict = utils::local_seed_dict(&msg); let mut aggregation = Aggregation::new(utils::mask_settings().into(), model_size); aggregation.aggregate(masked_model.clone()); let mut scalar_agg = Aggregation::new(utils::mask_settings().into(), 1); scalar_agg.aggregate(masked_scalar.clone()); let sum2 = Sum2 { sum_dict, model_agg: aggregation, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }; let (state_machine, request_tx, events) = StateMachineBuilder::new() .with_seed(seed.clone()) .with_phase(sum2) .with_sum_ratio(sum_ratio) .with_update_ratio(update_ratio) .with_min_sum(n_summers) .with_min_update(n_updaters) .with_mask_config(utils::mask_settings().into()) .build(); assert!(state_machine.is_sum2()); let msg = summer .compose_sum2_message(coord_keys.public, &local_seed_dict, masked_model.data.len()) .unwrap(); let req = async { request_tx.msg(&msg).await.unwrap() }; let transition = async { state_machine.next().await.unwrap() }; let ((), state_machine) = tokio::join!(req, transition); assert!(state_machine.is_unmask()); let PhaseState { inner: unmask_state, .. 
} = state_machine.into_unmask_phase_state(); assert_eq!(unmask_state.mask_dict().len(), 1); let (mask, count) = unmask_state.mask_dict().iter().next().unwrap().clone(); assert_eq!(*count, 1); let unmasked_model = unmask_state .aggregation() .unwrap() .clone() .unmask(mask.clone()); assert_eq!(unmasked_model, model); assert_eq!( events.phase_listener().get_latest(), Event { round_id: 0, event: PhaseName::Sum2, } ); } }
use xaynet_core::{ mask::{Aggregation, MaskObject}, SumDict, SumParticipantPublicKey, }; use crate::state_machine::{ coordinator::MaskDict, phases::{Handler, Phase, PhaseName, PhaseState, Shared, StateError, Unmask}, requests::{StateMachineRequest, Sum2Request}, StateMachine, StateMachineError, }; #[cfg(feature = "metrics")] use crate::metrics; use tokio::time::{timeout, Duration}; #[derive(Debug)] pub struct Sum2 { sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, model_mask_dict: MaskDict, scalar_mask_dict: MaskDict, } #[cfg(test)] impl Sum2 { pub fn sum_dict(&self) -> &SumDict { &self.sum_dict } pub fn aggregation(&self) -> &Aggregation { &self.model_agg } pub fn mask_dict(&self) -> &MaskDict { &self.model_mask_dict } pub fn scalar_agg(&self) -> &Aggregation { &self.scalar_agg } pub fn scalar_mask_dict(&self) -> &MaskDict { &self.scalar_mask_dict } } #[async_trait] impl Phase for PhaseState<Sum2> where Self: Handler, { const NAME: PhaseName = PhaseName::Sum2; async fn run(&mut self) -> Result<(), StateError> { let min_time = self.shared.state.min_sum_time; debug!("in sum2 phase for a minimum of {} seconds", min_time); self.process_during(Duration::from_secs(min_time)).await?; let time_left = self.shared.state.max_sum_time - min_time; timeout(Duration::from_secs(time_left), self.process_until_enough()).await??; info!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); Ok(()) } fn next(self) -> Option<StateMachine> { Some( PhaseState::<Unmask>::new( self.shared, self.inner.model_agg, self.inner.scalar_agg, self.inner.model_mask_dict, self.inner.scalar_mask_dict, ) .into(), ) } } impl PhaseState<Sum2> where Self: Handler + Phase, { async fn process_until_enough(&mut self) -> Result<(), StateError> { while !self.has_enough_sum2s() { debug!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); self.process_single().await?; } Ok(()) } } impl Handler for 
PhaseState<Sum2> { fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), StateMachineError> {
} } impl PhaseState<Sum2> { pub fn new( shared: Shared, sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, ) -> Self { info!("state transition"); Self { inner: Sum2 { sum_dict, model_agg, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }, shared, } } fn handle_sum2(&mut self, req: Sum2Request) -> Result<(), StateMachineError> { let Sum2Request { participant_pk, model_mask, scalar_mask, } = req; self.add_mask(&participant_pk, model_mask, scalar_mask) } fn add_mask( &mut self, pk: &SumParticipantPublicKey, model_mask: MaskObject, scalar_mask: MaskObject, ) -> Result<(), StateMachineError> { if self.inner.sum_dict.remove(pk).is_none() { return Err(StateMachineError::MessageRejected); } if let Some(count) = self.inner.model_mask_dict.get_mut(&model_mask) { *count += 1; } else { self.inner.model_mask_dict.insert(model_mask, 1); } if let Some(count) = self.inner.scalar_mask_dict.get_mut(&scalar_mask) { *count += 1; } else { self.inner.scalar_mask_dict.insert(scalar_mask, 1); } Ok(()) } fn mask_count(&self) -> usize { let sum1 = self.inner.model_mask_dict.values().sum(); let sum2: usize = self.inner.scalar_mask_dict.values().sum(); if sum1 != sum2 { warn!( "unexpected difference in mask sum count: {} vs {}", sum1, sum2 ); } sum1 } fn has_enough_sum2s(&self) -> bool { self.mask_count() >= self.shared.state.min_sum_count } } #[cfg(test)] mod test { use super::*; use crate::state_machine::{ events::Event, tests::{builder::StateMachineBuilder, utils}, }; use xaynet_core::{ common::RoundSeed, crypto::{ByteObject, EncryptKeyPair}, mask::{FromPrimitives, Model}, SumDict, }; #[tokio::test] pub async fn sum2_to_unmask() { let n_updaters = 1; let n_summers = 1; let seed = RoundSeed::generate(); let sum_ratio = 0.5; let update_ratio = 1.0; let coord_keys = EncryptKeyPair::generate(); let model_size = 4; let mut summer = utils::generate_summer(&seed, sum_ratio, update_ratio); let ephm_pk = 
utils::ephm_pk(&summer.compose_sum_message(coord_keys.public)); let mut sum_dict = SumDict::new(); sum_dict.insert(summer.pk, ephm_pk); let updater = utils::generate_updater(&seed, sum_ratio, update_ratio); let scalar = 1.0 / (n_updaters as f64 * update_ratio); let model = Model::from_primitives(vec![0; model_size].into_iter()).unwrap(); let msg = updater.compose_update_message(coord_keys.public, &sum_dict, scalar, model.clone()); let masked_model = utils::masked_model(&msg); let masked_scalar = utils::masked_scalar(&msg); let local_seed_dict = utils::local_seed_dict(&msg); let mut aggregation = Aggregation::new(utils::mask_settings().into(), model_size); aggregation.aggregate(masked_model.clone()); let mut scalar_agg = Aggregation::new(utils::mask_settings().into(), 1); scalar_agg.aggregate(masked_scalar.clone()); let sum2 = Sum2 { sum_dict, model_agg: aggregation, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }; let (state_machine, request_tx, events) = StateMachineBuilder::new() .with_seed(seed.clone()) .with_phase(sum2) .with_sum_ratio(sum_ratio) .with_update_ratio(update_ratio) .with_min_sum(n_summers) .with_min_update(n_updaters) .with_mask_config(utils::mask_settings().into()) .build(); assert!(state_machine.is_sum2()); let msg = summer .compose_sum2_message(coord_keys.public, &local_seed_dict, masked_model.data.len()) .unwrap(); let req = async { request_tx.msg(&msg).await.unwrap() }; let transition = async { state_machine.next().await.unwrap() }; let ((), state_machine) = tokio::join!(req, transition); assert!(state_machine.is_unmask()); let PhaseState { inner: unmask_state, .. 
} = state_machine.into_unmask_phase_state(); assert_eq!(unmask_state.mask_dict().len(), 1); let (mask, count) = unmask_state.mask_dict().iter().next().unwrap().clone(); assert_eq!(*count, 1); let unmasked_model = unmask_state .aggregation() .unwrap() .clone() .unmask(mask.clone()); assert_eq!(unmasked_model, model); assert_eq!( events.phase_listener().get_latest(), Event { round_id: 0, event: PhaseName::Sum2, } ); } }
match req { StateMachineRequest::Sum2(sum2_req) => { metrics!( self.shared.io.metrics_tx, metrics::message::sum2::increment(self.shared.state.round_id, Self::NAME) ); self.handle_sum2(sum2_req) } _ => Err(StateMachineError::MessageRejected), }
if_condition
[ { "content": "/// A trait that must be implemented by a state to handle a request.\n\npub trait Handler {\n\n /// Handles a request.\n\n fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), StateMachineError>;\n\n}\n\n\n\n/// I/O interfaces.\n\n#[derive(Debug)]\n\npub struct IO {\n\n //...
Rust
src/ppu/ppu_memory.rs
bheisler/Corrosion
5ca2b3a03825c3d58623df774a8596de32b46812
use super::Color; use super::TilePattern; use cart::Cart; use memory::MemSegment; use std::cell::UnsafeCell; use std::rc::Rc; pub struct PPUMemory { cart: Rc<UnsafeCell<Cart>>, vram: Box<[u8; 0x0F00]>, palette: [Color; 0x20], } impl PPUMemory { pub fn new(cart: Rc<UnsafeCell<Cart>>) -> PPUMemory { PPUMemory { cart: cart, vram: Box::new([0u8; 0x0F00]), palette: [Color::from_bits_truncate(0); 0x20], } } } fn get_tile_addr(tile_id: u8, plane: u8, fine_y_scroll: u16, tile_table: u16) -> u16 { let mut tile_addr = 0u16; tile_addr |= fine_y_scroll; tile_addr |= plane as u16; tile_addr |= (tile_id as u16) << 4; tile_addr |= tile_table; tile_addr } impl PPUMemory { pub fn read_bypass_palette(&mut self, idx: u16) -> u8 { let idx = self.translate_vram_address(idx); self.vram[idx] } fn translate_vram_address(&self, idx: u16) -> usize { let idx = idx & 0x0FFF; let nametable_num = (idx / 0x0400) as usize; let idx_in_nametable = idx % 0x400; let table: &[u16; 4] = unsafe { (*self.cart.get()).get_mirroring_table() }; let translated = table[nametable_num] + idx_in_nametable; translated as usize % self.vram.len() } #[cfg(feature = "vectorize")] pub fn get_palettes(&self) -> (::simd::u8x16, ::simd::u8x16) { let palette_bytes: &[u8; 0x20] = unsafe { ::std::mem::transmute(&self.palette) }; ( ::simd::u8x16::load(palette_bytes, 0), ::simd::u8x16::load(palette_bytes, 16), ) } #[cfg(not(feature = "vectorize"))] pub fn read_palette(&self, idx: super::PaletteIndex) -> Color { self.palette[idx.to_index()] } pub fn read_tile_pattern( &mut self, tile_id: u8, fine_y_scroll: u16, tile_table: u16, ) -> TilePattern { let lo_addr = get_tile_addr(tile_id, 0, fine_y_scroll, tile_table); let hi_addr = get_tile_addr(tile_id, 8, fine_y_scroll, tile_table); TilePattern { lo: self.read(lo_addr), hi: self.read(hi_addr), } } #[allow(dead_code)] pub fn dump_nametable(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Nametable {}:", idx); self.print_columns(start_idx..(start_idx + 
0x3C0), 32) } #[allow(dead_code)] pub fn dump_attribute_table(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Attribute table {}:", idx); self.print_columns((start_idx + 0x3C0)..(start_idx + 0x400), 32); } } impl MemSegment for PPUMemory { fn read(&mut self, idx: u16) -> u8 { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_read(idx) }, 0x2000...0x3EFF => self.read_bypass_palette(idx), 0x3F00...0x3FFF => self.palette[(idx & 0x1F) as usize].bits(), x => invalid_address!(x), } } fn write(&mut self, idx: u16, val: u8) { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_write(idx, val) }, 0x2000...0x3EFF => { let idx = self.translate_vram_address(idx); self.vram[idx] = val; } 0x3F00...0x3FFF => { let val = Color::from_bits_truncate(val); let idx = (idx & 0x001F) as usize; match idx { 0x10 => self.palette[0x00] = val, 0x00 => self.palette[0x10] = val, 0x14 => self.palette[0x04] = val, 0x04 => self.palette[0x14] = val, 0x18 => self.palette[0x08] = val, 0x08 => self.palette[0x18] = val, 0x1C => self.palette[0x0C] = val, 0x0C => self.palette[0x1C] = val, _ => (), }; self.palette[idx] = val; } x => invalid_address!(x), } } } #[cfg(test)] mod tests { use cart::ScreenMode; use memory::MemSegment; use ppu::{Color, PPU}; use ppu::tests::*; #[test] fn ppu_can_read_write_palette() { let mut ppu = create_test_ppu(); ppu.reg.v = 0x3F00; ppu.write(0x2007, 12); ppu.reg.v = 0x3F00; assert_eq!(ppu.ppu_mem.palette[0], Color::from_bits_truncate(12)); ppu.reg.v = 0x3F01; ppu.write(0x2007, 212); ppu.reg.v = 0x3F01; assert_eq!(ppu.read(0x2007), 212 & 0x3F); } #[test] fn test_palette_mirroring() { let mut ppu = create_test_ppu(); let mirrors = [0x3F10, 0x3F14, 0x3F18, 0x3F1C]; let targets = [0x3F00, 0x3F04, 0x3F08, 0x3F0C]; for x in 0..4 { ppu.reg.v = targets[x]; ppu.write(0x2007, 12); ppu.reg.v = mirrors[x]; assert_eq!(ppu.read(0x2007), 12); ppu.reg.v = mirrors[x]; ppu.write(0x2007, 12); ppu.reg.v = targets[x]; assert_eq!(ppu.read(0x2007), 
12); } } fn to_nametable_idx(idx: u16, tbl: u16) -> u16 { 0x2000 + (0x0400 * tbl) + idx } fn assert_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0xFF, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0x61, ppu.ppu_mem.read(tbl1_idx)); } } fn assert_not_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0x00); ppu.ppu_mem.write(tbl2_idx, 0x00); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0x00, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0xFF, ppu.ppu_mem.read(tbl1_idx)); } } #[test] fn single_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::OneScreenLow); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 1, 2); assert_mirrored(&mut ppu, 2, 3); } #[test] fn four_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::FourScreen); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 1, 2); assert_not_mirrored(&mut ppu, 2, 3); } #[test] fn horizontal_mirroring_mirrors_horizontally() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Horizontal); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 2, 3); assert_not_mirrored(&mut ppu, 0, 2); assert_not_mirrored(&mut ppu, 1, 3); } #[test] fn vertical_mirroring_mirrors_vertically() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Vertical); 
assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 2, 3); assert_mirrored(&mut ppu, 0, 2); assert_mirrored(&mut ppu, 1, 3); } }
use super::Color; use super::TilePattern; use cart::Cart; use memory::MemSegment; use std::cell::UnsafeCell; use std::rc::Rc; pub struct PPUMemory { cart: Rc<UnsafeCell<Cart>>, vram: Box<[u8; 0x0F00]>, palette: [Color; 0x20], } impl PPUMemory { pub fn new(cart: Rc<UnsafeCell<Cart>>) -> PPUMemory { PPUMemory { cart: cart, vram: Box::new([0u8; 0x0F00]), palette: [Color::from_bits_truncate(0); 0x20], } } } fn get_tile_addr(tile_id: u8, plane: u8, fine_y_scroll: u16, tile_table: u16) -> u16 { let mut tile_addr = 0u16; tile_addr |= fine_y_scroll; tile_addr |= plane as u16; tile_addr |= (tile_id as u16) << 4; tile_addr |= tile_table; tile_addr } impl PPUMemory { pub fn read_bypass_palette(&mut self, idx: u16) -> u8 { let idx = self.translate_vram_address(idx); self.vram[idx] } fn translate_vram_address(&self, idx: u16) -> usize { let idx = idx & 0x0FFF; let nametable_num = (idx / 0x0400) as usize; let idx_in_nametable = idx % 0x400; let table: &[u16; 4] = unsafe { (*self.cart.get()).get_mirroring_table() }; let translated = table[nametable_num] + idx_in_nametable; translated as usize % self.vram.len() } #[cfg(feature = "vectorize")] pub fn get_palettes(&self) -> (::simd::u8x16, ::simd::u8x16) { let palette_bytes: &[u8; 0x20] = unsafe { ::std::mem::transmute(&self.palette) }; ( ::simd::u8x16::load(palette_bytes, 0), ::simd::u8x16::load(palette_bytes, 16), ) } #[cfg(not(feature = "vectorize"))] pub fn read_palette(&self, idx: super::PaletteIndex) -> Color { self.palette[idx.to_index()] } pub fn read_tile_pattern( &mut self, tile_id: u8, fine_y_scroll: u16, tile_table: u16, ) -> TilePattern { let lo_addr = get_tile_addr(tile_id, 0, fine_y_scroll, tile_table); let hi_addr = get_tile_addr(tile_id, 8, fine_y_scroll, tile_table); TilePattern { lo: self.read(lo_addr), hi: self.read(hi_addr), } } #[allow(dead_code)] pub fn dump_nametable(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Nametable {}:", idx); self.print_columns(start_idx..(start_idx + 
0x3C0), 32) } #[allow(dead_code)] pub fn dump_attribute_table(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Attribute table {}:", idx); self.print_columns((start_idx + 0x3C0)..(start_idx + 0x400), 32); } } impl MemSegment for PPUMemory { fn read(&mut self, idx: u16) -> u8 { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_read(idx) }, 0x2000...0x3EFF => self.read_bypass_palette(idx), 0x3F00...0x3FFF => self.palette[(idx & 0x1F) as usize].bits(), x => invalid_address!(x), } } fn write(&mut self, idx: u16, val: u8) { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_write(idx, val) }, 0x2000...0x3EFF => { let idx = self.translate_vram_address(idx); self.vram[idx] = val; } 0x3F00...0x3FFF => { let val = Color::from_bits_truncate(val); let idx = (idx & 0x001F) as usize; match idx { 0x10 => self.palette[0x00] = val, 0x00 => self.palette[0x10] = val, 0x14 => self.palette[0x04] = val, 0x04 => self.palette[0x14] = val, 0x18 => self.palette[0x08] = val, 0x08 => self.palette[0x18] = val, 0x1C => self.palette[0x0C] = val, 0x0C => self.palette[0x1C] = val, _ => (), }; self.palette[idx] = val; } x => invalid_address!(x), } } } #[cfg(test)] mod tests { use cart::ScreenMode; use memory::MemSegment; use ppu::{Color, PPU}; use ppu::tests::*; #[test] fn ppu_can_read_write_palette() { let mut ppu = create_test_ppu(); ppu.reg.v = 0x3F00; ppu.write(0x2007, 12); ppu.reg.v = 0x3F00; assert_eq!(ppu.ppu_mem.palette[0], Color::from_bits_truncate(12)); ppu.reg.v = 0x3F01; ppu.write(0x2007, 212); ppu.reg.v = 0x3F01; assert_eq!(ppu.read(0x2007), 212 & 0x3F); } #[test] fn test_palette_mirroring() { let mut ppu = create_test_ppu(); let mirrors = [0x3F10, 0x3F14, 0x3F18, 0x3F1C]; let targets = [0x3F00, 0x3F04, 0x3F08, 0x3F0C]; for x in 0..4 { ppu.reg.v = targets[x]; ppu.write(0x2007, 12); ppu.reg.v = mirrors[x]; assert_eq!(ppu.read(0x2007), 12); ppu.reg.v = mirrors[x]; ppu.write(0x2007, 12); ppu.reg.v = targets[x]; assert_eq!(ppu.read(0x2007), 
12); } } fn to_nametable_idx(idx: u16, tbl: u16) -> u16 { 0x2000 + (0x0400 * tbl) + idx } fn assert_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0xFF, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0x61, ppu.ppu_mem.read(tbl1_idx)); } } fn assert_not_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0x00); ppu.ppu_mem.write(tbl2_idx, 0x00); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0x00, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0xFF, ppu.ppu_mem.read(tbl1_idx)); } } #[test] fn single_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::OneScreenLow); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 1, 2); assert_mirrored(&mut ppu, 2, 3); } #[test]
#[test] fn horizontal_mirroring_mirrors_horizontally() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Horizontal); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 2, 3); assert_not_mirrored(&mut ppu, 0, 2); assert_not_mirrored(&mut ppu, 1, 3); } #[test] fn vertical_mirroring_mirrors_vertically() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Vertical); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 2, 3); assert_mirrored(&mut ppu, 0, 2); assert_mirrored(&mut ppu, 1, 3); } }
fn four_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::FourScreen); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 1, 2); assert_not_mirrored(&mut ppu, 2, 3); }
function_block-full_function
[ { "content": "#[test]\n\nfn blargg_ppu_test_vram_access() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(18, \"cb15f68f631c1d409beefb775bcff990286096fb\");\n\n\n\n run_system_test(\n\n 19,...
Rust
lib/src/api/tests.rs
untoldwind/t-rust-less
e4ecd17b624e303626b55350b06319e1632a5743
use crate::{ api::{ Identity, PasswordStrength, Secret, SecretAttachment, SecretEntry, SecretEntryMatch, SecretList, SecretListFilter, SecretProperties, SecretType, SecretVersion, SecretVersionRef, Status, ZeroizeDateTime, }, memguard::SecretBytes, }; use chrono::{TimeZone, Utc}; use quickcheck::{quickcheck, Arbitrary, Gen}; use std::collections::{BTreeMap, HashMap}; use super::{Command, PasswordGeneratorCharsParam, PasswordGeneratorParam, PasswordGeneratorWordsParam, StoreConfig}; use crate::memguard::ZeroizeBytesBuffer; impl Arbitrary for Identity { fn arbitrary(g: &mut Gen) -> Self { Identity { id: String::arbitrary(g), name: String::arbitrary(g), email: String::arbitrary(g), hidden: bool::arbitrary(g), } } } impl Arbitrary for ZeroizeDateTime { fn arbitrary(g: &mut Gen) -> Self { ZeroizeDateTime::from(Utc.timestamp_millis(u32::arbitrary(g) as i64)) } } impl Arbitrary for Status { fn arbitrary(g: &mut Gen) -> Self { Status { locked: bool::arbitrary(g), unlocked_by: Option::arbitrary(g), autolock_at: Option::arbitrary(g), version: String::arbitrary(g), autolock_timeout: u64::arbitrary(g), } } } impl Arbitrary for SecretType { fn arbitrary(g: &mut Gen) -> Self { match g.choose(&[0, 1, 2, 3, 4, 5]).unwrap() { 0 => SecretType::Login, 1 => SecretType::Note, 2 => SecretType::Licence, 3 => SecretType::Wlan, 4 => SecretType::Password, _ => SecretType::Other, } } } impl Arbitrary for SecretListFilter { fn arbitrary(g: &mut Gen) -> Self { SecretListFilter { url: Option::arbitrary(g), tag: Option::arbitrary(g), secret_type: Option::arbitrary(g), name: Option::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntry { fn arbitrary(g: &mut Gen) -> Self { SecretEntry { id: String::arbitrary(g), name: String::arbitrary(g), secret_type: SecretType::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntryMatch { fn arbitrary(g: &mut Gen) -> 
Self { SecretEntryMatch { entry: SecretEntry::arbitrary(g), name_score: isize::arbitrary(g), name_highlights: Vec::arbitrary(g), url_highlights: Vec::arbitrary(g), tags_highlights: Vec::arbitrary(g), } } } impl Arbitrary for SecretList { fn arbitrary(g: &mut Gen) -> Self { SecretList { all_tags: Vec::arbitrary(g), entries: vec![SecretEntryMatch::arbitrary(g)], } } } impl Arbitrary for SecretAttachment { fn arbitrary(g: &mut Gen) -> Self { SecretAttachment { name: String::arbitrary(g), mime_type: String::arbitrary(g), content: Vec::arbitrary(g), } } } impl Arbitrary for SecretProperties { fn arbitrary(g: &mut Gen) -> Self { let keys = Vec::<String>::arbitrary(g); let mut properties = BTreeMap::new(); for key in keys { properties.insert(key, String::arbitrary(g)); } SecretProperties::new(properties) } } impl Arbitrary for SecretVersion { fn arbitrary(g: &mut Gen) -> Self { SecretVersion { secret_id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), name: String::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), properties: SecretProperties::arbitrary(g), attachments: Vec::arbitrary(g), deleted: bool::arbitrary(g), recipients: Vec::arbitrary(g), } } } impl Arbitrary for SecretVersionRef { fn arbitrary(g: &mut Gen) -> Self { SecretVersionRef { block_id: String::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), } } } impl Arbitrary for PasswordStrength { fn arbitrary(g: &mut Gen) -> Self { let entropy = f64::arbitrary(g); let crack_time = f64::arbitrary(g); PasswordStrength { entropy: if entropy.is_finite() { entropy } else { 0.0 }, crack_time: if crack_time.is_finite() { crack_time } else { 0.0 }, crack_time_display: String::arbitrary(g), score: u8::arbitrary(g), } } } impl Arbitrary for Secret { fn arbitrary(g: &mut Gen) -> Self { Secret { id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), current: SecretVersion::arbitrary(g), current_block_id: String::arbitrary(g), versions: 
Vec::arbitrary(g), password_strengths: HashMap::arbitrary(g), } } } impl Arbitrary for StoreConfig { fn arbitrary(g: &mut Gen) -> Self { StoreConfig { name: String::arbitrary(g), store_url: String::arbitrary(g), remote_url: Option::arbitrary(g), sync_interval_sec: u32::arbitrary(g), client_id: String::arbitrary(g), autolock_timeout_secs: u64::arbitrary(g), default_identity_id: Option::arbitrary(g), } } } impl Arbitrary for PasswordGeneratorParam { fn arbitrary(g: &mut Gen) -> Self { match g.choose(&[0, 1]).unwrap() { 0 => PasswordGeneratorParam::Chars(PasswordGeneratorCharsParam { num_chars: u8::arbitrary(g), include_uppers: bool::arbitrary(g), include_numbers: bool::arbitrary(g), include_symbols: bool::arbitrary(g), require_upper: bool::arbitrary(g), require_number: bool::arbitrary(g), require_symbol: bool::arbitrary(g), exclude_similar: bool::arbitrary(g), exclude_ambiguous: bool::arbitrary(g), }), _ => PasswordGeneratorParam::Words(PasswordGeneratorWordsParam { num_words: u8::arbitrary(g), delim: char::arbitrary(g), }), } } } impl Arbitrary for SecretBytes { fn arbitrary(g: &mut Gen) -> Self { SecretBytes::from(Vec::arbitrary(g)) } } impl Arbitrary for Command { fn arbitrary(g: &mut Gen) -> Self { match g .choose(&[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, ]) .unwrap() { 0 => Command::ListStores, 1 => Command::UpsertStoreConfig(StoreConfig::arbitrary(g)), 2 => Command::DeleteStoreConfig(String::arbitrary(g)), 3 => Command::GetDefaultStore, 4 => Command::SetDefaultStore(String::arbitrary(g)), 5 => Command::GenerateId, 6 => Command::GeneratePassword(PasswordGeneratorParam::arbitrary(g)), 7 => Command::PollEvents(u64::arbitrary(g)), 8 => Command::Status(String::arbitrary(g)), 9 => Command::Lock(String::arbitrary(g)), 10 => Command::Unlock { store_name: String::arbitrary(g), identity_id: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 11 => Command::Identities(String::arbitrary(g)), 12 => 
Command::AddIdentity { store_name: String::arbitrary(g), identity: Identity::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 13 => Command::ChangePassphrase { store_name: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 14 => Command::List { store_name: String::arbitrary(g), filter: SecretListFilter::arbitrary(g), }, 15 => Command::UpdateIndex(String::arbitrary(g)), 16 => Command::Add { store_name: String::arbitrary(g), secret_version: SecretVersion::arbitrary(g), }, 17 => Command::Get { store_name: String::arbitrary(g), secret_id: String::arbitrary(g), }, 18 => Command::GetVersion { store_name: String::arbitrary(g), block_id: String::arbitrary(g), }, 19 => Command::SecretToClipboard { store_name: String::arbitrary(g), block_id: String::arbitrary(g), properties: Vec::arbitrary(g), }, 20 => Command::ClipboardIsDone, 21 => Command::ClipboardCurrentlyProviding, 22 => Command::ClipboardProvideNext, _ => Command::ClipboardDestroy, } } } #[test] fn identity_capnp_serialization() { fn check_serialize(identity: Identity) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &identity).unwrap(); let deserialized: Identity = rmp_serde::from_read_ref(&buf).unwrap(); identity == deserialized } quickcheck(check_serialize as fn(Identity) -> bool); } #[test] fn status_capnp_serialization() { fn check_serialize(status: Status) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &status).unwrap(); let deserialized: Status = rmp_serde::from_read_ref(&buf).unwrap(); status == deserialized } quickcheck(check_serialize as fn(Status) -> bool); } #[test] fn secret_list_filter_capnp_serialization() { fn check_serialize(filter: SecretListFilter) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &filter).unwrap(); let deserialized: SecretListFilter = rmp_serde::from_read_ref(&buf).unwrap(); filter == 
deserialized } quickcheck(check_serialize as fn(SecretListFilter) -> bool); } #[test] fn secret_list_capnp_serialization() { fn check_serialize(list: SecretList) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &list).unwrap(); let deserialized: SecretList = rmp_serde::from_read_ref(&buf).unwrap(); list == deserialized } quickcheck(check_serialize as fn(SecretList) -> bool); } #[test] fn secret_version_capnp_serialization() { fn check_serialize(secret_version: SecretVersion) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret_version).unwrap(); let deserialized: SecretVersion = rmp_serde::from_read_ref(&buf).unwrap(); secret_version == deserialized } quickcheck(check_serialize as fn(SecretVersion) -> bool); } #[test] fn password_strength_capnp_serialization() { fn check_serialize(password_strength: PasswordStrength) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &password_strength).unwrap(); let deserialized: PasswordStrength = rmp_serde::from_read_ref(&buf).unwrap(); password_strength == deserialized } quickcheck(check_serialize as fn(PasswordStrength) -> bool); } #[test] fn secret_capnp_serialization() { fn check_serialize(secret: Secret) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret).unwrap(); let deserialized: Secret = rmp_serde::from_read_ref(&buf).unwrap(); secret == deserialized } quickcheck(check_serialize as fn(Secret) -> bool); } #[test] fn command_serialization() { fn check_serialize(command: Command) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &command).unwrap(); let deserialized: Command = rmp_serde::from_read_ref(&buf).unwrap(); command == deserialized } quickcheck(check_serialize as fn(Command) -> bool); }
use crate::{ api::{ Identity, PasswordStrength, Secret, SecretAttachment, SecretEntry, SecretEntryMatch, SecretList, SecretListFilter, SecretProperties, SecretType, SecretVersion, SecretVersionRef, Status, ZeroizeDateTime, }, memguard::SecretBytes, }; use chrono::{TimeZone, Utc}; use quickcheck::{quickcheck, Arbitrary, Gen}; use std::collections::{BTreeMap, HashMap}; use super::{Command, PasswordGeneratorCharsParam, PasswordGeneratorParam, PasswordGeneratorWordsParam, StoreConfig}; use crate::memguard::ZeroizeBytesBuffer; impl Arbitrary for Identity { fn arbitrary(g: &mut Gen) -> Self { Identity { id: String::arbitrary(g), name: String::arbitrary(g), email: String::arbitrary(g), hidden: bool::arbitrary(g), } } } impl Arbitrary for ZeroizeDateTime { fn arbitrary(g: &mut Gen) -> Self { ZeroizeDateTime::from(Utc.timestamp_millis(u32::arbitrary(g) as i64)) } } impl Arbitrary for Status { fn arbitrary(g: &mut Gen) -> Self { Status { locked: bool::arbitrary(g), unlocked_by: Option::arbitrary(g), autolock_at: Option::arbitrary(g), version: String::arbitrary(g), autolock_timeout: u64::arbitrary(g), } } } impl Arbitrary for SecretType { fn arbitrary(g: &mut Gen) -> Self {
} } impl Arbitrary for SecretListFilter { fn arbitrary(g: &mut Gen) -> Self { SecretListFilter { url: Option::arbitrary(g), tag: Option::arbitrary(g), secret_type: Option::arbitrary(g), name: Option::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntry { fn arbitrary(g: &mut Gen) -> Self { SecretEntry { id: String::arbitrary(g), name: String::arbitrary(g), secret_type: SecretType::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntryMatch { fn arbitrary(g: &mut Gen) -> Self { SecretEntryMatch { entry: SecretEntry::arbitrary(g), name_score: isize::arbitrary(g), name_highlights: Vec::arbitrary(g), url_highlights: Vec::arbitrary(g), tags_highlights: Vec::arbitrary(g), } } } impl Arbitrary for SecretList { fn arbitrary(g: &mut Gen) -> Self { SecretList { all_tags: Vec::arbitrary(g), entries: vec![SecretEntryMatch::arbitrary(g)], } } } impl Arbitrary for SecretAttachment { fn arbitrary(g: &mut Gen) -> Self { SecretAttachment { name: String::arbitrary(g), mime_type: String::arbitrary(g), content: Vec::arbitrary(g), } } } impl Arbitrary for SecretProperties { fn arbitrary(g: &mut Gen) -> Self { let keys = Vec::<String>::arbitrary(g); let mut properties = BTreeMap::new(); for key in keys { properties.insert(key, String::arbitrary(g)); } SecretProperties::new(properties) } } impl Arbitrary for SecretVersion { fn arbitrary(g: &mut Gen) -> Self { SecretVersion { secret_id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), name: String::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), properties: SecretProperties::arbitrary(g), attachments: Vec::arbitrary(g), deleted: bool::arbitrary(g), recipients: Vec::arbitrary(g), } } } impl Arbitrary for SecretVersionRef { fn arbitrary(g: &mut Gen) -> Self { SecretVersionRef { block_id: String::arbitrary(g), timestamp: 
ZeroizeDateTime::arbitrary(g), } } } impl Arbitrary for PasswordStrength { fn arbitrary(g: &mut Gen) -> Self { let entropy = f64::arbitrary(g); let crack_time = f64::arbitrary(g); PasswordStrength { entropy: if entropy.is_finite() { entropy } else { 0.0 }, crack_time: if crack_time.is_finite() { crack_time } else { 0.0 }, crack_time_display: String::arbitrary(g), score: u8::arbitrary(g), } } } impl Arbitrary for Secret { fn arbitrary(g: &mut Gen) -> Self { Secret { id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), current: SecretVersion::arbitrary(g), current_block_id: String::arbitrary(g), versions: Vec::arbitrary(g), password_strengths: HashMap::arbitrary(g), } } } impl Arbitrary for StoreConfig { fn arbitrary(g: &mut Gen) -> Self { StoreConfig { name: String::arbitrary(g), store_url: String::arbitrary(g), remote_url: Option::arbitrary(g), sync_interval_sec: u32::arbitrary(g), client_id: String::arbitrary(g), autolock_timeout_secs: u64::arbitrary(g), default_identity_id: Option::arbitrary(g), } } } impl Arbitrary for PasswordGeneratorParam { fn arbitrary(g: &mut Gen) -> Self { match g.choose(&[0, 1]).unwrap() { 0 => PasswordGeneratorParam::Chars(PasswordGeneratorCharsParam { num_chars: u8::arbitrary(g), include_uppers: bool::arbitrary(g), include_numbers: bool::arbitrary(g), include_symbols: bool::arbitrary(g), require_upper: bool::arbitrary(g), require_number: bool::arbitrary(g), require_symbol: bool::arbitrary(g), exclude_similar: bool::arbitrary(g), exclude_ambiguous: bool::arbitrary(g), }), _ => PasswordGeneratorParam::Words(PasswordGeneratorWordsParam { num_words: u8::arbitrary(g), delim: char::arbitrary(g), }), } } } impl Arbitrary for SecretBytes { fn arbitrary(g: &mut Gen) -> Self { SecretBytes::from(Vec::arbitrary(g)) } } impl Arbitrary for Command { fn arbitrary(g: &mut Gen) -> Self { match g .choose(&[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, ]) .unwrap() { 0 => Command::ListStores, 1 => 
Command::UpsertStoreConfig(StoreConfig::arbitrary(g)), 2 => Command::DeleteStoreConfig(String::arbitrary(g)), 3 => Command::GetDefaultStore, 4 => Command::SetDefaultStore(String::arbitrary(g)), 5 => Command::GenerateId, 6 => Command::GeneratePassword(PasswordGeneratorParam::arbitrary(g)), 7 => Command::PollEvents(u64::arbitrary(g)), 8 => Command::Status(String::arbitrary(g)), 9 => Command::Lock(String::arbitrary(g)), 10 => Command::Unlock { store_name: String::arbitrary(g), identity_id: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 11 => Command::Identities(String::arbitrary(g)), 12 => Command::AddIdentity { store_name: String::arbitrary(g), identity: Identity::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 13 => Command::ChangePassphrase { store_name: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 14 => Command::List { store_name: String::arbitrary(g), filter: SecretListFilter::arbitrary(g), }, 15 => Command::UpdateIndex(String::arbitrary(g)), 16 => Command::Add { store_name: String::arbitrary(g), secret_version: SecretVersion::arbitrary(g), }, 17 => Command::Get { store_name: String::arbitrary(g), secret_id: String::arbitrary(g), }, 18 => Command::GetVersion { store_name: String::arbitrary(g), block_id: String::arbitrary(g), }, 19 => Command::SecretToClipboard { store_name: String::arbitrary(g), block_id: String::arbitrary(g), properties: Vec::arbitrary(g), }, 20 => Command::ClipboardIsDone, 21 => Command::ClipboardCurrentlyProviding, 22 => Command::ClipboardProvideNext, _ => Command::ClipboardDestroy, } } } #[test] fn identity_capnp_serialization() { fn check_serialize(identity: Identity) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &identity).unwrap(); let deserialized: Identity = rmp_serde::from_read_ref(&buf).unwrap(); identity == deserialized } quickcheck(check_serialize as fn(Identity) -> bool); } #[test] fn status_capnp_serialization() { fn 
check_serialize(status: Status) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &status).unwrap(); let deserialized: Status = rmp_serde::from_read_ref(&buf).unwrap(); status == deserialized } quickcheck(check_serialize as fn(Status) -> bool); } #[test] fn secret_list_filter_capnp_serialization() { fn check_serialize(filter: SecretListFilter) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &filter).unwrap(); let deserialized: SecretListFilter = rmp_serde::from_read_ref(&buf).unwrap(); filter == deserialized } quickcheck(check_serialize as fn(SecretListFilter) -> bool); } #[test] fn secret_list_capnp_serialization() { fn check_serialize(list: SecretList) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &list).unwrap(); let deserialized: SecretList = rmp_serde::from_read_ref(&buf).unwrap(); list == deserialized } quickcheck(check_serialize as fn(SecretList) -> bool); } #[test] fn secret_version_capnp_serialization() { fn check_serialize(secret_version: SecretVersion) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret_version).unwrap(); let deserialized: SecretVersion = rmp_serde::from_read_ref(&buf).unwrap(); secret_version == deserialized } quickcheck(check_serialize as fn(SecretVersion) -> bool); } #[test] fn password_strength_capnp_serialization() { fn check_serialize(password_strength: PasswordStrength) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &password_strength).unwrap(); let deserialized: PasswordStrength = rmp_serde::from_read_ref(&buf).unwrap(); password_strength == deserialized } quickcheck(check_serialize as fn(PasswordStrength) -> bool); } #[test] fn secret_capnp_serialization() { fn check_serialize(secret: Secret) -> bool { let mut buf = 
ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret).unwrap(); let deserialized: Secret = rmp_serde::from_read_ref(&buf).unwrap(); secret == deserialized } quickcheck(check_serialize as fn(Secret) -> bool); } #[test] fn command_serialization() { fn check_serialize(command: Command) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &command).unwrap(); let deserialized: Command = rmp_serde::from_read_ref(&buf).unwrap(); command == deserialized } quickcheck(check_serialize as fn(Command) -> bool); }
match g.choose(&[0, 1, 2, 3, 4, 5]).unwrap() { 0 => SecretType::Login, 1 => SecretType::Note, 2 => SecretType::Licence, 3 => SecretType::Wlan, 4 => SecretType::Password, _ => SecretType::Other, }
if_condition
[ { "content": "fn secret_to_clipboard(properties: &'static [&'static str]) -> impl Fn(&mut Cursive) {\n\n move |s: &mut Cursive| {\n\n let maybe_secret = {\n\n let secret_view = s.find_name::<SecretView>(\"secret_view\").unwrap();\n\n secret_view.current_secret()\n\n };\n\n let state = s.user...